diff --git a/DEPS b/DEPS index d942506..da3067c 100644 --- a/DEPS +++ b/DEPS
@@ -229,7 +229,7 @@ # # CQ_INCLUDE_TRYBOTS=luci.chrome.try:lacros-amd64-generic-chrome-skylab # CQ_INCLUDE_TRYBOTS=luci.chrome.try:lacros-arm-generic-chrome-skylab - 'lacros_sdk_version': '15277.0.0', + 'lacros_sdk_version': '15289.0.0', # Generate location tag metadata to include in tests result data uploaded # to ResultDB. This isn't needed on some configs and the tool that generates @@ -304,7 +304,7 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling Skia # and whatever else without interference from each other. - 'skia_revision': '3939e68c4b4d51326803b8037bab41586b4bfdb5', + 'skia_revision': '656bb22387acdba4ccd9f6a537c3637d53c69db8', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling V8 # and whatever else without interference from each other. @@ -312,7 +312,7 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling ANGLE # and whatever else without interference from each other. - 'angle_revision': '6a8cfc6c3c52b6887501018045082b94a8fd2f07', + 'angle_revision': 'fe704fb573382accd3e7bc7032f1ea310f6f70fd', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling SwiftShader # and whatever else without interference from each other. @@ -331,7 +331,7 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling Fuchsia sdk # and whatever else without interference from each other. - 'fuchsia_version': 'version:11.20221227.1.1', + 'fuchsia_version': 'version:11.20221227.3.1', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling google-toolbox-for-mac # and whatever else without interference from each other. @@ -375,7 +375,7 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling catapult # and whatever else without interference from each other. - 'catapult_revision': '27863274a2f25b92973c10f29b3c4e476fd01a34', + 'catapult_revision': '816a551043358dcb0a5979d2c3af739948a9217d', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling libFuzzer # and whatever else without interference from each other. @@ -383,7 +383,7 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling devtools-frontend # and whatever else without interference from each other. - 'devtools_frontend_revision': 'ffa1d283922b80453ef7f8ec3fe16ca34ad790e7', + 'devtools_frontend_revision': 'f440220a76f4439a611f109244c71dae4ceece13', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling libprotobuf-mutator # and whatever else without interference from each other. @@ -419,7 +419,7 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling feed # and whatever else without interference from each other. - 'dawn_revision': '379e90ec8fe26271a7b19f7fb83774acdd925fb7', + 'dawn_revision': 'd441669dd135c72849358787dd78f3c17f43b17e', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling feed # and whatever else without interference from each other. @@ -455,7 +455,7 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling feed # and whatever else without interference from each other. 
- 'cros_components_revision': '6b50ee159289ef4b8109c3cd5f1906693b9ce54b', + 'cros_components_revision': 'f0cf6011fac503d6b972f6271642a4a3301ae5b0', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling feed # and whatever else without interference from each other. @@ -776,7 +776,7 @@ 'src/clank': { 'url': 'https://chrome-internal.googlesource.com/clank/internal/apps.git' + '@' + - '909fadddce8c5e06840449b50b9dffb7ed2a915b', + '94d8579312a2b108b6e5adf9fa0116f6b6f1ab77', 'condition': 'checkout_android and checkout_src_internal', }, @@ -965,7 +965,7 @@ 'packages': [ { 'package': 'chromium/third_party/androidx', - 'version': 'c55GFz14gG6wR1gKa8YpAuzIVtEpVr2_uNL9jqvW6RMC', + 'version': 'Sv8vb85QBWGaUnicLdmWSV1LCbWAL85e6jK9O9seND0C', }, ], 'condition': 'checkout_android', @@ -1210,13 +1210,13 @@ }, 'src/third_party/depot_tools': - Var('chromium_git') + '/chromium/tools/depot_tools.git' + '@' + '0b96058844728db8040a7348cc4c61fde453401a', + Var('chromium_git') + '/chromium/tools/depot_tools.git' + '@' + '03af44a5163e9448e375a6bbe7bef1fc0e2bb205', 'src/third_party/devtools-frontend/src': Var('chromium_git') + '/devtools/devtools-frontend' + '@' + Var('devtools_frontend_revision'), 'src/third_party/devtools-frontend-internal': { - 'url': 'https://chrome-internal.googlesource.com/devtools/devtools-internal.git' + '@' + '1d2fc2fb53fb6b78807d4ca9f6c7e908674d8b92', + 'url': 'https://chrome-internal.googlesource.com/devtools/devtools-internal.git' + '@' + 'a7490e9dfc314b8f32c4cfdc96ca731eabb2887d', 'condition': 'checkout_src_internal', }, @@ -1371,7 +1371,7 @@ Var('chromium_git') + '/chromium/deps/hunspell_dictionaries.git' + '@' + '41cdffd71c9948f63c7ad36e1fb0ff519aa7a37e', 'src/third_party/icu': - Var('chromium_git') + '/chromium/deps/icu.git' + '@' + '1b7d391f0528fb3a4976b7541b387ee04f915f83', + Var('chromium_git') + '/chromium/deps/icu.git' + '@' + '2c51e5cc7e0a06cd4cd7cb2ddbac445af9b475ba', 'src/third_party/icu4j': { 'packages': [ @@ -1830,7 +1830,7 @@ Var('chromium_git') + '/external/github.com/gpuweb/cts.git' + '@' + '6a7030e24f882e023c6dbbb47a206d7105e9a9db', 'src/third_party/webrtc': - Var('webrtc_git') + '/src.git' + '@' + 'f52e0152397cda785ff311394d8275f210bd5a20', + Var('webrtc_git') + '/src.git' + '@' + 'ac13324baa3403e36b3a5e350b3c2405203a51c9', # Wuffs' canonical repository is at github.com/google/wuffs, but we use # Skia's mirror of Wuffs, the same as in upstream Skia's DEPS file.
diff --git a/PRESUBMIT.py b/PRESUBMIT.py index e38d831..5593ad5 100644 --- a/PRESUBMIT.py +++ b/PRESUBMIT.py
@@ -4841,14 +4841,20 @@ # to set the limit too low, but the upper limit for "normal" large # files seems to be 1-2 MB, with a handful around 5-8 MB, so # anything over 20 MB is exceptional. - TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024 # 10 MB + TOO_LARGE_FILE_SIZE_LIMIT = 20 * 1024 * 1024 + # Special exemption for a file that is slightly over the limit. + SPECIAL_FILE_SIZE_LIMIT = 25 * 1024 * 1024 + SPECIAL_FILE_NAME = 'transport_security_state_static.json' too_large_files = [] for f in input_api.AffectedFiles(): # Check both added and modified files (but not deleted files). if f.Action() in ('A', 'M'): size = input_api.os_path.getsize(f.AbsoluteLocalPath()) - if size > TOO_LARGE_FILE_SIZE_LIMIT: + limit = (SPECIAL_FILE_SIZE_LIMIT if + f.AbsoluteLocalPath().endswith(SPECIAL_FILE_NAME) else + TOO_LARGE_FILE_SIZE_LIMIT) + if size > limit: too_large_files.append("%s: %d bytes" % (f.LocalPath(), size)) if too_large_files:
diff --git a/ash/ash_strings.grd b/ash/ash_strings.grd index 67c0824..0b7894ca 100644 --- a/ash/ash_strings.grd +++ b/ash/ash_strings.grd
@@ -1278,6 +1278,9 @@ <message name="IDS_ASH_STATUS_TRAY_NOTIFICATIONS_LABEL" desc="The label text shown under do not disturb button in system tray bubble. [CHAR_LIMIT=14]"> Notifications </message> + <message name="IDS_ASH_STATUS_TRAY_DO_NOT_DISTURB" desc="The label text shown in the Do not disturb feature tile in quick settings bubble. [CHAR_LIMIT=14]"> + Do not disturb + </message> <message name="IDS_ASH_STATUS_TRAY_NOTIFICATIONS_TOGGLE_TOOLTIP" desc="The tooltip text of the button that toggles Do-not-disturb state for notifications"> Toggle Do not disturb. <ph name="STATE_TEXT">$1<ex>Do not disturb is on.</ex></ph> </message>
diff --git a/ash/ash_strings_grd/IDS_ASH_STATUS_TRAY_DO_NOT_DISTURB.png.sha1 b/ash/ash_strings_grd/IDS_ASH_STATUS_TRAY_DO_NOT_DISTURB.png.sha1 new file mode 100644 index 0000000..5e36f99 --- /dev/null +++ b/ash/ash_strings_grd/IDS_ASH_STATUS_TRAY_DO_NOT_DISTURB.png.sha1
@@ -0,0 +1 @@ +d2dcc55e3178e964ada6ae1fb423dd9824b9eb6b \ No newline at end of file
diff --git a/ash/capture_mode/capture_mode_constants.h b/ash/capture_mode/capture_mode_constants.h index 6af49ed9..a3d76d94 100644 --- a/ash/capture_mode/capture_mode_constants.h +++ b/ash/capture_mode/capture_mode_constants.h
@@ -100,6 +100,10 @@ constexpr base::TimeDelta kDelayToHideKeyComboDuration = base::Milliseconds(1500); +// The radius of the highlight layer generated on mouse or touch event when the +// demo tools feature is enabled. +constexpr int kHighlightLayerRadius = 36; + } // namespace ash::capture_mode #endif // ASH_CAPTURE_MODE_CAPTURE_MODE_CONSTANTS_H_
diff --git a/ash/capture_mode/capture_mode_demo_tools_controller.cc b/ash/capture_mode/capture_mode_demo_tools_controller.cc index 818f46d..52eb93a2 100644 --- a/ash/capture_mode/capture_mode_demo_tools_controller.cc +++ b/ash/capture_mode/capture_mode_demo_tools_controller.cc
@@ -7,7 +7,6 @@ #include <memory> #include "ash/capture_mode/capture_mode_constants.h" -#include "ash/capture_mode/capture_mode_controller.h" #include "ash/capture_mode/capture_mode_util.h" #include "ash/capture_mode/key_combo_view.h" #include "ash/capture_mode/pointer_highlight_layer.h" @@ -18,6 +17,7 @@ #include "base/containers/contains.h" #include "base/containers/cxx20_erase.h" #include "base/containers/unique_ptr_adapters.h" +#include "base/notreached.h" #include "ui/base/ime/input_method.h" #include "ui/base/ime/text_input_client.h" #include "ui/base/ime/text_input_type.h" @@ -26,6 +26,7 @@ #include "ui/events/event.h" #include "ui/events/event_constants.h" #include "ui/events/keycodes/keyboard_codes_posix.h" +#include "ui/events/pointer_details.h" #include "ui/events/types/event_type.h" #include "ui/gfx/geometry/rect.h" #include "ui/views/animation/animation_builder.h" @@ -38,7 +39,10 @@ constexpr float kHighlightLayerFinalOpacity = 0.f; constexpr float kHighlightLayerInitialScale = 0.1f; constexpr float kHighlightLayerFinalScale = 1.0f; -constexpr base::TimeDelta kScaleUpDuration = base::Milliseconds(1500); +constexpr float kTouchHighlightLayerTouchDownScale = 56.f / 72; +constexpr base::TimeDelta kMouseScaleUpDuration = base::Milliseconds(1500); +constexpr base::TimeDelta kTouchDownScaleUpDuration = base::Milliseconds(200); +constexpr base::TimeDelta kTouchUpScaleUpDuration = base::Milliseconds(1000); int GetModifierFlagForKeyCode(ui::KeyboardCode key_code) { switch (key_code) { @@ -159,7 +163,7 @@ .SetPreemptionStrategy( ui::LayerAnimator::IMMEDIATELY_ANIMATE_TO_NEW_TARGET) .Once() - .SetDuration(kScaleUpDuration) + .SetDuration(kMouseScaleUpDuration) .SetTransform(highlight_layer, scale_up_transform, gfx::Tween::ACCEL_0_40_DECEL_100) .SetOpacity(highlight_layer, kHighlightLayerFinalOpacity, @@ -170,6 +174,29 @@ demo_tools_widget_->SetBounds(CalculateBounds()); } +void CaptureModeDemoToolsController::OnTouchEvent( + ui::EventType event_type, + ui::PointerId pointer_id, + const gfx::PointF& event_location_in_window) { + switch (event_type) { + case ui::ET_TOUCH_PRESSED: { + OnTouchDown(pointer_id, event_location_in_window); + return; + } + case ui::ET_TOUCH_RELEASED: + case ui::ET_TOUCH_CANCELLED: { + OnTouchUp(pointer_id, event_location_in_window); + return; + } + case ui::ET_TOUCH_MOVED: { + OnTouchDragged(pointer_id, event_location_in_window); + return; + } + default: + NOTREACHED(); + } +} + void CaptureModeDemoToolsController::OnTextInputStateChanged( const ui::TextInputClient* client) { UpdateTextInputType(client); @@ -276,4 +303,71 @@ std::move(on_mouse_highlight_animation_ended_callback_for_test_).Run(); } +void CaptureModeDemoToolsController::OnTouchDown( + const ui::PointerId& pointer_id, + const gfx::PointF& event_location_in_window) { + std::unique_ptr<PointerHighlightLayer> touch_highlight_layer = + std::make_unique<PointerHighlightLayer>( + event_location_in_window, + video_recording_watcher_->GetOnCaptureSurfaceWidgetParentWindow() + ->layer()); + ui::Layer* highlight_layer = touch_highlight_layer->layer(); + highlight_layer->SetTransform(capture_mode_util::GetScaleTransformAboutCenter( + highlight_layer, kHighlightLayerInitialScale)); + touch_pointer_id_to_highlight_layer_map_.emplace( + pointer_id, std::move(touch_highlight_layer)); + + const gfx::Transform scale_up_transform = + capture_mode_util::GetScaleTransformAboutCenter( + highlight_layer, kTouchHighlightLayerTouchDownScale); + + views::AnimationBuilder() + .SetPreemptionStrategy( + 
ui::LayerAnimator::IMMEDIATELY_ANIMATE_TO_NEW_TARGET) + .Once() + .SetDuration(kTouchDownScaleUpDuration) + .SetTransform(highlight_layer, scale_up_transform, + gfx::Tween::ACCEL_0_40_DECEL_100); +} + +void CaptureModeDemoToolsController::OnTouchUp( + const ui::PointerId& pointer_id, + const gfx::PointF& event_location_in_window) { + auto iter = touch_pointer_id_to_highlight_layer_map_.find(pointer_id); + DCHECK(iter != touch_pointer_id_to_highlight_layer_map_.end()); + + std::unique_ptr<PointerHighlightLayer> touch_highlight_layer = + std::move(iter->second); + touch_pointer_id_to_highlight_layer_map_.erase(pointer_id); + + ui::Layer* highlight_layer = touch_highlight_layer->layer(); + DCHECK(highlight_layer); + + const gfx::Transform scale_up_transform = + capture_mode_util::GetScaleTransformAboutCenter( + highlight_layer, kHighlightLayerFinalScale); + + views::AnimationBuilder() + .OnEnded(base::BindOnce( + [](std::unique_ptr<PointerHighlightLayer> touch_highlight_layer) {}, + std::move(touch_highlight_layer))) + .SetPreemptionStrategy( + ui::LayerAnimator::IMMEDIATELY_ANIMATE_TO_NEW_TARGET) + .Once() + .SetDuration(kTouchUpScaleUpDuration) + .SetTransform(highlight_layer, scale_up_transform, + gfx::Tween::ACCEL_0_40_DECEL_100) + .SetOpacity(highlight_layer, kHighlightLayerFinalOpacity, + gfx::Tween::ACCEL_0_80_DECEL_80); +} + +void CaptureModeDemoToolsController::OnTouchDragged( + const ui::PointerId& pointer_id, + const gfx::PointF& event_location_in_window) { + auto* highlight_layer = + touch_pointer_id_to_highlight_layer_map_[pointer_id].get(); + DCHECK(highlight_layer); + highlight_layer->CenterAroundPoint(event_location_in_window); +} + } // namespace ash \ No newline at end of file
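The controller above tracks one PointerHighlightLayer per active touch point, keyed by ui::PointerId: ET_TOUCH_PRESSED creates the layer and starts the touch-down scale animation, ET_TOUCH_MOVED re-centers it, and ET_TOUCH_RELEASED/ET_TOUCH_CANCELLED hand ownership to the fade-out animation's OnEnded callback before the map entry is erased. The following is a minimal standalone sketch of that keyed-lifecycle pattern only; the Highlight struct, TouchHighlights class, and plain std::map are illustrative stand-ins, not the Chromium types or the animation code.

```cpp
// Standalone sketch of the pointer-id keyed highlight lifecycle used by
// CaptureModeDemoToolsController; Highlight and TouchHighlights are stand-ins.
#include <cassert>
#include <cstddef>
#include <map>
#include <memory>

struct Point { float x, y; };

struct Highlight {
  Point center;
  void CenterAroundPoint(const Point& p) { center = p; }
};

class TouchHighlights {
 public:
  void OnTouchDown(int pointer_id, const Point& location) {
    // One highlight per active pointer id; a new press with the same id
    // replaces the existing entry.
    highlights_[pointer_id] = std::make_unique<Highlight>(Highlight{location});
  }
  void OnTouchDragged(int pointer_id, const Point& location) {
    auto it = highlights_.find(pointer_id);
    assert(it != highlights_.end());
    it->second->CenterAroundPoint(location);
  }
  void OnTouchUp(int pointer_id) {
    // The real controller keeps the layer alive until its grow-and-fade-out
    // animation ends by moving the unique_ptr into the OnEnded callback; here
    // we simply erase the entry.
    highlights_.erase(pointer_id);
  }
  std::size_t active_count() const { return highlights_.size(); }

 private:
  std::map<int, std::unique_ptr<Highlight>> highlights_;
};

int main() {
  TouchHighlights highlights;
  highlights.OnTouchDown(0, {10, 10});
  highlights.OnTouchDown(1, {50, 60});
  highlights.OnTouchDragged(0, {20, 25});
  highlights.OnTouchUp(1);
  assert(highlights.active_count() == 1);
}
```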
diff --git a/ash/capture_mode/capture_mode_demo_tools_controller.h b/ash/capture_mode/capture_mode_demo_tools_controller.h index 403699e..f05c133a 100644 --- a/ash/capture_mode/capture_mode_demo_tools_controller.h +++ b/ash/capture_mode/capture_mode_demo_tools_controller.h
@@ -26,6 +26,9 @@ using MouseHighlightLayers = std::vector<std::unique_ptr<PointerHighlightLayer>>; +using TouchHighlightLayersMap = + base::flat_map<ui::PointerId, std::unique_ptr<PointerHighlightLayer>>; + // Observes and decides whether to show a helper widget representing the // currently pressed key combination or not. The key combination will be used to // construct or modify the `KeyComboViewer`. The @@ -51,6 +54,11 @@ // Refreshes the bounds of the key combo viewer. void RefreshBounds(); + // Decides whether to show the highlight for the touch event or not. + void OnTouchEvent(ui::EventType event_type, + ui::PointerId pointer_id, + const gfx::PointF& event_location_in_window); + // ui::InputMethodObserver: void OnFocus() override {} void OnBlur() override {} @@ -58,10 +66,6 @@ void OnTextInputStateChanged(const ui::TextInputClient* client) override; void OnInputMethodDestroyed(const ui::InputMethod* input_method) override {} - const MouseHighlightLayers& mouse_highlight_layers_for_testing() const { - return mouse_highlight_layers_; - } - private: friend class CaptureModeDemoToolsTestApi; @@ -84,6 +88,22 @@ void OnMouseHighlightAnimationEnded( PointerHighlightLayer* pointer_highlight_layer_ptr); + // Creates a new highlight layer each time it gets called and performs the + // grow animation on it. + void OnTouchDown(const ui::PointerId& pointer_id, + const gfx::PointF& event_location_in_window); + + // Performs the grow-and-fade-out animation on an existing highlight layer + // that corresponds to the given `pointer_id`. + void OnTouchUp(const ui::PointerId& pointer_id, + const gfx::PointF& event_location_in_window); + + // Sets the bounds of the touch highlight layer that corresponds to the + // `pointer_id` based on the `event_location_in_window` of the touch event + // when it gets called on touch dragged. + void OnTouchDragged(const ui::PointerId& pointer_id, + const gfx::PointF& event_location_in_window); + VideoRecordingWatcher* const video_recording_watcher_; views::UniqueWidgetPtr demo_tools_widget_; KeyComboView* key_combo_view_ = nullptr; @@ -105,6 +125,10 @@ // Contains all the mouse highlight layers that are being animated. MouseHighlightLayers mouse_highlight_layers_; + // Maps the PointerHighlightLayer of the touch event by the pointer id as the + // key. + TouchHighlightLayersMap touch_pointer_id_to_highlight_layer_map_; + // If set, it will be called when the mouse highlight animation is completed. base::OnceClosure on_mouse_highlight_animation_ended_callback_for_test_;
diff --git a/ash/capture_mode/capture_mode_demo_tools_test_api.cc b/ash/capture_mode/capture_mode_demo_tools_test_api.cc index 766340b..c001a1151 100644 --- a/ash/capture_mode/capture_mode_demo_tools_test_api.cc +++ b/ash/capture_mode/capture_mode_demo_tools_test_api.cc
@@ -76,4 +76,16 @@ std::move(callback); } +const MouseHighlightLayers& +CaptureModeDemoToolsTestApi::GetMouseHighlightLayers() const { + DCHECK(demo_tools_controller_); + return demo_tools_controller_->mouse_highlight_layers_; +} + +const TouchHighlightLayersMap& +CaptureModeDemoToolsTestApi::GetTouchIdToHighlightLayerMap() const { + DCHECK(demo_tools_controller_); + return demo_tools_controller_->touch_pointer_id_to_highlight_layer_map_; +} + } // namespace ash \ No newline at end of file
diff --git a/ash/capture_mode/capture_mode_demo_tools_test_api.h b/ash/capture_mode/capture_mode_demo_tools_test_api.h index 0864bb0..d5aedf0 100644 --- a/ash/capture_mode/capture_mode_demo_tools_test_api.h +++ b/ash/capture_mode/capture_mode_demo_tools_test_api.h
@@ -7,9 +7,11 @@ #include <vector> +#include "base/containers/flat_map.h" #include "base/functional/callback_forward.h" #include "base/timer/timer.h" #include "ui/events/keycodes/keyboard_codes_posix.h" +#include "ui/events/pointer_details.h" namespace views { class ImageView; @@ -20,6 +22,13 @@ class CaptureModeDemoToolsController; class KeyComboView; +class PointerHighlightLayer; + +using MouseHighlightLayers = + std::vector<std::unique_ptr<PointerHighlightLayer>>; + +using TouchHighlightLayersMap = + base::flat_map<ui::PointerId, std::unique_ptr<PointerHighlightLayer>>; class CaptureModeDemoToolsTestApi { public: @@ -59,6 +68,10 @@ // ends. void SetOnMouseHighlightAnimationEndedCallback(base::OnceClosure callback); + const MouseHighlightLayers& GetMouseHighlightLayers() const; + + const TouchHighlightLayersMap& GetTouchIdToHighlightLayerMap() const; + private: CaptureModeDemoToolsController* const demo_tools_controller_; };
diff --git a/ash/capture_mode/capture_mode_demo_tools_unittests.cc b/ash/capture_mode/capture_mode_demo_tools_unittests.cc index d5b727c8..452993b 100644 --- a/ash/capture_mode/capture_mode_demo_tools_unittests.cc +++ b/ash/capture_mode/capture_mode_demo_tools_unittests.cc
@@ -17,23 +17,28 @@ #include "ash/capture_mode/capture_mode_settings_view.h" #include "ash/capture_mode/capture_mode_test_util.h" #include "ash/capture_mode/capture_mode_types.h" +#include "ash/capture_mode/capture_mode_util.h" #include "ash/capture_mode/key_combo_view.h" #include "ash/capture_mode/pointer_highlight_layer.h" #include "ash/capture_mode/video_recording_watcher.h" #include "ash/constants/ash_features.h" #include "ash/display/window_tree_host_manager.h" +#include "ash/resources/vector_icons/vector_icons.h" #include "ash/shell.h" #include "ash/style/icon_button.h" #include "ash/test/ash_test_base.h" -#include "base/run_loop.h" #include "base/test/scoped_feature_list.h" +#include "base/timer/timer.h" #include "ui/aura/window_tree_host.h" #include "ui/base/ime/fake_text_input_client.h" #include "ui/compositor/scoped_animation_duration_scale_mode.h" #include "ui/events/event_constants.h" #include "ui/events/keycodes/keyboard_codes_posix.h" +#include "ui/events/pointer_details.h" #include "ui/gfx/geometry/point.h" +#include "ui/gfx/geometry/point_f.h" #include "ui/gfx/geometry/rect.h" +#include "ui/gfx/geometry/vector2d.h" #include "ui/views/controls/button/toggle_button.h" #include "ui/views/controls/image_view.h" #include "ui/wm/core/coordinate_conversion.h" @@ -149,6 +154,37 @@ ->SetFocusedTextInputClient(nullptr); } + void DragTouchAndVerifyHighlight(const ui::PointerId& touch_id, + const gfx::Point& touch_point, + const gfx::Vector2d& drag_offset) { + auto* event_generator = GetEventGenerator(); + event_generator->PressTouchId(touch_id, touch_point); + CaptureModeDemoToolsTestApi demo_tools_test_api( + GetCaptureModeDemoToolsController()); + const auto& touch_highlight_map = + demo_tools_test_api.GetTouchIdToHighlightLayerMap(); + const auto iter = + touch_highlight_map.find(static_cast<ui::PointerId>(touch_id)); + ASSERT_TRUE(iter != touch_highlight_map.end()); + const auto* touch_highlight = iter->second.get(); + auto original_touch_highlight_bounds = touch_highlight->layer()->bounds(); + auto* recording_watcher = + CaptureModeController::Get()->video_recording_watcher_for_testing(); + wm::ConvertRectToScreen(recording_watcher->window_being_recorded(), + &original_touch_highlight_bounds); + event_generator->MoveTouchBy(drag_offset.x(), drag_offset.y()); + gfx::PointF updated_event_location{ + event_generator->current_screen_location()}; + const auto expected_touch_highlight_layer_bounds = + capture_mode_util::CalculateHighlightLayerBounds( + updated_event_location, capture_mode::kHighlightLayerRadius); + auto actual_touch_highlight_layer_bounds = original_touch_highlight_bounds; + actual_touch_highlight_layer_bounds.Offset(drag_offset.x(), + drag_offset.y()); + EXPECT_EQ(expected_touch_highlight_layer_bounds, + actual_touch_highlight_layer_bounds); + } + private: base::test::ScopedFeatureList scoped_feature_list_; std::unique_ptr<aura::Window> window_; @@ -545,6 +581,7 @@ StartDemoToolsEnabledVideoRecordingWithParam(); auto* demo_tools_controller = GetCaptureModeDemoToolsController(); EXPECT_TRUE(demo_tools_controller); + CaptureModeDemoToolsTestApi demo_tools_test_api(demo_tools_controller); gfx::Rect confined_bounds_in_screen = GetDemoToolsConfinedBoundsInScreenCoordinates(); @@ -552,13 +589,12 @@ event_generator->MoveMouseTo(confined_bounds_in_screen.CenterPoint()); event_generator->PressLeftButton(); event_generator->ReleaseLeftButton(); - EXPECT_FALSE( - demo_tools_controller->mouse_highlight_layers_for_testing().empty()); - 
EXPECT_EQ(demo_tools_controller->mouse_highlight_layers_for_testing().size(), - 1u); + const MouseHighlightLayers& highlight_layers = + demo_tools_test_api.GetMouseHighlightLayers(); + EXPECT_FALSE(highlight_layers.empty()); + EXPECT_EQ(highlight_layers.size(), 1u); WaitForMouseHighlightAnimationCompleted(); - EXPECT_TRUE( - demo_tools_controller->mouse_highlight_layers_for_testing().empty()); + EXPECT_TRUE(highlight_layers.empty()); } // Tests that multiple mouse highlight layers will be visible on consecutive @@ -575,12 +611,13 @@ auto* window_being_recorded = recording_watcher->window_being_recorded(); auto* demo_tools_controller = GetCaptureModeDemoToolsController(); EXPECT_TRUE(demo_tools_controller); + CaptureModeDemoToolsTestApi demo_tools_test_api = + CaptureModeDemoToolsTestApi(demo_tools_controller); gfx::Rect inner_rect = GetDemoToolsConfinedBoundsInScreenCoordinates(); inner_rect.Inset(5); - auto& layers_vector = - demo_tools_controller->mouse_highlight_layers_for_testing(); + const auto& layers_vector = demo_tools_test_api.GetMouseHighlightLayers(); auto* event_generator = GetEventGenerator(); for (auto point : {inner_rect.CenterPoint(), inner_rect.origin(), @@ -621,6 +658,90 @@ } } +// Tests that the touch highlight layer will be created on touch +// down and removed on touch up. It also tests that the bounds of the touch +// highlight layer will be updated correctly on the touch drag event. +TEST_P(CaptureModeDemoToolsTestWithAllSources, TouchHighlightTest) { + StartDemoToolsEnabledVideoRecordingWithParam(); + auto* demo_tools_controller = GetCaptureModeDemoToolsController(); + EXPECT_TRUE(demo_tools_controller); + CaptureModeDemoToolsTestApi demo_tools_test_api(demo_tools_controller); + + const gfx::Rect confined_bounds_in_screen = + GetDemoToolsConfinedBoundsInScreenCoordinates(); + auto* event_generator = GetEventGenerator(); + + const auto& touch_highlight_map = + demo_tools_test_api.GetTouchIdToHighlightLayerMap(); + + const auto center_point = confined_bounds_in_screen.CenterPoint(); + event_generator->PressTouchId(0, center_point); + EXPECT_FALSE(touch_highlight_map.empty()); + event_generator->ReleaseTouchId(0); + EXPECT_TRUE(touch_highlight_map.empty()); + + const gfx::Vector2d drag_offset = + gfx::Vector2d(confined_bounds_in_screen.width() / 4, + confined_bounds_in_screen.height() / 4); + DragTouchAndVerifyHighlight(/*touch_id=*/0, /*touch_point=*/center_point, + drag_offset); +} + +// Tests the behaviors when multiple touches are performed. +// 1. The corresponding touch highlight will be generated on touch down; +// 2. The number of touch highlights kept in the demo tools controller is the +// same as the number of touch down events; +// 3. The bounds of the touch highlights will be updated correctly when dragging +// multiple touch events simultaneously; +// 4. The corresponding touch highlight will be removed on touch up. The +// number of touch highlights kept in the demo tools controller will become zero +// when all touches are released or cancelled. 
+TEST_P(CaptureModeDemoToolsTestWithAllSources, MultiTouchHighlightTest) { + StartDemoToolsEnabledVideoRecordingWithParam(); + auto* demo_tools_controller = GetCaptureModeDemoToolsController(); + EXPECT_TRUE(demo_tools_controller); + CaptureModeDemoToolsTestApi demo_tools_test_api(demo_tools_controller); + + const auto& touch_highlight_map = + demo_tools_test_api.GetTouchIdToHighlightLayerMap(); + EXPECT_TRUE(touch_highlight_map.empty()); + + gfx::Rect inner_rect = GetDemoToolsConfinedBoundsInScreenCoordinates(); + inner_rect.Inset(20); + + struct { + int touch_id; + gfx::Point touch_point; + gfx::Vector2d drag_offset; + } kTestCases[] = { + {/*touch_id=*/1, inner_rect.CenterPoint(), gfx::Vector2d(15, 25)}, + {/*touch_id=*/0, inner_rect.origin(), gfx::Vector2d(10, -20)}, + {/*touch_id=*/2, inner_rect.bottom_right(), gfx::Vector2d(-30, -20)}}; + + // Iterate through the kTestCases and perform the touch down. The + // corresponding touch highlight will be generated. Drag these touch events + // and check if the bounds of the corresponding touch highlight are updated + // correctly. + for (const auto& test_case : kTestCases) { + DragTouchAndVerifyHighlight(test_case.touch_id, test_case.touch_point, + test_case.drag_offset); + } + + EXPECT_EQ(touch_highlight_map.size(), 3u); + + // Release the touch events one by one and the corresponding touch highlight + // layer will be removed. The number of highlight layers kept in the demo + // tools controller will become zero when all touches are released or + // cancelled. + for (const auto& test_case : kTestCases) { + GetEventGenerator()->ReleaseTouchId(test_case.touch_id); + EXPECT_FALSE(touch_highlight_map.contains( + static_cast<ui::PointerId>(test_case.touch_id))); + } + + EXPECT_TRUE(touch_highlight_map.empty()); +} + INSTANTIATE_TEST_SUITE_P(All, CaptureModeDemoToolsTestWithAllSources, testing::Values(CaptureModeSource::kFullscreen,
diff --git a/ash/capture_mode/capture_mode_feature_pod_controller.cc b/ash/capture_mode/capture_mode_feature_pod_controller.cc index aa5c807c..a3116da 100644 --- a/ash/capture_mode/capture_mode_feature_pod_controller.cc +++ b/ash/capture_mode/capture_mode_feature_pod_controller.cc
@@ -48,14 +48,16 @@ std::unique_ptr<FeatureTile> CaptureModeFeaturePodController::CreateTile() { DCHECK(features::IsQsRevampEnabled()); + // TODO(b/263423627): Tile should be compact if applicable. auto feature_tile = std::make_unique<FeatureTile>( base::BindRepeating(&FeaturePodControllerBase::OnIconPressed, weak_ptr_factory_.GetWeakPtr()), - /*is_togglable=*/false, FeatureTile::TileType::kCompact); + /*is_togglable=*/false, FeatureTile::TileType::kPrimary); feature_tile->SetVectorIcon(kCaptureModeIcon); const auto label_text = l10n_util::GetStringUTF16(IDS_ASH_STATUS_TRAY_CAPTURE_MODE_BUTTON_LABEL); feature_tile->SetLabel(label_text); + feature_tile->SetSubLabelVisibility(false); feature_tile->SetTooltipText(label_text); return feature_tile; }
diff --git a/ash/capture_mode/capture_mode_util.cc b/ash/capture_mode/capture_mode_util.cc index de748cb..8e2748e 100644 --- a/ash/capture_mode/capture_mode_util.cc +++ b/ash/capture_mode/capture_mode_util.cc
@@ -524,4 +524,11 @@ return widget && widget->GetNativeWindow()->Contains(target); } +gfx::Rect CalculateHighlightLayerBounds(const gfx::PointF& center_point, + int highlight_layer_radius) { + return gfx::Rect(center_point.x() - highlight_layer_radius, + center_point.y() - highlight_layer_radius, + highlight_layer_radius * 2, highlight_layer_radius * 2); +} + } // namespace ash::capture_mode_util
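For reference, CalculateHighlightLayerBounds() above simply centers a square of side 2 * highlight_layer_radius on the given point. A standalone sketch of the same arithmetic with a worked value follows; the plain PointF/Rect structs stand in for the gfx types and are not Chromium code.

```cpp
// Standalone illustration of the highlight bounds math.
#include <iostream>

struct PointF { float x, y; };
struct Rect { int x, y, width, height; };

Rect CalculateHighlightLayerBounds(const PointF& center, int radius) {
  // Top-left corner is the center shifted left/up by the radius; the square
  // spans the full diameter on both axes.
  return {static_cast<int>(center.x) - radius,
          static_cast<int>(center.y) - radius, radius * 2, radius * 2};
}

int main() {
  // With kHighlightLayerRadius = 36 and a touch at (100, 50), the resulting
  // layer bounds are (64, 14) with size 72x72.
  const Rect r = CalculateHighlightLayerBounds({100, 50}, 36);
  std::cout << r.x << "," << r.y << " " << r.width << "x" << r.height << "\n";
}
```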
diff --git a/ash/capture_mode/capture_mode_util.h b/ash/capture_mode/capture_mode_util.h index 1a5e3abe..4f04d8e 100644 --- a/ash/capture_mode/capture_mode_util.h +++ b/ash/capture_mode/capture_mode_util.h
@@ -21,6 +21,7 @@ } // namespace aura namespace gfx { +class PointF; class Rect; class Transform; } // namespace gfx @@ -202,6 +203,12 @@ bool IsEventTargetedOnWidget(const ui::LocatedEvent& event, views::Widget* widget); +// Calculates the highlight layer bounds based on `center_point` which is in the +// coordinates of the window being recorded. +ASH_EXPORT gfx::Rect CalculateHighlightLayerBounds( + const gfx::PointF& center_point, + int highlight_layer_radius); + } // namespace capture_mode_util } // namespace ash
diff --git a/ash/capture_mode/pointer_highlight_layer.cc b/ash/capture_mode/pointer_highlight_layer.cc index 66673b2..e3ebeeea 100644 --- a/ash/capture_mode/pointer_highlight_layer.cc +++ b/ash/capture_mode/pointer_highlight_layer.cc
@@ -4,6 +4,7 @@ #include "ash/capture_mode/pointer_highlight_layer.h" +#include "ash/capture_mode/capture_mode_constants.h" #include "ash/capture_mode/capture_mode_util.h" #include "ash/style/dark_light_mode_controller_impl.h" #include "base/check.h" @@ -12,7 +13,6 @@ #include "ui/compositor/layer.h" #include "ui/compositor/layer_type.h" #include "ui/compositor/paint_recorder.h" -#include "ui/events/event.h" #include "ui/gfx/geometry/dip_util.h" #include "ui/gfx/geometry/point_f.h" #include "ui/gfx/geometry/rect.h" @@ -22,20 +22,10 @@ namespace { -constexpr int kHighlightLayerRadius = 36; -constexpr float kHighlightLayerInitialOpacity = 1.f; constexpr float kLightModeBorderOpacityScaleFactor = 0.8f; const int kHighlightStrokeWidth = 2; -constexpr int kFillsRadius = kHighlightLayerRadius - kHighlightStrokeWidth; - -// Calculates the layer bounds based on the event location in the coordinates of -// the window being recorded. -gfx::Rect CalculateHighlightLayerBounds( - const gfx::PointF& event_location_in_window) { - return gfx::Rect(event_location_in_window.x() - kHighlightLayerRadius, - event_location_in_window.y() - kHighlightLayerRadius, - kHighlightLayerRadius * 2, kHighlightLayerRadius * 2); -} +constexpr int kFillsRadius = + capture_mode::kHighlightLayerRadius - kHighlightStrokeWidth; // Returns the color used for the highlight layer affordance and border. SkColor GetColor() { @@ -48,26 +38,33 @@ PointerHighlightLayer::PointerHighlightLayer( const gfx::PointF& event_location_in_window, ui::Layer* parent_layer) { + DCHECK(parent_layer); SetLayer(std::make_unique<ui::Layer>(ui::LAYER_TEXTURED)); layer()->SetFillsBoundsOpaquely(false); - layer()->SetBounds(CalculateHighlightLayerBounds(event_location_in_window)); - layer()->SetRoundedCornerRadius(gfx::RoundedCornersF(kHighlightLayerRadius)); - layer()->SetOpacity(kHighlightLayerInitialOpacity); + CenterAroundPoint(event_location_in_window); + layer()->SetRoundedCornerRadius( + gfx::RoundedCornersF(capture_mode::kHighlightLayerRadius)); layer()->set_delegate(this); layer()->SetName("PointerHighlightLayer"); - DCHECK(parent_layer); parent_layer->Add(layer()); parent_layer->StackAtTop(layer()); } PointerHighlightLayer::~PointerHighlightLayer() = default; +void PointerHighlightLayer::CenterAroundPoint( + const gfx::PointF& event_location_in_window) { + layer()->SetBounds(capture_mode_util::CalculateHighlightLayerBounds( + event_location_in_window, capture_mode::kHighlightLayerRadius)); +} + void PointerHighlightLayer::OnPaintLayer(const ui::PaintContext& context) { ui::PaintRecorder recorder(context, layer()->size()); gfx::ScopedCanvas scoped_canvas(recorder.canvas()); const float dsf = recorder.canvas()->UndoDeviceScaleFactor(); - const float scaled_highlight_radius = dsf * kHighlightLayerRadius; + const float scaled_highlight_radius = + dsf * capture_mode::kHighlightLayerRadius; const float scaled_fills_radius = dsf * kFillsRadius; const gfx::PointF scaled_highlight_center = gfx::ConvertPointToPixels( capture_mode_util::GetLocalCenterPoint(layer()), dsf);
diff --git a/ash/capture_mode/pointer_highlight_layer.h b/ash/capture_mode/pointer_highlight_layer.h index 082c4b4..0aea30d 100644 --- a/ash/capture_mode/pointer_highlight_layer.h +++ b/ash/capture_mode/pointer_highlight_layer.h
@@ -5,11 +5,9 @@ #ifndef ASH_CAPTURE_MODE_POINTER_HIGHLIGHT_LAYER_H_ #define ASH_CAPTURE_MODE_POINTER_HIGHLIGHT_LAYER_H_ -#include "ui/aura/window.h" #include "ui/compositor/layer_delegate.h" #include "ui/compositor/layer_owner.h" #include "ui/compositor/paint_context.h" -#include "ui/events/event.h" namespace gfx { class PointF; @@ -32,6 +30,9 @@ PointerHighlightLayer& operator=(const PointerHighlightLayer&) = delete; ~PointerHighlightLayer() override; + // Sets bounds of the layer() centered with `event_location_in_window`. + void CenterAroundPoint(const gfx::PointF& event_location_in_window); + // ui::LayerDelegate: void OnPaintLayer(const ui::PaintContext& context) override; void OnDeviceScaleFactorChanged(float old_device_scale_factor,
diff --git a/ash/capture_mode/video_recording_watcher.cc b/ash/capture_mode/video_recording_watcher.cc index a3ddceb..952ead5 100644 --- a/ash/capture_mode/video_recording_watcher.cc +++ b/ash/capture_mode/video_recording_watcher.cc
@@ -79,10 +79,10 @@ return cursor_point; } -// Gets the location of the given mouse |event| in the coordinates of the given -// |window|. +// Gets the location of the given `event` in the coordinates of the given +// `window`. gfx::PointF GetEventLocationInWindow(aura::Window* window, - const ui::MouseEvent& event) { + const ui::LocatedEvent& event) { aura::Window* target = static_cast<aura::Window*>(event.target()); gfx::PointF location = event.location_f(); if (target != window) @@ -556,6 +556,17 @@ } } +void VideoRecordingWatcher::OnTouchEvent(ui::TouchEvent* event) { + if (demo_tools_controller_ && (event->type() == ui::ET_TOUCH_PRESSED || + event->type() == ui::ET_TOUCH_RELEASED || + event->type() == ui::ET_TOUCH_MOVED || + event->type() == ui::ET_TOUCH_CANCELLED)) { + demo_tools_controller_->OnTouchEvent( + event->type(), event->pointer_details().id, + GetEventLocationInWindow(window_being_recorded_, *event)); + } +} + void VideoRecordingWatcher::OnTabletModeStarted() { UpdateCursorOverlayNow(gfx::PointF()); }
diff --git a/ash/capture_mode/video_recording_watcher.h b/ash/capture_mode/video_recording_watcher.h index 4589ab0..660885c 100644 --- a/ash/capture_mode/video_recording_watcher.h +++ b/ash/capture_mode/video_recording_watcher.h
@@ -127,6 +127,7 @@ // ui::EventHandler: void OnKeyEvent(ui::KeyEvent* event) override; void OnMouseEvent(ui::MouseEvent* event) override; + void OnTouchEvent(ui::TouchEvent* event) override; // TabletModeObserver: void OnTabletModeStarted() override;
diff --git a/ash/public/cpp/BUILD.gn b/ash/public/cpp/BUILD.gn index d9f7827..07fd323 100644 --- a/ash/public/cpp/BUILD.gn +++ b/ash/public/cpp/BUILD.gn
@@ -204,6 +204,7 @@ "message_center/arc_notification_manager_delegate.h", "message_center/arc_notifications_host_initializer.cc", "message_center/arc_notifications_host_initializer.h", + "message_center/oobe_notification_constants.h", "message_center_ash.cc", "message_center_ash.h", "metrics_util.cc",
diff --git a/ash/public/cpp/message_center/oobe_notification_constants.h b/ash/public/cpp/message_center/oobe_notification_constants.h new file mode 100644 index 0000000..e342152f --- /dev/null +++ b/ash/public/cpp/message_center/oobe_notification_constants.h
@@ -0,0 +1,16 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef ASH_PUBLIC_CPP_MESSAGE_CENTER_OOBE_NOTIFICATION_CONSTANTS_H_ +#define ASH_PUBLIC_CPP_MESSAGE_CENTER_OOBE_NOTIFICATION_CONSTANTS_H_ + +namespace ash { + +// ID of the locale switch notification that could be shown during the OOBE. +inline constexpr static char kOOBELocaleSwitchNotificationId[] = + "oobe.locale-switch-notification"; + +} // namespace ash + +#endif // ASH_PUBLIC_CPP_MESSAGE_CENTER_OOBE_NOTIFICATION_CONSTANTS_H_
diff --git a/ash/style/ash_color_id.h b/ash/style/ash_color_id.h index cbdcbda..3926759d 100644 --- a/ash/style/ash_color_id.h +++ b/ash/style/ash_color_id.h
@@ -123,7 +123,13 @@ /* Color for icon of the blocked bluetooth device */ \ E_CPONLY(kColorAshIconColorBlocked)\ /* Color for icon in title of app streaming bubble */ \ - E_CPONLY(kColorAshEcheIconColorStreaming) + E_CPONLY(kColorAshEcheIconColorStreaming) \ + /* Color for text of the holding space view with multi select enabled */ \ + E_CPONLY(kColorAshMultiSelectTextColor) \ + /* Color for checkmark icon in holding space */ \ + E_CPONLY(kColorAshCheckmarkIconColor) \ + /* Color for drag image overflow badge text in holding space */ \ + E_CPONLY(kColorAshDragImageOverflowBadgeTextColor) #include "ui/color/color_id_macros.inc"
diff --git a/ash/style/ash_color_mixer.cc b/ash/style/ash_color_mixer.cc index 92ba2bb..c470ee9 100644 --- a/ash/style/ash_color_mixer.cc +++ b/ash/style/ash_color_mixer.cc
@@ -457,6 +457,18 @@ mixer[kColorAshIconColorBlocked] = {gfx::kGoogleGrey100}; mixer[kColorAshEcheIconColorStreaming] = {ui::ColorTransform(SK_ColorGREEN)}; + + mixer[kColorAshMultiSelectTextColor] = + use_dark_color ? ui::ColorTransform(gfx::kGoogleBlue100) + : ui::ColorTransform(gfx::kGoogleBlue800); + + mixer[kColorAshCheckmarkIconColor] = + use_dark_color ? ui::ColorTransform(gfx::kGoogleGrey900) + : ui::ColorTransform(SK_ColorWHITE); + + mixer[kColorAshDragImageOverflowBadgeTextColor] = + use_dark_color ? ui::ColorTransform(gfx::kGoogleGrey900) + : ui::ColorTransform(gfx::kGoogleGrey200); } } // namespace ash
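The new mixer entries follow the recipe pattern used throughout ash_color_mixer.cc: each ash color id resolves to a different concrete color depending on whether dark mode is in effect, and views then look the value up through a ui::ColorProvider (or a themed setter such as SetEnabledColorId) instead of querying AshColorProvider directly. Below is a standalone sketch of that id-to-dark/light-recipe idea; the enum, Recipe struct, and hex values are illustrative stand-ins, not the ui::ColorMixer API or the real ash color definitions.

```cpp
// Standalone sketch of the "color recipe" idea: each color id maps to a
// dark-mode and a light-mode value, resolved per provider.
#include <cstdint>
#include <iostream>
#include <map>

using Color = uint32_t;  // 0xAARRGGBB, stand-in for SkColor.

enum class ColorId {
  kMultiSelectText,
  kCheckmarkIcon,
  kDragImageOverflowBadgeText,
};

struct Recipe {
  Color dark;
  Color light;
};

class ColorProvider {
 public:
  explicit ColorProvider(bool use_dark_color) : dark_(use_dark_color) {
    // Illustrative values only (blue-ish multiselect text, grey/white
    // checkmark and badge text), mirroring the dark/light split above.
    recipes_[ColorId::kMultiSelectText] = {0xFFD2E3FC, 0xFF185ABC};
    recipes_[ColorId::kCheckmarkIcon] = {0xFF202124, 0xFFFFFFFF};
    recipes_[ColorId::kDragImageOverflowBadgeText] = {0xFF202124, 0xFFE8EAED};
  }

  Color GetColor(ColorId id) const {
    const Recipe& recipe = recipes_.at(id);
    return dark_ ? recipe.dark : recipe.light;
  }

 private:
  bool dark_;
  std::map<ColorId, Recipe> recipes_;
};

int main() {
  ColorProvider dark_provider(/*use_dark_color=*/true);
  std::cout << std::hex << dark_provider.GetColor(ColorId::kMultiSelectText)
            << "\n";
}
```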
diff --git a/ash/system/holding_space/downloads_section.cc b/ash/system/holding_space/downloads_section.cc index fd39150..057faed6d 100644 --- a/ash/system/holding_space/downloads_section.cc +++ b/ash/system/holding_space/downloads_section.cc
@@ -14,7 +14,6 @@ #include "ash/public/cpp/holding_space/holding_space_util.h" #include "ash/resources/vector_icons/vector_icons.h" #include "ash/strings/grit/ash_strings.h" -#include "ash/style/ash_color_provider.h" #include "ash/system/holding_space/holding_space_item_chip_view.h" #include "ash/system/holding_space/holding_space_ui.h" #include "ash/system/holding_space/holding_space_util.h" @@ -22,6 +21,7 @@ #include "base/callback_helpers.h" #include "base/i18n/rtl.h" #include "ui/base/l10n/l10n_util.h" +#include "ui/base/models/image_model.h" #include "ui/color/color_id.h" #include "ui/compositor/layer.h" #include "ui/gfx/geometry/rect_f.h" @@ -64,6 +64,9 @@ // Chevron. chevron_ = AddChildView(std::make_unique<views::ImageView>()); chevron_->SetFlipCanvasOnPaintForRTLUI(true); + chevron_->SetImage(ui::ImageModel::FromVectorIcon( + kChevronRightSmallIcon, kColorAshIconColorPrimary, + kHoldingSpaceSectionChevronIconSize)); // Focus ring. // Though the entirety of the header is focusable and behaves as a single @@ -85,18 +88,6 @@ } private: - // views::Button: - void OnThemeChanged() override { - views::Button::OnThemeChanged(); - AshColorProvider* const ash_color_provider = AshColorProvider::Get(); - - // Chevron. - chevron_->SetImage(gfx::CreateVectorIcon( - kChevronRightSmallIcon, kHoldingSpaceSectionChevronIconSize, - ash_color_provider->GetContentLayerColor( - AshColorProvider::ContentLayerType::kIconColorPrimary))); - } - void OnPressed() { holding_space_metrics::RecordDownloadsAction( holding_space_metrics::DownloadsAction::kClick);
diff --git a/ash/system/holding_space/holding_space_drag_util.cc b/ash/system/holding_space/holding_space_drag_util.cc index 4d891f90..099f0c0 100644 --- a/ash/system/holding_space/holding_space_drag_util.cc +++ b/ash/system/holding_space/holding_space_drag_util.cc
@@ -12,12 +12,13 @@ #include "ash/public/cpp/holding_space/holding_space_item.h" #include "ash/public/cpp/rounded_image_view.h" #include "ash/public/cpp/style/scoped_light_mode_as_default.h" -#include "ash/style/ash_color_provider.h" +#include "ash/style/ash_color_id.h" #include "ash/style/dark_light_mode_controller_impl.h" #include "ash/system/holding_space/holding_space_item_view.h" #include "base/containers/adapters.h" #include "base/i18n/rtl.h" #include "base/ranges/algorithm.h" +#include "ui/color/color_provider.h" #include "ui/compositor/canvas_painter.h" #include "ui/compositor/compositor.h" #include "ui/gfx/canvas.h" @@ -148,7 +149,10 @@ ~DragImageItemView() override = default; protected: - DragImageItemView() = default; + explicit DragImageItemView(const ui::ColorProvider* color_provider) + : color_provider_(color_provider) {} + + const ui::ColorProvider* color_provider() const { return color_provider_; } // views::View: gfx::Insets GetInsets() const final { @@ -164,13 +168,9 @@ gfx::RectF bounds(GetContentsBounds()); bounds.Inset(gfx::InsetsF(0.5f)); - // NOTE: Background is white when the dark/light mode feature is disabled. cc::PaintFlags flags; flags.setAntiAlias(true); - flags.setColor(features::IsDarkLightModeEnabled() - ? AshColorProvider::Get()->GetBaseLayerColor( - AshColorProvider::BaseLayerType::kOpaque) - : SK_ColorWHITE); + flags.setColor(color_provider_->GetColor(kColorAshShieldAndBaseOpaque)); flags.setLooper(gfx::CreateShadowDrawLooper(GetShadowDetails().values)); canvas->DrawRoundRect(bounds, kDragImageItemViewCornerRadius, flags); } @@ -180,6 +180,8 @@ return gfx::ShadowDetails::Get(kDragImageItemViewElevation, kDragImageItemViewCornerRadius); } + + const ui::ColorProvider* const color_provider_; }; // DragImageItemChipView ------------------------------------------------------- @@ -188,7 +190,9 @@ // chip in the drag image for a collection of holding space item views. class DragImageItemChipView : public DragImageItemView { public: - explicit DragImageItemChipView(const HoldingSpaceItem* item) { + DragImageItemChipView(const HoldingSpaceItem* item, + const ui::ColorProvider* color_provider) + : DragImageItemView(color_provider) { InitLayout(item); } @@ -227,6 +231,15 @@ ScopedLightModeAsDefault scoped_light_mode; auto* label = AddChildView(bubble_utils::CreateLabel( bubble_utils::TypographyStyle::kBody2, item->GetText())); + // Label created via `bubble_utils::CreateLabel()` has an enabled color id, + // which is resolved when the label is added to the views hierarchy. But + // `this` is never added to widget, enabled color id will never be resolved. + // Thus we need to manually resolve it and set the color as the enabled + // color for the label. + if (auto enabled_color_id = label->GetEnabledColorId()) { + label->SetEnabledColor(color_provider()->GetColor(*enabled_color_id)); + } + label->SetElideBehavior(gfx::ElideBehavior::ELIDE_MIDDLE); label->SetHorizontalAlignment(gfx::HorizontalAlignment::ALIGN_LEFT); layout->SetFlexForView(label, 1); @@ -239,7 +252,9 @@ // `item` in the drag image for a collection of holding space item views. 
class DragImageItemScreenCaptureView : public DragImageItemView { public: - explicit DragImageItemScreenCaptureView(const HoldingSpaceItem* item) { + DragImageItemScreenCaptureView(const HoldingSpaceItem* item, + const ui::ColorProvider* color_provider) + : DragImageItemView(color_provider) { DCHECK(item->IsScreenCapture()); InitLayout(item); } @@ -275,7 +290,8 @@ // if the number of dragged items is > `kDragImageViewMaxItemsToPaint`. class DragImageOverflowBadge : public views::View { public: - explicit DragImageOverflowBadge(size_t count) { + DragImageOverflowBadge(size_t count, const ui::ColorProvider* color_provider) + : color_provider_(color_provider) { DCHECK_GT(count, kDragImageViewMaxItemsToPaint); InitLayout(count); } @@ -298,9 +314,8 @@ ScopedLightModeAsDefault scoped_light_mode; // Background. - SetBackground(views::CreateRoundedRectBackground( - AshColorProvider::Get()->GetControlsLayerColor( - AshColorProvider::ControlsLayerType::kFocusRingColor), + SetBackground(views::CreateThemedRoundedRectBackground( + ui::kColorAshFocusRing, /*radius=*/kDragImageOverflowBadgeMinimumSize.height() / 2)); // Layout. @@ -315,12 +330,16 @@ // Label. auto* label = AddChildView( bubble_utils::CreateLabel(bubble_utils::TypographyStyle::kButton1)); + // `this` is never added to widget, enabled color id will never be resolved. + // Thus we need to manually resolve it and set the color as the enabled + // color for the label. label->SetEnabledColor( - DarkLightModeControllerImpl::Get()->IsDarkModeEnabled() - ? gfx::kGoogleGrey900 - : gfx::kGoogleGrey200); + color_provider_->GetColor(kColorAshDragImageOverflowBadgeTextColor)); + label->SetText(base::UTF8ToUTF16(base::NumberToString(count))); } + + const ui::ColorProvider* const color_provider_; }; // DragImageView --------------------------------------------------------------- @@ -329,7 +348,9 @@ // item `views`. This view expects to be painted to an `SkBitmap`. class DragImageView : public views::View { public: - explicit DragImageView(const std::vector<const HoldingSpaceItem*>& items) { + DragImageView(const std::vector<const HoldingSpaceItem*>& items, + const ui::ColorProvider* color_provider) + : color_provider_(color_provider) { InitLayout(items); } @@ -435,10 +456,11 @@ for (size_t i = 0; i < count; ++i) { if (contains_only_screen_captures) { container->AddChildView( - std::make_unique<DragImageItemScreenCaptureView>(items[i])); + std::make_unique<DragImageItemScreenCaptureView>(items[i], + color_provider_)); } else { container->AddChildView( - std::make_unique<DragImageItemChipView>(items[i])); + std::make_unique<DragImageItemChipView>(items[i], color_provider_)); } } @@ -452,14 +474,15 @@ if (count <= kDragImageViewMaxItemsToPaint) return; - drag_image_overflow_badge_ = - AddChildView(std::make_unique<DragImageOverflowBadge>(count)); + drag_image_overflow_badge_ = AddChildView( + std::make_unique<DragImageOverflowBadge>(count, color_provider_)); // This view's `layout` manager ignores `drag_image_overflow_badge_` as it // is manually positioned relative to the `first_drag_image_item_view_`. 
layout->SetChildViewIgnoredByLayout(drag_image_overflow_badge_, true); } + const ui::ColorProvider* const color_provider_; views::View* first_drag_image_item_view_ = nullptr; views::View* drag_image_overflow_badge_ = nullptr; }; @@ -470,7 +493,8 @@ void CreateDragImage(const std::vector<const HoldingSpaceItemView*>& views, gfx::ImageSkia* drag_image, - gfx::Vector2d* drag_offset) { + gfx::Vector2d* drag_offset, + const ui::ColorProvider* color_provider) { if (views.empty()) { *drag_image = gfx::ImageSkia(); *drag_offset = gfx::Vector2d(); @@ -481,7 +505,7 @@ const float scale = views::ScaleFactorForDragFromWidget(widget); const bool is_pixel_canvas = widget->GetCompositor()->is_pixel_canvas(); - DragImageView drag_image_view(GetHoldingSpaceItems(views)); + DragImageView drag_image_view(GetHoldingSpaceItems(views), color_provider); drag_image_view.SetSize(drag_image_view.GetPreferredSize()); *drag_image = drag_image_view.GetDragImage(scale, is_pixel_canvas);
diff --git a/ash/system/holding_space/holding_space_drag_util.h b/ash/system/holding_space/holding_space_drag_util.h index f97ca18..8fda21ec 100644 --- a/ash/system/holding_space/holding_space_drag_util.h +++ b/ash/system/holding_space/holding_space_drag_util.h
@@ -12,6 +12,10 @@ class Vector2d; } // namespace gfx +namespace ui { +class ColorProvider; +} // namespace ui + namespace ash { class HoldingSpaceItemView; @@ -25,7 +29,8 @@ // to represent the presence of additional drag items if necessary. void CreateDragImage(const std::vector<const HoldingSpaceItemView*>& views, gfx::ImageSkia* drag_image, - gfx::Vector2d* drag_offset); + gfx::Vector2d* drag_offset, + const ui::ColorProvider* color_provider); } // namespace holding_space_util } // namespace ash
diff --git a/ash/system/holding_space/holding_space_item_chip_view.cc b/ash/system/holding_space/holding_space_item_chip_view.cc index 0913111..d6b7274 100644 --- a/ash/system/holding_space/holding_space_item_chip_view.cc +++ b/ash/system/holding_space/holding_space_item_chip_view.cc
@@ -10,13 +10,14 @@ #include "ash/public/cpp/holding_space/holding_space_client.h" #include "ash/public/cpp/holding_space/holding_space_constants.h" #include "ash/public/cpp/holding_space/holding_space_controller.h" +#include "ash/public/cpp/holding_space/holding_space_image.h" #include "ash/public/cpp/holding_space/holding_space_item.h" #include "ash/public/cpp/holding_space/holding_space_progress.h" #include "ash/public/cpp/holding_space/holding_space_util.h" #include "ash/public/cpp/rounded_image_view.h" #include "ash/resources/vector_icons/vector_icons.h" #include "ash/strings/grit/ash_strings.h" -#include "ash/style/ash_color_provider.h" +#include "ash/style/ash_color_id.h" #include "ash/style/dark_light_mode_controller_impl.h" #include "ash/system/holding_space/holding_space_item_view.h" #include "ash/system/holding_space/holding_space_progress_indicator_util.h" @@ -26,6 +27,7 @@ #include "base/bind.h" #include "ui/base/l10n/l10n_util.h" #include "ui/base/metadata/metadata_impl_macros.h" +#include "ui/base/models/image_model.h" #include "ui/chromeos/styles/cros_styles.h" #include "ui/compositor/layer.h" #include "ui/compositor/layer_owner.h" @@ -230,14 +232,6 @@ return secondary_action; } -// TODO(crbug.com/1202796): Create ash colors. -// Returns the theme color to use for text in multiselect. -SkColor GetMultiSelectTextColor() { - return DarkLightModeControllerImpl::Get()->IsDarkModeEnabled() - ? gfx::kGoogleBlue100 - : gfx::kGoogleBlue800; -} - } // namespace // HoldingSpaceItemChipView ---------------------------------------------------- @@ -287,17 +281,29 @@ .SetID(kHoldingSpaceItemSecondaryActionContainerId) .SetUseDefaultFillLayout(true) .SetVisible(false) - .AddChild(CreateSecondaryActionBuilder() - .CopyAddressTo(&secondary_action_pause_) - .SetID(kHoldingSpaceItemPauseButtonId) - .SetCallback(secondary_action_callback) - .SetVisible(false)) - .AddChild(CreateSecondaryActionBuilder() - .CopyAddressTo(&secondary_action_resume_) - .SetID(kHoldingSpaceItemResumeButtonId) - .SetCallback(secondary_action_callback) - .SetFlipCanvasOnPaintForRTLUI(false) - .SetVisible(false)))) + .AddChild( + CreateSecondaryActionBuilder() + .CopyAddressTo(&secondary_action_pause_) + .SetID(kHoldingSpaceItemPauseButtonId) + .SetCallback(secondary_action_callback) + .SetVisible(false) + .SetImageModel( + views::Button::STATE_NORMAL, + ui::ImageModel::FromVectorIcon( + kPauseIcon, kColorAshButtonIconColor, + kSecondaryActionIconSize))) + .AddChild( + CreateSecondaryActionBuilder() + .CopyAddressTo(&secondary_action_resume_) + .SetID(kHoldingSpaceItemResumeButtonId) + .SetCallback(secondary_action_callback) + .SetFlipCanvasOnPaintForRTLUI(false) + .SetVisible(false) + .SetImageModel( + views::Button::STATE_NORMAL, + ui::ImageModel::FromVectorIcon( + kResumeIcon, kColorAshButtonIconColor, + kSecondaryActionIconSize))))) .AddChild( views::Builder<views::View>() .SetUseDefaultFillLayout(true) @@ -433,18 +439,6 @@ UpdateImage(); UpdateLabels(); - - // Pause. - const SkColor icon_color = AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kButtonIconColor); - secondary_action_pause_->SetImage( - views::Button::STATE_NORMAL, - gfx::CreateVectorIcon(kPauseIcon, kSecondaryActionIconSize, icon_color)); - - // Resume. - secondary_action_resume_->SetImage( - views::Button::STATE_NORMAL, - gfx::CreateVectorIcon(kResumeIcon, kSecondaryActionIconSize, icon_color)); } void HoldingSpaceItemChipView::OnPaintLabelMask(views::Label* label, @@ -596,25 +590,20 @@ // Primary. 
const std::u16string last_primary_text = primary_label_->GetText(); primary_label_->SetText(item()->GetText()); - primary_label_->SetEnabledColor( - selected() && multiselect - ? GetMultiSelectTextColor() - : AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kTextColorPrimary)); + primary_label_->SetEnabledColorId(selected() && multiselect + ? kColorAshMultiSelectTextColor + : kColorAshTextColorPrimary); // Secondary. const std::u16string last_secondary_text = secondary_label_->GetText(); secondary_label_->SetText( item()->secondary_text().value_or(base::EmptyString16())); - if (GetWidget()) { - secondary_label_->SetEnabledColor( - selected() && multiselect ? GetMultiSelectTextColor() - : item()->secondary_text_color_id() - ? GetColorProvider()->GetColor( - item()->secondary_text_color_id().value()) - : AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kTextColorSecondary)); - } + + secondary_label_->SetEnabledColorId( + selected() && multiselect ? kColorAshMultiSelectTextColor + : item()->secondary_text_color_id() + ? item()->secondary_text_color_id().value() + : kColorAshTextColorSecondary); secondary_label_->SetVisible(!secondary_label_->GetText().empty()); // Tooltip.
diff --git a/ash/system/holding_space/holding_space_item_chip_view.h b/ash/system/holding_space/holding_space_item_chip_view.h index 76e6e5d..1ea2a00 100644 --- a/ash/system/holding_space/holding_space_item_chip_view.h +++ b/ash/system/holding_space/holding_space_item_chip_view.h
@@ -6,7 +6,6 @@ #define ASH_SYSTEM_HOLDING_SPACE_HOLDING_SPACE_ITEM_CHIP_VIEW_H_ #include "ash/ash_export.h" -#include "ash/public/cpp/holding_space/holding_space_image.h" #include "ash/system/holding_space/holding_space_animation_registry.h" #include "ash/system/holding_space/holding_space_item_view.h" #include "ui/base/metadata/metadata_header_macros.h"
diff --git a/ash/system/holding_space/holding_space_item_screen_capture_view.cc b/ash/system/holding_space/holding_space_item_screen_capture_view.cc index 709e38c9..f86ad2d 100644 --- a/ash/system/holding_space/holding_space_item_screen_capture_view.cc +++ b/ash/system/holding_space/holding_space_item_screen_capture_view.cc
@@ -8,18 +8,17 @@ #include "ash/public/cpp/holding_space/holding_space_image.h" #include "ash/public/cpp/holding_space/holding_space_item.h" #include "ash/public/cpp/rounded_image_view.h" -#include "ash/style/ash_color_provider.h" +#include "ash/style/ash_color_id.h" #include "ash/style/dark_light_mode_controller_impl.h" #include "ash/system/holding_space/holding_space_util.h" #include "ash/system/tray/tray_constants.h" #include "base/bind.h" #include "components/vector_icons/vector_icons.h" #include "ui/base/metadata/metadata_impl_macros.h" -#include "ui/gfx/paint_vector_icon.h" +#include "ui/base/models/image_model.h" #include "ui/views/accessibility/view_accessibility.h" #include "ui/views/background.h" #include "ui/views/border.h" -#include "ui/views/controls/button/image_button.h" #include "ui/views/controls/image_view.h" #include "ui/views/layout/box_layout.h" #include "ui/views/layout/box_layout_view.h" @@ -59,12 +58,17 @@ .SetMainAxisAlignment(MainAxisAlignment::kCenter) .SetCrossAxisAlignment(CrossAxisAlignment::kCenter) .SetFocusBehavior(views::View::FocusBehavior::NEVER) - .AddChild(views::Builder<views::ImageView>() - .CopyAddressTo(&play_icon_) - .SetID(kHoldingSpaceScreenCapturePlayIconId) - .SetPreferredSize(kPlayIconSize) - .SetImageSize(gfx::Size(kHoldingSpaceIconSize, - kHoldingSpaceIconSize)))); + .AddChild( + views::Builder<views::ImageView>() + .SetID(kHoldingSpaceScreenCapturePlayIconId) + .SetPreferredSize(kPlayIconSize) + .SetImageSize( + gfx::Size(kHoldingSpaceIconSize, kHoldingSpaceIconSize)) + .SetImage(ui::ImageModel::FromVectorIcon( + vector_icons::kPlayArrowIcon, kColorAshButtonIconColor, + kHoldingSpaceIconSize)) + .SetBackground(holding_space_util::CreateCircleBackground( + kColorAshShieldAndBase80)))); } std::move(builder) @@ -79,10 +83,15 @@ views::FlexSpecification( views::MinimumFlexSizeRule::kScaleToZero, views::MaximumFlexSizeRule::kUnbounded))) - .AddChild(CreatePrimaryActionBuilder(kPrimaryActionSize))) + .AddChild( + CreatePrimaryActionBuilder(kPrimaryActionSize) + .SetBackground(holding_space_util::CreateCircleBackground( + kColorAshShieldAndBase80)))) .AddChild(views::Builder<views::View>() - .CopyAddressTo(&border_) - .SetCanProcessEventsWithinSubtree(false)) + .SetCanProcessEventsWithinSubtree(false) + .SetBorder(views::CreateThemedRoundedRectBorder( + kBorderThickness, kHoldingSpaceCornerRadius, + kColorAshSeparatorColor))) .BuildChildren(); // Subscribe to be notified of changes to `item`'s image. @@ -118,32 +127,7 @@ void HoldingSpaceItemScreenCaptureView::OnThemeChanged() { HoldingSpaceItemView::OnThemeChanged(); - // Border. - border_->SetBorder(views::CreateRoundedRectBorder( - kBorderThickness, kHoldingSpaceCornerRadius, - AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kSeparatorColor))); - - // Image. UpdateImage(); - - // Primary action. - primary_action_container()->SetBackground( - holding_space_util::CreateCircleBackground( - AshColorProvider::Get()->GetBaseLayerColor( - AshColorProvider::BaseLayerType::kTransparent80))); - - if (!play_icon_) - return; - - // Play icon. 
- play_icon_->SetBackground(holding_space_util::CreateCircleBackground( - AshColorProvider::Get()->GetBaseLayerColor( - AshColorProvider::BaseLayerType::kTransparent80))); - play_icon_->SetImage(gfx::CreateVectorIcon( - vector_icons::kPlayArrowIcon, kHoldingSpaceIconSize, - AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kButtonIconColor))); } void HoldingSpaceItemScreenCaptureView::UpdateImage() {
diff --git a/ash/system/holding_space/holding_space_item_screen_capture_view.h b/ash/system/holding_space/holding_space_item_screen_capture_view.h index 06dea12..a50e7fa 100644 --- a/ash/system/holding_space/holding_space_item_screen_capture_view.h +++ b/ash/system/holding_space/holding_space_item_screen_capture_view.h
@@ -6,15 +6,10 @@ #define ASH_SYSTEM_HOLDING_SPACE_HOLDING_SPACE_ITEM_SCREEN_CAPTURE_VIEW_H_ #include "ash/ash_export.h" -#include "ash/public/cpp/holding_space/holding_space_image.h" #include "ash/system/holding_space/holding_space_item_view.h" #include "ui/base/metadata/metadata_header_macros.h" #include "ui/views/metadata/view_factory.h" -namespace views { -class ImageView; -} // namespace views - namespace ash { class HoldingSpaceItem; @@ -45,9 +40,7 @@ void UpdateImage(); // Owned by view hierarchy. - views::View* border_ = nullptr; RoundedImageView* image_ = nullptr; - views::ImageView* play_icon_ = nullptr; base::CallbackListSubscription image_skia_changed_subscription_; };
diff --git a/ash/system/holding_space/holding_space_item_view.cc b/ash/system/holding_space/holding_space_item_view.cc index 7739c5e..669993e 100644 --- a/ash/system/holding_space/holding_space_item_view.cc +++ b/ash/system/holding_space/holding_space_item_view.cc
@@ -12,7 +12,7 @@ #include "ash/public/cpp/holding_space/holding_space_util.h" #include "ash/public/cpp/shelf_config.h" #include "ash/resources/vector_icons/vector_icons.h" -#include "ash/style/ash_color_provider.h" +#include "ash/style/ash_color_id.h" #include "ash/style/dark_light_mode_controller_impl.h" #include "ash/system/holding_space/holding_space_util.h" #include "ash/system/holding_space/holding_space_view_delegate.h" @@ -146,6 +146,10 @@ GetViewAccessibility().OverrideDescription( std::u16string(), ax::mojom::DescriptionFrom::kAttributeExplicitlyEmpty); + // Background. + SetBackground(views::CreateThemedRoundedRectBackground( + kColorAshControlBackgroundColorInactive, kHoldingSpaceCornerRadius)); + // Layer. SetPaintToLayer(); layer()->SetFillsBoundsOpaquely(false); @@ -262,49 +266,10 @@ void HoldingSpaceItemView::OnThemeChanged() { views::View::OnThemeChanged(); - AshColorProvider* const ash_color_provider = AshColorProvider::Get(); - - // Background. - SetBackground(views::CreateRoundedRectBackground( - ash_color_provider->GetControlsLayerColor( - AshColorProvider::ControlsLayerType::kControlBackgroundColorInactive), - kHoldingSpaceCornerRadius)); - - // Checkmark. - checkmark_->SetBackground(holding_space_util::CreateCircleBackground( - ash_color_provider->GetControlsLayerColor( - AshColorProvider::ControlsLayerType::kFocusRingColor), - kCheckmarkBackgroundSize)); - checkmark_->SetImage(gfx::CreateVectorIcon( - kCheckIcon, kHoldingSpaceIconSize, - DarkLightModeControllerImpl::Get()->IsDarkModeEnabled() - ? gfx::kGoogleGrey900 - : SK_ColorWHITE)); // Focused/selected layers. InvalidateLayer(focused_layer_owner_->layer()); InvalidateLayer(selected_layer_owner_->layer()); - - if (!primary_action_container_) - return; - - // Cancel. - const SkColor icon_color = AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kButtonIconColor); - primary_action_cancel_->SetImageModel( - views::Button::STATE_NORMAL, - ui::ImageModel::FromVectorIcon(kCancelIcon, icon_color, - kHoldingSpaceIconSize)); - - // Pin. 
- const ui::ImageModel unpinned_icon = ui::ImageModel::FromVectorIcon( - views::kUnpinIcon, icon_color, kHoldingSpaceIconSize); - const ui::ImageModel pinned_icon = ui::ImageModel::FromVectorIcon( - views::kPinIcon, icon_color, kHoldingSpaceIconSize); - primary_action_pin_->SetImageModel(views::Button::STATE_NORMAL, - unpinned_icon); - primary_action_pin_->SetToggledImageModel(views::Button::STATE_NORMAL, - pinned_icon); } void HoldingSpaceItemView::OnHoldingSpaceItemUpdated( @@ -363,7 +328,11 @@ auto checkmark = views::Builder<views::ImageView>(); checkmark.CopyAddressTo(&checkmark_) .SetID(kHoldingSpaceItemCheckmarkId) - .SetVisible(selected()); + .SetVisible(selected()) + .SetBackground(holding_space_util::CreateCircleBackground( + ui::kColorAshFocusRing, kCheckmarkBackgroundSize)) + .SetImage(ui::ImageModel::FromVectorIcon( + kCheckIcon, kColorAshCheckmarkIconColor, kHoldingSpaceIconSize)); return checkmark; } @@ -392,6 +361,10 @@ &HoldingSpaceItemView::OnPrimaryActionPressed, base::Unretained(this))) .SetFocusBehavior(views::View::FocusBehavior::NEVER) + .SetImageModel(views::Button::STATE_NORMAL, + ui::ImageModel::FromVectorIcon( + kCancelIcon, kColorAshButtonIconColor, + kHoldingSpaceIconSize)) .SetImageHorizontalAlignment(HorizontalAlignment::ALIGN_CENTER) .SetImageVerticalAlignment(VerticalAlignment::ALIGN_MIDDLE) .SetPreferredSize(preferred_size) @@ -404,6 +377,15 @@ &HoldingSpaceItemView::OnPrimaryActionPressed, base::Unretained(this))) .SetFocusBehavior(views::View::FocusBehavior::NEVER) + .SetImageModel(views::Button::STATE_NORMAL, + ui::ImageModel::FromVectorIcon( + views::kUnpinIcon, kColorAshButtonIconColor, + kHoldingSpaceIconSize)) + .SetToggledImageModel( + views::Button::STATE_NORMAL, + ui::ImageModel::FromVectorIcon(views::kPinIcon, + kColorAshButtonIconColor, + kHoldingSpaceIconSize)) .SetImageHorizontalAlignment(HorizontalAlignment::ALIGN_CENTER) .SetImageVerticalAlignment(VerticalAlignment::ALIGN_MIDDLE) .SetPreferredSize(preferred_size) @@ -425,8 +407,7 @@ cc::PaintFlags flags; flags.setAntiAlias(true); - flags.setColor(AshColorProvider::Get()->GetControlsLayerColor( - AshColorProvider::ControlsLayerType::kFocusRingColor)); + flags.setColor(GetColorProvider()->GetColor(ui::kColorAshFocusRing)); flags.setStrokeWidth(views::FocusRing::kDefaultHaloThickness); flags.setStyle(cc::PaintFlags::kStroke_Style); @@ -440,8 +421,7 @@ return; const SkColor color = - SkColorSetA(AshColorProvider::Get()->GetControlsLayerColor( - AshColorProvider::ControlsLayerType::kFocusRingColor), + SkColorSetA(GetColorProvider()->GetColor(ui::kColorAshFocusRing), kHoldingSpaceSelectedOverlayOpacity * 0xFF); cc::PaintFlags flags;
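The holding_space_item_view.cc change above is the pattern this patch applies throughout: instead of resolving an SkColor from AshColorProvider inside OnThemeChanged() and re-setting images and backgrounds there, views are configured once with ui::ColorId-based models that the view's own ColorProvider resolves whenever the theme changes. A minimal sketch of that pattern, not taken from this patch: the view class and kMyVectorIcon are hypothetical, while the color ids and helpers are the ones used above.

```cpp
// Sketch only. Assumes kMyVectorIcon (a gfx::VectorIcon) and the ash color
// ids are declared in headers included elsewhere, as they are in the patch.
#include "ui/base/models/image_model.h"
#include "ui/views/background.h"
#include "ui/views/controls/image_view.h"

class ThemedIconView : public views::ImageView {
 public:
  ThemedIconView() {
    // The ImageModel carries the vector icon plus a color id; it is
    // rasterized against the view's current ColorProvider, so no
    // OnThemeChanged() override is needed to recolor the icon.
    SetImage(ui::ImageModel::FromVectorIcon(
        kMyVectorIcon, kColorAshButtonIconColor, /*icon_size=*/20));
    // Same idea for the background: the color id is resolved lazily.
    SetBackground(views::CreateThemedRoundedRectBackground(
        kColorAshControlBackgroundColorInactive, /*radius=*/8.0f));
  }
};
```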
diff --git a/ash/system/holding_space/holding_space_tray.cc b/ash/system/holding_space/holding_space_tray.cc index 608709b..fb7cc48 100644 --- a/ash/system/holding_space/holding_space_tray.cc +++ b/ash/system/holding_space/holding_space_tray.cc
@@ -21,6 +21,7 @@ #include "ash/shelf/shelf.h" #include "ash/shell.h" #include "ash/strings/grit/ash_strings.h" +#include "ash/style/ash_color_id.h" #include "ash/system/holding_space/holding_space_animation_registry.h" #include "ash/system/holding_space/holding_space_progress_indicator_util.h" #include "ash/system/holding_space/holding_space_tray_bubble.h" @@ -181,6 +182,10 @@ icon->SetPreferredSize(gfx::Size(kTrayItemSize, kTrayItemSize)); icon->SetPaintToLayer(); icon->layer()->SetFillsBoundsOpaquely(false); + icon->SetImage(ui::ImageModel::FromVectorIcon( + features::IsHoldingSpaceRefreshEnabled() ? kHoldingSpaceRefreshIcon + : kHoldingSpaceIcon, + kColorAshIconColorPrimary, kHoldingSpaceTrayIconSize)); return icon; } @@ -195,6 +200,8 @@ gfx::Size(kHoldingSpaceIconSize, kHoldingSpaceIconSize)); icon->SetPaintToLayer(); icon->layer()->SetFillsBoundsOpaquely(false); + icon->SetImage(gfx::CreateVectorIcon( + views::kUnpinIcon, kColorAshIconColorPrimary, kHoldingSpaceIconSize)); return icon; } @@ -487,19 +494,6 @@ void HoldingSpaceTray::OnThemeChanged() { TrayBackgroundView::OnThemeChanged(); - const SkColor color = AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kIconColorPrimary); - - // Default tray icon. - default_tray_icon_->SetImage(gfx::CreateVectorIcon( - features::IsHoldingSpaceRefreshEnabled() ? kHoldingSpaceRefreshIcon - : kHoldingSpaceIcon, - kHoldingSpaceTrayIconSize, color)); - - // Drop target icon. - drop_target_icon_->SetImage( - gfx::CreateVectorIcon(views::kUnpinIcon, kHoldingSpaceIconSize, color)); - // Progress indicator. progress_indicator_->InvalidateLayer(); }
diff --git a/ash/system/holding_space/holding_space_tray_bubble.cc b/ash/system/holding_space/holding_space_tray_bubble.cc index ea97972..cbdfd7e 100644 --- a/ash/system/holding_space/holding_space_tray_bubble.cc +++ b/ash/system/holding_space/holding_space_tray_bubble.cc
@@ -360,8 +360,7 @@ bounds().width() - kHoldingSpaceChildBubblePadding.width(); // Cache `color` which is consistent across separators. - SkColor color = AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kSeparatorColor); + SkColor color = GetColorProvider()->GetColor(kColorAshSeparatorColor); // Iterate over all children, drawing separators between visible siblings. const views::View* last_visible_child = nullptr;
diff --git a/ash/system/holding_space/holding_space_tray_child_bubble.cc b/ash/system/holding_space/holding_space_tray_child_bubble.cc index 65ee7ca02..741c45a 100644 --- a/ash/system/holding_space/holding_space_tray_child_bubble.cc +++ b/ash/system/holding_space/holding_space_tray_child_bubble.cc
@@ -10,7 +10,7 @@ #include "ash/constants/ash_features.h" #include "ash/public/cpp/holding_space/holding_space_constants.h" #include "ash/public/cpp/style/color_provider.h" -#include "ash/style/ash_color_provider.h" +#include "ash/style/ash_color_id.h" #include "ash/system/holding_space/holding_space_item_views_section.h" #include "ash/system/holding_space/holding_space_util.h" #include "ash/system/holding_space/holding_space_view_delegate.h" @@ -196,6 +196,17 @@ sections_.push_back(AddChildView(std::move(section))); sections_.back()->Init(); } + + // When refresh is enabled, backgrounds and borders are implemented in the + // top-level bubble rather than per child bubble. + if (features::IsHoldingSpaceRefreshEnabled()) { + return; + } + + SetBackground(views::CreateThemedSolidBackground(kColorAshShieldAndBase80)); + SetBorder(std::make_unique<views::HighlightBorder>( + kBubbleCornerRadius, views::HighlightBorder::Type::kHighlightBorder1, + /*use_light_colors=*/false)); } void HoldingSpaceTrayChildBubble::Reset() { @@ -360,28 +371,6 @@ return true; } -void HoldingSpaceTrayChildBubble::OnThemeChanged() { - views::View::OnThemeChanged(); - - // When refresh is enabled, backgrounds and borders are implemented in the - // top-level bubble rather than per child bubble. - if (features::IsHoldingSpaceRefreshEnabled()) - return; - - if (!features::IsDarkLightModeEnabled()) { - layer()->SetColor(AshColorProvider::Get()->GetBaseLayerColor( - AshColorProvider::BaseLayerType::kTransparent80)); - return; - } - - SetBackground( - views::CreateSolidBackground(AshColorProvider::Get()->GetBaseLayerColor( - AshColorProvider::BaseLayerType::kTransparent80))); - SetBorder(std::make_unique<views::HighlightBorder>( - kBubbleCornerRadius, views::HighlightBorder::Type::kHighlightBorder1, - /*use_light_colors=*/false)); -} - void HoldingSpaceTrayChildBubble::MaybeAnimateIn() { // Don't preempt an out animation as new content will populate and be animated // in, if any exists, once the out animation completes.
diff --git a/ash/system/holding_space/holding_space_tray_child_bubble.h b/ash/system/holding_space/holding_space_tray_child_bubble.h index cc3dc87..1aacb01 100644 --- a/ash/system/holding_space/holding_space_tray_child_bubble.h +++ b/ash/system/holding_space/holding_space_tray_child_bubble.h
@@ -83,7 +83,6 @@ void ChildVisibilityChanged(views::View* child) override; void OnGestureEvent(ui::GestureEvent* event) override; bool OnMousePressed(const ui::MouseEvent& event) override; - void OnThemeChanged() override; // Invoked to animate in/out this view if necessary. void MaybeAnimateIn();
diff --git a/ash/system/holding_space/holding_space_tray_child_bubble_unittest.cc b/ash/system/holding_space/holding_space_tray_child_bubble_unittest.cc index e0bf9b86..326cdfaf 100644 --- a/ash/system/holding_space/holding_space_tray_child_bubble_unittest.cc +++ b/ash/system/holding_space/holding_space_tray_child_bubble_unittest.cc
@@ -11,6 +11,7 @@ #include "ash/constants/ash_features.h" #include "ash/public/cpp/holding_space/holding_space_item.h" #include "ash/public/cpp/holding_space/holding_space_section.h" +#include "ash/style/ash_color_id.h" #include "ash/system/holding_space/holding_space_ash_test_base.h" #include "ash/system/holding_space/holding_space_item_chip_view.h" #include "ash/system/holding_space/holding_space_item_view.h" @@ -219,9 +220,9 @@ // Background. auto* background = child_bubble()->GetBackground(); ASSERT_TRUE(background); - EXPECT_EQ(background->get_color(), - AshColorProvider::Get()->GetBaseLayerColor( - AshColorProvider::BaseLayerType::kTransparent80)); + EXPECT_EQ( + background->get_color(), + child_bubble()->GetColorProvider()->GetColor(kColorAshShieldAndBase80)); EXPECT_EQ(layer->background_blur(), ColorProvider::kBackgroundBlurSigma); // Border.
diff --git a/ash/system/holding_space/holding_space_tray_icon_preview.cc b/ash/system/holding_space/holding_space_tray_icon_preview.cc index 944fa34..1e2b5f12 100644 --- a/ash/system/holding_space/holding_space_tray_icon_preview.cc +++ b/ash/system/holding_space/holding_space_tray_icon_preview.cc
@@ -6,7 +6,6 @@ #include <algorithm> -#include "ash/constants/ash_features.h" #include "ash/public/cpp/holding_space/holding_space_constants.h" #include "ash/public/cpp/holding_space/holding_space_image.h" #include "ash/public/cpp/holding_space/holding_space_item.h" @@ -14,7 +13,7 @@ #include "ash/public/cpp/holding_space/holding_space_model_observer.h" #include "ash/public/cpp/shelf_config.h" #include "ash/shelf/shelf.h" -#include "ash/style/ash_color_provider.h" +#include "ash/style/ash_color_id.h" #include "ash/style/dark_light_mode_controller_impl.h" #include "ash/system/holding_space/holding_space_animation_registry.h" #include "ash/system/holding_space/holding_space_progress_indicator_util.h" @@ -23,6 +22,7 @@ #include "ash/system/tray/tray_constants.h" #include "base/bind.h" #include "base/i18n/rtl.h" +#include "ui/color/color_provider.h" #include "ui/compositor/layer.h" #include "ui/compositor/layer_animation_sequence.h" #include "ui/compositor/paint_recorder.h" @@ -674,8 +674,8 @@ // due to pixel rounding. Failure to do so could result in paint artifacts. cc::PaintFlags flags; flags.setAntiAlias(true); - flags.setColor(AshColorProvider::Get()->GetBaseLayerColor( - AshColorProvider::BaseLayerType::kOpaque)); + flags.setColor( + container_->GetColorProvider()->GetColor(kColorAshShieldAndBaseOpaque)); flags.setLooper(gfx::CreateShadowDrawLooper(GetShadowDetails().values)); canvas->DrawCircle( gfx::PointF(contents_bounds.CenterPoint()),
diff --git a/ash/system/holding_space/holding_space_tray_unittest.cc b/ash/system/holding_space/holding_space_tray_unittest.cc index 7b1c1edc..5e1f61e 100644 --- a/ash/system/holding_space/holding_space_tray_unittest.cc +++ b/ash/system/holding_space/holding_space_tray_unittest.cc
@@ -27,6 +27,7 @@ #include "ash/shelf/shelf_widget.h" #include "ash/shell.h" #include "ash/strings/grit/ash_strings.h" +#include "ash/style/ash_color_id.h" #include "ash/system/holding_space/holding_space_animation_registry.h" #include "ash/system/holding_space/holding_space_ash_test_base.h" #include "ash/system/holding_space/holding_space_item_view.h" @@ -600,11 +601,9 @@ auto* prefs = GetSessionControllerClient()->GetUserPrefService(account_id); ASSERT_TRUE(prefs); - const auto expected_chevron_skia = gfx::CreateVectorIcon( + const auto expected_chevron_model = ui::ImageModel::FromVectorIcon( expanded ? kChevronUpSmallIcon : kChevronDownSmallIcon, - kHoldingSpaceSectionChevronIconSize, - AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kIconColorSecondary)); + kColorAshIconColorSecondary, kHoldingSpaceSectionChevronIconSize); // Changes to the section's expanded state should be stored persistently. EXPECT_EQ(holding_space_prefs::IsSuggestionsExpanded(prefs), expanded); @@ -623,9 +622,13 @@ // The section header's chevron icon should indicate whether the section is // expanded or collapsed. + auto* suggestions_section_chevron_icon = + test_api()->GetSuggestionsSectionChevronIcon(); EXPECT_TRUE(gfx::BitmapsAreEqual( - *test_api()->GetSuggestionsSectionChevronIcon()->GetImage().bitmap(), - *expected_chevron_skia.bitmap())); + *suggestions_section_chevron_icon->GetImage().bitmap(), + *expected_chevron_model + .Rasterize(suggestions_section_chevron_icon->GetColorProvider()) + .bitmap())); // The section content should be visible as long as suggestions are // available and the section is expanded. @@ -3755,8 +3758,7 @@ auto* background = bubble->GetBackground(); ASSERT_TRUE(background); EXPECT_EQ(background->get_color(), - AshColorProvider::Get()->GetBaseLayerColor( - AshColorProvider::BaseLayerType::kTransparent80)); + bubble->GetColorProvider()->GetColor(kColorAshShieldAndBase80)); EXPECT_EQ(bubble->layer()->background_blur(), ColorProvider::kBackgroundBlurSigma); @@ -3812,8 +3814,8 @@ IsHoldingSpaceRefreshEnabled() ? kHoldingSpaceRefreshIcon : kHoldingSpaceIcon, kHoldingSpaceTrayIconSize, - AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kIconColorPrimary)) + test_api()->GetDefaultTrayIcon()->GetColorProvider()->GetColor( + kColorAshIconColorPrimary)) .bitmap())); } @@ -3874,8 +3876,7 @@ bitmap.getColor(separator_midpoint_x, separator_midpoint_y); SkColor expected_color = color_utils::GetResultingPaintColor( /*foreground=*/IsHoldingSpaceRefreshEnabled() - ? AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kSeparatorColor) + ? bubble->GetColorProvider()->GetColor(kColorAshSeparatorColor) : SK_ColorTRANSPARENT, /*background=*/bubble->GetBackground() ? bubble->GetBackground()->get_color()
diff --git a/ash/system/holding_space/holding_space_util.cc b/ash/system/holding_space/holding_space_util.cc index d00872a..69c3211 100644 --- a/ash/system/holding_space/holding_space_util.cc +++ b/ash/system/holding_space/holding_space_util.cc
@@ -7,6 +7,7 @@ #include <memory> #include "third_party/abseil-cpp/absl/types/optional.h" +#include "ui/color/color_provider.h" #include "ui/compositor/layer.h" #include "ui/compositor/layer_animation_element.h" #include "ui/compositor/layer_animation_observer.h" @@ -44,26 +45,23 @@ Callback callback_; }; -// CirclePainter --------------------------------------------------------------- +// CircleBackground ------------------------------------------------------------ -class CirclePainter : public views::Painter { +class CircleBackground : public views::Background { public: - CirclePainter(SkColor color, size_t fixed_size) - : color_(color), fixed_size_(fixed_size) {} + CircleBackground(ui::ColorId color_id, size_t fixed_size) + : color_id_(color_id), fixed_size_(fixed_size) {} - CirclePainter(SkColor color, const gfx::InsetsF& insets) - : color_(color), insets_(insets) {} + CircleBackground(ui::ColorId color_id, const gfx::InsetsF& insets) + : color_id_(color_id), insets_(insets) {} - CirclePainter(const CirclePainter&) = delete; - CirclePainter& operator=(const CirclePainter&) = delete; - ~CirclePainter() override = default; + CircleBackground(const CircleBackground&) = delete; + CircleBackground& operator=(const CircleBackground&) = delete; + ~CircleBackground() override = default; - private: - // views::Painter: - gfx::Size GetMinimumSize() const override { return gfx::Size(); } - - void Paint(gfx::Canvas* canvas, const gfx::Size& size) override { - gfx::RectF bounds{gfx::SizeF(size)}; + // views::Background: + void Paint(gfx::Canvas* canvas, views::View* view) const override { + gfx::RectF bounds(view->GetLocalBounds()); if (insets_.has_value()) bounds.Inset(insets_.value()); @@ -75,12 +73,18 @@ cc::PaintFlags flags; flags.setAntiAlias(true); - flags.setColor(color_); + flags.setColor(get_color()); canvas->DrawCircle(bounds.CenterPoint(), radius, flags); } - const SkColor color_; + void OnViewThemeChanged(views::View* view) override { + SetNativeControlColor(view->GetColorProvider()->GetColor(color_id_)); + view->SchedulePaint(); + } + + private: + const ui::ColorId color_id_; const absl::optional<size_t> fixed_size_; const absl::optional<gfx::InsetsF> insets_; }; @@ -144,17 +148,15 @@ observer); } -std::unique_ptr<views::Background> CreateCircleBackground(SkColor color, +std::unique_ptr<views::Background> CreateCircleBackground(ui::ColorId color_id, size_t fixed_size) { - return views::CreateBackgroundFromPainter( - std::make_unique<CirclePainter>(color, fixed_size)); + return std::make_unique<CircleBackground>(color_id, fixed_size); } std::unique_ptr<views::Background> CreateCircleBackground( - SkColor color, + ui::ColorId color_id, const gfx::InsetsF& insets) { - return views::CreateBackgroundFromPainter( - std::make_unique<CirclePainter>(color, insets)); + return std::make_unique<CircleBackground>(color_id, insets); } std::unique_ptr<views::HighlightPathGenerator> CreateHighlightPathGenerator(
diff --git a/ash/system/holding_space/holding_space_util.h b/ash/system/holding_space/holding_space_util.h index c608b77..122c3dc 100644 --- a/ash/system/holding_space/holding_space_util.h +++ b/ash/system/holding_space/holding_space_util.h
@@ -9,7 +9,7 @@ #include "base/callback.h" #include "base/time/time.h" -#include "third_party/skia/include/core/SkColor.h" +#include "ui/color/color_id.h" #include "ui/gfx/geometry/insets_f.h" #include "ui/gfx/geometry/rrect_f.h" #include "ui/views/controls/highlight_path_generator.h" @@ -39,12 +39,12 @@ ui::LayerAnimationObserver* observer); // Creates a circular background of the specified `color` and `fixed_size`. -std::unique_ptr<views::Background> CreateCircleBackground(SkColor color, +std::unique_ptr<views::Background> CreateCircleBackground(ui::ColorId color_id, size_t fixed_size); // Creates a circular background of the specified `color` and optional `insets`. std::unique_ptr<views::Background> CreateCircleBackground( - SkColor color, + ui::ColorId color_id, const gfx::InsetsF& insets = gfx::InsetsF()); // Creates a highlight path generator that determines paths based on logic
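holding_space_util's CreateCircleBackground() now returns a views::Background subclass that stores a ui::ColorId and re-resolves it when the owning view's theme changes, instead of a painter baked with a fixed SkColor. A condensed sketch of that shape, assuming the usual views/cc headers; ThemedCircleBackground is an illustrative name, not the CircleBackground class above.

```cpp
// Sketch only; ThemedCircleBackground is illustrative, not the class above.
#include <algorithm>

#include "cc/paint/paint_flags.h"
#include "ui/color/color_id.h"
#include "ui/color/color_provider.h"
#include "ui/gfx/canvas.h"
#include "ui/gfx/geometry/rect_f.h"
#include "ui/views/background.h"
#include "ui/views/view.h"

class ThemedCircleBackground : public views::Background {
 public:
  explicit ThemedCircleBackground(ui::ColorId color_id)
      : color_id_(color_id) {}

  // views::Background:
  void Paint(gfx::Canvas* canvas, views::View* view) const override {
    const gfx::RectF bounds(view->GetLocalBounds());
    cc::PaintFlags flags;
    flags.setAntiAlias(true);
    flags.setColor(get_color());  // Cached by OnViewThemeChanged() below.
    canvas->DrawCircle(bounds.CenterPoint(),
                       std::min(bounds.width(), bounds.height()) / 2.f, flags);
  }

  void OnViewThemeChanged(views::View* view) override {
    // Re-resolve the stored color id against the view's current provider.
    SetNativeControlColor(view->GetColorProvider()->GetColor(color_id_));
    view->SchedulePaint();
  }

 private:
  const ui::ColorId color_id_;
};
```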
diff --git a/ash/system/holding_space/holding_space_view_delegate.cc b/ash/system/holding_space/holding_space_view_delegate.cc index e8728ee..e6cfc0f 100644 --- a/ash/system/holding_space/holding_space_view_delegate.cc +++ b/ash/system/holding_space/holding_space_view_delegate.cc
@@ -477,7 +477,9 @@ // Drag image. gfx::ImageSkia drag_image; gfx::Vector2d drag_offset; - holding_space_util::CreateDragImage(selection, &drag_image, &drag_offset); + holding_space_util::CreateDragImage( + selection, &drag_image, &drag_offset, + bubble_->GetBubbleView()->GetColorProvider()); data->provider().SetDragImage(std::move(drag_image), drag_offset); // Payload.
diff --git a/ash/system/holding_space/pinned_files_section.cc b/ash/system/holding_space/pinned_files_section.cc index 24c2839..00d9b19 100644 --- a/ash/system/holding_space/pinned_files_section.cc +++ b/ash/system/holding_space/pinned_files_section.cc
@@ -18,7 +18,6 @@ #include "ash/session/session_controller_impl.h" #include "ash/shell.h" #include "ash/strings/grit/ash_strings.h" -#include "ash/style/ash_color_provider.h" #include "ash/style/style_util.h" #include "ash/system/holding_space/holding_space_item_chip_view.h" #include "ash/system/holding_space/holding_space_ui.h" @@ -120,17 +119,6 @@ void OnThemeChanged() override { views::Button::OnThemeChanged(); - AshColorProvider* const ash_color_provider = AshColorProvider::Get(); - - // Background. - SetBackground(views::CreateRoundedRectBackground( - ash_color_provider->GetControlsLayerColor( - AshColorProvider::ControlsLayerType:: - kControlBackgroundColorInactive), - kFilesAppChipHeight / 2)); - - // Focus ring. - views::FocusRing::Get(this)->SetColorId(ui::kColorAshFocusRing); // Ink drop. StyleUtil::ConfigureInkDropAttributes( @@ -146,7 +134,7 @@ // Ink drop. views::InkDrop::Get(this)->SetMode(views::InkDropHost::InkDropMode::ON); views::InstallRoundRectHighlightPathGenerator(this, gfx::Insets(), - kFilesAppChipHeight / 2); + kFilesAppChipHeight / 2.f); // Layout. auto* layout = SetLayoutManager(std::make_unique<views::BoxLayout>( @@ -166,6 +154,13 @@ label->SetText(l10n_util::GetStringUTF16( IDS_ASH_HOLDING_SPACE_PINNED_FILES_APP_CHIP_TEXT)); layout->SetFlexForView(label, 1); + + // Focus ring. + views::FocusRing::Get(this)->SetColorId(ui::kColorAshFocusRing); + + // Background. + SetBackground(views::CreateThemedRoundedRectBackground( + kColorAshControlBackgroundColorInactive, kFilesAppChipHeight / 2.f)); } };
diff --git a/ash/system/holding_space/suggestions_section.cc b/ash/system/holding_space/suggestions_section.cc index 86e1e08..670c659 100644 --- a/ash/system/holding_space/suggestions_section.cc +++ b/ash/system/holding_space/suggestions_section.cc
@@ -15,7 +15,6 @@ #include "ash/session/session_controller_impl.h" #include "ash/shell.h" #include "ash/strings/grit/ash_strings.h" -#include "ash/style/ash_color_provider.h" #include "ash/system/holding_space/holding_space_item_chip_view.h" #include "ash/system/holding_space/holding_space_ui.h" #include "ash/system/holding_space/holding_space_util.h" @@ -96,6 +95,9 @@ holding_space_prefs::AddSuggestionsExpandedChangedCallback( pref_change_registrar_.get(), base::BindRepeating(&Header::UpdateState, base::Unretained(this))); + + // Initialize state. + UpdateState(); } private: @@ -110,11 +112,6 @@ : ax::mojom::State::kCollapsed); } - void OnThemeChanged() override { - views::Button::OnThemeChanged(); - UpdateState(); - } - void OnPressed() { auto* prefs = Shell::Get()->session_controller()->GetActivePrefService(); bool expanded = holding_space_prefs::IsSuggestionsExpanded(prefs); @@ -128,13 +125,11 @@ void UpdateState() { // Chevron. auto* prefs = Shell::Get()->session_controller()->GetActivePrefService(); - chevron_->SetImage(gfx::CreateVectorIcon( + chevron_->SetImage(ui::ImageModel::FromVectorIcon( holding_space_prefs::IsSuggestionsExpanded(prefs) ? kChevronUpSmallIcon : kChevronDownSmallIcon, - kHoldingSpaceSectionChevronIconSize, - AshColorProvider::Get()->GetContentLayerColor( - AshColorProvider::ContentLayerType::kIconColorSecondary))); + kColorAshIconColorSecondary, kHoldingSpaceSectionChevronIconSize)); // Accessibility. NotifyAccessibilityEvent(ax::mojom::Event::kStateChanged,
diff --git a/ash/system/message_center/metrics_utils_unittest.cc b/ash/system/message_center/metrics_utils_unittest.cc index 5bd039ab..b6b3a1e4 100644 --- a/ash/system/message_center/metrics_utils_unittest.cc +++ b/ash/system/message_center/metrics_utils_unittest.cc
@@ -14,6 +14,7 @@ #include "base/test/metrics/histogram_tester.h" #include "base/test/scoped_feature_list.h" #include "base/test/task_environment.h" +#include "ui/compositor/scoped_animation_duration_scale_mode.h" #include "ui/events/test/event_generator.h" #include "ui/gfx/image/image.h" #include "ui/message_center/message_center.h" @@ -548,14 +549,17 @@ NotificationCatalogName::kFullRestore; auto notification = CreateNotificationWithCatalogName(catalog_name); - // Add notification to message center. + // Add notification to message center. Use the normal duration for adding the + // notification so that the recorded popup duration is expected. auto* message_center = message_center::MessageCenter::Get(); + absl::optional<ui::ScopedAnimationDurationScaleMode> mode( + ui::ScopedAnimationDurationScaleMode::NORMAL_DURATION); message_center->AddNotification( std::make_unique<message_center::Notification>(*notification)); // Wait for notification popup to time out. - base::TimeDelta popup_timeout_duration = base::Seconds(7); - task_environment()->FastForwardBy(popup_timeout_duration); + constexpr base::TimeDelta kPopupTimeOutDuration(base::Seconds(7)); + task_environment()->FastForwardBy(kPopupTimeOutDuration); // Expect user journey time metric to record the popup duration due to timeout // (value is between 6 and 7 seconds). @@ -564,6 +568,7 @@ EXPECT_TRUE(buckets[0].min >= 6000 && buckets[0].min <= 7000); histograms.ExpectBucketCount(kSystemNotificationPopupDismissedWithin7s, catalog_name, 1); + mode.reset(); message_center->RemoveNotification(notification->id(), /*by_user=*/true); // Dismiss popup within 1s. @@ -571,7 +576,7 @@ message_center->AddNotification( std::make_unique<message_center::Notification>(*notification)); message_center->RemoveNotification(notification->id(), /*by_user=*/true); - task_environment()->FastForwardBy(popup_timeout_duration); + task_environment()->FastForwardBy(kPopupTimeOutDuration); histograms.ExpectBucketCount(kSystemNotificationPopupDismissedWithin1s, catalog_name, 1); @@ -580,9 +585,9 @@ notification->set_never_timeout(true); message_center->AddNotification( std::make_unique<message_center::Notification>(*notification)); - task_environment()->FastForwardBy(popup_timeout_duration + base::Seconds(1)); + task_environment()->FastForwardBy(kPopupTimeOutDuration + base::Seconds(1)); message_center->RemoveNotification(notification->id(), /*by_user=*/true); - task_environment()->FastForwardBy(popup_timeout_duration); + task_environment()->FastForwardBy(kPopupTimeOutDuration); histograms.ExpectBucketCount(kSystemNotificationPopupDismissedAfter7s, catalog_name, 1); }
diff --git a/ash/system/message_center/session_state_notification_blocker.cc b/ash/system/message_center/session_state_notification_blocker.cc index eb87c26..d9ccc36 100644 --- a/ash/system/message_center/session_state_notification_blocker.cc +++ b/ash/system/message_center/session_state_notification_blocker.cc
@@ -4,6 +4,7 @@ #include "ash/system/message_center/session_state_notification_blocker.h" +#include "ash/public/cpp/message_center/oobe_notification_constants.h" #include "ash/session/session_controller_impl.h" #include "ash/shell.h" #include "ash/system/do_not_disturb_notification_controller.h" @@ -57,6 +58,30 @@ active_user_session->user_info.account_id); } +bool IsAllowedDuringOOBE(std::string_view notification_id) { + static const std::string_view kAllowedSystemNotificationIDs[] = { + BatteryNotification::kNotificationId}; + static const std::string_view kAllowedProfileBoundNotificationIDs[] = { + kOOBELocaleSwitchNotificationId}; + + for (const auto& id : kAllowedSystemNotificationIDs) { + if (notification_id == id) { + return true; + } + } + + // Check here not for a full name equivalence, but for a substring existence + // because profile-bound notifications have a profile-specific prefix added + // to them. + for (const auto& id : kAllowedProfileBoundNotificationIDs) { + if (notification_id.find(id) != std::string::npos) { + return true; + } + } + + return false; +} + } // namespace SessionStateNotificationBlocker::SessionStateNotificationBlocker( @@ -104,8 +129,9 @@ return false; } - if (notification.id() == BatteryNotification::kNotificationId) + if (IsAllowedDuringOOBE(notification.id())) { return true; + } return should_show_notification_; } @@ -116,8 +142,9 @@ Shell::Get()->session_controller(); // Never show notifications in kiosk mode. - if (session_controller->IsRunningInAppMode()) + if (session_controller->IsRunningInAppMode()) { return false; + } // Do not show non system notifications for `kLoginNotificationsDelay` // duration. @@ -127,8 +154,9 @@ return false; } - if (notification.id() == BatteryNotification::kNotificationId) + if (IsAllowedDuringOOBE(notification.id())) { return true; + } return should_show_popup_; }
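The blocker above allows two kinds of notifications before a session starts: system notifications matched by exact id, and profile-bound notifications matched by substring because a profile-specific prefix is prepended to their id at runtime. A self-contained sketch of that matching logic with made-up id values:

```cpp
#include <iostream>
#include <string_view>

// Exact match for system ids; substring match for profile-bound ids, whose
// runtime id carries a profile-specific prefix (e.g. "<profile>-locale").
// The id values here are illustrative only.
bool IsAllowedDuringOobe(std::string_view notification_id) {
  constexpr std::string_view kAllowedSystemIds[] = {"battery"};
  constexpr std::string_view kAllowedProfileBoundIds[] = {"locale-switch"};

  for (std::string_view id : kAllowedSystemIds) {
    if (notification_id == id)
      return true;
  }
  for (std::string_view id : kAllowedProfileBoundIds) {
    if (notification_id.find(id) != std::string_view::npos)
      return true;
  }
  return false;
}

int main() {
  std::cout << IsAllowedDuringOobe("battery") << "\n";                  // 1
  std::cout << IsAllowedDuringOobe("profile-0-locale-switch") << "\n";  // 1
  std::cout << IsAllowedDuringOobe("new-fancy-notification") << "\n";   // 0
  return 0;
}
```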
diff --git a/ash/system/message_center/session_state_notification_blocker_unittest.cc b/ash/system/message_center/session_state_notification_blocker_unittest.cc index d65199dd..6393af9e 100644 --- a/ash/system/message_center/session_state_notification_blocker_unittest.cc +++ b/ash/system/message_center/session_state_notification_blocker_unittest.cc
@@ -5,9 +5,11 @@ #include "ash/system/message_center/session_state_notification_blocker.h" #include <memory> +#include <unordered_map> #include "ash/constants/ash_features.h" #include "ash/constants/notifier_catalogs.h" +#include "ash/public/cpp/message_center/oobe_notification_constants.h" #include "ash/session/test_session_controller_client.h" #include "ash/system/do_not_disturb_notification_controller.h" #include "ash/system/power/battery_notification.h" @@ -108,6 +110,17 @@ return blocker_->ShouldShowNotification(notification); } + bool ShouldShowOOBEAllowedNotification(const std::string& notification_id) { + return blocker_->ShouldShowNotification( + CreateDummyNotification(notification_id)); + } + + bool ShouldShowOOBEAllowedNotificationAsPopup( + const std::string& notification_id) { + return blocker_->ShouldShowNotificationAsPopup( + CreateDummyNotification(notification_id)); + } + void SetLockedState(bool locked) { GetSessionControllerClient()->SetSessionState( locked ? SessionState::LOCKED : SessionState::ACTIVE); @@ -120,6 +133,19 @@ : "chromeos-id"; } + message_center::Notification CreateDummyNotification( + const std::string& notification_id) { + message_center::NotifierId notifier_id( + message_center::NotifierType::SYSTEM_COMPONENT, notification_id, + NotificationCatalogName::kTestCatalogName); + message_center::Notification notification( + message_center::NOTIFICATION_TYPE_SIMPLE, notification_id, + u"chromeos-title", u"chromeos-message", ui::ImageModel(), + u"chromeos-source", GURL(), notifier_id, + message_center::RichNotificationData(), nullptr); + return notification; + } + int state_changed_count_ = 0; std::unique_ptr<message_center::NotificationBlocker> blocker_; std::unique_ptr<base::test::ScopedFeatureList> scoped_feature_list_; @@ -296,5 +322,27 @@ EXPECT_TRUE(ShouldShowDoNotDisturbNotification()); } +TEST_P(SessionStateNotificationBlockerTest, NotificationAllowedDuringOOBE) { + const std::unordered_map<std::string, /*expected_notification_allowed=*/bool> + kTestCases = { + {BatteryNotification::kNotificationId, true}, + {kOOBELocaleSwitchNotificationId, true}, + {"new-fancy-notification", false}, + }; + const SessionState kOOBEStates[] = {SessionState::OOBE, + SessionState::LOGIN_PRIMARY, + SessionState::LOGIN_SECONDARY}; + + for (const auto& state : kOOBEStates) { + GetSessionControllerClient()->SetSessionState(state); + for (const auto& test_case : kTestCases) { + EXPECT_EQ(ShouldShowOOBEAllowedNotification(test_case.first), + test_case.second); + EXPECT_EQ(ShouldShowOOBEAllowedNotificationAsPopup(test_case.first), + test_case.second); + } + } +} + } // namespace } // namespace ash
diff --git a/ash/system/unified/quiet_mode_feature_pod_controller.cc b/ash/system/unified/quiet_mode_feature_pod_controller.cc index 48789bd2..2f7d576 100644 --- a/ash/system/unified/quiet_mode_feature_pod_controller.cc +++ b/ash/system/unified/quiet_mode_feature_pod_controller.cc
@@ -14,6 +14,7 @@ #include "ash/strings/grit/ash_strings.h" #include "ash/system/machine_learning/user_settings_event_logger.h" #include "ash/system/unified/feature_pod_button.h" +#include "ash/system/unified/feature_tile.h" #include "ash/system/unified/quick_settings_metrics_util.h" #include "ash/system/unified/unified_system_tray_controller.h" #include "base/metrics/histogram_macros.h" @@ -68,6 +69,34 @@ return button_; } +std::unique_ptr<FeatureTile> QuietModeFeaturePodController::CreateTile() { + DCHECK(features::IsQsRevampEnabled()); + // TODO(b/263423627): Tile should be compact if applicable. + auto tile = std::make_unique<FeatureTile>( + base::BindRepeating(&FeaturePodControllerBase::OnIconPressed, + weak_ptr_factory_.GetWeakPtr()), + /*is_togglable=*/true, FeatureTile::TileType::kPrimary); + tile_ = tile.get(); + + auto* session_controller = Shell::Get()->session_controller(); + const bool visible = session_controller->ShouldShowNotificationTray() && + !session_controller->IsScreenLocked(); + tile_->SetVisible(visible); + if (visible) { + TrackVisibilityUMA(); + } + + // TODO(b/263416361): Update vector icon to its newer version. + tile_->SetVectorIcon(kUnifiedMenuDoNotDisturbIcon); + tile_->SetLabel( + l10n_util::GetStringUTF16(IDS_ASH_STATUS_TRAY_DO_NOT_DISTURB)); + tile_->SetSubLabelVisibility(false); + tile_->SetTooltipText(l10n_util::GetStringFUTF16( + IDS_ASH_STATUS_TRAY_NOTIFICATIONS_TOGGLE_TOOLTIP, + GetQuietModeStateTooltip())); + return tile; +} + QsFeatureCatalogName QuietModeFeaturePodController::GetCatalogName() { return QsFeatureCatalogName::kQuietMode; } @@ -99,6 +128,14 @@ } void QuietModeFeaturePodController::OnQuietModeChanged(bool in_quiet_mode) { + if (features::IsQsRevampEnabled()) { + tile_->SetToggled(in_quiet_mode); + tile_->SetTooltipText(l10n_util::GetStringFUTF16( + IDS_ASH_STATUS_TRAY_NOTIFICATIONS_TOGGLE_TOOLTIP, + GetQuietModeStateTooltip())); + return; + } + button_->SetToggled(in_quiet_mode); button_->SetIconTooltip(l10n_util::GetStringFUTF16( IDS_ASH_STATUS_TRAY_NOTIFICATIONS_TOGGLE_TOOLTIP,
diff --git a/ash/system/unified/quiet_mode_feature_pod_controller.h b/ash/system/unified/quiet_mode_feature_pod_controller.h index 21999a43..df10b41 100644 --- a/ash/system/unified/quiet_mode_feature_pod_controller.h +++ b/ash/system/unified/quiet_mode_feature_pod_controller.h
@@ -11,6 +11,7 @@ #include "ash/constants/quick_settings_catalogs.h" #include "ash/public/cpp/notifier_settings_observer.h" #include "ash/system/unified/feature_pod_controller_base.h" +#include "base/memory/weak_ptr.h" #include "third_party/abseil-cpp/absl/types/optional.h" #include "ui/message_center/message_center_observer.h" @@ -37,6 +38,7 @@ // FeaturePodControllerBase: FeaturePodButton* CreateButton() override; + std::unique_ptr<FeatureTile> CreateTile() override; QsFeatureCatalogName GetCatalogName() override; void OnIconPressed() override; void OnLabelPressed() override; @@ -55,9 +57,13 @@ UnifiedSystemTrayController* const tray_controller_; + // Owned by the views hierarchy. FeaturePodButton* button_ = nullptr; + FeatureTile* tile_ = nullptr; absl::optional<int> last_disabled_count_; + + base::WeakPtrFactory<QuietModeFeaturePodController> weak_ptr_factory_{this}; }; } // namespace ash
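CreateTile() binds OnIconPressed through a WeakPtr, and the header adds the matching base::WeakPtrFactory as the last member so outstanding weak pointers are invalidated before the rest of the controller is torn down. A hedged sketch of that convention with placeholder names:

```cpp
// Sketch only; TileController and OnPressed are placeholder names. The
// include paths follow the ones used elsewhere in this patch.
#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/weak_ptr.h"

class TileController {
 public:
  base::RepeatingClosure MakePressedCallback() {
    // The callback becomes a no-op once `this` is destroyed, because the
    // bound WeakPtr is invalidated when the factory is destroyed.
    return base::BindRepeating(&TileController::OnPressed,
                               weak_ptr_factory_.GetWeakPtr());
  }

 private:
  void OnPressed() {}

  // Declared last so it is destroyed first, invalidating weak pointers
  // before any other member goes away.
  base::WeakPtrFactory<TileController> weak_ptr_factory_{this};
};
```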
diff --git a/ash/system/unified/quiet_mode_feature_pod_controller_unittest.cc b/ash/system/unified/quiet_mode_feature_pod_controller_unittest.cc index f0b215e8..c85143ba 100644 --- a/ash/system/unified/quiet_mode_feature_pod_controller_unittest.cc +++ b/ash/system/unified/quiet_mode_feature_pod_controller_unittest.cc
@@ -7,6 +7,7 @@ #include "ash/constants/ash_features.h" #include "ash/constants/quick_settings_catalogs.h" #include "ash/system/unified/feature_pod_button.h" +#include "ash/system/unified/feature_tile.h" #include "ash/system/unified/unified_system_tray.h" #include "ash/system/unified/unified_system_tray_bubble.h" #include "ash/system/unified/unified_system_tray_controller.h" @@ -19,7 +20,7 @@ // Tests manually control their session state. class QuietModeFeaturePodControllerTest : public NoSessionAshTestBase, - public testing::WithParamInterface<bool> { + public testing::WithParamInterface<std::tuple<bool, bool>> { public: QuietModeFeaturePodControllerTest() = default; @@ -31,28 +32,42 @@ ~QuietModeFeaturePodControllerTest() override = default; void SetUp() override { + auto enabled_features = std::vector<base::test::FeatureRef>(); if (IsOsSettingsAppBadgingToggleEnabled()) { - feature_list_.InitWithFeatures( - /*enabled_features=*/{features::kOsSettingsAppBadgingToggle}, - /*disabled_features=*/{}); + enabled_features.push_back(features::kOsSettingsAppBadgingToggle); } + if (IsQsRevampEnabled()) { + enabled_features.push_back(features::kQsRevamp); + enabled_features.push_back(features::kQsRevampWip); + } + feature_list_.InitWithFeatures(enabled_features, /*disabled_features*/ {}); NoSessionAshTestBase::SetUp(); GetPrimaryUnifiedSystemTray()->ShowBubble(); } void TearDown() override { - button_.reset(); + if (IsQsRevampEnabled()) { + tile_.reset(); + } else { + button_.reset(); + } controller_.reset(); NoSessionAshTestBase::TearDown(); } - bool IsOsSettingsAppBadgingToggleEnabled() { return GetParam(); } + bool IsOsSettingsAppBadgingToggleEnabled() { return std::get<0>(GetParam()); } + + bool IsQsRevampEnabled() { return std::get<1>(GetParam()); } void SetUpButton() { controller_ = std::make_unique<QuietModeFeaturePodController>(tray_controller()); - button_.reset(controller_->CreateButton()); + if (IsQsRevampEnabled()) { + tile_ = controller_->CreateTile(); + } else { + button_.reset(controller_->CreateButton()); + } } UnifiedSystemTrayController* tray_controller() { @@ -65,30 +80,37 @@ void PressLabel() { controller_->OnLabelPressed(); } + bool IsButtonVisible() { + return IsQsRevampEnabled() ? tile_->GetVisible() : button_->GetVisible(); + } + FeaturePodButton* button() { return button_.get(); } private: std::unique_ptr<QuietModeFeaturePodController> controller_; std::unique_ptr<FeaturePodButton> button_; + std::unique_ptr<FeatureTile> tile_; base::test::ScopedFeatureList feature_list_; }; INSTANTIATE_TEST_SUITE_P( All, QuietModeFeaturePodControllerTest, - testing::Bool() /* IsOsSettingsAppBadgingToggleEnabled() */); + testing::Combine( + testing::Bool() /* IsOsSettingsAppBadgingToggleEnabled() */, + testing::Bool() /* IsQsRevampEnabled */)); TEST_P(QuietModeFeaturePodControllerTest, ButtonVisibilityNotLoggedIn) { SetUpButton(); // If not logged in, it should not be visible. - EXPECT_FALSE(button()->GetVisible()); + EXPECT_FALSE(IsButtonVisible()); } TEST_P(QuietModeFeaturePodControllerTest, ButtonVisibilityLoggedIn) { CreateUserSessions(1); SetUpButton(); // If logged in, it should be visible. - EXPECT_TRUE(button()->GetVisible()); + EXPECT_TRUE(IsButtonVisible()); } TEST_P(QuietModeFeaturePodControllerTest, ButtonVisibilityLocked) { @@ -96,10 +118,15 @@ BlockUserSession(UserSessionBlockReason::BLOCKED_BY_LOCK_SCREEN); SetUpButton(); // If locked, it should not be visible. 
- EXPECT_FALSE(button()->GetVisible()); + EXPECT_FALSE(IsButtonVisible()); } TEST_P(QuietModeFeaturePodControllerTest, IconUMATracking) { + // TODO(b/263505103): Implement Feature Tile metrics. + if (IsQsRevampEnabled()) { + return; + } + CreateUserSessions(1); SetUpButton(); message_center::MessageCenter::Get()->SetQuietMode(false); @@ -147,6 +174,11 @@ } TEST_P(QuietModeFeaturePodControllerTest, LabelUMATracking) { + // Qs Revamp Feature Tile does not have a detailed view. + if (IsQsRevampEnabled()) { + return; + } + CreateUserSessions(1); SetUpButton();
diff --git a/ash/system/unified/unified_system_tray_controller.cc b/ash/system/unified/unified_system_tray_controller.cc index 430b760..bb4757ca 100644 --- a/ash/system/unified/unified_system_tray_controller.cc +++ b/ash/system/unified/unified_system_tray_controller.cc
@@ -654,26 +654,28 @@ void UnifiedSystemTrayController::InitFeatureTiles() { // TODO(b/252871301): Create each feature's tile. - auto accessibility_controller = - std::make_unique<AccessibilityFeaturePodController>(this); - auto bluetooth_controller = - std::make_unique<BluetoothFeaturePodController>(this); - auto screen_capture_controller = - std::make_unique<CaptureModeFeaturePodController>(this); - auto cast_controller = std::make_unique<CastFeaturePodController>(this); - auto vpn_controller = std::make_unique<VPNFeaturePodController>(this); - std::vector<std::unique_ptr<FeatureTile>> tiles; - tiles.push_back(bluetooth_controller->CreateTile()); - tiles.push_back(screen_capture_controller->CreateTile()); - // Placeholder tile. - tiles.push_back( - std::make_unique<FeatureTile>(FeatureTile::TileType::kCompact)); + auto create_tile = + [](std::unique_ptr<FeaturePodControllerBase> controller, + std::vector<std::unique_ptr<FeaturePodControllerBase>>& controllers, + std::vector<std::unique_ptr<FeatureTile>>& tiles) { + tiles.push_back(controller->CreateTile()); + controllers.push_back(std::move(controller)); + }; - tiles.push_back(accessibility_controller->CreateTile()); - tiles.push_back(cast_controller->CreateTile()); - tiles.push_back(vpn_controller->CreateTile()); + create_tile(std::make_unique<BluetoothFeaturePodController>(this), + feature_pod_controllers_, tiles); + create_tile(std::make_unique<CaptureModeFeaturePodController>(this), + feature_pod_controllers_, tiles); + create_tile(std::make_unique<QuietModeFeaturePodController>(this), + feature_pod_controllers_, tiles); + create_tile(std::make_unique<AccessibilityFeaturePodController>(this), + feature_pod_controllers_, tiles); + create_tile(std::make_unique<CastFeaturePodController>(this), + feature_pod_controllers_, tiles); + create_tile(std::make_unique<VPNFeaturePodController>(this), + feature_pod_controllers_, tiles); // More placeholder tiles. while (tiles.size() < 10) { @@ -681,13 +683,6 @@ } quick_settings_view_->AddTiles(std::move(tiles)); - - // Transfer ownership of controllers to this. - feature_pod_controllers_.push_back(std::move(accessibility_controller)); - feature_pod_controllers_.push_back(std::move(bluetooth_controller)); - feature_pod_controllers_.push_back(std::move(cast_controller)); - feature_pod_controllers_.push_back(std::move(screen_capture_controller)); - feature_pod_controllers_.push_back(std::move(vpn_controller)); } void UnifiedSystemTrayController::AddFeaturePodItem(
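InitFeatureTiles() now routes every pod controller through a single lambda that creates the tile and immediately moves the controller into feature_pod_controllers_, rather than naming each controller twice. A reduced, self-contained sketch of that ownership-transfer shape (Controller and Tile are placeholders for FeaturePodControllerBase and FeatureTile):

```cpp
// Sketch only; Controller and Tile stand in for the real classes.
#include <memory>
#include <utility>
#include <vector>

struct Tile {};

struct Controller {
  std::unique_ptr<Tile> CreateTile() { return std::make_unique<Tile>(); }
};

int main() {
  std::vector<std::unique_ptr<Controller>> controllers;
  std::vector<std::unique_ptr<Tile>> tiles;

  // Create the tile first, then stash the controller so it stays alive for
  // as long as the object that owns `controllers` does.
  auto create_tile = [&](std::unique_ptr<Controller> controller) {
    tiles.push_back(controller->CreateTile());
    controllers.push_back(std::move(controller));
  };

  create_tile(std::make_unique<Controller>());
  create_tile(std::make_unique<Controller>());
  return tiles.size() == 2 && controllers.size() == 2 ? 0 : 1;
}
```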
diff --git a/ash/webui/file_manager/untrusted_resources/files_browsable_content.js b/ash/webui/file_manager/untrusted_resources/files_browsable_content.js index e045dd9..82bc5c29 100644 --- a/ash/webui/file_manager/untrusted_resources/files_browsable_content.js +++ b/ash/webui/file_manager/untrusted_resources/files_browsable_content.js
@@ -8,6 +8,11 @@ let type; /** + * @type {string} The content URL of the source file to preview. + */ +let contentUrl = ''; + +/** * <style> element for (non-PDF) browsable content. */ const style = document.createElement('style'); @@ -61,9 +66,11 @@ return; } - const contentsIframe = document.querySelector('#content'); + // Release Object URLs generated with URL.createObjectURL. + URL.revokeObjectURL(contentUrl); + contentUrl = ''; + const {browsable, subtype, sourceContent} = event.data; - let contentUrl; switch (sourceContent.dataType) { case 'url': contentUrl = sourceContent.data; @@ -71,15 +78,20 @@ case 'blob': contentUrl = URL.createObjectURL(sourceContent.data); break; - default: - contentUrl = 'about:blank'; } - if (browsable && subtype === 'PDF') { - contentUrl += '#view=FitH'; + + let sourceUrl = contentUrl; + if (sourceUrl && browsable && subtype === 'PDF') { + sourceUrl += '#view=FitH'; } type = subtype; - contentUrl = contentUrl || 'about:blank'; - console.log('Setting iframe.src to: ' + contentUrl); - contentsIframe.src = contentUrl; + if (!sourceUrl) { + sourceUrl = 'about:blank'; + type = ''; + } + + const contentsIframe = document.querySelector('#content'); + console.log('Setting iframe.src to:', sourceUrl); + contentsIframe.src = sourceUrl; });
diff --git a/ash/webui/file_manager/untrusted_resources/files_media_content.js b/ash/webui/file_manager/untrusted_resources/files_media_content.js index 24730ae..cad81f57 100644 --- a/ash/webui/file_manager/untrusted_resources/files_media_content.js +++ b/ash/webui/file_manager/untrusted_resources/files_media_content.js
@@ -18,14 +18,12 @@ let loadId = 0; window.addEventListener('message', event => { - if (event.origin !== FILES_APP_SWA_ORIGIN && - event.origin !== LEGACY_FILES_APP_ORIGIN) { + if (event.origin !== FILES_APP_SWA_ORIGIN) { console.error('Unknown origin: ' + event.origin); return; } const currentLoadId = ++loadId; - function isValidLoad() { return currentLoadId === loadId; } @@ -41,7 +39,6 @@ const data = event.data; const sourceContent = data.sourceContent; - switch (sourceContent.dataType) { case 'url': contentUrl = /** @type {string} */ (sourceContent.data);
diff --git a/ash/webui/file_manager/untrusted_resources/url_constants.js b/ash/webui/file_manager/untrusted_resources/url_constants.js index 2b6135f9..23005ec0 100644 --- a/ash/webui/file_manager/untrusted_resources/url_constants.js +++ b/ash/webui/file_manager/untrusted_resources/url_constants.js
@@ -5,10 +5,4 @@ /** * @const {string} */ -const LEGACY_FILES_APP_ORIGIN = - 'chrome-extension://hhaomjibdihmijegdhdafkllkbggdgoj'; - -/** - * @const {string} - */ const FILES_APP_SWA_ORIGIN = 'chrome://file-manager';
diff --git a/ash/webui/media_app_ui/OWNERS b/ash/webui/media_app_ui/OWNERS index ef400a9..9bd52241 100644 --- a/ash/webui/media_app_ui/OWNERS +++ b/ash/webui/media_app_ui/OWNERS
@@ -3,7 +3,6 @@ bugsnash@chromium.org patricialor@chromium.org zafzal@google.com -mcdermottm@google.com per-file *.mojom=set noparent per-file *.mojom=file://ipc/SECURITY_OWNERS
diff --git a/ash/wm/desks/desk.cc b/ash/wm/desks/desk.cc index c88d681..e310a6f 100644 --- a/ash/wm/desks/desk.cc +++ b/ash/wm/desks/desk.cc
@@ -245,6 +245,41 @@ }; // ----------------------------------------------------------------------------- +// Desk::ScopedContentUpdateNotificationDisabler: + +Desk::ScopedContentUpdateNotificationDisabler:: + ScopedContentUpdateNotificationDisabler( + const std::vector<std::unique_ptr<Desk>>& desks, + bool notify_when_destroyed) + : notify_when_destroyed_(notify_when_destroyed) { + DCHECK(!desks.empty()); + + for (auto& desk : desks) { + desks_.push_back(desk.get()); + desks_.back()->SuspendContentUpdateNotification(); + } +} + +Desk::ScopedContentUpdateNotificationDisabler:: + ScopedContentUpdateNotificationDisabler(const std::vector<Desk*>& desks, + bool notify_when_destroyed) + : notify_when_destroyed_(notify_when_destroyed) { + DCHECK(!desks.empty()); + + for (auto* desk : desks) { + desks_.push_back(desk); + desks_.back()->SuspendContentUpdateNotification(); + } +} + +Desk::ScopedContentUpdateNotificationDisabler:: + ~ScopedContentUpdateNotificationDisabler() { + for (auto* desk : desks_) { + desk->ResumeContentUpdateNotification(notify_when_destroyed_); + } +} + +// ----------------------------------------------------------------------------- // Desk: Desk::Desk(int associated_container_id, bool desk_being_restored) @@ -411,10 +446,6 @@ } } -base::AutoReset<bool> Desk::GetScopedNotifyContentChangedDisabler() { - return base::AutoReset<bool>(&should_notify_content_changed_, false); -} - bool Desk::ContainsAppWindows() const { return !GetAllAppWindows().empty(); } @@ -559,90 +590,82 @@ void Desk::MoveNonAppOverviewWindowsToDesk(Desk* target_desk) { DCHECK(Shell::Get()->overview_controller()->InOverviewSession()); - { - // Wait until the end to allow notifying the observers of either desk. - auto this_desk_throttled = GetScopedNotifyContentChangedDisabler(); - auto target_desk_throttled = - target_desk->GetScopedNotifyContentChangedDisabler(); + // Wait until the end to allow notifying the observers of either desk. + auto this_desk_throttled = ScopedContentUpdateNotificationDisabler( + /*desks=*/{this}, /*notify_when_destroyed=*/false); + auto target_desk_throttled = ScopedContentUpdateNotificationDisabler( + /*desks=*/{target_desk}, /*notify_when_destroyed=*/true); - // Create a `aura::WindowTracker` to hold `windows_`'s windows so that we do - // not edit `windows_` in place. - aura::WindowTracker window_tracker(windows_); + // Create a `aura::WindowTracker` to hold `windows_`'s windows so that we do + // not edit `windows_` in place. + aura::WindowTracker window_tracker(windows_); - // Move only the non-app overview windows. - while (!window_tracker.windows().empty()) { - auto* window = window_tracker.Pop(); - if (IsOverviewUiWindow(window)) - MoveWindowToDeskInternal(window, target_desk, window->GetRootWindow()); + // Move only the non-app overview windows. + while (!window_tracker.windows().empty()) { + auto* window = window_tracker.Pop(); + if (IsOverviewUiWindow(window)) { + MoveWindowToDeskInternal(window, target_desk, window->GetRootWindow()); } } - - target_desk->NotifyContentChanged(); } void Desk::MoveWindowsToDesk(Desk* target_desk) { DCHECK(target_desk); - { - ScopedWindowPositionerDisabler window_positioner_disabler; + ScopedWindowPositionerDisabler window_positioner_disabler; - // Throttle notifying the observers, while we move those windows and notify - // them only once when done. 
- auto this_desk_throttled = GetScopedNotifyContentChangedDisabler(); - auto target_desk_throttled = - target_desk->GetScopedNotifyContentChangedDisabler(); + // Throttle notifying the observers, while we move those windows and notify + // them only once when done. + auto this_and_target_desk_throttled = ScopedContentUpdateNotificationDisabler( + /*desks=*/{this, target_desk}, /*notify_when_destroyed=*/true); - // There are 2 cases in moving floated window during desk removal. - // Case 1: If there's no floated window on the "moved-to" desk, then the - // floated window on the current desk should remain floated. Case 2: If - // there's a floating window on the "moved-to" desk too, unfloat the one on - // the closed desk and retain the one on the "moved-to" desk. - // Special Note: - // Because of Case 2, below operation needs to be done before calling - // `MoveWindowToDeskInternal` on `windows_to_move`. We want to re-parent - // floated window back to desk container before the removal, so all windows - // under the to-be-removed desk's container can be collected in - // `windows_to_move` to move to target desk. - if (chromeos::wm::features::IsFloatWindowEnabled()) { - Shell::Get()->float_controller()->OnMovingAllWindowsOutToDesk( - this, target_desk); - } + // There are 2 cases in moving floated window during desk removal. + // Case 1: If there's no floated window on the "moved-to" desk, then the + // floated window on the current desk should remain floated. Case 2: If + // there's a floating window on the "moved-to" desk too, unfloat the one on + // the closed desk and retain the one on the "moved-to" desk. + // Special Note: + // Because of Case 2, below operation needs to be done before calling + // `MoveWindowToDeskInternal` on `windows_to_move`. We want to re-parent + // floated window back to desk container before the removal, so all windows + // under the to-be-removed desk's container can be collected in + // `windows_to_move` to move to target desk. + if (chromeos::wm::features::IsFloatWindowEnabled()) { + Shell::Get()->float_controller()->OnMovingAllWindowsOutToDesk(this, + target_desk); + } - // Moving windows will change the hierarchy and hence |windows_|, and has to - // be done without changing the relative z-order. So we make a copy of all - // the top-level windows on all the containers of this desk, such that - // windows in each container are copied from top-most (z-order) to - // bottom-most. - // Note that moving windows out of the container and restacking them - // differently may trigger events that lead to destroying a window on the - // list. For example moving the top-most window which has a backdrop will - // cause the backdrop to be destroyed. Therefore observe such events using - // an |aura::WindowTracker|. - aura::WindowTracker windows_to_move; - for (aura::Window* root : Shell::GetAllRootWindows()) { - const aura::Window* container = GetDeskContainerForRoot(root); - for (auto* window : base::Reversed(container->children())) - windows_to_move.Add(window); - } - - auto* mru_tracker = Shell::Get()->mru_window_tracker(); - while (!windows_to_move.windows().empty()) { - auto* window = windows_to_move.Pop(); - if (!CanMoveWindowOutOfDeskContainer(window)) - continue; - - // Note that windows that belong to the same container in - // |windows_to_move| are sorted from top-most to bottom-most, hence - // calling |StackChildAtBottom()| on each in this order will maintain that - // same order in the |target_desk|'s container. 
- MoveWindowToDeskInternal(window, target_desk, window->GetRootWindow()); - window->parent()->StackChildAtBottom(window); - mru_tracker->OnWindowMovedOutFromRemovingDesk(window); + // Moving windows will change the hierarchy and hence `windows_`, and has to + // be done without changing the relative z-order. So we make a copy of all the + // top-level windows on all the containers of this desk, such that windows in + // each container are copied from top-most (z-order) to bottom-most. Note that + // moving windows out of the container and restacking them differently may + // trigger events that lead to destroying a window on the list. For example + // moving the top-most window which has a backdrop will cause the backdrop to + // be destroyed. Therefore observe such events using an `aura::WindowTracker`. + aura::WindowTracker windows_to_move; + for (aura::Window* root : Shell::GetAllRootWindows()) { + const aura::Window* container = GetDeskContainerForRoot(root); + for (auto* window : base::Reversed(container->children())) { + windows_to_move.Add(window); } } - NotifyContentChanged(); - target_desk->NotifyContentChanged(); + auto* mru_tracker = Shell::Get()->mru_window_tracker(); + while (!windows_to_move.windows().empty()) { + auto* window = windows_to_move.Pop(); + if (!CanMoveWindowOutOfDeskContainer(window)) { + continue; + } + + // Note that windows that belong to the same container in `windows_to_move` + // are sorted from top-most to bottom-most, hence calling + // `StackChildAtBottom()` on each in this order will maintain that same + // order in the target_desk's container. + MoveWindowToDeskInternal(window, target_desk, window->GetRootWindow()); + window->parent()->StackChildAtBottom(window); + mru_tracker->OnWindowMovedOutFromRemovingDesk(window); + } } void Desk::MoveWindowToDesk(aura::Window* window, @@ -655,41 +678,35 @@ DCHECK(base::Contains(windows_, window)); DCHECK(this != target_desk); - { - ScopedWindowPositionerDisabler window_positioner_disabler; + ScopedWindowPositionerDisabler window_positioner_disabler; - // Throttling here is necessary even though we're attempting to move a - // single window. This is because that window might exist in a transient - // window tree, which will result in actually moving multiple windows if the - // transient children used to be on the same container. - // See `wm::TransientWindowManager::OnWindowHierarchyChanged()`. - auto this_desk_throttled = GetScopedNotifyContentChangedDisabler(); - auto target_desk_throttled = - target_desk->GetScopedNotifyContentChangedDisabler(); + // Throttling here is necessary even though we're attempting to move a + // single window. This is because that window might exist in a transient + // window tree, which will result in actually moving multiple windows if the + // transient children used to be on the same container. + // See `wm::TransientWindowManager::OnWindowHierarchyChanged()`. + auto this_and_target_desk_throttled = ScopedContentUpdateNotificationDisabler( + /*desks=*/{this, target_desk}, /*notify_when_destroyed=*/true); - // Always move the root of the transient window tree. We should never move a - // transient child and leave its parent behind. Moving the transient - // descendants that exist on the same desk container will be taken care of - // by `wm::TransientWindowManager::OnWindowHierarchyChanged()`. - aura::Window* transient_root = ::wm::GetTransientRoot(window); - MoveWindowToDeskInternal(transient_root, target_desk, target_root); + // Always move the root of the transient window tree. 
We should never move a + // transient child and leave its parent behind. Moving the transient + // descendants that exist on the same desk container will be taken care of by + // `wm::TransientWindowManager::OnWindowHierarchyChanged()`. + aura::Window* transient_root = ::wm::GetTransientRoot(window); + MoveWindowToDeskInternal(transient_root, target_desk, target_root); - if (!desks_util::IsWindowVisibleOnAllWorkspaces(window)) { - FixWindowStackingAccordingToGlobalMru(transient_root); - } - - // Unminimize the window so that it shows up in the mini_view after it had - // been dragged and moved to another desk. Don't unminimize if the window is - // visible on all desks since it's being moved during desk activation. - auto* window_state = WindowState::Get(transient_root); - if (unminimize && window_state->IsMinimized() && - !desks_util::IsWindowVisibleOnAllWorkspaces(window)) { - window_state->Unminimize(); - } + if (!desks_util::IsWindowVisibleOnAllWorkspaces(window)) { + FixWindowStackingAccordingToGlobalMru(transient_root); } - NotifyContentChanged(); - target_desk->NotifyContentChanged(); + // Unminimize the window so that it shows up in the mini_view after it had + // been dragged and moved to another desk. Don't unminimize if the window is + // visible on all desks since it's being moved during desk activation. + auto* window_state = WindowState::Get(transient_root); + if (unminimize && window_state->IsMinimized() && + !desks_util::IsWindowVisibleOnAllWorkspaces(window)) { + window_state->Unminimize(); + } } aura::Window* Desk::GetDeskContainerForRoot(aura::Window* root) const { @@ -699,13 +716,15 @@ } void Desk::NotifyContentChanged() { - if (!should_notify_content_changed_) + if (ContentUpdateNotificationSuspended()) { return; + } // Updating the backdrops below may lead to the removal or creation of // backdrop windows in this desk, which can cause us to recurse back here. // Disable this. - auto disable_recursion = GetScopedNotifyContentChangedDisabler(); + auto disable_recursion = ScopedContentUpdateNotificationDisabler( + /*desks=*/{this}, /*notify_when_destroyed=*/false); // The availability and visibility of backdrops of all containers associated // with this desk will be updated *before* notifying observer, so that the @@ -878,6 +897,10 @@ --it->order; } +bool Desk::ContentUpdateNotificationSuspended() const { + return content_update_notification_suspend_count_ != 0; +} + void Desk::MoveWindowToDeskInternal(aura::Window* window, Desk* target_desk, aura::Window* target_root) { @@ -932,4 +955,18 @@ ++g_weekly_active_desks; } +void Desk::SuspendContentUpdateNotification() { + ++content_update_notification_suspend_count_; +} + +void Desk::ResumeContentUpdateNotification(bool notify_when_fully_resumed) { + --content_update_notification_suspend_count_; + DCHECK_GE(content_update_notification_suspend_count_, 0); + + if (!content_update_notification_suspend_count_ && + notify_when_fully_resumed) { + NotifyContentChanged(); + } +} + } // namespace ash
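The rewritten MoveWindowsToDesk() above trades a snapshot vector for an aura::WindowTracker. A condensed sketch of that collect-then-pop pattern, using only the tracker calls that appear in the hunk, to make the safety argument explicit:

// Sketch of the pattern used above. aura::WindowTracker observes its windows,
// so a window destroyed as a side effect of an earlier move (e.g. a backdrop
// owned by the previously top-most window) drops out of the tracker before the
// loop reaches it.
aura::WindowTracker windows_to_move;
for (aura::Window* root : Shell::GetAllRootWindows()) {
  const aura::Window* container = GetDeskContainerForRoot(root);
  // children() is ordered bottom-most to top-most, so iterate in reverse to
  // visit the top-most window first; restacking each at the bottom of the
  // target container then reproduces the original z-order.
  for (aura::Window* window : base::Reversed(container->children()))
    windows_to_move.Add(window);
}
while (!windows_to_move.windows().empty()) {
  aura::Window* window = windows_to_move.Pop();
  // ... move `window` to the target desk and StackChildAtBottom() ...
}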
diff --git a/ash/wm/desks/desk.h b/ash/wm/desks/desk.h index 7e64d975..73599ef 100644 --- a/ash/wm/desks/desk.h +++ b/ash/wm/desks/desk.h
@@ -10,7 +10,6 @@ #include <vector> #include "ash/ash_export.h" -#include "base/auto_reset.h" #include "base/containers/flat_map.h" #include "base/guid.h" #include "base/observer_list.h" @@ -50,6 +49,34 @@ virtual void OnDeskNameChanged(const std::u16string& new_name) = 0; }; + // Suspends notification of content updates within its scope. Note that the + // relevant `Desk` must outlive this class. + class ScopedContentUpdateNotificationDisabler { + public: + // `desks` are the desks whose content update will be suspended. If + // `notify_when_destroyed` is true, it will send out a notification when + // this is destroyed and there are no other disablers. + ScopedContentUpdateNotificationDisabler( + const std::vector<std::unique_ptr<Desk>>& desks, + bool notify_when_destroyed); + ScopedContentUpdateNotificationDisabler(const std::vector<Desk*>& desks, + bool notify_when_destroyed); + + ScopedContentUpdateNotificationDisabler( + const ScopedContentUpdateNotificationDisabler&) = delete; + ScopedContentUpdateNotificationDisabler& operator=( + const ScopedContentUpdateNotificationDisabler&) = delete; + + ~ScopedContentUpdateNotificationDisabler(); + + private: + std::vector<Desk*> desks_; + + // Notifies all desks in `desks_` via `NotifyContentChanged()` when this is + // destroyed and there are no other disablers. + const bool notify_when_destroyed_; + }; + // Tracks stacking order for a window that is visible on all desks. This is // used to support per-desk z-orders for all-desk windows. Entries are stored // in ascending `order`. @@ -82,10 +109,6 @@ bool is_active() const { return is_active_; } - bool should_notify_content_changed() const { - return should_notify_content_changed_; - } - bool is_name_set_by_user() const { return is_name_set_by_user_; } bool is_desk_being_removed() const { return is_desk_being_removed_; } @@ -129,8 +152,6 @@ void WillRemoveWindowFromDesk(aura::Window* window); - base::AutoReset<bool> GetScopedNotifyContentChangedDisabler(); - bool ContainsAppWindows() const; // Sets the desk's name to |new_name| and updates the observers. @@ -231,6 +252,9 @@ // or not longer being all-desk). void RemoveAllDeskWindow(aura::Window* window); + // Returns true if notification of content update is suspended. + bool ContentUpdateNotificationSuspended() const; + private: friend class DesksTestApi; @@ -254,6 +278,15 @@ // |g_weekly_active_desks| and set |this| to interacted with. void MaybeIncrementWeeklyActiveDesks(); + // Suspends notification of content update. + void SuspendContentUpdateNotification(); + + // Resumes notification of content update. If `notify_when_fully_resumed` is + // true, it will send out one notification at the end about the content update + // if there are no remaining pending suspensions, e.g. there are no other + // content update notification disablers. + void ResumeContentUpdateNotification(bool notify_when_fully_resumed); + // Uniquely identifies the desk. const base::GUID uuid_; @@ -277,10 +310,11 @@ bool is_active_ = false; - // If false, observers won't be notified of desk's contents changes. This is - // used to throttle those notifications when we add or remove many windows, - // and we want to notify observers only once. - bool should_notify_content_changed_ = true; + // Count of pending content update notification suspensions. If it is greater + // than 0, observers won't be notified of desk's content changes. This is used + // to throttle those notifications when we add or remove many windows, and we + // want to notify observers only once. 
+ int content_update_notification_suspend_count_ = 0; // True if the `PrepareForActivationAnimation()` was called, and this desk's // containers are shown while their layer opacities are temporarily set to 0.
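The new Desk::ScopedContentUpdateNotificationDisabler declared above replaces the old boolean throttle with a per-desk suspend count, so disablers can nest. A minimal usage sketch (hypothetical caller code, assuming a valid Desk* named `desk`) showing how nested disablers coalesce into a single notification:

{
  // Outer disabler: asks for one notification once every suspension is gone.
  Desk::ScopedContentUpdateNotificationDisabler outer(
      /*desks=*/{desk}, /*notify_when_destroyed=*/true);
  {
    // Inner disabler: purely throttles, never notifies on its own.
    Desk::ScopedContentUpdateNotificationDisabler inner(
        /*desks=*/{desk}, /*notify_when_destroyed=*/false);
    // Add or remove many windows here; observers are not notified because
    // content_update_notification_suspend_count_ is 2.
  }  // `inner` destroyed: the count drops to 1, still suspended.
}    // `outer` destroyed: the count drops to 0 and a single
     // NotifyContentChanged() fires for `desk`.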
diff --git a/ash/wm/desks/desks_controller.cc b/ash/wm/desks/desks_controller.cc index 952b886..0869f280 100644 --- a/ash/wm/desks/desks_controller.cc +++ b/ash/wm/desks/desks_controller.cc
@@ -677,12 +677,11 @@ // If we are switching users, we don't want to notify desks of content changes // until the user switch animation has shown the new user's windows. const bool is_user_switch = source == DesksSwitchSource::kUserSwitch; - std::vector<base::AutoReset<bool>> desks_scoped_notify_disablers; + absl::optional<Desk::ScopedContentUpdateNotificationDisabler> + desks_scoped_notify_disabler; if (is_user_switch) { - for (const auto& desk_to_notify : desks_) { - desks_scoped_notify_disablers.push_back( - desk_to_notify->GetScopedNotifyContentChangedDisabler()); - } + desks_scoped_notify_disabler.emplace(/*desks=*/desks_, + /*notify_when_destroyed=*/false); } OverviewController* overview_controller = Shell::Get()->overview_controller(); @@ -694,10 +693,9 @@ // switching to a new user, otherwise the multi user switch animation will // animate the same windows that overview watches to determine if the // overview shutdown animation is complete. See https://crbug.com/1001586. - const bool immediate_exit = source == DesksSwitchSource::kUserSwitch; overview_controller->EndOverview( OverviewEndAction::kDeskActivation, - immediate_exit ? OverviewEnterExitType::kImmediateExit + is_user_switch ? OverviewEnterExitType::kImmediateExit : OverviewEnterExitType::kNormal); } return; @@ -1664,7 +1662,9 @@ // No need to spend time refreshing the mini_views of the removed desk. auto removed_desk_mini_views_pauser = - removed_desk->GetScopedNotifyContentChangedDisabler(); + Desk::ScopedContentUpdateNotificationDisabler( + /*desks=*/{removed_desk}, + /*notify_when_destroyed=*/false); // - If the active desk is the one being removed, activate the desk to its // left, if no desk to the left, activate one on the right. @@ -1689,7 +1689,9 @@ // The target desk, which is about to become active, will have its // mini_views refreshed at the end. auto target_desk_mini_view_pauser = - target_desk->GetScopedNotifyContentChangedDisabler(); + Desk::ScopedContentUpdateNotificationDisabler( + /*desks=*/{target_desk}, + /*notify_when_destroyed=*/false); // Exit split view if active, before activating the new desk. We will // restore the split view state of the newly activated desk at the end. @@ -1738,7 +1740,9 @@ } else if (close_type == DeskCloseType::kCombineDesks) { // We will refresh the mini_views of the active desk only once at the end. auto active_desk_mini_view_pauser = - active_desk_->GetScopedNotifyContentChangedDisabler(); + Desk::ScopedContentUpdateNotificationDisabler( + /*desks=*/{active_desk_}, + /*notify_when_destroyed=*/false); removed_desk->MoveWindowsToDesk(active_desk_); @@ -1756,7 +1760,7 @@ // It's OK now to refresh the mini_views of *only* the active desk, and only // if windows from the removed desk moved to it. - DCHECK(active_desk_->should_notify_content_changed()); + DCHECK(!active_desk_->ContentUpdateNotificationSuspended()); if (!removed_desk_windows.empty()) active_desk_->NotifyContentChanged(); @@ -1876,7 +1880,8 @@ // Content changed notifications for this desk should be disabled when // we are destroying the windows. auto throttle_desk_notifications = - removed_desk->GetScopedNotifyContentChangedDisabler(); + Desk::ScopedContentUpdateNotificationDisabler( + /*desks=*/{removed_desk}, /*notify_when_destroyed=*/false); std::vector<aura::Window*> app_windows = removed_desk->GetAllAppWindows();
diff --git a/ash/wm/overview/overview_grid.cc b/ash/wm/overview/overview_grid.cc index 99c800bf..deb7bd9 100644 --- a/ash/wm/overview/overview_grid.cc +++ b/ash/wm/overview/overview_grid.cc
@@ -768,10 +768,17 @@ } void OverviewGrid::RemoveAllItemsForSavedDeskLaunch() { - for (auto& item : window_list_) { - item->RevertHideForSavedDeskLibrary(/*animate=*/false); - item->RestoreWindow(/*reset_transform=*/true, - /*was_saved_desk_library_showing=*/true); + { + // Wait until the end to notify content changes for all desks. + Desk::ScopedContentUpdateNotificationDisabler desks_scoped_notify_disabler( + /*desks=*/DesksController::Get()->desks(), + /*notify_when_destroyed=*/true); + + for (auto& item : window_list_) { + item->RevertHideForSavedDeskLibrary(/*animate=*/false); + item->RestoreWindow(/*reset_transform=*/true, + /*was_saved_desk_library_showing=*/true); + } } window_list_.clear(); num_incognito_windows_ = 0; @@ -1746,8 +1753,16 @@ saved_desk_library_widget_->SetBounds(library_bounds); } - for (auto& overview_mode_item : window_list_) - overview_mode_item->HideForSavedDeskLibrary(/*animate=*/true); + { + // Wait until the end to notify content changes for all desks. + Desk::ScopedContentUpdateNotificationDisabler desks_scoped_notify_disabler( + /*desks=*/DesksController::Get()->desks(), + /*notify_when_destroyed=*/true); + + for (auto& overview_mode_item : window_list_) { + overview_mode_item->HideForSavedDeskLibrary(/*animate=*/true); + } + } // There may be an existing animation in progress triggered by // `HideSavedDeskLibrary()` below, which animates a widget to 0.f before @@ -1781,6 +1796,11 @@ if (already_hiding_grid) return; + // Wait until the end to notify content changes for all desks. + Desk::ScopedContentUpdateNotificationDisabler desks_scoped_notify_disabler( + /*desks=*/DesksController::Get()->desks(), + /*notify_when_destroyed=*/true); + if (exit_overview && overview_session_->enter_exit_overview_type() == OverviewEnterExitType::kImmediateExit) { // Since we're immediately exiting, we don't need to animate anything.
diff --git a/build/android/apk_operations.pydeps b/build/android/apk_operations.pydeps index 1db6059..dad21811 100644 --- a/build/android/apk_operations.pydeps +++ b/build/android/apk_operations.pydeps
@@ -64,8 +64,8 @@ ../../third_party/catapult/devil/devil/utils/zip_utils.py ../../third_party/catapult/third_party/six/six.py ../../third_party/jinja2/__init__.py -../../third_party/jinja2/_compat.py ../../third_party/jinja2/_identifier.py +../../third_party/jinja2/async_utils.py ../../third_party/jinja2/bccache.py ../../third_party/jinja2/compiler.py ../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/compile_resources.pydeps b/build/android/gyp/compile_resources.pydeps index 12c473b6..6701a8a 100644 --- a/build/android/gyp/compile_resources.pydeps +++ b/build/android/gyp/compile_resources.pydeps
@@ -1,8 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/create_app_bundle.pydeps b/build/android/gyp/create_app_bundle.pydeps index 6a5b13e5..83ffedb 100644 --- a/build/android/gyp/create_app_bundle.pydeps +++ b/build/android/gyp/create_app_bundle.pydeps
@@ -13,8 +13,8 @@ ../../../third_party/catapult/devil/devil/utils/__init__.py ../../../third_party/catapult/devil/devil/utils/cmd_helper.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/create_app_bundle_apks.pydeps b/build/android/gyp/create_app_bundle_apks.pydeps index bcbc5ff..79750840 100644 --- a/build/android/gyp/create_app_bundle_apks.pydeps +++ b/build/android/gyp/create_app_bundle_apks.pydeps
@@ -1,8 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_apks.pydeps build/android/gyp/create_app_bundle_apks.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/create_r_java.pydeps b/build/android/gyp/create_r_java.pydeps index 76de786..df708b8d 100644 --- a/build/android/gyp/create_r_java.pydeps +++ b/build/android/gyp/create_r_java.pydeps
@@ -1,8 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_java.pydeps build/android/gyp/create_r_java.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/create_r_txt.pydeps b/build/android/gyp/create_r_txt.pydeps index 1229151..396f9db 100644 --- a/build/android/gyp/create_r_txt.pydeps +++ b/build/android/gyp/create_r_txt.pydeps
@@ -1,8 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_txt.pydeps build/android/gyp/create_r_txt.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/create_ui_locale_resources.pydeps b/build/android/gyp/create_ui_locale_resources.pydeps index 2e6f20f..e7f7647e 100644 --- a/build/android/gyp/create_ui_locale_resources.pydeps +++ b/build/android/gyp/create_ui_locale_resources.pydeps
@@ -1,8 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/jinja_template.pydeps b/build/android/gyp/jinja_template.pydeps index 6990e6a..cf6c6235 100644 --- a/build/android/gyp/jinja_template.pydeps +++ b/build/android/gyp/jinja_template.pydeps
@@ -10,8 +10,8 @@ ../../../third_party/catapult/devil/devil/constants/__init__.py ../../../third_party/catapult/devil/devil/constants/exit_codes.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/prepare_resources.pydeps b/build/android/gyp/prepare_resources.pydeps index de22add..b6aa0a5 100644 --- a/build/android/gyp/prepare_resources.pydeps +++ b/build/android/gyp/prepare_resources.pydeps
@@ -1,8 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/unused_resources.pydeps b/build/android/gyp/unused_resources.pydeps index c244b44..0154648 100644 --- a/build/android/gyp/unused_resources.pydeps +++ b/build/android/gyp/unused_resources.pydeps
@@ -1,8 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/unused_resources.pydeps build/android/gyp/unused_resources.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/gyp/write_build_config.pydeps b/build/android/gyp/write_build_config.pydeps index 8f4c58e..46b749c 100644 --- a/build/android/gyp/write_build_config.pydeps +++ b/build/android/gyp/write_build_config.pydeps
@@ -1,8 +1,8 @@ # Generated by running: # build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py ../../../third_party/jinja2/__init__.py -../../../third_party/jinja2/_compat.py ../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py ../../../third_party/jinja2/bccache.py ../../../third_party/jinja2/compiler.py ../../../third_party/jinja2/defaults.py
diff --git a/build/android/pylib/results/presentation/test_results_presentation.pydeps b/build/android/pylib/results/presentation/test_results_presentation.pydeps index 99409649..031e179 100644 --- a/build/android/pylib/results/presentation/test_results_presentation.pydeps +++ b/build/android/pylib/results/presentation/test_results_presentation.pydeps
@@ -13,8 +13,8 @@ ../../../../../third_party/catapult/devil/devil/utils/__init__.py ../../../../../third_party/catapult/devil/devil/utils/cmd_helper.py ../../../../../third_party/jinja2/__init__.py -../../../../../third_party/jinja2/_compat.py ../../../../../third_party/jinja2/_identifier.py +../../../../../third_party/jinja2/async_utils.py ../../../../../third_party/jinja2/bccache.py ../../../../../third_party/jinja2/compiler.py ../../../../../third_party/jinja2/defaults.py
diff --git a/build/android/test_runner.pydeps b/build/android/test_runner.pydeps index 572b3de..c2c535f 100644 --- a/build/android/test_runner.pydeps +++ b/build/android/test_runner.pydeps
@@ -104,8 +104,8 @@ ../../third_party/colorama/src/colorama/win32.py ../../third_party/colorama/src/colorama/winterm.py ../../third_party/jinja2/__init__.py -../../third_party/jinja2/_compat.py ../../third_party/jinja2/_identifier.py +../../third_party/jinja2/async_utils.py ../../third_party/jinja2/bccache.py ../../third_party/jinja2/compiler.py ../../third_party/jinja2/defaults.py
diff --git a/build/fuchsia/linux_internal.sdk.sha1 b/build/fuchsia/linux_internal.sdk.sha1 index 49831e1..06ea87b 100644 --- a/build/fuchsia/linux_internal.sdk.sha1 +++ b/build/fuchsia/linux_internal.sdk.sha1
@@ -1 +1 @@ -11.20221227.2.1 +11.20221227.3.1
diff --git a/cc/input/scrollbar_animation_controller.cc b/cc/input/scrollbar_animation_controller.cc index 70737a06..ff30f58 100644 --- a/cc/input/scrollbar_animation_controller.cc +++ b/cc/input/scrollbar_animation_controller.cc
@@ -190,6 +190,10 @@ void ScrollbarAnimationController::DidScrollUpdate() { UpdateScrollbarState(); + if (need_thinning_animation_) { + vertical_controller_->DidScrollUpdate(); + horizontal_controller_->DidScrollUpdate(); + } } void ScrollbarAnimationController::UpdateScrollbarState() {
diff --git a/cc/input/single_scrollbar_animation_controller_thinning.cc b/cc/input/single_scrollbar_animation_controller_thinning.cc index bbba143..1e456af 100644 --- a/cc/input/single_scrollbar_animation_controller_thinning.cc +++ b/cc/input/single_scrollbar_animation_controller_thinning.cc
@@ -127,6 +127,13 @@ is_animating_ = false; } +void SingleScrollbarAnimationControllerThinning::DidScrollUpdate() { + if (captured_ || !mouse_is_near_scrollbar_track_) + return; + + CalculateThicknessShouldChange(device_viewport_last_pointer_location_); +} + void SingleScrollbarAnimationControllerThinning::DidMouseDown() { if (!mouse_is_over_scrollbar_thumb_) return; @@ -175,6 +182,12 @@ void SingleScrollbarAnimationControllerThinning::DidMouseMove( const gfx::PointF& device_viewport_point) { + CalculateThicknessShouldChange(device_viewport_point); + device_viewport_last_pointer_location_ = device_viewport_point; +} + +void SingleScrollbarAnimationControllerThinning::CalculateThicknessShouldChange( + const gfx::PointF& device_viewport_point) { ScrollbarLayerImplBase* scrollbar = GetScrollbar(); if (!scrollbar)
diff --git a/cc/input/single_scrollbar_animation_controller_thinning.h b/cc/input/single_scrollbar_animation_controller_thinning.h index 67706bc2d..87b1735d 100644 --- a/cc/input/single_scrollbar_animation_controller_thinning.h +++ b/cc/input/single_scrollbar_animation_controller_thinning.h
@@ -48,6 +48,9 @@ } bool captured() const { return captured_; } + gfx::PointF device_viewport_last_pointer_location() const { + return device_viewport_last_pointer_location_; + } bool Animate(base::TimeTicks now); void StartAnimation(); @@ -55,6 +58,8 @@ void UpdateThumbThicknessScale(); + void DidScrollUpdate(); + void DidMouseDown(); void DidMouseUp(); void DidMouseLeave(); @@ -79,6 +84,7 @@ enum class AnimationChange { NONE, INCREASE, DECREASE }; float ThumbThicknessScaleAt(float progress) const; float ThumbThicknessScaleByMouseDistanceToScrollbar() const; + void CalculateThicknessShouldChange(const gfx::PointF& device_viewport_point); float AdjustScale(float new_value, float current_value, @@ -105,6 +111,11 @@ AnimationChange thickness_change_; base::TimeDelta thinning_duration_; + + // Saves the last known pointer location in the device viewport for use in + // DidScrollUpdate() to check the pointer's proximity to the thumb in case of a + // scroll. + gfx::PointF device_viewport_last_pointer_location_{-1, -1}; };
diff --git a/cc/input/single_scrollbar_animation_controller_thinning_unittest.cc b/cc/input/single_scrollbar_animation_controller_thinning_unittest.cc index d6ef694e..81499205 100644 --- a/cc/input/single_scrollbar_animation_controller_thinning_unittest.cc +++ b/cc/input/single_scrollbar_animation_controller_thinning_unittest.cc
@@ -11,6 +11,7 @@ #include "cc/trees/layer_tree_impl.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" +#include "ui/gfx/geometry/test/geometry_util.h" using ::testing::_; using ::testing::Bool; @@ -72,7 +73,7 @@ const bool kIsLeftSideVerticalScrollbar = false; scrollbar_layer_ = AddLayer<SolidColorScrollbarLayerImpl>( - ScrollbarOrientation::HORIZONTAL, kThumbThickness, kTrackStart, + ScrollbarOrientation::VERTICAL, kThumbThickness, kTrackStart, kIsLeftSideVerticalScrollbar); scrollbar_layer_->SetBounds(gfx::Size(kThumbThickness, kTrackLength)); @@ -88,7 +89,7 @@ UpdateActiveTreeDrawProperties(); scrollbar_controller_ = SingleScrollbarAnimationControllerThinning::Create( - scroll_layer->element_id(), ScrollbarOrientation::HORIZONTAL, &client_, + scroll_layer->element_id(), ScrollbarOrientation::VERTICAL, &client_, kThinningDuration); mouse_move_distance_to_trigger_fade_in_ = scrollbar_controller_->MouseMoveDistanceToTriggerFadeIn(); @@ -460,6 +461,84 @@ scrollbar_layer_->thumb_thickness_scale_factor()); } +// Test that the last pointer location variable is set on DidMouseMove calls and +// mouse position variables are correctly updated in DidScrollUpdate() calls. +TEST_P(SingleScrollbarAnimationControllerThinningTest, + HoverTrackAndMoveThumbUnderPointer) { + EXPECT_POINTF_EQ( + gfx::PointF(-1, -1), + scrollbar_controller_->device_viewport_last_pointer_location()); + + // Move mouse on top of the scrollbar track but not the thumb, and verify + // that all variables are correctly set. + gfx::PointF near_scrollbar = NearScrollbar(0, 90); + scrollbar_controller_->DidMouseMove(near_scrollbar); + EXPECT_POINTF_EQ( + near_scrollbar, + scrollbar_controller_->device_viewport_last_pointer_location()); + EXPECT_FALSE(scrollbar_controller_->mouse_is_near_scrollbar_thumb()); + EXPECT_FALSE(scrollbar_controller_->mouse_is_over_scrollbar_thumb()); + EXPECT_TRUE(scrollbar_controller_->mouse_is_near_scrollbar_track()); + scrollbar_controller_->DidMouseDown(); + EXPECT_FALSE(scrollbar_controller_->captured()); + + // Move the thumb to the end of the track so that the pointer is located over + // it. + EXPECT_TRUE(scrollbar_layer_->SetCurrentPos(100)); + scrollbar_controller_->DidScrollUpdate(); + EXPECT_TRUE(scrollbar_controller_->mouse_is_near_scrollbar_thumb()); + EXPECT_TRUE(scrollbar_controller_->mouse_is_over_scrollbar_thumb()); + EXPECT_TRUE(scrollbar_controller_->mouse_is_near_scrollbar_track()); + + // Clicking now should capture the thumb. + scrollbar_controller_->DidMouseDown(); + EXPECT_TRUE(scrollbar_controller_->captured()); +} + +// Test that DidScrollUpdate correctly queues thinning animations when the thumb +// moves under the pointer and when it moves away from it. +TEST_P(SingleScrollbarAnimationControllerThinningTest, + DidScrollUpdateQueuesAnimations) { + // Fluent scrollbars queue animations based on proximity to the track, not the + // thumb, which get queued on DidMouseMove(). For Fluent Scrollbars + // DidScrollUpdate() only updates the mouse location variables, behavior that + // is tested in HoverTrackAndMoveThumbUnderPointer. + if (client_.IsFluentScrollbar()) + return; + + base::TimeTicks time; + time += base::Seconds(1); + + // Move mouse on top of the scrollbar track but not the thumb. No animation + // should be queued. 
+ scrollbar_controller_->DidMouseMove(NearScrollbar(0, 90)); + EXPECT_FALSE(scrollbar_controller_->Animate(time)); + EXPECT_FLOAT_EQ(kIdleThicknessScale, + scrollbar_layer_->thumb_thickness_scale_factor()); + + // Move the thumb to the end of the track so that the pointer is located over + // it. + EXPECT_TRUE(scrollbar_layer_->SetCurrentPos(100)); + scrollbar_controller_->DidScrollUpdate(); + EXPECT_TRUE(scrollbar_controller_->Animate(time)); + + // The thumb should animate and become thick. + time += kThinningDuration; + scrollbar_controller_->Animate(time); + EXPECT_FLOAT_EQ(1.0f, scrollbar_layer_->thumb_thickness_scale_factor()); + + // Move the layer's thumb to its starting position. + EXPECT_TRUE(scrollbar_layer_->SetCurrentPos(0)); + scrollbar_controller_->DidScrollUpdate(); + scrollbar_controller_->Animate(time); + + // The thumb should become thin as the mouse is no longer on top of it. + time += kThinningDuration; + scrollbar_controller_->Animate(time); + EXPECT_FLOAT_EQ(kIdleThicknessScale, + scrollbar_layer_->thumb_thickness_scale_factor()); +} + INSTANTIATE_TEST_SUITE_P(All, SingleScrollbarAnimationControllerThinningTest, Bool());
diff --git a/chrome/VERSION b/chrome/VERSION index 0bc15f1..c39ed25 100644 --- a/chrome/VERSION +++ b/chrome/VERSION
@@ -1,4 +1,4 @@ MAJOR=111 MINOR=0 -BUILD=5504 +BUILD=5505 PATCH=0
diff --git a/chrome/app/chromeos_strings.grdp b/chrome/app/chromeos_strings.grdp index 16c8c14..685ce36 100644 --- a/chrome/app/chromeos_strings.grdp +++ b/chrome/app/chromeos_strings.grdp
@@ -6319,6 +6319,20 @@ Switch themes at sunrise and sunset </message> + <!-- Strings for the locale switch notification inside the OOBE. --> + <message name="IDS_LOCALE_SWITCH_NOTIFICATION_TITLE" desc="Title of the notification that is displayed at the bottom-right corner of the screen."> + Switch device language? + </message> + <message name="IDS_LOCALE_SWITCH_NOTIFICATION_TEXT" desc="Message that proposes users to change their locale inside a notification that is displayed at the bottom-right corner of the screen."> + You can use your preferred Google Account language (<ph name="NEW_LOCALE_FROM_GAIA">$1<ex>Spanish (Latin America)</ex></ph>) + </message> + <message name="IDS_LOCALE_SWITCH_NOTIFICATION_CONFIRM_BUTTON_LABEL" desc="Label of a button that user will use to accept the locale sync. Displayed inside a notification at the bottom-right corner of the screen."> + Switch + </message> + <message name="IDS_LOCALE_SWITCH_NOTIFICATION_CANCEL_BUTTON_LABEL" desc="Label of a button that user will use to cancel the locale sync. Displayed inside a notification at the bottom-right corner of the screen."> + Don't switch + </message> + <!-- CHOOBE Screen --> <!-- TODO(b/258503542): Remove translateable tags when strings are finalized. --> <message name="IDS_OOBE_CHOOBE_TITLE" desc="Title of the CHOOBE screen." translateable="false">
diff --git a/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_CANCEL_BUTTON_LABEL.png.sha1 b/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_CANCEL_BUTTON_LABEL.png.sha1 new file mode 100644 index 0000000..1a2baae --- /dev/null +++ b/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_CANCEL_BUTTON_LABEL.png.sha1
@@ -0,0 +1 @@ +0a853ff2a60cc387019c68ac85eb9d192e8fd56f \ No newline at end of file
diff --git a/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_CONFIRM_BUTTON_LABEL.png.sha1 b/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_CONFIRM_BUTTON_LABEL.png.sha1 new file mode 100644 index 0000000..1a2baae --- /dev/null +++ b/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_CONFIRM_BUTTON_LABEL.png.sha1
@@ -0,0 +1 @@ +0a853ff2a60cc387019c68ac85eb9d192e8fd56f \ No newline at end of file
diff --git a/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_TEXT.png.sha1 b/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_TEXT.png.sha1 new file mode 100644 index 0000000..1a2baae --- /dev/null +++ b/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_TEXT.png.sha1
@@ -0,0 +1 @@ +0a853ff2a60cc387019c68ac85eb9d192e8fd56f \ No newline at end of file
diff --git a/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_TITLE.png.sha1 b/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_TITLE.png.sha1 new file mode 100644 index 0000000..1a2baae --- /dev/null +++ b/chrome/app/chromeos_strings_grdp/IDS_LOCALE_SWITCH_NOTIFICATION_TITLE.png.sha1
@@ -0,0 +1 @@ +0a853ff2a60cc387019c68ac85eb9d192e8fd56f \ No newline at end of file
diff --git a/chrome/browser/about_flags.cc b/chrome/browser/about_flags.cc index fa16153..4e92c93 100644 --- a/chrome/browser/about_flags.cc +++ b/chrome/browser/about_flags.cc
@@ -7632,6 +7632,10 @@ flag_descriptions::kIncognitoNtpRevampDescription, kOsAll, FEATURE_VALUE_TYPE(features::kIncognitoNtpRevamp)}, + {"block-insecure-downloads", flag_descriptions::kBlockInsecureDownloadsName, + flag_descriptions::kBlockInsecureDownloadsDescription, kOsAll, + FEATURE_VALUE_TYPE(features::kBlockInsecureDownloads)}, + {"check-offline-capability", flag_descriptions::kCheckOfflineCapabilityName, flag_descriptions::kCheckOfflineCapabilityDescription, kOsAll, FEATURE_WITH_PARAMS_VALUE_TYPE(blink::features::kCheckOfflineCapability, @@ -8856,16 +8860,6 @@ FEATURE_VALUE_TYPE(commerce::kCommerceHintAndroid)}, #endif - {"autofill-enable-get-details-for-enroll-parsing-in-upload-card-response", - flag_descriptions:: - kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponseName, - flag_descriptions:: - kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponseDescription, - kOsAll, - FEATURE_VALUE_TYPE( - autofill::features:: - kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponse)}, - #if BUILDFLAG(IS_WIN) || BUILDFLAG(IS_LINUX) {"enable-web-bluetooth-confirm-pairing-support", flag_descriptions::kWebBluetoothConfirmPairingSupportName,
diff --git a/chrome/browser/ash/BUILD.gn b/chrome/browser/ash/BUILD.gn index 51684a6..8683cab 100644 --- a/chrome/browser/ash/BUILD.gn +++ b/chrome/browser/ash/BUILD.gn
@@ -1755,6 +1755,8 @@ "login/screens/lacros_data_migration_screen.h", "login/screens/local_state_error_screen.cc", "login/screens/local_state_error_screen.h", + "login/screens/locale_switch_notification.cc", + "login/screens/locale_switch_notification.h", "login/screens/locale_switch_screen.cc", "login/screens/locale_switch_screen.h", "login/screens/management_transition_screen.cc",
diff --git a/chrome/browser/ash/app_list/search/common/keyword_util.cc b/chrome/browser/ash/app_list/search/common/keyword_util.cc new file mode 100644 index 0000000..91edd85 --- /dev/null +++ b/chrome/browser/ash/app_list/search/common/keyword_util.cc
@@ -0,0 +1,23 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "chrome/browser/ash/app_list/search/common/keyword_util.h" + +namespace app_list { + +std::vector<std::string> TokenizeQuery(const std::string& query) { + // TODO(b/262623111): Implement function to tokenize user query into + // individual tokens. + return std::vector<std::string>(); +} + +std::vector<std::string> ExtractKeyword( + const std::vector<std::string>& query_tokens) { + // TODO(b/262623111): Implement function to identify and extract the keywords + // from list of tokens. + + return std::vector<std::string>(); +} + +} // namespace app_list
diff --git a/chrome/browser/ash/app_list/search/common/keyword_util.h b/chrome/browser/ash/app_list/search/common/keyword_util.h new file mode 100644 index 0000000..802deae --- /dev/null +++ b/chrome/browser/ash/app_list/search/common/keyword_util.h
@@ -0,0 +1,23 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef CHROME_BROWSER_ASH_APP_LIST_SEARCH_COMMON_KEYWORD_UTIL_H_ +#define CHROME_BROWSER_ASH_APP_LIST_SEARCH_COMMON_KEYWORD_UTIL_H_ + +#include <string> +#include <vector> + +namespace app_list { + +// Given a user query, processes the query into tokens separated by ' '. +std::vector<std::string> TokenizeQuery(const std::string& query); + +// Given the list of tokens produced from the user query, returns +// a list of keywords and their associated SearchProviders. +std::vector<std::string> ExtractKeyword( + const std::vector<std::string>& query_tokens); + +} // namespace app_list + +#endif  // CHROME_BROWSER_ASH_APP_LIST_SEARCH_COMMON_KEYWORD_UTIL_H_
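Both functions in the new keyword_util files are intentionally stubbed out behind TODO(b/262623111). Purely to make the documented contract concrete (tokens separated by ' '), a hypothetical TokenizeQuery could lean on base::SplitString; this sketch is not part of the change and the eventual implementation may differ:

#include "base/strings/string_split.h"

// Hypothetical sketch only: split the query on single spaces, trimming
// whitespace and dropping empty tokens, per the header comment above.
std::vector<std::string> TokenizeQuery(const std::string& query) {
  return base::SplitString(query, " ", base::TRIM_WHITESPACE,
                           base::SPLIT_WANT_NONEMPTY);
}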
diff --git a/chrome/browser/ash/crostini/crostini_manager.cc b/chrome/browser/ash/crostini/crostini_manager.cc index 263e53ba..885d752d 100644 --- a/chrome/browser/ash/crostini/crostini_manager.cc +++ b/chrome/browser/ash/crostini/crostini_manager.cc
@@ -3001,6 +3001,7 @@ // UI. But for any created manually also register now (crbug.com/1330168). AddNewLxdContainerToPrefs(profile_, container_id); RegisterContainer(container_id); + SetCreateOptionsUsed(container_id); std::move(callback).Run(CrostiniResult::SUCCESS); break; default:
diff --git a/chrome/browser/ash/login/screens/locale_switch_notification.cc b/chrome/browser/ash/login/screens/locale_switch_notification.cc new file mode 100644 index 0000000..2ce48480c --- /dev/null +++ b/chrome/browser/ash/login/screens/locale_switch_notification.cc
@@ -0,0 +1,247 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "chrome/browser/ash/login/screens/locale_switch_notification.h" + +#include <memory> +#include <string> +#include <vector> + +#include "ash/public/cpp/message_center/oobe_notification_constants.h" +#include "ash/public/cpp/notification_utils.h" +#include "base/no_destructor.h" +#include "chrome/browser/ash/login/ui/login_display_host.h" +#include "chrome/browser/browser_process.h" +#include "chrome/browser/notifications/notification_common.h" +#include "chrome/browser/notifications/notification_display_service.h" +#include "chrome/browser/notifications/notification_display_service_factory.h" +#include "chrome/browser/ui/webui/ash/login/oobe_ui.h" +#include "chrome/grit/generated_resources.h" +#include "chromeos/strings/grit/chromeos_strings.h" +#include "components/vector_icons/vector_icons.h" +#include "content/public/browser/browser_thread.h" +#include "ui/base/l10n/l10n_util.h" +#include "ui/message_center/public/cpp/notification.h" +#include "ui/message_center/public/cpp/notification_delegate.h" + +namespace ash { +namespace { + +using ::message_center::Notification; +using ::message_center::NotificationDelegate; +using ::message_center::NotificationType; +using ::message_center::NotifierId; +using ::message_center::NotifierType; +using ::message_center::RichNotificationData; +using ::message_center::SystemNotificationWarningLevel; + +// Simplest type of notification UI - no progress bars, images etc. +constexpr NotificationType kNotificationType = + message_center::NOTIFICATION_TYPE_SIMPLE; + +// Generic type for notifications that are not from web pages etc. +constexpr NotificationHandler::Type kNotificationHandlerType = + NotificationHandler::Type::TRANSIENT; + +// Chromium logo icon that will displayed on the notification. 
+const gfx::VectorIcon& kIcon = vector_icons::kProductIcon; + +constexpr SystemNotificationWarningLevel kWarningLevel = + SystemNotificationWarningLevel::NORMAL; + +class LocaleSwitchNotificationDelegate + : public message_center::NotificationDelegate, + public OobeUI::Observer { + public: + LocaleSwitchNotificationDelegate( + std::string new_locale, + Profile* profile, + locale_util::SwitchLanguageCallback callback); + + LocaleSwitchNotificationDelegate(const LocaleSwitchNotificationDelegate&) = + delete; + LocaleSwitchNotificationDelegate& operator=( + const LocaleSwitchNotificationDelegate&) = delete; + + protected: + ~LocaleSwitchNotificationDelegate() override; + + // message_center::NotificationDelegate overrides: + void Click(const absl::optional<int>& button_index, + const absl::optional<std::u16string>& reply) override; + + private: + // OobeUI::Observer overrides: + void OnCurrentScreenChanged(OobeScreenId current_screen, + OobeScreenId new_screen) override; + void OnDestroyingOobeUI() override; + + void CloseNotification(); + + enum class NotificationButton { + kSwitchLocale = 0, + }; + + std::string new_locale_; + Profile* profile_; + locale_util::SwitchLanguageCallback callback_; + + bool is_screen_changed_ = false; +}; + +LocaleSwitchNotificationDelegate::LocaleSwitchNotificationDelegate( + std::string new_locale, + Profile* profile, + locale_util::SwitchLanguageCallback callback) + : new_locale_(std::move(new_locale)), + profile_(profile), + callback_(std::move(callback)) { + LoginDisplayHost* host = LoginDisplayHost::default_host(); + if (!host) { + return; + } + OobeUI* ui = host->GetOobeUI(); + if (ui) { + ui->AddObserver(this); + } +} + +LocaleSwitchNotificationDelegate::~LocaleSwitchNotificationDelegate() { + // This observation removal handles the case when user clicks directly on the + // close button (little cros in the upper-right corner of the notification). + // Delegate is destroyed right after that click. + LoginDisplayHost* host = LoginDisplayHost::default_host(); + if (!host) { + return; + } + OobeUI* ui = host->GetOobeUI(); + if (ui) { + ui->RemoveObserver(this); + } +} + +void LocaleSwitchNotificationDelegate::Click( + const absl::optional<int>& button_index, + const absl::optional<std::u16string>& reply) { + // If |button_index| is empty it means that user clicked on the body of a + // notification. In this case notification will disappear from the screen, but + // user still will be able to see it in the status tray. This will give user a + // chance to change the locale if they accidentally missed the button. + // If user proceeds to the next screen without any interactions with the + // notification it will be removed from the status tray too. + if (!button_index.has_value()) { + return; + } + if (!callback_) { + return; + } + + // Switch locale if user selected the "Switch" option. + if (*button_index == static_cast<int>(NotificationButton::kSwitchLocale)) { + VLOG(1) << "Switching locale to " << new_locale_ + << " from the notification."; + locale_util::SwitchLanguage( + new_locale_, + /*enable_locale_keyboard_layouts=*/false, // The layouts will be synced + // instead. Also new user + // could enable required + // layouts from the settings. + /*login_layouts_only=*/false, std::move(callback_), profile_); + } + + // Remove notification regardless of which button user pressed. 
+ CloseNotification(); +} + +void LocaleSwitchNotificationDelegate::OnDestroyingOobeUI() { + CloseNotification(); +} + +void LocaleSwitchNotificationDelegate::OnCurrentScreenChanged( + OobeScreenId current_screen, + OobeScreenId new_screen) { + // |is_screen_changed_| will be set to |true| when OOBE flow will hit the + // first screen that we will show after the locale switch screen. + if (!is_screen_changed_) { + is_screen_changed_ = true; + return; + } + + // In case we proceed with the OOBE flow and notification is still either + // displayed on the screen or in the status tray we want to remove it and + // cancel the observation. + CloseNotification(); +} + +void LocaleSwitchNotificationDelegate::CloseNotification() { + LoginDisplayHost* host = LoginDisplayHost::default_host(); + if (host) { + OobeUI* ui = host->GetOobeUI(); + if (ui) { + ui->RemoveObserver(this); + } + } + + NotificationDisplayService* nds = + NotificationDisplayServiceFactory::GetForProfile(profile_); + if (nds) { + nds->Close(kNotificationHandlerType, kOOBELocaleSwitchNotificationId); + } +} + +} // namespace + +// static +void LocaleSwitchNotification::Show( + Profile* profile, + std::string new_locale, + locale_util::SwitchLanguageCallback locale_switch_callback) { + // NotifierId for histogram reporting. + static const base::NoDestructor<NotifierId> kNotifierId( + NotifierType::SYSTEM_COMPONENT, kOOBELocaleSwitchNotificationId, + NotificationCatalogName::kLocaleUpdate); + + // Leaving this empty means the notification is attributed to the system - + // ie "Chromium OS" or similar. + static const base::NoDestructor<std::u16string> kEmptyDisplaySource; + + // No origin URL is needed since the notification comes from the system. + static const base::NoDestructor<GURL> kEmptyOriginUrl; + + const std::u16string title = + l10n_util::GetStringUTF16(IDS_LOCALE_SWITCH_NOTIFICATION_TITLE); + + const std::u16string body = l10n_util::GetStringFUTF16( + IDS_LOCALE_SWITCH_NOTIFICATION_TEXT, + l10n_util::GetDisplayNameForLocale( + new_locale, g_browser_process->GetApplicationLocale(), + /*is_for_ui=*/true)); + + const std::u16string accept_label = l10n_util::GetStringUTF16( + IDS_LOCALE_SWITCH_NOTIFICATION_CONFIRM_BUTTON_LABEL); + + const std::u16string cancel_label = l10n_util::GetStringUTF16( + IDS_LOCALE_SWITCH_NOTIFICATION_CANCEL_BUTTON_LABEL); + + RichNotificationData rich_notification_data; + rich_notification_data.buttons.emplace_back(accept_label); + rich_notification_data.buttons.emplace_back(cancel_label); + + const scoped_refptr<LocaleSwitchNotificationDelegate> delegate = + base::MakeRefCounted<LocaleSwitchNotificationDelegate>( + std::move(new_locale), profile, std::move(locale_switch_callback)); + + Notification notification = CreateSystemNotification( + kNotificationType, kOOBELocaleSwitchNotificationId, title, body, + *kEmptyDisplaySource, *kEmptyOriginUrl, *kNotifierId, + rich_notification_data, delegate, kIcon, kWarningLevel); + + NotificationDisplayService* nds = + NotificationDisplayServiceFactory::GetForProfile(profile); + if (nds) { + nds->Display(kNotificationHandlerType, notification, /*metadata=*/nullptr); + } +} + +} // namespace ash
diff --git a/chrome/browser/ash/login/screens/locale_switch_notification.h b/chrome/browser/ash/login/screens/locale_switch_notification.h new file mode 100644 index 0000000..241d36a --- /dev/null +++ b/chrome/browser/ash/login/screens/locale_switch_notification.h
@@ -0,0 +1,29 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef CHROME_BROWSER_ASH_LOGIN_SCREENS_LOCALE_SWITCH_NOTIFICATION_H_ +#define CHROME_BROWSER_ASH_LOGIN_SCREENS_LOCALE_SWITCH_NOTIFICATION_H_ + +#include <string> + +#include "base/callback.h" +#include "base/time/time.h" +#include "chrome/browser/ash/base/locale_util.h" + +class Profile; + +namespace ash { + +// Utility class to display locale switch notification. +class LocaleSwitchNotification { + public: + // Show locale switch notification. + static void Show(Profile* profile, + std::string new_locale, + locale_util::SwitchLanguageCallback locale_switch_callback); +}; + +} // namespace ash + +#endif // CHROME_BROWSER_ASH_LOGIN_SCREENS_LOCALE_SWITCH_NOTIFICATION_H_
diff --git a/chrome/browser/ash/login/screens/locale_switch_screen.cc b/chrome/browser/ash/login/screens/locale_switch_screen.cc index adefa951..5aed02a 100644 --- a/chrome/browser/ash/login/screens/locale_switch_screen.cc +++ b/chrome/browser/ash/login/screens/locale_switch_screen.cc
@@ -4,9 +4,12 @@ #include "chrome/browser/ash/login/screens/locale_switch_screen.h" +#include "base/containers/contains.h" #include "base/time/time.h" #include "chrome/browser/ash/base/locale_util.h" #include "chrome/browser/ash/login/login_pref_names.h" +#include "chrome/browser/ash/login/screens/locale_switch_notification.h" +#include "chrome/browser/ash/login/users/chrome_user_manager_util.h" #include "chrome/browser/ash/login/wizard_context.h" #include "chrome/browser/ash/profiles/profile_helper.h" #include "chrome/browser/browser_process.h" @@ -48,6 +51,8 @@ return "SwitchFailed"; case Result::SWITCH_SUCCEDED: return "SwitchSucceded"; + case Result::SWITCH_DELEGATED: + return "SwitchDelegated"; case Result::NOT_APPLICABLE: return BaseScreen::kNotApplicable; } @@ -100,7 +105,7 @@ std::string locale = profile->GetPrefs()->GetString(language::prefs::kApplicationLocale); DCHECK(!locale.empty()); - SwitchLocale(locale); + SwitchLocale(std::move(locale)); return; } @@ -182,6 +187,32 @@ exit_callback_.Run(Result::NO_SWITCH_NEEDED); return; } + + // Types of users that have a GAIA account and could be used during the + // "Add Person" flow. + static constexpr user_manager::UserType kAddPersonUserTypes[] = { + user_manager::USER_TYPE_REGULAR, user_manager::USER_TYPE_CHILD}; + const user_manager::User* user = + user_manager::UserManager::Get()->GetActiveUser(); + // Don't show notification for the ephemeral logins, proceed with the default + // flow. + if (!chrome_user_manager_util::IsPublicSessionOrEphemeralLogin() && + context()->is_add_person_flow && + base::Contains(kAddPersonUserTypes, user->GetType())) { + VLOG(1) << "Add Person flow detected, delegating locale switch decision" + << " to the user."; + // Delegate language switch to the notification. User will be able to + // decide whether switch/not switch on their own. + Profile* profile = ProfileHelper::Get()->GetProfileByUser(user); + locale_util::SwitchLanguageCallback callback(base::BindOnce( + &LocaleSwitchScreen::OnLanguageChangedNotificationCallback, + weak_factory_.GetWeakPtr())); + LocaleSwitchNotification::Show(profile, std::move(locale), + std::move(callback)); + exit_callback_.Run(Result::SWITCH_DELEGATED); + return; + } + locale_util::SwitchLanguageCallback callback( base::BindOnce(&LocaleSwitchScreen::OnLanguageChangedCallback, weak_factory_.GetWeakPtr())); @@ -206,6 +237,15 @@ exit_callback_.Run(Result::SWITCH_SUCCEDED); } +void LocaleSwitchScreen::OnLanguageChangedNotificationCallback( + const locale_util::LanguageSwitchResult& result) { + if (!result.success) { + return; + } + + view_->UpdateStrings(); +} + void LocaleSwitchScreen::ResetState() { identity_manager_observer_.Reset(); timeout_waiter_.AbandonAndStop();
diff --git a/chrome/browser/ash/login/screens/locale_switch_screen.h b/chrome/browser/ash/login/screens/locale_switch_screen.h index b2ca754..0a957d5 100644 --- a/chrome/browser/ash/login/screens/locale_switch_screen.h +++ b/chrome/browser/ash/login/screens/locale_switch_screen.h
@@ -30,6 +30,7 @@ NO_SWITCH_NEEDED, SWITCH_SUCCEDED, SWITCH_FAILED, + SWITCH_DELEGATED, NOT_APPLICABLE }; @@ -56,6 +57,8 @@ void SwitchLocale(std::string locale); void OnLanguageChangedCallback( const locale_util::LanguageSwitchResult& result); + void OnLanguageChangedNotificationCallback( + const locale_util::LanguageSwitchResult& result); void ResetState(); void OnTimeout();
diff --git a/chrome/browser/ash/login/wizard_context.h b/chrome/browser/ash/login/wizard_context.h index b410d30d..4362b1c 100644 --- a/chrome/browser/ash/login/wizard_context.h +++ b/chrome/browser/ash/login/wizard_context.h
@@ -137,6 +137,9 @@ // True when gesture navigation screen was shown during the OOBE. bool is_gesture_navigation_screen_was_shown = false; + + // True when user is inside the "Add Person" flow. + bool is_add_person_flow = false; }; // Returns |true| if this is an OOBE flow after enterprise enrollment.
diff --git a/chrome/browser/ash/login/wizard_controller.cc b/chrome/browser/ash/login/wizard_controller.cc index 8c523d8..c6613004 100644 --- a/chrome/browser/ash/login/wizard_controller.cc +++ b/chrome/browser/ash/login/wizard_controller.cc
@@ -404,6 +404,8 @@ wizard_context_(wizard_context) { wizard_context_->skip_post_login_screens_for_tests = switches::ShouldSkipOobePostLogin(); + wizard_context_->is_add_person_flow = + StartupUtils::IsOobeCompleted() && StartupUtils::IsDeviceOwned(); AccessibilityManager* accessibility_manager = AccessibilityManager::Get(); if (accessibility_manager) { // accessibility_manager could be null in Tests.
diff --git a/chrome/browser/chrome_browser_interface_binders.cc b/chrome/browser/chrome_browser_interface_binders.cc index 43357d8..b70e9ff 100644 --- a/chrome/browser/chrome_browser_interface_binders.cc +++ b/chrome/browser/chrome_browser_interface_binders.cc
@@ -967,7 +967,11 @@ map); #endif // !defined(OFFICIAL_BUILD) - if (IsCartModuleEnabled()) { + if (IsCartModuleEnabled() && customize_chrome::IsSidePanelEnabled()) { + RegisterWebUIControllerInterfaceBinder<chrome_cart::mojom::CartHandler, + NewTabPageUI, CustomizeChromeUI>( + map); + } else if (IsCartModuleEnabled()) { RegisterWebUIControllerInterfaceBinder<chrome_cart::mojom::CartHandler, NewTabPageUI>(map); } @@ -1017,7 +1021,8 @@ if (user_notes::IsUserNotesEnabled()) { RegisterWebUIControllerInterfaceBinder< - side_panel::mojom::UserNotesPageHandler, UserNotesSidePanelUI>(map); + side_panel::mojom::UserNotesPageHandlerFactory, UserNotesSidePanelUI>( + map); } if (features::IsReadAnythingEnabled()) {
diff --git a/chrome/browser/devtools/devtools_browsertest.cc b/chrome/browser/devtools/devtools_browsertest.cc index 37ffbad..e0d75db 100644 --- a/chrome/browser/devtools/devtools_browsertest.cc +++ b/chrome/browser/devtools/devtools_browsertest.cc
@@ -1115,7 +1115,7 @@ ASSERT_TRUE(content::ExecuteScript(web_frame_rfh, about_blank_javascript)); - web_about_blank_manager.WaitForNavigationFinished(); + ASSERT_TRUE(web_about_blank_manager.WaitForNavigationFinished()); // After navigation, the frame may change. web_frame_rfh = ChildFrameAt(panel_frame_rfh, 2); @@ -1137,7 +1137,7 @@ ASSERT_TRUE(content::ExecuteScript(web_frame_rfh, renavigation_javascript)); - renavigation_manager.WaitForNavigationFinished(); + ASSERT_TRUE(renavigation_manager.WaitForNavigationFinished()); // The old RFH is no longer valid after the renavigation, so we must get the // new one. @@ -1176,7 +1176,7 @@ // This is a bit of a hack to switch to the sidebar pane in the elements panel // that the Iframe has been added to. SwitchToPanel(window_, "iframe_pane"); - web_manager.WaitForNavigationFinished(); + ASSERT_TRUE(web_manager.WaitForNavigationFinished()); std::vector<RenderFrameHost*> rfhs = CollectAllRenderFrameHosts(main_web_contents()); @@ -1321,7 +1321,7 @@ content::TestNavigationManager non_devtools_manager( main_web_contents(), non_dt_extension_test_url); SwitchToExtensionPanel(window_, devtools_extension, "iframe_panel"); - non_devtools_manager.WaitForNavigationFinished(); + ASSERT_TRUE(non_devtools_manager.WaitForNavigationFinished()); std::vector<RenderFrameHost*> rfhs = CollectAllRenderFrameHosts(main_web_contents()); @@ -1393,7 +1393,7 @@ content::TestNavigationManager extension_b_manager(main_web_contents(), extension_b_page_url); SwitchToExtensionPanel(window_, devtools_a_extension, "iframe_panel"); - extension_b_manager.WaitForNavigationFinished(); + ASSERT_TRUE(extension_b_manager.WaitForNavigationFinished()); std::vector<RenderFrameHost*> rfhs = CollectAllRenderFrameHosts(main_web_contents()); @@ -1474,7 +1474,7 @@ content::TestNavigationManager test_page_manager(main_web_contents(), extension_test_url); SwitchToExtensionPanel(window_, extension, "iframe_panel"); - test_page_manager.WaitForNavigationFinished(); + ASSERT_TRUE(test_page_manager.WaitForNavigationFinished()); std::vector<RenderFrameHost*> rfhs = CollectAllRenderFrameHosts(main_web_contents()); @@ -1536,7 +1536,7 @@ content::TestNavigationManager manager(main_web_contents(), devtools_url); ASSERT_TRUE(content::ExecuteScript(main_devtools_rfh, javascript)); - manager.WaitForNavigationFinished(); + ASSERT_TRUE(manager.WaitForNavigationFinished()); std::vector<RenderFrameHost*> rfhs = CollectAllRenderFrameHosts(main_web_contents()); @@ -2745,8 +2745,8 @@ tab->GetController().LoadURL(url, content::Referrer(), ui::PAGE_TRANSITION_LINK, std::string()); - navigation_manager.WaitForNavigationFinished(); - navigation_manager_iframe.WaitForNavigationFinished(); + ASSERT_TRUE(navigation_manager.WaitForNavigationFinished()); + ASSERT_TRUE(navigation_manager_iframe.WaitForNavigationFinished()); EXPECT_TRUE(content::WaitForLoadStop(tab)); std::vector<RenderFrameHost*> frames = @@ -2898,8 +2898,8 @@ tab->GetController().LoadURL(url, content::Referrer(), ui::PAGE_TRANSITION_LINK, std::string()); - navigation_manager.WaitForNavigationFinished(); - navigation_manager_iframe.WaitForNavigationFinished(); + ASSERT_TRUE(navigation_manager.WaitForNavigationFinished()); + ASSERT_TRUE(navigation_manager_iframe.WaitForNavigationFinished()); EXPECT_TRUE(content::WaitForLoadStop(tab)); for (auto* frame : CollectAllRenderFrameHosts(GetInspectedTab())) {
diff --git a/chrome/browser/dips/dips_service.cc b/chrome/browser/dips/dips_service.cc index 9d988ad..0953cd7b 100644 --- a/chrome/browser/dips/dips_service.cc +++ b/chrome/browser/dips/dips_service.cc
@@ -14,12 +14,12 @@ #include "base/strings/strcat.h" #include "base/task/thread_pool.h" #include "base/time/time.h" -#include "base/time/time_delta_from_string.h" #include "chrome/browser/content_settings/cookie_settings_factory.h" #include "chrome/browser/content_settings/host_content_settings_map_factory.h" #include "chrome/browser/dips/dips_features.h" #include "chrome/browser/dips/dips_redirect_info.h" #include "chrome/browser/dips/dips_service_factory.h" +#include "chrome/browser/dips/dips_storage.h" #include "chrome/browser/dips/dips_utils.h" #include "chrome/browser/profiles/profile.h" #include "chrome/common/pref_names.h" @@ -93,10 +93,10 @@ cookie_settings_(CookieSettingsFactory::GetForProfile( Profile::FromBrowserContext(context))), repeating_timer_(CreateTimer(Profile::FromBrowserContext(context))) { + DCHECK(base::FeatureList::IsEnabled(dips::kFeature)); absl::optional<base::FilePath> path; - if (base::FeatureList::IsEnabled(dips::kFeature) && - dips::kPersistedDatabaseEnabled.Get() && + if (dips::kPersistedDatabaseEnabled.Get() && !browser_context_->IsOffTheRecord()) { path = browser_context_->GetPath().Append(kDIPSFilename); } @@ -111,18 +111,11 @@ std::unique_ptr<signin::PersistentRepeatingTimer> DIPSService::CreateTimer( Profile* profile) { DCHECK(profile); - absl::optional<base::TimeDelta> delay = base::TimeDeltaFromString( - base::GetFieldTrialParamValueByFeature(dips::kFeature, "timer_delay")); - if (!delay.has_value()) - return nullptr; - - // TODO(crbug.com/1375302): - // - Add RepeatingCallback to trigger logging of UKM when this timer fires. - // --- Add grace period for this, making it also configurable via a Finch - // --- parameter. + // base::Unretained(this) is safe here since the timer that is created has the + // same lifetime as this service. return std::make_unique<signin::PersistentRepeatingTimer>( - profile->GetPrefs(), prefs::kDIPSTimerLastUpdate, delay.value(), - base::DoNothing()); + profile->GetPrefs(), prefs::kDIPSTimerLastUpdate, dips::kTimerDelay.Get(), + base::BindRepeating(&DIPSService::OnTimerFired, base::Unretained(this))); } DIPSService::~DIPSService() = default; @@ -262,3 +255,15 @@ UmaHistogramBounceCategory(category, chain.cookie_mode.value(), redirect.redirect_type); } + +void DIPSService::OnTimerFired() { + base::Time start = base::Time::Now(); + storage_.AsyncCall(&DIPSStorage::DeleteDIPSEligibleState) + .WithArgs(GetCookieMode()) + .Then(base::BindOnce( + [](base::Time deletion_start) { + base::UmaHistogramLongTimes100("Privacy.DIPS.DeletionLatency", + base::Time::Now() - deletion_start); + }, + start)); +}
diff --git a/chrome/browser/dips/dips_service.h b/chrome/browser/dips/dips_service.h index 41b4d97..0445ad93 100644 --- a/chrome/browser/dips/dips_service.h +++ b/chrome/browser/dips/dips_service.h
@@ -78,6 +78,8 @@ void InitializeStorageWithEngagedSites(); void InitializeStorage(base::Time time, std::vector<std::string> sites); + void OnTimerFired(); + raw_ptr<content::BrowserContext> browser_context_; scoped_refptr<content_settings::CookieSettings> cookie_settings_; // The persisted timer controlling how often incidental state is cleared. @@ -86,7 +88,6 @@ // See base/time/time_delta_from_string.h for how that param should be given. std::unique_ptr<signin::PersistentRepeatingTimer> repeating_timer_; base::SequenceBound<DIPSStorage> storage_; - base::WeakPtrFactory<DIPSService> weak_factory_{this}; };
diff --git a/chrome/browser/dips/dips_storage.cc b/chrome/browser/dips/dips_storage.cc index 80471945..26cca4b2 100644 --- a/chrome/browser/dips/dips_storage.cc +++ b/chrome/browser/dips/dips_storage.cc
@@ -12,9 +12,9 @@ #include "base/strings/strcat.h" #include "base/task/sequenced_task_runner.h" #include "base/threading/thread_restrictions.h" +#include "chrome/browser/dips/dips_features.h" #include "chrome/browser/dips/dips_utils.h" #include "services/network/public/mojom/network_context.mojom.h" -#include "sql/init_status.h" #include "url/gurl.h" namespace { @@ -190,6 +190,50 @@ } } +std::vector<std::string> DIPSStorage::GetSitesThatBounced() const { + DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); + DCHECK(db_); + return db_->GetSitesThatBounced(); +} + +std::vector<std::string> DIPSStorage::GetSitesThatBouncedWithState() const { + DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); + DCHECK(db_); + return db_->GetSitesThatBouncedWithState(); +} + +std::vector<std::string> DIPSStorage::GetSitesThatUsedStorage() const { + DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); + DCHECK(db_); + return db_->GetSitesThatUsedStorage(); +} + +void DIPSStorage::DeleteDIPSEligibleState(DIPSCookieMode mode) { + std::vector<std::string> sites_to_clear; + switch (dips::kTriggeringAction.Get()) { + case DIPSTriggeringAction::kStorage: { + sites_to_clear = GetSitesThatUsedStorage(); + break; + } + case DIPSTriggeringAction::kBounce: { + sites_to_clear = GetSitesThatBounced(); + break; + } + case DIPSTriggeringAction::kStatefulBounce: { + sites_to_clear = GetSitesThatBouncedWithState(); + break; + } + } + + base::UmaHistogramCounts1000(base::StrCat({"Privacy.DIPS.ClearedSitesCount", + GetHistogramSuffix(mode)}), + sites_to_clear.size()); + + // Perform clearing of sites. + // TODO: Actually clear the site-data for `sites_to_clear` here as well. + RemoveRows(sites_to_clear); +} + /* static */ size_t DIPSStorage::SetPrepopulateChunkSizeForTesting(size_t size) { return std::exchange(g_prepopulate_chunk_size, size);
diff --git a/chrome/browser/dips/dips_storage.h b/chrome/browser/dips/dips_storage.h index d4314b6..b1e2814 100644 --- a/chrome/browser/dips/dips_storage.h +++ b/chrome/browser/dips/dips_storage.h
@@ -14,6 +14,7 @@ #include "base/time/time.h" #include "chrome/browser/dips/dips_database.h" #include "chrome/browser/dips/dips_state.h" +#include "chrome/browser/dips/dips_utils.h" #include "services/network/public/mojom/network_context.mojom.h" class GURL; @@ -45,6 +46,22 @@ // meaning that |url| wrote to storage while redirecting. void RecordBounce(const GURL& url, base::Time time, bool stateful); + // Storage querying Methods -------------------------------------------------- + // Returns all sites that did a bounce that aren't protected from DIPS. + std::vector<std::string> GetSitesThatBounced() const; + + // Returns all sites that did a stateful bounce that aren't protected from + // DIPS. + std::vector<std::string> GetSitesThatBouncedWithState() const; + + // Returns all sites which use storage that aren't protected from DIPS. + std::vector<std::string> GetSitesThatUsedStorage() const; + + // Queries the DIPS database for sites whose state DIPS should clear. + // If DIPS deletion isn't enabled, this just logs UMA about how many sites + // would've been cleared by DIPS. + void DeleteDIPSEligibleState(DIPSCookieMode mode); + // Utility Methods ----------------------------------------------------------- static size_t SetPrepopulateChunkSizeForTesting(size_t size);
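Note (not part of the patch): DIPSStorage::DeleteDIPSEligibleState() switches on dips::kTriggeringAction, and DIPSService::CreateTimer() now reads dips::kTimerDelay, but the declarations of those feature parameters live in chrome/browser/dips/dips_features.h/.cc and are not included in the hunks above. The following is only a rough sketch of how such parameters are typically declared with base::FeatureParam; the parameter names, defaults, and option strings are assumptions, not values taken from this change.

// Sketch of the dips_features declarations assumed by the code above.
// All param names, defaults, and option strings here are illustrative.
#include "base/feature_list.h"
#include "base/metrics/field_trial_params.h"
#include "base/time/time.h"
#include "chrome/browser/dips/dips_utils.h"  // for DIPSTriggeringAction

namespace dips {

// The umbrella feature; the DIPSService constructor DCHECKs that it is
// enabled.
BASE_FEATURE(kFeature, "DIPS", base::FEATURE_DISABLED_BY_DEFAULT);

// Whether the DIPS database is persisted to disk (checked in the
// DIPSService constructor).
constexpr base::FeatureParam<bool> kPersistedDatabaseEnabled{
    &kFeature, "persist_database", false};

// Replaces the old "timer_delay" string param that CreateTimer() used to
// parse with base::TimeDeltaFromString().
constexpr base::FeatureParam<base::TimeDelta> kTimerDelay{
    &kFeature, "timer_delay", base::Hours(24)};

// Enum param matching the DIPSTriggeringAction cases handled in
// DIPSStorage::DeleteDIPSEligibleState().
constexpr base::FeatureParam<DIPSTriggeringAction>::Option
    kTriggeringActionOptions[] = {
        {DIPSTriggeringAction::kStorage, "storage"},
        {DIPSTriggeringAction::kBounce, "bounce"},
        {DIPSTriggeringAction::kStatefulBounce, "stateful_bounce"},
};
constexpr base::FeatureParam<DIPSTriggeringAction> kTriggeringAction{
    &kFeature, "triggering_action", DIPSTriggeringAction::kBounce,
    &kTriggeringActionOptions};

}  // namespace dips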
diff --git a/chrome/browser/extensions/api/debugger/debugger_apitest.cc b/chrome/browser/extensions/api/debugger/debugger_apitest.cc index 1c32f10..f92a29c 100644 --- a/chrome/browser/extensions/api/debugger/debugger_apitest.cc +++ b/chrome/browser/extensions/api/debugger/debugger_apitest.cc
@@ -760,8 +760,8 @@ content::TestNavigationManager navigation_manager_iframe(tab, iframe_url); tab->GetController().LoadURL(url, content::Referrer(), ui::PAGE_TRANSITION_LINK, std::string()); - navigation_manager.WaitForNavigationFinished(); - navigation_manager_iframe.WaitForNavigationFinished(); + ASSERT_TRUE(navigation_manager.WaitForNavigationFinished()); + ASSERT_TRUE(navigation_manager_iframe.WaitForNavigationFinished()); EXPECT_TRUE(content::WaitForLoadStop(tab)); ASSERT_TRUE(RunExtensionTest("debugger",
diff --git a/chrome/browser/extensions/api/declarative_content/content_action.cc b/chrome/browser/extensions/api/declarative_content/content_action.cc index 402baaf2..334cad9a 100644 --- a/chrome/browser/extensions/api/declarative_content/content_action.cc +++ b/chrome/browser/extensions/api/declarative_content/content_action.cc
@@ -405,11 +405,6 @@ } gfx::ImageSkia icon; - // TODO(crbug.com/1187011): When removing base::DictionaryValue from - // ParseIconFromCanvasDictionary, |canvas_set| should be changed to - // base::Value::Dict and checking for base::Value::Type::DICTIONARY should be - // removed. This is a temporary solution to prevent content_action base::Value - // migration from expanding across too many locations. const base::Value::Dict* canvas_set = dict->FindDict("imageData"); if (canvas_set && ExtensionAction::ParseIconFromCanvasDictionary(*canvas_set, &icon) !=
diff --git a/chrome/browser/extensions/api/declarative_net_request/declarative_net_request_browsertest.cc b/chrome/browser/extensions/api/declarative_net_request/declarative_net_request_browsertest.cc index 3c60470..04f267bb 100644 --- a/chrome/browser/extensions/api/declarative_net_request/declarative_net_request_browsertest.cc +++ b/chrome/browser/extensions/api/declarative_net_request/declarative_net_request_browsertest.cc
@@ -656,7 +656,6 @@ break; } - // A cast is necessary from ListValue to Value, else this fails to compile. const std::string script = content::JsReplace(base::StringPrintf(kScript, function_name), base::Value(rules_to_add_builder.Build()),
diff --git a/chrome/browser/extensions/api/declarative_webrequest/webrequest_action_unittest.cc b/chrome/browser/extensions/api/declarative_webrequest/webrequest_action_unittest.cc index 327e0e0..123bddfd 100644 --- a/chrome/browser/extensions/api/declarative_webrequest/webrequest_action_unittest.cc +++ b/chrome/browser/extensions/api/declarative_webrequest/webrequest_action_unittest.cc
@@ -34,7 +34,6 @@ namespace helpers = extension_web_request_api_helpers; namespace keys = extensions::declarative_webrequest_constants; -using base::DictionaryValue; using extension_test_util::LoadManifestUnchecked; using helpers::EventResponseDeltas; using testing::HasSubstr; @@ -177,26 +176,26 @@ EXPECT_FALSE(result.get()); // Test missing instanceType element. - base::DictionaryValue input; + base::Value::Dict input; error.clear(); - result = - WebRequestAction::Create(nullptr, nullptr, input, &error, &bad_message); + result = WebRequestAction::Create( + nullptr, nullptr, base::Value(input.Clone()), &error, &bad_message); EXPECT_TRUE(bad_message); EXPECT_FALSE(result.get()); // Test wrong instanceType element. - input.SetStringKey(keys::kInstanceTypeKey, kUnknownActionType); + input.Set(keys::kInstanceTypeKey, kUnknownActionType); error.clear(); - result = - WebRequestAction::Create(nullptr, nullptr, input, &error, &bad_message); + result = WebRequestAction::Create( + nullptr, nullptr, base::Value(input.Clone()), &error, &bad_message); EXPECT_NE("", error); EXPECT_FALSE(result.get()); // Test success - input.SetStringKey(keys::kInstanceTypeKey, keys::kCancelRequestType); + input.Set(keys::kInstanceTypeKey, keys::kCancelRequestType); error.clear(); - result = - WebRequestAction::Create(nullptr, nullptr, input, &error, &bad_message); + result = WebRequestAction::Create( + nullptr, nullptr, base::Value(input.Clone()), &error, &bad_message); EXPECT_EQ("", error); EXPECT_FALSE(bad_message); ASSERT_TRUE(result.get()); @@ -220,14 +219,14 @@ EXPECT_TRUE(result->actions().empty()); EXPECT_EQ(std::numeric_limits<int>::min(), result->GetMinimumPriority()); - base::DictionaryValue correct_action; - correct_action.SetStringKey(keys::kInstanceTypeKey, keys::kIgnoreRulesType); - correct_action.SetIntKey(keys::kLowerPriorityThanKey, 10); - base::DictionaryValue incorrect_action; - incorrect_action.SetStringKey(keys::kInstanceTypeKey, kUnknownActionType); + base::Value::Dict correct_action; + correct_action.Set(keys::kInstanceTypeKey, keys::kIgnoreRulesType); + correct_action.Set(keys::kLowerPriorityThanKey, 10); + base::Value::Dict incorrect_action; + incorrect_action.Set(keys::kInstanceTypeKey, kUnknownActionType); // Test success. - input.push_back(correct_action.Clone()); + input.emplace_back(std::move(correct_action)); error.clear(); result = WebRequestActionSet::Create(nullptr, nullptr, input, &error, &bad_message); @@ -240,7 +239,7 @@ EXPECT_EQ(10, result->GetMinimumPriority()); // Test failure. - input.push_back(incorrect_action.Clone()); + input.emplace_back(std::move(incorrect_action)); error.clear(); result = WebRequestActionSet::Create(nullptr, nullptr, input, &error, &bad_message);
diff --git a/chrome/browser/extensions/api/enterprise_device_attributes/enterprise_device_attributes_ash_apitest.cc b/chrome/browser/extensions/api/enterprise_device_attributes/enterprise_device_attributes_ash_apitest.cc index 3579c26..f563cc1 100644 --- a/chrome/browser/extensions/api/enterprise_device_attributes/enterprise_device_attributes_ash_apitest.cc +++ b/chrome/browser/extensions/api/enterprise_device_attributes/enterprise_device_attributes_ash_apitest.cc
@@ -34,20 +34,18 @@ constexpr char kExtensionPemPath[] = "extensions/api_test/enterprise_device_attributes.pem"; -base::Value BuildCustomArg(const std::string& expected_directory_device_id, - const std::string& expected_serial_number, - const std::string& expected_asset_id, - const std::string& expected_annotated_location, - const std::string& expected_hostname) { - base::Value custom_arg(base::Value::Type::DICTIONARY); - custom_arg.SetKey("expectedDirectoryDeviceId", - base::Value(expected_directory_device_id)); - custom_arg.SetKey("expectedSerialNumber", - base::Value(expected_serial_number)); - custom_arg.SetKey("expectedAssetId", base::Value(expected_asset_id)); - custom_arg.SetKey("expectedAnnotatedLocation", - base::Value(expected_annotated_location)); - custom_arg.SetKey("expectedHostname", base::Value(expected_hostname)); +base::Value::Dict BuildCustomArg( + const std::string& expected_directory_device_id, + const std::string& expected_serial_number, + const std::string& expected_asset_id, + const std::string& expected_annotated_location, + const std::string& expected_hostname) { + base::Value::Dict custom_arg; + custom_arg.Set("expectedDirectoryDeviceId", expected_directory_device_id); + custom_arg.Set("expectedSerialNumber", expected_serial_number); + custom_arg.Set("expectedAssetId", expected_asset_id); + custom_arg.Set("expectedAnnotatedLocation", expected_annotated_location); + custom_arg.Set("expectedHostname", expected_hostname); return custom_arg; }
diff --git a/chrome/browser/extensions/api/enterprise_device_attributes/enterprise_device_attributes_lacros_apitest.cc b/chrome/browser/extensions/api/enterprise_device_attributes/enterprise_device_attributes_lacros_apitest.cc index 071eb8d..26a26ad5 100644 --- a/chrome/browser/extensions/api/enterprise_device_attributes/enterprise_device_attributes_lacros_apitest.cc +++ b/chrome/browser/extensions/api/enterprise_device_attributes/enterprise_device_attributes_lacros_apitest.cc
@@ -38,20 +38,18 @@ constexpr char kExtensionPemPath[] = "extensions/api_test/enterprise_device_attributes.pem"; -base::Value BuildCustomArg(const std::string& expected_directory_device_id, - const std::string& expected_serial_number, - const std::string& expected_asset_id, - const std::string& expected_annotated_location, - const std::string& expected_hostname) { - base::Value custom_arg(base::Value::Type::DICTIONARY); - custom_arg.SetKey("expectedDirectoryDeviceId", - base::Value(expected_directory_device_id)); - custom_arg.SetKey("expectedSerialNumber", - base::Value(expected_serial_number)); - custom_arg.SetKey("expectedAssetId", base::Value(expected_asset_id)); - custom_arg.SetKey("expectedAnnotatedLocation", - base::Value(expected_annotated_location)); - custom_arg.SetKey("expectedHostname", base::Value(expected_hostname)); +base::Value::Dict BuildCustomArg( + const std::string& expected_directory_device_id, + const std::string& expected_serial_number, + const std::string& expected_asset_id, + const std::string& expected_annotated_location, + const std::string& expected_hostname) { + base::Value::Dict custom_arg; + custom_arg.Set("expectedDirectoryDeviceId", expected_directory_device_id); + custom_arg.Set("expectedSerialNumber", expected_serial_number); + custom_arg.Set("expectedAssetId", expected_asset_id); + custom_arg.Set("expectedAnnotatedLocation", expected_annotated_location); + custom_arg.Set("expectedHostname", expected_hostname); return custom_arg; } @@ -145,7 +143,7 @@ void TestExtension(Browser* browser, const GURL& page_url, - const base::Value& custom_arg_value) { + base::Value::Dict custom_arg_value) { DCHECK(page_url.is_valid()) << "page_url must be valid"; std::string custom_arg; @@ -187,10 +185,10 @@ std::string expected_annotated_location = is_affiliated ? kAnnotatedLocation : ""; std::string expected_hostname = is_affiliated ? kHostname : ""; - TestExtension(CreateBrowser(profile()), test_url, - BuildCustomArg(expected_directory_device_id, - expected_serial_number, expected_asset_id, - expected_annotated_location, expected_hostname)); + base::Value::Dict custom_arg = BuildCustomArg( + expected_directory_device_id, expected_serial_number, expected_asset_id, + expected_annotated_location, expected_hostname); + TestExtension(CreateBrowser(profile()), test_url, std::move(custom_arg)); } // Both cases of affiliated and non-affiliated users are tested.
diff --git a/chrome/browser/extensions/api/enterprise_networking_attributes/enterprise_networking_attributes_ash_apitest.cc b/chrome/browser/extensions/api/enterprise_networking_attributes/enterprise_networking_attributes_ash_apitest.cc index 644b46f..aa3b1d5 100644 --- a/chrome/browser/extensions/api/enterprise_networking_attributes/enterprise_networking_attributes_ash_apitest.cc +++ b/chrome/browser/extensions/api/enterprise_networking_attributes/enterprise_networking_attributes_ash_apitest.cc
@@ -41,26 +41,26 @@ constexpr char kWifiIPConfigV4Path[] = "/ipconfig/stub_wifi-ipv4"; constexpr char kWifiIPConfigV6Path[] = "/ipconfig/stub_wifi-ipv6"; -base::Value BuildCustomArgForSuccess(const std::string& expected_mac_address, - const std::string& expected_ipv4_address, - const std::string& expected_ipv6_address) { - base::Value network_details(base::Value::Type::DICTIONARY); - network_details.SetKey("macAddress", base::Value(expected_mac_address)); - network_details.SetKey("ipv4", base::Value(expected_ipv4_address)); - network_details.SetKey("ipv6", base::Value(expected_ipv6_address)); +base::Value::Dict BuildCustomArgForSuccess( + const std::string& expected_mac_address, + const std::string& expected_ipv4_address, + const std::string& expected_ipv6_address) { + base::Value::Dict network_details; + network_details.Set("macAddress", expected_mac_address); + network_details.Set("ipv4", expected_ipv4_address); + network_details.Set("ipv6", expected_ipv6_address); - base::Value custom_arg(base::Value::Type::DICTIONARY); - custom_arg.SetKey("testName", base::Value("success")); - custom_arg.SetKey("expectedResult", std::move(network_details)); + base::Value::Dict custom_arg; + custom_arg.Set("testName", "success"); + custom_arg.Set("expectedResult", std::move(network_details)); return custom_arg; } -base::Value BuildCustomArgForFailure( +base::Value::Dict BuildCustomArgForFailure( const std::string& expected_error_message) { - base::Value custom_arg(base::Value::Type::DICTIONARY); - custom_arg.SetKey("testName", base::Value("failure")); - custom_arg.SetKey("expectedErrorMessage", - base::Value(expected_error_message)); + base::Value::Dict custom_arg; + custom_arg.Set("testName", "failure"); + custom_arg.Set("expectedErrorMessage", expected_error_message); return custom_arg; } @@ -95,21 +95,17 @@ kWifiDevicePath, shill::kAddressProperty, base::Value(kMacAddress), /* notify_changed= */ false); - base::DictionaryValue ipconfig_v4_dictionary; - ipconfig_v4_dictionary.SetKey(shill::kAddressProperty, - base::Value(kIpv4Address)); - ipconfig_v4_dictionary.SetKey(shill::kMethodProperty, - base::Value(shill::kTypeIPv4)); - shill_ipconfig_client->AddIPConfig(kWifiIPConfigV4Path, - ipconfig_v4_dictionary); + base::Value::Dict ipconfig_v4_dictionary; + ipconfig_v4_dictionary.Set(shill::kAddressProperty, kIpv4Address); + ipconfig_v4_dictionary.Set(shill::kMethodProperty, shill::kTypeIPv4); + shill_ipconfig_client->AddIPConfig( + kWifiIPConfigV4Path, base::Value(std::move(ipconfig_v4_dictionary))); - base::DictionaryValue ipconfig_v6_dictionary; - ipconfig_v6_dictionary.SetKey(shill::kAddressProperty, - base::Value(kIpv6Address)); - ipconfig_v6_dictionary.SetKey(shill::kMethodProperty, - base::Value(shill::kTypeIPv6)); - shill_ipconfig_client->AddIPConfig(kWifiIPConfigV6Path, - ipconfig_v6_dictionary); + base::Value::Dict ipconfig_v6_dictionary; + ipconfig_v6_dictionary.Set(shill::kAddressProperty, kIpv6Address); + ipconfig_v6_dictionary.Set(shill::kMethodProperty, shill::kTypeIPv6); + shill_ipconfig_client->AddIPConfig( + kWifiIPConfigV6Path, base::Value(std::move(ipconfig_v6_dictionary))); base::Value::List ip_configs; ip_configs.Append(kWifiIPConfigV4Path); @@ -161,7 +157,7 @@ const GURL test_url = extension->GetResourceURL("test.html"); // Run test without connected network. - base::Value custom_arg_disconnected = + base::Value::Dict custom_arg_disconnected = is_affiliated ? 
BuildCustomArgForFailure(kErrorNetworkNotConnected) : BuildCustomArgForFailure(kErrorUserNotAffiliated); TestExtension(CreateBrowser(profile()), test_url, @@ -169,7 +165,7 @@ // Run test with connected network. ConnectNetwork(); - base::Value custom_arg_connected = + base::Value::Dict custom_arg_connected = is_affiliated ? BuildCustomArgForSuccess(kFormattedMacAddress, kIpv4Address, kIpv6Address) : BuildCustomArgForFailure(kErrorUserNotAffiliated);
diff --git a/chrome/browser/extensions/api/enterprise_platform_keys/enterprise_platform_keys_apitest_nss.cc b/chrome/browser/extensions/api/enterprise_platform_keys/enterprise_platform_keys_apitest_nss.cc index af321db..a759273 100644 --- a/chrome/browser/extensions/api/enterprise_platform_keys/enterprise_platform_keys_apitest_nss.cc +++ b/chrome/browser/extensions/api/enterprise_platform_keys/enterprise_platform_keys_apitest_nss.cc
@@ -190,9 +190,9 @@ // Builds the tests configuration dictionary and serializes it. std::string BuildCustomArg(bool user_session_test, bool system_token_enabled) { - base::Value custom_arg_value(base::Value::Type::DICTIONARY); - custom_arg_value.SetBoolKey(kIsUserSessionTestConfig, user_session_test); - custom_arg_value.SetBoolKey(kSystemTokenEnabledConfig, system_token_enabled); + base::Value::Dict custom_arg_value; + custom_arg_value.Set(kIsUserSessionTestConfig, user_session_test); + custom_arg_value.Set(kSystemTokenEnabledConfig, system_token_enabled); std::string custom_arg; if (!base::JSONWriter::Write(custom_arg_value, &custom_arg)) {
diff --git a/chrome/browser/extensions/api/enterprise_reporting_private/enterprise_reporting_private_unittest.cc b/chrome/browser/extensions/api/enterprise_reporting_private/enterprise_reporting_private_unittest.cc index fbd1af0..6049b6d 100644 --- a/chrome/browser/extensions/api/enterprise_reporting_private/enterprise_reporting_private_unittest.cc +++ b/chrome/browser/extensions/api/enterprise_reporting_private/enterprise_reporting_private_unittest.cc
@@ -1105,8 +1105,8 @@ } ::reporting::Record GetTestRecord() const { - base::Value data{base::Value::Type::DICTIONARY}; - data.SetKey("TEST_KEY", base::Value("TEST_VALUE")); + base::Value::Dict data; + data.Set("TEST_KEY", base::Value("TEST_VALUE")); std::string serialized_data; DCHECK(base::JSONWriter::Write(data, &serialized_data));
diff --git a/chrome/browser/extensions/api/force_installed_affiliated_extension_apitest.cc b/chrome/browser/extensions/api/force_installed_affiliated_extension_apitest.cc index c45bd2fd..78e330b 100644 --- a/chrome/browser/extensions/api/force_installed_affiliated_extension_apitest.cc +++ b/chrome/browser/extensions/api/force_installed_affiliated_extension_apitest.cc
@@ -112,7 +112,7 @@ void ForceInstalledAffiliatedExtensionApiTest::TestExtension( Browser* browser, const GURL& page_url, - const base::Value& custom_arg_value) { + const base::Value::Dict& custom_arg_value) { DCHECK(page_url.is_valid()) << "page_url must be valid"; std::string custom_arg;
diff --git a/chrome/browser/extensions/api/force_installed_affiliated_extension_apitest.h b/chrome/browser/extensions/api/force_installed_affiliated_extension_apitest.h index e5c066b..a6a3e8d 100644 --- a/chrome/browser/extensions/api/force_installed_affiliated_extension_apitest.h +++ b/chrome/browser/extensions/api/force_installed_affiliated_extension_apitest.h
@@ -53,7 +53,7 @@ // test pass/fail notification. void TestExtension(Browser* browser, const GURL& page_url, - const base::Value& custom_arg_value); + const base::Value::Dict& custom_arg_value); testing::NiceMock<policy::MockConfigurationPolicyProvider> policy_provider_; ash::ScopedStubInstallAttributes test_install_attributes_;
diff --git a/chrome/browser/extensions/api/tabs/tabs_test.cc b/chrome/browser/extensions/api/tabs/tabs_test.cc index 483fe59..bc530ca 100644 --- a/chrome/browser/extensions/api/tabs/tabs_test.cc +++ b/chrome/browser/extensions/api/tabs/tabs_test.cc
@@ -2040,7 +2040,7 @@ // Wait for the TestNavigationManager-monitored navigation to complete to // avoid a race during browser teardown (see crbug.com/882213). - navigation_manager.WaitForNavigationFinished(); + ASSERT_TRUE(navigation_manager.WaitForNavigationFinished()); } #endif // BUILDFLAG(ENABLE_PDF)
diff --git a/chrome/browser/extensions/api/terminal/terminal_private_api.cc b/chrome/browser/extensions/api/terminal/terminal_private_api.cc index c1488f9..8c4f889 100644 --- a/chrome/browser/extensions/api/terminal/terminal_private_api.cc +++ b/chrome/browser/extensions/api/terminal/terminal_private_api.cc
@@ -230,8 +230,8 @@ return; } base::Value::List args; - base::Value prefs(base::Value::Type::DICTIONARY); - prefs.SetKey(pref_name, profile->GetPrefs()->GetValue(pref_name).Clone()); + base::Value::Dict prefs; + prefs.Set(pref_name, profile->GetPrefs()->GetValue(pref_name).Clone()); args.Append(std::move(prefs)); auto event = std::make_unique<extensions::Event>( extensions::events::TERMINAL_PRIVATE_ON_PREF_CHANGED, @@ -733,22 +733,19 @@ TerminalPrivateGetOSInfoFunction::~TerminalPrivateGetOSInfoFunction() = default; ExtensionFunction::ResponseAction TerminalPrivateGetOSInfoFunction::Run() { - base::DictionaryValue info; - info.SetBoolKey("alternative_emulator", - base::FeatureList::IsEnabled( - ash::features::kTerminalAlternativeEmulator)); - info.SetBoolKey("multi_profile", base::FeatureList::IsEnabled( - ash::features::kTerminalMultiProfile)); - info.SetBoolKey("sftp", - base::FeatureList::IsEnabled(ash::features::kTerminalSftp)); - info.SetBoolKey("tast", - extensions::ExtensionRegistry::Get(browser_context()) - ->enabled_extensions() - .Contains(extension_misc::kGuestModeTestExtensionId)); - info.SetBoolKey( - "tmux_integration", - base::FeatureList::IsEnabled(ash::features::kTerminalTmuxIntegration)); - return RespondNow(OneArgument(std::move(info))); + base::Value::Dict info; + info.Set("alternative_emulator", + base::FeatureList::IsEnabled( + ash::features::kTerminalAlternativeEmulator)); + info.Set("multi_profile", + base::FeatureList::IsEnabled(ash::features::kTerminalMultiProfile)); + info.Set("sftp", base::FeatureList::IsEnabled(ash::features::kTerminalSftp)); + info.Set("tast", extensions::ExtensionRegistry::Get(browser_context()) + ->enabled_extensions() + .Contains(extension_misc::kGuestModeTestExtensionId)); + info.Set("tmux_integration", base::FeatureList::IsEnabled( + ash::features::kTerminalTmuxIntegration)); + return RespondNow(OneArgument(base::Value(std::move(info)))); } TerminalPrivateGetPrefsFunction::~TerminalPrivateGetPrefsFunction() = default;
diff --git a/chrome/browser/extensions/api/webrtc_logging_private/webrtc_logging_private_api.cc b/chrome/browser/extensions/api/webrtc_logging_private/webrtc_logging_private_api.cc index d58078e..75693fd 100644 --- a/chrome/browser/extensions/api/webrtc_logging_private/webrtc_logging_private_api.cc +++ b/chrome/browser/extensions/api/webrtc_logging_private/webrtc_logging_private_api.cc
@@ -612,10 +612,10 @@ const std::string& filesystem_id, const std::string& base_name) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); - std::unique_ptr<base::DictionaryValue> dict(new base::DictionaryValue()); - dict->SetStringKey("fileSystemId", filesystem_id); - dict->SetStringKey("baseName", base_name); - Respond(OneArgument(base::Value::FromUniquePtrValue(std::move(dict)))); + base::Value::Dict dict; + dict.Set("fileSystemId", filesystem_id); + dict.Set("baseName", base_name); + Respond(OneArgument(base::Value(std::move(dict)))); } void WebrtcLoggingPrivateGetLogsDirectoryFunction::FireErrorCallback(
diff --git a/chrome/browser/extensions/content_script_tracker_browsertest.cc b/chrome/browser/extensions/content_script_tracker_browsertest.cc index 4b17eb0..30aa7a9 100644 --- a/chrome/browser/extensions/content_script_tracker_browsertest.cc +++ b/chrome/browser/extensions/content_script_tracker_browsertest.cc
@@ -897,7 +897,8 @@ const Extension* extension = LoadExtension(unpacked_path); // Step UI.1.3 ASSERT_TRUE(extension); commit_delayer.Wait(); // Step UI.3b - part1 - navigation_manager.WaitForNavigationFinished(); // Step UI.3b - part2 + ASSERT_TRUE( + navigation_manager.WaitForNavigationFinished()); // Step UI.3b - part2 ASSERT_TRUE(listener.WaitUntilSatisfied()); // Step UI.4 // Verify that content script has been injected.
diff --git a/chrome/browser/extensions/extension_override_apitest.cc b/chrome/browser/extensions/extension_override_apitest.cc index 5ec1780..1ac69cc 100644 --- a/chrome/browser/extensions/extension_override_apitest.cc +++ b/chrome/browser/extensions/extension_override_apitest.cc
@@ -348,12 +348,12 @@ // a preferences file without corresponding UnloadExtension() calls. This is // the same as the above test, except for that it is testing the case where // the file already contains dupes when an extension is loaded. - base::Value list(base::Value::Type::LIST); + base::Value::List list; for (size_t i = 0; i < 3; ++i) { - std::unique_ptr<base::DictionaryValue> dict(new base::DictionaryValue()); - dict->SetStringKey("entry", "http://www.google.com/"); - dict->SetBoolKey("active", true); - list.Append(std::move(*dict)); + base::Value::Dict dict; + dict.Set("entry", "http://www.google.com/"); + dict.Set("active", true); + list.Append(std::move(dict)); } {
diff --git a/chrome/browser/extensions/extension_prefs_unittest.cc b/chrome/browser/extensions/extension_prefs_unittest.cc index cd3b6b3..49c295af 100644 --- a/chrome/browser/extensions/extension_prefs_unittest.cc +++ b/chrome/browser/extensions/extension_prefs_unittest.cc
@@ -1085,9 +1085,10 @@ EXPECT_EQ(kTestValue, str_value); // TODO(crbug.com/1015619): Remove 2023-05. kPrefStringForIdMapping. - auto dictionary = std::make_unique<base::DictionaryValue>(); - prefs()->UpdateExtensionPref(extension_->id(), kPrefStringForIdMapping, - std::move(dictionary)); + base::Value::Dict dictionary; + prefs()->UpdateExtensionPref( + extension_->id(), kPrefStringForIdMapping, + std::make_unique<base::Value>(std::move(dictionary))); EXPECT_TRUE( prefs()->ReadPrefAsDict(extension_->id(), kPrefStringForIdMapping));
diff --git a/chrome/browser/extensions/extension_protocols_unittest.cc b/chrome/browser/extensions/extension_protocols_unittest.cc index 34d1636..14d3fdf 100644 --- a/chrome/browser/extensions/extension_protocols_unittest.cc +++ b/chrome/browser/extensions/extension_protocols_unittest.cc
@@ -778,9 +778,8 @@ "web_accessible_resources": ["*"] })"; test_dir.WriteManifest(kManifest); - std::unique_ptr<base::DictionaryValue> manifest = base::DictionaryValue::From( - base::Value::ToUniquePtrValue(base::test::ParseJson(kManifest))); - ASSERT_TRUE(manifest); + base::Value::Dict manifest = base::test::ParseJsonDict(kManifest); + ASSERT_FALSE(manifest.empty()); test_dir.WriteFile(FILE_PATH_LITERAL("json_file.json"), "{}"); test_dir.WriteFile(FILE_PATH_LITERAL("js_file.js"), "function() {}");
diff --git a/chrome/browser/extensions/extension_service.cc b/chrome/browser/extensions/extension_service.cc index 38dfe9f..2fdbadf 100644 --- a/chrome/browser/extensions/extension_service.cc +++ b/chrome/browser/extensions/extension_service.cc
@@ -836,9 +836,6 @@ extension_prefs_->OnExtensionUninstalled( extension->id(), extension->location(), external_uninstall); - // Track the uninstallation. - UMA_HISTOGRAM_ENUMERATION("Extensions.ExtensionUninstalled", 1, 2); - return true; } @@ -1560,18 +1557,11 @@ // If the extension is disabled due to a permissions increase, but does in // fact have all permissions, remove that disable reason. - // TODO(devlin): This was added to fix crbug.com/616474, but it's unclear - // if this behavior should stay forever. - if (disable_reasons & disable_reason::DISABLE_PERMISSIONS_INCREASE) { - bool reset_permissions_increase = false; - if (!is_privilege_increase) { - reset_permissions_increase = true; - disable_reasons &= ~disable_reason::DISABLE_PERMISSIONS_INCREASE; - extension_prefs_->RemoveDisableReason( - extension->id(), disable_reason::DISABLE_PERMISSIONS_INCREASE); - } - UMA_HISTOGRAM_BOOLEAN("Extensions.ResetPermissionsIncrease", - reset_permissions_increase); + if (disable_reasons & disable_reason::DISABLE_PERMISSIONS_INCREASE && + !is_privilege_increase) { + disable_reasons &= ~disable_reason::DISABLE_PERMISSIONS_INCREASE; + extension_prefs_->RemoveDisableReason( + extension->id(), disable_reason::DISABLE_PERMISSIONS_INCREASE); } // Extension has changed permissions significantly. Disable it. A
diff --git a/chrome/browser/extensions/installed_loader.cc b/chrome/browser/extensions/installed_loader.cc index 0653c94..f559c7f 100644 --- a/chrome/browser/extensions/installed_loader.cc +++ b/chrome/browser/extensions/installed_loader.cc
@@ -22,6 +22,7 @@ #include "chrome/browser/extensions/extension_service.h" #include "chrome/browser/extensions/extension_util.h" #include "chrome/browser/extensions/load_error_reporter.h" +#include "chrome/browser/profiles/profile.h" #include "chrome/browser/profiles/profile_manager.h" #include "chrome/common/chrome_switches.h" #include "chrome/common/extensions/chrome_manifest_url_handlers.h" @@ -407,6 +408,21 @@ RecordExtensionsMetrics(); } +// static +bool InstalledLoader::ProfileCanUseNonComponentExtensions( + const Profile* profile) { + if (!profile) { + return false; + } + +#if !BUILDFLAG(IS_CHROMEOS_ASH) + return profile->IsRegularProfile(); +#else + // TODO(crbug.com/1383740): Expand to CrOS. + return false; +#endif +} + // TODO(crbug.com/1163038): Separate out Webstore/Offstore metrics. void InstalledLoader::RecordExtensionsMetrics() { Profile* profile = extension_service_->profile();
diff --git a/chrome/browser/extensions/installed_loader.h b/chrome/browser/extensions/installed_loader.h index df5685f..032fd28 100644 --- a/chrome/browser/extensions/installed_loader.h +++ b/chrome/browser/extensions/installed_loader.h
@@ -10,6 +10,8 @@ #include "base/files/file_path.h" #include "base/memory/raw_ptr.h" +class Profile; + namespace extensions { class ExtensionPrefs; @@ -49,6 +51,13 @@ // LoadAllExtensions(). void RecordExtensionsMetricsForTesting(); + // TODO(crbug.com/1383740): Expand to CrOS. + // TODO(crbug.com/1383740): Move to another file in + // //chrome/browser/extensions. + // Returns true for profiles that can use anything other than component + // extensions. + static bool ProfileCanUseNonComponentExtensions(const Profile* profile); + private: // Returns the flags that should be used with Extension::Create() for an // extension that is already installed.
diff --git a/chrome/browser/extensions/installed_loader_unittest.cc b/chrome/browser/extensions/installed_loader_unittest.cc index fe01ad08a..c8f38a4 100644 --- a/chrome/browser/extensions/installed_loader_unittest.cc +++ b/chrome/browser/extensions/installed_loader_unittest.cc
@@ -10,6 +10,7 @@ #include "chrome/browser/extensions/permissions_updater.h" #include "chrome/browser/extensions/scripting_permissions_modifier.h" #include "chrome/browser/profiles/profile.h" +#include "chrome/test/base/testing_profile.h" #include "extensions/common/extension_builder.h" #include "extensions/common/extension_features.h" @@ -389,4 +390,56 @@ RunHostPermissionsMetricsTest(params); } +#if !BUILDFLAG(IS_CHROMEOS_ASH) +TEST_F(InstalledLoaderUnitTest, + Browser_ProfileCanUseNonComponentExtensions_RegularProfile) { + InstalledLoader loader(service()); + // testing_profile() defaults to a regular profile. + EXPECT_TRUE(loader.ProfileCanUseNonComponentExtensions(testing_profile())); +} + +TEST_F(InstalledLoaderUnitTest, + Browser_ProfileCannotUseNonComponentExtensions_NoProfile) { + InstalledLoader loader(service()); + EXPECT_FALSE(loader.ProfileCanUseNonComponentExtensions(nullptr)); +} + +TEST_F(InstalledLoaderUnitTest, + Browser_ProfileCannotUseNonComponentExtensions_GuestProfile) { + testing_profile()->SetGuestSession(true); + InstalledLoader loader(service()); + EXPECT_FALSE(loader.ProfileCanUseNonComponentExtensions(testing_profile())); +} + +TEST_F(InstalledLoaderUnitTest, + Browser_ProfileCannotUseNonComponentExtensions_IncognitoProfile) { + TestingProfile* incognito_test_profile = + TestingProfile::Builder().BuildIncognito(testing_profile()); + ASSERT_TRUE(incognito_test_profile); + InstalledLoader loader(service()); + EXPECT_FALSE( + loader.ProfileCanUseNonComponentExtensions(incognito_test_profile)); +} + +TEST_F(InstalledLoaderUnitTest, + Browser_ProfileCannotUseNonComponentExtensions_OTRProfile) { + TestingProfile* otr_test_profile = + TestingProfile::Builder().BuildOffTheRecord( + testing_profile(), Profile::OTRProfileID::CreateUniqueForTesting()); + ASSERT_TRUE(otr_test_profile); + InstalledLoader loader(service()); + EXPECT_FALSE(loader.ProfileCanUseNonComponentExtensions(otr_test_profile)); +} +#endif // !BUILDFLAG(IS_CHROMEOS_ASH) + +#if BUILDFLAG(IS_CHROMEOS_ASH) +// TODO(crbug.com/1383740): Expand to CrOS. +TEST_F(InstalledLoaderUnitTest, + ChromeOS_ProfileCanUseNonComponentExtensions_RegularProfile) { + InstalledLoader loader(service()); + // testing_profile() defaults to a regular profile. + EXPECT_FALSE(loader.ProfileCanUseNonComponentExtensions(testing_profile())); +} +#endif // BUILDFLAG(IS_CHROMEOS_ASH) + } // namespace extensions
diff --git a/chrome/browser/extensions/omnibox_focus_interactive_test.cc b/chrome/browser/extensions/omnibox_focus_interactive_test.cc index 4f6ad7b..a00229e 100644 --- a/chrome/browser/extensions/omnibox_focus_interactive_test.cc +++ b/chrome/browser/extensions/omnibox_focus_interactive_test.cc
@@ -570,7 +570,7 @@ EXPECT_TRUE(content::ExecuteScript( web_contents->GetPrimaryMainFrame(), content::JsReplace(kAddFencedFrameScript, fenced_frame_url))); - navigation.WaitForNavigationFinished(); + ASSERT_TRUE(navigation.WaitForNavigationFinished()); // Verify that after the fenced frame navigation, the tab contents stayed // focused.
diff --git a/chrome/browser/extensions/standard_management_policy_provider_unittest.cc b/chrome/browser/extensions/standard_management_policy_provider_unittest.cc index 2874aa8..5716367 100644 --- a/chrome/browser/extensions/standard_management_policy_provider_unittest.cc +++ b/chrome/browser/extensions/standard_management_policy_provider_unittest.cc
@@ -151,11 +151,10 @@ // Tests the behavior of the ManagementPolicy provider methods for a theme // extension with and without a set policy theme. TEST_F(StandardManagementPolicyProviderTest, ThemeExtension) { - auto extension = - ExtensionBuilder("testTheme") - .SetLocation(ManifestLocation::kInternal) - .SetManifestKey("theme", std::make_unique<base::DictionaryValue>()) - .Build(); + auto extension = ExtensionBuilder("testTheme") + .SetLocation(ManifestLocation::kInternal) + .SetManifestKey("theme", base::Value::Dict()) + .Build(); std::u16string error16; EXPECT_EQ(extension->GetType(), Manifest::TYPE_THEME);
diff --git a/chrome/browser/extensions/user_script_listener.cc b/chrome/browser/extensions/user_script_listener.cc index 9e937da2..90ce2af 100644 --- a/chrome/browser/extensions/user_script_listener.cc +++ b/chrome/browser/extensions/user_script_listener.cc
@@ -7,8 +7,6 @@ #include <memory> #include "base/bind.h" -#include "base/metrics/histogram_macros.h" -#include "base/timer/elapsed_timer.h" #include "chrome/browser/browser_process.h" #include "chrome/browser/extensions/chrome_content_browser_client_extensions_part.h" #include "chrome/browser/profiles/profile.h" @@ -42,8 +40,6 @@ should_defer_ = false; // Only resume the request if |this| has deferred it. if (did_defer_) { - UMA_HISTOGRAM_TIMES("Extensions.ThrottledNetworkRequestDelay", - timer_->Elapsed()); Resume(); } } @@ -53,7 +49,6 @@ // Only defer requests if Resume has not yet been called. if (should_defer_) { did_defer_ = true; - timer_ = std::make_unique<base::ElapsedTimer>(); return DEFER; } return PROCEED; @@ -66,7 +61,6 @@ private: bool should_defer_ = true; bool did_defer_ = false; - std::unique_ptr<base::ElapsedTimer> timer_; }; struct UserScriptListener::ProfileData {
diff --git a/chrome/browser/extensions/user_script_listener_unittest.cc b/chrome/browser/extensions/user_script_listener_unittest.cc index 2ab843b7..29f978ca 100644 --- a/chrome/browser/extensions/user_script_listener_unittest.cc +++ b/chrome/browser/extensions/user_script_listener_unittest.cc
@@ -36,6 +36,7 @@ #include "extensions/browser/test_extension_registry_observer.h" #include "extensions/common/url_pattern_set.h" #include "testing/gtest/include/gtest/gtest.h" +#include "third_party/abseil-cpp/absl/types/optional.h" #if BUILDFLAG(IS_CHROMEOS_ASH) #include "chrome/browser/ash/login/users/fake_chrome_user_manager.h" @@ -53,13 +54,17 @@ const char kNotMatchingUrl[] = "http://example.com/"; const ExtensionId kTestExtensionId = "behllobkkfkfnphdnhnkndlbkcpglgmj"; -// Yoinked from extension_manifest_unittest.cc. -std::unique_ptr<base::DictionaryValue> LoadManifestFile( - const base::FilePath path, - std::string* error) { +// Yoinked from manifest_unittest.cc. +absl::optional<base::Value::Dict> LoadManifestFile(const base::FilePath path, + std::string* error) { EXPECT_TRUE(base::PathExists(path)); JSONFileValueDeserializer deserializer(path); - return base::DictionaryValue::From(deserializer.Deserialize(nullptr, error)); + std::unique_ptr<base::Value> manifest = + deserializer.Deserialize(nullptr, error); + if (!manifest || !manifest->is_dict()) { + return absl::nullopt; + } + return std::move(*manifest).TakeDict(); } scoped_refptr<Extension> LoadExtension(const std::string& filename, @@ -70,11 +75,12 @@ AppendASCII("extensions"). AppendASCII("manifest_tests"). AppendASCII(filename.c_str()); - std::unique_ptr<base::DictionaryValue> value = LoadManifestFile(path, error); - if (!value) + absl::optional<base::Value::Dict> manifest = LoadManifestFile(path, error); + if (!manifest) { return nullptr; + } return Extension::Create(path.DirName(), mojom::ManifestLocation::kUnpacked, - value->GetDict(), Extension::NO_FLAGS, error); + *manifest, Extension::NO_FLAGS, error); } } // namespace
diff --git a/chrome/browser/extensions/web_contents_browsertest.cc b/chrome/browser/extensions/web_contents_browsertest.cc index 7748b02..eb1d955b 100644 --- a/chrome/browser/extensions/web_contents_browsertest.cc +++ b/chrome/browser/extensions/web_contents_browsertest.cc
@@ -152,7 +152,7 @@ ASSERT_TRUE(ExecuteScriptInBackgroundPageNoWait( extension->id(), base::StringPrintf(kScript, target_url.spec().c_str()))); - navigation_observer.WaitForNavigationFinished(); + ASSERT_TRUE(navigation_observer.WaitForNavigationFinished()); EXPECT_FALSE(navigation_observer.was_committed()); EXPECT_EQ(extension->GetResourceURL("background.html"), background_contents->GetLastCommittedURL()); @@ -167,7 +167,7 @@ ASSERT_TRUE(ExecuteScriptInBackgroundPageNoWait( extension->id(), base::StringPrintf(kScript, target_url.spec().c_str()))); - navigation_observer.WaitForNavigationFinished(); + ASSERT_TRUE(navigation_observer.WaitForNavigationFinished()); EXPECT_TRUE(navigation_observer.was_committed()); EXPECT_EQ(target_url, background_contents->GetLastCommittedURL()); } @@ -181,7 +181,7 @@ ASSERT_TRUE(ExecuteScriptInBackgroundPageNoWait( extension->id(), base::StringPrintf(kScript, target_url.spec().c_str()))); - navigation_observer.WaitForNavigationFinished(); + ASSERT_TRUE(navigation_observer.WaitForNavigationFinished()); EXPECT_TRUE(navigation_observer.was_committed()); EXPECT_EQ(target_url, background_contents->GetLastCommittedURL()); }
diff --git a/chrome/browser/flag-metadata.json b/chrome/browser/flag-metadata.json index 88e71825..25ac9a5 100644 --- a/chrome/browser/flag-metadata.json +++ b/chrome/browser/flag-metadata.json
@@ -430,11 +430,6 @@ "expiry_milestone": 113 }, { - "name": "autofill-enable-get-details-for-enroll-parsing-in-upload-card-response", - "owners": [ "vinnypersky@google.com", "siyua" ], - "expiry_milestone": 110 - }, - { "name": "autofill-enable-manual-fallback-for-virtual-cards", "owners": [ "siashah", "siyua" ], "expiry_milestone": 112 @@ -643,6 +638,11 @@ "expiry_milestone": 108 }, { + "name": "block-insecure-downloads", + "owners": [ "jdeblasio", "trusty-transport@chromium.org" ], + "expiry_milestone": 118 + }, + { "name": "block-insecure-private-network-requests", "owners": [ "titouan", "chrome-security-owp-team@google.com" ], "expiry_milestone": 111 @@ -2327,11 +2327,6 @@ "expiry_milestone": 115 }, { - "name": "enable-fre-default-browser-screen-testing", - "owners": [ "alionadangla", "gambard" ], - "expiry_milestone": 110 - }, - { "name": "enable-fullscreen-api", "owners": [ "ajuma", "joemerramos" ], "expiry_milestone": 113 @@ -2600,7 +2595,7 @@ }, { "name": "enable-log-controller-for-diagnostics-app", - "owners": [ "//ash/webui/diagnostics_ui/OWNERS" ], + "owners": [ "ashleydp", "jimmyxgong", "michaelcheco", "cros-peripherals@google.com" ], "expiry_milestone": 120 }, {
diff --git a/chrome/browser/flag_descriptions.cc b/chrome/browser/flag_descriptions.cc index e522d9a..15dfbbe 100644 --- a/chrome/browser/flag_descriptions.cc +++ b/chrome/browser/flag_descriptions.cc
@@ -85,6 +85,12 @@ "reporting delays and noise. Only works if the Attribution Reporting API " "is already enabled."; +const char kBlockInsecureDownloadsName[] = "Block insecure downloads"; +const char kBlockInsecureDownloadsDescription[] = + "Enables insecure download blocking. This shows a 'blocked' message if the " + "user attempts to download a file over an insecure transport (e.g. HTTP) " + "either directly or via an insecure redirect."; + const char kBrokerFileOperationsOnDiskCacheInNetworkServiceName[] = "Broker file operations on disk cache in the Network Service"; const char kBrokerFileOperationsOnDiskCacheInNetworkServiceDescription[] = @@ -327,17 +333,6 @@ "When enabled, manual fallback will be auto-triggered on form interaction " "in the case where autofill failed to fill a credit card form accurately."; -const char kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponseName[] = - "Enable parsing of the GetDetailsForEnrollResponseDetails in the " - "UploadCardResponseDetails"; -const char - kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponseDescription[] = - "When enabled, the GetDetailsForEnrollResponseDetails in the " - "UploadCardResponseDetails will be parsed, which will allow the " - "Virtual Card Enrollment flow to skip making a new GetDetailsForEnroll " - "request. This is an optimization to improve the latency of the " - "Virtual Card Enrollment flow."; - const char kAutofillEnableFIDOProgressDialogName[] = "Show FIDO progress dialog on Android"; const char kAutofillEnableFIDOProgressDialogDescription[] =
diff --git a/chrome/browser/flag_descriptions.h b/chrome/browser/flag_descriptions.h index 69151e6..59b9ea2 100644 --- a/chrome/browser/flag_descriptions.h +++ b/chrome/browser/flag_descriptions.h
@@ -75,6 +75,9 @@ extern const char kAttributionReportingDebugModeName[]; extern const char kAttributionReportingDebugModeDescription[]; +extern const char kBlockInsecureDownloadsName[]; +extern const char kBlockInsecureDownloadsDescription[]; + extern const char kBrokerFileOperationsOnDiskCacheInNetworkServiceName[]; extern const char kBrokerFileOperationsOnDiskCacheInNetworkServiceDescription[]; @@ -195,11 +198,6 @@ extern const char kAutofillAutoTriggerManualFallbackForCardsName[]; extern const char kAutofillAutoTriggerManualFallbackForCardsDescription[]; -extern const char - kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponseName[]; -extern const char - kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponseDescription[]; - extern const char kAutofillEnableFIDOProgressDialogName[]; extern const char kAutofillEnableFIDOProgressDialogDescription[];
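For context (not part of the hunks shown here): a new flag also needs a matching FeatureEntry in the kFeatureEntries array in chrome/browser/about_flags.cc, wiring the flag name from flag-metadata.json to the strings added above and to a base::Feature. A minimal sketch of such an entry follows; the feature constant name features::kBlockInsecureDownloads is an assumption, not taken from this change.

    // Sketch of the corresponding kFeatureEntries element in about_flags.cc.
    // The feature constant name below is assumed for illustration only.
    {"block-insecure-downloads", flag_descriptions::kBlockInsecureDownloadsName,
     flag_descriptions::kBlockInsecureDownloadsDescription, kOsAll,
     FEATURE_VALUE_TYPE(features::kBlockInsecureDownloads)},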
diff --git a/chrome/browser/media/router/discovery/access_code/access_code_cast_sink_service_browsertest.cc b/chrome/browser/media/router/discovery/access_code/access_code_cast_sink_service_browsertest.cc index 2a08693..f731c1d 100644 --- a/chrome/browser/media/router/discovery/access_code/access_code_cast_sink_service_browsertest.cc +++ b/chrome/browser/media/router/discovery/access_code/access_code_cast_sink_service_browsertest.cc
@@ -45,16 +45,8 @@ class AccessCodeCastSinkServiceBrowserTest : public AccessCodeCastIntegrationBrowserTest {}; -// TODO(b/242928209): Saved device tests are flaky on linux-rel/Mac/ChromeOS. -#if BUILDFLAG(IS_LINUX) -#define MAYBE_PRE_InstantExpiration DISABLED_PRE_InstantExpiration -#define MAYBE_InstantExpiration DISABLED_InstantExpiration -#else -#define MAYBE_PRE_InstantExpiration PRE_InstantExpiration -#define MAYBE_InstantExpiration InstantExpiration -#endif IN_PROC_BROWSER_TEST_F(AccessCodeCastSinkServiceBrowserTest, - MAYBE_PRE_InstantExpiration) { + PRE_InstantExpiration) { #if BUILDFLAG(IS_WIN) // TODO(b/235896651): This test sometimes timesout on win10. if (base::win::GetVersion() >= base::win::Version::WIN10) @@ -119,7 +111,7 @@ } IN_PROC_BROWSER_TEST_F(AccessCodeCastSinkServiceBrowserTest, - MAYBE_InstantExpiration) { + InstantExpiration) { #if BUILDFLAG(IS_WIN) // TODO(b/235896651): This test sometimes timesout on win10. if (base::win::GetVersion() >= base::win::Version::WIN10) @@ -143,16 +135,7 @@ weak_ptr_factory_.GetWeakPtr())); } -// TODO(b/242928209): Saved device tests are flaky on linux-rel/Mac -#if BUILDFLAG(IS_LINUX) -#define MAYBE_PRE_SavedDevice DISABLED_PRE_SavedDevice -#define MAYBE_SavedDevice DISABLED_SavedDevice -#else -#define MAYBE_PRE_SavedDevice PRE_SavedDevice -#define MAYBE_SavedDevice SavedDevice -#endif -IN_PROC_BROWSER_TEST_F(AccessCodeCastSinkServiceBrowserTest, - MAYBE_PRE_SavedDevice) { +IN_PROC_BROWSER_TEST_F(AccessCodeCastSinkServiceBrowserTest, PRE_SavedDevice) { #if BUILDFLAG(IS_WIN) // TODO(b/235896651): This test sometimes timesout on win10. if (base::win::GetVersion() >= base::win::Version::WIN10) @@ -209,8 +192,7 @@ GetPrefUpdater()->GetMediaSinkInternalValueBySinkId("cast:<1234>")); } -IN_PROC_BROWSER_TEST_F(AccessCodeCastSinkServiceBrowserTest, - MAYBE_SavedDevice) { +IN_PROC_BROWSER_TEST_F(AccessCodeCastSinkServiceBrowserTest, SavedDevice) { #if BUILDFLAG(IS_WIN) // TODO(b/235896651): This test sometimes timesout on win10. if (base::win::GetVersion() >= base::win::Version::WIN10)
diff --git a/chrome/browser/media/webrtc/region_capture_browsertest.cc b/chrome/browser/media/webrtc/region_capture_browsertest.cc index 47dd08f3..f28a3af 100644 --- a/chrome/browser/media/webrtc/region_capture_browsertest.cc +++ b/chrome/browser/media/webrtc/region_capture_browsertest.cc
@@ -742,17 +742,10 @@ } // Original track becomes unblocked for cropping after clone is GCed 2/3. -// TODO(crbug.com/1353349) Re-enable for macOS after flakes are resolved. -#if BUILDFLAG(IS_MAC) -#define MAYBE_CanRecropOriginalTrackAfterCloneIsGarbageCollected \ - DISABLED_CanRecropOriginalTrackAfterCloneIsGarbageCollected -#else -#define MAYBE_CanRecropOriginalTrackAfterCloneIsGarbageCollected \ - CanRecropOriginalTrackAfterCloneIsGarbageCollected -#endif +// TODO(crbug.com/1353349) Re-enable after flakes are resolved. IN_PROC_BROWSER_TEST_F( RegionCaptureClonesBrowserTest, - MAYBE_CanRecropOriginalTrackAfterCloneIsGarbageCollected) { + DISABLED_CanRecropOriginalTrackAfterCloneIsGarbageCollected) { ManualSetUp(); ASSERT_TRUE(CropTo(kCropTarget0, Frame::kTopLevelDocument, Track::kOriginal));
diff --git a/chrome/browser/platform_util_linux.cc b/chrome/browser/platform_util_linux.cc index 90bf7b59..15b06139 100644 --- a/chrome/browser/platform_util_linux.cc +++ b/chrome/browser/platform_util_linux.cc
@@ -105,10 +105,10 @@ void OnAppTerminating() { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); // The browser process is about to exit. Clean up while we still can. + object_proxy_ = nullptr; if (bus_) bus_->ShutdownOnDBusThreadAndBlock(); bus_.reset(); - object_proxy_ = nullptr; } void CheckFileManagerRunning(Profile* profile, @@ -278,8 +278,10 @@ } scoped_refptr<dbus::Bus> bus_; + + // These proxy objects are owned by `bus_`. raw_ptr<dbus::ObjectProxy> dbus_proxy_ = nullptr; - raw_ptr<dbus::ObjectProxy, DanglingUntriaged> object_proxy_ = nullptr; + raw_ptr<dbus::ObjectProxy> object_proxy_ = nullptr; absl::optional<bool> prefer_filemanager_interface_;
diff --git a/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output.js b/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output.js index 875c577..10bc758 100644 --- a/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output.js +++ b/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output.js
@@ -11,7 +11,6 @@ import {Cursor, CURSOR_NODE_INDEX} from '../../../common/cursors/cursor.js'; import {CursorRange} from '../../../common/cursors/range.js'; import {LocalStorage} from '../../../common/local_storage.js'; -import {AutomationTreeWalker} from '../../../common/tree_walker.js'; import {Earcon} from '../../common/abstract_earcons.js'; import {NavBraille} from '../../common/braille/nav_braille.js'; import {EventSourceType} from '../../common/event_source_type.js'; @@ -672,48 +671,6 @@ } /** @override */ - formatTextContent_(data, token, options) { - const buff = data.outputBuffer; - const node = data.node; - const formatLog = data.outputFormatLogger; - - if (node.name && token === 'nameOrTextContent') { - formatLog.writeToken(token); - this.format_({ - node, - outputFormat: '$name', - outputBuffer: buff, - outputFormatLogger: formatLog, - }); - return; - } - - if (!node.firstChild) { - return; - } - - const root = node; - const walker = new AutomationTreeWalker(node, Dir.FORWARD, { - visit: AutomationPredicate.leafOrStaticText, - leaf: n => { - // The root might be a leaf itself, but we still want to descend - // into it. - return n !== root && AutomationPredicate.leafOrStaticText(n); - }, - root: r => r === root, - }); - const outputStrings = []; - while (walker.next().node) { - if (walker.node.name) { - outputStrings.push(walker.node.name.trim()); - } - } - const finalOutput = outputStrings.join(' '); - this.append_(buff, finalOutput, options); - formatLog.writeTokenWithValue(token, finalOutput); - } - - /** @override */ formatAsFieldAccessor_(data, token, options) { const buff = data.outputBuffer; const node = data.node;
diff --git a/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output_formatter.js b/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output_formatter.js index 8e9ff21b..2838dd2 100644 --- a/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output_formatter.js +++ b/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output_formatter.js
@@ -11,6 +11,7 @@ import {Cursor, CURSOR_NODE_INDEX} from '../../../common/cursors/cursor.js'; import {CursorRange} from '../../../common/cursors/range.js'; import {LocalStorage} from '../../../common/local_storage.js'; +import {AutomationTreeWalker} from '../../../common/tree_walker.js'; import {Msgs} from '../../common/msgs.js'; import {OutputFormatParserObserver} from './output_format_parser.js'; @@ -101,7 +102,7 @@ } else if (token === 'node') { this.formatNode_(this.params_, token, tree, options); } else if (token === 'nameOrTextContent' || token === 'textContent') { - this.output_.formatTextContent_(this.params_, token, options); + this.formatTextContent_(this.params_, token, options); } else if (this.params_.node[token] !== undefined) { this.output_.formatAsFieldAccessor_(this.params_, token, options); } else if (outputTypes.OUTPUT_STATE_INFO[token]) { @@ -687,6 +688,53 @@ * @param {!outputTypes.OutputFormattingData} data * @param {string} token * @param {!{annotation: Array<*>, isUnique: (boolean|undefined)}} options + * @private + */ + formatTextContent_(data, token, options) { + const buff = data.outputBuffer; + const node = data.node; + const formatLog = data.outputFormatLogger; + + if (node.name && token === 'nameOrTextContent') { + formatLog.writeToken(token); + this.output_.format_({ + node, + outputFormat: '$name', + outputBuffer: buff, + outputFormatLogger: formatLog, + }); + return; + } + + if (!node.firstChild) { + return; + } + + const root = node; + const walker = new AutomationTreeWalker(node, Dir.FORWARD, { + visit: AutomationPredicate.leafOrStaticText, + leaf: n => { + // The root might be a leaf itself, but we still want to descend + // into it. + return n !== root && AutomationPredicate.leafOrStaticText(n); + }, + root: r => r === root, + }); + const outputStrings = []; + while (walker.next().node) { + if (walker.node.name) { + outputStrings.push(walker.node.name.trim()); + } + } + const finalOutput = outputStrings.join(' '); + this.output_.append_(buff, finalOutput, options); + formatLog.writeTokenWithValue(token, finalOutput); + } + + /** + * @param {!outputTypes.OutputFormattingData} data + * @param {string} token + * @param {!{annotation: Array<*>, isUnique: (boolean|undefined)}} options */ formatUrlFilename_(data, token, options) { const buff = data.outputBuffer;
diff --git a/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output_interface.js b/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output_interface.js index d041f78f..b083326 100644 --- a/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output_interface.js +++ b/chrome/browser/resources/chromeos/accessibility/chromevox/background/output/output_interface.js
@@ -94,13 +94,6 @@ formatPrecedingBullet_(data) {} /** - * @param {!outputTypes.OutputFormattingData} data - * @param {string} token - * @param {!{annotation: Array<*>, isUnique: (boolean|undefined)}} options - */ - formatTextContent_(data, token, options) {} - - /** * @param {!AutomationNode} node * @param {!AutomationNode} prevNode * @param {!outputTypes.OutputEventType} type
diff --git a/chrome/browser/resources/chromeos/accessibility/common/repeated_event_handler_test.js b/chrome/browser/resources/chromeos/accessibility/common/repeated_event_handler_test.js index a9f926f9..6cf82bec 100644 --- a/chrome/browser/resources/chromeos/accessibility/common/repeated_event_handler_test.js +++ b/chrome/browser/resources/chromeos/accessibility/common/repeated_event_handler_test.js
@@ -3,21 +3,23 @@ // found in the LICENSE file. // Include test fixture. -GEN_INCLUDE([ - '../select_to_speak/select_to_speak_e2e_test_base.js', -]); +GEN_INCLUDE(['testing/common_e2e_test_base.js']); /** Test fixture for array_util.js. */ -RepeatedEventHandlerTest = class extends SelectToSpeakE2ETest { +AccessibilityExtensionRepeatedEventHandlerTest = + class extends CommonE2ETestBase { /** @override */ async setUpDeferred() { + await importModule('EventGenerator', '/common/event_generator.js'); + await importModule('KeyCode', '/common/key_code.js'); await importModule( 'RepeatedEventHandler', '/common/repeated_event_handler.js'); } }; AX_TEST_F( - 'RepeatedEventHandlerTest', 'RepeatedEventHandledOnce', async function() { + 'AccessibilityExtensionRepeatedEventHandlerTest', + 'RepeatedEventHandledOnce', async function() { const root = await this.runWithLoadedTree(''); this.handlerCallCount = 0; const handler = () => this.handlerCallCount++; @@ -37,8 +39,8 @@ }); AX_TEST_F( - 'RepeatedEventHandlerTest', 'NoEventsHandledAfterStopListening', - async function() { + 'AccessibilityExtensionRepeatedEventHandlerTest', + 'NoEventsHandledAfterStopListening', async function() { const root = await this.runWithLoadedTree(''); this.handlerCallCount = 0; const handler = () => this.handlerCallCount++;
diff --git a/chrome/browser/resources/chromeos/accessibility/common/repeated_tree_change_handler_test.js b/chrome/browser/resources/chromeos/accessibility/common/repeated_tree_change_handler_test.js index 142cf090..1c878480 100644 --- a/chrome/browser/resources/chromeos/accessibility/common/repeated_tree_change_handler_test.js +++ b/chrome/browser/resources/chromeos/accessibility/common/repeated_tree_change_handler_test.js
@@ -3,12 +3,11 @@ // found in the LICENSE file. // Include test fixture. -GEN_INCLUDE([ - '../select_to_speak/select_to_speak_e2e_test_base.js', -]); +GEN_INCLUDE(['testing/common_e2e_test_base.js']); /** Test fixture for array_util.js. */ -RepeatedTreeChangeHandlerTest = class extends SelectToSpeakE2ETest { +AccessibilityExtensionRepeatedTreeChangeHandlerTest = + class extends CommonE2ETestBase { /** @override */ async setUpDeferred() { await importModule( @@ -17,8 +16,8 @@ }; TEST_F( - 'RepeatedTreeChangeHandlerTest', 'RepeatedTreeChangeHandledOnce', - function() { + 'AccessibilityExtensionRepeatedTreeChangeHandlerTest', + 'RepeatedTreeChangeHandledOnce', function() { this.runWithLoadedDesktop(() => { this.handlerCallCount = 0; const handler = () => this.handlerCallCount++; @@ -38,28 +37,31 @@ }); }); -TEST_F('RepeatedTreeChangeHandlerTest', 'Predicate', function() { - this.runWithLoadedDesktop(() => { - this.handlerCallCount = 0; - const handler = () => this.handlerCallCount++; +TEST_F( + 'AccessibilityExtensionRepeatedTreeChangeHandlerTest', 'Predicate', + function() { + this.runWithLoadedDesktop(() => { + this.handlerCallCount = 0; + const handler = () => this.handlerCallCount++; - const repeatedHandler = new RepeatedTreeChangeHandler( - 'allTreeChanges', handler, {predicate: c => c.type === 'nodeRemoved'}); + const repeatedHandler = new RepeatedTreeChangeHandler( + 'allTreeChanges', handler, + {predicate: c => c.type === 'nodeRemoved'}); - // Simulate events being fired. - repeatedHandler.onChange_({type: 'nodeAdded'}); - repeatedHandler.onChange_({type: 'nodeAdded'}); - repeatedHandler.onChange_({type: 'nodeAdded'}); - repeatedHandler.onChange_({type: 'nodeRemoved'}); - repeatedHandler.onChange_({type: 'nodeRemoved'}); - repeatedHandler.onChange_({type: 'nodeRemoved'}); - repeatedHandler.onChange_({type: 'nodeRemoved'}); + // Simulate events being fired. + repeatedHandler.onChange_({type: 'nodeAdded'}); + repeatedHandler.onChange_({type: 'nodeAdded'}); + repeatedHandler.onChange_({type: 'nodeAdded'}); + repeatedHandler.onChange_({type: 'nodeRemoved'}); + repeatedHandler.onChange_({type: 'nodeRemoved'}); + repeatedHandler.onChange_({type: 'nodeRemoved'}); + repeatedHandler.onChange_({type: 'nodeRemoved'}); - // Verify that nodes that don't satisfy the predicate aren't added to the - // change stack. - assertEquals(repeatedHandler.changeStack_.length, 4); + // Verify that nodes that don't satisfy the predicate aren't added to + // the change stack. + assertEquals(repeatedHandler.changeStack_.length, 4); - // Yield before verifying how many times the handler was called. - setTimeout(() => assertEquals(this.handlerCallCount, 1), 0); - }); -}); + // Yield before verifying how many times the handler was called. + setTimeout(() => assertEquals(this.handlerCallCount, 1), 0); + }); + });
diff --git a/chrome/browser/resources/settings/chromeos/os_settings_icons.html b/chrome/browser/resources/settings/chromeos/os_settings_icons.html index 75a07d6..1ff5447a 100644 --- a/chrome/browser/resources/settings/chromeos/os_settings_icons.html +++ b/chrome/browser/resources/settings/chromeos/os_settings_icons.html
@@ -166,8 +166,8 @@ <g id="fingerprint" viewBox="0 0 24 24"><path d="M17.81 4.47c-.08 0-.16-.02-.23-.06C15.66 3.42 14 3 12.01 3c-1.98 0-3.86.47-5.57 1.41-.24.13-.54.04-.68-.2-.13-.24-.04-.55.2-.68C7.82 2.52 9.86 2 12.01 2c2.13 0 3.99.47 6.03 1.52.25.13.34.43.21.67-.09.18-.26.28-.44.28zM3.5 9.72c-.1 0-.2-.03-.29-.09-.23-.16-.28-.47-.12-.7.99-1.4 2.25-2.5 3.75-3.27C9.98 4.04 14 4.03 17.15 5.65c1.5.77 2.76 1.86 3.75 3.25.16.22.11.54-.12.7-.23.16-.54.11-.7-.12-.9-1.26-2.04-2.25-3.39-2.94-2.87-1.47-6.54-1.47-9.4.01-1.36.7-2.5 1.7-3.4 2.96-.08.14-.23.21-.39.21zm6.25 12.07c-.13 0-.26-.05-.35-.15-.87-.87-1.34-1.43-2.01-2.64-.69-1.23-1.05-2.73-1.05-4.34 0-2.97 2.54-5.39 5.66-5.39s5.66 2.42 5.66 5.39c0 .28-.22.5-.5.5s-.5-.22-.5-.5c0-2.42-2.09-4.39-4.66-4.39-2.57 0-4.66 1.97-4.66 4.39 0 1.44.32 2.77.93 3.85.64 1.15 1.08 1.64 1.85 2.42.19.2.19.51 0 .71-.11.1-.24.15-.37.15zm7.17-1.85c-1.19 0-2.24-.3-3.1-.89-1.49-1.01-2.38-2.65-2.38-4.39 0-.28.22-.5.5-.5s.5.22.5.5c0 1.41.72 2.74 1.94 3.56.71.48 1.54.71 2.54.71.24 0 .64-.03 1.04-.1.27-.05.53.13.58.41.05.27-.13.53-.41.58-.57.11-1.07.12-1.21.12zM14.91 22c-.04 0-.09-.01-.13-.02-1.59-.44-2.63-1.03-3.72-2.1-1.4-1.39-2.17-3.24-2.17-5.22 0-1.62 1.38-2.94 3.08-2.94 1.7 0 3.08 1.32 3.08 2.94 0 1.07.93 1.94 2.08 1.94s2.08-.87 2.08-1.94c0-3.77-3.25-6.83-7.25-6.83-2.84 0-5.44 1.58-6.61 4.03-.39.81-.59 1.76-.59 2.8 0 .78.07 2.01.67 3.61.1.26-.03.55-.29.64-.26.1-.55-.04-.64-.29-.49-1.31-.73-2.61-.73-3.96 0-1.2.23-2.29.68-3.24 1.33-2.79 4.28-4.6 7.51-4.6 4.55 0 8.25 3.51 8.25 7.83 0 1.62-1.38 2.94-3.08 2.94s-3.08-1.32-3.08-2.94c0-1.07-.93-1.94-2.08-1.94s-2.08.87-2.08 1.94c0 1.71.66 3.31 1.87 4.51.95.94 1.86 1.46 3.27 1.85.27.07.42.35.35.61-.05.23-.26.38-.47.38z"></path></g> <g id="gamepad" viewBox="0 0 24 24"><path d="M15 7.5V2H9v5.5l3 3 3-3zM7.5 9H2v6h5.5l3-3-3-3zM9 16.5V22h6v-5.5l-3-3-3 3zM16.5 9l-3 3 3 3H22V9h-5.5z"></path></g> <g id="headset" viewBox="0 0 24 24"><path d="M12 1c-4.97 0-9 4.03-9 9v7c0 1.66 1.34 3 3 3h3v-8H5v-2c0-3.87 3.13-7 7-7s7 3.13 7 7v2h-4v8h3c1.66 0 3-1.34 3-3v-7c0-4.97-4.03-9-9-9z"></path></g> - <g id="hotspot-disabled"><path fill-rule="evenodd" clip-rule="evenodd" d="M3.18333 2L2 3.175L4.24167 5.41667C3.15833 6.825 2.50833 8.58333 2.50833 10.4917C2.50833 12.7917 3.44167 14.875 4.95 16.3833L6.13333 15.2C4.925 14 4.175 12.3333 4.175 10.4917C4.175 9.03333 4.65 7.7 5.43333 6.60833L6.625 7.8C6.13333 8.575 5.84167 9.5 5.84167 10.4917C5.84167 11.875 6.40833 13.1167 7.30833 14.025L8.49167 12.8417C7.88333 12.2417 7.50833 11.4167 7.50833 10.4917C7.50833 9.96666 7.63333 9.46667 7.85 9.025L9.19167 10.3667C9.19167 10.3875 9.1875 10.4083 9.18333 10.4292C9.17917 10.45 9.175 10.4708 9.175 10.4917C9.175 10.95 9.36667 11.3667 9.66667 11.6667C9.96667 11.9667 10.3833 12.1583 10.8417 12.1583C10.8773 12.1583 10.9128 12.1523 10.9536 12.1453L10.975 12.1417L17.325 18.4917L18.5 17.3167L3.18333 2ZM15.5917 12.05C15.75 11.5583 15.8417 11.0333 15.8417 10.4917C15.8417 7.73333 13.6 5.49167 10.8417 5.49167C10.3 5.49167 9.775 5.575 9.28333 5.74167L10.7083 7.16667C10.75 7.15833 10.8 7.15833 10.8417 7.15833C12.6833 7.15833 14.175 8.65 14.175 10.4917C14.175 10.5333 14.175 10.5833 14.1667 10.625L15.5917 12.05ZM17.5083 10.4917C17.5083 6.80833 14.525 3.825 10.8417 3.825C9.825 3.825 8.86667 4.05 8 4.45833L6.76667 3.225C7.975 2.54167 9.35833 2.15833 10.8417 2.15833C15.4417 2.15833 19.175 5.89167 19.175 10.4917C19.175 11.975 18.7833 13.3667 18.1083 14.5667L16.8667 13.325C17.2833 12.4667 17.5083 11.5083 17.5083 10.4917Z" fill="#1B1B1F"></path></g> - <g id="hotspot-enabled"><path d="M2 
10.5C2 5.84343 5.808 2 10.5 2C15.192 2 19 5.84343 19 10.5C19 13.7534 17.2915 16.5158 15 18L14 16.5C15.9315 15.3233 17.3 13.1185 17.5 10.5C17.3 6.78713 14.257 3.71582 10.5 3.5C6.743 3.71582 3.7 6.78713 3.5 10.5C3.7 13.1185 5.0685 15.3233 7 16.5L6 18C3.7085 16.5158 2 13.7534 2 10.5Z" fill="#1B1B1F"></path><path d="M15.5 10.5C15.5 7.78374 13.2583 5.5 10.5 5.5C7.74167 5.5 5.5 7.78374 5.5 10.5C5.5 12.4786 6.50833 14.1171 8 15L9 13.5C7.84167 12.9285 7.16667 11.8503 7 10.5C7.16667 8.71761 8.65833 7.19794 10.5 7C12.3417 7.19794 13.8333 8.71761 14 10.5C13.8333 11.8503 13.1583 12.9285 12 13.5L13 15C14.4917 14.1171 15.5 12.4786 15.5 10.5Z" fill="#1B1B1F"></path><path d="M10.5 12C11.3284 12 12 11.3284 12 10.5C12 9.67157 11.3284 9 10.5 9C9.67157 9 9 9.67157 9 10.5C9 11.3284 9.67157 12 10.5 12Z" fill="#1B1B1F"></path></g> + <g id="hotspot-disabled"><path fill-rule="evenodd" clip-rule="evenodd" d="M3.18333 2L2 3.175L4.24167 5.41667C3.15833 6.825 2.50833 8.58333 2.50833 10.4917C2.50833 12.7917 3.44167 14.875 4.95 16.3833L6.13333 15.2C4.925 14 4.175 12.3333 4.175 10.4917C4.175 9.03333 4.65 7.7 5.43333 6.60833L6.625 7.8C6.13333 8.575 5.84167 9.5 5.84167 10.4917C5.84167 11.875 6.40833 13.1167 7.30833 14.025L8.49167 12.8417C7.88333 12.2417 7.50833 11.4167 7.50833 10.4917C7.50833 9.96666 7.63333 9.46667 7.85 9.025L9.19167 10.3667C9.19167 10.3875 9.1875 10.4083 9.18333 10.4292C9.17917 10.45 9.175 10.4708 9.175 10.4917C9.175 10.95 9.36667 11.3667 9.66667 11.6667C9.96667 11.9667 10.3833 12.1583 10.8417 12.1583C10.8773 12.1583 10.9128 12.1523 10.9536 12.1453L10.975 12.1417L17.325 18.4917L18.5 17.3167L3.18333 2ZM15.5917 12.05C15.75 11.5583 15.8417 11.0333 15.8417 10.4917C15.8417 7.73333 13.6 5.49167 10.8417 5.49167C10.3 5.49167 9.775 5.575 9.28333 5.74167L10.7083 7.16667C10.75 7.15833 10.8 7.15833 10.8417 7.15833C12.6833 7.15833 14.175 8.65 14.175 10.4917C14.175 10.5333 14.175 10.5833 14.1667 10.625L15.5917 12.05ZM17.5083 10.4917C17.5083 6.80833 14.525 3.825 10.8417 3.825C9.825 3.825 8.86667 4.05 8 4.45833L6.76667 3.225C7.975 2.54167 9.35833 2.15833 10.8417 2.15833C15.4417 2.15833 19.175 5.89167 19.175 10.4917C19.175 11.975 18.7833 13.3667 18.1083 14.5667L16.8667 13.325C17.2833 12.4667 17.5083 11.5083 17.5083 10.4917Z"></path></g> + <g id="hotspot-enabled"><path d="M2 10.5C2 5.84343 5.808 2 10.5 2C15.192 2 19 5.84343 19 10.5C19 13.7534 17.2915 16.5158 15 18L14 16.5C15.9315 15.3233 17.3 13.1185 17.5 10.5C17.3 6.78713 14.257 3.71582 10.5 3.5C6.743 3.71582 3.7 6.78713 3.5 10.5C3.7 13.1185 5.0685 15.3233 7 16.5L6 18C3.7085 16.5158 2 13.7534 2 10.5Z"></path><path d="M15.5 10.5C15.5 7.78374 13.2583 5.5 10.5 5.5C7.74167 5.5 5.5 7.78374 5.5 10.5C5.5 12.4786 6.50833 14.1171 8 15L9 13.5C7.84167 12.9285 7.16667 11.8503 7 10.5C7.16667 8.71761 8.65833 7.19794 10.5 7C12.3417 7.19794 13.8333 8.71761 14 10.5C13.8333 11.8503 13.1583 12.9285 12 13.5L13 15C14.4917 14.1171 15.5 12.4786 15.5 10.5Z"></path><path d="M10.5 12C11.3284 12 12 11.3284 12 10.5C12 9.67157 11.3284 9 10.5 9C9.67157 9 9 9.67157 9 10.5C9 11.3284 9.67157 12 10.5 12Z"></path></g> <g id="info-outline" viewBox="0 0 24 24"><path d="M11 17h2v-6h-2v6zm1-15C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8zM11 9h2V7h-2v2z"></path></g> <g id="language" viewBox="0 0 24 24"><path d="M11.99 2C6.47 2 2 6.48 2 12s4.47 10 9.99 10C17.52 22 22 17.52 22 12S17.52 2 11.99 2zm6.93 6h-2.95c-.32-1.25-.78-2.45-1.38-3.56 1.84.63 3.37 1.91 4.33 3.56zM12 4.04c.83 1.2 1.48 2.53 1.91 3.96h-3.82c.43-1.43 1.08-2.76 
1.91-3.96zM4.26 14C4.1 13.36 4 12.69 4 12s.1-1.36.26-2h3.38c-.08.66-.14 1.32-.14 2 0 .68.06 1.34.14 2H4.26zm.82 2h2.95c.32 1.25.78 2.45 1.38 3.56-1.84-.63-3.37-1.9-4.33-3.56zm2.95-8H5.08c.96-1.66 2.49-2.93 4.33-3.56C8.81 5.55 8.35 6.75 8.03 8zM12 19.96c-.83-1.2-1.48-2.53-1.91-3.96h3.82c-.43 1.43-1.08 2.76-1.91 3.96zM14.34 14H9.66c-.09-.66-.16-1.32-.16-2 0-.68.07-1.35.16-2h4.68c.09.65.16 1.32.16 2 0 .68-.07 1.34-.16 2zm.25 5.56c.6-1.11 1.06-2.31 1.38-3.56h2.95c-.96 1.65-2.49 2.93-4.33 3.56zM16.36 14c.08-.66.14-1.32.14-2 0-.68-.06-1.34-.14-2h3.38c.16.64.26 1.31.26 2s-.1 1.36-.26 2h-3.38z"></path></g> <g id="laptop-chromebook" viewBox="0 0 24 24"><path d="M22 18V3H2v15H0v2h24v-2h-2zm-8 0h-4v-1h4v1zm6-3H4V5h16v10z"></path></g>
diff --git a/chrome/browser/resources/side_panel/customize_chrome/BUILD.gn b/chrome/browser/resources/side_panel/customize_chrome/BUILD.gn index 6954eff..68edc24e 100644 --- a/chrome/browser/resources/side_panel/customize_chrome/BUILD.gn +++ b/chrome/browser/resources/side_panel/customize_chrome/BUILD.gn
@@ -28,15 +28,24 @@ "theme_snapshot.ts", ] - non_web_component_files = [ "customize_chrome_api_proxy.ts" ] + non_web_component_files = [ + "chrome_cart_proxy.ts", + "customize_chrome_api_proxy.ts", + ] html_files = [] foreach(f, web_component_files) { html_files += [ string_replace(f, ".ts", ".html") ] } - mojo_files_deps = [ "//chrome/browser/ui/webui/side_panel/customize_chrome:mojo_bindings_js__generator" ] - mojo_files = [ "$root_gen_dir/mojom-webui/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome.mojom-webui.js" ] + mojo_files_deps = [ + "//chrome/browser/cart:mojo_bindings_ts__generator", + "//chrome/browser/ui/webui/side_panel/customize_chrome:mojo_bindings_js__generator", + ] + mojo_files = [ + "$root_gen_dir/chrome/browser/cart/chrome_cart.mojom-webui.ts", + "$root_gen_dir/mojom-webui/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome.mojom-webui.js", + ] ts_composite = true ts_deps = [
diff --git a/chrome/browser/resources/side_panel/customize_chrome/cards.html b/chrome/browser/resources/side_panel/customize_chrome/cards.html index a11b7c98..262964b 100644 --- a/chrome/browser/resources/side_panel/customize_chrome/cards.html +++ b/chrome/browser/resources/side_panel/customize_chrome/cards.html
@@ -33,6 +33,10 @@ width: 16px; } + .card-option-name { + margin-inline-start: 40px; + } + iron-collapse { --iron-collapse-transition-duration: 300ms; width: 100%; @@ -71,6 +75,18 @@ title="[[item.name]]" on-change="onCardStatusChange_"> </cr-checkbox> </div> + <template is="dom-if" + if="[[showDiscountCheckbox_(item.id, item.enabled, discountCheckboxEligible_)]]"> + <div class="card"> + <div class="card-option-name">$i18n{modulesCartDiscountConsentAccept}</div> + <cr-checkbox class="card-checkbox" + checked="{{discountCheckbox_}}" + disabled="[[managedByPolicy_]]" + title="$i18n{modulesCartDiscountConsentAccept}" + on-change="onDiscountCheckboxChange_"> + </cr-checkbox> + </div> + </template> </template> </iron-collapse> </div>
diff --git a/chrome/browser/resources/side_panel/customize_chrome/cards.ts b/chrome/browser/resources/side_panel/customize_chrome/cards.ts index a0662533..36683976 100644 --- a/chrome/browser/resources/side_panel/customize_chrome/cards.ts +++ b/chrome/browser/resources/side_panel/customize_chrome/cards.ts
@@ -6,10 +6,12 @@ import 'chrome://resources/cr_elements/cr_toggle/cr_toggle.js'; import 'chrome://resources/cr_elements/policy/cr_policy_indicator.js'; import 'chrome://resources/polymer/v3_0/iron-collapse/iron-collapse.js'; +import './strings.m.js'; import {DomRepeatEvent, PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js'; import {getTemplate} from './cards.html.js'; +import {ChromeCartProxy} from './chrome_cart_proxy.js'; import {CustomizeChromePageHandlerInterface, ModuleSettings} from './customize_chrome.mojom-webui.js'; import {CustomizeChromeApiProxy} from './customize_chrome_api_proxy.js'; @@ -36,16 +38,33 @@ /** Whether the modules are managed by admin policies or not. */ managedByPolicy_: Boolean, + + // Discount checkbox is a workaround for crbug.com/1199465 and will be + // removed after module customization is better defined. Please avoid + // using similar pattern for other features. + discountCheckbox_: { + type: Boolean, + value: false, + }, + + discountCheckboxEligible_: { + type: Boolean, + value: false, + }, }; } + static get observers() { + return ['modulesChanged_(modules_.*)']; + } + private modules_: ModuleSettings[]; private show_: boolean; private managedByPolicy_: boolean; private pageHandler_: CustomizeChromePageHandlerInterface; private setModulesSettingsListenerId_: number|null = null; - - // TODO:(crbug.com/1401492): Add chrome cart discount consent support. + private discountCheckbox_: boolean; + private discountCheckboxEligible_: boolean; constructor() { super(); @@ -72,6 +91,19 @@ this.setModulesSettingsListenerId_!); } + private modulesChanged_() { + if (this.modules_.some(module => module.id === 'chrome_cart')) { + ChromeCartProxy.getHandler().getDiscountToggleVisible().then( + ({toggleVisible}) => { + this.discountCheckboxEligible_ = toggleVisible; + }); + + ChromeCartProxy.getHandler().getDiscountEnabled().then(({enabled}) => { + this.discountCheckbox_ = enabled; + }); + } + } + private onShowChange_(e: CustomEvent<boolean>) { this.show_ = e.detail; this.pageHandler_.setModulesVisible(this.show_); @@ -84,6 +116,17 @@ // TODO(crbug.com/1384258): Add metrics. } + + private showDiscountCheckbox_( + id: string, checked: boolean, eligible: boolean): boolean { + return id === 'chrome_cart' && checked && eligible; + } + + private onDiscountCheckboxChange_() { + if (this.discountCheckboxEligible_) { + ChromeCartProxy.getHandler().setDiscountEnabled(this.discountCheckbox_); + } + } } declare global {
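For readers of the cards.ts change above, the discount checkbox visibility rule is compact enough to restate on its own: the extra row only appears for the chrome_cart card, and only while that card is checked and the backend reports the discount toggle as eligible. The standalone function below is an illustrative sketch that mirrors showDiscountCheckbox_ from the diff; it is not part of the CL.

```typescript
// Sketch only: mirrors the showDiscountCheckbox_ predicate added in cards.ts.
function showDiscountCheckbox(
    id: string, cardEnabled: boolean, eligible: boolean): boolean {
  return id === 'chrome_cart' && cardEnabled && eligible;
}

console.assert(showDiscountCheckbox('chrome_cart', true, true));    // shown
console.assert(!showDiscountCheckbox('chrome_cart', false, true));  // card unchecked
console.assert(!showDiscountCheckbox('bar', true, true));           // not the cart card
```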
diff --git a/chrome/browser/resources/side_panel/customize_chrome/chrome_cart_proxy.ts b/chrome/browser/resources/side_panel/customize_chrome/chrome_cart_proxy.ts new file mode 100644 index 0000000..684407c --- /dev/null +++ b/chrome/browser/resources/side_panel/customize_chrome/chrome_cart_proxy.ts
@@ -0,0 +1,25 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import {CartHandler, CartHandlerRemote} from './chrome_cart.mojom-webui.js'; + +/** + * @fileoverview This file provides a class that exposes the Mojo handler + * interface used for sending requests from NTP chrome cart module JS to the + * browser and receiving the browser response. + */ + +let handler: CartHandlerRemote|null = null; + +export class ChromeCartProxy { + static getHandler(): CartHandlerRemote { + return handler || (handler = CartHandler.getRemote()); + } + + static setHandler(newHandler: CartHandlerRemote) { + handler = newHandler; + } + + private constructor() {} +}
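The new chrome_cart_proxy.ts follows the lazy-singleton proxy pattern used by the other customize-chrome Mojo interfaces: getHandler() binds the CartHandler remote on first use, and setHandler() lets tests substitute a mock before the element under test is created. Below is a minimal usage sketch that assumes only the handler methods referenced elsewhere in this CL (getDiscountToggleVisible, getDiscountEnabled, setDiscountEnabled); the helper function itself is hypothetical.

```typescript
// Illustrative only; not part of the CL.
import {ChromeCartProxy} from './chrome_cart_proxy.js';

// Reads the current discount state through the shared handler. The response
// shapes ({toggleVisible} and {enabled}) match the destructuring in cards.ts.
async function loadDiscountState():
    Promise<{eligible: boolean, enabled: boolean}> {
  const handler = ChromeCartProxy.getHandler();
  const {toggleVisible} = await handler.getDiscountToggleVisible();
  const {enabled} = await handler.getDiscountEnabled();
  return {eligible: toggleVisible, enabled};
}

// Flipping the checkbox writes back through the same handler:
//   ChromeCartProxy.getHandler().setDiscountEnabled(newValue);
// and tests install a mock before the element is created:
//   ChromeCartProxy.setHandler(mockCartHandlerRemote);
```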
diff --git a/chrome/browser/ui/BUILD.gn b/chrome/browser/ui/BUILD.gn index f757adba..db1b668 100644 --- a/chrome/browser/ui/BUILD.gn +++ b/chrome/browser/ui/BUILD.gn
@@ -2070,6 +2070,8 @@ "../ash/app_list/search/chrome_search_result.h", "../ash/app_list/search/common/icon_constants.cc", "../ash/app_list/search/common/icon_constants.h", + "../ash/app_list/search/common/keyword_util.cc", + "../ash/app_list/search/common/keyword_util.h", "../ash/app_list/search/common/search_result_util.cc", "../ash/app_list/search/common/search_result_util.h", "../ash/app_list/search/common/string_util.cc",
diff --git a/chrome/browser/ui/views/profiles/profile_picker_view_browsertest.cc b/chrome/browser/ui/views/profiles/profile_picker_view_browsertest.cc index 0e87703..14fda5a 100644 --- a/chrome/browser/ui/views/profiles/profile_picker_view_browsertest.cc +++ b/chrome/browser/ui/views/profiles/profile_picker_view_browsertest.cc
@@ -1559,7 +1559,8 @@ #endif // !BUILDFLAG(IS_CHROMEOS_LACROS) // TODO(crbug.com/1289326) Test is flaky on Linux CFI -#if BUILDFLAG(CFI_ICALL_CHECK) && BUILDFLAG(IS_LINUX) +// TODO(crbug.com/1403890) Test is also flaky on Linux (dbg) +#if BUILDFLAG(IS_LINUX) #define MAYBE_CreateSignedInEnterpriseProfileSettings \ DISABLED_CreateSignedInEnterpriseProfileSettings #else
diff --git a/chrome/browser/ui/views/tabs/new_tab_button.cc b/chrome/browser/ui/views/tabs/new_tab_button.cc index c205558..9366afe 100644 --- a/chrome/browser/ui/views/tabs/new_tab_button.cc +++ b/chrome/browser/ui/views/tabs/new_tab_button.cc
@@ -124,14 +124,17 @@ paint_as_active_subscription_ = GetWidget()->RegisterPaintAsActiveChangedCallback(base::BindRepeating( &NewTabButton::FrameColorsChanged, base::Unretained(this))); - // Set the initial state correctly. - FrameColorsChanged(); } void NewTabButton::RemovedFromWidget() { paint_as_active_subscription_ = {}; } +void NewTabButton::OnThemeChanged() { + views::ImageButton::OnThemeChanged(); + FrameColorsChanged(); +} + #if BUILDFLAG(IS_WIN) void NewTabButton::OnMouseReleased(const ui::MouseEvent& event) { if (!event.IsOnlyRightMouseButton()) {
diff --git a/chrome/browser/ui/views/tabs/new_tab_button.h b/chrome/browser/ui/views/tabs/new_tab_button.h index 7c2a308..e5617ec 100644 --- a/chrome/browser/ui/views/tabs/new_tab_button.h +++ b/chrome/browser/ui/views/tabs/new_tab_button.h
@@ -60,6 +60,7 @@ void OnBoundsChanged(const gfx::Rect& previous_bounds) override; void AddedToWidget() override; void RemovedFromWidget() override; + void OnThemeChanged() override; private: class HighlightPathGenerator;
diff --git a/chrome/browser/ui/views/tabs/tab.cc b/chrome/browser/ui/views/tabs/tab.cc index 579a5c23..fe22bf7 100644 --- a/chrome/browser/ui/views/tabs/tab.cc +++ b/chrome/browser/ui/views/tabs/tab.cc
@@ -688,8 +688,6 @@ paint_as_active_subscription_ = GetWidget()->RegisterPaintAsActiveChangedCallback(base::BindRepeating( &Tab::UpdateForegroundColors, base::Unretained(this))); - // Set the initial state correctly - UpdateForegroundColors(); } void Tab::RemovedFromWidget() {
diff --git a/chrome/browser/ui/views/tabs/tab_strip.cc b/chrome/browser/ui/views/tabs/tab_strip.cc index 61e6e31..07b133a 100644 --- a/chrome/browser/ui/views/tabs/tab_strip.cc +++ b/chrome/browser/ui/views/tabs/tab_strip.cc
@@ -1881,8 +1881,6 @@ // So we only get enter/exit messages when the mouse enters/exits the whole // tabstrip, even if it is entering/exiting a specific Tab, too. SetNotifyEnterExitOnChild(true); - - UpdateContrastRatioValues(); } void TabStrip::NewTabButtonPressed(const ui::Event& event) { @@ -2021,6 +2019,8 @@ // The contrast ratio for the separator between inactive tabs. constexpr float kTabSeparatorContrast = 2.5f; separator_color_ = get_blend(inactive_fg, kTabSeparatorContrast).color; + + SchedulePaint(); } void TabStrip::ShiftTabRelative(Tab* tab, int offset) { @@ -2168,8 +2168,6 @@ paint_as_active_subscription_ = GetWidget()->RegisterPaintAsActiveChangedCallback(base::BindRepeating( &TabStrip::UpdateContrastRatioValues, base::Unretained(this))); - // Set the initial state correctly. - UpdateContrastRatioValues(); } void TabStrip::RemovedFromWidget() { @@ -2177,6 +2175,11 @@ paint_as_active_subscription_ = {}; } +void TabStrip::OnThemeChanged() { + views::View::OnThemeChanged(); + UpdateContrastRatioValues(); +} + void TabStrip::OnGestureEvent(ui::GestureEvent* event) { switch (event->type()) { case ui::ET_GESTURE_LONG_TAP: {
diff --git a/chrome/browser/ui/views/tabs/tab_strip.h b/chrome/browser/ui/views/tabs/tab_strip.h index 1a7d9169..c5425818 100644 --- a/chrome/browser/ui/views/tabs/tab_strip.h +++ b/chrome/browser/ui/views/tabs/tab_strip.h
@@ -395,6 +395,7 @@ void OnMouseExited(const ui::MouseEvent& event) override; void AddedToWidget() override; void RemovedFromWidget() override; + void OnThemeChanged() override; // ui::EventHandler: void OnGestureEvent(ui::GestureEvent* event) override;
diff --git a/chrome/browser/ui/views/tabs/tab_strip_scroll_container.cc b/chrome/browser/ui/views/tabs/tab_strip_scroll_container.cc index feaf999..1285a3c 100644 --- a/chrome/browser/ui/views/tabs/tab_strip_scroll_container.cc +++ b/chrome/browser/ui/views/tabs/tab_strip_scroll_container.cc
@@ -313,8 +313,6 @@ GetWidget()->RegisterPaintAsActiveChangedCallback( base::BindRepeating(&TabStripScrollContainer::FrameColorsChanged, base::Unretained(this))); - // Set the initial state correctly. - FrameColorsChanged(); } void TabStripScrollContainer::RemovedFromWidget() {
diff --git a/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_page_handler.cc b/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_page_handler.cc index 66e4935..fc859397 100644 --- a/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_page_handler.cc +++ b/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_page_handler.cc
@@ -65,6 +65,13 @@ prefs::kNtpDisabledModules, base::BindRepeating(&CustomizeChromePageHandler::UpdateModulesSettings, base::Unretained(this))); + if (IsCartModuleEnabled()) { + pref_change_registrar_.Add( + prefs::kCartDiscountEnabled, + base::BindRepeating(&CustomizeChromePageHandler::UpdateModulesSettings, + base::Unretained(this))); + } + ntp_custom_background_service_observation_.Observe( ntp_custom_background_service_.get()); }
diff --git a/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_ui.cc b/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_ui.cc index b907645..acd6267a 100644 --- a/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_ui.cc +++ b/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_ui.cc
@@ -7,6 +7,7 @@ #include <string> #include <utility> +#include "chrome/browser/cart/cart_handler.h" #include "chrome/browser/profiles/profile.h" #include "chrome/browser/search/background/ntp_custom_background_service_factory.h" #include "chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_page_handler.h" @@ -61,6 +62,8 @@ {"showShortcutsToggle", IDS_NTP_CUSTOMIZE_SHOW_SHORTCUTS_LABEL}, // Card strings. {"showCardsToggleTitle", IDS_NTP_CUSTOMIZE_SHOW_CARDS_LABEL}, + {"modulesCartDiscountConsentAccept", + IDS_NTP_MODULES_CART_DISCOUNT_CONSENT_ACCEPT}, // Required by <managed-dialog>. {"controlledSettingPolicy", IDS_CONTROLLED_SETTING_POLICY}, {"close", IDS_NEW_TAB_VOICE_CLOSE_TOOLTIP}, @@ -88,6 +91,13 @@ page_factory_receiver_.Bind(std::move(receiver)); } +void CustomizeChromeUI::BindInterface( + mojo::PendingReceiver<chrome_cart::mojom::CartHandler> + pending_page_handler) { + cart_handler_ = std::make_unique<CartHandler>(std::move(pending_page_handler), + profile_, web_contents_); +} + void CustomizeChromeUI::CreatePageHandler( mojo::PendingRemote<side_panel::mojom::CustomizeChromePage> pending_page, mojo::PendingReceiver<side_panel::mojom::CustomizeChromePageHandler>
diff --git a/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_ui.h b/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_ui.h index 4151fa9..8cc0006d 100644 --- a/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_ui.h +++ b/chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome_ui.h
@@ -7,6 +7,7 @@ #include <memory> +#include "chrome/browser/cart/chrome_cart.mojom.h" #include "chrome/browser/ui/webui/side_panel/customize_chrome/customize_chrome.mojom.h" #include "content/public/browser/web_ui_controller.h" #include "mojo/public/cpp/bindings/pending_receiver.h" @@ -19,6 +20,7 @@ } // namespace content class CustomizeChromePageHandler; +class CartHandler; class Profile; // WebUI controller for chrome://customize-chrome-side-panel.top-chrome @@ -39,6 +41,11 @@ mojo::PendingReceiver< side_panel::mojom::CustomizeChromePageHandlerFactory> receiver); + // Instantiates the implementor of the chrome_cart::mojom::CartHandler + // mojo interface passing the pending receiver that will be internally bound. + void BindInterface( + mojo::PendingReceiver<chrome_cart::mojom::CartHandler> pending_receiver); + private: // side_panel::mojom::CustomizeChromePageHandlerFactory void CreatePageHandler( @@ -47,6 +54,7 @@ pending_page_handler) override; std::unique_ptr<CustomizeChromePageHandler> customize_chrome_page_handler_; + std::unique_ptr<CartHandler> cart_handler_; raw_ptr<Profile> profile_; raw_ptr<content::WebContents> web_contents_; mojo::Receiver<side_panel::mojom::CustomizeChromePageHandlerFactory>
diff --git a/chrome/browser/ui/webui/side_panel/user_notes/user_notes.mojom b/chrome/browser/ui/webui/side_panel/user_notes/user_notes.mojom index c309988..246e0b6 100644 --- a/chrome/browser/ui/webui/side_panel/user_notes/user_notes.mojom +++ b/chrome/browser/ui/webui/side_panel/user_notes/user_notes.mojom
@@ -47,6 +47,13 @@ mojo_base.mojom.Time last_modification_time; }; +// Used by the WebUI page to bootstrap bidirectional communication. +interface UserNotesPageHandlerFactory { + // The WebUI calls this method when the page is first initialized. + CreatePageHandler(pending_remote<UserNotesPage> page, + pending_receiver<UserNotesPageHandler> handler); +}; + // Browser-side handler for requests from WebUI page. interface UserNotesPageHandler { // Notify the backend that the UI is ready to be shown. @@ -71,3 +78,13 @@ // Called when deleting all notes for a url. DeleteNotesForUrl(url.mojom.Url url) => (bool success); }; + +// WebUI-side handler for requests from the browser. +interface UserNotesPage { + // Called when notes have changed. + // The change may originate from a local edit or from sync. + // This may or may not trigger a reload immediately. + // If the WebUI is in the background, reloading can be deferred until it + // is foregrounded. + NotesChanged(); +};
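The mojom change above introduces the usual WebUI bootstrap shape: the page binds the factory interface, hands the browser a pending_remote for UserNotesPage (so the browser can push NotesChanged()), and keeps the remote end of UserNotesPageHandler for its own requests. As a rough sketch of what the page-side wiring tends to look like, assuming the conventional *.mojom-webui.js codegen names (UserNotesPageHandlerFactory, UserNotesPageCallbackRouter, UserNotesPageHandlerRemote), none of which appear in this CL:

```typescript
// Illustrative sketch; binding names are assumed from the usual codegen and
// this file is not part of the CL.
import {UserNotesPageCallbackRouter, UserNotesPageHandlerFactory, UserNotesPageHandlerRemote} from './user_notes.mojom-webui.js';

class UserNotesApiProxy {
  // Receives browser -> page calls such as NotesChanged().
  callbackRouter = new UserNotesPageCallbackRouter();
  // Sends page -> browser requests (NewNoteFinished, GetNotesForCurrentTab, ...).
  handler = new UserNotesPageHandlerRemote();

  constructor() {
    // One factory call wires both directions at once, matching
    // UserNotesPageHandlerFactory.CreatePageHandler in the mojom above.
    UserNotesPageHandlerFactory.getRemote().createPageHandler(
        this.callbackRouter.$.bindNewPipeAndPassRemote(),
        this.handler.$.bindNewPipeAndPassReceiver());
  }
}

// A consumer would typically refresh its note list on the new notification:
//   proxy.callbackRouter.notesChanged.addListener(() => refreshNotes());
```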
diff --git a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler.cc b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler.cc index b6bb066c..5cdb678da 100644 --- a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler.cc +++ b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler.cc
@@ -9,7 +9,6 @@ #include "chrome/browser/power_bookmarks/power_bookmark_service_factory.h" #include "chrome/browser/profiles/profile.h" #include "chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.h" -#include "components/power_bookmarks/core/power_bookmark_service.h" #include "components/power_bookmarks/core/powers/power.h" #include "components/power_bookmarks/core/powers/power_overview.h" #include "components/sync/protocol/power_bookmark_specifics.pb.h" @@ -87,14 +86,20 @@ UserNotesPageHandler::UserNotesPageHandler( mojo::PendingReceiver<side_panel::mojom::UserNotesPageHandler> receiver, + mojo::PendingRemote<side_panel::mojom::UserNotesPage> page, Profile* profile, UserNotesSidePanelUI* user_notes_ui) : receiver_(this, std::move(receiver)), + page_(std::move(page)), profile_(profile), service_(PowerBookmarkServiceFactory::GetForBrowserContext(profile_)), - user_notes_ui_(user_notes_ui) {} + user_notes_ui_(user_notes_ui) { + service_->AddObserver(this); +} -UserNotesPageHandler::~UserNotesPageHandler() = default; +UserNotesPageHandler::~UserNotesPageHandler() { + service_->RemoveObserver(this); +} void UserNotesPageHandler::ShowUI() { auto embedder = user_notes_ui_->embedder(); @@ -175,3 +180,7 @@ bool success) { std::move(callback).Run(success); }, std::move(callback))); } + +void UserNotesPageHandler::OnPowersChanged() { + page_->NotesChanged(); +}
diff --git a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler.h b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler.h index f8d317a..3b21c296 100644 --- a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler.h +++ b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler.h
@@ -7,8 +7,10 @@ #include "base/memory/raw_ptr.h" #include "chrome/browser/ui/webui/side_panel/user_notes/user_notes.mojom.h" +#include "components/power_bookmarks/core/power_bookmark_service.h" #include "mojo/public/cpp/bindings/pending_receiver.h" #include "mojo/public/cpp/bindings/receiver.h" +#include "mojo/public/cpp/bindings/remote.h" namespace power_bookmarks { class PowerBookmarkService; @@ -17,10 +19,13 @@ class UserNotesSidePanelUI; class Profile; -class UserNotesPageHandler : public side_panel::mojom::UserNotesPageHandler { +class UserNotesPageHandler + : public side_panel::mojom::UserNotesPageHandler, + public power_bookmarks::PowerBookmarkService::Observer { public: explicit UserNotesPageHandler( mojo::PendingReceiver<side_panel::mojom::UserNotesPageHandler> receiver, + mojo::PendingRemote<side_panel::mojom::UserNotesPage> page, Profile* profile, UserNotesSidePanelUI* user_notes_ui); UserNotesPageHandler(const UserNotesPageHandler&) = delete; @@ -45,7 +50,11 @@ void SetCurrentTabUrlForTesting(GURL url) { current_tab_url_ = url; } private: + // power_bookmarks::PowerBookmarkService::Observer: + void OnPowersChanged() override; + mojo::Receiver<side_panel::mojom::UserNotesPageHandler> receiver_; + mojo::Remote<side_panel::mojom::UserNotesPage> page_; const raw_ptr<Profile> profile_; const raw_ptr<power_bookmarks::PowerBookmarkService> service_; raw_ptr<UserNotesSidePanelUI> user_notes_ui_ = nullptr;
diff --git a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler_unittest.cc b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler_unittest.cc index bd29bd7..08378caf 100644 --- a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler_unittest.cc +++ b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_page_handler_unittest.cc
@@ -13,18 +13,36 @@ #include "chrome/test/base/browser_with_test_window_test.h" #include "chrome/test/base/test_browser_window.h" #include "components/power_bookmarks/core/power_bookmark_features.h" +#include "testing/gmock/include/gmock/gmock.h" namespace { class TestUserNotesPageHandler : public UserNotesPageHandler { public: - explicit TestUserNotesPageHandler(Profile* profile) + explicit TestUserNotesPageHandler( + mojo::PendingRemote<side_panel::mojom::UserNotesPage> page, + Profile* profile) : UserNotesPageHandler( mojo::PendingReceiver<side_panel::mojom::UserNotesPageHandler>(), + std::move(page), profile, nullptr) {} }; +class MockUserNotesPage : public side_panel::mojom::UserNotesPage { + public: + MockUserNotesPage() = default; + ~MockUserNotesPage() override = default; + + mojo::PendingRemote<side_panel::mojom::UserNotesPage> BindAndGetRemote() { + DCHECK(!receiver_.is_bound()); + return receiver_.BindNewPipeAndPassRemote(); + } + mojo::Receiver<side_panel::mojom::UserNotesPage> receiver_{this}; + + MOCK_METHOD0(NotesChanged, void()); +}; + struct Note { GURL url; std::string text; @@ -35,7 +53,8 @@ void SetUp() override { features_.InitAndEnableFeature(power_bookmarks::kPowerBookmarkBackend); BrowserWithTestWindowTest::SetUp(); - handler_ = std::make_unique<TestUserNotesPageHandler>(profile()); + handler_ = std::make_unique<TestUserNotesPageHandler>( + page_.BindAndGetRemote(), profile()); GURL url1(u"https://url1"); GURL url2(u"https://url2"); @@ -61,12 +80,16 @@ TestUserNotesPageHandler* handler() { return handler_.get(); } + protected: + MockUserNotesPage page_; + private: std::unique_ptr<TestUserNotesPageHandler> handler_; base::test::ScopedFeatureList features_; }; TEST_F(UserNotesPageHandlerTest, GetNotes) { + EXPECT_CALL(page_, NotesChanged()).Times(1); side_panel::mojom::UserNotesPageHandlerAsyncWaiter waiter(handler()); handler()->SetCurrentTabUrlForTesting(GURL(u"https://url1")); auto notes = waiter.GetNotesForCurrentTab(); @@ -79,6 +102,7 @@ } TEST_F(UserNotesPageHandlerTest, GetNoteOverviews) { + EXPECT_CALL(page_, NotesChanged()).Times(1); side_panel::mojom::UserNotesPageHandlerAsyncWaiter waiter(handler()); handler()->SetCurrentTabUrlForTesting(GURL(u"https://url1")); auto note_overviews = waiter.GetNoteOverviews(""); @@ -86,6 +110,7 @@ } TEST_F(UserNotesPageHandlerTest, CreateAndDeleteNote) { + EXPECT_CALL(page_, NotesChanged()).Times(3); side_panel::mojom::UserNotesPageHandlerAsyncWaiter waiter(handler()); handler()->SetCurrentTabUrlForTesting(GURL(u"https://url5")); ASSERT_TRUE(waiter.NewNoteFinished("note5")); @@ -101,6 +126,7 @@ } TEST_F(UserNotesPageHandlerTest, UpdateNote) { + EXPECT_CALL(page_, NotesChanged()).Times(3); side_panel::mojom::UserNotesPageHandlerAsyncWaiter waiter(handler()); handler()->SetCurrentTabUrlForTesting(GURL(u"https://url5")); ASSERT_TRUE(waiter.NewNoteFinished("note5")); @@ -119,6 +145,7 @@ } TEST_F(UserNotesPageHandlerTest, DeleteNotesForUrl) { + EXPECT_CALL(page_, NotesChanged()).Times(2); side_panel::mojom::UserNotesPageHandlerAsyncWaiter waiter(handler()); ASSERT_TRUE(waiter.DeleteNotesForUrl(GURL(u"https://url1")));
diff --git a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.cc b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.cc index dea633b..83a07f38 100644 --- a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.cc +++ b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.cc
@@ -33,7 +33,17 @@ WEB_UI_CONTROLLER_TYPE_IMPL(UserNotesSidePanelUI) void UserNotesSidePanelUI::BindInterface( + mojo::PendingReceiver<side_panel::mojom::UserNotesPageHandlerFactory> + receiver) { + page_factory_receiver_.reset(); + page_factory_receiver_.Bind(std::move(receiver)); +} + +void UserNotesSidePanelUI::CreatePageHandler( + mojo::PendingRemote<side_panel::mojom::UserNotesPage> page, mojo::PendingReceiver<side_panel::mojom::UserNotesPageHandler> receiver) { + DCHECK(page); + user_notes_page_handler_ = std::make_unique<UserNotesPageHandler>( - std::move(receiver), Profile::FromWebUI(web_ui()), this); + std::move(receiver), std::move(page), Profile::FromWebUI(web_ui()), this); }
diff --git a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.h b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.h index 35a2205a..747437e0 100644 --- a/chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.h +++ b/chrome/browser/ui/webui/side_panel/user_notes/user_notes_side_panel_ui.h
@@ -13,19 +13,29 @@ class UserNotesPageHandler; -class UserNotesSidePanelUI : public ui::MojoBubbleWebUIController { +class UserNotesSidePanelUI + : public ui::MojoBubbleWebUIController, + public side_panel::mojom::UserNotesPageHandlerFactory { public: explicit UserNotesSidePanelUI(content::WebUI* web_ui); UserNotesSidePanelUI(const UserNotesSidePanelUI&) = delete; UserNotesSidePanelUI& operator=(const UserNotesSidePanelUI&) = delete; ~UserNotesSidePanelUI() override; - // Instantiates the implementor of the mojom::PageHandler mojo - // interface passing the pending receiver that will be internally bound. void BindInterface( - mojo::PendingReceiver<side_panel::mojom::UserNotesPageHandler> receiver); + mojo::PendingReceiver<side_panel::mojom::UserNotesPageHandlerFactory> + factory); private: + // side_panel::mojom::UserNotesPageHandlerFactory + void CreatePageHandler( + mojo::PendingRemote<side_panel::mojom::UserNotesPage> page, + mojo::PendingReceiver<side_panel::mojom::UserNotesPageHandler> receiver) + override; + + mojo::Receiver<side_panel::mojom::UserNotesPageHandlerFactory> + page_factory_receiver_{this}; + std::unique_ptr<UserNotesPageHandler> user_notes_page_handler_; WEB_UI_CONTROLLER_TYPE_DECL();
diff --git a/chrome/browser/window_management/screen_details_browsertest.cc b/chrome/browser/window_management/screen_details_browsertest.cc index e28f067..39cf8cf 100644 --- a/chrome/browser/window_management/screen_details_browsertest.cc +++ b/chrome/browser/window_management/screen_details_browsertest.cc
@@ -95,8 +95,7 @@ )JS"; ASSERT_TRUE(EvalJs(tab, kEnterFullscreenScript).ExtractBool()); ASSERT_TRUE(tab->IsFullscreen()); - DevToolsWindow* dev_tools_window = - DevToolsWindowTesting::OpenDevToolsWindowSync(tab, true); + DevToolsWindowTesting::OpenDevToolsWindowSync(tab, true); ASSERT_TRUE(tab->IsFullscreen()); if (FullscreenScreenSizeMatchesDisplayEnabled()) { // `window.screen` dimensions match the display size. @@ -108,9 +107,7 @@ EXPECT_NE(display_size, EvalJs(tab, "`${innerWidth}x${innerHeight}`")); EXPECT_EQ(display_size, EvalJs(tab, kGetCurrentScreenSizeScript)); - // Check dimensions again after closing dev tools and exiting fullscreen. - // Wait for a window `resize` event to propagate from the browser process. - DevToolsWindowTesting::CloseDevToolsWindowSync(dev_tools_window); + // Check dimensions again after exiting fullscreen and getting a `resize`. constexpr char kExitFullscreenAndResizeScript[] = R"JS( Promise.all([ document.exitFullscreen(), new Promise(r => { window.onresize = r; })
diff --git a/chrome/build/linux.pgo.txt b/chrome/build/linux.pgo.txt index 2545529..518af6b 100644 --- a/chrome/build/linux.pgo.txt +++ b/chrome/build/linux.pgo.txt
@@ -1 +1 @@ -chrome-linux-main-1672142204-140570d135de4ecb7f44d4800fe4d5e3c91294f8.profdata +chrome-linux-main-1672185343-c93a75b067fc8f1aed9f9d786c25060c46784bf2.profdata
diff --git a/chrome/build/mac-arm.pgo.txt b/chrome/build/mac-arm.pgo.txt index 369ba44d..59b6506 100644 --- a/chrome/build/mac-arm.pgo.txt +++ b/chrome/build/mac-arm.pgo.txt
@@ -1 +1 @@ -chrome-mac-arm-main-1672142204-9a4e1aee2f3760d8d401b0dc35b2a1f18bcaf02f.profdata +chrome-mac-arm-main-1672185343-9e09ee18e8d888910d41511ff7c70ee8af3be613.profdata
diff --git a/chrome/build/win32.pgo.txt b/chrome/build/win32.pgo.txt index c1918b5..dd2e98d 100644 --- a/chrome/build/win32.pgo.txt +++ b/chrome/build/win32.pgo.txt
@@ -1 +1 @@ -chrome-win32-main-1672153142-80723408a34901dff39ba42d4acd1c1fea8eef43.profdata +chrome-win32-main-1672194909-6d73fe10f77ecc313471cedabd61a31bf263bd91.profdata
diff --git a/chrome/build/win64.pgo.txt b/chrome/build/win64.pgo.txt index 0521440a..b6761f4 100644 --- a/chrome/build/win64.pgo.txt +++ b/chrome/build/win64.pgo.txt
@@ -1 +1 @@ -chrome-win64-main-1672153142-f863dd361409b1a66cd18c40b598036b38554a30.profdata +chrome-win64-main-1672194909-facf7677b4eafd8ffe127fa2ea16258a601e12a4.profdata
diff --git a/chrome/common/chrome_features.cc b/chrome/common/chrome_features.cc index 2e7312d..d37e64f 100644 --- a/chrome/common/chrome_features.cc +++ b/chrome/common/chrome_features.cc
@@ -1086,6 +1086,11 @@ "TreatUnsafeDownloadsAsActive", base::FEATURE_ENABLED_BY_DEFAULT); +// Block downloads delivered over insecure transports (i.e. not over HTTPS). +BASE_FEATURE(kBlockInsecureDownloads, + "BlockInsecureDownloads", + base::FEATURE_DISABLED_BY_DEFAULT); + // TrustSafetySentimentSurvey #if !BUILDFLAG(IS_ANDROID) // Enables surveying of users of Trust & Safety features with HaTS.
diff --git a/chrome/common/chrome_features.h b/chrome/common/chrome_features.h index 1001b8c..8696e68 100644 --- a/chrome/common/chrome_features.h +++ b/chrome/common/chrome_features.h
@@ -594,6 +594,8 @@ COMPONENT_EXPORT(CHROME_FEATURES) BASE_DECLARE_FEATURE(kTreatUnsafeDownloadsAsActive); +COMPONENT_EXPORT(CHROME_FEATURES) +BASE_DECLARE_FEATURE(kBlockInsecureDownloads); // TrustSafetySentimentSurvey COMPONENT_EXPORT(CHROME_FEATURES)
diff --git a/chrome/test/data/webui/side_panel/customize_chrome/cards_test.ts b/chrome/test/data/webui/side_panel/customize_chrome/cards_test.ts index e813c94..ce9e465 100644 --- a/chrome/test/data/webui/side_panel/customize_chrome/cards_test.ts +++ b/chrome/test/data/webui/side_panel/customize_chrome/cards_test.ts
@@ -6,14 +6,18 @@ import 'chrome://customize-chrome-side-panel.top-chrome/cards.js'; import {CardsElement} from 'chrome://customize-chrome-side-panel.top-chrome/cards.js'; +import {CartHandlerRemote} from 'chrome://customize-chrome-side-panel.top-chrome/chrome_cart.mojom-webui.js'; +import {ChromeCartProxy} from 'chrome://customize-chrome-side-panel.top-chrome/chrome_cart_proxy.js'; import {CustomizeChromePageCallbackRouter, CustomizeChromePageHandlerRemote, CustomizeChromePageRemote, ModuleSettings} from 'chrome://customize-chrome-side-panel.top-chrome/customize_chrome.mojom-webui.js'; import {CustomizeChromeApiProxy} from 'chrome://customize-chrome-side-panel.top-chrome/customize_chrome_api_proxy.js'; import {CrCheckboxElement} from 'chrome://resources/cr_elements/cr_checkbox/cr_checkbox.js'; import {CrToggleElement} from 'chrome://resources/cr_elements/cr_toggle/cr_toggle.js'; +import {loadTimeData} from 'chrome://resources/js/load_time_data.js'; import {IronCollapseElement} from 'chrome://resources/polymer/v3_0/iron-collapse/iron-collapse.js'; -import {assertEquals, assertNotEquals, assertTrue} from 'chrome://webui-test/chai_assert.js'; +import {assertDeepEquals, assertEquals, assertFalse, assertNotEquals, assertTrue} from 'chrome://webui-test/chai_assert.js'; import {waitAfterNextRender} from 'chrome://webui-test/polymer_test_util.js'; import {TestBrowserProxy} from 'chrome://webui-test/test_browser_proxy.js'; +import {isVisible} from 'chrome://webui-test/test_util.js'; import {assertNotStyle, assertStyle, installMock} from './test_support.js'; @@ -34,8 +38,8 @@ }); async function setupTest( - modules: ModuleSettings[], modulesVisible: boolean, - modulesManaged: boolean) { + modules: ModuleSettings[], modulesManaged: boolean, + modulesVisible: boolean) { callbackRouterRemote.setModulesSettings( modules, modulesManaged, modulesVisible); @@ -82,7 +86,8 @@ {id: 'bar', name: 'bar name', enabled: true}, {id: 'baz', name: 'baz name', enabled: false}, ], - visible, false); + /*modulesManaged=*/ false, + /*modulesVisible=*/ visible); // Assert. assertEquals(visible, getToggleElement().checked); @@ -108,8 +113,8 @@ {id: 'foo', name: 'foo name', enabled: true}, {id: 'bar', name: 'bar name', enabled: false}, ], - /*modulesVisible=*/ visible, - /*modulesManaged=*/ false); + /*modulesManaged=*/ false, + /*modulesVisible=*/ visible); assertEquals(visible, getCollapseElement().opened); getToggleElement().click(); @@ -133,8 +138,8 @@ {id: 'foo', name: 'foo name', enabled: true}, {id: 'bar', name: 'bar name', enabled: false}, ], - /*modulesVisible=*/ visible, - /*modulesManaged=*/ true); + /*modulesManaged=*/ true, + /*modulesVisible=*/ visible); const policyIndicator = customizeCards.shadowRoot!.querySelector('cr-policy-indicator'); @@ -151,5 +156,115 @@ }); }); + suite('Chrome Cart', () => { + let cartHandler: TestBrowserProxy<CartHandlerRemote>; + + suiteSetup(() => { + cartHandler = installMock(CartHandlerRemote, ChromeCartProxy.setHandler); + }); + + [true, false].forEach(visible => { + test(`Discount option ${(visible ? '' : 'not ')} visible`, async () => { + // Arrange. + cartHandler.setResultFor( + 'getDiscountToggleVisible', + Promise.resolve({toggleVisible: visible})); + cartHandler.setResultFor( + 'getDiscountEnabled', Promise.resolve({enabled: false})); + await setupTest( + [ + {id: 'chrome_cart', name: 'Chrome Cart', enabled: true}, + ], + /*modulesManaged=*/ false, + /*modulesVisible=*/ visible); + + // Assert. 
+ assertEquals(visible, getToggleElement().checked); + const cards = getCardsMap(); + assertCardCheckedStatus(cards, 'Chrome Cart', true); + if (visible) { + assertCardCheckedStatus( + cards, loadTimeData.getString('modulesCartDiscountConsentAccept'), + false); + } + const cardOptions = + customizeCards.shadowRoot!.querySelectorAll('.card-option-name'); + assertEquals(visible ? 1 : 0, cardOptions.length); + }); + }); + + test(`discount checkbox sets discount status`, async () => { + // Arrange. + cartHandler.setResultFor( + 'getDiscountToggleVisible', Promise.resolve({toggleVisible: true})); + cartHandler.setResultFor( + 'getDiscountEnabled', Promise.resolve({enabled: true})); + + await setupTest( + [ + {id: 'chrome_cart', name: 'Chrome Cart', enabled: true}, + ], + /*modulesManaged=*/ false, + /*modulesVisible=*/ true); + + // Act. + const cartCardOptionName = + customizeCards.shadowRoot!.querySelector('.card-option-name')!; + const discountCheckbox: CrCheckboxElement = + cartCardOptionName.nextElementSibling! as CrCheckboxElement; + discountCheckbox.click(); + + // Assert. + assertEquals(1, cartHandler.getCallCount('setDiscountEnabled')); + assertDeepEquals(false, cartHandler.getArgs('setDiscountEnabled')[0]); + }); + + test(`Unchecking cart card hides discount option`, async () => { + // Arrange. + cartHandler.setResultFor( + 'getDiscountToggleVisible', Promise.resolve({toggleVisible: true})); + cartHandler.setResultFor( + 'getDiscountEnabled', Promise.resolve({enabled: true})); + + await setupTest( + [ + {id: 'chrome_cart', name: 'Chrome Cart', enabled: true}, + {id: 'bar', name: 'bar name', enabled: false}, + ], + /*modulesManaged=*/ false, + /*modulesVisible=*/ true); + + assertTrue(getToggleElement().checked); + assertTrue(getCollapseElement().opened); + let cards = getCardsMap(); + assertCardCheckedStatus(cards, 'Chrome Cart', true); + assertCardCheckedStatus( + cards, loadTimeData.getString('modulesCartDiscountConsentAccept'), + true); + assertCardCheckedStatus(cards, 'bar name', false); + + const cartCardCheckbox = + cards.get('Chrome Cart')!.querySelector('cr-checkbox')!; + cartCardCheckbox.click(); + await handler.whenCalled('setModuleDisabled'); + callbackRouterRemote.setModulesSettings( + [ + {id: 'chrome_cart', name: 'Chrome Cart', enabled: false}, + {id: 'bar', name: 'bar name', enabled: false}, + ], + false, true); + await callbackRouterRemote.$.flushForTesting(); + await waitAfterNextRender(customizeCards); + + const cartCardOptionName = + customizeCards.shadowRoot!.querySelector('.card-option-name')!; + assertFalse(isVisible(cartCardOptionName)); + + cards = getCardsMap(); + assertCardCheckedStatus(cards, 'Chrome Cart', false); + assertCardCheckedStatus(cards, 'bar name', false); + }); + }); + // TODO(crbug.com/1384258): Add metric related tests. });
diff --git a/chrome/updater/BUILD.gn b/chrome/updater/BUILD.gn index 92d0ded..540c176 100644 --- a/chrome/updater/BUILD.gn +++ b/chrome/updater/BUILD.gn
@@ -709,6 +709,9 @@ if (is_win) { sources += [ + # TODO(crbug.com/1402743) - eliminate the dependency on //chrome/test. + "//chrome/test/base/process_inspector_win.cc", + "//chrome/test/base/process_inspector_win.h", "activity_impl_win_unittest.cc", "app/server/win/com_classes_legacy_unittest.cc", "auto_run_on_os_upgrade_task_unittest.cc",
diff --git a/chrome/updater/util/unittest_util.cc b/chrome/updater/util/unittest_util.cc index 692410d..d8ee74e 100644 --- a/chrome/updater/util/unittest_util.cc +++ b/chrome/updater/util/unittest_util.cc
@@ -34,6 +34,7 @@ #include <shlobj.h> #include "base/win/windows_version.h" +#include "chrome/test/base/process_inspector_win.h" #include "chrome/updater/util/win_util.h" #endif @@ -346,6 +347,46 @@ LOG(ERROR) << __func__ << ": failed to backup pml file"; } +base::FilePath::StringType PrintProcesses( + const base::FilePath::StringType& executable_name) { + class ExeNameProcessFilter : public base::ProcessFilter { + public: + explicit ExeNameProcessFilter( + const base::FilePath::StringType& executable_name) + : executable_name_(executable_name) {} + + bool Includes(const base::ProcessEntry& entry) const override { + return base::EqualsCaseInsensitiveASCII(entry.exe_file(), + executable_name_); + } + + private: + const base::FilePath::StringType executable_name_; + }; + + base::FilePath::StringType message(FILE_PATH_LITERAL("Found processes:")); + const base::FilePath::StringType demarcation(72, FILE_PATH_LITERAL('=')); + message += demarcation; + + ExeNameProcessFilter exe_name_filter(executable_name); + base::ProcessIterator process_iterator(&exe_name_filter); + const base::ProcessIterator::ProcessEntries& process_entries = + process_iterator.Snapshot(); + for (const base::ProcessEntry& entry : process_entries) { + message += base::StrCat( + {entry.exe_file(), FILE_PATH_LITERAL(", cmdline="), + [](base::ProcessId pid) { + std::unique_ptr<ProcessInspector> process_inspector = + ProcessInspector::Create(base::Process::OpenWithAccess( + pid, PROCESS_ALL_ACCESS | PROCESS_VM_READ)); + return process_inspector ? process_inspector->command_line() + : FILE_PATH_LITERAL("n/a"); + }(entry.pid())}); + } + + return message + demarcation; +} + #endif // BUILDFLAG(IS_WIN) } // namespace updater::test
diff --git a/chrome/updater/util/unittest_util.h b/chrome/updater/util/unittest_util.h index aad0f27..42d1035 100644 --- a/chrome/updater/util/unittest_util.h +++ b/chrome/updater/util/unittest_util.h
@@ -84,6 +84,10 @@ // `C:\\tools\\Procmon.exe`, and `pml_file` needs to be a valid path to a // procmon PML file returned from `StartProcmonLogging`. void StopProcmonLogging(const base::FilePath& pml_file); + +// Returns a log string of processes matching `executable_name`. +base::FilePath::StringType PrintProcesses( + const base::FilePath::StringType& executable_name); #endif } // namespace updater::test
diff --git a/chrome/updater/win/task_scheduler_unittest.cc b/chrome/updater/win/task_scheduler_unittest.cc index 050f195..2d15d5d 100644 --- a/chrome/updater/win/task_scheduler_unittest.cc +++ b/chrome/updater/win/task_scheduler_unittest.cc
@@ -69,7 +69,8 @@ void SetUp() override { task_scheduler_ = TaskScheduler::CreateInstance(GetTestScope()); EXPECT_TRUE(IsServiceRunning(SERVICE_SCHEDULE)); - ASSERT_FALSE(test::IsProcessRunning(kTestProcessExecutableName)); + ASSERT_FALSE(test::IsProcessRunning(kTestProcessExecutableName)) + << test::PrintProcesses(kTestProcessExecutableName); } void TearDown() override {
diff --git a/chromeos/CHROMEOS_LKGM b/chromeos/CHROMEOS_LKGM index ca96929bd..40dec49 100644 --- a/chromeos/CHROMEOS_LKGM +++ b/chromeos/CHROMEOS_LKGM
@@ -1 +1 @@ -15292.0.0 \ No newline at end of file +15294.0.0 \ No newline at end of file
diff --git a/components/app_restore/full_restore_read_and_save_unittest.cc b/components/app_restore/full_restore_read_and_save_unittest.cc index a824529..3eb14e9 100644 --- a/components/app_restore/full_restore_read_and_save_unittest.cc +++ b/components/app_restore/full_restore_read_and_save_unittest.cc
@@ -318,7 +318,7 @@ window->SetProperty(aura::client::kAppType, static_cast<int>(ash::AppType::LACROS)); window->SetProperty(app_restore::kLacrosWindowId, - std::string(kLacrosWindowId)); + std::string(lacros_window_id)); window->SetProperty(app_restore::kWindowIdKey, restore_session_id); window->SetProperty(app_restore::kRestoreWindowIdKey, restore_window_id); return window;
diff --git a/components/autofill/core/browser/payments/payments_client_unittest.cc b/components/autofill/core/browser/payments/payments_client_unittest.cc index 2b85cf8b..fca6fe9 100644 --- a/components/autofill/core/browser/payments/payments_client_unittest.cc +++ b/components/autofill/core/browser/payments/payments_client_unittest.cc
@@ -1458,9 +1458,6 @@ TEST_F(PaymentsClientTest, UploadSuccessGetDetailsForEnrollmentResponseDetailsPresent) { - base::test::ScopedFeatureList scoped_feature_list; - scoped_feature_list.InitAndEnableFeature( - features::kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponse); StartUploading(/*include_cvc=*/true); IssueOAuthToken(); ReturnResponse(net::HTTP_OK,
diff --git a/components/autofill/core/browser/payments/payments_requests/upload_card_request.cc b/components/autofill/core/browser/payments/payments_requests/upload_card_request.cc index 09e43a7..d573cc65 100644 --- a/components/autofill/core/browser/payments/payments_requests/upload_card_request.cc +++ b/components/autofill/core/browser/payments/payments_requests/upload_card_request.cc
@@ -152,11 +152,8 @@ } } - if (base::FeatureList::IsEnabled( - features:: - kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponse) && - upload_card_response_details_.virtual_card_enrollment_state == - CreditCard::VirtualCardEnrollmentState::UNENROLLED_AND_ELIGIBLE) { + if (upload_card_response_details_.virtual_card_enrollment_state == + CreditCard::VirtualCardEnrollmentState::UNENROLLED_AND_ELIGIBLE) { const auto* virtual_card_enrollment_data = virtual_card_metadata->FindKeyOfType("virtual_card_enrollment_data", base::Value::Type::DICTIONARY);
diff --git a/components/autofill/core/common/autofill_payments_features.cc b/components/autofill/core/common/autofill_payments_features.cc index ffb6b70054..13b644f 100644 --- a/components/autofill/core/common/autofill_payments_features.cc +++ b/components/autofill/core/common/autofill_payments_features.cc
@@ -67,14 +67,6 @@ "AutofillEnableCvcForVcnYellowPath", base::FEATURE_DISABLED_BY_DEFAULT); -// When enabled, the GetDetailsForEnrollResponseDetails in the -// UploadCardResponseDetails will be parsed, which will allow the Virtual Card -// Enrollment flow to skip making a new GetDetailsForEnroll request. This is an -// optimization to improve the latency of the Virtual Card Enrollment flow. -BASE_FEATURE(kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponse, - "AutofillEnableGetDetailsForEnrollParsingInUploadCardResponse", - base::FEATURE_ENABLED_BY_DEFAULT); - // When enabled, a progress dialog will display while authenticating with FIDO. // TODO(crbug.com/1337380): Clean up kAutofillEnableFIDOProgressDialog when it's // fully rolled out.
diff --git a/components/autofill/core/common/autofill_payments_features.h b/components/autofill/core/common/autofill_payments_features.h index 2cd64db7..bb6ff02 100644 --- a/components/autofill/core/common/autofill_payments_features.h +++ b/components/autofill/core/common/autofill_payments_features.h
@@ -19,8 +19,6 @@ BASE_DECLARE_FEATURE(kAutofillEnableCardArtImage); BASE_DECLARE_FEATURE(kAutofillEnableCardProductName); BASE_DECLARE_FEATURE(kAutofillEnableCvcForVcnYellowPath); -BASE_DECLARE_FEATURE( - kAutofillEnableGetDetailsForEnrollParsingInUploadCardResponse); BASE_DECLARE_FEATURE(kAutofillEnableFIDOProgressDialog); BASE_DECLARE_FEATURE(kAutofillEnableManualFallbackForVirtualCards); BASE_DECLARE_FEATURE(kAutofillEnableMerchantOptOutErrorDialog);
diff --git a/components/commerce/core/android/BUILD.gn b/components/commerce/core/android/BUILD.gn index c9a9621..af788ef9 100644 --- a/components/commerce/core/android/BUILD.gn +++ b/components/commerce/core/android/BUILD.gn
@@ -6,13 +6,23 @@ android_library("core_java") { annotation_processor_deps = [ "//base/android/jni_generator:jni_processor" ] - sources = - [ "java/src/org/chromium/components/commerce/core/ShoppingService.java" ] + sources = [ + "java/src/org/chromium/components/commerce/core/CommerceSubscription.java", + "java/src/org/chromium/components/commerce/core/ShoppingService.java", + "java/src/org/chromium/components/commerce/core/SubscriptionsObserver.java", + ] deps = [ + "//base:base_java", "//base:jni_java", "//build/android:build_java", "//third_party/androidx:androidx_annotation_annotation_java", "//url:gurl_java", ] + srcjar_deps = [ ":java_enum_srcjar" ] +} + +java_cpp_enum("java_enum_srcjar") { + sources = + [ "//components/commerce/core/subscriptions/commerce_subscription.h" ] }
diff --git a/components/commerce/core/android/java/src/org/chromium/components/commerce/core/CommerceSubscription.java b/components/commerce/core/android/java/src/org/chromium/components/commerce/core/CommerceSubscription.java new file mode 100644 index 0000000..1beafd3 --- /dev/null +++ b/components/commerce/core/android/java/src/org/chromium/components/commerce/core/CommerceSubscription.java
@@ -0,0 +1,42 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.components.commerce.core; + +import androidx.annotation.Nullable; + +/** Represents the information for one commerce subscription entry. */ +public class CommerceSubscription { + public final @SubscriptionType int type; + public final @IdentifierType int idType; + public final String id; + public final @ManagementType int managementType; + @Nullable + public final UserSeenOffer userSeenOffer; + + /** User seen offer data upon price tracking subscribing. */ + public static class UserSeenOffer { + /** Associated offer id. */ + public final String offerId; + /** The price upon subscribing. */ + public final long userSeenPrice; + /** Country code of the offer. */ + public final String countryCode; + + public UserSeenOffer(String offerId, long userSeenPrice, String countryCode) { + this.offerId = offerId; + this.userSeenPrice = userSeenPrice; + this.countryCode = countryCode; + } + } + + public CommerceSubscription(@SubscriptionType int type, @IdentifierType int idType, String id, + @ManagementType int managementType, @Nullable UserSeenOffer userSeenOffer) { + this.type = type; + this.idType = idType; + this.id = id; + this.managementType = managementType; + this.userSeenOffer = userSeenOffer; + } +}
diff --git a/components/commerce/core/android/java/src/org/chromium/components/commerce/core/ShoppingService.java b/components/commerce/core/android/java/src/org/chromium/components/commerce/core/ShoppingService.java index 6df1dc12..23e0492 100644 --- a/components/commerce/core/android/java/src/org/chromium/components/commerce/core/ShoppingService.java +++ b/components/commerce/core/android/java/src/org/chromium/components/commerce/core/ShoppingService.java
@@ -6,11 +6,15 @@ import androidx.annotation.VisibleForTesting; +import org.chromium.base.Callback; +import org.chromium.base.ObserverList; import org.chromium.base.annotations.CalledByNative; import org.chromium.base.annotations.JNINamespace; import org.chromium.base.annotations.NativeMethods; import org.chromium.url.GURL; +import java.util.ArrayList; +import java.util.List; import java.util.Optional; /** A central hub for accessing shopping and product information. */ @@ -88,6 +92,9 @@ /** A pointer to the native side of the object. */ private long mNativeShoppingServiceAndroid; + private final ObserverList<SubscriptionsObserver> mSubscriptionsObservers = + new ObserverList<>(); + /** Private constructor to ensure construction only happens by native. */ private ShoppingService(long nativePtr) { mNativeShoppingServiceAndroid = nativePtr; @@ -151,9 +158,36 @@ ShoppingServiceJni.get().scheduleSavedProductUpdate(mNativeShoppingServiceAndroid, this); } + /** Create new subscriptions in batch. */ + public void subscribe(CommerceSubscription sub, Callback<Boolean> callback) { + if (mNativeShoppingServiceAndroid == 0) return; + + assert sub.userSeenOffer != null; + ShoppingServiceJni.get().subscribe(mNativeShoppingServiceAndroid, this, sub.type, + sub.idType, sub.managementType, sub.id, sub.userSeenOffer.offerId, + sub.userSeenOffer.userSeenPrice, sub.userSeenOffer.countryCode, callback); + } + + /** Delete existing subscriptions in batch. */ + public void unsubscribe(CommerceSubscription sub, Callback<Boolean> callback) { + if (mNativeShoppingServiceAndroid == 0) return; + + ShoppingServiceJni.get().unsubscribe(mNativeShoppingServiceAndroid, this, sub.type, + sub.idType, sub.managementType, sub.id, callback); + } + + public void addSubscriptionsObserver(SubscriptionsObserver observer) { + mSubscriptionsObservers.addObserver(observer); + } + + public void removeSubscriptionsObserver(SubscriptionsObserver observer) { + mSubscriptionsObservers.removeObserver(observer); + } + @CalledByNative private void destroy() { mNativeShoppingServiceAndroid = 0; + mSubscriptionsObservers.clear(); } @CalledByNative @@ -195,6 +229,31 @@ callback.onResult(url, info); } + @CalledByNative + private List<CommerceSubscription> createSubscriptionAndAddToList( + List<CommerceSubscription> subs, int type, int idType, int managementType, String id) { + if (subs == null) { + subs = new ArrayList<>(); + } + CommerceSubscription sub = new CommerceSubscription(type, idType, id, managementType, null); + subs.add(sub); + return subs; + } + + @CalledByNative + private void onSubscribe(List<CommerceSubscription> subs, boolean succeeded) { + for (SubscriptionsObserver o : mSubscriptionsObservers) { + o.onSubscribe(subs, succeeded); + } + } + + @CalledByNative + private void onUnsubscribe(List<CommerceSubscription> subs, boolean succeeded) { + for (SubscriptionsObserver o : mSubscriptionsObservers) { + o.onUnsubscribe(subs, succeeded); + } + } + @NativeMethods interface Natives { void getProductInfoForUrl(long nativeShoppingServiceAndroid, ShoppingService caller, @@ -205,5 +264,10 @@ GURL url, MerchantInfoCallback callback); void fetchPriceEmailPref(long nativeShoppingServiceAndroid, ShoppingService caller); void scheduleSavedProductUpdate(long nativeShoppingServiceAndroid, ShoppingService caller); + void subscribe(long nativeShoppingServiceAndroid, ShoppingService caller, int type, + int idType, int managementType, String id, String seenOfferId, long seenPrice, + String seenCountry, Callback<Boolean> callback); + void 
unsubscribe(long nativeShoppingServiceAndroid, ShoppingService caller, int type, + int idType, int managementType, String id, Callback<Boolean> callback); } }
diff --git a/components/commerce/core/android/java/src/org/chromium/components/commerce/core/SubscriptionsObserver.java b/components/commerce/core/android/java/src/org/chromium/components/commerce/core/SubscriptionsObserver.java new file mode 100644 index 0000000..cfff60a --- /dev/null +++ b/components/commerce/core/android/java/src/org/chromium/components/commerce/core/SubscriptionsObserver.java
@@ -0,0 +1,26 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.components.commerce.core; + +import java.util.List; + +/** An observer to notify that a (un)subscribe request has finished. */ +public interface SubscriptionsObserver { + /** + * Invoked when a subscribe request has finished. + * + * @param subscriptions The list of subscriptions being added. + * @param succeeded Whether the subscriptions are successfully added. + */ + void onSubscribe(List<CommerceSubscription> subscriptions, boolean succeeded); + + /** + * Invoked when an unsubscribe request has finished. + * + * @param subscriptions The list of subscriptions being removed. + * @param succeeded Whether the subscriptions are successfully removed. + */ + void onUnsubscribe(List<CommerceSubscription> subscriptions, boolean succeeded); +}
diff --git a/components/commerce/core/android/shopping_service_android.cc b/components/commerce/core/android/shopping_service_android.cc index a30d2c82..e1e0963 100644 --- a/components/commerce/core/android/shopping_service_android.cc +++ b/components/commerce/core/android/shopping_service_android.cc
@@ -4,13 +4,18 @@ #include "components/commerce/core/android/shopping_service_android.h" +#include "base/android/callback_android.h" #include "base/android/jni_string.h" #include "base/bind.h" #include "components/commerce/core/shopping_service_jni_headers/ShoppingService_jni.h" +#include "components/commerce/core/subscriptions/commerce_subscription.h" +#include "third_party/abseil-cpp/absl/types/optional.h" #include "url/android/gurl_android.h" #include "url/gurl.h" +using base::android::ConvertJavaStringToUTF8; using base::android::ConvertUTF8ToJavaString; +using base::android::RunBooleanCallbackAndroid; using base::android::ScopedJavaLocalRef; namespace commerce { @@ -19,6 +24,7 @@ : shopping_service_(service), weak_ptr_factory_(this) { java_ref_.Reset(Java_ShoppingService_create( base::android::AttachCurrentThread(), reinterpret_cast<jlong>(this))); + scoped_subscriptions_observer_.Observe(shopping_service_); } ShoppingServiceAndroid::~ShoppingServiceAndroid() { @@ -139,4 +145,90 @@ shopping_service_->ScheduleSavedProductUpdate(); } +void ShoppingServiceAndroid::Subscribe( + JNIEnv* env, + const JavaParamRef<jobject>& obj, + jint j_type, + jint j_id_type, + jint j_management_type, + const JavaParamRef<jstring>& j_id, + const JavaParamRef<jstring>& j_seen_offer_id, + jlong j_seen_price, + const JavaParamRef<jstring>& j_seen_country, + const JavaParamRef<jobject>& j_callback) { + std::string id = ConvertJavaStringToUTF8(j_id); + std::string seen_offer_id = ConvertJavaStringToUTF8(j_seen_offer_id); + std::string seen_country = ConvertJavaStringToUTF8(j_seen_country); + CHECK(!id.empty()); + + auto user_seen_offer = absl::make_optional<UserSeenOffer>( + seen_offer_id, j_seen_price, seen_country); + CommerceSubscription sub(SubscriptionType(j_type), IdentifierType(j_id_type), + id, ManagementType(j_management_type), + kUnknownSubscriptionTimestamp, + std::move(user_seen_offer)); + std::unique_ptr<std::vector<CommerceSubscription>> subs = + std::make_unique<std::vector<CommerceSubscription>>(); + subs->push_back(std::move(sub)); + + auto callback = base::BindOnce(&RunBooleanCallbackAndroid, + ScopedJavaGlobalRef<jobject>(j_callback)); + + shopping_service_->Subscribe(std::move(subs), std::move(callback)); +} + +void ShoppingServiceAndroid::Unsubscribe( + JNIEnv* env, + const JavaParamRef<jobject>& obj, + jint j_type, + jint j_id_type, + jint j_management_type, + const JavaParamRef<jstring>& j_id, + const JavaParamRef<jobject>& j_callback) { + std::string id = ConvertJavaStringToUTF8(j_id); + CHECK(!id.empty()); + + CommerceSubscription sub(SubscriptionType(j_type), IdentifierType(j_id_type), + id, ManagementType(j_management_type), + kUnknownSubscriptionTimestamp, absl::nullopt); + std::unique_ptr<std::vector<CommerceSubscription>> subs = + std::make_unique<std::vector<CommerceSubscription>>(); + subs->push_back(std::move(sub)); + + auto callback = base::BindOnce(&RunBooleanCallbackAndroid, + ScopedJavaGlobalRef<jobject>(j_callback)); + + shopping_service_->Unsubscribe(std::move(subs), std::move(callback)); +} + +void ShoppingServiceAndroid::OnSubscribe( + const std::vector<CommerceSubscription>& subscriptions, + bool succeeded) { + JNIEnv* env = base::android::AttachCurrentThread(); + Java_ShoppingService_onSubscribe( + env, java_ref_, ConvertSubscriptionsToJavaList(subscriptions), succeeded); +} + +void ShoppingServiceAndroid::OnUnsubscribe( + const std::vector<CommerceSubscription>& subscriptions, + bool succeeded) { + JNIEnv* env = base::android::AttachCurrentThread(); + 
Java_ShoppingService_onUnsubscribe( + env, java_ref_, ConvertSubscriptionsToJavaList(subscriptions), succeeded); +} + +ScopedJavaLocalRef<jobject> +ShoppingServiceAndroid::ConvertSubscriptionsToJavaList( + const std::vector<CommerceSubscription>& subscriptions) { + JNIEnv* env = base::android::AttachCurrentThread(); + ScopedJavaLocalRef<jobject> j_subs = nullptr; + for (const auto& sub : subscriptions) { + j_subs = Java_ShoppingService_createSubscriptionAndAddToList( + env, java_ref_, j_subs, static_cast<int>(sub.type), + static_cast<int>(sub.id_type), static_cast<int>(sub.management_type), + ConvertUTF8ToJavaString(env, sub.id)); + } + return j_subs; +} + } // namespace commerce
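Note: on the native side, `ShoppingServiceAndroid::Subscribe` wraps the JNI arguments into a single `CommerceSubscription` (with an optional `UserSeenOffer`) and forwards it as a one-element batch. A minimal sketch of the same call pattern from plain C++ code, under the constructor signatures shown above; the enum value names (`kPriceTrack`, `kOfferId`, `kUserManaged`) are assumed from the wider commerce code and do not appear in this diff:

    #include <cstdint>
    #include <memory>
    #include <string>
    #include <utility>
    #include <vector>

    #include "base/bind.h"
    #include "components/commerce/core/shopping_service.h"
    #include "components/commerce/core/subscriptions/commerce_subscription.h"
    #include "third_party/abseil-cpp/absl/types/optional.h"

    namespace commerce {

    // Builds one price-tracking subscription and hands it to ShoppingService,
    // mirroring what the JNI bridge above does for the Java caller.
    void TrackOffer(ShoppingService* service,
                    const std::string& offer_id,
                    int64_t seen_price,
                    const std::string& country_code) {
      auto user_seen_offer = absl::make_optional<UserSeenOffer>(
          offer_id, seen_price, country_code);
      CommerceSubscription sub(
          SubscriptionType::kPriceTrack, IdentifierType::kOfferId, offer_id,
          ManagementType::kUserManaged, kUnknownSubscriptionTimestamp,
          std::move(user_seen_offer));

      auto subs = std::make_unique<std::vector<CommerceSubscription>>();
      subs->push_back(std::move(sub));

      service->Subscribe(std::move(subs), base::BindOnce([](bool succeeded) {
                           // Registered SubscriptionsObservers are notified
                           // with the same result.
                         }));
    }

    }  // namespace commerce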
diff --git a/components/commerce/core/android/shopping_service_android.h b/components/commerce/core/android/shopping_service_android.h index 162dd8b..4fd72ab 100644 --- a/components/commerce/core/android/shopping_service_android.h +++ b/components/commerce/core/android/shopping_service_android.h
@@ -9,8 +9,10 @@ #include "base/android/scoped_java_ref.h" #include "base/memory/raw_ptr.h" #include "base/memory/weak_ptr.h" +#include "base/scoped_observation.h" #include "base/supports_user_data.h" #include "components/commerce/core/shopping_service.h" +#include "components/commerce/core/subscriptions/subscriptions_observer.h" using base::android::JavaParamRef; using base::android::ScopedJavaGlobalRef; @@ -21,8 +23,10 @@ namespace commerce { class ShoppingService; +struct CommerceSubscription; -class ShoppingServiceAndroid : public base::SupportsUserData::Data { +class ShoppingServiceAndroid : public base::SupportsUserData::Data, + public SubscriptionsObserver { public: ShoppingServiceAndroid(const ShoppingServiceAndroid&) = delete; ShoppingServiceAndroid& operator=(const ShoppingServiceAndroid&) = delete; @@ -50,6 +54,25 @@ void ScheduleSavedProductUpdate(JNIEnv* env, const JavaParamRef<jobject>& obj); + void Subscribe(JNIEnv* env, + const JavaParamRef<jobject>& obj, + jint j_type, + jint j_id_type, + jint j_management_type, + const JavaParamRef<jstring>& j_id, + const JavaParamRef<jstring>& j_seen_offer_id, + jlong j_seen_price, + const JavaParamRef<jstring>& j_seen_country, + const JavaParamRef<jobject>& j_callback); + + void Unsubscribe(JNIEnv* env, + const JavaParamRef<jobject>& obj, + jint j_type, + jint j_id_type, + jint j_management_type, + const JavaParamRef<jstring>& j_id, + const JavaParamRef<jobject>& j_callback); + ScopedJavaGlobalRef<jobject> java_ref() { return java_ref_; } private: @@ -63,6 +86,13 @@ const GURL& url, absl::optional<MerchantInfo> info); + void OnSubscribe(const std::vector<CommerceSubscription>& subscriptions, + bool succeeded) override; + void OnUnsubscribe(const std::vector<CommerceSubscription>& subscriptions, + bool succeeded) override; + ScopedJavaLocalRef<jobject> ConvertSubscriptionsToJavaList( + const std::vector<CommerceSubscription>& subscriptions); + // A handle to the backing shopping service. This is held as a raw pointer // since this object's lifecycle is tied to the service itself. This object // will always be destroyed before the service is. @@ -71,6 +101,9 @@ // A handle to the java side of this object. ScopedJavaGlobalRef<jobject> java_ref_; + base::ScopedObservation<ShoppingService, SubscriptionsObserver> + scoped_subscriptions_observer_{this}; + base::WeakPtrFactory<ShoppingServiceAndroid> weak_ptr_factory_; };
diff --git a/components/commerce/core/shopping_service.cc b/components/commerce/core/shopping_service.cc index 0308fd3a..c6914cef 100644 --- a/components/commerce/core/shopping_service.cc +++ b/components/commerce/core/shopping_service.cc
@@ -30,6 +30,7 @@ #include "components/commerce/core/shopping_power_bookmark_data_provider.h" #include "components/commerce/core/subscriptions/commerce_subscription.h" #include "components/commerce/core/subscriptions/subscriptions_manager.h" +#include "components/commerce/core/subscriptions/subscriptions_observer.h" #include "components/commerce/core/web_wrapper.h" #include "components/grit/components_resources.h" #include "components/optimization_guide/core/new_optimization_guide_decider.h" @@ -693,6 +694,20 @@ } } +void ShoppingService::AddSubscriptionsObserver( + SubscriptionsObserver* observer) { + if (subscriptions_manager_) { + subscriptions_manager_->AddObserver(observer); + } +} + +void ShoppingService::RemoveSubscriptionsObserver( + SubscriptionsObserver* observer) { + if (subscriptions_manager_) { + subscriptions_manager_->RemoveObserver(observer); + } +} + void ShoppingService::FetchPriceEmailPref() { if (account_checker_) { account_checker_->FetchPriceEmailPref();
diff --git a/components/commerce/core/shopping_service.h b/components/commerce/core/shopping_service.h index 24dce19..fab2c1b 100644 --- a/components/commerce/core/shopping_service.h +++ b/components/commerce/core/shopping_service.h
@@ -16,6 +16,7 @@ #include "base/memory/scoped_refptr.h" #include "base/memory/weak_ptr.h" #include "base/scoped_observation.h" +#include "base/scoped_observation_traits.h" #include "base/sequence_checker.h" #include "base/supports_user_data.h" #include "components/commerce/core/account_checker.h" @@ -95,6 +96,7 @@ class ShoppingPowerBookmarkDataProvider; class ShoppingBookmarkModelObserver; class SubscriptionsManager; +class SubscriptionsObserver; class WebWrapper; struct CommerceSubscription; @@ -214,6 +216,11 @@ std::unique_ptr<std::vector<CommerceSubscription>> subscriptions, base::OnceCallback<void(bool)> callback); + // Methods to register or remove SubscriptionsObserver, which will be notified + // when a (un)subscribe request has finished. + void AddSubscriptionsObserver(SubscriptionsObserver* observer); + void RemoveSubscriptionsObserver(SubscriptionsObserver* observer); + // Fetch users' pref from server on whether to receive price tracking emails. void FetchPriceEmailPref(); @@ -406,4 +413,21 @@ } // namespace commerce +namespace base { + +template <> +struct ScopedObservationTraits<commerce::ShoppingService, + commerce::SubscriptionsObserver> { + static void AddObserver(commerce::ShoppingService* source, + commerce::SubscriptionsObserver* observer) { + source->AddSubscriptionsObserver(observer); + } + static void RemoveObserver(commerce::ShoppingService* source, + commerce::SubscriptionsObserver* observer) { + source->RemoveSubscriptionsObserver(observer); + } +}; + +} // namespace base + #endif // COMPONENTS_COMMERCE_CORE_SHOPPING_SERVICE_H_
diff --git a/components/commerce/core/subscriptions/BUILD.gn b/components/commerce/core/subscriptions/BUILD.gn index 9bf065c4..4ecc51b 100644 --- a/components/commerce/core/subscriptions/BUILD.gn +++ b/components/commerce/core/subscriptions/BUILD.gn
@@ -8,6 +8,7 @@ "commerce_subscription.h", "subscriptions_manager.cc", "subscriptions_manager.h", + "subscriptions_observer.h", "subscriptions_server_proxy.cc", "subscriptions_server_proxy.h", "subscriptions_storage.cc",
diff --git a/components/commerce/core/subscriptions/commerce_subscription.h b/components/commerce/core/subscriptions/commerce_subscription.h index f59be5c..efe70f1 100644 --- a/components/commerce/core/subscriptions/commerce_subscription.h +++ b/components/commerce/core/subscriptions/commerce_subscription.h
@@ -22,6 +22,8 @@ namespace commerce { // The type of subscription. +// A Java counterpart will be generated for this enum. +// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.components.commerce.core enum class SubscriptionType { // Unspecified type. kTypeUnspecified = 0, @@ -30,6 +32,8 @@ }; // The type of subscription identifier. +// A Java counterpart will be generated for this enum. +// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.components.commerce.core enum class IdentifierType { // Unspecified identifier type. kIdentifierTypeUnspecified = 0, @@ -40,6 +44,8 @@ }; // The type of subscription management. +// A Java counterpart will be generated for this enum. +// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.components.commerce.core enum class ManagementType { // Unspecified management type. kTypeUnspecified = 0,
diff --git a/components/commerce/core/subscriptions/subscriptions_manager.cc b/components/commerce/core/subscriptions/subscriptions_manager.cc index e9844d0..c8de959 100644 --- a/components/commerce/core/subscriptions/subscriptions_manager.cc +++ b/components/commerce/core/subscriptions/subscriptions_manager.cc
@@ -7,6 +7,7 @@ #include "base/task/sequenced_task_runner.h" #include "components/commerce/core/commerce_feature_list.h" #include "components/commerce/core/subscriptions/commerce_subscription.h" +#include "components/commerce/core/subscriptions/subscriptions_observer.h" #include "components/commerce/core/subscriptions/subscriptions_server_proxy.h" #include "components/commerce/core/subscriptions/subscriptions_storage.h" #include "components/session_proto_db/session_proto_storage.h" @@ -22,11 +23,11 @@ const char kTimeoutParam[] = "subscriptions_request_timeout"; constexpr base::FeatureParam<int> kTimeoutMs{&commerce::kShoppingList, kTimeoutParam, kDefaultTimeoutMs}; +} // namespace const char kTrackResultHistogramName[] = "Commerce.Subscriptions.TrackResult"; const char kUntrackResultHistogramName[] = "Commerce.Subscriptions.UntrackResult"; -} // namespace SubscriptionsManager::SubscriptionsManager( signin::IdentityManager* identity_manager, @@ -51,6 +52,7 @@ : server_proxy_(std::move(server_proxy)), storage_(std::move(storage)), account_checker_(account_checker), + observers_(base::ObserverListPolicy::EXISTING_ONLY), weak_ptr_factory_(this) { // Avoid duplicate server calls on android. Remove this after we integrate // android implementation to shopping service. @@ -86,48 +88,68 @@ void SubscriptionsManager::Subscribe( std::unique_ptr<std::vector<CommerceSubscription>> subscriptions, base::OnceCallback<void(bool)> callback) { + CHECK(subscriptions->size() > 0); + // If there is a coming subscribe request but the last sync with the server // failed, we should re-try the sync, or this request will fail directly. if (!last_sync_succeeded_ && !HasRequestRunning()) { SyncSubscriptions(); } SubscriptionType type = (*subscriptions)[0].type; + // Make a copy of subscriptions to notify observers later. + std::vector<CommerceSubscription> notified_subscriptions = *subscriptions; pending_requests_.emplace( type, AsyncOperation::kSubscribe, std::move(subscriptions), base::BindOnce( [](base::WeakPtr<SubscriptionsManager> manager, + std::vector<CommerceSubscription> notified_subscriptions, base::OnceCallback<void(bool)> callback, SubscriptionsRequestStatus result) { base::UmaHistogramEnumeration(kTrackResultHistogramName, result); - std::move(callback).Run(result == - SubscriptionsRequestStatus::kSuccess); + bool succeeded = result == SubscriptionsRequestStatus::kSuccess || + result == SubscriptionsRequestStatus::kNoOp; + for (SubscriptionsObserver& observer : manager->observers_) { + observer.OnSubscribe(notified_subscriptions, succeeded); + } + std::move(callback).Run(succeeded); manager->OnRequestCompletion(); }, - weak_ptr_factory_.GetWeakPtr(), std::move(callback))); + weak_ptr_factory_.GetWeakPtr(), std::move(notified_subscriptions), + std::move(callback))); CheckAndProcessRequest(); } void SubscriptionsManager::Unsubscribe( std::unique_ptr<std::vector<CommerceSubscription>> subscriptions, base::OnceCallback<void(bool)> callback) { + CHECK(subscriptions->size() > 0); + // If there is a coming unsubscribe request but the last sync with the server // failed, we should re-try the sync, or this request will fail directly. if (!last_sync_succeeded_ && !HasRequestRunning()) { SyncSubscriptions(); } SubscriptionType type = (*subscriptions)[0].type; + // Make a copy of subscriptions to notify observers later. 
+ std::vector<CommerceSubscription> notified_subscriptions = *subscriptions; pending_requests_.emplace( type, AsyncOperation::kUnsubscribe, std::move(subscriptions), base::BindOnce( [](base::WeakPtr<SubscriptionsManager> manager, + std::vector<CommerceSubscription> notified_subscriptions, base::OnceCallback<void(bool)> callback, SubscriptionsRequestStatus result) { base::UmaHistogramEnumeration(kUntrackResultHistogramName, result); - std::move(callback).Run(result == - SubscriptionsRequestStatus::kSuccess); + bool succeeded = result == SubscriptionsRequestStatus::kSuccess || + result == SubscriptionsRequestStatus::kNoOp; + for (SubscriptionsObserver& observer : manager->observers_) { + observer.OnUnsubscribe(notified_subscriptions, succeeded); + } + std::move(callback).Run(succeeded); manager->OnRequestCompletion(); }, - weak_ptr_factory_.GetWeakPtr(), std::move(callback))); + weak_ptr_factory_.GetWeakPtr(), std::move(notified_subscriptions), + std::move(callback))); CheckAndProcessRequest(); } @@ -202,9 +224,8 @@ unique_subscriptions) { if (unique_subscriptions->size() == 0) { base::SequencedTaskRunner::GetCurrentDefault()->PostTask( - FROM_HERE, - base::BindOnce(std::move(callback), - SubscriptionsRequestStatus::kSuccess)); + FROM_HERE, base::BindOnce(std::move(callback), + SubscriptionsRequestStatus::kNoOp)); return; } manager->server_proxy_->Create( @@ -232,9 +253,8 @@ unique_subscriptions) { if (unique_subscriptions->size() == 0) { base::SequencedTaskRunner::GetCurrentDefault()->PostTask( - FROM_HERE, - base::BindOnce(std::move(callback), - SubscriptionsRequestStatus::kSuccess)); + FROM_HERE, base::BindOnce(std::move(callback), + SubscriptionsRequestStatus::kNoOp)); return; } manager->server_proxy_->Delete( @@ -332,6 +352,14 @@ return has_request_running_; } +void SubscriptionsManager::AddObserver(SubscriptionsObserver* observer) { + observers_.AddObserver(observer); +} + +void SubscriptionsManager::RemoveObserver(SubscriptionsObserver* observer) { + observers_.RemoveObserver(observer); +} + bool SubscriptionsManager::GetLastSyncSucceededForTesting() { return last_sync_succeeded_; }
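Note: in the hunks above, observers are notified from inside the completion callback before the caller's own callback runs, and the `base::ObserverListPolicy::EXISTING_ONLY` policy chosen in the constructor means an observer added while a notification is being dispatched is not invoked for that dispatch. A generic sketch of the same ObserverList pattern, independent of the commerce types:

    #include "base/observer_list.h"

    class Listener {
     public:
      virtual ~Listener() = default;
      virtual void OnEvent(bool succeeded) = 0;
    };

    class Notifier {
     public:
      // EXISTING_ONLY: observers added during a notification are skipped for
      // that notification, matching SubscriptionsManager's observers_ member.
      Notifier() : observers_(base::ObserverListPolicy::EXISTING_ONLY) {}

      void AddObserver(Listener* listener) { observers_.AddObserver(listener); }
      void RemoveObserver(Listener* listener) {
        observers_.RemoveObserver(listener);
      }

      void NotifyAll(bool succeeded) {
        for (Listener& listener : observers_)
          listener.OnEvent(succeeded);
      }

     private:
      // Unchecked: listeners are not required to derive from
      // base::CheckedObserver.
      base::ObserverList<Listener>::Unchecked observers_;
    };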
diff --git a/components/commerce/core/subscriptions/subscriptions_manager.h b/components/commerce/core/subscriptions/subscriptions_manager.h index 85be34b..d540685a 100644 --- a/components/commerce/core/subscriptions/subscriptions_manager.h +++ b/components/commerce/core/subscriptions/subscriptions_manager.h
@@ -12,6 +12,7 @@ #include "base/callback.h" #include "base/check.h" #include "base/memory/scoped_refptr.h" +#include "base/observer_list.h" #include "base/scoped_observation.h" #include "base/time/time.h" #include "components/commerce/core/account_checker.h" @@ -26,14 +27,19 @@ namespace commerce { +class SubscriptionsObserver; class SubscriptionsServerProxy; class SubscriptionsStorage; enum class SubscriptionType; struct CommerceSubscription; +extern const char kTrackResultHistogramName[]; +extern const char kUntrackResultHistogramName[]; + // Possible result status of a product (un)tracking request. This enum needs to // match the values in enums.xml. enum class SubscriptionsRequestStatus { + // Subscriptions successfully added or removed on server. kSuccess = 0, // Server failed to parse the request. kServerParseError = 1, @@ -49,9 +55,12 @@ // for monitoring purpose only and should never happen if the subscriptions // work correctly. kLost = 6, + // No action taken because the product is already tracked/untracked on the + // server. + kNoOp = 7, // This enum must be last and is only used for histograms. - kMaxValue = kLost + kMaxValue = kNoOp }; using SubscriptionsRequestCallback = @@ -95,6 +104,9 @@ void IsSubscribed(CommerceSubscription subscription, base::OnceCallback<void(bool)> callback); + void AddObserver(SubscriptionsObserver* observer); + void RemoveObserver(SubscriptionsObserver* observer); + // For tests only, return last_sync_succeeded_. bool GetLastSyncSucceededForTesting(); @@ -198,6 +210,8 @@ raw_ptr<AccountChecker> account_checker_; + base::ObserverList<SubscriptionsObserver>::Unchecked observers_; + base::WeakPtrFactory<SubscriptionsManager> weak_ptr_factory_; };
diff --git a/components/commerce/core/subscriptions/subscriptions_manager_unittest.cc b/components/commerce/core/subscriptions/subscriptions_manager_unittest.cc index eb82b99..feb7122 100644 --- a/components/commerce/core/subscriptions/subscriptions_manager_unittest.cc +++ b/components/commerce/core/subscriptions/subscriptions_manager_unittest.cc
@@ -9,12 +9,14 @@ #include "base/callback.h" #include "base/check.h" #include "base/run_loop.h" +#include "base/test/metrics/histogram_tester.h" #include "base/test/scoped_feature_list.h" #include "base/test/task_environment.h" #include "components/commerce/core/commerce_feature_list.h" #include "components/commerce/core/mock_account_checker.h" #include "components/commerce/core/subscriptions/commerce_subscription.h" #include "components/commerce/core/subscriptions/subscriptions_manager.h" +#include "components/commerce/core/subscriptions/subscriptions_observer.h" #include "components/commerce/core/subscriptions/subscriptions_server_proxy.h" #include "components/commerce/core/subscriptions/subscriptions_storage.h" #include "components/signin/public/identity_manager/identity_test_environment.h" @@ -209,7 +211,8 @@ } }; -class SubscriptionsManagerTest : public testing::Test { +class SubscriptionsManagerTest : public testing::Test, + public commerce::SubscriptionsObserver { public: SubscriptionsManagerTest() : mock_server_proxy_(std::make_unique<MockSubscriptionsServerProxy>()), @@ -240,6 +243,24 @@ account_checker_.SetAnonymizedUrlDataCollectionEnabled(msbb_enabled); } + void OnSubscribe(const std::vector<CommerceSubscription>& subscriptions, + bool succeeded) override { + ASSERT_EQ(1, (int)subscriptions.size()); + ASSERT_EQ("333", subscriptions[0].id); + ASSERT_EQ(true, succeeded); + on_subscribe_run_loop_.Quit(); + } + + void OnUnsubscribe(const std::vector<CommerceSubscription>& subscriptions, + bool succeeded) override { + ASSERT_EQ(1, (int)subscriptions.size()); + ASSERT_EQ("333", subscriptions[0].id); + ASSERT_EQ(true, succeeded); + on_unsubscribe_run_loop_.Quit(); + } + + void AddObserver() { subscriptions_manager_->AddObserver(this); } + protected: base::test::TaskEnvironment task_environment_; signin::IdentityTestEnvironment identity_test_env_; @@ -248,6 +269,9 @@ std::unique_ptr<MockSubscriptionsServerProxy> mock_server_proxy_; std::unique_ptr<MockSubscriptionsStorage> mock_storage_; std::unique_ptr<SubscriptionsManager> subscriptions_manager_; + base::HistogramTester histogram_tester; + base::RunLoop on_subscribe_run_loop_; + base::RunLoop on_unsubscribe_run_loop_; }; TEST_F(SubscriptionsManagerTest, TestSyncSucceeded) { @@ -332,6 +356,9 @@ &run_loop)); // The callback should eventually quit the run loop. run_loop.Run(); + + histogram_tester.ExpectTotalCount(kTrackResultHistogramName, 1); + histogram_tester.ExpectBucketCount(kTrackResultHistogramName, 0, 1); } TEST_F(SubscriptionsManagerTest, TestSubscribe_ServerManageFailed) { @@ -367,6 +394,9 @@ &run_loop)); // The callback should eventually quit the run loop. run_loop.Run(); + + histogram_tester.ExpectTotalCount(kTrackResultHistogramName, 1); + histogram_tester.ExpectBucketCount(kTrackResultHistogramName, 1, 1); } TEST_F(SubscriptionsManagerTest, TestSubscribe_LastSyncFailed) { @@ -403,6 +433,9 @@ &run_loop)); // The callback should eventually quit the run loop. run_loop.Run(); + + histogram_tester.ExpectTotalCount(kTrackResultHistogramName, 1); + histogram_tester.ExpectBucketCount(kTrackResultHistogramName, 4, 1); } TEST_F(SubscriptionsManagerTest, TestSubscribe_HasRequestRunning) { @@ -433,6 +466,8 @@ // Use a RunLoop in case the callback is posted on a different thread. 
base::RunLoop().RunUntilIdle(); ASSERT_EQ(false, callback_executed); + + histogram_tester.ExpectTotalCount(kTrackResultHistogramName, 0); } TEST_F(SubscriptionsManagerTest, TestSubscribe_HasStuckRequestRunning) { @@ -576,6 +611,9 @@ &run_loop)); // The callback should eventually quit the run loop. run_loop.Run(); + + histogram_tester.ExpectTotalCount(kTrackResultHistogramName, 1); + histogram_tester.ExpectBucketCount(kTrackResultHistogramName, 7, 1); } TEST_F(SubscriptionsManagerTest, TestUnsubscribe) { @@ -611,6 +649,9 @@ }, &run_loop)); run_loop.Run(); + + histogram_tester.ExpectTotalCount(kUntrackResultHistogramName, 1); + histogram_tester.ExpectBucketCount(kUntrackResultHistogramName, 0, 1); } TEST_F(SubscriptionsManagerTest, TestUnsubscribe_LastSyncFailed) { @@ -646,6 +687,9 @@ }, &run_loop)); run_loop.Run(); + + histogram_tester.ExpectTotalCount(kUntrackResultHistogramName, 1); + histogram_tester.ExpectBucketCount(kUntrackResultHistogramName, 4, 1); } TEST_F(SubscriptionsManagerTest, @@ -678,6 +722,8 @@ // Use a RunLoop in case the callback is posted on a different thread. base::RunLoop().RunUntilIdle(); ASSERT_EQ(false, callback_executed); + + histogram_tester.ExpectTotalCount(kUntrackResultHistogramName, 0); } TEST_F(SubscriptionsManagerTest, TestUnsubscribe_NonExistingSubscriptions) { @@ -710,6 +756,9 @@ &run_loop)); // The callback should eventually quit the run loop. run_loop.Run(); + + histogram_tester.ExpectTotalCount(kUntrackResultHistogramName, 1); + histogram_tester.ExpectBucketCount(kUntrackResultHistogramName, 7, 1); } TEST_F(SubscriptionsManagerTest, TestIdentityChange) { @@ -817,4 +866,39 @@ run_loop.Run(); } +TEST_F(SubscriptionsManagerTest, TestSubscriptionsObserver) { + SetAccountStatus(true, true); + mock_server_proxy_->MockGetResponses("111"); + mock_server_proxy_->MockManageResponses(true); + mock_storage_->MockGetResponses("222"); + mock_storage_->MockUpdateResponses(true); + + CreateManagerAndVerify(true); + AddObserver(); + + base::RunLoop subscribe_run_loop; + subscriptions_manager_->Subscribe( + BuildSubscriptions("333"), + base::BindOnce( + [](base::RunLoop* subscribe_run_loop, bool succeeded) { + ASSERT_EQ(true, succeeded); + subscribe_run_loop->Quit(); + }, + &subscribe_run_loop)); + subscribe_run_loop.Run(); + on_subscribe_run_loop_.Run(); + + base::RunLoop unsubscribe_run_loop; + subscriptions_manager_->Unsubscribe( + BuildSubscriptions("333"), + base::BindOnce( + [](base::RunLoop* unsubscribe_run_loop, bool succeeded) { + ASSERT_EQ(true, succeeded); + unsubscribe_run_loop->Quit(); + }, + &unsubscribe_run_loop)); + unsubscribe_run_loop.Run(); + on_unsubscribe_run_loop_.Run(); +} + } // namespace commerce
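Note: the bucket indices passed to `ExpectBucketCount` are the raw `SubscriptionsRequestStatus` values; per the header change above, 0 is `kSuccess` and the new value 7 is `kNoOp` (the intermediate values are not shown in this diff). A small compile-time sketch that would make the mapping explicit in the test file:

    #include "components/commerce/core/subscriptions/subscriptions_manager.h"

    namespace commerce {

    // Buckets used by the histogram expectations above.
    static_assert(static_cast<int>(SubscriptionsRequestStatus::kSuccess) == 0,
                  "bucket 0 == kSuccess");
    static_assert(static_cast<int>(SubscriptionsRequestStatus::kNoOp) == 7,
                  "bucket 7 == kNoOp");
    static_assert(SubscriptionsRequestStatus::kMaxValue ==
                      SubscriptionsRequestStatus::kNoOp,
                  "kNoOp is the last value recorded to the histogram");

    }  // namespace commerce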
diff --git a/components/commerce/core/subscriptions/subscriptions_observer.h b/components/commerce/core/subscriptions/subscriptions_observer.h new file mode 100644 index 0000000..18913c8 --- /dev/null +++ b/components/commerce/core/subscriptions/subscriptions_observer.h
@@ -0,0 +1,36 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef COMPONENTS_COMMERCE_CORE_SUBSCRIPTIONS_SUBSCRIPTIONS_OBSERVER_H_ +#define COMPONENTS_COMMERCE_CORE_SUBSCRIPTIONS_SUBSCRIPTIONS_OBSERVER_H_ + +#include <string> + +#include "components/commerce/core/subscriptions/commerce_subscription.h" + +namespace commerce { + +class SubscriptionsObserver { + public: + SubscriptionsObserver(const SubscriptionsObserver&) = delete; + SubscriptionsObserver& operator=(const SubscriptionsObserver&) = delete; + + // Invoked when a subscribe request for |subscriptions| has finished. + virtual void OnSubscribe( + const std::vector<CommerceSubscription>& subscriptions, + bool succeeded) = 0; + + // Invoked when an unsubscribe request for |subscriptions| has finished. + virtual void OnUnsubscribe( + const std::vector<CommerceSubscription>& subscriptions, + bool succeeded) = 0; + + protected: + SubscriptionsObserver() = default; + virtual ~SubscriptionsObserver() = default; +}; + +} // namespace commerce + +#endif // COMPONENTS_COMMERCE_CORE_SUBSCRIPTIONS_SUBSCRIPTIONS_OBSERVER_H_
diff --git a/components/omnibox/browser/autocomplete_grouper_sections.cc b/components/omnibox/browser/autocomplete_grouper_sections.cc index e915ce3e..4ded7bcc4 100644 --- a/components/omnibox/browser/autocomplete_grouper_sections.cc +++ b/components/omnibox/browser/autocomplete_grouper_sections.cc
@@ -20,12 +20,8 @@ // static ACMatches Section::GroupMatches(PSections sections, ACMatches matches) { - int last_relevance = std::numeric_limits<int>::max(); for (const auto& match : matches) { DCHECK(match.suggestion_group_id.has_value()); - DCHECK(match.relevance <= last_relevance); - if (!match.allowed_to_be_default_match) - last_relevance = match.relevance; for (const auto& section : sections) { if (section->Add(match)) break;
diff --git a/components/optimization_guide/content/browser/BUILD.gn b/components/optimization_guide/content/browser/BUILD.gn index 82c0a51..b0f8edf 100644 --- a/components/optimization_guide/content/browser/BUILD.gn +++ b/components/optimization_guide/content/browser/BUILD.gn
@@ -95,6 +95,8 @@ deps = [ ":browser", ":test_support", + "//components/history/core/test", + "//components/omnibox/browser:test_support", "//components/optimization_guide/core:test_support", "//components/search_engines", "//components/ukm:test_support",
diff --git a/components/optimization_guide/content/browser/DEPS b/components/optimization_guide/content/browser/DEPS index 9da827c4..5deccd6 100644 --- a/components/optimization_guide/content/browser/DEPS +++ b/components/optimization_guide/content/browser/DEPS
@@ -3,9 +3,11 @@ "+components/continuous_search/common/public/mojom", "+components/google/core", "+components/history/core/browser", + "+components/history/core/test", "+components/keyed_service/core", "+components/no_state_prefetch/browser", "+components/omnibox/browser", + "+components/omnibox/common", "+components/optimization_guide/core", "+components/optimization_guide/proto", "+components/search_engines",
diff --git a/components/optimization_guide/content/browser/page_content_annotations_web_contents_observer_unittest.cc b/components/optimization_guide/content/browser/page_content_annotations_web_contents_observer_unittest.cc index 1af63838..e948cb5 100644 --- a/components/optimization_guide/content/browser/page_content_annotations_web_contents_observer_unittest.cc +++ b/components/optimization_guide/content/browser/page_content_annotations_web_contents_observer_unittest.cc
@@ -4,12 +4,28 @@ #include "components/optimization_guide/content/browser/page_content_annotations_web_contents_observer.h" +#include <string> +#include <utility> + #include "base/command_line.h" +#include "base/files/file_path.h" +#include "base/files/scoped_temp_dir.h" #include "base/memory/raw_ptr.h" +#include "base/run_loop.h" +#include "base/strings/utf_string_conversions.h" +#include "base/task/cancelable_task_tracker.h" +#include "base/test/bind.h" #include "base/test/metrics/histogram_tester.h" #include "base/test/scoped_feature_list.h" +#include "base/time/time.h" #include "components/google/core/common/google_switches.h" +#include "components/history/core/browser/history_database_params.h" #include "components/history/core/browser/history_service.h" +#include "components/history/core/browser/history_types.h" +#include "components/history/core/test/test_history_database.h" +#include "components/omnibox/browser/fake_autocomplete_provider_client.h" +#include "components/omnibox/browser/zero_suggest_cache_service.h" +#include "components/omnibox/common/omnibox_features.h" #include "components/optimization_guide/content/browser/page_content_annotations_service.h" #include "components/optimization_guide/content/browser/test_optimization_guide_decider.h" #include "components/optimization_guide/core/optimization_guide_features.h" @@ -82,17 +98,19 @@ public: explicit FakePageContentAnnotationsService( OptimizationGuideModelProvider* optimization_guide_model_provider, - history::HistoryService* history_service) - : PageContentAnnotationsService(nullptr, - "en-US", - optimization_guide_model_provider, - history_service, - nullptr, - nullptr, - nullptr, - base::FilePath(), - nullptr, - nullptr) {} + history::HistoryService* history_service, + ZeroSuggestCacheService* zero_suggest_cache_service) + : PageContentAnnotationsService( + std::make_unique<FakeAutocompleteProviderClient>(), + "en-US", + optimization_guide_model_provider, + history_service, + nullptr, + zero_suggest_cache_service, + nullptr, + base::FilePath(), + nullptr, + nullptr) {} ~FakePageContentAnnotationsService() override = default; void Annotate(const HistoryVisit& visit) override { @@ -101,6 +119,10 @@ void ExtractRelatedSearches(const HistoryVisit& visit, content::WebContents* web_contents) override { + if (ShouldExtractRelatedSearchesFromZPSCache()) { + return; + } + last_related_searches_extraction_request_.emplace( std::make_pair(visit, web_contents)); } @@ -191,12 +213,20 @@ void SetUp() override { content::RenderViewHostTestHarness::SetUp(); + ASSERT_TRUE(temp_dir_.CreateUniqueTempDir()); + history_service_ = std::make_unique<history::HistoryService>(); + ASSERT_TRUE(history_service_->Init( + history::TestHistoryDatabaseParamsForPath(temp_dir_.GetPath()))); + optimization_guide_model_provider_ = std::make_unique<TestOptimizationGuideModelProvider>(); - history_service_ = std::make_unique<history::HistoryService>(); + const size_t cache_size = 1; + zero_suggest_cache_service_ = + std::make_unique<ZeroSuggestCacheService>(cache_size); page_content_annotations_service_ = std::make_unique<FakePageContentAnnotationsService>( - optimization_guide_model_provider_.get(), history_service_.get()); + optimization_guide_model_provider_.get(), history_service_.get(), + zero_suggest_cache_service_.get()); // Set up a simple template URL service with a default search engine. 
template_url_service_ = std::make_unique<TemplateURLService>( @@ -219,6 +249,9 @@ } void TearDown() override { + history_service_->Shutdown(); + task_environment()->RunUntilIdle(); + page_content_annotations_service_.reset(); optimization_guide_model_provider_.reset(); template_url_service_.reset(); @@ -231,6 +264,12 @@ return page_content_annotations_service_.get(); } + history::HistoryService* history_service() { return history_service_.get(); } + + ZeroSuggestCacheService* zero_suggest_cache_service() { + return zero_suggest_cache_service_.get(); + } + PageContentAnnotationsWebContentsObserver* helper() { return PageContentAnnotationsWebContentsObserver::FromWebContents( web_contents()); @@ -249,6 +288,8 @@ std::unique_ptr<TestOptimizationGuideModelProvider> optimization_guide_model_provider_; std::unique_ptr<history::HistoryService> history_service_; + base::ScopedTempDir temp_dir_; + std::unique_ptr<ZeroSuggestCacheService> zero_suggest_cache_service_; std::unique_ptr<FakePageContentAnnotationsService> page_content_annotations_service_; std::unique_ptr<TemplateURLService> template_url_service_; @@ -550,6 +591,144 @@ EXPECT_EQ(last_request->second, web_contents()); } +class PageContentAnnotationsWebContentsObserverRelatedSearchesFromZPSCacheTest + : public PageContentAnnotationsWebContentsObserverTest { + public: + PageContentAnnotationsWebContentsObserverRelatedSearchesFromZPSCacheTest() { + scoped_feature_list_.InitWithFeaturesAndParameters( + /*enabled_features=*/ + {{features::kPageContentAnnotations, + {{"extract_related_searches", "true"}}}, + {omnibox::kZeroSuggestInMemoryCaching, {}}, + {features::kExtractRelatedSearchesFromPrefetchedZPSResponse, {}}}, + /*disabled_features=*/{}); + } + + void StoreMockZeroSuggestResponse( + ZeroSuggestCacheService* zero_suggest_cache_service, + const std::string& page_url, + const std::string& response_json) { + DCHECK(zero_suggest_cache_service); + + ZeroSuggestCacheService::CacheEntry entry; + entry.response_json = response_json; + zero_suggest_cache_service->StoreZeroSuggestResponse(page_url, entry); + } + + // Performs synchronous call of `HistoryService::QueryHistory()`. + history::QueryResults QueryHistory(history::HistoryService* history_service, + const std::string& text_query) { + DCHECK(history_service); + + base::CancelableTaskTracker tracker; + + history::QueryResults results; + history::QueryOptions options; + + base::RunLoop run_loop; + history_service->QueryHistory( + base::UTF8ToUTF16(text_query), options, + base::BindLambdaForTesting([&](history::QueryResults r) { + results = std::move(r); + run_loop.Quit(); + }), + &tracker); + run_loop.Run(); + + return results; + } + + private: + base::test::ScopedFeatureList scoped_feature_list_; +}; + +TEST_F(PageContentAnnotationsWebContentsObserverRelatedSearchesFromZPSCacheTest, + ExtractRelatedSearchesFromCacheForMainFrameSRPUrl) { + std::string response_json = R"([ + "", + ["los angeles", "san diego", "san francisco"], + ["", "", ""], + [], + { + "google:clientdata": { + "bpc": false, + "tlw": false + }, + "google:suggestdetail": [{}, {}, {}], + "google:suggestrelevance": [701, 700, 553], + "google:suggestsubtypes": [ + [512, 433, 67], + [131, 433, 67], + [512, 433, 67] + ], + "google:suggesttype": ["QUERY", "ENTITY", "ENTITY"], + "google:verbatimrelevance": 851 + }])"; + + // Verify proper behavior when navigating to non-Google SRP. + { + // Navigate to non-Google SRP and commit. 
+ const GURL non_google_srp_url = GURL("http://www.foo.com/search?q=a"); + content::NavigationSimulator::NavigateAndCommitFromBrowser( + web_contents(), non_google_srp_url); + + // Add non-SRP navigation to visit history. + history_service()->AddPage(non_google_srp_url, base::Time::Now(), + history::VisitSource::SOURCE_BROWSED); + task_environment()->RunUntilIdle(); + + // Extractor request will NOT be sent for a non-Google SRP visit. + auto last_request = service()->last_related_searches_extraction_request(); + EXPECT_FALSE(last_request.has_value()); + + // Given a non-Google SRP visit, ZPS caching should NOT trigger "related + // searches" extraction. + StoreMockZeroSuggestResponse(zero_suggest_cache_service(), + non_google_srp_url.spec(), response_json); + task_environment()->RunUntilIdle(); + auto results = QueryHistory(history_service(), non_google_srp_url.spec()); + EXPECT_EQ(results.size(), 1U); + + auto related_searches = + results.back().content_annotations().related_searches; + EXPECT_TRUE(related_searches.empty()); + } + + // Verify proper behavior when navigating to Google SRP. + { + // Navigate to Google SRP and commit. + const GURL google_srp_url = GURL("http://default-engine.com/search?q=a"); + content::NavigationSimulator::NavigateAndCommitFromBrowser(web_contents(), + google_srp_url); + + // Add SRP navigation to visit history. + history_service()->AddPage(google_srp_url, base::Time::Now(), + history::VisitSource::SOURCE_BROWSED); + task_environment()->RunUntilIdle(); + + // Extractor request will NOT be sent since "extract related searches from + // ZPS cache" feature flag is enabled. + auto last_request = service()->last_related_searches_extraction_request(); + EXPECT_FALSE(last_request.has_value()); + + // Given a Google SRP visit, ZPS caching should trigger "related searches" + // extraction. + StoreMockZeroSuggestResponse(zero_suggest_cache_service(), + google_srp_url.spec(), response_json); + task_environment()->RunUntilIdle(); + auto results = QueryHistory(history_service(), google_srp_url.spec()); + EXPECT_EQ(results.size(), 1U); + + auto related_searches = + results.back().content_annotations().related_searches; + EXPECT_FALSE(related_searches.empty()); + + EXPECT_EQ(related_searches[0], "los angeles"); + EXPECT_EQ(related_searches[1], "san diego"); + EXPECT_EQ(related_searches[2], "san francisco"); + } +} + class PageContentAnnotationsWebContentsObserverOnlyPersistGoogleSearchMetadataTest : public PageContentAnnotationsWebContentsObserverTest {
diff --git a/components/segmentation_platform/embedder/default_model/shopping_user_model.cc b/components/segmentation_platform/embedder/default_model/shopping_user_model.cc index 9620063a..891f40b 100644 --- a/components/segmentation_platform/embedder/default_model/shopping_user_model.cc +++ b/components/segmentation_platform/embedder/default_model/shopping_user_model.cc
@@ -42,16 +42,6 @@ MetadataWriter::UMAFeature::FromUserAction( "Autofill_PolledCreditCardSuggestions", 7)}; - -std::unique_ptr<ModelProvider> GetShoppingUserDefaultModel() { - if (!base::GetFieldTrialParamByFeatureAsBool( - features::kShoppingUserSegmentFeature, kDefaultModelEnabledParam, - true)) { - return nullptr; - } - return std::make_unique<ShoppingUserModel>(); -} - } // namespace // static @@ -64,17 +54,11 @@ config->segmentation_uma_name = kShoppingUserUmaName; config->AddSegmentId( SegmentId::OPTIMIZATION_TARGET_SEGMENTATION_SHOPPING_USER, - GetShoppingUserDefaultModel()); + std::make_unique<ShoppingUserModel>()); config->segment_selection_ttl = - base::Days(base::GetFieldTrialParamByFeatureAsInt( - features::kShoppingUserSegmentFeature, - kVariationsParamNameSegmentSelectionTTLDays, - kShoppingUserDefaultSelectionTTLDays)); + base::Days(kShoppingUserDefaultSelectionTTLDays); config->unknown_selection_ttl = - base::Days(base::GetFieldTrialParamByFeatureAsInt( - features::kShoppingUserSegmentFeature, - kVariationsParamNameUnknownSelectionTTLDays, - kShoppingUserDefaultUnknownSelectionTTLDays)); + base::Days(kShoppingUserDefaultUnknownSelectionTTLDays); config->is_boolean_segment = true; return config; }
diff --git a/components/subresource_filter/tools/indexing_tool_unittest.cc b/components/subresource_filter/tools/indexing_tool_unittest.cc index 5eeca561..db23797 100644 --- a/components/subresource_filter/tools/indexing_tool_unittest.cc +++ b/components/subresource_filter/tools/indexing_tool_unittest.cc
@@ -131,20 +131,16 @@ WriteVersionMetadata(version_path, "1.2.3", checksum); std::string version_json; EXPECT_TRUE(base::ReadFileToString(version_path, &version_json)); - std::unique_ptr<base::DictionaryValue> json = base::DictionaryValue::From( - base::JSONReader::ReadDeprecated(version_json)); + absl::optional<base::Value> json = base::JSONReader::Read(version_json); - std::string actual_content = - json->FindPath({"subresource_filter", "ruleset_version", "content"}) - ->GetString(); - EXPECT_EQ("1.2.3", actual_content); - int actual_format = - json->FindPath({"subresource_filter", "ruleset_version", "format"}) - ->GetInt(); + std::string* actual_content = json->GetDict().FindStringByDottedPath( + "subresource_filter.ruleset_version.content"); + EXPECT_EQ("1.2.3", *actual_content); + absl::optional<int> actual_format = json->GetDict().FindIntByDottedPath( + "subresource_filter.ruleset_version.format"); EXPECT_EQ(RulesetIndexer::kIndexedFormatVersion, actual_format); - int actual_checksum = - json->FindPath({"subresource_filter", "ruleset_version", "checksum"}) - ->GetInt(); + absl::optional<int> actual_checksum = json->GetDict().FindIntByDottedPath( + "subresource_filter.ruleset_version.checksum"); EXPECT_EQ(checksum, actual_checksum); }
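Note: the test now uses the non-deprecated `base::Value::Dict` dotted-path accessors. `FindStringByDottedPath` returns a null pointer and `FindIntByDottedPath` an empty `absl::optional` when a path is missing, and the test dereferences the results directly, so a missing path would crash rather than produce a readable failure. A minimal sketch of the defensive variant; the function name `ReadRulesetVersion` is illustrative only:

    #include <string>

    #include "base/json/json_reader.h"
    #include "base/values.h"
    #include "third_party/abseil-cpp/absl/types/optional.h"

    // Reads the ruleset version metadata, checking every lookup before
    // dereferencing.
    bool ReadRulesetVersion(const std::string& json,
                            std::string* content_out,
                            int* format_out) {
      absl::optional<base::Value> parsed = base::JSONReader::Read(json);
      if (!parsed || !parsed->is_dict())
        return false;

      const base::Value::Dict& dict = parsed->GetDict();
      const std::string* content = dict.FindStringByDottedPath(
          "subresource_filter.ruleset_version.content");
      absl::optional<int> format = dict.FindIntByDottedPath(
          "subresource_filter.ruleset_version.format");
      if (!content || !format)
        return false;

      *content_out = *content;
      *format_out = *format;
      return true;
    }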
diff --git a/content/browser/renderer_host/render_frame_host_impl.cc b/content/browser/renderer_host/render_frame_host_impl.cc index 9e8ca1f..b79ba0d 100644 --- a/content/browser/renderer_host/render_frame_host_impl.cc +++ b/content/browser/renderer_host/render_frame_host_impl.cc
@@ -13312,7 +13312,11 @@ // If the origin doesn't match, we would do a DumpWithoutCrashing above. // So, don't do a DumpWithoutCrashing unless there's another param that // doesn't match. + // Note: This is temporarily disabled on Android as there has been a recent + // spike of reports on Android WebView. +#if !BUILDFLAG(IS_ANDROID) base::debug::DumpWithoutCrashing(); +#endif // !BUILDFLAG(IS_ANDROID) } }
diff --git a/docs/contributing.md b/docs/contributing.md index 7ab59dab..62c0ac99 100644 --- a/docs/contributing.md +++ b/docs/contributing.md
@@ -462,9 +462,6 @@ `-r foo@example.com` when running `git cl upload`. * A comma-separated list of reviewer email addresses (e.g. foo@example.com, bar@example.com). -* **Tbr:** The same format as the `R` footer, but indicates to the - commit queue that it can skip checking that all files in the change - have been approved by their respective `OWNERS`. * **Cq-Include-Trybots:** * A comma-separated list of trybots which should be triggered and checked by the CQ in addition to the normal set.
diff --git a/docs/infra/cq.md b/docs/infra/cq.md index e12d152..153c36b 100644 --- a/docs/infra/cq.md +++ b/docs/infra/cq.md
@@ -77,12 +77,6 @@ This should only be used for reverts to green the tree, since it skips try bots and might therefore break the tree. You shouldn't use this otherwise. -* `Tbr: <username>` - - [See policy](https://chromium.googlesource.com/chromium/src/+/HEAD/docs/code_reviews.md#TBR-To-Be-Reviewed) - of when it's acceptable to use TBR ("To be reviewed"). If a change has a TBR - line with a valid reviewer, the CQ will skip checks for LGTMs. - ## FAQ ### What exactly does the CQ run?
diff --git a/docs/security/sheriff.md b/docs/security/sheriff.md index 2b564fe..14c3190 100644 --- a/docs/security/sheriff.md +++ b/docs/security/sheriff.md
@@ -154,11 +154,7 @@ access, so this will probably make the issue inaccessible to you. * **If the report is asking about why something is or is not on the Safe Browsing list:** - * Assign it to zbutler@, who will triage it for the Safe Browsing team. - * Remove the **Restrict-View-SecurityTeam** label and add the - **Restrict-View-Google** label. - * Change **Type-Bug-Security** label to **Type-Bug**. - * Add the **Security** component. + * Close the bug and request the reporter submit the URL to SafeBrowsing. * See below for reporting URLs to SafeBrowsing. * **If the report is a potentially valid bug but is not a security vulnerability:** @@ -349,8 +345,10 @@ explanatory text. * Report suspected malicious URLs to SafeBrowsing: - * Public URL: - [https://support.google.com/websearch/contact/safe_browsing](https://support.google.com/websearch/contact/safe_browsing). + * Public URLs: + * [Report malware](https://safebrowsing.google.com/safebrowsing/report_badware/?hl=en) + * [Report phishing](https://safebrowsing.google.com/safebrowsing/report_phish/?hl=en) + * [Report incorrect phishing warning](https://safebrowsing.google.com/safebrowsing/report_error/?hl=en) * Googlers: see instructions at [go/safebrowsing-escalation](https://goto.google.com/safebrowsing-escalation) * Report suspected malicious file attachments to SafeBrowsing and VirusTotal. * Make sure the report is properly forwarded when the vulnerability is in an
diff --git a/extensions/browser/api/hid/hid_api.cc b/extensions/browser/api/hid/hid_api.cc index 97186c9..5dd4350 100644 --- a/extensions/browser/api/hid/hid_api.cc +++ b/extensions/browser/api/hid/hid_api.cc
@@ -13,8 +13,6 @@ #include "base/bind.h" #include "base/values.h" #include "extensions/browser/api/api_resource_manager.h" -#include "extensions/browser/api/device_permissions_prompt.h" -#include "extensions/browser/api/extensions_api_client.h" #include "extensions/common/api/hid.h" #include "mojo/public/cpp/bindings/callback_helpers.h" #include "services/device/public/cpp/hid/hid_device_filter.h" @@ -95,53 +93,6 @@ Respond(OneArgument(base::Value(std::move(devices)))); } -HidGetUserSelectedDevicesFunction::HidGetUserSelectedDevicesFunction() = - default; - -HidGetUserSelectedDevicesFunction::~HidGetUserSelectedDevicesFunction() = - default; - -ExtensionFunction::ResponseAction HidGetUserSelectedDevicesFunction::Run() { - std::unique_ptr<api::hid::GetUserSelectedDevices::Params> parameters = - hid::GetUserSelectedDevices::Params::Create(args()); - EXTENSION_FUNCTION_VALIDATE(parameters); - - content::WebContents* web_contents = GetSenderWebContents(); - if (!web_contents || !user_gesture()) { - return RespondNow(OneArgument(base::Value(base::Value::Type::LIST))); - } - - bool multiple = false; - std::vector<HidDeviceFilter> filters; - if (parameters->options) { - multiple = parameters->options->multiple && *parameters->options->multiple; - if (parameters->options->filters) { - const auto& api_filters = *parameters->options->filters; - filters.resize(api_filters.size()); - for (size_t i = 0; i < api_filters.size(); ++i) { - ConvertHidDeviceFilter(api_filters[i], &filters[i]); - } - } - } - - prompt_ = - ExtensionsAPIClient::Get()->CreateDevicePermissionsPrompt(web_contents); - CHECK(prompt_); - prompt_->AskForHidDevices( - extension(), browser_context(), multiple, filters, - base::BindOnce(&HidGetUserSelectedDevicesFunction::OnDevicesChosen, - this)); - return RespondLater(); -} - -void HidGetUserSelectedDevicesFunction::OnDevicesChosen( - std::vector<device::mojom::HidDeviceInfoPtr> devices) { - HidDeviceManager* device_manager = HidDeviceManager::Get(browser_context()); - CHECK(device_manager); - Respond(OneArgument( - base::Value(device_manager->GetApiDevicesFromList(std::move(devices))))); -} - HidConnectFunction::HidConnectFunction() : connection_manager_(nullptr) { }
diff --git a/extensions/browser/api/hid/hid_api.h b/extensions/browser/api/hid/hid_api.h index a6b4a89..546f7f3 100644 --- a/extensions/browser/api/hid/hid_api.h +++ b/extensions/browser/api/hid/hid_api.h
@@ -21,8 +21,6 @@ namespace extensions { -class DevicePermissionsPrompt; - class HidGetDevicesFunction : public ExtensionFunction { public: DECLARE_EXTENSION_FUNCTION("hid.getDevices", HID_GETDEVICES) @@ -41,29 +39,6 @@ void OnEnumerationComplete(base::Value::List devices); }; -class HidGetUserSelectedDevicesFunction : public ExtensionFunction { - public: - DECLARE_EXTENSION_FUNCTION("hid.getUserSelectedDevices", - HID_GETUSERSELECTEDDEVICES) - - HidGetUserSelectedDevicesFunction(); - - HidGetUserSelectedDevicesFunction(const HidGetUserSelectedDevicesFunction&) = - delete; - HidGetUserSelectedDevicesFunction& operator=( - const HidGetUserSelectedDevicesFunction&) = delete; - - private: - ~HidGetUserSelectedDevicesFunction() override; - - // ExtensionFunction: - ResponseAction Run() override; - - void OnDevicesChosen(std::vector<device::mojom::HidDeviceInfoPtr> devices); - - std::unique_ptr<DevicePermissionsPrompt> prompt_; -}; - class HidConnectFunction : public ExtensionFunction { public: DECLARE_EXTENSION_FUNCTION("hid.connect", HID_CONNECT)
diff --git a/extensions/browser/api/hid/hid_apitest.cc b/extensions/browser/api/hid/hid_apitest.cc index 1ce51a1..bb60585 100644 --- a/extensions/browser/api/hid/hid_apitest.cc +++ b/extensions/browser/api/hid/hid_apitest.cc
@@ -252,23 +252,6 @@ EXPECT_EQ("success", result_listener.message()); } -IN_PROC_BROWSER_TEST_F(HidApiTest, GetUserSelectedDevices) { - ExtensionTestMessageListener open_listener("opened_device"); - - TestExtensionsAPIClient test_api_client; - ASSERT_TRUE(LoadApp("api_test/hid/get_user_selected_devices")); - ASSERT_TRUE(open_listener.WaitUntilSatisfied()); - - ExtensionTestMessageListener remove_listener("removed"); - GetFakeHidManager()->RemoveDevice(kTestDeviceGuids[0]); - ASSERT_TRUE(remove_listener.WaitUntilSatisfied()); - - ExtensionTestMessageListener add_listener("added"); - AddDevice(kTestDeviceGuids[0], kTestPhysicalDeviceIds[0], kTestVendorId, - kTestProductId, true, "A"); - ASSERT_TRUE(add_listener.WaitUntilSatisfied()); -} - namespace { device::mojom::HidDeviceInfoPtr CreateDeviceWithOneCollection(
diff --git a/extensions/browser/api/hid/hid_device_manager.cc b/extensions/browser/api/hid/hid_device_manager.cc index ce0d83d..2214bd2 100644 --- a/extensions/browser/api/hid/hid_device_manager.cc +++ b/extensions/browser/api/hid/hid_device_manager.cc
@@ -147,22 +147,6 @@ } } -base::Value::List HidDeviceManager::GetApiDevicesFromList( - std::vector<device::mojom::HidDeviceInfoPtr> devices) { - DCHECK(thread_checker_.CalledOnValidThread()); - base::Value::List device_list; - for (const auto& device : devices) { - const auto device_entry = resource_ids_.find(device->guid); - DCHECK(device_entry != resource_ids_.end()); - - hid::HidDeviceInfo device_info; - device_info.device_id = device_entry->second; - PopulateHidDeviceInfo(&device_info, *device); - device_list.Append(device_info.ToValue()); - } - return device_list; -} - const device::mojom::HidDeviceInfo* HidDeviceManager::GetDeviceInfo( int resource_id) { DCHECK(thread_checker_.CalledOnValidThread());
diff --git a/extensions/browser/api/hid/hid_device_manager.h b/extensions/browser/api/hid/hid_device_manager.h index fdeb0a1..2f3502e 100644 --- a/extensions/browser/api/hid/hid_device_manager.h +++ b/extensions/browser/api/hid/hid_device_manager.h
@@ -64,11 +64,6 @@ const std::vector<device::HidDeviceFilter>& filters, GetApiDevicesCallback callback); - // Converts a list of device::mojom::HidDeviceInfo objects into a value that - // can be returned through the API. - base::Value::List GetApiDevicesFromList( - std::vector<device::mojom::HidDeviceInfoPtr> devices); - const device::mojom::HidDeviceInfo* GetDeviceInfo(int resource_id); void Connect(const std::string& device_guid, ConnectCallback callback);
diff --git a/extensions/browser/api/web_request/web_request_time_tracker.cc b/extensions/browser/api/web_request/web_request_time_tracker.cc index a39c6fc..f5523a9 100644 --- a/extensions/browser/api/web_request/web_request_time_tracker.cc +++ b/extensions/browser/api/web_request/web_request_time_tracker.cc
@@ -108,6 +108,8 @@ UMA_HISTOGRAM_PERCENTAGE("Extensions.NetworkDelayPercentage", percentage); } + constexpr int kBucketCount = 50; + // Record the time spent in listeners in onBeforeRequest. Only do this if // we have a time for both the dispatch and completion time (we may not, // if the request were canceled). @@ -125,11 +127,23 @@ "Extensions.WebRequest.BeforeRequestListenerEvaluationTime." "WebRequestOnly", listener_time); + UMA_HISTOGRAM_CUSTOM_MICROSECONDS_TIMES( + "Extensions.WebRequest." + "BeforeRequestListenerEvaluationTimeInMicroseconds" + "WebRequestOnly", + listener_time, base::Microseconds(1), base::Seconds(30), + kBucketCount); } else { // Both webRequest and DNR handlers. UMA_HISTOGRAM_TIMES( "Extensions.WebRequest.BeforeRequestListenerEvaluationTime." "WebRequestAndDeclarativeNetRequest", listener_time); + UMA_HISTOGRAM_CUSTOM_MICROSECONDS_TIMES( + "Extensions.WebRequest." + "BeforeRequestListenerEvaluationTimeInMicroseconds" + "WebRequestAndDeclarativeNetRequest", + listener_time, base::Microseconds(1), base::Seconds(30), + kBucketCount); } } @@ -139,14 +153,21 @@ // start time. (The inverse is not true, since we only log completion time // if there was at least one relevant action.) DCHECK(!log.before_request_dnr_start_time.is_null()); + + base::TimeDelta elapsed_time = log.before_request_dnr_completion_time - + log.before_request_dnr_start_time; + // DeclarativeNetRequest handlers also aren't really affected by webRequest // listeners, so no need to split up the time depending on whether there // were webRequest listeners. UMA_HISTOGRAM_TIMES( "Extensions.WebRequest." "BeforeRequestDeclarativeNetRequestEvaluationTime", - log.before_request_dnr_completion_time - - log.before_request_dnr_start_time); + elapsed_time); + UMA_HISTOGRAM_CUSTOM_MICROSECONDS_TIMES( + "Extensions.WebRequest." + "BeforeRequestDeclarativeNetRequestEvaluationTimeInMicroseconds", + elapsed_time, base::Microseconds(1), base::Seconds(30), kBucketCount); } }
diff --git a/extensions/browser/extension_function.cc b/extensions/browser/extension_function.cc index 9d94c36..7285dfe 100644 --- a/extensions/browser/extension_function.cc +++ b/extensions/browser/extension_function.cc
@@ -143,13 +143,6 @@ std::ignore = ExtensionFunctionMemoryDumpProvider::GetInstance(); } -// Adds Kiosk. prefix to uma histograms if running in a kiosk extension. -std::string WrapUma(const std::string& uma, bool is_kiosk_enabled) { - if (is_kiosk_enabled) - return uma + ".Kiosk"; - return uma; -} - // Logs UMA about the performance for a given extension function run. void LogUma(bool success, base::TimeDelta elapsed_time, @@ -196,10 +189,6 @@ histogram_value); } } - base::UmaHistogramTimes( - WrapUma("Extensions.Functions.FailedTotalExecutionTime", - is_kiosk_enabled), - elapsed_time); } } @@ -208,9 +197,10 @@ base::RecordAction(base::UserMetricsAction("BadMessageTerminate_EFD")); // Track the specific function's |histogram_value|, as this may indicate a // bug in that API's implementation. - base::UmaHistogramSparse( - WrapUma("Extensions.BadMessageFunctionName", is_kiosk_enabled), - histogram_value); + const char* histogram_name = is_kiosk_enabled + ? "Extensions.BadMessageFunctionName.Kiosk" + : "Extensions.BadMessageFunctionName"; + base::UmaHistogramSparse(histogram_name, histogram_value); } bool IsKiosk(const extensions::Extension* extension) {
diff --git a/extensions/browser/extension_function_histogram_value.h b/extensions/browser/extension_function_histogram_value.h index f565b82..871cdee74 100644 --- a/extensions/browser/extension_function_histogram_value.h +++ b/extensions/browser/extension_function_histogram_value.h
@@ -1115,7 +1115,7 @@ SERIAL_CLEARBREAK = 1054, DELETED_EXTENSIONVIEWINTERNAL_LOADSRC = 1055, // Obsolete: crbug.com/982858 DELETED_EXTENSIONVIEWINTERNAL_PARSESRC = 1056, // Obsolete: crbug.com/982858 - HID_GETUSERSELECTEDDEVICES = 1057, + DELETED_HID_GETUSERSELECTEDDEVICES = 1057, FILESYSTEMPROVIDERINTERNAL_GETACTIONSREQUESTEDSUCCESS = 1058, DASHBOARDPRIVATE_SHOWPERMISSIONPROMPTFORDELEGATEDBUNDLEINSTALL = 1059, FILEMANAGERPRIVATEINTERNAL_GETCUSTOMACTIONS = 1060,
diff --git a/extensions/browser/extension_registrar.cc b/extensions/browser/extension_registrar.cc index f6b8330..e2942916 100644 --- a/extensions/browser/extension_registrar.cc +++ b/extensions/browser/extension_registrar.cc
@@ -8,7 +8,6 @@ #include "base/callback_helpers.h" #include "base/check_op.h" #include "base/containers/contains.h" -#include "base/metrics/histogram_macros.h" #include "base/notreached.h" #include "build/chromeos_buildflags.h" #include "content/public/browser/browser_context.h" @@ -68,10 +67,6 @@ // Other than for unpacked extensions, we should not be downgrading. if (!Manifest::IsUnpackedLocation(extension->location()) && version_compare_result < 0) { - UMA_HISTOGRAM_ENUMERATION( - "Extensions.AttemptedToDowngradeVersionLocation", - extension->location()); - // TODO(https://crbug.com/810799): It would be awfully nice to CHECK this, // but that's caused problems. There are apparently times when this // happens that we aren't accounting for. We should track those down and
diff --git a/extensions/common/api/_api_features.json b/extensions/common/api/_api_features.json index 9567b15..5392f414 100644 --- a/extensions/common/api/_api_features.json +++ b/extensions/common/api/_api_features.json
@@ -260,11 +260,6 @@ "dependencies": ["permission:hid"], "contexts": ["blessed_extension"] }, - "hid.getUserSelectedDevices": { - "contexts": ["blessed_extension"], - "channel": "dev", - "dependencies": ["permission:hid"] - }, "i18n": { "channel": "stable", "extension_types": ["extension", "legacy_packaged_app", "platform_app",
diff --git a/extensions/common/api/hid.idl b/extensions/common/api/hid.idl index 2bccbec..dacff4d 100644 --- a/extensions/common/api/hid.idl +++ b/extensions/common/api/hid.idl
@@ -68,14 +68,6 @@ DeviceFilter[]? filters; }; - dictionary DevicePromptOptions { - // Allow the user to select multiple devices. - boolean? multiple; - // Filter the list of devices presented to the user. If multiple filters - // are provided devices matching any filter will be displayed. - DeviceFilter[]? filters; - }; - callback GetDevicesCallback = void (HidDeviceInfo[] devices); callback ConnectCallback = void (HidConnectInfo connection); callback DisconnectCallback = void (); @@ -96,17 +88,6 @@ static void getDevices(GetDevicesOptions options, GetDevicesCallback callback); - // Presents a device picker to the user and returns $(ref:HidDeviceInfo) - // objects for the devices selected. - // If the user cancels the picker devices will be empty. A user gesture - // is required for the dialog to display. Without a user gesture, the - // callback will run as though the user cancelled. If multiple filters are - // provided devices matching any filter will be displayed. - // |options|: Configuration of the device picker dialog box. - // |callback|: Invoked with a list of chosen $(ref:Device)s. - static void getUserSelectedDevices(optional DevicePromptOptions options, - GetDevicesCallback callback); - // Open a connection to an HID device for communication. // |deviceId|: The $(ref:HidDeviceInfo.deviceId) of the device to open. static void connect(long deviceId,
diff --git a/extensions/renderer/api/messaging/messaging_util.cc b/extensions/renderer/api/messaging/messaging_util.cc index 768b833..9f4fc6b 100644 --- a/extensions/renderer/api/messaging/messaging_util.cc +++ b/extensions/renderer/api/messaging/messaging_util.cc
@@ -7,7 +7,6 @@ #include <string> #include "base/check.h" -#include "base/metrics/histogram_macros.h" #include "base/notreached.h" #include "base/strings/stringprintf.h" #include "components/crx_file/id_util.h" @@ -59,14 +58,6 @@ size_t message_length = message.length(); - // Max bucket at 512 MB - anything over that, and we don't care. - static constexpr int kMaxUmaLength = 1024 * 1024 * 512; - static constexpr int kMinUmaLength = 1; - static constexpr int kBucketCount = 50; - UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions.Messaging.MessageSize", - message_length, kMinUmaLength, kMaxUmaLength, - kBucketCount); - // IPC messages will fail at > 128 MB. Restrict extension messages to 64 MB. // A 64 MB JSON-ifiable object is scary enough as is. static constexpr size_t kMaxMessageLength = 1024 * 1024 * 64;
diff --git a/extensions/renderer/native_extension_bindings_system.cc b/extensions/renderer/native_extension_bindings_system.cc index 4ae0777d..728f358 100644 --- a/extensions/renderer/native_extension_bindings_system.cc +++ b/extensions/renderer/native_extension_bindings_system.cc
@@ -9,10 +9,8 @@ #include "base/bind.h" #include "base/callback.h" #include "base/command_line.h" -#include "base/metrics/histogram_macros.h" #include "base/strings/string_piece.h" #include "base/strings/stringprintf.h" -#include "base/timer/elapsed_timer.h" #include "components/crx_file/id_util.h" #include "content/public/common/content_switches.h" #include "extensions/common/constants.h" @@ -740,7 +738,6 @@ CHECK( gin::Converter<std::string>::FromV8(isolate, api_name, &api_name_string)); - base::ElapsedTimer timer; v8::Local<v8::Object> root_binding = CreateFullBinding( context, script_context, &data->bindings_system->api_system_, FeatureProvider::GetAPIFeatures(), api_name_string); @@ -752,9 +749,6 @@ if (!success.IsJust() || !success.FromJust()) return v8::Local<v8::Object>(); - UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions.Bindings.NativeBindingCreationTime", - timer.Elapsed().InMicroseconds(), 1, 10000000, - 50); return root_binding; }
diff --git a/extensions/renderer/shared_l10n_map.cc b/extensions/renderer/shared_l10n_map.cc index 3636a92a..38d8bff 100644 --- a/extensions/renderer/shared_l10n_map.cc +++ b/extensions/renderer/shared_l10n_map.cc
@@ -4,7 +4,6 @@ #include "extensions/renderer/shared_l10n_map.h" -#include "base/metrics/histogram_macros.h" #include "base/no_destructor.h" #include "extensions/common/extension_messages.h" #include "ipc/ipc_sender.h" @@ -75,11 +74,8 @@ // A sync call to load message catalogs for current extension. // TODO(devlin): Wait, what?! A synchronous call to the browser to perform // potentially blocking work reading files from disk? That's Bad. - { - SCOPED_UMA_HISTOGRAM_TIMER("Extensions.SyncGetMessageBundle"); - message_sender->Send( - new ExtensionHostMsg_GetMessageBundle(extension_id, &l10n_messages)); - } + message_sender->Send( + new ExtensionHostMsg_GetMessageBundle(extension_id, &l10n_messages)); return &l10n_messages; }
diff --git a/extensions/test/data/api_test/hid/get_user_selected_devices/background.js b/extensions/test/data/api_test/hid/get_user_selected_devices/background.js deleted file mode 100644 index 5a10fa8..0000000 --- a/extensions/test/data/api_test/hid/get_user_selected_devices/background.js +++ /dev/null
@@ -1,35 +0,0 @@ -// Copyright 2015 The Chromium Authors -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -var device_from_user; - -chrome.test.runWithUserGesture(function() { - chrome.hid.getDevices({}, function(devices) { - chrome.test.assertNoLastError(); - chrome.test.assertEq(0, devices.length); - chrome.hid.getUserSelectedDevices({ multiple: false }, function(devices) { - chrome.test.assertNoLastError(); - chrome.test.assertEq(1, devices.length); - device_from_user = devices[0]; - chrome.test.assertEq(device_from_user.serialNumber, "A"); - chrome.hid.connect(device_from_user.deviceId, function(connection) { - chrome.test.assertNoLastError(); - chrome.hid.disconnect(connection.connectionId); - chrome.test.sendMessage("opened_device"); - }); - }); - }); -}); - -chrome.hid.onDeviceRemoved.addListener(function(deviceId) { - chrome.test.assertEq(device_from_user.deviceId, deviceId); - chrome.test.sendMessage("removed"); -}); - -chrome.hid.onDeviceAdded.addListener(function(device) { - chrome.test.assertTrue(device_from_user.deviceId != device.deviceId); - chrome.test.assertEq(device_from_user.vendorId, device.vendorId); - chrome.test.assertEq(device_from_user.productId, device.productId); - chrome.test.sendMessage("added"); -});
diff --git a/extensions/test/data/api_test/hid/get_user_selected_devices/manifest.json b/extensions/test/data/api_test/hid/get_user_selected_devices/manifest.json deleted file mode 100644 index 29816df..0000000 --- a/extensions/test/data/api_test/hid/get_user_selected_devices/manifest.json +++ /dev/null
@@ -1,14 +0,0 @@ -{ - "name": "chrome.hid.getUserSelectedDevices", - "manifest_version": 2, - "version": "0.1", - "description": "browser test for chrome.hid.getUserSelectedDevices", - "app": { - "background": { - "scripts": ["background.js"] - } - }, - "permissions": [ - "hid" - ] -}
diff --git a/ios/chrome/browser/flags/about_flags.mm b/ios/chrome/browser/flags/about_flags.mm index a277368..3c8e009 100644 --- a/ios/chrome/browser/flags/about_flags.mm +++ b/ios/chrome/browser/flags/about_flags.mm
@@ -402,23 +402,6 @@ }; #endif // BUILDFLAG(IOS_BACKGROUND_MODE_ENABLED) -const FeatureEntry::FeatureParam kFREDefaultBrowserPromoDefaultDelay[] = { - {kFREDefaultBrowserPromoParam, kFREDefaultBrowserPromoDefaultDelayParam}}; -const FeatureEntry::FeatureParam kFREDefaultBrowserPromoFirstRunOnly[] = { - {kFREDefaultBrowserPromoParam, kFREDefaultBrowserPromoFirstRunOnlyParam}}; -const FeatureEntry::FeatureParam kFREDefaultBrowserPromoShortDelay[] = { - {kFREDefaultBrowserPromoParam, kFREDefaultBrowserPromoShortDelayParam}}; -const FeatureEntry::FeatureVariation kFREDefaultBrowserPromoVariations[] = { - {"Wait 14 days after FRE default browser promo", - kFREDefaultBrowserPromoDefaultDelay, - std::size(kFREDefaultBrowserPromoDefaultDelay), nullptr}, - {"FRE default browser promo only", kFREDefaultBrowserPromoFirstRunOnly, - std::size(kFREDefaultBrowserPromoFirstRunOnly), nullptr}, - {"Wait 3 days after FRE default browser promo", - kFREDefaultBrowserPromoShortDelay, - std::size(kFREDefaultBrowserPromoShortDelay), nullptr}, -}; - const FeatureEntry::FeatureParam kTrendingQueriesEnableAllUsers[] = { {kTrendingQueriesHideShortcutsParam, "false"}}; const FeatureEntry::FeatureParam kTrendingQueriesEnableAllUsersHideShortcuts[] = @@ -852,13 +835,6 @@ FEATURE_WITH_PARAMS_VALUE_TYPE(kEnableDiscoverFeedTopSyncPromo, kDiscoverFeedTopSyncPromoVariations, "EnableDiscoverFeedTopSyncPromo")}, - {"enable-fre-default-browser-screen-testing", - flag_descriptions::kEnableFREDefaultBrowserPromoScreenName, - flag_descriptions::kEnableFREDefaultBrowserPromoScreenDescription, - flags_ui::kOsIos, - FEATURE_WITH_PARAMS_VALUE_TYPE(kEnableFREDefaultBrowserPromoScreen, - kFREDefaultBrowserPromoVariations, - kIOSMICeAndDefaultBrowserTrialName)}, {"shared-highlighting-amp", flag_descriptions::kIOSSharedHighlightingAmpName, flag_descriptions::kIOSSharedHighlightingAmpDescription, flags_ui::kOsIos,
diff --git a/ios/chrome/browser/flags/ios_chrome_flag_descriptions.cc b/ios/chrome/browser/flags/ios_chrome_flag_descriptions.cc index a062a9a..5b37058 100644 --- a/ios/chrome/browser/flags/ios_chrome_flag_descriptions.cc +++ b/ios/chrome/browser/flags/ios_chrome_flag_descriptions.cc
@@ -270,12 +270,6 @@ const char kEnableDiscoverFeedDiscoFeedEndpointDescription[] = "Enable using the discofeed endpoint for the discover feed."; -const char kEnableFREDefaultBrowserPromoScreenName[] = - "Enable FRE default browser screen"; -const char kEnableFREDefaultBrowserPromoScreenDescription[] = - "Display the FRE default browser screen and other default browser promo " - "depending on experiment."; - const char kEnableFeedAblationName[] = "Enables Feed Ablation"; const char kEnableFeedAblationDescription[] = "If Enabled the Feed will be removed from the NTP";
diff --git a/ios/chrome/browser/flags/ios_chrome_flag_descriptions.h b/ios/chrome/browser/flags/ios_chrome_flag_descriptions.h index 3ddbecdd..3f77c53 100644 --- a/ios/chrome/browser/flags/ios_chrome_flag_descriptions.h +++ b/ios/chrome/browser/flags/ios_chrome_flag_descriptions.h
@@ -256,11 +256,6 @@ extern const char kEnableFeedAblationName[]; extern const char kEnableFeedAblationDescription[]; -// Title and description for the flag to test the FRE default browser promo -// experiment. -extern const char kEnableFREDefaultBrowserPromoScreenName[]; -extern const char kEnableFREDefaultBrowserPromoScreenDescription[]; - // Title and description for the flag to enable the Fullscreen API. extern const char kEnableFullscreenAPIName[]; extern const char kEnableFullscreenAPIDescription[];
diff --git a/ios/chrome/browser/policy/user_policy_egtest.mm b/ios/chrome/browser/policy/user_policy_egtest.mm index a840037c..3421e36 100644 --- a/ios/chrome/browser/policy/user_policy_egtest.mm +++ b/ios/chrome/browser/policy/user_policy_egtest.mm
@@ -273,10 +273,10 @@ // Tests that the user policies are fetched when the user decides to "Continue" // in the notification dialog. - (void)testUserPolicyNotificationWithAcceptChoice { - // TODO(crbug.com/1386163): Failing on iphone 15.7 devices. - if (base::ios::IsRunningOnIOS15OrLater() && + // TODO(crbug.com/1386163): Flaky on iOS 14.5 simulators and 15.7 devices. + if (base::ios::IsRunningOnIOS14OrLater() && !base::ios::IsRunningOnIOS16OrLater()) { - EARL_GREY_TEST_DISABLED(@"Test disabled on iOS 15."); + EARL_GREY_TEST_DISABLED(@"Test disabled on iOS 14 and 15."); } // Clear the prefs related to user policy to make sure that the notification
diff --git a/ios/chrome/browser/ui/first_run/BUILD.gn b/ios/chrome/browser/ui/first_run/BUILD.gn index a8f0f37..cc58f6a 100644 --- a/ios/chrome/browser/ui/first_run/BUILD.gn +++ b/ios/chrome/browser/ui/first_run/BUILD.gn
@@ -126,7 +126,7 @@ ] deps = [ ":field_trial", - "//ios/chrome/browser/ui:feature_flags", + "//base", "//ios/chrome/browser/ui/screen:screen_provider", "//ios/chrome/browser/ui/screen:screen_provider_protected", "//ios/chrome/browser/ui/screen:screen_type",
diff --git a/ios/chrome/browser/ui/first_run/first_run_egtest.mm b/ios/chrome/browser/ui/first_run/first_run_egtest.mm index 8514886..058746d2 100644 --- a/ios/chrome/browser/ui/first_run/first_run_egtest.mm +++ b/ios/chrome/browser/ui/first_run/first_run_egtest.mm
@@ -89,6 +89,15 @@ grey_sufficientlyVisible(), nil); } +// Dismiss default browser promo. +void DismissDefaultBrowserPromo() { + id<GREYMatcher> buttonMatcher = grey_allOf( + grey_ancestor(grey_accessibilityID( + first_run::kFirstRunDefaultBrowserScreenAccessibilityIdentifier)), + GetNoThanksButton(), nil); + [[EarlGrey selectElementWithMatcher:buttonMatcher] performAction:grey_tap()]; +} + // Returns a constraint where the element is below the reference. GREYLayoutConstraint* BelowConstraint() { return [GREYLayoutConstraint @@ -126,8 +135,6 @@ - (AppLaunchConfiguration)appConfigurationForTestCase { AppLaunchConfiguration config; config.features_disabled.push_back(signin::kNewMobileIdentityConsistencyFRE); - config.features_disabled.push_back(kEnableFREDefaultBrowserPromoScreen); - // Show the First Run UI at startup. config.additional_args.push_back("-FirstRunForceEnabled"); config.additional_args.push_back("true"); @@ -140,11 +147,6 @@ #pragma mark - Helpers -// Remove when default browser screen will be fully enabled -- (BOOL)isDefaultBrowserTestDisabled { - return YES; -} - // Checks that the welcome screen is displayed. - (void)verifyWelcomeScreenIsDisplayed { [[EarlGrey selectElementWithMatcher: @@ -179,28 +181,6 @@ assertWithMatcher:grey_notNil()]; } -// Checks that none of any FRE's screen is displayed. -- (void)verifyFREIsDismissed { - [[EarlGrey selectElementWithMatcher: - grey_accessibilityID( - first_run::kFirstRunWelcomeScreenAccessibilityIdentifier)] - assertWithMatcher:grey_nil()]; - - [[EarlGrey - selectElementWithMatcher:grey_accessibilityID( - kSigninSyncScreenAccessibilityIdentifier)] - assertWithMatcher:grey_nil()]; - - [[EarlGrey - selectElementWithMatcher: - grey_accessibilityID( - first_run::kFirstRunDefaultBrowserScreenAccessibilityIdentifier)] - assertWithMatcher:grey_nil()]; - - [[EarlGrey selectElementWithMatcher:chrome_test_util::FakeOmnibox()] - assertWithMatcher:grey_sufficientlyVisible()]; -} - // Scrolls down to `elementMatcher` in the scrollable content of the first run // screen. - (void)scrollToElementAndAssertVisibility:(id<GREYMatcher>)elementMatcher { @@ -407,6 +387,7 @@ [self scrollToElementAndAssertVisibility:GetNoThanksButton()]; [[EarlGrey selectElementWithMatcher:GetNoThanksButton()] performAction:grey_tap()]; + DismissDefaultBrowserPromo(); // Add account for the identity switcher to be shown. [SigninEarlGrey addFakeIdentity:fakeIdentity]; @@ -422,12 +403,7 @@ } // Checks that the default browser screen is displayed correctly. -// TODO(crbug.com/1282248): Re-enable this test. -- (void)DISABLED_testDefaultBrowserScreenUI { - if ([self isDefaultBrowserTestDisabled]) { - return; - } - +- (void)testDefaultBrowserScreenUI { // Go to the default browser screen. [self verifyWelcomeScreenIsDisplayed]; [self scrollToElementAndAssertVisibility:GetAcceptButton()]; @@ -558,7 +534,7 @@ [self scrollToElementAndAssertVisibility:GetAcceptButton()]; [[EarlGrey selectElementWithMatcher:GetAcceptButton()] performAction:grey_tap()]; - [self verifyFREIsDismissed]; + [self verifyDefaultBrowserScreenIsDisplayed]; } // Checks that when opening the app no accounts are here and the primary button @@ -650,6 +626,7 @@ [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Verify that the sync cell is visible and "On" is displayed. 
+ DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; @@ -698,6 +675,7 @@ [SigninEarlGrey verifySignedInWithFakeIdentity:fakeSupervisedIdentity]; // Verify that the sync cell is visible and "On" is displayed. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; @@ -722,7 +700,7 @@ // Verify that the user is not signed in. [SigninEarlGrey verifySignedOut]; - + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; // Because the user is not signed in, the sync cell is not be visible. @@ -768,6 +746,7 @@ // Verify that the browser isn't signed in by validating that there isn't a // sync cell visible in settings. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIIsHidden]; @@ -828,6 +807,7 @@ [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Verify that the sync cell is visible and "Off" is displayed. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:NO]; @@ -877,6 +857,7 @@ GREYAssertTrue([FirstRunAppInterface isSyncFirstSetupComplete], @"Sync should start when turning on sync in FRE."); + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; [SigninEarlGrey verifySyncUIEnabled:YES]; @@ -949,6 +930,7 @@ GREYAssertTrue([FirstRunAppInterface isSyncFirstSetupComplete], @"Sync should start when turning on sync in FRE."); + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; @@ -1011,6 +993,7 @@ // Verify that the browser isn't signed in by validating that there isn't a // sync cell visible in settings. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIIsHidden]; @@ -1038,7 +1021,7 @@ performAction:grey_tap()]; // The Sync screen should not be displayed, so the NTP should be visible. - [self verifyFREIsDismissed]; + [self verifyDefaultBrowserScreenIsDisplayed]; } @end
diff --git a/ios/chrome/browser/ui/first_run/first_run_screen_provider.mm b/ios/chrome/browser/ui/first_run/first_run_screen_provider.mm index 64d9b3d..3ecd54df6 100644 --- a/ios/chrome/browser/ui/first_run/first_run_screen_provider.mm +++ b/ios/chrome/browser/ui/first_run/first_run_screen_provider.mm
@@ -8,7 +8,6 @@ #import "ios/chrome/browser/ui/first_run/fre_field_trial.h" #import "ios/chrome/browser/ui/screen/screen_provider+protected.h" #import "ios/chrome/browser/ui/screen/screen_type.h" -#import "ios/chrome/browser/ui/ui_feature_flags.h" #if !defined(__has_feature) || !__has_feature(objc_arc) #error "This file requires ARC support." @@ -38,12 +37,7 @@ [screens addObject:@(kSignInAndSync)]; break; } - - if (fre_field_trial::GetFREDefaultBrowserScreenPromoFRE() != - NewDefaultBrowserPromoFRE::kDisabled) { - [screens addObject:@(kDefaultBrowserPromo)]; - } - + [screens addObject:@(kDefaultBrowserPromo)]; [screens addObject:@(kStepsCompleted)]; return [super initWithScreens:screens]; }
diff --git a/ios/chrome/browser/ui/first_run/first_run_two_steps_egtest.mm b/ios/chrome/browser/ui/first_run/first_run_two_steps_egtest.mm index ed466ce..be870809 100644 --- a/ios/chrome/browser/ui/first_run/first_run_two_steps_egtest.mm +++ b/ios/chrome/browser/ui/first_run/first_run_two_steps_egtest.mm
@@ -85,6 +85,18 @@ grey_ancestor(disclaimer), nil); } +// Dismiss default browser promo. +void DismissDefaultBrowserPromo() { + id<GREYMatcher> buttonMatcher = grey_allOf( + grey_ancestor(grey_accessibilityID( + first_run::kFirstRunDefaultBrowserScreenAccessibilityIdentifier)), + grey_accessibilityLabel(l10n_util::GetNSString( + IDS_IOS_FIRST_RUN_DEFAULT_BROWSER_SCREEN_SECONDARY_ACTION)), + nil); + [[[EarlGrey selectElementWithMatcher:buttonMatcher] + assertWithMatcher:grey_notNil()] performAction:grey_tap()]; +} + } // namespace // Test first run stages @@ -135,8 +147,6 @@ std::string(signin::kNewMobileIdentityConsistencyFRE.name) + ".Test:" + std::string(kNewMobileIdentityConsistencyFREParam) + "/" + kNewMobileIdentityConsistencyFREParamTwoSteps); - // Disable default browser promo. - config.features_disabled.push_back(kEnableFREDefaultBrowserPromoScreen); // Show the First Run UI at startup. config.additional_args.push_back("-FirstRunForceEnabled"); config.additional_args.push_back("true"); @@ -355,6 +365,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; } @@ -386,6 +397,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; } @@ -417,6 +429,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is off. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:NO]; } @@ -477,6 +490,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [ChromeEarlGreyUI tapSettingsMenuButton:chrome_test_util::ManageSyncSettingsButton()]; @@ -538,6 +552,7 @@ kPromoStyleScrollViewAccessibilityIdentifier] performAction:grey_tap()]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; } @@ -594,6 +609,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; // Close settings. @@ -630,6 +646,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:NO]; // Close settings. @@ -660,6 +677,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:NO]; } @@ -716,6 +734,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; } @@ -824,6 +843,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeSupervisedIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; }
diff --git a/ios/chrome/browser/ui/first_run/first_run_uma_dialog_egtest.mm b/ios/chrome/browser/ui/first_run/first_run_uma_dialog_egtest.mm index 8b7923e..979a86ea 100644 --- a/ios/chrome/browser/ui/first_run/first_run_uma_dialog_egtest.mm +++ b/ios/chrome/browser/ui/first_run/first_run_uma_dialog_egtest.mm
@@ -75,6 +75,18 @@ onElementWithMatcher:scrollViewMatcher]; } +// Dismiss default browser promo. +void DismissDefaultBrowserPromo() { + id<GREYMatcher> buttonMatcher = grey_allOf( + grey_ancestor(grey_accessibilityID( + first_run::kFirstRunDefaultBrowserScreenAccessibilityIdentifier)), + grey_accessibilityLabel(l10n_util::GetNSString( + IDS_IOS_FIRST_RUN_DEFAULT_BROWSER_SCREEN_SECONDARY_ACTION)), + nil); + [[[EarlGrey selectElementWithMatcher:buttonMatcher] + assertWithMatcher:grey_notNil()] performAction:grey_tap()]; +} + } // namespace // Test first run with UMA dialog MICe FRE. Those tests are only related to the @@ -95,8 +107,6 @@ AppLaunchConfiguration config; config.additional_args.push_back(std::string("-") + test_switches::kSignInAtStartup); - // Disable default browser promo. - config.features_disabled.push_back(kEnableFREDefaultBrowserPromoScreen); // Show the First Run UI at startup. config.additional_args.push_back("-FirstRunForceEnabled"); config.additional_args.push_back("true"); @@ -304,6 +314,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; } @@ -330,6 +341,7 @@ // Check signed in. [SigninEarlGrey verifySignedInWithFakeIdentity:fakeIdentity]; // Check sync is on. + DismissDefaultBrowserPromo(); [ChromeEarlGreyUI openSettingsMenu]; [SigninEarlGrey verifySyncUIEnabled:YES]; }
diff --git a/ios/chrome/browser/ui/first_run/fre_field_trial.cc b/ios/chrome/browser/ui/first_run/fre_field_trial.cc index b3d732be..e068b69 100644 --- a/ios/chrome/browser/ui/first_run/fre_field_trial.cc +++ b/ios/chrome/browser/ui/first_run/fre_field_trial.cc
@@ -97,13 +97,6 @@ namespace fre_field_trial { -NewDefaultBrowserPromoFRE GetFREDefaultBrowserScreenPromoFRE() { - if (base::FeatureList::IsEnabled(kEnableFREDefaultBrowserPromoScreen)) { - return NewDefaultBrowserPromoFRE::kShortDelay; - } - return NewDefaultBrowserPromoFRE::kDisabled; -} - NewMobileIdentityConsistencyFRE GetNewMobileIdentityConsistencyFRE() { if (base::FeatureList::IsEnabled(signin::kNewMobileIdentityConsistencyFRE)) { return kkNewMobileIdentityConsistencyFREParam.Get();
diff --git a/ios/chrome/browser/ui/first_run/fre_field_trial.h b/ios/chrome/browser/ui/first_run/fre_field_trial.h index aa1f07bc..31dab046 100644 --- a/ios/chrome/browser/ui/first_run/fre_field_trial.h +++ b/ios/chrome/browser/ui/first_run/fre_field_trial.h
@@ -51,10 +51,6 @@ namespace fre_field_trial { -// Returns the FRE default browser promo setup according to the feature flag and -// experiment. See NewDefaultBrowserPromoFRE. -NewDefaultBrowserPromoFRE GetFREDefaultBrowserScreenPromoFRE(); - // Returns the FRE to display according to the feature flag and experiment. // See NewMobileIdentityConsistencyFRE. NewMobileIdentityConsistencyFRE GetNewMobileIdentityConsistencyFRE();
diff --git a/ios/chrome/browser/ui/ui_feature_flags.cc b/ios/chrome/browser/ui/ui_feature_flags.cc index 07a245c..efd950d5 100644 --- a/ios/chrome/browser/ui/ui_feature_flags.cc +++ b/ios/chrome/browser/ui/ui_feature_flags.cc
@@ -29,10 +29,6 @@ "SharedHighlightingIOS", base::FEATURE_ENABLED_BY_DEFAULT); -BASE_FEATURE(kEnableFREDefaultBrowserPromoScreen, - "EnableFREDefaultBrowserPromoScreen", - base::FEATURE_ENABLED_BY_DEFAULT); - // TODO(crbug.com/1128242): Remove this flag after the refactoring work is // finished. BASE_FEATURE(kModernTabStrip,
diff --git a/ios/chrome/browser/ui/ui_feature_flags.h b/ios/chrome/browser/ui/ui_feature_flags.h index 17146aa..e77a311 100644 --- a/ios/chrome/browser/ui/ui_feature_flags.h +++ b/ios/chrome/browser/ui/ui_feature_flags.h
@@ -33,11 +33,6 @@ // Feature flag to enable Shared Highlighting (Link to Text). BASE_DECLARE_FEATURE(kSharedHighlightingIOS); -// Feature flag for testing the 'default browser' screen in FRE and different -// experiments to suggest the users to update the default browser in the -// Settings.app. -BASE_DECLARE_FEATURE(kEnableFREDefaultBrowserPromoScreen); - // TODO(crbug.com/1128242): Remove this flag after the refactoring work is // finished. Flag to modernize the tabstrip without disturbing the existing one. BASE_DECLARE_FEATURE(kModernTabStrip);
diff --git a/mojo/public/tools/bindings/generators/mojom_java_generator.py b/mojo/public/tools/bindings/generators/mojom_java_generator.py index 701075d..33df852 100644 --- a/mojo/public/tools/bindings/generators/mojom_java_generator.py +++ b/mojo/public/tools/bindings/generators/mojom_java_generator.py
@@ -13,7 +13,7 @@ import sys import tempfile -from jinja2 import contextfilter +import jinja2 import mojom.fileutil as fileutil import mojom.generate.generator as generator @@ -199,7 +199,7 @@ return params -@contextfilter +@jinja2.pass_context def DecodeMethod(context, kind, offset, bit): def _DecodeMethodName(kind): if mojom.IsArrayKind(kind): @@ -221,7 +221,8 @@ params = AppendEncodeDecodeParams([ str(offset) ], context, kind, bit) return '%s(%s)' % (methodName, ', '.join(params)) -@contextfilter + +@jinja2.pass_context def EncodeMethod(context, kind, variable, offset, bit): params = AppendEncodeDecodeParams( [ variable, str(offset) ], context, kind, bit) @@ -250,7 +251,8 @@ elements += _GetNameHierachy(kind) return '.'.join(elements) -@contextfilter + +@jinja2.pass_context def GetJavaClassForEnum(context, kind): return GetNameForKind(context, kind) @@ -260,7 +262,8 @@ return _java_primitive_to_boxed_type[unboxed_type] return unboxed_type -@contextfilter + +@jinja2.pass_context def GetJavaType(context, kind, boxed=False, with_generics=True): if boxed: return GetBoxedJavaType(context, kind) @@ -292,7 +295,8 @@ return 'int' return _spec_to_java_type[kind.spec] -@contextfilter + +@jinja2.pass_context def DefaultValue(context, field): assert field.default if isinstance(field.kind, mojom.Struct): @@ -302,20 +306,23 @@ GetJavaType(context, field.kind), ExpressionToText(context, field.default, kind_spec=field.kind.spec)) -@contextfilter + +@jinja2.pass_context def ConstantValue(context, constant): return '(%s) %s' % ( GetJavaType(context, constant.kind), ExpressionToText(context, constant.value, kind_spec=constant.kind.spec)) -@contextfilter + +@jinja2.pass_context def NewArray(context, kind, size): if mojom.IsArrayKind(kind.kind): return NewArray(context, kind.kind, size) + '[]' return 'new %s[%s]' % ( GetJavaType(context, kind.kind, boxed=False, with_generics=False), size) -@contextfilter + +@jinja2.pass_context def ExpressionToText(context, token, kind_spec=''): def _TranslateNamedValue(named_value): entity_name = GetNameForElement(named_value)
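Note on the generator change above: Jinja2 3.x removes the contextfilter decorator and replaces it with jinja2.pass_context, which behaves the same way: the decorated filter receives the active render context as its first argument, ahead of the value being filtered. A minimal sketch of the new decorator (the shout filter and prefix variable are made-up illustrations, not part of the mojom generator):

    import jinja2

    # Minimal sketch, assuming Jinja2 >= 3.0; 'shout' and 'prefix' are
    # invented names, not anything from mojom_java_generator.py.
    @jinja2.pass_context
    def shout(context, value):
        # The render context comes in first, so the filter can read
        # template variables in addition to the piped-in value.
        prefix = context.get('prefix', '')
        return f"{prefix}{str(value).upper()}"

    env = jinja2.Environment()
    env.filters['shout'] = shout
    template = env.from_string("{{ 'hello' | shout }}")
    print(template.render(prefix=">> "))  # prints ">> HELLO"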
diff --git a/net/cookies/cookie_monster_unittest.cc b/net/cookies/cookie_monster_unittest.cc index 11c95e6..2f285d1 100644 --- a/net/cookies/cookie_monster_unittest.cc +++ b/net/cookies/cookie_monster_unittest.cc
@@ -3127,6 +3127,13 @@ CookieSameSite::NO_RESTRICTION, CookiePriority::COOKIE_PRIORITY_DEFAULT, false, CookiePartitionKey::FromURLForTesting(GURL("https://toplevelsite.com")))); + // Expired cookie, should not be stored. + list.push_back(*CanonicalCookie::CreateUnsafeCookieForTesting( + "expired", "foobar", https_www_foo_.url().host(), "/", + base::Time::Now() - base::Days(1), base::Time::Now() - base::Days(2), + base::Time(), base::Time(), /*secure=*/true, /*httponly=*/false, + CookieSameSite::NO_RESTRICTION, CookiePriority::COOKIE_PRIORITY_DEFAULT, + /*same_party=*/false)); // SetAllCookies must not flush. ASSERT_EQ(0, store->flush_count());
diff --git a/services/network/proxy_resolver_factory_mojo.cc b/services/network/proxy_resolver_factory_mojo.cc index 73b7263..23cb31e4 100644 --- a/services/network/proxy_resolver_factory_mojo.cc +++ b/services/network/proxy_resolver_factory_mojo.cc
@@ -108,9 +108,9 @@ mojo::PendingRemote<proxy_resolver::mojom::HostResolverRequestClient> client) override { if (operation == net::ProxyResolveDnsOperation::MY_IP_ADDRESS) { - my_ip_address_impl_ex_->AddRequest(std::move(client)); - } else if (operation == net::ProxyResolveDnsOperation::MY_IP_ADDRESS_EX) { my_ip_address_impl_->AddRequest(std::move(client)); + } else if (operation == net::ProxyResolveDnsOperation::MY_IP_ADDRESS_EX) { + my_ip_address_impl_ex_->AddRequest(std::move(client)); } else { bool is_ex = operation == net::ProxyResolveDnsOperation::DNS_RESOLVE_EX; // Request was for dnsResolve() or dnsResolveEx().
diff --git a/testing/scripts/run_finch_smoke_tests_android.py b/testing/scripts/run_finch_smoke_tests_android.py index ef386870..2d72ebe 100755 --- a/testing/scripts/run_finch_smoke_tests_android.py +++ b/testing/scripts/run_finch_smoke_tests_android.py
@@ -56,8 +56,9 @@ import aw_variations_seed_pb2 import devil_chromium -import wpt_common +from blinkpy.common.host import Host +from blinkpy.common.path_finder import PathFinder from blinkpy.web_tests.models import test_failures from blinkpy.web_tests.port.android import ( ANDROID_WEBLAYER, ANDROID_WEBVIEW, CHROME_ANDROID) @@ -114,10 +115,17 @@ # pylint: disable=super-with-arguments, abstract-method -class FinchTestCase(wpt_common.BaseWptScriptAdapter): +class FinchTestCase(common.BaseIsolatedScriptArgsAdapter): def __init__(self, device): + self.host = Host() + self.fs = self.host.filesystem + self.path_finder = PathFinder(self.fs) + self.port = self.host.port_factory.get() super(FinchTestCase, self).__init__() + self._parser = self._override_options(self._parser) + self._include_filename = None + self.layout_test_results_subdir = 'layout-test-results' self._device = device self.parse_args() self._browser_apk_helper = apk_helper.ToHelper(self.options.browser_apk) @@ -169,6 +177,89 @@ def finch_seed_download_args(cls): return [] + def generate_test_output_args(self, output): + return ['--log-chromium=%s' % output] + + def generate_test_filter_args(self, test_filter_str): + included_tests, excluded_tests = \ + self._resolve_tests_from_isolate_filter(test_filter_str) + include_file, self._include_filename = self.fs.open_text_tempfile() + with include_file: + for test in included_tests: + include_file.write(test) + include_file.write('\n') + wpt_args = ['--include-file=%s' % self._include_filename] + for test in excluded_tests: + wpt_args.append('--exclude=%s' % test) + return wpt_args + + def _override_options(self, base_parser): + """Create a parser that overrides existing options. + + `argument.ArgumentParser` can extend other parsers and override their + options, with the caveat that the child parser only inherits options + that the parent had at the time of the child's initialization. There is + not a clean way to add option overrides in `add_extra_arguments`, where + the provided parser is only passed up the inheritance chain, so we add + overridden options here at the very end. + + See Also: + https://docs.python.org/3/library/argparse.html#parents + """ + parser = argparse.ArgumentParser( + parents=[base_parser], + # Allow overriding existing options in the parent parser. + conflict_handler='resolve', + epilog=('All unrecognized arguments are passed through ' + "to wptrunner. Use '--wpt-help' to see wptrunner's usage."), + ) + parser.add_argument( + '--isolated-script-test-repeat', + '--repeat', + '--gtest_repeat', + metavar='REPEAT', + type=int, + default=1, + help='Number of times to run the tests') + parser.add_argument( + '--isolated-script-test-launcher-retry-limit', + '--test-launcher-retry-limit', + '--retry-unexpected', + metavar='RETRIES', + type=int, + help=( + 'Maximum number of times to rerun unexpectedly failed tests. ' + 'Defaults to 3 unless given an explicit list of tests to run.')) + # `--gtest_filter` and `--isolated-script-test-filter` have slightly + # different formats and behavior, so keep them as separate options. + # See: crbug/1316164#c4 + + # TODO(crbug.com/1356318): This is a temporary hack to hide the + # inherited '--xvfb' option and force Xvfb to run always. 
+ parser.add_argument('--xvfb', action='store_true', default=True, + help=argparse.SUPPRESS) + return parser + + def generate_test_repeat_args(self, repeat_count): + return ['--repeat=%d' % repeat_count] + + def generate_test_launcher_retry_limit_args(self, retry_limit): + return ['--retry-unexpected=%d' % retry_limit] + + def generate_sharding_args(self, total_shards, shard_index): + return ['--total-chunks=%d' % total_shards, + # shard_index is 0-based but WPT's this-chunk to be 1-based + '--this-chunk=%d' % (shard_index + 1), + # The default sharding strategy is to shard by directory. But + # we want to hash each test to determine which shard runs it. + # This allows running individual directories that have few + # tests across many shards. + '--chunk-type=hash'] + + def clean_up_after_test_run(self): + if self._include_filename: + self.fs.remove(self._include_filename) + def new_seed_downloaded(self): # TODO(crbug.com/1285152): Implement seed download test # for Chrome and WebLayer. @@ -208,6 +299,19 @@ 'Running tests on the default finch seed') self.options.finch_seed_path = self.default_finch_seed_path + @property + def output_directory(self): + return self.path_finder.path_from_chromium_base('out', + self.options.target) + + @property + def mojo_js_directory(self): + return self.fs.join(self.output_directory, 'gen') + + @property + def wpt_output(self): + return self.options.isolated_script_test_output + def __enter__(self): self._device.EnableRoot() # Run below commands to ensure that the device can download a seed @@ -227,7 +331,11 @@ @property def rest_args(self): - rest_args = super(FinchTestCase, self).rest_args + unknown_args = super(FinchTestCase, self).rest_args + + rest_args = list() + + rest_args.extend(self.wpt_rest_args(unknown_args)) rest_args.extend([ '--webdriver-arg=--disable-build-check', @@ -251,6 +359,91 @@ return rest_args + @property + def wpt_binary(self): + default_wpt_binary = os.path.join( + common.SRC_DIR, "third_party", "wpt_tools", "wpt", "wpt") + return os.environ.get("WPT_BINARY", default_wpt_binary) + + @property + def wpt_root_dir(self): + return self.path_finder.path_from_web_tests( + self.path_finder.wpt_prefix()) + + @property + def _wpt_run_args(self): + """The start of a 'wpt run' command.""" + return [ + self.wpt_binary, + # Use virtualenv packages installed by vpython, not wpt. 
+ '--venv=%s' % self.path_finder.chromium_base(), + '--skip-venv-setup', + 'run', + ] + + def process_and_upload_results(self): + command = [ + self.select_python_executable(), + os.path.join(BLINK_TOOLS, 'wpt_process_results.py'), + '--target', + self.options.target, + '--web-tests-dir', + BLINK_WEB_TESTS, + '--artifacts-dir', + os.path.join(os.path.dirname(self.wpt_output), + self.layout_test_results_subdir), + '--wpt-results', + self.wpt_output, + ] + if self.options.verbose: + command.append('--verbose') + + return common.run_command(command) + + def wpt_rest_args(self, unknown_args): + rest_args = list(self._wpt_run_args) + rest_args.extend([ + '--no-pause-after-test', + '--no-capture-stdio', + '--no-manifest-download', + '--tests=%s' % self.wpt_root_dir, + '--metadata=%s' % self.wpt_root_dir, + '--mojojs-path=%s' % self.mojo_js_directory, + ]) + + if self.options.default_exclude: + rest_args.extend(['--default-exclude']) + + if self.options.verbose >= 3: + rest_args.extend([ + '--log-mach=-', + '--log-mach-level=debug', + '--log-mach-verbose', + ]) + if self.options.verbose >= 4: + rest_args.extend([ + '--webdriver-arg=--verbose', + '--webdriver-arg="--log-path=-"', + ]) + + rest_args.append(self.wpt_product_name()) + # We pass through unknown args as late as possible so that they can + # override earlier options. It also allows users to pass test names as + # positional args, which must not have option strings between them. + for unknown_arg in unknown_args: + # crbug/1274933#c14: Some developers had used the end-of-options + # marker '--' to pass through arguments to wptrunner. + # crrev.com/c/3573284 makes this no longer necessary. + if unknown_arg == '--': + logger.warning( + 'Unrecognized options will automatically fall through ' + 'to wptrunner.') + logger.warning( + "There is no need to use the end-of-options marker '--'.") + else: + rest_args.append(unknown_arg) + return rest_args + @classmethod def add_common_arguments(cls, parser): parser.add_argument('--test-case', @@ -299,6 +492,22 @@ def add_extra_arguments(self, parser): super(FinchTestCase, self).add_extra_arguments(parser) + parser.add_argument( + '-t', + '--target', + default='Release', + help='Target build subdirectory under //out') + parser.add_argument( + '--default-exclude', + action='store_true', + help=('Only run the tests explicitly given in arguments ' + '(can run no tests, which will exit with code 0)')) + parser.add_argument( + '-v', + '--verbose', + action='count', + default=0, + help='Increase verbosity') self.add_product_specific_argument_groups(parser) self.add_common_arguments(parser)
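Note on the _override_options docstring above: the pattern it describes is plain argparse: a child parser built with parents=[base_parser] and conflict_handler='resolve' may re-declare options the parent already defined, and the new definition wins. A standalone sketch under those assumptions (the option names below are illustrative, not the script's real flags):

    import argparse

    # Stand-in for the options inherited from
    # common.BaseIsolatedScriptArgsAdapter (names are illustrative).
    base_parser = argparse.ArgumentParser(add_help=False)
    base_parser.add_argument('--repeat', type=int, default=1,
                             help='inherited definition')

    # conflict_handler='resolve' lets the child re-declare an option that
    # the parent already defined; the child's definition takes effect.
    parser = argparse.ArgumentParser(parents=[base_parser],
                                     conflict_handler='resolve')
    parser.add_argument('--repeat', '--gtest_repeat', type=int, default=3,
                        help='overriding definition with an extra alias')

    args = parser.parse_args(['--gtest_repeat', '5'])
    print(args.repeat)  # prints 5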
diff --git a/third_party/blink/web_tests/TestExpectations b/third_party/blink/web_tests/TestExpectations index 6bc903a2..10c73f81 100644 --- a/third_party/blink/web_tests/TestExpectations +++ b/third_party/blink/web_tests/TestExpectations
@@ -6906,3 +6906,6 @@ # TODO(crbug.com/1403318): Re-enable this test virtual/scalefactor200/external/wpt/element-timing/image-src-change.html [ Failure Skip ] + +# TODO(crbug.com/1403877): Deflake this test. +virtual/portals/http/tests/devtools/portals/portals-elements-nesting.js [ Failure Pass ]
diff --git a/third_party/jinja2/Jinja2-2.11.3.tar.gz.md5 b/third_party/jinja2/Jinja2-2.11.3.tar.gz.md5 deleted file mode 100644 index 064a628..0000000 --- a/third_party/jinja2/Jinja2-2.11.3.tar.gz.md5 +++ /dev/null
@@ -1 +0,0 @@ -231dc00d34afb2672c497713fa9cdaaa Jinja2-2.11.3.tar.gz
diff --git a/third_party/jinja2/Jinja2-2.11.3.tar.gz.sha512 b/third_party/jinja2/Jinja2-2.11.3.tar.gz.sha512 deleted file mode 100644 index 26f0717..0000000 --- a/third_party/jinja2/Jinja2-2.11.3.tar.gz.sha512 +++ /dev/null
@@ -1 +0,0 @@ -fce4f835795fe9afb622f8106f60344032a811f3f693806f31ba482f9b7c1400f93dfa1701b4db0b472cbed4b0793cb329778c8091811ef0e3b577150d28e004 Jinja2-2.11.3.tar.gz
diff --git a/third_party/jinja2/README.chromium b/third_party/jinja2/README.chromium index df828d66..079ce0c 100644 --- a/third_party/jinja2/README.chromium +++ b/third_party/jinja2/README.chromium
@@ -1,8 +1,7 @@ Name: Jinja2 Python Template Engine Short Name: jinja2 URL: https://jinja.palletsprojects.com/ -Version: 2.11.3 -CPEPrefix: cpe:/a:pocoo:jinja2:2.11.3 +Version: 3.1.2 License: BSD 3-Clause License File: LICENSE.rst Security Critical: no @@ -10,9 +9,8 @@ Description: Template engine for code generation in Blink. -Source: https://files.pythonhosted.org/packages/4f/e7/65300e6b32e69768ded990494809106f87da1d436418d5f1367ed3966fd7/Jinja2-2.11.3.tar.gz -MD5: 231dc00d34afb2672c497713fa9cdaaa -SHA-512: fce4f835795fe9afb622f8106f60344032a811f3f693806f31ba482f9b7c1400f93dfa1701b4db0b472cbed4b0793cb329778c8091811ef0e3b577150d28e004 +Source: https://files.pythonhosted.org/packages/7a/ff/75c28576a1d900e87eb6335b063fab47a8ef3c8b4d88524c4bf78f670cce/Jinja2-3.1.2.tar.gz +SHA-256: 31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 Local Modifications: This only includes the src/jinja2/ directory from the tarball and the @@ -21,5 +19,3 @@ * README.chromium (this file) * OWNERS * jinja2.gni -* files of hashes (MD5 is also posted on website, SHA-512 computed locally). -* patches/*.patch for local modifications.
diff --git a/third_party/jinja2/README.rst b/third_party/jinja2/README.rst index 060b19e..a197aea 100644 --- a/third_party/jinja2/README.rst +++ b/third_party/jinja2/README.rst
@@ -35,7 +35,7 @@ $ pip install -U Jinja2 -.. _pip: https://pip.pypa.io/en/stable/quickstart/ +.. _pip: https://pip.pypa.io/en/stable/getting-started/ In A Nutshell @@ -54,13 +54,25 @@ {% endblock %} +Donate +------ + +The Pallets organization develops and supports Jinja and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + Links ----- -- Website: https://palletsprojects.com/p/jinja/ - Documentation: https://jinja.palletsprojects.com/ -- Releases: https://pypi.org/project/Jinja2/ -- Code: https://github.com/pallets/jinja -- Issue tracker: https://github.com/pallets/jinja/issues -- Test status: https://dev.azure.com/pallets/jinja/_build -- Official chat: https://discord.gg/t6rrQZH +- Changes: https://jinja.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Jinja2/ +- Source Code: https://github.com/pallets/jinja/ +- Issue Tracker: https://github.com/pallets/jinja/issues/ +- Website: https://palletsprojects.com/p/jinja/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets
diff --git a/third_party/jinja2/__init__.py b/third_party/jinja2/__init__.py index f17866f..e323926 100644 --- a/third_party/jinja2/__init__.py +++ b/third_party/jinja2/__init__.py
@@ -1,44 +1,37 @@ -# -*- coding: utf-8 -*- """Jinja is a template engine written in pure Python. It provides a non-XML syntax that supports inline expressions and an optional sandboxed environment. """ -from markupsafe import escape -from markupsafe import Markup +from .bccache import BytecodeCache as BytecodeCache +from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache +from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache +from .environment import Environment as Environment +from .environment import Template as Template +from .exceptions import TemplateAssertionError as TemplateAssertionError +from .exceptions import TemplateError as TemplateError +from .exceptions import TemplateNotFound as TemplateNotFound +from .exceptions import TemplateRuntimeError as TemplateRuntimeError +from .exceptions import TemplatesNotFound as TemplatesNotFound +from .exceptions import TemplateSyntaxError as TemplateSyntaxError +from .exceptions import UndefinedError as UndefinedError +from .loaders import BaseLoader as BaseLoader +from .loaders import ChoiceLoader as ChoiceLoader +from .loaders import DictLoader as DictLoader +from .loaders import FileSystemLoader as FileSystemLoader +from .loaders import FunctionLoader as FunctionLoader +from .loaders import ModuleLoader as ModuleLoader +from .loaders import PackageLoader as PackageLoader +from .loaders import PrefixLoader as PrefixLoader +from .runtime import ChainableUndefined as ChainableUndefined +from .runtime import DebugUndefined as DebugUndefined +from .runtime import make_logging_undefined as make_logging_undefined +from .runtime import StrictUndefined as StrictUndefined +from .runtime import Undefined as Undefined +from .utils import clear_caches as clear_caches +from .utils import is_undefined as is_undefined +from .utils import pass_context as pass_context +from .utils import pass_environment as pass_environment +from .utils import pass_eval_context as pass_eval_context +from .utils import select_autoescape as select_autoescape -from .bccache import BytecodeCache -from .bccache import FileSystemBytecodeCache -from .bccache import MemcachedBytecodeCache -from .environment import Environment -from .environment import Template -from .exceptions import TemplateAssertionError -from .exceptions import TemplateError -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .filters import contextfilter -from .filters import environmentfilter -from .filters import evalcontextfilter -from .loaders import BaseLoader -from .loaders import ChoiceLoader -from .loaders import DictLoader -from .loaders import FileSystemLoader -from .loaders import FunctionLoader -from .loaders import ModuleLoader -from .loaders import PackageLoader -from .loaders import PrefixLoader -from .runtime import ChainableUndefined -from .runtime import DebugUndefined -from .runtime import make_logging_undefined -from .runtime import StrictUndefined -from .runtime import Undefined -from .utils import clear_caches -from .utils import contextfunction -from .utils import environmentfunction -from .utils import evalcontextfunction -from .utils import is_undefined -from .utils import select_autoescape - -__version__ = "2.11.3" +__version__ = "3.1.2"
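Note on the jinja2/__init__.py changes above: for downstream callers, the roll amounts to a small rename of the decorator exports plus the loss of the markupsafe re-exports. A sketch of the mapping as I understand the upstream rename (not something this CL adds):

    # Jinja2 2.x decorator            -> Jinja2 3.x replacement
    #   jinja2.contextfilter          -> jinja2.pass_context
    #   jinja2.contextfunction        -> jinja2.pass_context
    #   jinja2.evalcontextfilter      -> jinja2.pass_eval_context
    #   jinja2.evalcontextfunction    -> jinja2.pass_eval_context
    #   jinja2.environmentfilter      -> jinja2.pass_environment
    #   jinja2.environmentfunction    -> jinja2.pass_environment
    from jinja2 import pass_context, pass_environment, pass_eval_context

    # Markup and escape are no longer re-exported from the jinja2 package;
    # import them from markupsafe directly.
    from markupsafe import Markup, escape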
diff --git a/third_party/jinja2/_compat.py b/third_party/jinja2/_compat.py deleted file mode 100644 index 1f04495..0000000 --- a/third_party/jinja2/_compat.py +++ /dev/null
@@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# flake8: noqa -import marshal -import sys - -PY2 = sys.version_info[0] == 2 -PYPY = hasattr(sys, "pypy_translation_info") -_identity = lambda x: x - -if not PY2: - unichr = chr - range_type = range - text_type = str - string_types = (str,) - integer_types = (int,) - - iterkeys = lambda d: iter(d.keys()) - itervalues = lambda d: iter(d.values()) - iteritems = lambda d: iter(d.items()) - - import pickle - from io import BytesIO, StringIO - - NativeStringIO = StringIO - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - ifilter = filter - imap = map - izip = zip - intern = sys.intern - - implements_iterator = _identity - implements_to_string = _identity - encode_filename = _identity - - marshal_dump = marshal.dump - marshal_load = marshal.load - -else: - unichr = unichr - text_type = unicode - range_type = xrange - string_types = (str, unicode) - integer_types = (int, long) - - iterkeys = lambda d: d.iterkeys() - itervalues = lambda d: d.itervalues() - iteritems = lambda d: d.iteritems() - - import cPickle as pickle - from cStringIO import StringIO as BytesIO, StringIO - - NativeStringIO = BytesIO - - exec("def reraise(tp, value, tb=None):\n raise tp, value, tb") - - from itertools import imap, izip, ifilter - - intern = intern - - def implements_iterator(cls): - cls.next = cls.__next__ - del cls.__next__ - return cls - - def implements_to_string(cls): - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode("utf-8") - return cls - - def encode_filename(filename): - if isinstance(filename, unicode): - return filename.encode("utf-8") - return filename - - def marshal_dump(code, f): - if isinstance(f, file): - marshal.dump(code, f) - else: - f.write(marshal.dumps(code)) - - def marshal_load(f): - if isinstance(f, file): - return marshal.load(f) - return marshal.loads(f.read()) - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a - # dummy metaclass for one level of class instantiation that replaces - # itself with the actual metaclass. - class metaclass(type): - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - - return type.__new__(metaclass, "temporary_class", (), {}) - - -try: - from urllib.parse import quote_from_bytes as url_quote -except ImportError: - from urllib import quote as url_quote - - -try: - from collections import abc -except ImportError: - import collections as abc - - -try: - from os import fspath -except ImportError: - try: - from pathlib import PurePath - except ImportError: - PurePath = None - - def fspath(path): - if hasattr(path, "__fspath__"): - return path.__fspath__() - - # Python 3.5 doesn't have __fspath__ yet, use str. - if PurePath is not None and isinstance(path, PurePath): - return str(path) - - return path
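_compat.py only papered over Python 2/3 differences, so deleting it has no functional effect on Python 3: every name it exported maps onto a builtin or a stdlib import. A rough, hand-written replacement table for any local code that still imports these names (hypothetical caller, nothing in this CL needs it):

import pickle              # was jinja2._compat.pickle (cPickle on Python 2)
import sys
from io import BytesIO, StringIO  # was _compat.BytesIO / NativeStringIO

text_type = str            # was _compat.text_type
string_types = (str,)      # was _compat.string_types
integer_types = (int,)     # was _compat.integer_types
range_type = range         # was _compat.range_type
intern = sys.intern        # was _compat.intern

def iteritems(d):          # was _compat.iteritems; plain d.items() suffices
    return iter(d.items())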
diff --git a/third_party/jinja2/_identifier.py b/third_party/jinja2/_identifier.py index 224d5449..928c1503 100644 --- a/third_party/jinja2/_identifier.py +++ b/third_party/jinja2/_identifier.py
@@ -2,5 +2,5 @@ # generated by scripts/generate_identifier_pattern.py pattern = re.compile( - r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 + r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 )
diff --git a/third_party/jinja2/async_utils.py b/third_party/jinja2/async_utils.py new file mode 100644 index 0000000..1a4f3892 --- /dev/null +++ b/third_party/jinja2/async_utils.py
@@ -0,0 +1,84 @@ +import inspect +import typing as t +from functools import WRAPPER_ASSIGNMENTS +from functools import wraps + +from .utils import _PassArg +from .utils import pass_eval_context + +V = t.TypeVar("V") + + +def async_variant(normal_func): # type: ignore + def decorator(async_func): # type: ignore + pass_arg = _PassArg.from_obj(normal_func) + need_eval_context = pass_arg is None + + if pass_arg is _PassArg.environment: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].is_async) + + else: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].environment.is_async) + + # Take the doc and annotations from the sync function, but the + # name from the async function. Pallets-Sphinx-Themes + # build_function_directive expects __wrapped__ to point to the + # sync function. + async_func_attrs = ("__module__", "__name__", "__qualname__") + normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) + + @wraps(normal_func, assigned=normal_func_attrs) + @wraps(async_func, assigned=async_func_attrs, updated=()) + def wrapper(*args, **kwargs): # type: ignore + b = is_async(args) + + if need_eval_context: + args = args[1:] + + if b: + return async_func(*args, **kwargs) + + return normal_func(*args, **kwargs) + + if need_eval_context: + wrapper = pass_eval_context(wrapper) + + wrapper.jinja_async_variant = True + return wrapper + + return decorator + + +_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} + + +async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": + # Avoid a costly call to isawaitable + if type(value) in _common_primitives: + return t.cast("V", value) + + if inspect.isawaitable(value): + return await t.cast("t.Awaitable[V]", value) + + return t.cast("V", value) + + +async def auto_aiter( + iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> "t.AsyncIterator[V]": + if hasattr(iterable, "__aiter__"): + async for item in t.cast("t.AsyncIterable[V]", iterable): + yield item + else: + for item in t.cast("t.Iterable[V]", iterable): + yield item + + +async def auto_to_list( + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> t.List["V"]: + return [x async for x in auto_aiter(value)]
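async_utils.py is new and is what the 3.x code base uses in place of the deleted asyncsupport/asyncfilters modules that follow. A small standalone check of the auto_aiter/auto_to_list helpers (assumes only what the file above defines):

import asyncio

from jinja2.async_utils import auto_aiter, auto_to_list


async def agen():
    # A real async iterable; auto_aiter passes it through unchanged.
    for i in range(3):
        yield i


async def main():
    # auto_to_list drains either kind of iterable into a plain list.
    print(await auto_to_list(agen()))     # [0, 1, 2]
    print(await auto_to_list([3, 4, 5]))  # [3, 4, 5]

    # auto_aiter lets sync iterables be consumed with `async for`.
    print([x async for x in auto_aiter("ab")])  # ['a', 'b']


asyncio.run(main())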
diff --git a/third_party/jinja2/asyncfilters.py b/third_party/jinja2/asyncfilters.py deleted file mode 100644 index 3d98dbc..0000000 --- a/third_party/jinja2/asyncfilters.py +++ /dev/null
@@ -1,158 +0,0 @@ -from functools import wraps - -from . import filters -from .asyncsupport import auto_aiter -from .asyncsupport import auto_await - - -async def auto_to_seq(value): - seq = [] - if hasattr(value, "__aiter__"): - async for item in value: - seq.append(item) - else: - for item in value: - seq.append(item) - return seq - - -async def async_select_or_reject(args, kwargs, modfunc, lookup_attr): - seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr) - if seq: - async for item in auto_aiter(seq): - if func(item): - yield item - - -def dualfilter(normal_filter, async_filter): - wrap_evalctx = False - if getattr(normal_filter, "environmentfilter", False) is True: - - def is_async(args): - return args[0].is_async - - wrap_evalctx = False - else: - has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True - has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True - wrap_evalctx = not has_evalctxfilter and not has_ctxfilter - - def is_async(args): - return args[0].environment.is_async - - @wraps(normal_filter) - def wrapper(*args, **kwargs): - b = is_async(args) - if wrap_evalctx: - args = args[1:] - if b: - return async_filter(*args, **kwargs) - return normal_filter(*args, **kwargs) - - if wrap_evalctx: - wrapper.evalcontextfilter = True - - wrapper.asyncfiltervariant = True - - return wrapper - - -def asyncfiltervariant(original): - def decorator(f): - return dualfilter(original, f) - - return decorator - - -@asyncfiltervariant(filters.do_first) -async def do_first(environment, seq): - try: - return await auto_aiter(seq).__anext__() - except StopAsyncIteration: - return environment.undefined("No first item, sequence was empty.") - - -@asyncfiltervariant(filters.do_groupby) -async def do_groupby(environment, value, attribute): - expr = filters.make_attrgetter(environment, attribute) - return [ - filters._GroupTuple(key, await auto_to_seq(values)) - for key, values in filters.groupby( - sorted(await auto_to_seq(value), key=expr), expr - ) - ] - - -@asyncfiltervariant(filters.do_join) -async def do_join(eval_ctx, value, d=u"", attribute=None): - return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute) - - -@asyncfiltervariant(filters.do_list) -async def do_list(value): - return await auto_to_seq(value) - - -@asyncfiltervariant(filters.do_reject) -async def do_reject(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: not x, False) - - -@asyncfiltervariant(filters.do_rejectattr) -async def do_rejectattr(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: not x, True) - - -@asyncfiltervariant(filters.do_select) -async def do_select(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: x, False) - - -@asyncfiltervariant(filters.do_selectattr) -async def do_selectattr(*args, **kwargs): - return async_select_or_reject(args, kwargs, lambda x: x, True) - - -@asyncfiltervariant(filters.do_map) -async def do_map(*args, **kwargs): - seq, func = filters.prepare_map(args, kwargs) - if seq: - async for item in auto_aiter(seq): - yield await auto_await(func(item)) - - -@asyncfiltervariant(filters.do_sum) -async def do_sum(environment, iterable, attribute=None, start=0): - rv = start - if attribute is not None: - func = filters.make_attrgetter(environment, attribute) - else: - - def func(x): - return x - - async for item in auto_aiter(iterable): - rv += func(item) - return rv - - -@asyncfiltervariant(filters.do_slice) -async def do_slice(value, slices, 
fill_with=None): - return filters.do_slice(await auto_to_seq(value), slices, fill_with) - - -ASYNC_FILTERS = { - "first": do_first, - "groupby": do_groupby, - "join": do_join, - "list": do_list, - # we intentionally do not support do_last because that would be - # ridiculous - "reject": do_reject, - "rejectattr": do_rejectattr, - "map": do_map, - "select": do_select, - "selectattr": do_selectattr, - "sum": do_sum, - "slice": do_slice, -}
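The dualfilter/asyncfiltervariant machinery removed here is superseded by async_variant from async_utils: the synchronous filter is written once and the async wrapper is attached to it, instead of a parallel ASYNC_FILTERS dict being patched into FILTERS at import time. A sketch of the new pattern with a custom filter (join_words is illustrative, not library code):

from jinja2 import Environment
from jinja2.async_utils import async_variant, auto_to_list


def sync_join_words(seq):
    # Synchronous implementation, written once.
    return " ".join(str(x) for x in seq)


@async_variant(sync_join_words)  # type: ignore
async def join_words(seq):
    # Async variant, chosen automatically when environment.is_async is set;
    # auto_to_list also accepts async iterables.
    return sync_join_words(await auto_to_list(seq))


env = Environment()
env.filters["join_words"] = join_words
print(env.from_string("{{ ['a', 'b'] | join_words }}").render())  # a b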
diff --git a/third_party/jinja2/asyncsupport.py b/third_party/jinja2/asyncsupport.py deleted file mode 100644 index 78ba3739..0000000 --- a/third_party/jinja2/asyncsupport.py +++ /dev/null
@@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -"""The code for async support. Importing this patches Jinja on supported -Python versions. -""" -import asyncio -import inspect -from functools import update_wrapper - -from markupsafe import Markup - -from .environment import TemplateModule -from .runtime import LoopContext -from .utils import concat -from .utils import internalcode -from .utils import missing - - -async def concat_async(async_gen): - rv = [] - - async def collect(): - async for event in async_gen: - rv.append(event) - - await collect() - return concat(rv) - - -async def generate_async(self, *args, **kwargs): - vars = dict(*args, **kwargs) - try: - async for event in self.root_render_func(self.new_context(vars)): - yield event - except Exception: - yield self.environment.handle_exception() - - -def wrap_generate_func(original_generate): - def _convert_generator(self, loop, args, kwargs): - async_gen = self.generate_async(*args, **kwargs) - try: - while 1: - yield loop.run_until_complete(async_gen.__anext__()) - except StopAsyncIteration: - pass - - def generate(self, *args, **kwargs): - if not self.environment.is_async: - return original_generate(self, *args, **kwargs) - return _convert_generator(self, asyncio.get_event_loop(), args, kwargs) - - return update_wrapper(generate, original_generate) - - -async def render_async(self, *args, **kwargs): - if not self.environment.is_async: - raise RuntimeError("The environment was not created with async mode enabled.") - - vars = dict(*args, **kwargs) - ctx = self.new_context(vars) - - try: - return await concat_async(self.root_render_func(ctx)) - except Exception: - return self.environment.handle_exception() - - -def wrap_render_func(original_render): - def render(self, *args, **kwargs): - if not self.environment.is_async: - return original_render(self, *args, **kwargs) - loop = asyncio.get_event_loop() - return loop.run_until_complete(self.render_async(*args, **kwargs)) - - return update_wrapper(render, original_render) - - -def wrap_block_reference_call(original_call): - @internalcode - async def async_call(self): - rv = await concat_async(self._stack[self._depth](self._context)) - if self._context.eval_ctx.autoescape: - rv = Markup(rv) - return rv - - @internalcode - def __call__(self): - if not self._context.environment.is_async: - return original_call(self) - return async_call(self) - - return update_wrapper(__call__, original_call) - - -def wrap_macro_invoke(original_invoke): - @internalcode - async def async_invoke(self, arguments, autoescape): - rv = await self._func(*arguments) - if autoescape: - rv = Markup(rv) - return rv - - @internalcode - def _invoke(self, arguments, autoescape): - if not self._environment.is_async: - return original_invoke(self, arguments, autoescape) - return async_invoke(self, arguments, autoescape) - - return update_wrapper(_invoke, original_invoke) - - -@internalcode -async def get_default_module_async(self): - if self._module is not None: - return self._module - self._module = rv = await self.make_module_async() - return rv - - -def wrap_default_module(original_default_module): - @internalcode - def _get_default_module(self): - if self.environment.is_async: - raise RuntimeError("Template module attribute is unavailable in async mode") - return original_default_module(self) - - return _get_default_module - - -async def make_module_async(self, vars=None, shared=False, locals=None): - context = self.new_context(vars, shared, locals) - body_stream = [] - async for item in self.root_render_func(context): 
- body_stream.append(item) - return TemplateModule(self, context, body_stream) - - -def patch_template(): - from . import Template - - Template.generate = wrap_generate_func(Template.generate) - Template.generate_async = update_wrapper(generate_async, Template.generate_async) - Template.render_async = update_wrapper(render_async, Template.render_async) - Template.render = wrap_render_func(Template.render) - Template._get_default_module = wrap_default_module(Template._get_default_module) - Template._get_default_module_async = get_default_module_async - Template.make_module_async = update_wrapper( - make_module_async, Template.make_module_async - ) - - -def patch_runtime(): - from .runtime import BlockReference, Macro - - BlockReference.__call__ = wrap_block_reference_call(BlockReference.__call__) - Macro._invoke = wrap_macro_invoke(Macro._invoke) - - -def patch_filters(): - from .filters import FILTERS - from .asyncfilters import ASYNC_FILTERS - - FILTERS.update(ASYNC_FILTERS) - - -def patch_all(): - patch_template() - patch_runtime() - patch_filters() - - -async def auto_await(value): - if inspect.isawaitable(value): - return await value - return value - - -async def auto_aiter(iterable): - if hasattr(iterable, "__aiter__"): - async for item in iterable: - yield item - return - for item in iterable: - yield item - - -class AsyncLoopContext(LoopContext): - _to_iterator = staticmethod(auto_aiter) - - @property - async def length(self): - if self._length is not None: - return self._length - - try: - self._length = len(self._iterable) - except TypeError: - iterable = [x async for x in self._iterator] - self._iterator = self._to_iterator(iterable) - self._length = len(iterable) + self.index + (self._after is not missing) - - return self._length - - @property - async def revindex0(self): - return await self.length - self.index - - @property - async def revindex(self): - return await self.length - self.index0 - - async def _peek_next(self): - if self._after is not missing: - return self._after - - try: - self._after = await self._iterator.__anext__() - except StopAsyncIteration: - self._after = missing - - return self._after - - @property - async def last(self): - return await self._peek_next() is missing - - @property - async def nextitem(self): - rv = await self._peek_next() - - if rv is missing: - return self._undefined("there is no next item") - - return rv - - def __aiter__(self): - return self - - async def __anext__(self): - if self._after is not missing: - rv = self._after - self._after = missing - else: - rv = await self._iterator.__anext__() - - self.index0 += 1 - self._before = self._current - self._current = rv - return rv, self - - -async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0): - import warnings - - warnings.warn( - "This template must be recompiled with at least Jinja 2.11, or" - " it will fail in 3.0.", - DeprecationWarning, - stacklevel=2, - ) - return AsyncLoopContext(iterable, undefined, recurse, depth0) - - -patch_all()
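asyncsupport.py worked by monkeypatching Template, Macro and BlockReference when it was imported; in 3.x the compiler emits the async code paths directly and they are switched on per environment, so the module disappears. The replacement entry point, in a standalone sketch:

import asyncio

from jinja2 import Environment


async def main():
    # enable_async=True compiles templates with native async render/generate
    # support; no asyncsupport import or patch_all() call exists in 3.x.
    env = Environment(enable_async=True)
    tmpl = env.from_string("Hello {{ name }}!")
    print(await tmpl.render_async(name="async world"))


asyncio.run(main())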
diff --git a/third_party/jinja2/bccache.py b/third_party/jinja2/bccache.py index 9c066103..d0ddf56 100644 --- a/third_party/jinja2/bccache.py +++ b/third_party/jinja2/bccache.py
@@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """The optional bytecode cache system. This is useful if you have very complex template situations and the compilation of all those templates slows down your application too much. @@ -8,22 +7,30 @@ """ import errno import fnmatch +import marshal import os +import pickle import stat import sys import tempfile +import typing as t from hashlib import sha1 -from os import listdir -from os import path +from io import BytesIO +from types import CodeType -from ._compat import BytesIO -from ._compat import marshal_dump -from ._compat import marshal_load -from ._compat import pickle -from ._compat import text_type -from .utils import open_if_exists +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment -bc_version = 4 + class _MemcachedClient(te.Protocol): + def get(self, key: str) -> bytes: + ... + + def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None: + ... + + +bc_version = 5 # Magic bytes to identify Jinja bytecode cache files. Contains the # Python major and minor version to avoid loading incompatible bytecode # if a project upgrades its Python version. @@ -34,7 +41,7 @@ ) -class Bucket(object): +class Bucket: """Buckets are used to store the bytecode for one template. It's created and initialized by the bytecode cache and passed to the loading functions. @@ -43,17 +50,17 @@ cache subclasses don't have to care about cache invalidation. """ - def __init__(self, environment, key, checksum): + def __init__(self, environment: "Environment", key: str, checksum: str) -> None: self.environment = environment self.key = key self.checksum = checksum self.reset() - def reset(self): + def reset(self) -> None: """Resets the bucket (unloads the bytecode).""" - self.code = None + self.code: t.Optional[CodeType] = None - def load_bytecode(self, f): + def load_bytecode(self, f: t.BinaryIO) -> None: """Loads bytecode from a file or file like object.""" # make sure the magic header is correct magic = f.read(len(bc_magic)) @@ -67,31 +74,31 @@ return # if marshal_load fails then we need to reload try: - self.code = marshal_load(f) + self.code = marshal.load(f) except (EOFError, ValueError, TypeError): self.reset() return - def write_bytecode(self, f): + def write_bytecode(self, f: t.IO[bytes]) -> None: """Dump the bytecode into the file or file like object passed.""" if self.code is None: raise TypeError("can't write empty bucket") f.write(bc_magic) pickle.dump(self.checksum, f, 2) - marshal_dump(self.code, f) + marshal.dump(self.code, f) - def bytecode_from_string(self, string): - """Load bytecode from a string.""" + def bytecode_from_string(self, string: bytes) -> None: + """Load bytecode from bytes.""" self.load_bytecode(BytesIO(string)) - def bytecode_to_string(self): - """Return the bytecode as string.""" + def bytecode_to_string(self) -> bytes: + """Return the bytecode as bytes.""" out = BytesIO() self.write_bytecode(out) return out.getvalue() -class BytecodeCache(object): +class BytecodeCache: """To implement your own bytecode cache you have to subclass this class and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of these methods are passed a :class:`~jinja2.bccache.Bucket`. @@ -120,41 +127,48 @@ Jinja. """ - def load_bytecode(self, bucket): + def load_bytecode(self, bucket: Bucket) -> None: """Subclasses have to override this method to load bytecode into a bucket. If they are not able to find code in the cache for the bucket, it must not do anything. 
""" raise NotImplementedError() - def dump_bytecode(self, bucket): + def dump_bytecode(self, bucket: Bucket) -> None: """Subclasses have to override this method to write the bytecode from a bucket back to the cache. If it unable to do so it must not fail silently but raise an exception. """ raise NotImplementedError() - def clear(self): + def clear(self) -> None: """Clears the cache. This method is not used by Jinja but should be implemented to allow applications to clear the bytecode cache used by a particular environment. """ - def get_cache_key(self, name, filename=None): + def get_cache_key( + self, name: str, filename: t.Optional[t.Union[str]] = None + ) -> str: """Returns the unique hash key for this template name.""" hash = sha1(name.encode("utf-8")) + if filename is not None: - filename = "|" + filename - if isinstance(filename, text_type): - filename = filename.encode("utf-8") - hash.update(filename) + hash.update(f"|{filename}".encode()) + return hash.hexdigest() - def get_source_checksum(self, source): + def get_source_checksum(self, source: str) -> str: """Returns a checksum for the source.""" return sha1(source.encode("utf-8")).hexdigest() - def get_bucket(self, environment, name, filename, source): + def get_bucket( + self, + environment: "Environment", + name: str, + filename: t.Optional[str], + source: str, + ) -> Bucket: """Return a cache bucket for the given template. All arguments are mandatory but filename may be `None`. """ @@ -164,7 +178,7 @@ self.load_bytecode(bucket) return bucket - def set_bucket(self, bucket): + def set_bucket(self, bucket: Bucket) -> None: """Put the bucket into the cache.""" self.dump_bytecode(bucket) @@ -187,14 +201,16 @@ This bytecode cache supports clearing of the cache using the clear method. """ - def __init__(self, directory=None, pattern="__jinja2_%s.cache"): + def __init__( + self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" + ) -> None: if directory is None: directory = self._get_default_cache_dir() self.directory = directory self.pattern = pattern - def _get_default_cache_dir(self): - def _unsafe_dir(): + def _get_default_cache_dir(self) -> str: + def _unsafe_dir() -> "te.NoReturn": raise RuntimeError( "Cannot determine safe temp directory. You " "need to explicitly provide one." @@ -209,7 +225,7 @@ if not hasattr(os, "getuid"): _unsafe_dir() - dirname = "_jinja2-cache-%d" % os.getuid() + dirname = f"_jinja2-cache-{os.getuid()}" actual_dir = os.path.join(tmpdir, dirname) try: @@ -240,34 +256,72 @@ return actual_dir - def _get_cache_filename(self, bucket): - return path.join(self.directory, self.pattern % bucket.key) + def _get_cache_filename(self, bucket: Bucket) -> str: + return os.path.join(self.directory, self.pattern % (bucket.key,)) - def load_bytecode(self, bucket): - f = open_if_exists(self._get_cache_filename(bucket), "rb") - if f is not None: - try: - bucket.load_bytecode(f) - finally: - f.close() + def load_bytecode(self, bucket: Bucket) -> None: + filename = self._get_cache_filename(bucket) - def dump_bytecode(self, bucket): - f = open(self._get_cache_filename(bucket), "wb") + # Don't test for existence before opening the file, since the + # file could disappear after the test before the open. try: - bucket.write_bytecode(f) - finally: - f.close() + f = open(filename, "rb") + except (FileNotFoundError, IsADirectoryError, PermissionError): + # PermissionError can occur on Windows when an operation is + # in progress, such as calling clear(). 
+ return - def clear(self): + with f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket: Bucket) -> None: + # Write to a temporary file, then rename to the real name after + # writing. This avoids another process reading the file before + # it is fully written. + name = self._get_cache_filename(bucket) + f = tempfile.NamedTemporaryFile( + mode="wb", + dir=os.path.dirname(name), + prefix=os.path.basename(name), + suffix=".tmp", + delete=False, + ) + + def remove_silent() -> None: + try: + os.remove(f.name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + pass + + try: + with f: + bucket.write_bytecode(f) + except BaseException: + remove_silent() + raise + + try: + os.replace(f.name, name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + remove_silent() + except BaseException: + remove_silent() + raise + + def clear(self) -> None: # imported lazily here because google app-engine doesn't support # write access on the file system and the function does not exist # normally. from os import remove - files = fnmatch.filter(listdir(self.directory), self.pattern % "*") + files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) for filename in files: try: - remove(path.join(self.directory, filename)) + remove(os.path.join(self.directory, filename)) except OSError: pass @@ -284,7 +338,7 @@ - `python-memcached <https://pypi.org/project/python-memcached/>`_ (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only unicode. You can however pass + does not support storing binary data, only text. You can however pass the underlying cache client to the bytecode cache which is available as `django.core.cache.cache._client`.) @@ -319,32 +373,34 @@ def __init__( self, - client, - prefix="jinja2/bytecode/", - timeout=None, - ignore_memcache_errors=True, + client: "_MemcachedClient", + prefix: str = "jinja2/bytecode/", + timeout: t.Optional[int] = None, + ignore_memcache_errors: bool = True, ): self.client = client self.prefix = prefix self.timeout = timeout self.ignore_memcache_errors = ignore_memcache_errors - def load_bytecode(self, bucket): + def load_bytecode(self, bucket: Bucket) -> None: try: code = self.client.get(self.prefix + bucket.key) except Exception: if not self.ignore_memcache_errors: raise - code = None - if code is not None: + else: bucket.bytecode_from_string(code) - def dump_bytecode(self, bucket): - args = (self.prefix + bucket.key, bucket.bytecode_to_string()) - if self.timeout is not None: - args += (self.timeout,) + def dump_bytecode(self, bucket: Bucket) -> None: + key = self.prefix + bucket.key + value = bucket.bytecode_to_string() + try: - self.client.set(*args) + if self.timeout is not None: + self.client.set(key, value, self.timeout) + else: + self.client.set(key, value) except Exception: if not self.ignore_memcache_errors: raise
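The bccache rewrite keeps the public surface (Bucket, BytecodeCache, FileSystemBytecodeCache, MemcachedBytecodeCache) but drops the _compat shims, adds type hints, bumps bc_version to 5, and makes file writes atomic via a NamedTemporaryFile followed by os.replace. Hooking a cache into an environment is unchanged; a minimal sketch (the temporary cache directory is illustrative):

import tempfile

from jinja2 import DictLoader, Environment, FileSystemBytecodeCache

# Templates that come from a loader get their compiled bytecode stored under
# the cache directory as __jinja2_<key>.cache files and reused on later loads.
env = Environment(
    loader=DictLoader({"hello.txt": "Hello {{ name }}!"}),
    bytecode_cache=FileSystemBytecodeCache(tempfile.mkdtemp()),
)
print(env.get_template("hello.txt").render(name="bytecode cache"))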
diff --git a/third_party/jinja2/compiler.py b/third_party/jinja2/compiler.py index b39478d..3458095 100644 --- a/third_party/jinja2/compiler.py +++ b/third_party/jinja2/compiler.py
@@ -1,7 +1,8 @@ -# -*- coding: utf-8 -*- """Compiles nodes from the parser into Python code.""" -from collections import namedtuple +import typing as t +from contextlib import contextmanager from functools import update_wrapper +from io import StringIO from itertools import chain from keyword import iskeyword as is_python_keyword @@ -9,13 +10,6 @@ from markupsafe import Markup from . import nodes -from ._compat import imap -from ._compat import iteritems -from ._compat import izip -from ._compat import NativeStringIO -from ._compat import range_type -from ._compat import string_types -from ._compat import text_type from .exceptions import TemplateAssertionError from .idtracking import Symbols from .idtracking import VAR_LOAD_ALIAS @@ -24,9 +18,16 @@ from .idtracking import VAR_LOAD_UNDEFINED from .nodes import EvalContext from .optimizer import Optimizer +from .utils import _PassArg from .utils import concat from .visitor import NodeVisitor +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + operators = { "eq": "==", "ne": "!=", @@ -38,79 +39,109 @@ "notin": "not in", } -# what method to iterate over items do we want to use for dict iteration -# in generated code? on 2.x let's go with iteritems, on 3.x with items -if hasattr(dict, "iteritems"): - dict_item_iter = "iteritems" -else: - dict_item_iter = "items" -code_features = ["division"] - -# does this python version support generator stops? (PEP 0479) -try: - exec("from __future__ import generator_stop") - code_features.append("generator_stop") -except SyntaxError: - pass - -# does this python version support yield from? -try: - exec("def f(): yield from x()") -except SyntaxError: - supports_yield_from = False -else: - supports_yield_from = True - - -def optimizeconst(f): - def new_func(self, node, frame, **kwargs): +def optimizeconst(f: F) -> F: + def new_func( + self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any + ) -> t.Any: # Only optimize if the frame is not volatile - if self.optimized and not frame.eval_ctx.volatile: + if self.optimizer is not None and not frame.eval_ctx.volatile: new_node = self.optimizer.visit(node, frame.eval_ctx) + if new_node != node: return self.visit(new_node, frame) + return f(self, node, frame, **kwargs) - return update_wrapper(new_func, f) + return update_wrapper(t.cast(F, new_func), f) + + +def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed + and op in self.environment.intercepted_binops # type: ignore + ): + self.write(f"environment.call_binop(context, {op!r}, ") + self.visit(node.left, frame) + self.write(", ") + self.visit(node.right, frame) + else: + self.write("(") + self.visit(node.left, frame) + self.write(f" {op} ") + self.visit(node.right, frame) + + self.write(")") + + return visitor + + +def _make_unop( + op: str, +) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed + and op in self.environment.intercepted_unops # type: ignore + ): + self.write(f"environment.call_unop(context, {op!r}, ") + self.visit(node.node, frame) + else: + self.write("(" + op) + self.visit(node.node, frame) + + self.write(")") + + return visitor def generate( - node, environment, name, 
filename, stream=None, defer_init=False, optimized=True -): + node: nodes.Template, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, +) -> t.Optional[str]: """Generate the python source for a node tree.""" if not isinstance(node, nodes.Template): raise TypeError("Can't compile non template nodes") + generator = environment.code_generator_class( environment, name, filename, stream, defer_init, optimized ) generator.visit(node) + if stream is None: - return generator.stream.getvalue() + return generator.stream.getvalue() # type: ignore + + return None -def has_safe_repr(value): +def has_safe_repr(value: t.Any) -> bool: """Does the node have a safe representation?""" if value is None or value is NotImplemented or value is Ellipsis: return True - if type(value) in (bool, int, float, complex, range_type, Markup) + string_types: + + if type(value) in {bool, int, float, complex, range, str, Markup}: return True - if type(value) in (tuple, list, set, frozenset): - for item in value: - if not has_safe_repr(item): - return False - return True - elif type(value) is dict: - for key, value in iteritems(value): - if not has_safe_repr(key): - return False - if not has_safe_repr(value): - return False - return True + + if type(value) in {tuple, list, set, frozenset}: + return all(has_safe_repr(v) for v in value) + + if type(value) is dict: + return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) + return False -def find_undeclared(nodes, names): +def find_undeclared( + nodes: t.Iterable[nodes.Node], names: t.Iterable[str] +) -> t.Set[str]: """Check if the names passed are accessed undeclared. The return value is a set of all the undeclared names from the sequence of names found. """ @@ -123,20 +154,49 @@ return visitor.undeclared -class MacroRef(object): - def __init__(self, node): +class MacroRef: + def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: self.node = node self.accesses_caller = False self.accesses_kwargs = False self.accesses_varargs = False -class Frame(object): +class Frame: """Holds compile time information for us.""" - def __init__(self, eval_ctx, parent=None, level=None): + def __init__( + self, + eval_ctx: EvalContext, + parent: t.Optional["Frame"] = None, + level: t.Optional[int] = None, + ) -> None: self.eval_ctx = eval_ctx - self.symbols = Symbols(parent and parent.symbols or None, level=level) + + # the parent of this frame + self.parent = parent + + if parent is None: + self.symbols = Symbols(level=level) + + # in some dynamic inheritance situations the compiler needs to add + # write tests around output statements. + self.require_output_check = False + + # inside some tags we are using a buffer rather than yield statements. + # this for example affects {% filter %} or {% macro %}. If a frame + # is buffered this variable points to the name of the list used as + # buffer. + self.buffer: t.Optional[str] = None + + # the name of the block we're in, otherwise None. + self.block: t.Optional[str] = None + + else: + self.symbols = Symbols(parent.symbols, level=level) + self.require_output_check = parent.require_output_check + self.buffer = parent.buffer + self.block = parent.block # a toplevel frame is the root + soft frames such as if conditions. self.toplevel = False @@ -146,47 +206,40 @@ # situations. 
self.rootlevel = False - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = parent and parent.require_output_check + # variables set inside of loops and blocks should not affect outer frames, + # but they still needs to be kept track of as part of the active context. + self.loop_frame = False + self.block_frame = False - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer = None + # track whether the frame is being used in an if-statement or conditional + # expression as it determines which errors should be raised during runtime + # or compile time. + self.soft_frame = False - # the name of the block we're in, otherwise None. - self.block = parent and parent.block or None - - # the parent of this frame - self.parent = parent - - if parent is not None: - self.buffer = parent.buffer - - def copy(self): + def copy(self) -> "Frame": """Create a copy of the current one.""" rv = object.__new__(self.__class__) rv.__dict__.update(self.__dict__) rv.symbols = self.symbols.copy() return rv - def inner(self, isolated=False): + def inner(self, isolated: bool = False) -> "Frame": """Return an inner frame.""" if isolated: return Frame(self.eval_ctx, level=self.symbols.level + 1) return Frame(self.eval_ctx, self) - def soft(self): + def soft(self) -> "Frame": """Return a soft frame. A soft frame may not be modified as standalone thing as it shares the resources with the frame it was created of, but it's not a rootlevel frame any longer. - This is only used to implement if-statements. + This is only used to implement if-statements and conditional + expressions. """ rv = self.copy() rv.rootlevel = False + rv.soft_frame = True return rv __copy__ = copy @@ -199,19 +252,19 @@ class DependencyFinderVisitor(NodeVisitor): """A visitor that collects filter and test calls.""" - def __init__(self): - self.filters = set() - self.tests = set() + def __init__(self) -> None: + self.filters: t.Set[str] = set() + self.tests: t.Set[str] = set() - def visit_Filter(self, node): + def visit_Filter(self, node: nodes.Filter) -> None: self.generic_visit(node) self.filters.add(node.name) - def visit_Test(self, node): + def visit_Test(self, node: nodes.Test) -> None: self.generic_visit(node) self.tests.add(node.name) - def visit_Block(self, node): + def visit_Block(self, node: nodes.Block) -> None: """Stop visiting at blocks.""" @@ -221,11 +274,11 @@ not stop at closure frames. 
""" - def __init__(self, names): + def __init__(self, names: t.Iterable[str]) -> None: self.names = set(names) - self.undeclared = set() + self.undeclared: t.Set[str] = set() - def visit_Name(self, node): + def visit_Name(self, node: nodes.Name) -> None: if node.ctx == "load" and node.name in self.names: self.undeclared.add(node.name) if self.undeclared == self.names: @@ -233,7 +286,7 @@ else: self.names.discard(node.name) - def visit_Block(self, node): + def visit_Block(self, node: nodes.Block) -> None: """Stop visiting a blocks.""" @@ -246,26 +299,33 @@ class CodeGenerator(NodeVisitor): def __init__( - self, environment, name, filename, stream=None, defer_init=False, optimized=True - ): + self, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, + ) -> None: if stream is None: - stream = NativeStringIO() + stream = StringIO() self.environment = environment self.name = name self.filename = filename self.stream = stream self.created_block_context = False self.defer_init = defer_init - self.optimized = optimized + self.optimizer: t.Optional[Optimizer] = None + if optimized: self.optimizer = Optimizer(environment) # aliases for imports - self.import_aliases = {} + self.import_aliases: t.Dict[str, str] = {} # a registry for all blocks. Because blocks are moved out # into the global python scope they are registered here - self.blocks = {} + self.blocks: t.Dict[str, nodes.Block] = {} # the number of extends statements so far self.extends_so_far = 0 @@ -279,12 +339,12 @@ self.code_lineno = 1 # registry of all filters and tests (global, not block local) - self.tests = {} - self.filters = {} + self.tests: t.Dict[str, str] = {} + self.filters: t.Dict[str, str] = {} # the debug information - self.debug_info = [] - self._write_debug_info = None + self.debug_info: t.List[t.Tuple[int, int]] = [] + self._write_debug_info: t.Optional[int] = None # the number of new lines before the next write() self._new_lines = 0 @@ -303,75 +363,83 @@ self._indentation = 0 # Tracks toplevel assignments - self._assign_stack = [] + self._assign_stack: t.List[t.Set[str]] = [] # Tracks parameter definition blocks - self._param_def_block = [] + self._param_def_block: t.List[t.Set[str]] = [] # Tracks the current context. 
self._context_reference_stack = ["context"] + @property + def optimized(self) -> bool: + return self.optimizer is not None + # -- Various compilation helpers - def fail(self, msg, lineno): + def fail(self, msg: str, lineno: int) -> "te.NoReturn": """Fail with a :exc:`TemplateAssertionError`.""" raise TemplateAssertionError(msg, lineno, self.name, self.filename) - def temporary_identifier(self): + def temporary_identifier(self) -> str: """Get a new unique identifier.""" self._last_identifier += 1 - return "t_%d" % self._last_identifier + return f"t_{self._last_identifier}" - def buffer(self, frame): + def buffer(self, frame: Frame) -> None: """Enable buffering for the frame from that point onwards.""" frame.buffer = self.temporary_identifier() - self.writeline("%s = []" % frame.buffer) + self.writeline(f"{frame.buffer} = []") - def return_buffer_contents(self, frame, force_unescaped=False): + def return_buffer_contents( + self, frame: Frame, force_unescaped: bool = False + ) -> None: """Return the buffer contents of the frame.""" if not force_unescaped: if frame.eval_ctx.volatile: self.writeline("if context.eval_ctx.autoescape:") self.indent() - self.writeline("return Markup(concat(%s))" % frame.buffer) + self.writeline(f"return Markup(concat({frame.buffer}))") self.outdent() self.writeline("else:") self.indent() - self.writeline("return concat(%s)" % frame.buffer) + self.writeline(f"return concat({frame.buffer})") self.outdent() return elif frame.eval_ctx.autoescape: - self.writeline("return Markup(concat(%s))" % frame.buffer) + self.writeline(f"return Markup(concat({frame.buffer}))") return - self.writeline("return concat(%s)" % frame.buffer) + self.writeline(f"return concat({frame.buffer})") - def indent(self): + def indent(self) -> None: """Indent by one.""" self._indentation += 1 - def outdent(self, step=1): + def outdent(self, step: int = 1) -> None: """Outdent by step.""" self._indentation -= step - def start_write(self, frame, node=None): + def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: """Yield or write into the frame buffer.""" if frame.buffer is None: self.writeline("yield ", node) else: - self.writeline("%s.append(" % frame.buffer, node) + self.writeline(f"{frame.buffer}.append(", node) - def end_write(self, frame): + def end_write(self, frame: Frame) -> None: """End the writing process started by `start_write`.""" if frame.buffer is not None: self.write(")") - def simple_write(self, s, frame, node=None): + def simple_write( + self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None + ) -> None: """Simple shortcut for start_write + write + end_write.""" self.start_write(frame, node) self.write(s) self.end_write(frame) - def blockvisit(self, nodes, frame): + def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: """Visit a list of nodes as block in a frame. If the current frame is no buffer a dummy ``if 0: yield None`` is written automatically. 
""" @@ -382,7 +450,7 @@ except CompilerExit: pass - def write(self, x): + def write(self, x: str) -> None: """Write a string into the output stream.""" if self._new_lines: if not self._first_write: @@ -396,19 +464,26 @@ self._new_lines = 0 self.stream.write(x) - def writeline(self, x, node=None, extra=0): + def writeline( + self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 + ) -> None: """Combination of newline and write.""" self.newline(node, extra) self.write(x) - def newline(self, node=None, extra=0): + def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: """Add one or more newlines before the next write.""" self._new_lines = max(self._new_lines, 1 + extra) if node is not None and node.lineno != self._last_line: self._write_debug_info = node.lineno self._last_line = node.lineno - def signature(self, node, frame, extra_kwargs=None): + def signature( + self, + node: t.Union[nodes.Call, nodes.Filter, nodes.Test], + frame: Frame, + extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> None: """Writes a function call to the stream for the current node. A leading comma is added automatically. The extra keyword arguments may not include python keywords otherwise a syntax @@ -417,11 +492,10 @@ """ # if any of the given keyword arguments is a python keyword # we have to make sure that no invalid call is created. - kwarg_workaround = False - for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()): - if is_python_keyword(kwarg): - kwarg_workaround = True - break + kwarg_workaround = any( + is_python_keyword(t.cast(str, k)) + for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) + ) for arg in node.args: self.write(", ") @@ -432,8 +506,8 @@ self.write(", ") self.visit(kwarg, frame) if extra_kwargs is not None: - for key, value in iteritems(extra_kwargs): - self.write(", %s=%s" % (key, value)) + for key, value in extra_kwargs.items(): + self.write(f", {key}={value}") if node.dyn_args: self.write(", *") self.visit(node.dyn_args, frame) @@ -444,12 +518,12 @@ else: self.write(", **{") for kwarg in node.kwargs: - self.write("%r: " % kwarg.key) + self.write(f"{kwarg.key!r}: ") self.visit(kwarg.value, frame) self.write(", ") if extra_kwargs is not None: - for key, value in iteritems(extra_kwargs): - self.write("%r: %s, " % (key, value)) + for key, value in extra_kwargs.items(): + self.write(f"{key!r}: {value}, ") if node.dyn_kwargs is not None: self.write("}, **") self.visit(node.dyn_kwargs, frame) @@ -461,50 +535,82 @@ self.write(", **") self.visit(node.dyn_kwargs, frame) - def pull_dependencies(self, nodes): - """Pull all the dependencies.""" + def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: + """Find all filter and test names used in the template and + assign them to variables in the compiled namespace. Checking + that the names are registered with the environment is done when + compiling the Filter and Test nodes. If the node is in an If or + CondExpr node, the check is done at runtime instead. + + .. versionchanged:: 3.0 + Filters and tests in If and CondExpr nodes are checked at + runtime instead of compile time. 
+ """ visitor = DependencyFinderVisitor() + for node in nodes: visitor.visit(node) - for dependency in "filters", "tests": - mapping = getattr(self, dependency) - for name in sorted(getattr(visitor, dependency)): - if name not in mapping: - mapping[name] = self.temporary_identifier() - self.writeline( - "%s = environment.%s[%r]" % (mapping[name], dependency, name) - ) - def enter_frame(self, frame): + for id_map, names, dependency in (self.filters, visitor.filters, "filters"), ( + self.tests, + visitor.tests, + "tests", + ): + for name in sorted(names): + if name not in id_map: + id_map[name] = self.temporary_identifier() + + # add check during runtime that dependencies used inside of executed + # blocks are defined, as this step may be skipped during compile time + self.writeline("try:") + self.indent() + self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") + self.outdent() + self.writeline("except KeyError:") + self.indent() + self.writeline("@internalcode") + self.writeline(f"def {id_map[name]}(*unused):") + self.indent() + self.writeline( + f'raise TemplateRuntimeError("No {dependency[:-1]}' + f' named {name!r} found.")' + ) + self.outdent() + self.outdent() + + def enter_frame(self, frame: Frame) -> None: undefs = [] - for target, (action, param) in iteritems(frame.symbols.loads): + for target, (action, param) in frame.symbols.loads.items(): if action == VAR_LOAD_PARAMETER: pass elif action == VAR_LOAD_RESOLVE: - self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param)) + self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") elif action == VAR_LOAD_ALIAS: - self.writeline("%s = %s" % (target, param)) + self.writeline(f"{target} = {param}") elif action == VAR_LOAD_UNDEFINED: undefs.append(target) else: raise NotImplementedError("unknown load instruction") if undefs: - self.writeline("%s = missing" % " = ".join(undefs)) + self.writeline(f"{' = '.join(undefs)} = missing") - def leave_frame(self, frame, with_python_scope=False): + def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: if not with_python_scope: undefs = [] - for target, _ in iteritems(frame.symbols.loads): + for target in frame.symbols.loads: undefs.append(target) if undefs: - self.writeline("%s = missing" % " = ".join(undefs)) + self.writeline(f"{' = '.join(undefs)} = missing") - def func(self, name): - if self.environment.is_async: - return "async def %s" % name - return "def %s" % name + def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: + return async_value if self.environment.is_async else sync_value - def macro_body(self, node, frame): + def func(self, name: str) -> str: + return f"{self.choose_async()}def {name}" + + def macro_body( + self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame + ) -> t.Tuple[Frame, MacroRef]: """Dump the function def of a macro or call block.""" frame = frame.inner() frame.symbols.analyze_node(node) @@ -513,6 +619,7 @@ explicit_caller = None skip_special_params = set() args = [] + for idx, arg in enumerate(node.args): if arg.name == "caller": explicit_caller = idx @@ -552,7 +659,7 @@ # macros are delayed, they never require output checks frame.require_output_check = False frame.symbols.analyze_node(node) - self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node) + self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) self.indent() self.buffer(frame) @@ -561,17 +668,17 @@ self.push_parameter_definitions(frame) for idx, arg in enumerate(node.args): ref = 
frame.symbols.ref(arg.name) - self.writeline("if %s is missing:" % ref) + self.writeline(f"if {ref} is missing:") self.indent() try: default = node.defaults[idx - len(node.args)] except IndexError: self.writeline( - "%s = undefined(%r, name=%r)" - % (ref, "parameter %r was not provided" % arg.name, arg.name) + f'{ref} = undefined("parameter {arg.name!r} was not provided",' + f" name={arg.name!r})" ) else: - self.writeline("%s = " % ref) + self.writeline(f"{ref} = ") self.visit(default, frame) self.mark_parameter_stored(ref) self.outdent() @@ -584,50 +691,46 @@ return frame, macro_ref - def macro_def(self, macro_ref, frame): + def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: """Dump the macro definition for the def created by macro_body.""" arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) name = getattr(macro_ref.node, "name", None) if len(macro_ref.node.args) == 1: arg_tuple += "," self.write( - "Macro(environment, macro, %r, (%s), %r, %r, %r, " - "context.eval_ctx.autoescape)" - % ( - name, - arg_tuple, - macro_ref.accesses_kwargs, - macro_ref.accesses_varargs, - macro_ref.accesses_caller, - ) + f"Macro(environment, macro, {name!r}, ({arg_tuple})," + f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," + f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" ) - def position(self, node): + def position(self, node: nodes.Node) -> str: """Return a human readable position for the node.""" - rv = "line %d" % node.lineno + rv = f"line {node.lineno}" if self.name is not None: - rv += " in " + repr(self.name) + rv = f"{rv} in {self.name!r}" return rv - def dump_local_context(self, frame): - return "{%s}" % ", ".join( - "%r: %s" % (name, target) - for name, target in sorted(iteritems(frame.symbols.dump_stores())) + def dump_local_context(self, frame: Frame) -> str: + items_kv = ", ".join( + f"{name!r}: {target}" + for name, target in frame.symbols.dump_stores().items() ) + return f"{{{items_kv}}}" - def write_commons(self): + def write_commons(self) -> None: """Writes a common preamble that is used by root and block functions. Primarily this sets up common local helpers and enforces a generator through a dead branch. """ self.writeline("resolve = context.resolve_or_missing") self.writeline("undefined = environment.undefined") + self.writeline("concat = environment.concat") # always use the standard Undefined class for the implicit else of # conditional expressions self.writeline("cond_expr_undefined = Undefined") self.writeline("if 0: yield None") - def push_parameter_definitions(self, frame): + def push_parameter_definitions(self, frame: Frame) -> None: """Pushes all parameter targets from the given frame into a local stack that permits tracking of yet to be assigned parameters. In particular this enables the optimization from `visit_Name` to skip @@ -636,97 +739,108 @@ """ self._param_def_block.append(frame.symbols.dump_param_targets()) - def pop_parameter_definitions(self): + def pop_parameter_definitions(self) -> None: """Pops the current parameter definitions set.""" self._param_def_block.pop() - def mark_parameter_stored(self, target): + def mark_parameter_stored(self, target: str) -> None: """Marks a parameter in the current parameter definitions as stored. This will skip the enforced undefined checks. 
""" if self._param_def_block: self._param_def_block[-1].discard(target) - def push_context_reference(self, target): + def push_context_reference(self, target: str) -> None: self._context_reference_stack.append(target) - def pop_context_reference(self): + def pop_context_reference(self) -> None: self._context_reference_stack.pop() - def get_context_ref(self): + def get_context_ref(self) -> str: return self._context_reference_stack[-1] - def get_resolve_func(self): + def get_resolve_func(self) -> str: target = self._context_reference_stack[-1] if target == "context": return "resolve" - return "%s.resolve" % target + return f"{target}.resolve" - def derive_context(self, frame): - return "%s.derived(%s)" % ( - self.get_context_ref(), - self.dump_local_context(frame), - ) + def derive_context(self, frame: Frame) -> str: + return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - def parameter_is_undeclared(self, target): + def parameter_is_undeclared(self, target: str) -> bool: """Checks if a given target is an undeclared parameter.""" if not self._param_def_block: return False return target in self._param_def_block[-1] - def push_assign_tracking(self): + def push_assign_tracking(self) -> None: """Pushes a new layer for assignment tracking.""" self._assign_stack.append(set()) - def pop_assign_tracking(self, frame): + def pop_assign_tracking(self, frame: Frame) -> None: """Pops the topmost level for assignment tracking and updates the context variables if necessary. """ vars = self._assign_stack.pop() - if not frame.toplevel or not vars: + if ( + not frame.block_frame + and not frame.loop_frame + and not frame.toplevel + or not vars + ): return public_names = [x for x in vars if x[:1] != "_"] if len(vars) == 1: name = next(iter(vars)) ref = frame.symbols.ref(name) - self.writeline("context.vars[%r] = %s" % (name, ref)) + if frame.loop_frame: + self.writeline(f"_loop_vars[{name!r}] = {ref}") + return + if frame.block_frame: + self.writeline(f"_block_vars[{name!r}] = {ref}") + return + self.writeline(f"context.vars[{name!r}] = {ref}") else: - self.writeline("context.vars.update({") + if frame.loop_frame: + self.writeline("_loop_vars.update({") + elif frame.block_frame: + self.writeline("_block_vars.update({") + else: + self.writeline("context.vars.update({") for idx, name in enumerate(vars): if idx: self.write(", ") ref = frame.symbols.ref(name) - self.write("%r: %s" % (name, ref)) + self.write(f"{name!r}: {ref}") self.write("})") - if public_names: + if not frame.block_frame and not frame.loop_frame and public_names: if len(public_names) == 1: - self.writeline("context.exported_vars.add(%r)" % public_names[0]) + self.writeline(f"context.exported_vars.add({public_names[0]!r})") else: - self.writeline( - "context.exported_vars.update((%s))" - % ", ".join(imap(repr, public_names)) - ) + names_str = ", ".join(map(repr, public_names)) + self.writeline(f"context.exported_vars.update(({names_str}))") # -- Statement Visitors - def visit_Template(self, node, frame=None): + def visit_Template( + self, node: nodes.Template, frame: t.Optional[Frame] = None + ) -> None: assert frame is None, "no root frame allowed" eval_ctx = EvalContext(self.environment, self.name) - from .runtime import exported - - self.writeline("from __future__ import %s" % ", ".join(code_features)) - self.writeline("from jinja2.runtime import " + ", ".join(exported)) + from .runtime import exported, async_exported if self.environment.is_async: - self.writeline( - "from jinja2.asyncsupport import auto_await, " - 
"auto_aiter, AsyncLoopContext" - ) + exported_names = sorted(exported + async_exported) + else: + exported_names = sorted(exported) + + self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) # if we want a deferred initialization we cannot move the # environment into a local name - envenv = not self.defer_init and ", environment=environment" or "" + envenv = "" if self.defer_init else ", environment=environment" # do we have an extends tag at all? If not, we can save some # overhead by just not processing any inheritance code. @@ -735,7 +849,7 @@ # find all blocks for block in node.find_all(nodes.Block): if block.name in self.blocks: - self.fail("block %r defined twice" % block.name, block.lineno) + self.fail(f"block {block.name!r} defined twice", block.lineno) self.blocks[block.name] = block # find all imports and import them @@ -745,16 +859,16 @@ self.import_aliases[imp] = alias = self.temporary_identifier() if "." in imp: module, obj = imp.rsplit(".", 1) - self.writeline("from %s import %s as %s" % (module, obj, alias)) + self.writeline(f"from {module} import {obj} as {alias}") else: - self.writeline("import %s as %s" % (imp, alias)) + self.writeline(f"import {imp} as {alias}") # add the load name - self.writeline("name = %r" % self.name) + self.writeline(f"name = {self.name!r}") # generate the root render function. self.writeline( - "%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1 + f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 ) self.indent() self.write_commons() @@ -763,7 +877,7 @@ frame = Frame(eval_ctx) if "self" in find_undeclared(node.body, ("self",)): ref = frame.symbols.declare_parameter("self") - self.writeline("%s = TemplateReference(context)" % ref) + self.writeline(f"{ref} = TemplateReference(context)") frame.symbols.analyze_node(node) frame.toplevel = frame.rootlevel = True frame.require_output_check = have_extends and not self.has_known_extends @@ -781,13 +895,11 @@ self.indent() self.writeline("if parent_template is not None:") self.indent() - if supports_yield_from and not self.environment.is_async: + if not self.environment.is_async: self.writeline("yield from parent_template.root_render_func(context)") else: self.writeline( - "%sfor event in parent_template." - "root_render_func(context):" - % (self.environment.is_async and "async " or "") + "async for event in parent_template.root_render_func(context):" ) self.indent() self.writeline("yield event") @@ -795,10 +907,9 @@ self.outdent(1 + (not self.has_known_extends)) # at this point we now have the blocks collected and can visit them too. - for name, block in iteritems(self.blocks): + for name, block in self.blocks.items(): self.writeline( - "%s(context, missing=missing%s):" - % (self.func("block_" + name), envenv), + f"{self.func('block_' + name)}(context, missing=missing{envenv}):", block, 1, ) @@ -808,32 +919,29 @@ # toplevel template. This would cause a variety of # interesting issues with identifier tracking. 
block_frame = Frame(eval_ctx) + block_frame.block_frame = True undeclared = find_undeclared(block.body, ("self", "super")) if "self" in undeclared: ref = block_frame.symbols.declare_parameter("self") - self.writeline("%s = TemplateReference(context)" % ref) + self.writeline(f"{ref} = TemplateReference(context)") if "super" in undeclared: ref = block_frame.symbols.declare_parameter("super") - self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name)) + self.writeline(f"{ref} = context.super({name!r}, block_{name})") block_frame.symbols.analyze_node(block) block_frame.block = name + self.writeline("_block_vars = {}") self.enter_frame(block_frame) self.pull_dependencies(block.body) self.blockvisit(block.body, block_frame) self.leave_frame(block_frame, with_python_scope=True) self.outdent() - self.writeline( - "blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks), - extra=1, - ) + blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) + self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) + debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) + self.writeline(f"debug_info = {debug_kv_str!r}") - # add a function that returns the debug info - self.writeline( - "debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info) - ) - - def visit_Block(self, node, frame): + def visit_Block(self, node: nodes.Block, frame: Frame) -> None: """Call a block and register it for the template.""" level = 0 if frame.toplevel: @@ -851,18 +959,23 @@ else: context = self.get_context_ref() - if ( - supports_yield_from - and not self.environment.is_async - and frame.buffer is None - ): + if node.required: + self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) + self.indent() self.writeline( - "yield from context.blocks[%r][0](%s)" % (node.name, context), node + f'raise TemplateRuntimeError("Required block {node.name!r} not found")', + node, + ) + self.outdent() + + if not self.environment.is_async and frame.buffer is None: + self.writeline( + f"yield from context.blocks[{node.name!r}][0]({context})", node ) else: - loop = self.environment.is_async and "async for" or "for" self.writeline( - "%s event in context.blocks[%r][0](%s):" % (loop, node.name, context), + f"{self.choose_async()}for event in" + f" context.blocks[{node.name!r}][0]({context}):", node, ) self.indent() @@ -871,7 +984,7 @@ self.outdent(level) - def visit_Extends(self, node, frame): + def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: """Calls the extender.""" if not frame.toplevel: self.fail("cannot use extend from a non top-level scope", node.lineno) @@ -888,7 +1001,7 @@ if not self.has_known_extends: self.writeline("if parent_template is not None:") self.indent() - self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times") + self.writeline('raise TemplateRuntimeError("extended multiple times")') # if we have a known extends already we don't need that code here # as we know that the template execution will end here. 
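
For reference, the block functions, the blocks = {...} mapping, and the debug_info string assembled above all land in the generated template module, which can be dumped for inspection. A minimal standalone sketch, not part of this patch (the template source is made up):

    from jinja2 import Environment

    env = Environment()
    # raw=True returns the generated Python module source instead of a code object,
    # which shows root(), block_title(), the blocks dict and the debug_info string.
    src = env.compile("{% block title %}Hello {{ name }}{% endblock %}", raw=True)
    print(src)
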
@@ -899,10 +1012,8 @@ self.writeline("parent_template = environment.get_template(", node) self.visit(node.template, frame) - self.write(", %r)" % self.name) - self.writeline( - "for name, parent_block in parent_template.blocks.%s():" % dict_item_iter - ) + self.write(f", {self.name!r})") + self.writeline("for name, parent_block in parent_template.blocks.items():") self.indent() self.writeline("context.blocks.setdefault(name, []).append(parent_block)") self.outdent() @@ -916,7 +1027,7 @@ # and now we have one more self.extends_so_far += 1 - def visit_Include(self, node, frame): + def visit_Include(self, node: nodes.Include, frame: Frame) -> None: """Handles includes.""" if node.ignore_missing: self.writeline("try:") @@ -924,16 +1035,16 @@ func_name = "get_or_select_template" if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, string_types): + if isinstance(node.template.value, str): func_name = "get_template" elif isinstance(node.template.value, (tuple, list)): func_name = "select_template" elif isinstance(node.template, (nodes.Tuple, nodes.List)): func_name = "select_template" - self.writeline("template = environment.%s(" % func_name, node) + self.writeline(f"template = environment.{func_name}(", node) self.visit(node.template, frame) - self.write(", %r)" % self.name) + self.write(f", {self.name!r})") if node.ignore_missing: self.outdent() self.writeline("except TemplateNotFound:") @@ -945,26 +1056,19 @@ skip_event_yield = False if node.with_context: - loop = self.environment.is_async and "async for" or "for" self.writeline( - "%s event in template.root_render_func(" - "template.new_context(context.get_all(), True, " - "%s)):" % (loop, self.dump_local_context(frame)) + f"{self.choose_async()}for event in template.root_render_func(" + "template.new_context(context.get_all(), True," + f" {self.dump_local_context(frame)})):" ) elif self.environment.is_async: self.writeline( - "for event in (await " - "template._get_default_module_async())" + "for event in (await template._get_default_module_async())" "._body_stream:" ) else: - if supports_yield_from: - self.writeline("yield from template._get_default_module()._body_stream") - skip_event_yield = True - else: - self.writeline( - "for event in template._get_default_module()._body_stream:" - ) + self.writeline("yield from template._get_default_module()._body_stream") + skip_event_yield = True if not skip_event_yield: self.indent() @@ -974,53 +1078,37 @@ if node.ignore_missing: self.outdent() - def visit_Import(self, node, frame): - """Visit regular imports.""" - self.writeline("%s = " % frame.symbols.ref(node.target), node) - if frame.toplevel: - self.write("context.vars[%r] = " % node.target) - if self.environment.is_async: - self.write("await ") - self.write("environment.get_template(") + def _import_common( + self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame + ) -> None: + self.write(f"{self.choose_async('await ')}environment.get_template(") self.visit(node.template, frame) - self.write(", %r)." 
% self.name) - if node.with_context: - self.write( - "make_module%s(context.get_all(), True, %s)" - % ( - self.environment.is_async and "_async" or "", - self.dump_local_context(frame), - ) - ) - elif self.environment.is_async: - self.write("_get_default_module_async()") - else: - self.write("_get_default_module()") - if frame.toplevel and not node.target.startswith("_"): - self.writeline("context.exported_vars.discard(%r)" % node.target) + self.write(f", {self.name!r}).") - def visit_FromImport(self, node, frame): + if node.with_context: + f_name = f"make_module{self.choose_async('_async')}" + self.write( + f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" + ) + else: + self.write(f"_get_default_module{self.choose_async('_async')}(context)") + + def visit_Import(self, node: nodes.Import, frame: Frame) -> None: + """Visit regular imports.""" + self.writeline(f"{frame.symbols.ref(node.target)} = ", node) + if frame.toplevel: + self.write(f"context.vars[{node.target!r}] = ") + + self._import_common(node, frame) + + if frame.toplevel and not node.target.startswith("_"): + self.writeline(f"context.exported_vars.discard({node.target!r})") + + def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: """Visit named imports.""" self.newline(node) - self.write( - "included_template = %senvironment.get_template(" - % (self.environment.is_async and "await " or "") - ) - self.visit(node.template, frame) - self.write(", %r)." % self.name) - if node.with_context: - self.write( - "make_module%s(context.get_all(), True, %s)" - % ( - self.environment.is_async and "_async" or "", - self.dump_local_context(frame), - ) - ) - elif self.environment.is_async: - self.write("_get_default_module_async()") - else: - self.write("_get_default_module()") - + self.write("included_template = ") + self._import_common(node, frame) var_names = [] discarded_names = [] for name in node.names: @@ -1029,22 +1117,18 @@ else: alias = name self.writeline( - "%s = getattr(included_template, " - "%r, missing)" % (frame.symbols.ref(alias), name) + f"{frame.symbols.ref(alias)} =" + f" getattr(included_template, {name!r}, missing)" ) - self.writeline("if %s is missing:" % frame.symbols.ref(alias)) + self.writeline(f"if {frame.symbols.ref(alias)} is missing:") self.indent() + message = ( + "the template {included_template.__name__!r}" + f" (imported on {self.position(node)})" + f" does not export the requested name {name!r}" + ) self.writeline( - "%s = undefined(%r %% " - "included_template.__name__, " - "name=%r)" - % ( - frame.symbols.ref(alias), - "the template %%r (imported on %s) does " - "not export the requested name %s" - % (self.position(node), repr(name)), - name, - ) + f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" ) self.outdent() if frame.toplevel: @@ -1055,35 +1139,35 @@ if var_names: if len(var_names) == 1: name = var_names[0] - self.writeline( - "context.vars[%r] = %s" % (name, frame.symbols.ref(name)) - ) + self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") else: - self.writeline( - "context.vars.update({%s})" - % ", ".join( - "%r: %s" % (name, frame.symbols.ref(name)) for name in var_names - ) + names_kv = ", ".join( + f"{name!r}: {frame.symbols.ref(name)}" for name in var_names ) + self.writeline(f"context.vars.update({{{names_kv}}})") if discarded_names: if len(discarded_names) == 1: - self.writeline("context.exported_vars.discard(%r)" % discarded_names[0]) + self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") else: 
+ names_str = ", ".join(map(repr, discarded_names)) self.writeline( - "context.exported_vars.difference_" - "update((%s))" % ", ".join(imap(repr, discarded_names)) + f"context.exported_vars.difference_update(({names_str}))" ) - def visit_For(self, node, frame): + def visit_For(self, node: nodes.For, frame: Frame) -> None: loop_frame = frame.inner() + loop_frame.loop_frame = True test_frame = frame.inner() else_frame = frame.inner() # try to figure out if we have an extended loop. An extended loop # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body. - extended_loop = node.recursive or "loop" in find_undeclared( - node.iter_child_nodes(only=("body",)), ("loop",) + # variable is accessed in the body if the body is a scoped block. + extended_loop = ( + node.recursive + or "loop" + in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) + or any(block.scoped for block in node.find_all(nodes.Block)) ) loop_ref = None @@ -1097,13 +1181,13 @@ if node.test: loop_filter_func = self.temporary_identifier() test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test) + self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) self.indent() self.enter_frame(test_frame) - self.writeline(self.environment.is_async and "async for " or "for ") + self.writeline(self.choose_async("async for ", "for ")) self.visit(node.target, loop_frame) self.write(" in ") - self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter") + self.write(self.choose_async("auto_aiter(fiter)", "fiter")) self.write(":") self.indent() self.writeline("if ", node.test) @@ -1120,7 +1204,7 @@ # variable is a special one we have to enforce aliasing for it. 
if node.recursive: self.writeline( - "%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node + f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node ) self.indent() self.buffer(loop_frame) @@ -1131,7 +1215,7 @@ # make sure the loop variable is a special one and raise a template # assertion error if a loop tries to write to loop if extended_loop: - self.writeline("%s = missing" % loop_ref) + self.writeline(f"{loop_ref} = missing") for name in node.find_all(nodes.Name): if name.ctx == "store" and name.name == "loop": @@ -1142,20 +1226,17 @@ if node.else_: iteration_indicator = self.temporary_identifier() - self.writeline("%s = 1" % iteration_indicator) + self.writeline(f"{iteration_indicator} = 1") - self.writeline(self.environment.is_async and "async for " or "for ", node) + self.writeline(self.choose_async("async for ", "for "), node) self.visit(node.target, loop_frame) if extended_loop: - if self.environment.is_async: - self.write(", %s in AsyncLoopContext(" % loop_ref) - else: - self.write(", %s in LoopContext(" % loop_ref) + self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") else: self.write(" in ") if node.test: - self.write("%s(" % loop_filter_func) + self.write(f"{loop_filter_func}(") if node.recursive: self.write("reciter") else: @@ -1170,21 +1251,22 @@ if node.recursive: self.write(", undefined, loop_render_func, depth):") else: - self.write(extended_loop and ", undefined):" or ":") + self.write(", undefined):" if extended_loop else ":") self.indent() self.enter_frame(loop_frame) + self.writeline("_loop_vars = {}") self.blockvisit(node.body, loop_frame) if node.else_: - self.writeline("%s = 0" % iteration_indicator) + self.writeline(f"{iteration_indicator} = 0") self.outdent() self.leave_frame( loop_frame, with_python_scope=node.recursive and not node.else_ ) if node.else_: - self.writeline("if %s:" % iteration_indicator) + self.writeline(f"if {iteration_indicator}:") self.indent() self.enter_frame(else_frame) self.blockvisit(node.else_, else_frame) @@ -1197,9 +1279,7 @@ self.return_buffer_contents(loop_frame) self.outdent() self.start_write(frame, node) - if self.environment.is_async: - self.write("await ") - self.write("loop(") + self.write(f"{self.choose_async('await ')}loop(") if self.environment.is_async: self.write("auto_aiter(") self.visit(node.iter, frame) @@ -1208,7 +1288,12 @@ self.write(", loop)") self.end_write(frame) - def visit_If(self, node, frame): + # at the end of the iteration, clear any assignments made in the + # loop from the top level + if self._assign_stack: + self._assign_stack[-1].difference_update(loop_frame.symbols.stores) + + def visit_If(self, node: nodes.If, frame: Frame) -> None: if_frame = frame.soft() self.writeline("if ", node) self.visit(node.test, if_frame) @@ -1229,17 +1314,17 @@ self.blockvisit(node.else_, if_frame) self.outdent() - def visit_Macro(self, node, frame): + def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: macro_frame, macro_ref = self.macro_body(node, frame) self.newline() if frame.toplevel: if not node.name.startswith("_"): - self.write("context.exported_vars.add(%r)" % node.name) - self.writeline("context.vars[%r] = " % node.name) - self.write("%s = " % frame.symbols.ref(node.name)) + self.write(f"context.exported_vars.add({node.name!r})") + self.writeline(f"context.vars[{node.name!r}] = ") + self.write(f"{frame.symbols.ref(node.name)} = ") self.macro_def(macro_ref, macro_frame) - def visit_CallBlock(self, node, frame): + def visit_CallBlock(self, node: nodes.CallBlock, 
frame: Frame) -> None: call_frame, macro_ref = self.macro_body(node, frame) self.writeline("caller = ") self.macro_def(macro_ref, call_frame) @@ -1247,7 +1332,7 @@ self.visit_Call(node.call, frame, forward_caller=True) self.end_write(frame) - def visit_FilterBlock(self, node, frame): + def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: filter_frame = frame.inner() filter_frame.symbols.analyze_node(node) self.enter_frame(filter_frame) @@ -1258,11 +1343,11 @@ self.end_write(frame) self.leave_frame(filter_frame) - def visit_With(self, node, frame): + def visit_With(self, node: nodes.With, frame: Frame) -> None: with_frame = frame.inner() with_frame.symbols.analyze_node(node) self.enter_frame(with_frame) - for target, expr in izip(node.targets, node.values): + for target, expr in zip(node.targets, node.values): self.newline() self.visit(target, with_frame) self.write(" = ") @@ -1270,18 +1355,25 @@ self.blockvisit(node.body, with_frame) self.leave_frame(with_frame) - def visit_ExprStmt(self, node, frame): + def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: self.newline(node) self.visit(node.node, frame) - _FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src")) - #: The default finalize function if the environment isn't configured - #: with one. Or if the environment has one, this is called on that - #: function's output for constants. - _default_finalize = text_type - _finalize = None + class _FinalizeInfo(t.NamedTuple): + const: t.Optional[t.Callable[..., str]] + src: t.Optional[str] - def _make_finalize(self): + @staticmethod + def _default_finalize(value: t.Any) -> t.Any: + """The default finalize function if the environment isn't + configured with one. Or, if the environment has one, this is + called on that function's output for constants. + """ + return str(value) + + _finalize: t.Optional[_FinalizeInfo] = None + + def _make_finalize(self) -> _FinalizeInfo: """Build the finalize function to be used on constants and at runtime. Cached so it's only created once for all output nodes. 
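
The next hunk replaces the old contextfunction/evalcontextfunction/environmentfunction attribute checks with a single _PassArg.from_obj() lookup. As an illustration of what that dispatch supports at the API level, a finalize callable can be marked with pass_environment so the environment is passed as its first argument. A standalone sketch, not part of this patch:

    from jinja2 import Environment, pass_environment

    @pass_environment
    def finalize(env, value):
        # Render None as an empty string instead of the literal "None".
        return "" if value is None else value

    env = Environment(finalize=finalize)
    print(env.from_string("{{ missing | default(none) }}!").render())  # prints "!"
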
@@ -1297,39 +1389,48 @@ if self._finalize is not None: return self._finalize + finalize: t.Optional[t.Callable[..., t.Any]] finalize = default = self._default_finalize src = None if self.environment.finalize: src = "environment.finalize(" env_finalize = self.environment.finalize + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(env_finalize) # type: ignore + ) + finalize = None - def finalize(value): - return default(env_finalize(value)) + if pass_arg is None: - if getattr(env_finalize, "contextfunction", False) is True: - src += "context, " - finalize = None # noqa: F811 - elif getattr(env_finalize, "evalcontextfunction", False) is True: - src += "context.eval_ctx, " - finalize = None - elif getattr(env_finalize, "environmentfunction", False) is True: - src += "environment, " + def finalize(value: t.Any) -> t.Any: + return default(env_finalize(value)) - def finalize(value): - return default(env_finalize(self.environment, value)) + else: + src = f"{src}{pass_arg}, " + + if pass_arg == "environment": + + def finalize(value: t.Any) -> t.Any: + return default(env_finalize(self.environment, value)) self._finalize = self._FinalizeInfo(finalize, src) return self._finalize - def _output_const_repr(self, group): + def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: """Given a group of constant values converted from ``Output`` child nodes, produce a string to write to the template module source. """ return repr(concat(group)) - def _output_child_to_const(self, node, frame, finalize): + def _output_child_to_const( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> str: """Try to optimize a child of an ``Output`` node by trying to convert it to constant, finalized data at compile time. @@ -1344,25 +1445,29 @@ # Template data doesn't go through finalize. if isinstance(node, nodes.TemplateData): - return text_type(const) + return str(const) - return finalize.const(const) + return finalize.const(const) # type: ignore - def _output_child_pre(self, node, frame, finalize): + def _output_child_pre( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: """Output extra source code before visiting a child of an ``Output`` node. """ if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else to_string)(") + self.write("(escape if context.eval_ctx.autoescape else str)(") elif frame.eval_ctx.autoescape: self.write("escape(") else: - self.write("to_string(") + self.write("str(") if finalize.src is not None: self.write(finalize.src) - def _output_child_post(self, node, frame, finalize): + def _output_child_post( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: """Output extra source code after visiting a child of an ``Output`` node. """ @@ -1371,7 +1476,7 @@ if finalize.src is not None: self.write(")") - def visit_Output(self, node, frame): + def visit_Output(self, node: nodes.Output, frame: Frame) -> None: # If an extends is active, don't render outside a block. if frame.require_output_check: # A top-level extends is known to exist at compile time. @@ -1382,7 +1487,7 @@ self.indent() finalize = self._make_finalize() - body = [] + body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] # Evaluate constants at compile time if possible. 
Each item in # body will be either a list of static data or a node to be @@ -1414,9 +1519,9 @@ if frame.buffer is not None: if len(body) == 1: - self.writeline("%s.append(" % frame.buffer) + self.writeline(f"{frame.buffer}.append(") else: - self.writeline("%s.extend((" % frame.buffer) + self.writeline(f"{frame.buffer}.extend((") self.indent() @@ -1450,7 +1555,7 @@ if frame.require_output_check: self.outdent() - def visit_Assign(self, node, frame): + def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: self.push_assign_tracking() self.newline(node) self.visit(node.target, frame) @@ -1458,7 +1563,7 @@ self.visit(node.node, frame) self.pop_assign_tracking(frame) - def visit_AssignBlock(self, node, frame): + def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: self.push_assign_tracking() block_frame = frame.inner() # This is a special case. Since a set block always captures we @@ -1475,15 +1580,17 @@ if node.filter is not None: self.visit_Filter(node.filter, block_frame) else: - self.write("concat(%s)" % block_frame.buffer) + self.write(f"concat({block_frame.buffer})") self.write(")") self.pop_assign_tracking(frame) self.leave_frame(block_frame) # -- Expression Visitors - def visit_Name(self, node, frame): - if node.ctx == "store" and frame.toplevel: + def visit_Name(self, node: nodes.Name, frame: Frame) -> None: + if node.ctx == "store" and ( + frame.toplevel or frame.loop_frame or frame.block_frame + ): if self._assign_stack: self._assign_stack[-1].add(node.name) ref = frame.symbols.ref(node.name) @@ -1499,52 +1606,51 @@ and not self.parameter_is_undeclared(ref) ): self.write( - "(undefined(name=%r) if %s is missing else %s)" - % (node.name, ref, ref) + f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" ) return self.write(ref) - def visit_NSRef(self, node, frame): + def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: # NSRefs can only be used to store values; since they use the normal # `foo.bar` notation they will be parsed as a normal attribute access # when used anywhere but in a `set` context ref = frame.symbols.ref(node.name) - self.writeline("if not isinstance(%s, Namespace):" % ref) + self.writeline(f"if not isinstance({ref}, Namespace):") self.indent() self.writeline( - "raise TemplateRuntimeError(%r)" - % "cannot assign attribute on non-namespace object" + "raise TemplateRuntimeError" + '("cannot assign attribute on non-namespace object")' ) self.outdent() - self.writeline("%s[%r]" % (ref, node.attr)) + self.writeline(f"{ref}[{node.attr!r}]") - def visit_Const(self, node, frame): + def visit_Const(self, node: nodes.Const, frame: Frame) -> None: val = node.as_const(frame.eval_ctx) if isinstance(val, float): self.write(str(val)) else: self.write(repr(val)) - def visit_TemplateData(self, node, frame): + def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: try: self.write(repr(node.as_const(frame.eval_ctx))) except nodes.Impossible: self.write( - "(Markup if context.eval_ctx.autoescape else identity)(%r)" % node.data + f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" ) - def visit_Tuple(self, node, frame): + def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: self.write("(") idx = -1 for idx, item in enumerate(node.items): if idx: self.write(", ") self.visit(item, frame) - self.write(idx == 0 and ",)" or ")") + self.write(",)" if idx == 0 else ")") - def visit_List(self, node, frame): + def visit_List(self, node: nodes.List, frame: Frame) -> None: self.write("[") 
for idx, item in enumerate(node.items): if idx: @@ -1552,7 +1658,7 @@ self.visit(item, frame) self.write("]") - def visit_Dict(self, node, frame): + def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: self.write("{") for idx, item in enumerate(node.items): if idx: @@ -1562,96 +1668,59 @@ self.visit(item.value, frame) self.write("}") - def binop(operator, interceptable=True): # noqa: B902 - @optimizeconst - def visitor(self, node, frame): - if ( - self.environment.sandboxed - and operator in self.environment.intercepted_binops - ): - self.write("environment.call_binop(context, %r, " % operator) - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(" %s " % operator) - self.visit(node.right, frame) - self.write(")") - - return visitor - - def uaop(operator, interceptable=True): # noqa: B902 - @optimizeconst - def visitor(self, node, frame): - if ( - self.environment.sandboxed - and operator in self.environment.intercepted_unops - ): - self.write("environment.call_unop(context, %r, " % operator) - self.visit(node.node, frame) - else: - self.write("(" + operator) - self.visit(node.node, frame) - self.write(")") - - return visitor - - visit_Add = binop("+") - visit_Sub = binop("-") - visit_Mul = binop("*") - visit_Div = binop("/") - visit_FloorDiv = binop("//") - visit_Pow = binop("**") - visit_Mod = binop("%") - visit_And = binop("and", interceptable=False) - visit_Or = binop("or", interceptable=False) - visit_Pos = uaop("+") - visit_Neg = uaop("-") - visit_Not = uaop("not ", interceptable=False) - del binop, uaop + visit_Add = _make_binop("+") + visit_Sub = _make_binop("-") + visit_Mul = _make_binop("*") + visit_Div = _make_binop("/") + visit_FloorDiv = _make_binop("//") + visit_Pow = _make_binop("**") + visit_Mod = _make_binop("%") + visit_And = _make_binop("and") + visit_Or = _make_binop("or") + visit_Pos = _make_unop("+") + visit_Neg = _make_unop("-") + visit_Not = _make_unop("not ") @optimizeconst - def visit_Concat(self, node, frame): + def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None: if frame.eval_ctx.volatile: - func_name = "(context.eval_ctx.volatile and markup_join or unicode_join)" + func_name = "(markup_join if context.eval_ctx.volatile else str_join)" elif frame.eval_ctx.autoescape: func_name = "markup_join" else: - func_name = "unicode_join" - self.write("%s((" % func_name) + func_name = "str_join" + self.write(f"{func_name}((") for arg in node.nodes: self.visit(arg, frame) self.write(", ") self.write("))") @optimizeconst - def visit_Compare(self, node, frame): + def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: self.write("(") self.visit(node.expr, frame) for op in node.ops: self.visit(op, frame) self.write(")") - def visit_Operand(self, node, frame): - self.write(" %s " % operators[node.op]) + def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: + self.write(f" {operators[node.op]} ") self.visit(node.expr, frame) @optimizeconst - def visit_Getattr(self, node, frame): + def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: if self.environment.is_async: self.write("(await auto_await(") self.write("environment.getattr(") self.visit(node.node, frame) - self.write(", %r)" % node.attr) + self.write(f", {node.attr!r})") if self.environment.is_async: self.write("))") @optimizeconst - def visit_Getitem(self, node, frame): + def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: # slices bypass the 
environment getitem method. if isinstance(node.arg, nodes.Slice): self.visit(node.node, frame) @@ -1671,7 +1740,7 @@ if self.environment.is_async: self.write("))") - def visit_Slice(self, node, frame): + def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: if node.start is not None: self.visit(node.start, frame) self.write(":") @@ -1681,60 +1750,83 @@ self.write(":") self.visit(node.step, frame) - @optimizeconst - def visit_Filter(self, node, frame): + @contextmanager + def _filter_test_common( + self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool + ) -> t.Iterator[None]: if self.environment.is_async: - self.write("await auto_await(") - self.write(self.filters[node.name] + "(") - func = self.environment.filters.get(node.name) - if func is None: - self.fail("no filter named %r" % node.name, node.lineno) - if getattr(func, "contextfilter", False) is True: - self.write("context, ") - elif getattr(func, "evalcontextfilter", False) is True: - self.write("context.eval_ctx, ") - elif getattr(func, "environmentfilter", False) is True: - self.write("environment, ") + self.write("(await auto_await(") - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - "(context.eval_ctx.autoescape and" - " Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer) - ) - elif frame.eval_ctx.autoescape: - self.write("Markup(concat(%s))" % frame.buffer) + if is_filter: + self.write(f"{self.filters[node.name]}(") + func = self.environment.filters.get(node.name) else: - self.write("concat(%s)" % frame.buffer) + self.write(f"{self.tests[node.name]}(") + func = self.environment.tests.get(node.name) + + # When inside an If or CondExpr frame, allow the filter to be + # undefined at compile time and only raise an error if it's + # actually called at runtime. See pull_dependencies. + if func is None and not frame.soft_frame: + type_name = "filter" if is_filter else "test" + self.fail(f"No {type_name} named {node.name!r}.", node.lineno) + + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(func) # type: ignore + ) + + if pass_arg is not None: + self.write(f"{pass_arg}, ") + + # Back to the visitor function to handle visiting the target of + # the filter or test. + yield + self.signature(node, frame) self.write(")") + if self.environment.is_async: - self.write(")") + self.write("))") @optimizeconst - def visit_Test(self, node, frame): - self.write(self.tests[node.name] + "(") - if node.name not in self.environment.tests: - self.fail("no test named %r" % node.name, node.lineno) - self.visit(node.node, frame) - self.signature(node, frame) - self.write(")") - - @optimizeconst - def visit_CondExpr(self, node, frame): - def write_expr2(): - if node.expr2 is not None: - return self.visit(node.expr2, frame) - self.write( - "cond_expr_undefined(%r)" - % ( - "the inline if-" - "expression on %s evaluated to false and " - "no else section was defined." 
% self.position(node) + def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: + with self._filter_test_common(node, frame, True): + # if the filter node is None we are inside a filter block + # and want to write to the current buffer + if node.node is not None: + self.visit(node.node, frame) + elif frame.eval_ctx.volatile: + self.write( + f"(Markup(concat({frame.buffer}))" + f" if context.eval_ctx.autoescape else concat({frame.buffer}))" ) + elif frame.eval_ctx.autoescape: + self.write(f"Markup(concat({frame.buffer}))") + else: + self.write(f"concat({frame.buffer})") + + @optimizeconst + def visit_Test(self, node: nodes.Test, frame: Frame) -> None: + with self._filter_test_common(node, frame, False): + self.visit(node.node, frame) + + @optimizeconst + def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: + frame = frame.soft() + + def write_expr2() -> None: + if node.expr2 is not None: + self.visit(node.expr2, frame) + return + + self.write( + f'cond_expr_undefined("the inline if-expression on' + f" {self.position(node)} evaluated to false and no else" + f' section was defined.")' ) self.write("(") @@ -1746,71 +1838,89 @@ self.write(")") @optimizeconst - def visit_Call(self, node, frame, forward_caller=False): + def visit_Call( + self, node: nodes.Call, frame: Frame, forward_caller: bool = False + ) -> None: if self.environment.is_async: - self.write("await auto_await(") + self.write("(await auto_await(") if self.environment.sandboxed: self.write("environment.call(context, ") else: self.write("context.call(") self.visit(node.node, frame) - extra_kwargs = forward_caller and {"caller": "caller"} or None + extra_kwargs = {"caller": "caller"} if forward_caller else None + loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} + block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} + if extra_kwargs: + extra_kwargs.update(loop_kwargs, **block_kwargs) + elif loop_kwargs or block_kwargs: + extra_kwargs = dict(loop_kwargs, **block_kwargs) self.signature(node, frame, extra_kwargs) self.write(")") if self.environment.is_async: - self.write(")") + self.write("))") - def visit_Keyword(self, node, frame): + def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: self.write(node.key + "=") self.visit(node.value, frame) # -- Unused nodes for extensions - def visit_MarkSafe(self, node, frame): + def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: self.write("Markup(") self.visit(node.expr, frame) self.write(")") - def visit_MarkSafeIfAutoescape(self, node, frame): - self.write("(context.eval_ctx.autoescape and Markup or identity)(") + def visit_MarkSafeIfAutoescape( + self, node: nodes.MarkSafeIfAutoescape, frame: Frame + ) -> None: + self.write("(Markup if context.eval_ctx.autoescape else identity)(") self.visit(node.expr, frame) self.write(")") - def visit_EnvironmentAttribute(self, node, frame): + def visit_EnvironmentAttribute( + self, node: nodes.EnvironmentAttribute, frame: Frame + ) -> None: self.write("environment." 
+ node.name) - def visit_ExtensionAttribute(self, node, frame): - self.write("environment.extensions[%r].%s" % (node.identifier, node.name)) + def visit_ExtensionAttribute( + self, node: nodes.ExtensionAttribute, frame: Frame + ) -> None: + self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - def visit_ImportedName(self, node, frame): + def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: self.write(self.import_aliases[node.importname]) - def visit_InternalName(self, node, frame): + def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: self.write(node.name) - def visit_ContextReference(self, node, frame): + def visit_ContextReference( + self, node: nodes.ContextReference, frame: Frame + ) -> None: self.write("context") - def visit_DerivedContextReference(self, node, frame): + def visit_DerivedContextReference( + self, node: nodes.DerivedContextReference, frame: Frame + ) -> None: self.write(self.derive_context(frame)) - def visit_Continue(self, node, frame): + def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: self.writeline("continue", node) - def visit_Break(self, node, frame): + def visit_Break(self, node: nodes.Break, frame: Frame) -> None: self.writeline("break", node) - def visit_Scope(self, node, frame): + def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: scope_frame = frame.inner() scope_frame.symbols.analyze_node(node) self.enter_frame(scope_frame) self.blockvisit(node.body, scope_frame) self.leave_frame(scope_frame) - def visit_OverlayScope(self, node, frame): + def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: ctx = self.temporary_identifier() - self.writeline("%s = %s" % (ctx, self.derive_context(frame))) - self.writeline("%s.vars = " % ctx) + self.writeline(f"{ctx} = {self.derive_context(frame)}") + self.writeline(f"{ctx}.vars = ") self.visit(node.context, frame) self.push_context_reference(ctx) @@ -1821,9 +1931,11 @@ self.leave_frame(scope_frame) self.pop_context_reference() - def visit_EvalContextModifier(self, node, frame): + def visit_EvalContextModifier( + self, node: nodes.EvalContextModifier, frame: Frame + ) -> None: for keyword in node.options: - self.writeline("context.eval_ctx.%s = " % keyword.key) + self.writeline(f"context.eval_ctx.{keyword.key} = ") self.visit(keyword.value, frame) try: val = keyword.value.as_const(frame.eval_ctx) @@ -1832,12 +1944,14 @@ else: setattr(frame.eval_ctx, keyword.key, val) - def visit_ScopedEvalContextModifier(self, node, frame): + def visit_ScopedEvalContextModifier( + self, node: nodes.ScopedEvalContextModifier, frame: Frame + ) -> None: old_ctx_name = self.temporary_identifier() saved_ctx = frame.eval_ctx.save() - self.writeline("%s = context.eval_ctx.save()" % old_ctx_name) + self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") self.visit_EvalContextModifier(node, frame) for child in node.body: self.visit(child, frame) frame.eval_ctx.revert(saved_ctx) - self.writeline("context.eval_ctx.revert(%s)" % old_ctx_name) + self.writeline(f"context.eval_ctx.revert({old_ctx_name})")
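
Most of the compiler.py churn above is mechanical: %-formatting replaced by f-strings, with !r standing in for %r. The two spellings produce identical generated source, for example (standalone sketch; the identifiers are made up):

    name = "user count"
    ref = "l_0_user_count"
    old = "context.vars[%r] = %s" % (name, ref)
    new = f"context.vars[{name!r}] = {ref}"
    assert old == new == "context.vars['user count'] = l_0_user_count"
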
diff --git a/third_party/jinja2/constants.py b/third_party/jinja2/constants.py index bf7f2ca..41a1c23b 100644 --- a/third_party/jinja2/constants.py +++ b/third_party/jinja2/constants.py
@@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- #: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = u"""\ +LOREM_IPSUM_WORDS = """\ a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at auctor augue bibendum blandit class commodo condimentum congue consectetuer consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
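
The u prefix dropped here is a no-op on Python 3, where every string literal is already text. A quick standalone check, not part of this patch:

    assert u"lorem ipsum" == "lorem ipsum"
    assert type(u"lorem ipsum") is str
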
diff --git a/third_party/jinja2/debug.py b/third_party/jinja2/debug.py index 5d8aec3..7ed7e92 100644 --- a/third_party/jinja2/debug.py +++ b/third_party/jinja2/debug.py
@@ -1,38 +1,37 @@ import sys +import typing as t from types import CodeType +from types import TracebackType -from . import TemplateSyntaxError -from ._compat import PYPY +from .exceptions import TemplateSyntaxError from .utils import internal_code from .utils import missing +if t.TYPE_CHECKING: + from .runtime import Context -def rewrite_traceback_stack(source=None): + +def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: """Rewrite the current exception to replace any tracebacks from within compiled template code with tracebacks that look like they came from the template source. This must be called within an ``except`` block. - :param exc_info: A :meth:`sys.exc_info` tuple. If not provided, - the current ``exc_info`` is used. :param source: For ``TemplateSyntaxError``, the original source if known. - :return: A :meth:`sys.exc_info` tuple that can be re-raised. + :return: The original exception with the rewritten traceback. """ - exc_type, exc_value, tb = sys.exc_info() + _, exc_value, tb = sys.exc_info() + exc_value = t.cast(BaseException, exc_value) + tb = t.cast(TracebackType, tb) if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: exc_value.translated = True exc_value.source = source - - try: - # Remove the old traceback on Python 3, otherwise the frames - # from the compiler still show up. - exc_value.with_traceback(None) - except AttributeError: - pass - + # Remove the old traceback, otherwise the frames from the + # compiler still show up. + exc_value.with_traceback(None) # Outside of runtime, so the frame isn't executing template # code, but it still needs to point at the template. tb = fake_traceback( @@ -68,12 +67,15 @@ # Assign tb_next in reverse to avoid circular references. for tb in reversed(stack): - tb_next = tb_set_next(tb, tb_next) + tb.tb_next = tb_next + tb_next = tb - return exc_type, exc_value, tb_next + return exc_value.with_traceback(tb_next) -def fake_traceback(exc_value, tb, filename, lineno): +def fake_traceback( # type: ignore + exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int +) -> TracebackType: """Produce a new traceback object that looks like it came from the template source instead of the compiled code. The filename, line number, and location name will point to the template, and the local @@ -100,79 +102,60 @@ "__jinja_exception__": exc_value, } # Raise an exception at the correct line number. - code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec") + code: CodeType = compile( + "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" + ) # Build a new code object that points to the template file and # replaces the location with a block name. - try: - location = "template" + location = "template" - if tb is not None: - function = tb.tb_frame.f_code.co_name + if tb is not None: + function = tb.tb_frame.f_code.co_name - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = 'block "%s"' % function[6:] + if function == "root": + location = "top-level template code" + elif function.startswith("block_"): + location = f"block {function[6:]!r}" - # Collect arguments for the new code object. CodeType only - # accepts positional arguments, and arguments were inserted in - # new Python versions. 
- code_args = [] - - for attr in ( - "argcount", - "posonlyargcount", # Python 3.8 - "kwonlyargcount", # Python 3 - "nlocals", - "stacksize", - "flags", - "code", # codestring - "consts", # constants - "names", - "varnames", - ("filename", filename), - ("name", location), - "firstlineno", - "lnotab", - "freevars", - "cellvars", - ): - if isinstance(attr, tuple): - # Replace with given value. - code_args.append(attr[1]) - continue - - try: - # Copy original value if it exists. - code_args.append(getattr(code, "co_" + attr)) - except AttributeError: - # Some arguments were added later. - continue - - code = CodeType(*code_args) - except Exception: - # Some environments such as Google App Engine don't support - # modifying code objects. - pass + if sys.version_info >= (3, 8): + code = code.replace(co_name=location) + else: + code = CodeType( + code.co_argcount, + code.co_kwonlyargcount, + code.co_nlocals, + code.co_stacksize, + code.co_flags, + code.co_code, + code.co_consts, + code.co_names, + code.co_varnames, + code.co_filename, + location, + code.co_firstlineno, + code.co_lnotab, + code.co_freevars, + code.co_cellvars, + ) # Execute the new code, which is guaranteed to raise, and return # the new traceback without this frame. try: exec(code, globals, locals) except BaseException: - return sys.exc_info()[2].tb_next + return sys.exc_info()[2].tb_next # type: ignore -def get_template_locals(real_locals): +def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: """Based on the runtime locals, get the context that would be available at that point in the template. """ # Start with the current template context. - ctx = real_locals.get("context") + ctx: "t.Optional[Context]" = real_locals.get("context") - if ctx: - data = ctx.get_all().copy() + if ctx is not None: + data: t.Dict[str, t.Any] = ctx.get_all().copy() else: data = {} @@ -180,7 +163,7 @@ # rather than pushing a context. Local variables follow the scheme # l_depth_name. Find the highest-depth local that has a value for # each name. - local_overrides = {} + local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} for name, value in real_locals.items(): if not name.startswith("l_") or value is missing: @@ -188,8 +171,8 @@ continue try: - _, depth, name = name.split("_", 2) - depth = int(depth) + _, depth_str, name = name.split("_", 2) + depth = int(depth_str) except ValueError: continue @@ -206,63 +189,3 @@ data[name] = value return data - - -if sys.version_info >= (3, 7): - # tb_next is directly assignable as of Python 3.7 - def tb_set_next(tb, tb_next): - tb.tb_next = tb_next - return tb - - -elif PYPY: - # PyPy might have special support, and won't work with ctypes. - try: - import tputil - except ImportError: - # Without tproxy support, use the original traceback. - def tb_set_next(tb, tb_next): - return tb - - else: - # With tproxy support, create a proxy around the traceback that - # returns the new tb_next. - def tb_set_next(tb, tb_next): - def controller(op): - if op.opname == "__getattribute__" and op.args[0] == "tb_next": - return tb_next - - return op.delegate() - - return tputil.make_proxy(controller, obj=tb) - - -else: - # Use ctypes to assign tb_next at the C level since it's read-only - # from Python. - import ctypes - - class _CTraceback(ctypes.Structure): - _fields_ = [ - # Extra PyObject slots when compiled with Py_TRACE_REFS. - ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()), - # Only care about tb_next as an object, not a traceback. 
- ("tb_next", ctypes.py_object), - ] - - def tb_set_next(tb, tb_next): - c_tb = _CTraceback.from_address(id(tb)) - - # Clear out the old tb_next. - if tb.tb_next is not None: - c_tb_next = ctypes.py_object(tb.tb_next) - c_tb.tb_next = ctypes.py_object() - ctypes.pythonapi.Py_DecRef(c_tb_next) - - # Assign the new tb_next. - if tb_next is not None: - c_tb_next = ctypes.py_object(tb_next) - ctypes.pythonapi.Py_IncRef(c_tb_next) - c_tb.tb_next = c_tb_next - - return tb
diff --git a/third_party/jinja2/defaults.py b/third_party/jinja2/defaults.py index 8e0e7d7..638cad3 100644 --- a/third_party/jinja2/defaults.py +++ b/third_party/jinja2/defaults.py
@@ -1,5 +1,5 @@ -# -*- coding: utf-8 -*- -from ._compat import range_type +import typing as t + from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 from .tests import TESTS as DEFAULT_TESTS # noqa: F401 from .utils import Cycler @@ -7,6 +7,9 @@ from .utils import Joiner from .utils import Namespace +if t.TYPE_CHECKING: + import typing_extensions as te + # defaults for the parser / lexer BLOCK_START_STRING = "{%" BLOCK_END_STRING = "%}" @@ -14,17 +17,17 @@ VARIABLE_END_STRING = "}}" COMMENT_START_STRING = "{#" COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX = None -LINE_COMMENT_PREFIX = None +LINE_STATEMENT_PREFIX: t.Optional[str] = None +LINE_COMMENT_PREFIX: t.Optional[str] = None TRIM_BLOCKS = False LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE = "\n" +NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" KEEP_TRAILING_NEWLINE = False # default filters, tests and namespace DEFAULT_NAMESPACE = { - "range": range_type, + "range": range, "dict": dict, "lipsum": generate_lorem_ipsum, "cycler": Cycler, @@ -33,10 +36,11 @@ } # default policies -DEFAULT_POLICIES = { +DEFAULT_POLICIES: t.Dict[str, t.Any] = { "compiler.ascii_str": True, "urlize.rel": "noopener", "urlize.target": None, + "urlize.extra_schemes": None, "truncate.leeway": 5, "json.dumps_function": None, "json.dumps_kwargs": {"sort_keys": True},
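
DEFAULT_POLICIES is copied onto each Environment as env.policies, so the new "urlize.extra_schemes" entry can be overridden per environment to linkify additional URL schemes. A standalone sketch, not part of this patch (the ftp URL is only illustrative):

    from jinja2 import Environment

    env = Environment()
    # Allow ftp:// links in addition to the built-in http(s)/mailto handling.
    env.policies["urlize.extra_schemes"] = ["ftp://"]
    print(env.from_string("{{ 'docs at ftp://example.com' | urlize }}").render())
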
diff --git a/third_party/jinja2/environment.py b/third_party/jinja2/environment.py index 8430390..ea04e8b 100644 --- a/third_party/jinja2/environment.py +++ b/third_party/jinja2/environment.py
@@ -1,25 +1,19 @@ -# -*- coding: utf-8 -*- """Classes for managing templates and their runtime and compile time options. """ import os -import sys +import typing +import typing as t import weakref +from collections import ChainMap +from functools import lru_cache from functools import partial from functools import reduce +from types import CodeType from markupsafe import Markup from . import nodes -from ._compat import encode_filename -from ._compat import implements_iterator -from ._compat import implements_to_string -from ._compat import iteritems -from ._compat import PY2 -from ._compat import PYPY -from ._compat import reraise -from ._compat import string_types -from ._compat import text_type from .compiler import CodeGenerator from .compiler import generate from .defaults import BLOCK_END_STRING @@ -44,25 +38,33 @@ from .exceptions import TemplateSyntaxError from .exceptions import UndefinedError from .lexer import get_lexer +from .lexer import Lexer from .lexer import TokenStream from .nodes import EvalContext from .parser import Parser from .runtime import Context from .runtime import new_context from .runtime import Undefined +from .utils import _PassArg from .utils import concat from .utils import consume -from .utils import have_async_gen from .utils import import_string from .utils import internalcode from .utils import LRUCache from .utils import missing +if t.TYPE_CHECKING: + import typing_extensions as te + from .bccache import BytecodeCache + from .ext import Extension + from .loaders import BaseLoader + +_env_bound = t.TypeVar("_env_bound", bound="Environment") + + # for direct template usage we have up to ten living environments -_spontaneous_environments = LRUCache(10) - - -def get_spontaneous_environment(cls, *args): +@lru_cache(maxsize=10) +def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: """Return a new spontaneous environment. A spontaneous environment is used for templates created directly rather than through an existing environment. @@ -70,75 +72,74 @@ :param cls: Environment class to create. :param args: Positional arguments passed to environment. """ - key = (cls, args) - - try: - return _spontaneous_environments[key] - except KeyError: - _spontaneous_environments[key] = env = cls(*args) - env.shared = True - return env + env = cls(*args) + env.shared = True + return env -def create_cache(size): +def create_cache( + size: int, +) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: """Return the cache class for the given size.""" if size == 0: return None + if size < 0: return {} - return LRUCache(size) + + return LRUCache(size) # type: ignore -def copy_cache(cache): +def copy_cache( + cache: t.Optional[t.MutableMapping], +) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: """Create an empty copy of the given cache.""" if cache is None: return None - elif type(cache) is dict: + + if type(cache) is dict: return {} - return LRUCache(cache.capacity) + + return LRUCache(cache.capacity) # type: ignore -def load_extensions(environment, extensions): +def load_extensions( + environment: "Environment", + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], +) -> t.Dict[str, "Extension"]: """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated environments. + Returns a dict of instantiated extensions. 
""" result = {} + for extension in extensions: - if isinstance(extension, string_types): - extension = import_string(extension) + if isinstance(extension, str): + extension = t.cast(t.Type["Extension"], import_string(extension)) + result[extension.identifier] = extension(environment) + return result -def fail_for_missing_callable(string, name): - msg = string % name - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = "%s (%s; did you forget to quote the callable name?)" % (msg, e) - raise TemplateRuntimeError(msg) - - -def _environment_sanity_check(environment): +def _environment_config_check(environment: "Environment") -> "Environment": """Perform a sanity check on the environment.""" assert issubclass( environment.undefined, Undefined - ), "undefined must be a subclass of undefined because filters depend on it." + ), "'undefined' must be a subclass of 'jinja2.Undefined'." assert ( environment.block_start_string != environment.variable_start_string != environment.comment_start_string - ), "block, variable and comment start strings must be different" - assert environment.newline_sequence in ( + ), "block, variable and comment start strings must be different." + assert environment.newline_sequence in { "\r", "\r\n", "\n", - ), "newline_sequence set to unknown line ending string." + }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." return environment -class Environment(object): +class Environment: r"""The core component of Jinja is the `Environment`. It contains important shared variables like configuration, filters, tests, globals and others. Instances of this class may be modified if @@ -256,9 +257,8 @@ See :ref:`bytecode-cache` for more information. `enable_async` - If set to true this enables async template execution which allows - you to take advantage of newer Python features. This requires - Python 3.6 or later. + If set to true this enables async template execution which + allows using async functions and generators. """ #: if this environment is sandboxed. Modifying this variable won't make @@ -271,7 +271,7 @@ overlayed = False #: the environment this environment is linked to if it is an overlay - linked_to = None + linked_to: t.Optional["Environment"] = None #: shared environments have this set to `True`. A shared environment #: must not be modified @@ -279,36 +279,40 @@ #: the class that is used for code generation. See #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class = CodeGenerator + code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - #: the context class thatis used for templates. See + concat = "".join + + #: the context class that is used for templates. See #: :class:`~jinja2.runtime.Context` for more information. 
- context_class = Context + context_class: t.Type[Context] = Context + + template_class: t.Type["Template"] def __init__( self, - block_start_string=BLOCK_START_STRING, - block_end_string=BLOCK_END_STRING, - variable_start_string=VARIABLE_START_STRING, - variable_end_string=VARIABLE_END_STRING, - comment_start_string=COMMENT_START_STRING, - comment_end_string=COMMENT_END_STRING, - line_statement_prefix=LINE_STATEMENT_PREFIX, - line_comment_prefix=LINE_COMMENT_PREFIX, - trim_blocks=TRIM_BLOCKS, - lstrip_blocks=LSTRIP_BLOCKS, - newline_sequence=NEWLINE_SEQUENCE, - keep_trailing_newline=KEEP_TRAILING_NEWLINE, - extensions=(), - optimized=True, - undefined=Undefined, - finalize=None, - autoescape=False, - loader=None, - cache_size=400, - auto_reload=True, - bytecode_cache=None, - enable_async=False, + block_start_string: str = BLOCK_START_STRING, + block_end_string: str = BLOCK_END_STRING, + variable_start_string: str = VARIABLE_START_STRING, + variable_end_string: str = VARIABLE_END_STRING, + comment_start_string: str = COMMENT_START_STRING, + comment_end_string: str = COMMENT_END_STRING, + line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, + line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, + trim_blocks: bool = TRIM_BLOCKS, + lstrip_blocks: bool = LSTRIP_BLOCKS, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, + keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), + optimized: bool = True, + undefined: t.Type[Undefined] = Undefined, + finalize: t.Optional[t.Callable[..., t.Any]] = None, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, + loader: t.Optional["BaseLoader"] = None, + cache_size: int = 400, + auto_reload: bool = True, + bytecode_cache: t.Optional["BytecodeCache"] = None, + enable_async: bool = False, ): # !!Important notice!! # The constructor accepts quite a few arguments that should be @@ -336,7 +340,7 @@ self.keep_trailing_newline = keep_trailing_newline # runtime information - self.undefined = undefined + self.undefined: t.Type[Undefined] = undefined self.optimized = optimized self.finalize = finalize self.autoescape = autoescape @@ -358,52 +362,50 @@ # load extensions self.extensions = load_extensions(self, extensions) - self.enable_async = enable_async - self.is_async = self.enable_async and have_async_gen - if self.is_async: - # runs patch_all() to enable async support - from . import asyncsupport # noqa: F401 + self.is_async = enable_async + _environment_config_check(self) - _environment_sanity_check(self) - - def add_extension(self, extension): + def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: """Adds an extension after the environment was created. .. versionadded:: 2.5 """ self.extensions.update(load_extensions(self, [extension])) - def extend(self, **attributes): + def extend(self, **attributes: t.Any) -> None: """Add the items to the instance of the environment if they do not exist yet. This is used by :ref:`extensions <writing-extensions>` to register callbacks and configuration values without breaking inheritance. 
""" - for key, value in iteritems(attributes): + for key, value in attributes.items(): if not hasattr(self, key): setattr(self, key, value) def overlay( self, - block_start_string=missing, - block_end_string=missing, - variable_start_string=missing, - variable_end_string=missing, - comment_start_string=missing, - comment_end_string=missing, - line_statement_prefix=missing, - line_comment_prefix=missing, - trim_blocks=missing, - lstrip_blocks=missing, - extensions=missing, - optimized=missing, - undefined=missing, - finalize=missing, - autoescape=missing, - loader=missing, - cache_size=missing, - auto_reload=missing, - bytecode_cache=missing, - ): + block_start_string: str = missing, + block_end_string: str = missing, + variable_start_string: str = missing, + variable_end_string: str = missing, + comment_start_string: str = missing, + comment_end_string: str = missing, + line_statement_prefix: t.Optional[str] = missing, + line_comment_prefix: t.Optional[str] = missing, + trim_blocks: bool = missing, + lstrip_blocks: bool = missing, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, + keep_trailing_newline: bool = missing, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, + optimized: bool = missing, + undefined: t.Type[Undefined] = missing, + finalize: t.Optional[t.Callable[..., t.Any]] = missing, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, + loader: t.Optional["BaseLoader"] = missing, + cache_size: int = missing, + auto_reload: bool = missing, + bytecode_cache: t.Optional["BytecodeCache"] = missing, + enable_async: bool = False, + ) -> "Environment": """Create a new overlay environment that shares all the data with the current environment except for cache and the overridden attributes. Extensions cannot be removed for an overlayed environment. An overlayed @@ -414,16 +416,20 @@ up completely. Not all attributes are truly linked, some are just copied over so modifications on the original environment may not shine through. + + .. versionchanged:: 3.1.2 + Added the ``newline_sequence``,, ``keep_trailing_newline``, + and ``enable_async`` parameters to match ``__init__``. 
""" args = dict(locals()) - del args["self"], args["cache_size"], args["extensions"] + del args["self"], args["cache_size"], args["extensions"], args["enable_async"] rv = object.__new__(self.__class__) rv.__dict__.update(self.__dict__) rv.overlayed = True rv.linked_to = self - for key, value in iteritems(args): + for key, value in args.items(): if value is not missing: setattr(rv, key, value) @@ -433,25 +439,33 @@ rv.cache = copy_cache(self.cache) rv.extensions = {} - for key, value in iteritems(self.extensions): + for key, value in self.extensions.items(): rv.extensions[key] = value.bind(rv) if extensions is not missing: rv.extensions.update(load_extensions(rv, extensions)) - return _environment_sanity_check(rv) + if enable_async is not missing: + rv.is_async = enable_async - lexer = property(get_lexer, doc="The lexer for this environment.") + return _environment_config_check(rv) - def iter_extensions(self): + @property + def lexer(self) -> Lexer: + """The lexer for this environment.""" + return get_lexer(self) + + def iter_extensions(self) -> t.Iterator["Extension"]: """Iterates over the extensions by priority.""" return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - def getitem(self, obj, argument): + def getitem( + self, obj: t.Any, argument: t.Union[str, t.Any] + ) -> t.Union[t.Any, Undefined]: """Get an item or attribute of an object but prefer the item.""" try: return obj[argument] except (AttributeError, TypeError, LookupError): - if isinstance(argument, string_types): + if isinstance(argument, str): try: attr = str(argument) except Exception: @@ -463,9 +477,9 @@ pass return self.undefined(obj=obj, name=argument) - def getattr(self, obj, attribute): + def getattr(self, obj: t.Any, attribute: str) -> t.Any: """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a bytestring. + Unlike :meth:`getitem` the attribute *must* be a string. """ try: return getattr(obj, attribute) @@ -476,51 +490,113 @@ except (TypeError, LookupError, AttributeError): return self.undefined(obj=obj, name=attribute) - def call_filter( - self, name, value, args=None, kwargs=None, context=None, eval_ctx=None - ): - """Invokes a filter on a value the same way the compiler does it. + def _filter_test_common( + self, + name: t.Union[str, Undefined], + value: t.Any, + args: t.Optional[t.Sequence[t.Any]], + kwargs: t.Optional[t.Mapping[str, t.Any]], + context: t.Optional[Context], + eval_ctx: t.Optional[EvalContext], + is_filter: bool, + ) -> t.Any: + if is_filter: + env_map = self.filters + type_name = "filter" + else: + env_map = self.tests + type_name = "test" - Note that on Python 3 this might return a coroutine in case the - filter is running from an environment in async mode and the filter - supports async execution. It's your responsibility to await this - if needed. + func = env_map.get(name) # type: ignore - .. versionadded:: 2.7 - """ - func = self.filters.get(name) if func is None: - fail_for_missing_callable("no filter named %r", name) - args = [value] + list(args or ()) - if getattr(func, "contextfilter", False) is True: + msg = f"No {type_name} named {name!r}." 
+ + if isinstance(name, Undefined): + try: + name._fail_with_undefined_error() + except Exception as e: + msg = f"{msg} ({e}; did you forget to quote the callable name?)" + + raise TemplateRuntimeError(msg) + + args = [value, *(args if args is not None else ())] + kwargs = kwargs if kwargs is not None else {} + pass_arg = _PassArg.from_obj(func) + + if pass_arg is _PassArg.context: if context is None: raise TemplateRuntimeError( - "Attempted to invoke context filter without context" + f"Attempted to invoke a context {type_name} without context." ) + args.insert(0, context) - elif getattr(func, "evalcontextfilter", False) is True: + elif pass_arg is _PassArg.eval_context: if eval_ctx is None: if context is not None: eval_ctx = context.eval_ctx else: eval_ctx = EvalContext(self) - args.insert(0, eval_ctx) - elif getattr(func, "environmentfilter", False) is True: - args.insert(0, self) - return func(*args, **(kwargs or {})) - def call_test(self, name, value, args=None, kwargs=None): - """Invokes a test on a value the same way the compiler does it. + args.insert(0, eval_ctx) + elif pass_arg is _PassArg.environment: + args.insert(0, self) + + return func(*args, **kwargs) + + def call_filter( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a filter on a value the same way the compiler does. + + This might return a coroutine if the filter is running from an + environment in async mode and the filter supports async + execution. It's your responsibility to await this if needed. .. versionadded:: 2.7 """ - func = self.tests.get(name) - if func is None: - fail_for_missing_callable("no test named %r", name) - return func(value, *(args or ()), **(kwargs or {})) + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, True + ) + + def call_test( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a test on a value the same way the compiler does. + + This might return a coroutine if the test is running from an + environment in async mode and the test supports async execution. + It's your responsibility to await this if needed. + + .. versionchanged:: 3.0 + Tests support ``@pass_context``, etc. decorators. Added + the ``context`` and ``eval_ctx`` parameters. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, False + ) @internalcode - def parse(self, source, name=None, filename=None): + def parse( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> nodes.Template: """Parse the sourcecode and return the abstract syntax tree. This tree of nodes is used by the compiler to convert the template into executable source- or bytecode. 
This is useful for debugging or to @@ -534,11 +610,18 @@ except TemplateSyntaxError: self.handle_exception(source=source) - def _parse(self, source, name, filename): + def _parse( + self, source: str, name: t.Optional[str], filename: t.Optional[str] + ) -> nodes.Template: """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, encode_filename(filename)).parse() + return Parser(self, source, name, filename).parse() - def lex(self, source, name=None, filename=None): + def lex( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> t.Iterator[t.Tuple[int, str, str]]: """Lex the given sourcecode and return a generator that yields tokens as tuples in the form ``(lineno, token_type, value)``. This can be useful for :ref:`extension development <writing-extensions>` @@ -548,13 +631,18 @@ of the extensions to be applied you have to filter source through the :meth:`preprocess` method. """ - source = text_type(source) + source = str(source) try: return self.lexer.tokeniter(source, name, filename) except TemplateSyntaxError: self.handle_exception(source=source) - def preprocess(self, source, name=None, filename=None): + def preprocess( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> str: """Preprocesses the source with all extensions. This is automatically called for all parsing and compiling methods but *not* for :meth:`lex` because there you usually only want the actual source tokenized. @@ -562,28 +650,43 @@ return reduce( lambda s, e: e.preprocess(s, name, filename), self.iter_extensions(), - text_type(source), + str(source), ) - def _tokenize(self, source, name, filename=None, state=None): + def _tokenize( + self, + source: str, + name: t.Optional[str], + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> TokenStream: """Called by the parser to do the preprocessing and filtering for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. """ source = self.preprocess(source, name, filename) stream = self.lexer.tokenize(source, name, filename, state) + for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) + stream = ext.filter_stream(stream) # type: ignore + if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) + stream = TokenStream(stream, name, filename) # type: ignore + return stream - def _generate(self, source, name, filename, defer_init=False): + def _generate( + self, + source: nodes.Template, + name: t.Optional[str], + filename: t.Optional[str], + defer_init: bool = False, + ) -> str: """Internal hook that can be overridden to hook a different generate method in. .. versionadded:: 2.5 """ - return generate( + return generate( # type: ignore source, self, name, @@ -592,16 +695,45 @@ optimized=self.optimized, ) - def _compile(self, source, filename): + def _compile(self, source: str, filename: str) -> CodeType: """Internal hook that can be overridden to hook a different compile method in. .. versionadded:: 2.5 """ - return compile(source, filename, "exec") + return compile(source, filename, "exec") # type: ignore + + @typing.overload + def compile( # type: ignore + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[False]" = False, + defer_init: bool = False, + ) -> CodeType: + ... 
+ + @typing.overload + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[True]" = ..., + defer_init: bool = False, + ) -> str: + ... @internalcode - def compile(self, source, name=None, filename=None, raw=False, defer_init=False): + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: bool = False, + defer_init: bool = False, + ) -> t.Union[str, CodeType]: """Compile a node or template source code. The `name` parameter is the load name of the template after it was joined using :meth:`join_path` if necessary, not the filename on the file system. @@ -623,7 +755,7 @@ """ source_hint = None try: - if isinstance(source, string_types): + if isinstance(source, str): source_hint = source source = self._parse(source, name, filename) source = self._generate(source, name, filename, defer_init=defer_init) @@ -631,13 +763,13 @@ return source if filename is None: filename = "<template>" - else: - filename = encode_filename(filename) return self._compile(source, filename) except TemplateSyntaxError: self.handle_exception(source=source_hint) - def compile_expression(self, source, undefined_to_none=True): + def compile_expression( + self, source: str, undefined_to_none: bool = True + ) -> "TemplateExpression": """A handy helper method that returns a callable that accepts keyword arguments that appear as variables in the expression. If called it returns the result of the expression. @@ -674,8 +806,7 @@ ) expr.set_environment(self) except TemplateSyntaxError: - if sys.exc_info() is not None: - self.handle_exception(source=source) + self.handle_exception(source=source) body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)] template = self.from_string(nodes.Template(body, lineno=1)) @@ -683,14 +814,13 @@ def compile_templates( self, - target, - extensions=None, - filter_func=None, - zip="deflated", - log_function=None, - ignore_errors=True, - py_compile=False, - ): + target: t.Union[str, os.PathLike], + extensions: t.Optional[t.Collection[str]] = None, + filter_func: t.Optional[t.Callable[[str], bool]] = None, + zip: t.Optional[str] = "deflated", + log_function: t.Optional[t.Callable[[str], None]] = None, + ignore_errors: bool = True, + ) -> None: """Finds all the templates the loader can find, compiles them and stores them in `target`. If `zip` is `None`, instead of in a zipfile, the templates will be stored in a directory. @@ -706,52 +836,26 @@ syntax errors to abort the compilation you can set `ignore_errors` to `False` and you will get an exception on syntax errors. - If `py_compile` is set to `True` .pyc files will be written to the - target instead of standard .py files. This flag does not do anything - on pypy and Python 3 where pyc files are not picked up by itself and - don't give much benefit. - .. versionadded:: 2.4 """ from .loaders import ModuleLoader if log_function is None: - def log_function(x): + def log_function(x: str) -> None: pass - if py_compile: - if not PY2 or PYPY: - import warnings + assert log_function is not None + assert self.loader is not None, "No loader configured." 
- warnings.warn( - "'py_compile=True' has no effect on PyPy or Python" - " 3 and will be removed in version 3.0", - DeprecationWarning, - stacklevel=2, - ) - py_compile = False - else: - import imp - import marshal - - py_header = imp.get_magic() + u"\xff\xff\xff\xff".encode("iso-8859-15") - - # Python 3.3 added a source filesize to the header - if sys.version_info >= (3, 3): - py_header += u"\x00\x00\x00\x00".encode("iso-8859-15") - - def write_file(filename, data): + def write_file(filename: str, data: str) -> None: if zip: info = ZipInfo(filename) info.external_attr = 0o755 << 16 zip_file.writestr(info, data) else: - if isinstance(data, text_type): - data = data.encode("utf8") - with open(os.path.join(target, filename), "wb") as f: - f.write(data) + f.write(data.encode("utf8")) if zip is not None: from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED @@ -759,11 +863,11 @@ zip_file = ZipFile( target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip] ) - log_function('Compiling into Zip archive "%s"' % target) + log_function(f"Compiling into Zip archive {target!r}") else: if not os.path.isdir(target): os.makedirs(target) - log_function('Compiling into folder "%s"' % target) + log_function(f"Compiling into folder {target!r}") try: for name in self.list_templates(extensions, filter_func): @@ -773,25 +877,24 @@ except TemplateSyntaxError as e: if not ignore_errors: raise - log_function('Could not compile "%s": %s' % (name, e)) + log_function(f'Could not compile "{name}": {e}') continue filename = ModuleLoader.get_module_filename(name) - if py_compile: - c = self._compile(code, encode_filename(filename)) - write_file(filename + "c", py_header + marshal.dumps(c)) - log_function('Byte-compiled "%s" as %s' % (name, filename + "c")) - else: - write_file(filename, code) - log_function('Compiled "%s" as %s' % (name, filename)) + write_file(filename, code) + log_function(f'Compiled "{name}" as {filename}') finally: if zip: zip_file.close() log_function("Finished compiling templates") - def list_templates(self, extensions=None, filter_func=None): + def list_templates( + self, + extensions: t.Optional[t.Collection[str]] = None, + filter_func: t.Optional[t.Callable[[str], bool]] = None, + ) -> t.List[str]: """Returns a list of templates for this environment. This requires that the loader supports the loader's :meth:`~BaseLoader.list_templates` method. @@ -807,6 +910,7 @@ .. versionadded:: 2.4 """ + assert self.loader is not None, "No loader configured." names = self.loader.list_templates() if extensions is not None: @@ -815,23 +919,23 @@ "either extensions or filter_func can be passed, but not both" ) - def filter_func(x): - return "." in x and x.rsplit(".", 1)[1] in extensions + def filter_func(x: str) -> bool: + return "." in x and x.rsplit(".", 1)[1] in extensions # type: ignore if filter_func is not None: names = [name for name in names if filter_func(name)] return names - def handle_exception(self, source=None): + def handle_exception(self, source: t.Optional[str] = None) -> "te.NoReturn": """Exception handling helper. This is used internally to either raise rewritten exceptions or return a rendered traceback for the template. """ from .debug import rewrite_traceback_stack - reraise(*rewrite_traceback_stack(source=source)) + raise rewrite_traceback_stack(source=source) - def join_path(self, template, parent): + def join_path(self, template: str, parent: str) -> str: """Join a template with the parent. 
By default all the lookups are relative to the loader root so this method returns the `template` parameter unchanged, but if the paths should be relative to the @@ -844,7 +948,9 @@ return template @internalcode - def _load_template(self, name, globals): + def _load_template( + self, name: str, globals: t.Optional[t.MutableMapping[str, t.Any]] + ) -> "Template": if self.loader is None: raise TypeError("no loader for this environment specified") cache_key = (weakref.ref(self.loader), name) @@ -853,49 +959,88 @@ if template is not None and ( not self.auto_reload or template.is_up_to_date ): + # template.globals is a ChainMap, modifying it will only + # affect the template, not the environment globals. + if globals: + template.globals.update(globals) + return template - template = self.loader.load(self, name, globals) + + template = self.loader.load(self, name, self.make_globals(globals)) + if self.cache is not None: self.cache[cache_key] = template return template @internalcode - def get_template(self, name, parent=None, globals=None): - """Load a template from the loader. If a loader is configured this - method asks the loader for the template and returns a :class:`Template`. - If the `parent` parameter is not `None`, :meth:`join_path` is called - to get the real template name before loading. + def get_template( + self, + name: t.Union[str, "Template"], + parent: t.Optional[str] = None, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + """Load a template by name with :attr:`loader` and return a + :class:`Template`. If the template does not exist a + :exc:`TemplateNotFound` exception is raised. - The `globals` parameter can be used to provide template wide globals. - These variables are available in the context at render time. + :param name: Name of the template to load. When loading + templates from the filesystem, "/" is used as the path + separator, even on Windows. + :param parent: The name of the parent template importing this + template. :meth:`join_path` can be used to implement name + transformations with this. + :param globals: Extend the environment :attr:`globals` with + these extra variables available for all renders of this + template. If the template has already been loaded and + cached, its globals are updated with any new items. - If the template does not exist a :exc:`TemplateNotFound` exception is - raised. + .. versionchanged:: 3.0 + If a template is loaded from cache, ``globals`` will update + the template's globals instead of ignoring the new values. .. versionchanged:: 2.4 - If `name` is a :class:`Template` object it is returned from the - function unchanged. + If ``name`` is a :class:`Template` object it is returned + unchanged. """ if isinstance(name, Template): return name if parent is not None: name = self.join_path(name, parent) - return self._load_template(name, self.make_globals(globals)) + + return self._load_template(name, globals) @internalcode - def select_template(self, names, parent=None, globals=None): - """Works like :meth:`get_template` but tries a number of templates - before it fails. If it cannot find any of the templates, it will - raise a :exc:`TemplatesNotFound` exception. + def select_template( + self, + names: t.Iterable[t.Union[str, "Template"]], + parent: t.Optional[str] = None, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + """Like :meth:`get_template`, but tries loading multiple names. + If none of the names can be loaded a :exc:`TemplatesNotFound` + exception is raised. 
+ + :param names: List of template names to try loading in order. + :param parent: The name of the parent template importing this + template. :meth:`join_path` can be used to implement name + transformations with this. + :param globals: Extend the environment :attr:`globals` with + these extra variables available for all renders of this + template. If the template has already been loaded and + cached, its globals are updated with any new items. + + .. versionchanged:: 3.0 + If a template is loaded from cache, ``globals`` will update + the template's globals instead of ignoring the new values. .. versionchanged:: 2.11 - If names is :class:`Undefined`, an :exc:`UndefinedError` is - raised instead. If no templates were found and names + If ``names`` is :class:`Undefined`, an :exc:`UndefinedError` + is raised instead. If no templates were found and ``names`` contains :class:`Undefined`, the message is more helpful. .. versionchanged:: 2.4 - If `names` contains a :class:`Template` object it is returned - from the function unchanged. + If ``names`` contains a :class:`Template` object it is + returned unchanged. .. versionadded:: 2.3 """ @@ -904,9 +1049,9 @@ if not names: raise TemplatesNotFound( - message=u"Tried to select from an empty list " u"of templates." + message="Tried to select from an empty list of templates." ) - globals = self.make_globals(globals) + for name in names: if isinstance(name, Template): return name @@ -916,95 +1061,127 @@ return self._load_template(name, globals) except (TemplateNotFound, UndefinedError): pass - raise TemplatesNotFound(names) + raise TemplatesNotFound(names) # type: ignore @internalcode - def get_or_select_template(self, template_name_or_list, parent=None, globals=None): - """Does a typecheck and dispatches to :meth:`select_template` - if an iterable of template names is given, otherwise to - :meth:`get_template`. + def get_or_select_template( + self, + template_name_or_list: t.Union[ + str, "Template", t.List[t.Union[str, "Template"]] + ], + parent: t.Optional[str] = None, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + """Use :meth:`select_template` if an iterable of template names + is given, or :meth:`get_template` if one name is given. .. versionadded:: 2.3 """ - if isinstance(template_name_or_list, (string_types, Undefined)): + if isinstance(template_name_or_list, (str, Undefined)): return self.get_template(template_name_or_list, parent, globals) elif isinstance(template_name_or_list, Template): return template_name_or_list return self.select_template(template_name_or_list, parent, globals) - def from_string(self, source, globals=None, template_class=None): - """Load a template from a string. This parses the source given and - returns a :class:`Template` object. + def from_string( + self, + source: t.Union[str, nodes.Template], + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + template_class: t.Optional[t.Type["Template"]] = None, + ) -> "Template": + """Load a template from a source string without using + :attr:`loader`. + + :param source: Jinja source to compile into a template. + :param globals: Extend the environment :attr:`globals` with + these extra variables available for all renders of this + template. If the template has already been loaded and + cached, its globals are updated with any new items. + :param template_class: Return an instance of this + :class:`Template` class. 
""" - globals = self.make_globals(globals) + gs = self.make_globals(globals) cls = template_class or self.template_class - return cls.from_code(self, self.compile(source), globals, None) + return cls.from_code(self, self.compile(source), gs, None) - def make_globals(self, d): - """Return a dict for the globals.""" - if not d: - return self.globals - return dict(self.globals, **d) + def make_globals( + self, d: t.Optional[t.MutableMapping[str, t.Any]] + ) -> t.MutableMapping[str, t.Any]: + """Make the globals map for a template. Any given template + globals overlay the environment :attr:`globals`. + + Returns a :class:`collections.ChainMap`. This allows any changes + to a template's globals to only affect that template, while + changes to the environment's globals are still reflected. + However, avoid modifying any globals after a template is loaded. + + :param d: Dict of template-specific globals. + + .. versionchanged:: 3.0 + Use :class:`collections.ChainMap` to always prevent mutating + environment globals. + """ + if d is None: + d = {} + + return ChainMap(d, self.globals) -class Template(object): - """The central template object. This class represents a compiled template - and is used to evaluate it. +class Template: + """A compiled template that can be rendered. - Normally the template object is generated from an :class:`Environment` but - it also has a constructor that makes it possible to create a template - instance directly using the constructor. It takes the same arguments as - the environment constructor but it's not possible to specify a loader. + Use the methods on :class:`Environment` to create or load templates. + The environment is used to configure how templates are compiled and + behave. - Every template object has a few methods and members that are guaranteed - to exist. However it's important that a template object should be - considered immutable. Modifications on the object are not supported. + It is also possible to create a template object directly. This is + not usually recommended. The constructor takes most of the same + arguments as :class:`Environment`. All templates created with the + same environment arguments share the same ephemeral ``Environment`` + instance behind the scenes. - Template objects created from the constructor rather than an environment - do have an `environment` attribute that points to a temporary environment - that is probably shared with other templates created with the constructor - and compatible settings. - - >>> template = Template('Hello {{ name }}!') - >>> template.render(name='John Doe') == u'Hello John Doe!' - True - >>> stream = template.stream(name='John Doe') - >>> next(stream) == u'Hello John Doe!' - True - >>> next(stream) - Traceback (most recent call last): - ... - StopIteration + A template object should be considered immutable. Modifications on + the object are not supported. """ #: Type of environment to create when creating a template directly #: rather than through an existing environment. 
- environment_class = Environment + environment_class: t.Type[Environment] = Environment + + environment: Environment + globals: t.MutableMapping[str, t.Any] + name: t.Optional[str] + filename: t.Optional[str] + blocks: t.Dict[str, t.Callable[[Context], t.Iterator[str]]] + root_render_func: t.Callable[[Context], t.Iterator[str]] + _module: t.Optional["TemplateModule"] + _debug_info: str + _uptodate: t.Optional[t.Callable[[], bool]] def __new__( cls, - source, - block_start_string=BLOCK_START_STRING, - block_end_string=BLOCK_END_STRING, - variable_start_string=VARIABLE_START_STRING, - variable_end_string=VARIABLE_END_STRING, - comment_start_string=COMMENT_START_STRING, - comment_end_string=COMMENT_END_STRING, - line_statement_prefix=LINE_STATEMENT_PREFIX, - line_comment_prefix=LINE_COMMENT_PREFIX, - trim_blocks=TRIM_BLOCKS, - lstrip_blocks=LSTRIP_BLOCKS, - newline_sequence=NEWLINE_SEQUENCE, - keep_trailing_newline=KEEP_TRAILING_NEWLINE, - extensions=(), - optimized=True, - undefined=Undefined, - finalize=None, - autoescape=False, - enable_async=False, - ): + source: t.Union[str, nodes.Template], + block_start_string: str = BLOCK_START_STRING, + block_end_string: str = BLOCK_END_STRING, + variable_start_string: str = VARIABLE_START_STRING, + variable_end_string: str = VARIABLE_END_STRING, + comment_start_string: str = COMMENT_START_STRING, + comment_end_string: str = COMMENT_END_STRING, + line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, + line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, + trim_blocks: bool = TRIM_BLOCKS, + lstrip_blocks: bool = LSTRIP_BLOCKS, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, + keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), + optimized: bool = True, + undefined: t.Type[Undefined] = Undefined, + finalize: t.Optional[t.Callable[..., t.Any]] = None, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, + enable_async: bool = False, + ) -> t.Any: # it returns a `Template`, but this breaks the sphinx build... env = get_spontaneous_environment( - cls.environment_class, + cls.environment_class, # type: ignore block_start_string, block_end_string, variable_start_string, @@ -1019,7 +1196,7 @@ keep_trailing_newline, frozenset(extensions), optimized, - undefined, + undefined, # type: ignore finalize, autoescape, None, @@ -1031,7 +1208,13 @@ return env.from_string(source, template_class=cls) @classmethod - def from_code(cls, environment, code, globals, uptodate=None): + def from_code( + cls, + environment: Environment, + code: CodeType, + globals: t.MutableMapping[str, t.Any], + uptodate: t.Optional[t.Callable[[], bool]] = None, + ) -> "Template": """Creates a template object from compiled code and the globals. This is used by the loaders and environment to create a template object. """ @@ -1042,7 +1225,12 @@ return rv @classmethod - def from_module_dict(cls, environment, module_dict, globals): + def from_module_dict( + cls, + environment: Environment, + module_dict: t.MutableMapping[str, t.Any], + globals: t.MutableMapping[str, t.Any], + ) -> "Template": """Creates a template object from a module. This is used by the module loader to create a template object. 
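(Aside: the retyped ``Template.__new__`` above still routes direct construction through the cached spontaneous environment, so templates built with identical settings share one ``Environment``. A minimal sketch, not part of this roll, assuming the rolled package imports as ``jinja2``:)

from jinja2 import Template

a = Template("Hello {{ name }}!")
b = Template("Bye {{ name }}!")

# Same construction arguments -> same cached spontaneous Environment.
assert a.environment is b.environment
print(a.render(name="Template API"))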
@@ -1051,8 +1239,13 @@ return cls._from_namespace(environment, module_dict, globals) @classmethod - def _from_namespace(cls, environment, namespace, globals): - t = object.__new__(cls) + def _from_namespace( + cls, + environment: Environment, + namespace: t.MutableMapping[str, t.Any], + globals: t.MutableMapping[str, t.Any], + ) -> "Template": + t: "Template" = object.__new__(cls) t.environment = environment t.globals = globals t.name = namespace["name"] @@ -1060,7 +1253,7 @@ t.blocks = namespace["blocks"] # render function and module - t.root_render_func = namespace["root"] + t.root_render_func = namespace["root"] # type: ignore t._module = None # debug and loader helpers @@ -1073,7 +1266,7 @@ return t - def render(self, *args, **kwargs): + def render(self, *args: t.Any, **kwargs: t.Any) -> str: """This method accepts the same arguments as the `dict` constructor: A dict, a dict subclass or some keyword arguments. If no arguments are given the context will be empty. These two calls do the same:: @@ -1081,15 +1274,33 @@ template.render(knights='that say nih') template.render({'knights': 'that say nih'}) - This will return the rendered template as unicode string. + This will return the rendered template as a string. """ - vars = dict(*args, **kwargs) + if self.environment.is_async: + import asyncio + + close = False + + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = asyncio.new_event_loop() + close = True + + try: + return loop.run_until_complete(self.render_async(*args, **kwargs)) + finally: + if close: + loop.close() + + ctx = self.new_context(dict(*args, **kwargs)) + try: - return concat(self.root_render_func(self.new_context(vars))) + return self.environment.concat(self.root_render_func(ctx)) # type: ignore except Exception: self.environment.handle_exception() - def render_async(self, *args, **kwargs): + async def render_async(self, *args: t.Any, **kwargs: t.Any) -> str: """This works similar to :meth:`render` but returns a coroutine that when awaited returns the entire rendered template string. This requires the async feature to be enabled. @@ -1098,42 +1309,75 @@ await template.render_async(knights='that say nih; asynchronously') """ - # see asyncsupport for the actual implementation - raise NotImplementedError( - "This feature is not available for this version of Python" - ) + if not self.environment.is_async: + raise RuntimeError( + "The environment was not created with async mode enabled." + ) - def stream(self, *args, **kwargs): + ctx = self.new_context(dict(*args, **kwargs)) + + try: + return self.environment.concat( # type: ignore + [n async for n in self.root_render_func(ctx)] # type: ignore + ) + except Exception: + return self.environment.handle_exception() + + def stream(self, *args: t.Any, **kwargs: t.Any) -> "TemplateStream": """Works exactly like :meth:`generate` but returns a :class:`TemplateStream`. """ return TemplateStream(self.generate(*args, **kwargs)) - def generate(self, *args, **kwargs): + def generate(self, *args: t.Any, **kwargs: t.Any) -> t.Iterator[str]: """For very large templates it can be useful to not render the whole template at once but evaluate each statement after another and yield piece for piece. This method basically does exactly that and returns - a generator that yields one item after another as unicode strings. + a generator that yields one item after another as strings. It accepts the same arguments as :meth:`render`. 
""" - vars = dict(*args, **kwargs) + if self.environment.is_async: + import asyncio + + async def to_list() -> t.List[str]: + return [x async for x in self.generate_async(*args, **kwargs)] + + yield from asyncio.run(to_list()) + return + + ctx = self.new_context(dict(*args, **kwargs)) + try: - for event in self.root_render_func(self.new_context(vars)): + yield from self.root_render_func(ctx) # type: ignore + except Exception: + yield self.environment.handle_exception() + + async def generate_async( + self, *args: t.Any, **kwargs: t.Any + ) -> t.AsyncIterator[str]: + """An async version of :meth:`generate`. Works very similarly but + returns an async iterator instead. + """ + if not self.environment.is_async: + raise RuntimeError( + "The environment was not created with async mode enabled." + ) + + ctx = self.new_context(dict(*args, **kwargs)) + + try: + async for event in self.root_render_func(ctx): # type: ignore yield event except Exception: yield self.environment.handle_exception() - def generate_async(self, *args, **kwargs): - """An async version of :meth:`generate`. Works very similarly but - returns an async iterator instead. - """ - # see asyncsupport for the actual implementation - raise NotImplementedError( - "This feature is not available for this version of Python" - ) - - def new_context(self, vars=None, shared=False, locals=None): + def new_context( + self, + vars: t.Optional[t.Dict[str, t.Any]] = None, + shared: bool = False, + locals: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> Context: """Create a new :class:`Context` for this template. The vars provided will be passed to the template. Per default the globals are added to the context. If shared is set to `True` the data @@ -1145,35 +1389,80 @@ self.environment, self.name, self.blocks, vars, shared, self.globals, locals ) - def make_module(self, vars=None, shared=False, locals=None): + def make_module( + self, + vars: t.Optional[t.Dict[str, t.Any]] = None, + shared: bool = False, + locals: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> "TemplateModule": """This method works like the :attr:`module` attribute when called without arguments but it will evaluate the template on every call rather than caching it. It's also possible to provide a dict which is then used as context. The arguments are the same as for the :meth:`new_context` method. """ - return TemplateModule(self, self.new_context(vars, shared, locals)) + ctx = self.new_context(vars, shared, locals) + return TemplateModule(self, ctx) - def make_module_async(self, vars=None, shared=False, locals=None): + async def make_module_async( + self, + vars: t.Optional[t.Dict[str, t.Any]] = None, + shared: bool = False, + locals: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> "TemplateModule": """As template module creation can invoke template code for asynchronous executions this method must be used instead of the normal :meth:`make_module` one. Likewise the module attribute becomes unavailable in async mode. 
""" - # see asyncsupport for the actual implementation - raise NotImplementedError( - "This feature is not available for this version of Python" + ctx = self.new_context(vars, shared, locals) + return TemplateModule( + self, ctx, [x async for x in self.root_render_func(ctx)] # type: ignore ) @internalcode - def _get_default_module(self): - if self._module is not None: - return self._module - self._module = rv = self.make_module() - return rv + def _get_default_module(self, ctx: t.Optional[Context] = None) -> "TemplateModule": + """If a context is passed in, this means that the template was + imported. Imported templates have access to the current + template's globals by default, but they can only be accessed via + the context during runtime. + + If there are new globals, we need to create a new module because + the cached module is already rendered and will not have access + to globals from the current context. This new module is not + cached because the template can be imported elsewhere, and it + should have access to only the current template's globals. + """ + if self.environment.is_async: + raise RuntimeError("Module is not available in async mode.") + + if ctx is not None: + keys = ctx.globals_keys - self.globals.keys() + + if keys: + return self.make_module({k: ctx.parent[k] for k in keys}) + + if self._module is None: + self._module = self.make_module() + + return self._module + + async def _get_default_module_async( + self, ctx: t.Optional[Context] = None + ) -> "TemplateModule": + if ctx is not None: + keys = ctx.globals_keys - self.globals.keys() + + if keys: + return await self.make_module_async({k: ctx.parent[k] for k in keys}) + + if self._module is None: + self._module = await self.make_module_async() + + return self._module @property - def module(self): + def module(self) -> "TemplateModule": """The template as module. This is used for imports in the template runtime but is also useful if one wants to access exported template variables from the Python layer: @@ -1188,7 +1477,7 @@ """ return self._get_default_module() - def get_corresponding_lineno(self, lineno): + def get_corresponding_lineno(self, lineno: int) -> int: """Return the source line number of a line number in the generated bytecode as they are not in sync. """ @@ -1198,100 +1487,113 @@ return 1 @property - def is_up_to_date(self): + def is_up_to_date(self) -> bool: """If this variable is `False` there is a newer version available.""" if self._uptodate is None: return True return self._uptodate() @property - def debug_info(self): + def debug_info(self) -> t.List[t.Tuple[int, int]]: """The debug info mapping.""" if self._debug_info: - return [tuple(map(int, x.split("="))) for x in self._debug_info.split("&")] + return [ + tuple(map(int, x.split("="))) # type: ignore + for x in self._debug_info.split("&") + ] + return [] - def __repr__(self): + def __repr__(self) -> str: if self.name is None: - name = "memory:%x" % id(self) + name = f"memory:{id(self):x}" else: name = repr(self.name) - return "<%s %s>" % (self.__class__.__name__, name) + return f"<{type(self).__name__} {name}>" -@implements_to_string -class TemplateModule(object): +class TemplateModule: """Represents an imported template. All the exported names of the template are available as attributes on this object. Additionally - converting it into an unicode- or bytestrings renders the contents. + converting it into a string renders the contents. 
""" - def __init__(self, template, context, body_stream=None): + def __init__( + self, + template: Template, + context: Context, + body_stream: t.Optional[t.Iterable[str]] = None, + ) -> None: if body_stream is None: if context.environment.is_async: raise RuntimeError( - "Async mode requires a body stream " - "to be passed to a template module. Use " - "the async methods of the API you are " - "using." + "Async mode requires a body stream to be passed to" + " a template module. Use the async methods of the" + " API you are using." ) - body_stream = list(template.root_render_func(context)) + + body_stream = list(template.root_render_func(context)) # type: ignore + self._body_stream = body_stream self.__dict__.update(context.get_exported()) self.__name__ = template.name - def __html__(self): + def __html__(self) -> Markup: return Markup(concat(self._body_stream)) - def __str__(self): + def __str__(self) -> str: return concat(self._body_stream) - def __repr__(self): + def __repr__(self) -> str: if self.__name__ is None: - name = "memory:%x" % id(self) + name = f"memory:{id(self):x}" else: name = repr(self.__name__) - return "<%s %s>" % (self.__class__.__name__, name) + return f"<{type(self).__name__} {name}>" -class TemplateExpression(object): +class TemplateExpression: """The :meth:`jinja2.Environment.compile_expression` method returns an instance of this object. It encapsulates the expression-like access to the template with an expression it wraps. """ - def __init__(self, template, undefined_to_none): + def __init__(self, template: Template, undefined_to_none: bool) -> None: self._template = template self._undefined_to_none = undefined_to_none - def __call__(self, *args, **kwargs): + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Optional[t.Any]: context = self._template.new_context(dict(*args, **kwargs)) - consume(self._template.root_render_func(context)) + consume(self._template.root_render_func(context)) # type: ignore rv = context.vars["result"] if self._undefined_to_none and isinstance(rv, Undefined): rv = None return rv -@implements_iterator -class TemplateStream(object): +class TemplateStream: """A template stream works pretty much like an ordinary python generator but it can buffer multiple items to reduce the number of total iterations. Per default the output is unbuffered which means that for every unbuffered - instruction in the template one unicode string is yielded. + instruction in the template one string is yielded. If buffering is enabled with a buffer size of 5, five items are combined - into a new unicode string. This is mainly useful if you are streaming + into a new string. This is mainly useful if you are streaming big templates to a client via WSGI which flushes after each iteration. """ - def __init__(self, gen): + def __init__(self, gen: t.Iterator[str]) -> None: self._gen = gen self.disable_buffering() - def dump(self, fp, encoding=None, errors="strict"): + def dump( + self, + fp: t.Union[str, t.IO], + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + ) -> None: """Dump the complete stream into a file or file-like object. - Per default unicode strings are written, if you want to encode + Per default strings are written, if you want to encode before writing specify an `encoding`. 
Example usage:: @@ -1299,16 +1601,19 @@ Template('Hello {{ name }}!').stream(name='foo').dump('hello.html') """ close = False - if isinstance(fp, string_types): + + if isinstance(fp, str): if encoding is None: encoding = "utf-8" + fp = open(fp, "wb") close = True try: if encoding is not None: - iterable = (x.encode(encoding, errors) for x in self) + iterable = (x.encode(encoding, errors) for x in self) # type: ignore else: - iterable = self + iterable = self # type: ignore + if hasattr(fp, "writelines"): fp.writelines(iterable) else: @@ -1318,17 +1623,17 @@ if close: fp.close() - def disable_buffering(self): + def disable_buffering(self) -> None: """Disable the output buffering.""" self._next = partial(next, self._gen) self.buffered = False - def _buffered_generator(self, size): - buf = [] + def _buffered_generator(self, size: int) -> t.Iterator[str]: + buf: t.List[str] = [] c_size = 0 push = buf.append - while 1: + while True: try: while c_size < size: c = next(self._gen) @@ -1342,7 +1647,7 @@ del buf[:] c_size = 0 - def enable_buffering(self, size=5): + def enable_buffering(self, size: int = 5) -> None: """Enable buffering. Buffer `size` items before yielding them.""" if size <= 1: raise ValueError("buffer size too small") @@ -1350,11 +1655,11 @@ self.buffered = True self._next = partial(next, self._buffered_generator(size)) - def __iter__(self): + def __iter__(self) -> "TemplateStream": return self - def __next__(self): - return self._next() + def __next__(self) -> str: + return self._next() # type: ignore # hook in default template class. if anyone reads this comment: ignore that
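The environment.py rewrite above folds async support into the core classes and layers per-template globals over the environment globals with a ``ChainMap``. A minimal sketch of both behaviors, for reference only and not part of the diff; it assumes the rolled package is importable as ``jinja2``:

import asyncio

from jinja2 import Environment

env = Environment(enable_async=True)
tmpl = env.from_string("Hello {{ name }}!")

async def main() -> None:
    # render_async awaits the compiled async root render function directly.
    print(await tmpl.render_async(name="async world"))

asyncio.run(main())

# make_globals() now returns a ChainMap: template globals shadow the
# environment globals without mutating them.
env.globals["site"] = "example"
page = env.from_string("{{ site }}/{{ page }}", globals={"page": "index"})
assert "page" not in env.globals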
diff --git a/third_party/jinja2/exceptions.py b/third_party/jinja2/exceptions.py index 0bf2003e3..082ebe8f 100644 --- a/third_party/jinja2/exceptions.py +++ b/third_party/jinja2/exceptions.py
@@ -1,44 +1,20 @@ -# -*- coding: utf-8 -*- -from ._compat import imap -from ._compat import implements_to_string -from ._compat import PY2 -from ._compat import text_type +import typing as t + +if t.TYPE_CHECKING: + from .runtime import Undefined class TemplateError(Exception): """Baseclass for all template errors.""" - if PY2: + def __init__(self, message: t.Optional[str] = None) -> None: + super().__init__(message) - def __init__(self, message=None): - if message is not None: - message = text_type(message).encode("utf-8") - Exception.__init__(self, message) - - @property - def message(self): - if self.args: - message = self.args[0] - if message is not None: - return message.decode("utf-8", "replace") - - def __unicode__(self): - return self.message or u"" - - else: - - def __init__(self, message=None): - Exception.__init__(self, message) - - @property - def message(self): - if self.args: - message = self.args[0] - if message is not None: - return message + @property + def message(self) -> t.Optional[str]: + return self.args[0] if self.args else None -@implements_to_string class TemplateNotFound(IOError, LookupError, TemplateError): """Raised if a template does not exist. @@ -47,11 +23,15 @@ provided, an :exc:`UndefinedError` is raised. """ - # looks weird, but removes the warning descriptor that just - # bogusly warns us about message being deprecated - message = None + # Silence the Python warning about message being deprecated since + # it's not valid here. + message: t.Optional[str] = None - def __init__(self, name, message=None): + def __init__( + self, + name: t.Optional[t.Union[str, "Undefined"]], + message: t.Optional[str] = None, + ) -> None: IOError.__init__(self, name) if message is None: @@ -66,8 +46,8 @@ self.name = name self.templates = [name] - def __str__(self): - return self.message + def __str__(self) -> str: + return str(self.message) class TemplatesNotFound(TemplateNotFound): @@ -82,7 +62,11 @@ .. 
versionadded:: 2.2 """ - def __init__(self, names=(), message=None): + def __init__( + self, + names: t.Sequence[t.Union[str, "Undefined"]] = (), + message: t.Optional[str] = None, + ) -> None: if message is None: from .runtime import Undefined @@ -94,52 +78,57 @@ else: parts.append(name) - message = u"none of the templates given were found: " + u", ".join( - imap(text_type, parts) - ) - TemplateNotFound.__init__(self, names and names[-1] or None, message) + parts_str = ", ".join(map(str, parts)) + message = f"none of the templates given were found: {parts_str}" + + super().__init__(names[-1] if names else None, message) self.templates = list(names) -@implements_to_string class TemplateSyntaxError(TemplateError): """Raised to tell the user that there is a problem with the template.""" - def __init__(self, message, lineno, name=None, filename=None): - TemplateError.__init__(self, message) + def __init__( + self, + message: str, + lineno: int, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> None: + super().__init__(message) self.lineno = lineno self.name = name self.filename = filename - self.source = None + self.source: t.Optional[str] = None # this is set to True if the debug.translate_syntax_error # function translated the syntax error into a new traceback self.translated = False - def __str__(self): + def __str__(self) -> str: # for translated errors we only return the message if self.translated: - return self.message + return t.cast(str, self.message) # otherwise attach some stuff - location = "line %d" % self.lineno + location = f"line {self.lineno}" name = self.filename or self.name if name: - location = 'File "%s", %s' % (name, location) - lines = [self.message, " " + location] + location = f'File "{name}", {location}' + lines = [t.cast(str, self.message), " " + location] # if the source is set, add the line to the output if self.source is not None: try: line = self.source.splitlines()[self.lineno - 1] except IndexError: - line = None - if line: + pass + else: lines.append(" " + line.strip()) - return u"\n".join(lines) + return "\n".join(lines) - def __reduce__(self): + def __reduce__(self): # type: ignore # https://bugs.python.org/issue1692335 Exceptions that take # multiple required arguments have problems with pickling. # Without this, raises TypeError: __init__() missing 1 required
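A small reference sketch of the reworked exceptions above, not part of the diff: ``message`` is a plain ``str`` property again, ``TemplateSyntaxError.__str__`` appends the location, and an empty ``select_template`` list raises ``TemplatesNotFound`` with the message set in environment.py. The template strings here are made up for illustration.

from jinja2 import Environment
from jinja2.exceptions import TemplatesNotFound, TemplateSyntaxError

env = Environment()

try:
    env.parse("{% if %}")  # malformed tag
except TemplateSyntaxError as e:
    # message stays the bare error text; str(e) adds the location line.
    print(e.message, e.lineno)
    print(str(e))

try:
    env.select_template([])  # empty list of candidate names
except TemplatesNotFound as e:
    print(e.message)  # "Tried to select from an empty list of templates."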
diff --git a/third_party/jinja2/ext.py b/third_party/jinja2/ext.py index 9141be4d..d555054 100644 --- a/third_party/jinja2/ext.py +++ b/third_party/jinja2/ext.py
@@ -1,53 +1,57 @@ -# -*- coding: utf-8 -*- """Extension API for adding custom tags and behavior.""" import pprint import re -from sys import version_info +import typing as t from markupsafe import Markup +from . import defaults from . import nodes -from ._compat import iteritems -from ._compat import string_types -from ._compat import with_metaclass -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING from .environment import Environment from .exceptions import TemplateAssertionError from .exceptions import TemplateSyntaxError -from .nodes import ContextReference -from .runtime import concat -from .utils import contextfunction +from .runtime import concat # type: ignore +from .runtime import Context +from .runtime import Undefined from .utils import import_string +from .utils import pass_context -# the only real useful gettext functions for a Jinja template. Note -# that ugettext must be assigned to gettext as Jinja doesn't support -# non unicode strings. -GETTEXT_FUNCTIONS = ("_", "gettext", "ngettext") +if t.TYPE_CHECKING: + import typing_extensions as te + from .lexer import Token + from .lexer import TokenStream + from .parser import Parser + class _TranslationsBasic(te.Protocol): + def gettext(self, message: str) -> str: + ... + + def ngettext(self, singular: str, plural: str, n: int) -> str: + pass + + class _TranslationsContext(_TranslationsBasic): + def pgettext(self, context: str, message: str) -> str: + ... + + def npgettext(self, context: str, singular: str, plural: str, n: int) -> str: + ... + + _SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext] + + +# I18N functions available in Jinja templates. If the I18N library +# provides ugettext, it will be assigned to gettext. +GETTEXT_FUNCTIONS: t.Tuple[str, ...] = ( + "_", + "gettext", + "ngettext", + "pgettext", + "npgettext", +) _ws_re = re.compile(r"\s*\n\s*") -class ExtensionRegistry(type): - """Gives the extension an unique identifier.""" - - def __new__(mcs, name, bases, d): - rv = type.__new__(mcs, name, bases, d) - rv.identifier = rv.__module__ + "." + rv.__name__ - return rv - - -class Extension(with_metaclass(ExtensionRegistry, object)): +class Extension: """Extensions can be used to add extra functionality to the Jinja template system at the parser level. Custom extensions are bound to an environment but may not store environment specific data on `self`. The reason for @@ -66,8 +70,13 @@ name as includes the name of the extension (fragment cache). """ + identifier: t.ClassVar[str] + + def __init_subclass__(cls) -> None: + cls.identifier = f"{cls.__module__}.{cls.__name__}" + #: if this extension parses this is the list of tags it's listening to. - tags = set() + tags: t.Set[str] = set() #: the priority of that extension. This is especially useful for #: extensions that preprocess values. A lower value means higher @@ -76,24 +85,28 @@ #: .. 
versionadded:: 2.4 priority = 100 - def __init__(self, environment): + def __init__(self, environment: Environment) -> None: self.environment = environment - def bind(self, environment): + def bind(self, environment: Environment) -> "Extension": """Create a copy of this extension bound to another environment.""" rv = object.__new__(self.__class__) rv.__dict__.update(self.__dict__) rv.environment = environment return rv - def preprocess(self, source, name, filename=None): + def preprocess( + self, source: str, name: t.Optional[str], filename: t.Optional[str] = None + ) -> str: """This method is called before the actual lexing and can be used to preprocess the source. The `filename` is optional. The return value must be the preprocessed source. """ return source - def filter_stream(self, stream): + def filter_stream( + self, stream: "TokenStream" + ) -> t.Union["TokenStream", t.Iterable["Token"]]: """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used to filter tokens returned. This method has to return an iterable of :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a @@ -101,7 +114,7 @@ """ return stream - def parse(self, parser): + def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]: """If any of the :attr:`tags` matched this method is called with the parser as first argument. The token the parser stream is pointing at is the name token that matched. This method has to return one or a @@ -109,7 +122,9 @@ """ raise NotImplementedError() - def attr(self, name, lineno=None): + def attr( + self, name: str, lineno: t.Optional[int] = None + ) -> nodes.ExtensionAttribute: """Return an attribute node for the current extension. This is useful to pass constants on extensions to generated template code. @@ -120,8 +135,14 @@ return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno) def call_method( - self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None - ): + self, + name: str, + args: t.Optional[t.List[nodes.Expr]] = None, + kwargs: t.Optional[t.List[nodes.Keyword]] = None, + dyn_args: t.Optional[nodes.Expr] = None, + dyn_kwargs: t.Optional[nodes.Expr] = None, + lineno: t.Optional[int] = None, + ) -> nodes.Call: """Call a method of the extension. This is a shortcut for :meth:`attr` + :class:`jinja2.nodes.Call`. """ @@ -139,38 +160,88 @@ ) -@contextfunction -def _gettext_alias(__context, *args, **kwargs): +@pass_context +def _gettext_alias( + __context: Context, *args: t.Any, **kwargs: t.Any +) -> t.Union[t.Any, Undefined]: return __context.call(__context.resolve("gettext"), *args, **kwargs) -def _make_new_gettext(func): - @contextfunction - def gettext(__context, __string, **variables): +def _make_new_gettext(func: t.Callable[[str], str]) -> t.Callable[..., str]: + @pass_context + def gettext(__context: Context, __string: str, **variables: t.Any) -> str: rv = __context.call(func, __string) if __context.eval_ctx.autoescape: rv = Markup(rv) # Always treat as a format string, even if there are no # variables. This makes translation strings more consistent # and predictable. 
This requires escaping - return rv % variables + return rv % variables # type: ignore return gettext -def _make_new_ngettext(func): - @contextfunction - def ngettext(__context, __singular, __plural, __num, **variables): +def _make_new_ngettext(func: t.Callable[[str, str, int], str]) -> t.Callable[..., str]: + @pass_context + def ngettext( + __context: Context, + __singular: str, + __plural: str, + __num: int, + **variables: t.Any, + ) -> str: variables.setdefault("num", __num) rv = __context.call(func, __singular, __plural, __num) if __context.eval_ctx.autoescape: rv = Markup(rv) # Always treat as a format string, see gettext comment above. - return rv % variables + return rv % variables # type: ignore return ngettext +def _make_new_pgettext(func: t.Callable[[str, str], str]) -> t.Callable[..., str]: + @pass_context + def pgettext( + __context: Context, __string_ctx: str, __string: str, **variables: t.Any + ) -> str: + variables.setdefault("context", __string_ctx) + rv = __context.call(func, __string_ctx, __string) + + if __context.eval_ctx.autoescape: + rv = Markup(rv) + + # Always treat as a format string, see gettext comment above. + return rv % variables # type: ignore + + return pgettext + + +def _make_new_npgettext( + func: t.Callable[[str, str, str, int], str] +) -> t.Callable[..., str]: + @pass_context + def npgettext( + __context: Context, + __string_ctx: str, + __singular: str, + __plural: str, + __num: int, + **variables: t.Any, + ) -> str: + variables.setdefault("context", __string_ctx) + variables.setdefault("num", __num) + rv = __context.call(func, __string_ctx, __singular, __plural, __num) + + if __context.eval_ctx.autoescape: + rv = Markup(rv) + + # Always treat as a format string, see gettext comment above. + return rv % variables # type: ignore + + return npgettext + + class InternationalizationExtension(Extension): """This extension adds gettext support to Jinja.""" @@ -183,8 +254,8 @@ # something is called twice here. One time for the gettext value and # the other time for the n-parameter of the ngettext function. - def __init__(self, environment): - Extension.__init__(self, environment) + def __init__(self, environment: Environment) -> None: + super().__init__(environment) environment.globals["_"] = _gettext_alias environment.extend( install_gettext_translations=self._install, @@ -195,48 +266,108 @@ newstyle_gettext=False, ) - def _install(self, translations, newstyle=None): + def _install( + self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None + ) -> None: + # ugettext and ungettext are preferred in case the I18N library + # is providing compatibility with older Python versions. 
gettext = getattr(translations, "ugettext", None) if gettext is None: gettext = translations.gettext ngettext = getattr(translations, "ungettext", None) if ngettext is None: ngettext = translations.ngettext - self._install_callables(gettext, ngettext, newstyle) - def _install_null(self, newstyle=None): + pgettext = getattr(translations, "pgettext", None) + npgettext = getattr(translations, "npgettext", None) self._install_callables( - lambda x: x, lambda s, p, n: (n != 1 and (p,) or (s,))[0], newstyle + gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext ) - def _install_callables(self, gettext, ngettext, newstyle=None): + def _install_null(self, newstyle: t.Optional[bool] = None) -> None: + import gettext + + translations = gettext.NullTranslations() + + if hasattr(translations, "pgettext"): + # Python < 3.8 + pgettext = translations.pgettext # type: ignore + else: + + def pgettext(c: str, s: str) -> str: + return s + + if hasattr(translations, "npgettext"): + npgettext = translations.npgettext # type: ignore + else: + + def npgettext(c: str, s: str, p: str, n: int) -> str: + return s if n == 1 else p + + self._install_callables( + gettext=translations.gettext, + ngettext=translations.ngettext, + newstyle=newstyle, + pgettext=pgettext, + npgettext=npgettext, + ) + + def _install_callables( + self, + gettext: t.Callable[[str], str], + ngettext: t.Callable[[str, str, int], str], + newstyle: t.Optional[bool] = None, + pgettext: t.Optional[t.Callable[[str, str], str]] = None, + npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None, + ) -> None: if newstyle is not None: - self.environment.newstyle_gettext = newstyle - if self.environment.newstyle_gettext: + self.environment.newstyle_gettext = newstyle # type: ignore + if self.environment.newstyle_gettext: # type: ignore gettext = _make_new_gettext(gettext) ngettext = _make_new_ngettext(ngettext) - self.environment.globals.update(gettext=gettext, ngettext=ngettext) - def _uninstall(self, translations): - for key in "gettext", "ngettext": + if pgettext is not None: + pgettext = _make_new_pgettext(pgettext) + + if npgettext is not None: + npgettext = _make_new_npgettext(npgettext) + + self.environment.globals.update( + gettext=gettext, ngettext=ngettext, pgettext=pgettext, npgettext=npgettext + ) + + def _uninstall(self, translations: "_SupportedTranslations") -> None: + for key in ("gettext", "ngettext", "pgettext", "npgettext"): self.environment.globals.pop(key, None) - def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS): - if isinstance(source, string_types): + def _extract( + self, + source: t.Union[str, nodes.Template], + gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS, + ) -> t.Iterator[ + t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]] + ]: + if isinstance(source, str): source = self.environment.parse(source) return extract_from_ast(source, gettext_functions) - def parse(self, parser): + def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]: """Parse a translatable tag.""" lineno = next(parser.stream).lineno - num_called_num = False + + context = None + context_token = parser.stream.next_if("string") + + if context_token is not None: + context = context_token.value # find all the variables referenced. Additionally a variable can be # defined in the body of the trans block too, but this is checked at # a later state. 
- plural_expr = None - plural_expr_assignment = None - variables = {} + plural_expr: t.Optional[nodes.Expr] = None + plural_expr_assignment: t.Optional[nodes.Assign] = None + num_called_num = False + variables: t.Dict[str, nodes.Expr] = {} trimmed = None while parser.stream.current.type != "block_end": if variables: @@ -246,34 +377,34 @@ if parser.stream.skip_if("colon"): break - name = parser.stream.expect("name") - if name.value in variables: + token = parser.stream.expect("name") + if token.value in variables: parser.fail( - "translatable variable %r defined twice." % name.value, - name.lineno, + f"translatable variable {token.value!r} defined twice.", + token.lineno, exc=TemplateAssertionError, ) # expressions if parser.stream.current.type == "assign": next(parser.stream) - variables[name.value] = var = parser.parse_expression() - elif trimmed is None and name.value in ("trimmed", "notrimmed"): - trimmed = name.value == "trimmed" + variables[token.value] = var = parser.parse_expression() + elif trimmed is None and token.value in ("trimmed", "notrimmed"): + trimmed = token.value == "trimmed" continue else: - variables[name.value] = var = nodes.Name(name.value, "load") + variables[token.value] = var = nodes.Name(token.value, "load") if plural_expr is None: if isinstance(var, nodes.Call): plural_expr = nodes.Name("_trans", "load") - variables[name.value] = plural_expr + variables[token.value] = plural_expr plural_expr_assignment = nodes.Assign( nodes.Name("_trans", "store"), var ) else: plural_expr = var - num_called_num = name.value == "num" + num_called_num = token.value == "num" parser.stream.expect("block_end") @@ -294,15 +425,15 @@ have_plural = True next(parser.stream) if parser.stream.current.type != "block_end": - name = parser.stream.expect("name") - if name.value not in variables: + token = parser.stream.expect("name") + if token.value not in variables: parser.fail( - "unknown variable %r for pluralization" % name.value, - name.lineno, + f"unknown variable {token.value!r} for pluralization", + token.lineno, exc=TemplateAssertionError, ) - plural_expr = variables[name.value] - num_called_num = name.value == "num" + plural_expr = variables[token.value] + num_called_num = token.value == "num" parser.stream.expect("block_end") plural_names, plural = self._parse_block(parser, False) next(parser.stream) @@ -311,9 +442,9 @@ next(parser.stream) # register free names as simple name expressions - for var in referenced: - if var not in variables: - variables[var] = nodes.Name(var, "load") + for name in referenced: + if name not in variables: + variables[name] = nodes.Name(name, "load") if not have_plural: plural_expr = None @@ -330,6 +461,7 @@ node = self._make_node( singular, plural, + context, variables, plural_expr, bool(referenced), @@ -341,14 +473,17 @@ else: return node - def _trim_whitespace(self, string, _ws_re=_ws_re): + def _trim_whitespace(self, string: str, _ws_re: t.Pattern[str] = _ws_re) -> str: return _ws_re.sub(" ", string.strip()) - def _parse_block(self, parser, allow_pluralize): + def _parse_block( + self, parser: "Parser", allow_pluralize: bool + ) -> t.Tuple[t.List[str], str]: """Parse until the next block tag with a given name.""" referenced = [] buf = [] - while 1: + + while True: if parser.stream.current.type == "data": buf.append(parser.stream.current.value.replace("%", "%%")) next(parser.stream) @@ -356,7 +491,7 @@ next(parser.stream) name = parser.stream.expect("name").value referenced.append(name) - buf.append("%%(%s)s" % name) + buf.append(f"%({name})s") 
parser.stream.expect("variable_end") elif parser.stream.current.type == "block_begin": next(parser.stream) @@ -379,37 +514,44 @@ return referenced, concat(buf) def _make_node( - self, singular, plural, variables, plural_expr, vars_referenced, num_called_num - ): + self, + singular: str, + plural: t.Optional[str], + context: t.Optional[str], + variables: t.Dict[str, nodes.Expr], + plural_expr: t.Optional[nodes.Expr], + vars_referenced: bool, + num_called_num: bool, + ) -> nodes.Output: """Generates a useful node from the data provided.""" + newstyle = self.environment.newstyle_gettext # type: ignore + node: nodes.Expr + # no variables referenced? no need to escape for old style # gettext invocations only if there are vars. - if not vars_referenced and not self.environment.newstyle_gettext: + if not vars_referenced and not newstyle: singular = singular.replace("%%", "%") if plural: plural = plural.replace("%%", "%") - # singular only: - if plural_expr is None: - gettext = nodes.Name("gettext", "load") - node = nodes.Call(gettext, [nodes.Const(singular)], [], None, None) + func_name = "gettext" + func_args: t.List[nodes.Expr] = [nodes.Const(singular)] - # singular and plural - else: - ngettext = nodes.Name("ngettext", "load") - node = nodes.Call( - ngettext, - [nodes.Const(singular), nodes.Const(plural), plural_expr], - [], - None, - None, - ) + if context is not None: + func_args.insert(0, nodes.Const(context)) + func_name = f"p{func_name}" + + if plural_expr is not None: + func_name = f"n{func_name}" + func_args.extend((nodes.Const(plural), plural_expr)) + + node = nodes.Call(nodes.Name(func_name, "load"), func_args, [], None, None) # in case newstyle gettext is used, the method is powerful # enough to handle the variable expansion and autoescape # handling itself - if self.environment.newstyle_gettext: - for key, value in iteritems(variables): + if newstyle: + for key, value in variables.items(): # the function adds that later anyways in case num was # called num, so just skip it. if num_called_num and key == "num": @@ -439,9 +581,9 @@ that it doesn't print the return value. """ - tags = set(["do"]) + tags = {"do"} - def parse(self, parser): + def parse(self, parser: "Parser") -> nodes.ExprStmt: node = nodes.ExprStmt(lineno=next(parser.stream).lineno) node.node = parser.parse_tuple() return node @@ -450,23 +592,15 @@ class LoopControlExtension(Extension): """Adds break and continue to the template engine.""" - tags = set(["break", "continue"]) + tags = {"break", "continue"} - def parse(self, parser): + def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]: token = next(parser.stream) if token.value == "break": return nodes.Break(lineno=token.lineno) return nodes.Continue(lineno=token.lineno) -class WithExtension(Extension): - pass - - -class AutoEscapeExtension(Extension): - pass - - class DebugExtension(Extension): """A ``{% debug %}`` tag that dumps the available variables, filters, and tests. 
@@ -490,13 +624,13 @@ tags = {"debug"} - def parse(self, parser): + def parse(self, parser: "Parser") -> nodes.Output: lineno = parser.stream.expect("name:debug").lineno - context = ContextReference() + context = nodes.ContextReference() result = self.call_method("_render", [context], lineno=lineno) return nodes.Output([result], lineno=lineno) - def _render(self, context): + def _render(self, context: Context) -> str: result = { "context": context.get_all(), "filters": sorted(self.environment.filters.keys()), @@ -504,13 +638,16 @@ } # Set the depth since the intent is to show the top few names. - if version_info[:2] >= (3, 4): - return pprint.pformat(result, depth=3, compact=True) - else: - return pprint.pformat(result, depth=3) + return pprint.pformat(result, depth=3, compact=True) -def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True): +def extract_from_ast( + ast: nodes.Template, + gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS, + babel_style: bool = True, +) -> t.Iterator[ + t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]] +]: """Extract localizable strings from the given template node. Per default this function returns matches in babel style that means non string parameters as well as keyword arguments are returned as `None`. This @@ -538,23 +675,26 @@ * ``lineno`` is the number of the line on which the string was found, * ``function`` is the name of the ``gettext`` function used (if the string was extracted from embedded Python code), and - * ``message`` is the string itself (a ``unicode`` object, or a tuple - of ``unicode`` objects for functions with multiple string arguments). + * ``message`` is the string, or a tuple of strings for functions + with multiple string arguments. This extraction function operates on the AST and is because of that unable to extract any comments. For comment support you have to use the babel extraction interface or extract comments yourself. """ - for node in node.find_all(nodes.Call): + out: t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]] + + for node in ast.find_all(nodes.Call): if ( not isinstance(node.node, nodes.Name) or node.node.name not in gettext_functions ): continue - strings = [] + strings: t.List[t.Optional[str]] = [] + for arg in node.args: - if isinstance(arg, nodes.Const) and isinstance(arg.value, string_types): + if isinstance(arg, nodes.Const) and isinstance(arg.value, str): strings.append(arg.value) else: strings.append(None) @@ -567,31 +707,35 @@ strings.append(None) if not babel_style: - strings = tuple(x for x in strings if x is not None) - if not strings: + out = tuple(x for x in strings if x is not None) + + if not out: continue else: if len(strings) == 1: - strings = strings[0] + out = strings[0] else: - strings = tuple(strings) - yield node.lineno, node.node.name, strings + out = tuple(strings) + + yield node.lineno, node.node.name, out -class _CommentFinder(object): +class _CommentFinder: """Helper class to find comments in a token stream. Can only find comments for gettext calls forwards. Once the comment from line 4 is found, a comment for line 1 will not return a usable value. 
""" - def __init__(self, tokens, comment_tags): + def __init__( + self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str] + ) -> None: self.tokens = tokens self.comment_tags = comment_tags self.offset = 0 self.last_lineno = 0 - def find_backwards(self, offset): + def find_backwards(self, offset: int) -> t.List[str]: try: for _, token_type, token_value in reversed( self.tokens[self.offset : offset] @@ -607,7 +751,7 @@ finally: self.offset = offset - def find_comments(self, lineno): + def find_comments(self, lineno: int) -> t.List[str]: if not self.comment_tags or self.last_lineno > lineno: return [] for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]): @@ -616,7 +760,16 @@ return self.find_backwards(len(self.tokens)) -def babel_extract(fileobj, keywords, comment_tags, options): +def babel_extract( + fileobj: t.BinaryIO, + keywords: t.Sequence[str], + comment_tags: t.Sequence[str], + options: t.Dict[str, t.Any], +) -> t.Iterator[ + t.Tuple[ + int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]], t.List[str] + ] +]: """Babel extraction method for Jinja templates. .. versionchanged:: 2.3 @@ -644,33 +797,37 @@ :return: an iterator over ``(lineno, funcname, message, comments)`` tuples. (comments will be empty currently) """ - extensions = set() - for extension in options.get("extensions", "").split(","): - extension = extension.strip() - if not extension: - continue - extensions.add(import_string(extension)) - if InternationalizationExtension not in extensions: - extensions.add(InternationalizationExtension) + extensions: t.Dict[t.Type[Extension], None] = {} - def getbool(options, key, default=False): - return options.get(key, str(default)).lower() in ("1", "on", "yes", "true") + for extension_name in options.get("extensions", "").split(","): + extension_name = extension_name.strip() + + if not extension_name: + continue + + extensions[import_string(extension_name)] = None + + if InternationalizationExtension not in extensions: + extensions[InternationalizationExtension] = None + + def getbool(options: t.Mapping[str, str], key: str, default: bool = False) -> bool: + return options.get(key, str(default)).lower() in {"1", "on", "yes", "true"} silent = getbool(options, "silent", True) environment = Environment( - options.get("block_start_string", BLOCK_START_STRING), - options.get("block_end_string", BLOCK_END_STRING), - options.get("variable_start_string", VARIABLE_START_STRING), - options.get("variable_end_string", VARIABLE_END_STRING), - options.get("comment_start_string", COMMENT_START_STRING), - options.get("comment_end_string", COMMENT_END_STRING), - options.get("line_statement_prefix") or LINE_STATEMENT_PREFIX, - options.get("line_comment_prefix") or LINE_COMMENT_PREFIX, - getbool(options, "trim_blocks", TRIM_BLOCKS), - getbool(options, "lstrip_blocks", LSTRIP_BLOCKS), - NEWLINE_SEQUENCE, - getbool(options, "keep_trailing_newline", KEEP_TRAILING_NEWLINE), - frozenset(extensions), + options.get("block_start_string", defaults.BLOCK_START_STRING), + options.get("block_end_string", defaults.BLOCK_END_STRING), + options.get("variable_start_string", defaults.VARIABLE_START_STRING), + options.get("variable_end_string", defaults.VARIABLE_END_STRING), + options.get("comment_start_string", defaults.COMMENT_START_STRING), + options.get("comment_end_string", defaults.COMMENT_END_STRING), + options.get("line_statement_prefix") or defaults.LINE_STATEMENT_PREFIX, + options.get("line_comment_prefix") or defaults.LINE_COMMENT_PREFIX, + getbool(options, 
"trim_blocks", defaults.TRIM_BLOCKS), + getbool(options, "lstrip_blocks", defaults.LSTRIP_BLOCKS), + defaults.NEWLINE_SEQUENCE, + getbool(options, "keep_trailing_newline", defaults.KEEP_TRAILING_NEWLINE), + tuple(extensions), cache_size=0, auto_reload=False, ) @@ -678,7 +835,7 @@ if getbool(options, "trimmed"): environment.policies["ext.i18n.trimmed"] = True if getbool(options, "newstyle_gettext"): - environment.newstyle_gettext = True + environment.newstyle_gettext = True # type: ignore source = fileobj.read().decode(options.get("encoding", "utf-8")) try: @@ -699,6 +856,4 @@ i18n = InternationalizationExtension do = ExprStmtExtension loopcontrols = LoopControlExtension -with_ = WithExtension -autoescape = AutoEscapeExtension debug = DebugExtension
diff --git a/third_party/jinja2/filters.py b/third_party/jinja2/filters.py index 74b108d..ed07c4c 100644 --- a/third_party/jinja2/filters.py +++ b/third_party/jinja2/filters.py
@@ -1,79 +1,75 @@ -# -*- coding: utf-8 -*- """Built-in template filters used with the ``|`` operator.""" import math import random import re -import warnings -from collections import namedtuple +import typing +import typing as t +from collections import abc from itertools import chain from itertools import groupby from markupsafe import escape from markupsafe import Markup -from markupsafe import soft_unicode +from markupsafe import soft_str -from ._compat import abc -from ._compat import imap -from ._compat import iteritems -from ._compat import string_types -from ._compat import text_type +from .async_utils import async_variant +from .async_utils import auto_aiter +from .async_utils import auto_await +from .async_utils import auto_to_list from .exceptions import FilterArgumentError from .runtime import Undefined from .utils import htmlsafe_json_dumps +from .utils import pass_context +from .utils import pass_environment +from .utils import pass_eval_context from .utils import pformat -from .utils import unicode_urlencode +from .utils import url_quote from .utils import urlize -_word_re = re.compile(r"\w+", re.UNICODE) -_word_beginning_split_re = re.compile(r"([-\s\(\{\[\<]+)", re.UNICODE) +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + from .nodes import EvalContext + from .runtime import Context + from .sandbox import SandboxedEnvironment # noqa: F401 + + class HasHTML(te.Protocol): + def __html__(self) -> str: + pass -def contextfilter(f): - """Decorator for marking context dependent filters. The current - :class:`Context` will be passed as first argument. - """ - f.contextfilter = True - return f +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +K = t.TypeVar("K") +V = t.TypeVar("V") -def evalcontextfilter(f): - """Decorator for marking eval-context dependent filters. An eval - context object is passed as first argument. For more information - about the eval context, see :ref:`eval-context`. - - .. versionadded:: 2.4 - """ - f.evalcontextfilter = True - return f - - -def environmentfilter(f): - """Decorator for marking environment dependent filters. The current - :class:`Environment` is passed to the filter as first argument. - """ - f.environmentfilter = True - return f - - -def ignore_case(value): +def ignore_case(value: V) -> V: """For use as a postprocessor for :func:`make_attrgetter`. Converts strings to lowercase and returns other types as-is.""" - return value.lower() if isinstance(value, string_types) else value + if isinstance(value, str): + return t.cast(V, value.lower()) + + return value -def make_attrgetter(environment, attribute, postprocess=None, default=None): +def make_attrgetter( + environment: "Environment", + attribute: t.Optional[t.Union[str, int]], + postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None, + default: t.Optional[t.Any] = None, +) -> t.Callable[[t.Any], t.Any]: """Returns a callable that looks up the given attribute from a passed object with the rules of the environment. Dots are allowed to access attributes of attributes. Integer parts in paths are looked up as integers. 
""" - attribute = _prepare_attribute_parts(attribute) + parts = _prepare_attribute_parts(attribute) - def attrgetter(item): - for part in attribute: + def attrgetter(item: t.Any) -> t.Any: + for part in parts: item = environment.getitem(item, part) - if default and isinstance(item, Undefined): + if default is not None and isinstance(item, Undefined): item = default if postprocess is not None: @@ -84,7 +80,11 @@ return attrgetter -def make_multi_attrgetter(environment, attribute, postprocess=None): +def make_multi_attrgetter( + environment: "Environment", + attribute: t.Optional[t.Union[str, int]], + postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None, +) -> t.Callable[[t.Any], t.List[t.Any]]: """Returns a callable that looks up the given comma separated attributes from a passed object with the rules of the environment. Dots are allowed to access attributes of each attribute. Integer @@ -95,17 +95,19 @@ Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc. """ - attribute_parts = ( - attribute.split(",") if isinstance(attribute, string_types) else [attribute] - ) - attribute = [ - _prepare_attribute_parts(attribute_part) for attribute_part in attribute_parts - ] + if isinstance(attribute, str): + split: t.Sequence[t.Union[str, int, None]] = attribute.split(",") + else: + split = [attribute] - def attrgetter(item): - items = [None] * len(attribute) - for i, attribute_part in enumerate(attribute): + parts = [_prepare_attribute_parts(item) for item in split] + + def attrgetter(item: t.Any) -> t.List[t.Any]: + items = [None] * len(parts) + + for i, attribute_part in enumerate(parts): item_i = item + for part in attribute_part: item_i = environment.getitem(item_i, part) @@ -113,28 +115,35 @@ item_i = postprocess(item_i) items[i] = item_i + return items return attrgetter -def _prepare_attribute_parts(attr): +def _prepare_attribute_parts( + attr: t.Optional[t.Union[str, int]] +) -> t.List[t.Union[str, int]]: if attr is None: return [] - elif isinstance(attr, string_types): + + if isinstance(attr, str): return [int(x) if x.isdigit() else x for x in attr.split(".")] - else: - return [attr] + + return [attr] -def do_forceescape(value): +def do_forceescape(value: "t.Union[str, HasHTML]") -> Markup: """Enforce HTML escaping. This will probably double escape variables.""" if hasattr(value, "__html__"): - value = value.__html__() - return escape(text_type(value)) + value = t.cast("HasHTML", value).__html__() + + return escape(str(value)) -def do_urlencode(value): +def do_urlencode( + value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[t.Tuple[str, t.Any]]] +) -> str: """Quote data for use in a URL path or query using UTF-8. Basic wrapper around :func:`urllib.parse.quote` when given a @@ -150,22 +159,23 @@ .. 
versionadded:: 2.7 """ - if isinstance(value, string_types) or not isinstance(value, abc.Iterable): - return unicode_urlencode(value) + if isinstance(value, str) or not isinstance(value, abc.Iterable): + return url_quote(value) if isinstance(value, dict): - items = iteritems(value) + items: t.Iterable[t.Tuple[str, t.Any]] = value.items() else: - items = iter(value) + items = value # type: ignore - return u"&".join( - "%s=%s" % (unicode_urlencode(k, for_qs=True), unicode_urlencode(v, for_qs=True)) - for k, v in items + return "&".join( + f"{url_quote(k, for_qs=True)}={url_quote(v, for_qs=True)}" for k, v in items ) -@evalcontextfilter -def do_replace(eval_ctx, s, old, new, count=None): +@pass_eval_context +def do_replace( + eval_ctx: "EvalContext", s: str, old: str, new: str, count: t.Optional[int] = None +) -> str: """Return a copy of the value with all occurrences of a substring replaced with a new one. The first argument is the substring that should be replaced, the second is the replacement string. @@ -182,8 +192,10 @@ """ if count is None: count = -1 + if not eval_ctx.autoescape: - return text_type(s).replace(text_type(old), text_type(new), count) + return str(s).replace(str(old), str(new), count) + if ( hasattr(old, "__html__") or hasattr(new, "__html__") @@ -191,22 +203,55 @@ ): s = escape(s) else: - s = soft_unicode(s) - return s.replace(soft_unicode(old), soft_unicode(new), count) + s = soft_str(s) + + return s.replace(soft_str(old), soft_str(new), count) -def do_upper(s): +def do_upper(s: str) -> str: """Convert a value to uppercase.""" - return soft_unicode(s).upper() + return soft_str(s).upper() -def do_lower(s): +def do_lower(s: str) -> str: """Convert a value to lowercase.""" - return soft_unicode(s).lower() + return soft_str(s).lower() -@evalcontextfilter -def do_xmlattr(_eval_ctx, d, autospace=True): +def do_items(value: t.Union[t.Mapping[K, V], Undefined]) -> t.Iterator[t.Tuple[K, V]]: + """Return an iterator over the ``(key, value)`` items of a mapping. + + ``x|items`` is the same as ``x.items()``, except if ``x`` is + undefined an empty iterator is returned. + + This filter is useful if you expect the template to be rendered with + an implementation of Jinja in another programming language that does + not have a ``.items()`` method on its mapping type. + + .. code-block:: html+jinja + + <dl> + {% for key, value in my_dict|items %} + <dt>{{ key }} + <dd>{{ value }} + {% endfor %} + </dl> + + .. versionadded:: 3.1 + """ + if isinstance(value, Undefined): + return + + if not isinstance(value, abc.Mapping): + raise TypeError("Can only get item pairs from a mapping.") + + yield from value.items() + + +@pass_eval_context +def do_xmlattr( + eval_ctx: "EvalContext", d: t.Mapping[str, t.Any], autospace: bool = True +) -> str: """Create an SGML/XML attribute string based on the items in a dict. All values that are neither `none` nor `undefined` are automatically escaped: @@ -229,42 +274,52 @@ As you can see it automatically prepends a space in front of the item if the filter returned something unless the second parameter is false. 
""" - rv = u" ".join( - u'%s="%s"' % (escape(key), escape(value)) - for key, value in iteritems(d) + rv = " ".join( + f'{escape(key)}="{escape(value)}"' + for key, value in d.items() if value is not None and not isinstance(value, Undefined) ) + if autospace and rv: - rv = u" " + rv - if _eval_ctx.autoescape: + rv = " " + rv + + if eval_ctx.autoescape: rv = Markup(rv) + return rv -def do_capitalize(s): +def do_capitalize(s: str) -> str: """Capitalize a value. The first character will be uppercase, all others lowercase. """ - return soft_unicode(s).capitalize() + return soft_str(s).capitalize() -def do_title(s): +_word_beginning_split_re = re.compile(r"([-\s({\[<]+)") + + +def do_title(s: str) -> str: """Return a titlecased version of the value. I.e. words will start with uppercase letters, all remaining characters are lowercase. """ return "".join( [ item[0].upper() + item[1:].lower() - for item in _word_beginning_split_re.split(soft_unicode(s)) + for item in _word_beginning_split_re.split(soft_str(s)) if item ] ) -def do_dictsort(value, case_sensitive=False, by="key", reverse=False): - """Sort a dict and yield (key, value) pairs. Because python dicts are - unsorted you may want to use this function to order them by either - key or value: +def do_dictsort( + value: t.Mapping[K, V], + case_sensitive: bool = False, + by: 'te.Literal["key", "value"]' = "key", + reverse: bool = False, +) -> t.List[t.Tuple[K, V]]: + """Sort a dict and yield (key, value) pairs. Python dicts may not + be in the order you want to display them in, so sort them first. .. sourcecode:: jinja @@ -287,7 +342,7 @@ else: raise FilterArgumentError('You can only sort by either "key" or "value"') - def sort_func(item): + def sort_func(item: t.Tuple[t.Any, t.Any]) -> t.Any: value = item[pos] if not case_sensitive: @@ -298,8 +353,14 @@ return sorted(value.items(), key=sort_func, reverse=reverse) -@environmentfilter -def do_sort(environment, value, reverse=False, case_sensitive=False, attribute=None): +@pass_environment +def do_sort( + environment: "Environment", + value: "t.Iterable[V]", + reverse: bool = False, + case_sensitive: bool = False, + attribute: t.Optional[t.Union[str, int]] = None, +) -> "t.List[V]": """Sort an iterable using Python's :func:`sorted`. .. sourcecode:: jinja @@ -331,7 +392,7 @@ .. sourcecode:: jinja - {% for user users|sort(attribute="age,name") %} + {% for user in users|sort(attribute="age,name") %} ... {% endfor %} @@ -348,8 +409,13 @@ return sorted(value, key=key_func, reverse=reverse) -@environmentfilter -def do_unique(environment, value, case_sensitive=False, attribute=None): +@pass_environment +def do_unique( + environment: "Environment", + value: "t.Iterable[V]", + case_sensitive: bool = False, + attribute: t.Optional[t.Union[str, int]] = None, +) -> "t.Iterator[V]": """Returns a list of unique items from the given iterable. .. 
sourcecode:: jinja @@ -376,7 +442,13 @@ yield item -def _min_or_max(environment, value, func, case_sensitive, attribute): +def _min_or_max( + environment: "Environment", + value: "t.Iterable[V]", + func: "t.Callable[..., V]", + case_sensitive: bool, + attribute: t.Optional[t.Union[str, int]], +) -> "t.Union[V, Undefined]": it = iter(value) try: @@ -390,8 +462,13 @@ return func(chain([first], it), key=key_func) -@environmentfilter -def do_min(environment, value, case_sensitive=False, attribute=None): +@pass_environment +def do_min( + environment: "Environment", + value: "t.Iterable[V]", + case_sensitive: bool = False, + attribute: t.Optional[t.Union[str, int]] = None, +) -> "t.Union[V, Undefined]": """Return the smallest item from the sequence. .. sourcecode:: jinja @@ -405,8 +482,13 @@ return _min_or_max(environment, value, min, case_sensitive, attribute) -@environmentfilter -def do_max(environment, value, case_sensitive=False, attribute=None): +@pass_environment +def do_max( + environment: "Environment", + value: "t.Iterable[V]", + case_sensitive: bool = False, + attribute: t.Optional[t.Union[str, int]] = None, +) -> "t.Union[V, Undefined]": """Return the largest item from the sequence. .. sourcecode:: jinja @@ -420,7 +502,11 @@ return _min_or_max(environment, value, max, case_sensitive, attribute) -def do_default(value, default_value=u"", boolean=False): +def do_default( + value: V, + default_value: V = "", # type: ignore + boolean: bool = False, +) -> V: """If the value is undefined it will return the passed default value, otherwise the value of the variable: @@ -445,11 +531,17 @@ """ if isinstance(value, Undefined) or (boolean and not value): return default_value + return value -@evalcontextfilter -def do_join(eval_ctx, value, d=u"", attribute=None): +@pass_eval_context +def sync_do_join( + eval_ctx: "EvalContext", + value: t.Iterable, + d: str = "", + attribute: t.Optional[t.Union[str, int]] = None, +) -> str: """Return a string which is the concatenation of the strings in the sequence. The separator between elements is an empty string per default, you can define it with the optional parameter: @@ -472,39 +564,54 @@ The `attribute` parameter was added. """ if attribute is not None: - value = imap(make_attrgetter(eval_ctx.environment, attribute), value) + value = map(make_attrgetter(eval_ctx.environment, attribute), value) # no automatic escaping? joining is a lot easier then if not eval_ctx.autoescape: - return text_type(d).join(imap(text_type, value)) + return str(d).join(map(str, value)) # if the delimiter doesn't have an html representation we check # if any of the items has. 
If yes we do a coercion to Markup if not hasattr(d, "__html__"): value = list(value) do_escape = False + for idx, item in enumerate(value): if hasattr(item, "__html__"): do_escape = True else: - value[idx] = text_type(item) + value[idx] = str(item) + if do_escape: d = escape(d) else: - d = text_type(d) + d = str(d) + return d.join(value) # no html involved, to normal joining - return soft_unicode(d).join(imap(soft_unicode, value)) + return soft_str(d).join(map(soft_str, value)) -def do_center(value, width=80): +@async_variant(sync_do_join) # type: ignore +async def do_join( + eval_ctx: "EvalContext", + value: t.Union[t.AsyncIterable, t.Iterable], + d: str = "", + attribute: t.Optional[t.Union[str, int]] = None, +) -> str: + return sync_do_join(eval_ctx, await auto_to_list(value), d, attribute) + + +def do_center(value: str, width: int = 80) -> str: """Centers the value in a field of a given width.""" - return text_type(value).center(width) + return soft_str(value).center(width) -@environmentfilter -def do_first(environment, seq): +@pass_environment +def sync_do_first( + environment: "Environment", seq: "t.Iterable[V]" +) -> "t.Union[V, Undefined]": """Return the first item of a sequence.""" try: return next(iter(seq)) @@ -512,10 +619,21 @@ return environment.undefined("No first item, sequence was empty.") -@environmentfilter -def do_last(environment, seq): - """ - Return the last item of a sequence. +@async_variant(sync_do_first) # type: ignore +async def do_first( + environment: "Environment", seq: "t.Union[t.AsyncIterable[V], t.Iterable[V]]" +) -> "t.Union[V, Undefined]": + try: + return await auto_aiter(seq).__anext__() + except StopAsyncIteration: + return environment.undefined("No first item, sequence was empty.") + + +@pass_environment +def do_last( + environment: "Environment", seq: "t.Reversible[V]" +) -> "t.Union[V, Undefined]": + """Return the last item of a sequence. Note: Does not work with generators. You may want to explicitly convert it to a list: @@ -530,8 +648,11 @@ return environment.undefined("No last item, sequence was empty.") -@contextfilter -def do_random(context, seq): +# No async do_last, it may not be safe in async mode. + + +@pass_context +def do_random(context: "Context", seq: "t.Sequence[V]") -> "t.Union[V, Undefined]": """Return a random item from the sequence.""" try: return random.choice(seq) @@ -539,108 +660,151 @@ return context.environment.undefined("No random item, sequence was empty.") -def do_filesizeformat(value, binary=False): +def do_filesizeformat(value: t.Union[str, float, int], binary: bool = False) -> str: """Format the value like a 'human-readable' file size (i.e. 13 kB, 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega, Giga, etc.), if the second parameter is set to `True` the binary prefixes are used (Mebi, Gibi). 
""" bytes = float(value) - base = binary and 1024 or 1000 + base = 1024 if binary else 1000 prefixes = [ - (binary and "KiB" or "kB"), - (binary and "MiB" or "MB"), - (binary and "GiB" or "GB"), - (binary and "TiB" or "TB"), - (binary and "PiB" or "PB"), - (binary and "EiB" or "EB"), - (binary and "ZiB" or "ZB"), - (binary and "YiB" or "YB"), + ("KiB" if binary else "kB"), + ("MiB" if binary else "MB"), + ("GiB" if binary else "GB"), + ("TiB" if binary else "TB"), + ("PiB" if binary else "PB"), + ("EiB" if binary else "EB"), + ("ZiB" if binary else "ZB"), + ("YiB" if binary else "YB"), ] + if bytes == 1: return "1 Byte" elif bytes < base: - return "%d Bytes" % bytes + return f"{int(bytes)} Bytes" else: for i, prefix in enumerate(prefixes): unit = base ** (i + 2) + if bytes < unit: - return "%.1f %s" % ((base * bytes / unit), prefix) - return "%.1f %s" % ((base * bytes / unit), prefix) + return f"{base * bytes / unit:.1f} {prefix}" + + return f"{base * bytes / unit:.1f} {prefix}" -def do_pprint(value, verbose=False): - """Pretty print a variable. Useful for debugging. - - With Jinja 1.2 onwards you can pass it a parameter. If this parameter - is truthy the output will be more verbose (this requires `pretty`) - """ - return pformat(value, verbose=verbose) +def do_pprint(value: t.Any) -> str: + """Pretty print a variable. Useful for debugging.""" + return pformat(value) -@evalcontextfilter +_uri_scheme_re = re.compile(r"^([\w.+-]{2,}:(/){0,2})$") + + +@pass_eval_context def do_urlize( - eval_ctx, value, trim_url_limit=None, nofollow=False, target=None, rel=None -): - """Converts URLs in plain text into clickable links. + eval_ctx: "EvalContext", + value: str, + trim_url_limit: t.Optional[int] = None, + nofollow: bool = False, + target: t.Optional[str] = None, + rel: t.Optional[str] = None, + extra_schemes: t.Optional[t.Iterable[str]] = None, +) -> str: + """Convert URLs in text into clickable links. - If you pass the filter an additional integer it will shorten the urls - to that number. Also a third argument exists that makes the urls - "nofollow": + This may not recognize links in some situations. Usually, a more + comprehensive formatter, such as a Markdown library, is a better + choice. - .. sourcecode:: jinja + Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email + addresses. Links with trailing punctuation (periods, commas, closing + parentheses) and leading punctuation (opening parentheses) are + recognized excluding the punctuation. Email addresses that include + header fields are not recognized (for example, + ``mailto:address@example.com?cc=copy@example.com``). - {{ mytext|urlize(40, true) }} - links are shortened to 40 chars and defined with rel="nofollow" + :param value: Original text containing URLs to link. + :param trim_url_limit: Shorten displayed URL values to this length. + :param nofollow: Add the ``rel=nofollow`` attribute to links. + :param target: Add the ``target`` attribute to links. + :param rel: Add the ``rel`` attribute to links. + :param extra_schemes: Recognize URLs that start with these schemes + in addition to the default behavior. Defaults to + ``env.policies["urlize.extra_schemes"]``, which defaults to no + extra schemes. - If *target* is specified, the ``target`` attribute will be added to the - ``<a>`` tag: + .. versionchanged:: 3.0 + The ``extra_schemes`` parameter was added. - .. sourcecode:: jinja + .. versionchanged:: 3.0 + Generate ``https://`` links for URLs without a scheme. - {{ mytext|urlize(40, target='_blank') }} + .. 
versionchanged:: 3.0 + The parsing rules were updated. Recognize email addresses with + or without the ``mailto:`` scheme. Validate IP addresses. Ignore + parentheses and brackets in more cases. - .. versionchanged:: 2.8+ - The *target* parameter was added. + .. versionchanged:: 2.8 + The ``target`` parameter was added. """ policies = eval_ctx.environment.policies - rel = set((rel or "").split() or []) + rel_parts = set((rel or "").split()) + if nofollow: - rel.add("nofollow") - rel.update((policies["urlize.rel"] or "").split()) + rel_parts.add("nofollow") + + rel_parts.update((policies["urlize.rel"] or "").split()) + rel = " ".join(sorted(rel_parts)) or None + if target is None: target = policies["urlize.target"] - rel = " ".join(sorted(rel)) or None - rv = urlize(value, trim_url_limit, rel=rel, target=target) + + if extra_schemes is None: + extra_schemes = policies["urlize.extra_schemes"] or () + + for scheme in extra_schemes: + if _uri_scheme_re.fullmatch(scheme) is None: + raise FilterArgumentError(f"{scheme!r} is not a valid URI scheme prefix.") + + rv = urlize( + value, + trim_url_limit=trim_url_limit, + rel=rel, + target=target, + extra_schemes=extra_schemes, + ) + if eval_ctx.autoescape: rv = Markup(rv) + return rv -def do_indent(s, width=4, first=False, blank=False, indentfirst=None): +def do_indent( + s: str, width: t.Union[int, str] = 4, first: bool = False, blank: bool = False +) -> str: """Return a copy of the string with each line indented by 4 spaces. The first line and blank lines are not indented by default. - :param width: Number of spaces to indent by. + :param width: Number of spaces, or a string, to indent by. :param first: Don't skip indenting the first line. :param blank: Don't skip indenting empty lines. + .. versionchanged:: 3.0 + ``width`` can be a string. + .. versionchanged:: 2.10 Blank lines are not indented by default. Rename the ``indentfirst`` argument to ``first``. """ - if indentfirst is not None: - warnings.warn( - "The 'indentfirst' argument is renamed to 'first' and will" - " be removed in version 3.0.", - DeprecationWarning, - stacklevel=2, - ) - first = indentfirst + if isinstance(width, str): + indention = width + else: + indention = " " * width - indention = u" " * width - newline = u"\n" + newline = "\n" if isinstance(s, Markup): indention = Markup(indention) @@ -665,8 +829,15 @@ return rv -@environmentfilter -def do_truncate(env, s, length=255, killwords=False, end="...", leeway=None): +@pass_environment +def do_truncate( + env: "Environment", + s: str, + length: int = 255, + killwords: bool = False, + end: str = "...", + leeway: t.Optional[int] = None, +) -> str: """Return a truncated copy of the string. The length is specified with the first parameter which defaults to ``255``. If the second parameter is ``true`` the filter will cut the text at length. 
Otherwise @@ -692,25 +863,29 @@ """ if leeway is None: leeway = env.policies["truncate.leeway"] - assert length >= len(end), "expected length >= %s, got %s" % (len(end), length) - assert leeway >= 0, "expected leeway >= 0, got %s" % leeway + + assert length >= len(end), f"expected length >= {len(end)}, got {length}" + assert leeway >= 0, f"expected leeway >= 0, got {leeway}" + if len(s) <= length + leeway: return s + if killwords: return s[: length - len(end)] + end + result = s[: length - len(end)].rsplit(" ", 1)[0] return result + end -@environmentfilter +@pass_environment def do_wordwrap( - environment, - s, - width=79, - break_long_words=True, - wrapstring=None, - break_on_hyphens=True, -): + environment: "Environment", + s: str, + width: int = 79, + break_long_words: bool = True, + wrapstring: t.Optional[str] = None, + break_on_hyphens: bool = True, +) -> str: """Wrap a string to the given width. Existing newlines are treated as paragraphs to be wrapped separately. @@ -732,10 +907,9 @@ .. versionchanged:: 2.7 Added the ``wrapstring`` parameter. """ - import textwrap - if not wrapstring: + if wrapstring is None: wrapstring = environment.newline_sequence # textwrap.wrap doesn't consider existing newlines when wrapping. @@ -759,12 +933,15 @@ ) -def do_wordcount(s): +_word_re = re.compile(r"\w+") + + +def do_wordcount(s: str) -> int: """Count the words in that string.""" - return len(_word_re.findall(soft_unicode(s))) + return len(_word_re.findall(soft_str(s))) -def do_int(value, default=0, base=10): +def do_int(value: t.Any, default: int = 0, base: int = 10) -> int: """Convert the value into an integer. If the conversion doesn't work it will return ``0``. You can override this default using the first parameter. You @@ -774,8 +951,9 @@ The base is ignored for decimal numbers and non-string values. """ try: - if isinstance(value, string_types): + if isinstance(value, str): return int(value, base) + return int(value) except (TypeError, ValueError): # this quirk is necessary so that "42.23"|int gives 42. @@ -785,7 +963,7 @@ return default -def do_float(value, default=0.0): +def do_float(value: t.Any, default: float = 0.0) -> float: """Convert the value into a floating point number. If the conversion doesn't work it will return ``0.0``. You can override this default using the first parameter. @@ -796,7 +974,7 @@ return default -def do_format(value, *args, **kwargs): +def do_format(value: str, *args: t.Any, **kwargs: t.Any) -> str: """Apply the given values to a `printf-style`_ format string, like ``string % values``. 
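For reference, a quick sketch exercising a few of the string filters touched in the hunks above (truncate, format, int/float), assuming the vendored package is importable as jinja2:

from jinja2 import Environment

env = Environment()

# truncate(length, killwords, end, leeway); leeway=0 forces the cut.
print(env.from_string("{{ 'hello world foobar'|truncate(11, true, '...', 0) }}").render())
# -> "hello wo..."

# format applies printf-style substitution.
print(env.from_string("{{ '%s=%d'|format('x', 2) }}").render())  # -> "x=2"

# int/float fall back to a default when conversion fails.
print(env.from_string("{{ '42.5'|int }} / {{ 'n/a'|float(1.5) }}").render())
# -> "42 / 1.5"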
@@ -820,22 +998,26 @@ raise FilterArgumentError( "can't handle positional and keyword arguments at the same time" ) - return soft_unicode(value) % (kwargs or args) + + return soft_str(value) % (kwargs or args) -def do_trim(value, chars=None): +def do_trim(value: str, chars: t.Optional[str] = None) -> str: """Strip leading and trailing characters, by default whitespace.""" - return soft_unicode(value).strip(chars) + return soft_str(value).strip(chars) -def do_striptags(value): +def do_striptags(value: "t.Union[str, HasHTML]") -> str: """Strip SGML/XML tags and replace adjacent whitespace by one space.""" if hasattr(value, "__html__"): - value = value.__html__() - return Markup(text_type(value)).striptags() + value = t.cast("HasHTML", value).__html__() + + return Markup(str(value)).striptags() -def do_slice(value, slices, fill_with=None): +def sync_do_slice( + value: "t.Collection[V]", slices: int, fill_with: "t.Optional[V]" = None +) -> "t.Iterator[t.List[V]]": """Slice an iterator and return a list of lists containing those items. Useful if you want to create a div containing three ul tags that represent columns: @@ -860,18 +1042,34 @@ items_per_slice = length // slices slices_with_extra = length % slices offset = 0 + for slice_number in range(slices): start = offset + slice_number * items_per_slice + if slice_number < slices_with_extra: offset += 1 + end = offset + (slice_number + 1) * items_per_slice tmp = seq[start:end] + if fill_with is not None and slice_number >= slices_with_extra: tmp.append(fill_with) + yield tmp -def do_batch(value, linecount, fill_with=None): +@async_variant(sync_do_slice) # type: ignore +async def do_slice( + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + slices: int, + fill_with: t.Optional[t.Any] = None, +) -> "t.Iterator[t.List[V]]": + return sync_do_slice(await auto_to_list(value), slices, fill_with) + + +def do_batch( + value: "t.Iterable[V]", linecount: int, fill_with: "t.Optional[V]" = None +) -> "t.Iterator[t.List[V]]": """ A filter that batches items. It works pretty much like `slice` just the other way round. It returns a list of lists with the @@ -890,19 +1088,27 @@ {%- endfor %} </table> """ - tmp = [] + tmp: "t.List[V]" = [] + for item in value: if len(tmp) == linecount: yield tmp tmp = [] + tmp.append(item) + if tmp: if fill_with is not None and len(tmp) < linecount: tmp += [fill_with] * (linecount - len(tmp)) + yield tmp -def do_round(value, precision=0, method="common"): +def do_round( + value: float, + precision: int = 0, + method: 'te.Literal["common", "ceil", "floor"]' = "common", +) -> float: """Round the number to a given precision. The first parameter specifies the precision (default is ``0``), the second the rounding method: @@ -930,24 +1136,35 @@ """ if method not in {"common", "ceil", "floor"}: raise FilterArgumentError("method must be common, ceil or floor") + if method == "common": return round(value, precision) + func = getattr(math, method) - return func(value * (10 ** precision)) / (10 ** precision) + return t.cast(float, func(value * (10**precision)) / (10**precision)) -# Use a regular tuple repr here. This is what we did in the past and we -# really want to hide this custom type as much as possible. In particular -# we do not want to accidentally expose an auto generated repr in case -# people start to print this out in comments or something similar for -# debugging. 
-_GroupTuple = namedtuple("_GroupTuple", ["grouper", "list"]) -_GroupTuple.__repr__ = tuple.__repr__ -_GroupTuple.__str__ = tuple.__str__ +class _GroupTuple(t.NamedTuple): + grouper: t.Any + list: t.List + + # Use the regular tuple repr to hide this subclass if users print + # out the value during debugging. + def __repr__(self) -> str: + return tuple.__repr__(self) + + def __str__(self) -> str: + return tuple.__str__(self) -@environmentfilter -def do_groupby(environment, value, attribute): +@pass_environment +def sync_do_groupby( + environment: "Environment", + value: "t.Iterable[V]", + attribute: t.Union[str, int], + default: t.Optional[t.Any] = None, + case_sensitive: bool = False, +) -> "t.List[_GroupTuple]": """Group a sequence of objects by an attribute using Python's :func:`itertools.groupby`. The attribute can use dot notation for nested access, like ``"address.city"``. Unlike Python's ``groupby``, @@ -978,18 +1195,86 @@ <li>{{ group.grouper }}: {{ group.list|join(", ") }} {% endfor %}</ul> + You can specify a ``default`` value to use if an object in the list + does not have the given attribute. + + .. sourcecode:: jinja + + <ul>{% for city, items in users|groupby("city", default="NY") %} + <li>{{ city }}: {{ items|map(attribute="name")|join(", ") }}</li> + {% endfor %}</ul> + + Like the :func:`~jinja-filters.sort` filter, sorting and grouping is + case-insensitive by default. The ``key`` for each group will have + the case of the first item in that group of values. For example, if + a list of users has cities ``["CA", "NY", "ca"]``, the "CA" group + will have two values. This can be disabled by passing + ``case_sensitive=True``. + + .. versionchanged:: 3.1 + Added the ``case_sensitive`` parameter. Sorting and grouping is + case-insensitive by default, matching other filters that do + comparisons. + + .. versionchanged:: 3.0 + Added the ``default`` parameter. + .. versionchanged:: 2.6 The attribute supports dot notation for nested access. """ - expr = make_attrgetter(environment, attribute) - return [ + expr = make_attrgetter( + environment, + attribute, + postprocess=ignore_case if not case_sensitive else None, + default=default, + ) + out = [ _GroupTuple(key, list(values)) for key, values in groupby(sorted(value, key=expr), expr) ] + if not case_sensitive: + # Return the real key from the first value instead of the lowercase key. + output_expr = make_attrgetter(environment, attribute, default=default) + out = [_GroupTuple(output_expr(values[0]), values) for _, values in out] -@environmentfilter -def do_sum(environment, iterable, attribute=None, start=0): + return out + + +@async_variant(sync_do_groupby) # type: ignore +async def do_groupby( + environment: "Environment", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + attribute: t.Union[str, int], + default: t.Optional[t.Any] = None, + case_sensitive: bool = False, +) -> "t.List[_GroupTuple]": + expr = make_attrgetter( + environment, + attribute, + postprocess=ignore_case if not case_sensitive else None, + default=default, + ) + out = [ + _GroupTuple(key, await auto_to_list(values)) + for key, values in groupby(sorted(await auto_to_list(value), key=expr), expr) + ] + + if not case_sensitive: + # Return the real key from the first value instead of the lowercase key. 
+ output_expr = make_attrgetter(environment, attribute, default=default) + out = [_GroupTuple(output_expr(values[0]), values) for _, values in out] + + return out + + +@pass_environment +def sync_do_sum( + environment: "Environment", + iterable: "t.Iterable[V]", + attribute: t.Optional[t.Union[str, int]] = None, + start: V = 0, # type: ignore +) -> V: """Returns the sum of a sequence of numbers plus the value of parameter 'start' (which defaults to 0). When the sequence is empty it returns start. @@ -1001,52 +1286,93 @@ Total: {{ items|sum(attribute='price') }} .. versionchanged:: 2.6 - The `attribute` parameter was added to allow suming up over - attributes. Also the `start` parameter was moved on to the right. + The ``attribute`` parameter was added to allow summing up over + attributes. Also the ``start`` parameter was moved on to the right. """ if attribute is not None: - iterable = imap(make_attrgetter(environment, attribute), iterable) - return sum(iterable, start) + iterable = map(make_attrgetter(environment, attribute), iterable) + + return sum(iterable, start) # type: ignore[no-any-return, call-overload] -def do_list(value): +@async_variant(sync_do_sum) # type: ignore +async def do_sum( + environment: "Environment", + iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + attribute: t.Optional[t.Union[str, int]] = None, + start: V = 0, # type: ignore +) -> V: + rv = start + + if attribute is not None: + func = make_attrgetter(environment, attribute) + else: + + def func(x: V) -> V: + return x + + async for item in auto_aiter(iterable): + rv += func(item) + + return rv + + +def sync_do_list(value: "t.Iterable[V]") -> "t.List[V]": """Convert the value into a list. If it was a string the returned list will be a list of characters. """ return list(value) -def do_mark_safe(value): +@async_variant(sync_do_list) # type: ignore +async def do_list(value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]") -> "t.List[V]": + return await auto_to_list(value) + + +def do_mark_safe(value: str) -> Markup: """Mark the value as safe which means that in an environment with automatic escaping enabled this variable will not be escaped. """ return Markup(value) -def do_mark_unsafe(value): +def do_mark_unsafe(value: str) -> str: """Mark a value as unsafe. This is the reverse operation for :func:`safe`.""" - return text_type(value) + return str(value) -def do_reverse(value): +@typing.overload +def do_reverse(value: str) -> str: + ... + + +@typing.overload +def do_reverse(value: "t.Iterable[V]") -> "t.Iterable[V]": + ... + + +def do_reverse(value: t.Union[str, t.Iterable[V]]) -> t.Union[str, t.Iterable[V]]: """Reverse the object or return an iterator that iterates over it the other way round. """ - if isinstance(value, string_types): + if isinstance(value, str): return value[::-1] + try: - return reversed(value) + return reversed(value) # type: ignore except TypeError: try: rv = list(value) rv.reverse() return rv - except TypeError: - raise FilterArgumentError("argument must be iterable") + except TypeError as e: + raise FilterArgumentError("argument must be iterable") from e -@environmentfilter -def do_attr(environment, obj, name): +@pass_environment +def do_attr( + environment: "Environment", obj: t.Any, name: str +) -> t.Union[Undefined, t.Any]: """Get an attribute of an object. ``foo|attr("bar")`` works like ``foo.bar`` just that always an attribute is returned and items are not looked up. 
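A rough illustration of the groupby changes above (the new default parameter and case-insensitive grouping that still reports the original key), assuming jinja2 is importable:

from jinja2 import Environment

env = Environment()
users = [
    {"name": "Ann", "city": "NY"},
    {"name": "Bob", "city": "ny"},   # grouped with "NY" unless case_sensitive=True
    {"name": "Cid"},                 # missing key picks up the default
]
out = env.from_string(
    "{% for city, people in users|groupby('city', default='unknown') %}"
    "{{ city }}: {{ people|map(attribute='name')|join(', ') }}; "
    "{% endfor %}"
).render(users=users)
print(out)  # -> "NY: Ann, Bob; unknown: Cid; "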
@@ -1063,16 +1389,39 @@ except AttributeError: pass else: - if environment.sandboxed and not environment.is_safe_attribute( - obj, name, value - ): - return environment.unsafe_undefined(obj, name) + if environment.sandboxed: + environment = t.cast("SandboxedEnvironment", environment) + + if not environment.is_safe_attribute(obj, name, value): + return environment.unsafe_undefined(obj, name) + return value + return environment.undefined(obj=obj, name=name) -@contextfilter -def do_map(*args, **kwargs): +@typing.overload +def sync_do_map( + context: "Context", value: t.Iterable, name: str, *args: t.Any, **kwargs: t.Any +) -> t.Iterable: + ... + + +@typing.overload +def sync_do_map( + context: "Context", + value: t.Iterable, + *, + attribute: str = ..., + default: t.Optional[t.Any] = None, +) -> t.Iterable: + ... + + +@pass_context +def sync_do_map( + context: "Context", value: t.Iterable, *args: t.Any, **kwargs: t.Any +) -> t.Iterable: """Applies a filter on a sequence of objects or looks up an attribute. This is useful when dealing with lists of objects but you are really only interested in a certain value of it. @@ -1104,7 +1453,7 @@ .. code-block:: python (u.username for u in users) - (u.username or "Anonymous" for u in users) + (getattr(u, "username", "Anonymous") for u in users) (do_lower(x) for x in titles) .. versionchanged:: 2.11.0 @@ -1112,14 +1461,53 @@ .. versionadded:: 2.7 """ - seq, func = prepare_map(args, kwargs) - if seq: - for item in seq: + if value: + func = prepare_map(context, args, kwargs) + + for item in value: yield func(item) -@contextfilter -def do_select(*args, **kwargs): +@typing.overload +def do_map( + context: "Context", + value: t.Union[t.AsyncIterable, t.Iterable], + name: str, + *args: t.Any, + **kwargs: t.Any, +) -> t.Iterable: + ... + + +@typing.overload +def do_map( + context: "Context", + value: t.Union[t.AsyncIterable, t.Iterable], + *, + attribute: str = ..., + default: t.Optional[t.Any] = None, +) -> t.Iterable: + ... + + +@async_variant(sync_do_map) # type: ignore +async def do_map( + context: "Context", + value: t.Union[t.AsyncIterable, t.Iterable], + *args: t.Any, + **kwargs: t.Any, +) -> t.AsyncIterable: + if value: + func = prepare_map(context, args, kwargs) + + async for item in auto_aiter(value): + yield await auto_await(func(item)) + + +@pass_context +def sync_do_select( + context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any +) -> "t.Iterator[V]": """Filters a sequence of objects by applying a test to each object, and only selecting the objects with the test succeeding. @@ -1144,11 +1532,23 @@ .. versionadded:: 2.7 """ - return select_or_reject(args, kwargs, lambda x: x, False) + return select_or_reject(context, value, args, kwargs, lambda x: x, False) -@contextfilter -def do_reject(*args, **kwargs): +@async_variant(sync_do_select) # type: ignore +async def do_select( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + *args: t.Any, + **kwargs: t.Any, +) -> "t.AsyncIterator[V]": + return async_select_or_reject(context, value, args, kwargs, lambda x: x, False) + + +@pass_context +def sync_do_reject( + context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any +) -> "t.Iterator[V]": """Filters a sequence of objects by applying a test to each object, and rejecting the objects with the test succeeding. @@ -1168,11 +1568,23 @@ .. 
versionadded:: 2.7 """ - return select_or_reject(args, kwargs, lambda x: not x, False) + return select_or_reject(context, value, args, kwargs, lambda x: not x, False) -@contextfilter -def do_selectattr(*args, **kwargs): +@async_variant(sync_do_reject) # type: ignore +async def do_reject( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + *args: t.Any, + **kwargs: t.Any, +) -> "t.AsyncIterator[V]": + return async_select_or_reject(context, value, args, kwargs, lambda x: not x, False) + + +@pass_context +def sync_do_selectattr( + context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any +) -> "t.Iterator[V]": """Filters a sequence of objects by applying a test to the specified attribute of each object, and only selecting the objects with the test succeeding. @@ -1196,11 +1608,23 @@ .. versionadded:: 2.7 """ - return select_or_reject(args, kwargs, lambda x: x, True) + return select_or_reject(context, value, args, kwargs, lambda x: x, True) -@contextfilter -def do_rejectattr(*args, **kwargs): +@async_variant(sync_do_selectattr) # type: ignore +async def do_selectattr( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + *args: t.Any, + **kwargs: t.Any, +) -> "t.AsyncIterator[V]": + return async_select_or_reject(context, value, args, kwargs, lambda x: x, True) + + +@pass_context +def sync_do_rejectattr( + context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any +) -> "t.Iterator[V]": """Filters a sequence of objects by applying a test to the specified attribute of each object, and rejecting the objects with the test succeeding. @@ -1222,105 +1646,138 @@ .. versionadded:: 2.7 """ - return select_or_reject(args, kwargs, lambda x: not x, True) + return select_or_reject(context, value, args, kwargs, lambda x: not x, True) -@evalcontextfilter -def do_tojson(eval_ctx, value, indent=None): - """Dumps a structure to JSON so that it's safe to use in ``<script>`` - tags. It accepts the same arguments and returns a JSON string. Note that - this is available in templates through the ``|tojson`` filter which will - also mark the result as safe. Due to how this function escapes certain - characters this is safe even if used outside of ``<script>`` tags. +@async_variant(sync_do_rejectattr) # type: ignore +async def do_rejectattr( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + *args: t.Any, + **kwargs: t.Any, +) -> "t.AsyncIterator[V]": + return async_select_or_reject(context, value, args, kwargs, lambda x: not x, True) - The following characters are escaped in strings: - - ``<`` - - ``>`` - - ``&`` - - ``'`` +@pass_eval_context +def do_tojson( + eval_ctx: "EvalContext", value: t.Any, indent: t.Optional[int] = None +) -> Markup: + """Serialize an object to a string of JSON, and mark it safe to + render in HTML. This filter is only for use in HTML documents. - This makes it safe to embed such strings in any place in HTML with the - notable exception of double quoted attributes. In that case single - quote your attributes or HTML escape it in addition. + The returned string is safe to render in HTML documents and + ``<script>`` tags. The exception is in HTML attributes that are + double quoted; either use single quotes or the ``|forceescape`` + filter. - The indent parameter can be used to enable pretty printing. Set it to - the number of spaces that the structures should be indented with. - - Note that this filter is for use in HTML contexts only. + :param value: The object to serialize to JSON. 
+ :param indent: The ``indent`` parameter passed to ``dumps``, for + pretty-printing the value. .. versionadded:: 2.9 """ policies = eval_ctx.environment.policies - dumper = policies["json.dumps_function"] - options = policies["json.dumps_kwargs"] + dumps = policies["json.dumps_function"] + kwargs = policies["json.dumps_kwargs"] + if indent is not None: - options = dict(options) - options["indent"] = indent - return htmlsafe_json_dumps(value, dumper=dumper, **options) + kwargs = kwargs.copy() + kwargs["indent"] = indent + + return htmlsafe_json_dumps(value, dumps=dumps, **kwargs) -def prepare_map(args, kwargs): - context = args[0] - seq = args[1] - default = None - - if len(args) == 2 and "attribute" in kwargs: +def prepare_map( + context: "Context", args: t.Tuple, kwargs: t.Dict[str, t.Any] +) -> t.Callable[[t.Any], t.Any]: + if not args and "attribute" in kwargs: attribute = kwargs.pop("attribute") default = kwargs.pop("default", None) + if kwargs: raise FilterArgumentError( - "Unexpected keyword argument %r" % next(iter(kwargs)) + f"Unexpected keyword argument {next(iter(kwargs))!r}" ) + func = make_attrgetter(context.environment, attribute, default=default) else: try: - name = args[2] - args = args[3:] + name = args[0] + args = args[1:] except LookupError: - raise FilterArgumentError("map requires a filter argument") + raise FilterArgumentError("map requires a filter argument") from None - def func(item): + def func(item: t.Any) -> t.Any: return context.environment.call_filter( name, item, args, kwargs, context=context ) - return seq, func + return func -def prepare_select_or_reject(args, kwargs, modfunc, lookup_attr): - context = args[0] - seq = args[1] +def prepare_select_or_reject( + context: "Context", + args: t.Tuple, + kwargs: t.Dict[str, t.Any], + modfunc: t.Callable[[t.Any], t.Any], + lookup_attr: bool, +) -> t.Callable[[t.Any], t.Any]: if lookup_attr: try: - attr = args[2] + attr = args[0] except LookupError: - raise FilterArgumentError("Missing parameter for attribute name") + raise FilterArgumentError("Missing parameter for attribute name") from None + transfunc = make_attrgetter(context.environment, attr) off = 1 else: off = 0 - def transfunc(x): + def transfunc(x: V) -> V: return x try: - name = args[2 + off] - args = args[3 + off :] + name = args[off] + args = args[1 + off :] - def func(item): + def func(item: t.Any) -> t.Any: return context.environment.call_test(name, item, args, kwargs) except LookupError: - func = bool + func = bool # type: ignore - return seq, lambda item: modfunc(func(transfunc(item))) + return lambda item: modfunc(func(transfunc(item))) -def select_or_reject(args, kwargs, modfunc, lookup_attr): - seq, func = prepare_select_or_reject(args, kwargs, modfunc, lookup_attr) - if seq: - for item in seq: +def select_or_reject( + context: "Context", + value: "t.Iterable[V]", + args: t.Tuple, + kwargs: t.Dict[str, t.Any], + modfunc: t.Callable[[t.Any], t.Any], + lookup_attr: bool, +) -> "t.Iterator[V]": + if value: + func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr) + + for item in value: + if func(item): + yield item + + +async def async_select_or_reject( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + args: t.Tuple, + kwargs: t.Dict[str, t.Any], + modfunc: t.Callable[[t.Any], t.Any], + lookup_attr: bool, +) -> "t.AsyncIterator[V]": + if value: + func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr) + + async for item in auto_aiter(value): if func(item): yield item @@ -1350,6 
+1807,7 @@ "length": len, "list": do_list, "lower": do_lower, + "items": do_items, "map": do_map, "min": do_min, "max": do_max, @@ -1365,7 +1823,7 @@ "selectattr": do_selectattr, "slice": do_slice, "sort": do_sort, - "string": soft_unicode, + "string": soft_str, "striptags": do_striptags, "sum": do_sum, "title": do_title,
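A minimal usage sketch (not part of the diff) of the sync/async filter pairs introduced above, assuming Jinja2 3.x with ``enable_async=True``; the async variants registered via ``async_variant`` (``do_map``, ``do_sum``, ...) are selected automatically when rendering asynchronously.

    import asyncio
    from jinja2 import Environment

    env = Environment(enable_async=True)
    template = env.from_string(
        "{{ items | map(attribute='price', default=0) | sum }}"
    )

    async def render():
        # With enable_async=True the async filter variants run here.
        return await template.render_async(
            items=[{"price": 2}, {"price": 3}, {}]
        )

    print(asyncio.run(render()))  # prints 5 (missing price falls back to default=0)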
diff --git a/third_party/jinja2/idtracking.py b/third_party/jinja2/idtracking.py index 9a0d838..995ebaa0 100644 --- a/third_party/jinja2/idtracking.py +++ b/third_party/jinja2/idtracking.py
@@ -1,4 +1,6 @@ -from ._compat import iteritems +import typing as t + +from . import nodes from .visitor import NodeVisitor VAR_LOAD_PARAMETER = "param" @@ -7,7 +9,9 @@ VAR_LOAD_UNDEFINED = "undefined" -def find_symbols(nodes, parent_symbols=None): +def find_symbols( + nodes: t.Iterable[nodes.Node], parent_symbols: t.Optional["Symbols"] = None +) -> "Symbols": sym = Symbols(parent=parent_symbols) visitor = FrameSymbolVisitor(sym) for node in nodes: @@ -15,58 +19,71 @@ return sym -def symbols_for_node(node, parent_symbols=None): +def symbols_for_node( + node: nodes.Node, parent_symbols: t.Optional["Symbols"] = None +) -> "Symbols": sym = Symbols(parent=parent_symbols) sym.analyze_node(node) return sym -class Symbols(object): - def __init__(self, parent=None, level=None): +class Symbols: + def __init__( + self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None + ) -> None: if level is None: if parent is None: level = 0 else: level = parent.level + 1 - self.level = level - self.parent = parent - self.refs = {} - self.loads = {} - self.stores = set() - def analyze_node(self, node, **kwargs): + self.level: int = level + self.parent = parent + self.refs: t.Dict[str, str] = {} + self.loads: t.Dict[str, t.Any] = {} + self.stores: t.Set[str] = set() + + def analyze_node(self, node: nodes.Node, **kwargs: t.Any) -> None: visitor = RootVisitor(self) visitor.visit(node, **kwargs) - def _define_ref(self, name, load=None): - ident = "l_%d_%s" % (self.level, name) + def _define_ref( + self, name: str, load: t.Optional[t.Tuple[str, t.Optional[str]]] = None + ) -> str: + ident = f"l_{self.level}_{name}" self.refs[name] = ident if load is not None: self.loads[ident] = load return ident - def find_load(self, target): + def find_load(self, target: str) -> t.Optional[t.Any]: if target in self.loads: return self.loads[target] + if self.parent is not None: return self.parent.find_load(target) - def find_ref(self, name): + return None + + def find_ref(self, name: str) -> t.Optional[str]: if name in self.refs: return self.refs[name] + if self.parent is not None: return self.parent.find_ref(name) - def ref(self, name): + return None + + def ref(self, name: str) -> str: rv = self.find_ref(name) if rv is None: raise AssertionError( - "Tried to resolve a name to a reference that " - "was unknown to the frame (%r)" % name + "Tried to resolve a name to a reference that was" + f" unknown to the frame ({name!r})" ) return rv - def copy(self): + def copy(self) -> "Symbols": rv = object.__new__(self.__class__) rv.__dict__.update(self.__dict__) rv.refs = self.refs.copy() @@ -74,7 +91,7 @@ rv.stores = self.stores.copy() return rv - def store(self, name): + def store(self, name: str) -> None: self.stores.add(name) # If we have not see the name referenced yet, we need to figure @@ -92,17 +109,16 @@ # Otherwise we can just set it to undefined. 
self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None)) - def declare_parameter(self, name): + def declare_parameter(self, name: str) -> str: self.stores.add(name) return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None)) - def load(self, name): - target = self.find_ref(name) - if target is None: + def load(self, name: str) -> None: + if self.find_ref(name) is None: self._define_ref(name, load=(VAR_LOAD_RESOLVE, name)) - def branch_update(self, branch_symbols): - stores = {} + def branch_update(self, branch_symbols: t.Sequence["Symbols"]) -> None: + stores: t.Dict[str, int] = {} for branch in branch_symbols: for target in branch.stores: if target in self.stores: @@ -114,10 +130,11 @@ self.loads.update(sym.loads) self.stores.update(sym.stores) - for name, branch_count in iteritems(stores): + for name, branch_count in stores.items(): if branch_count == len(branch_symbols): continue - target = self.find_ref(name) + + target = self.find_ref(name) # type: ignore assert target is not None, "should not happen" if self.parent is not None: @@ -127,56 +144,64 @@ continue self.loads[target] = (VAR_LOAD_RESOLVE, name) - def dump_stores(self): - rv = {} - node = self + def dump_stores(self) -> t.Dict[str, str]: + rv: t.Dict[str, str] = {} + node: t.Optional["Symbols"] = self + while node is not None: - for name in node.stores: + for name in sorted(node.stores): if name not in rv: - rv[name] = self.find_ref(name) + rv[name] = self.find_ref(name) # type: ignore + node = node.parent + return rv - def dump_param_targets(self): + def dump_param_targets(self) -> t.Set[str]: rv = set() - node = self + node: t.Optional["Symbols"] = self + while node is not None: - for target, (instr, _) in iteritems(self.loads): + for target, (instr, _) in self.loads.items(): if instr == VAR_LOAD_PARAMETER: rv.add(target) + node = node.parent + return rv class RootVisitor(NodeVisitor): - def __init__(self, symbols): + def __init__(self, symbols: "Symbols") -> None: self.sym_visitor = FrameSymbolVisitor(symbols) - def _simple_visit(self, node, **kwargs): + def _simple_visit(self, node: nodes.Node, **kwargs: t.Any) -> None: for child in node.iter_child_nodes(): self.sym_visitor.visit(child) - visit_Template = ( - visit_Block - ) = ( - visit_Macro - ) = ( - visit_FilterBlock - ) = visit_Scope = visit_If = visit_ScopedEvalContextModifier = _simple_visit + visit_Template = _simple_visit + visit_Block = _simple_visit + visit_Macro = _simple_visit + visit_FilterBlock = _simple_visit + visit_Scope = _simple_visit + visit_If = _simple_visit + visit_ScopedEvalContextModifier = _simple_visit - def visit_AssignBlock(self, node, **kwargs): + def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None: for child in node.body: self.sym_visitor.visit(child) - def visit_CallBlock(self, node, **kwargs): + def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None: for child in node.iter_child_nodes(exclude=("call",)): self.sym_visitor.visit(child) - def visit_OverlayScope(self, node, **kwargs): + def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None: for child in node.body: self.sym_visitor.visit(child) - def visit_For(self, node, for_branch="body", **kwargs): + def visit_For( + self, node: nodes.For, for_branch: str = "body", **kwargs: t.Any + ) -> None: if for_branch == "body": self.sym_visitor.visit(node.target, store_as_param=True) branch = node.body @@ -189,28 +214,30 @@ return else: raise RuntimeError("Unknown for branch") - for item in branch or (): - 
self.sym_visitor.visit(item) - def visit_With(self, node, **kwargs): + if branch: + for item in branch: + self.sym_visitor.visit(item) + + def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None: for target in node.targets: self.sym_visitor.visit(target) for child in node.body: self.sym_visitor.visit(child) - def generic_visit(self, node, *args, **kwargs): - raise NotImplementedError( - "Cannot find symbols for %r" % node.__class__.__name__ - ) + def generic_visit(self, node: nodes.Node, *args: t.Any, **kwargs: t.Any) -> None: + raise NotImplementedError(f"Cannot find symbols for {type(node).__name__!r}") class FrameSymbolVisitor(NodeVisitor): """A visitor for `Frame.inspect`.""" - def __init__(self, symbols): + def __init__(self, symbols: "Symbols") -> None: self.symbols = symbols - def visit_Name(self, node, store_as_param=False, **kwargs): + def visit_Name( + self, node: nodes.Name, store_as_param: bool = False, **kwargs: t.Any + ) -> None: """All assignments to names go through this function.""" if store_as_param or node.ctx == "param": self.symbols.declare_parameter(node.name) @@ -219,72 +246,73 @@ elif node.ctx == "load": self.symbols.load(node.name) - def visit_NSRef(self, node, **kwargs): + def visit_NSRef(self, node: nodes.NSRef, **kwargs: t.Any) -> None: self.symbols.load(node.name) - def visit_If(self, node, **kwargs): + def visit_If(self, node: nodes.If, **kwargs: t.Any) -> None: self.visit(node.test, **kwargs) - original_symbols = self.symbols - def inner_visit(nodes): + def inner_visit(nodes: t.Iterable[nodes.Node]) -> "Symbols": self.symbols = rv = original_symbols.copy() + for subnode in nodes: self.visit(subnode, **kwargs) + self.symbols = original_symbols return rv body_symbols = inner_visit(node.body) elif_symbols = inner_visit(node.elif_) else_symbols = inner_visit(node.else_ or ()) - self.symbols.branch_update([body_symbols, elif_symbols, else_symbols]) - def visit_Macro(self, node, **kwargs): + def visit_Macro(self, node: nodes.Macro, **kwargs: t.Any) -> None: self.symbols.store(node.name) - def visit_Import(self, node, **kwargs): + def visit_Import(self, node: nodes.Import, **kwargs: t.Any) -> None: self.generic_visit(node, **kwargs) self.symbols.store(node.target) - def visit_FromImport(self, node, **kwargs): + def visit_FromImport(self, node: nodes.FromImport, **kwargs: t.Any) -> None: self.generic_visit(node, **kwargs) + for name in node.names: if isinstance(name, tuple): self.symbols.store(name[1]) else: self.symbols.store(name) - def visit_Assign(self, node, **kwargs): + def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) -> None: """Visit assignments in the correct order.""" self.visit(node.node, **kwargs) self.visit(node.target, **kwargs) - def visit_For(self, node, **kwargs): + def visit_For(self, node: nodes.For, **kwargs: t.Any) -> None: """Visiting stops at for blocks. However the block sequence is visited as part of the outer scope. 
""" self.visit(node.iter, **kwargs) - def visit_CallBlock(self, node, **kwargs): + def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None: self.visit(node.call, **kwargs) - def visit_FilterBlock(self, node, **kwargs): + def visit_FilterBlock(self, node: nodes.FilterBlock, **kwargs: t.Any) -> None: self.visit(node.filter, **kwargs) - def visit_With(self, node, **kwargs): + def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None: for target in node.values: self.visit(target) - def visit_AssignBlock(self, node, **kwargs): + def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None: """Stop visiting at block assigns.""" self.visit(node.target, **kwargs) - def visit_Scope(self, node, **kwargs): + def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) -> None: """Stop visiting at scopes.""" - def visit_Block(self, node, **kwargs): + def visit_Block(self, node: nodes.Block, **kwargs: t.Any) -> None: """Stop visiting at blocks.""" - def visit_OverlayScope(self, node, **kwargs): + def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None: """Do not visit into overlay scopes."""
diff --git a/third_party/jinja2/jinja2.gni b/third_party/jinja2/jinja2.gni index 9aec8f7..680a871 100644 --- a/third_party/jinja2/jinja2.gni +++ b/third_party/jinja2/jinja2.gni
@@ -2,10 +2,8 @@ jinja2_sources = [ "//third_party/jinja2/__init__.py", - "//third_party/jinja2/_compat.py", "//third_party/jinja2/_identifier.py", - "//third_party/jinja2/asyncfilters.py", - "//third_party/jinja2/asyncsupport.py", + "//third_party/jinja2/async_utils.py", "//third_party/jinja2/bccache.py", "//third_party/jinja2/compiler.py", "//third_party/jinja2/constants.py",
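A small sketch (not part of the diff) of how the GN ``//``-rooted paths in ``jinja2_sources`` map onto a checkout on disk; the ``checkout_root`` value and the shortened file list are assumptions for illustration only.

    import os

    jinja2_sources = [
        "//third_party/jinja2/__init__.py",
        "//third_party/jinja2/_identifier.py",
        "//third_party/jinja2/async_utils.py",
    ]

    checkout_root = "src"  # assumed path to the source root that "//" refers to
    for gn_path in jinja2_sources:
        rel = gn_path.lstrip("/")  # strip the GN source-root prefix
        print(rel, os.path.isfile(os.path.join(checkout_root, rel)))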
diff --git a/third_party/jinja2/lexer.py b/third_party/jinja2/lexer.py index 552356a1..aff7e9f 100644 --- a/third_party/jinja2/lexer.py +++ b/third_party/jinja2/lexer.py
@@ -1,32 +1,48 @@ -# -*- coding: utf-8 -*- """Implements a Jinja / Python combination lexer. The ``Lexer`` class is used to do some preprocessing. It filters out invalid operators like the bitshift operators we don't allow in templates. It separates template code and python code in expressions. """ import re +import typing as t from ast import literal_eval from collections import deque -from operator import itemgetter +from sys import intern -from ._compat import implements_iterator -from ._compat import intern -from ._compat import iteritems -from ._compat import text_type +from ._identifier import pattern as name_re from .exceptions import TemplateSyntaxError from .utils import LRUCache +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + # cache for the lexers. Exists in order to be able to have multiple # environments with the same lexer -_lexer_cache = LRUCache(50) +_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50) # type: ignore # static regular expressions -whitespace_re = re.compile(r"\s+", re.U) +whitespace_re = re.compile(r"\s+") newline_re = re.compile(r"(\r\n|\r|\n)") string_re = re.compile( r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S ) -integer_re = re.compile(r"(\d+_)*\d+") +integer_re = re.compile( + r""" + ( + 0b(_?[0-1])+ # binary + | + 0o(_?[0-7])+ # octal + | + 0x(_?[\da-f])+ # hex + | + [1-9](_?\d)* # decimal + | + 0(_?0)* # decimal zero + ) + """, + re.IGNORECASE | re.VERBOSE, +) float_re = re.compile( r""" (?<!\.) # doesn't start with a . @@ -41,20 +57,6 @@ re.IGNORECASE | re.VERBOSE, ) -try: - # check if this Python supports Unicode identifiers - compile("föö", "<unknown>", "eval") -except SyntaxError: - # Python 2, no Unicode support, use ASCII identifiers - name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*") - check_ident = False -else: - # Unicode support, import generated re pattern and set flag to use - # str.isidentifier to validate during lexing. 
- from ._identifier import pattern as name_re - - check_ident = True - # internal the tokens and keep references to them TOKEN_ADD = intern("add") TOKEN_ASSIGN = intern("assign") @@ -136,10 +138,10 @@ ";": TOKEN_SEMICOLON, } -reverse_operators = dict([(v, k) for k, v in iteritems(operators)]) +reverse_operators = {v: k for k, v in operators.items()} assert len(operators) == len(reverse_operators), "operators dropped" operator_re = re.compile( - "(%s)" % "|".join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x))) + f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})" ) ignored_tokens = frozenset( @@ -158,9 +160,10 @@ ) -def _describe_token_type(token_type): +def _describe_token_type(token_type: str) -> str: if token_type in reverse_operators: return reverse_operators[token_type] + return { TOKEN_COMMENT_BEGIN: "begin of comment", TOKEN_COMMENT_END: "end of comment", @@ -177,32 +180,35 @@ }.get(token_type, token_type) -def describe_token(token): +def describe_token(token: "Token") -> str: """Returns a description of the token.""" if token.type == TOKEN_NAME: return token.value + return _describe_token_type(token.type) -def describe_token_expr(expr): +def describe_token_expr(expr: str) -> str: """Like `describe_token` but for token expressions.""" if ":" in expr: type, value = expr.split(":", 1) + if type == TOKEN_NAME: return value else: type = expr + return _describe_token_type(type) -def count_newlines(value): +def count_newlines(value: str) -> int: """Count the number of newline characters in the string. This is useful for extensions that filter a stream. """ return len(newline_re.findall(value)) -def compile_rules(environment): +def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]: """Compiles all the rules from the environment into a list of rules.""" e = re.escape rules = [ @@ -243,36 +249,30 @@ return [x[1:] for x in sorted(rules, reverse=True)] -class Failure(object): +class Failure: """Class that raises a `TemplateSyntaxError` if called. Used by the `Lexer` to specify known errors. """ - def __init__(self, message, cls=TemplateSyntaxError): + def __init__( + self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError + ) -> None: self.message = message self.error_class = cls - def __call__(self, lineno, filename): + def __call__(self, lineno: int, filename: str) -> "te.NoReturn": raise self.error_class(self.message, lineno, filename) -class Token(tuple): - """Token class.""" +class Token(t.NamedTuple): + lineno: int + type: str + value: str - __slots__ = () - lineno, type, value = (property(itemgetter(x)) for x in range(3)) + def __str__(self) -> str: + return describe_token(self) - def __new__(cls, lineno, type, value): - return tuple.__new__(cls, (lineno, intern(str(type)), value)) - - def __str__(self): - if self.type in reverse_operators: - return reverse_operators[self.type] - elif self.type == "name": - return self.value - return self.type - - def test(self, expr): + def test(self, expr: str) -> bool: """Test a token against a token expression. This can either be a token type or ``'token_type:token_value'``. This can only test against string values and types. @@ -281,76 +281,75 @@ # passed an iterable of not interned strings. 
if self.type == expr: return True - elif ":" in expr: + + if ":" in expr: return expr.split(":", 1) == [self.type, self.value] + return False - def test_any(self, *iterable): + def test_any(self, *iterable: str) -> bool: """Test against multiple token expressions.""" - for expr in iterable: - if self.test(expr): - return True - return False - - def __repr__(self): - return "Token(%r, %r, %r)" % (self.lineno, self.type, self.value) + return any(self.test(expr) for expr in iterable) -@implements_iterator -class TokenStreamIterator(object): +class TokenStreamIterator: """The iterator for tokenstreams. Iterate over the stream until the eof token is reached. """ - def __init__(self, stream): + def __init__(self, stream: "TokenStream") -> None: self.stream = stream - def __iter__(self): + def __iter__(self) -> "TokenStreamIterator": return self - def __next__(self): + def __next__(self) -> Token: token = self.stream.current + if token.type is TOKEN_EOF: self.stream.close() - raise StopIteration() + raise StopIteration + next(self.stream) return token -@implements_iterator -class TokenStream(object): +class TokenStream: """A token stream is an iterable that yields :class:`Token`\\s. The parser however does not iterate over it but calls :meth:`next` to go one token ahead. The current active token is stored as :attr:`current`. """ - def __init__(self, generator, name, filename): + def __init__( + self, + generator: t.Iterable[Token], + name: t.Optional[str], + filename: t.Optional[str], + ): self._iter = iter(generator) - self._pushed = deque() + self._pushed: "te.Deque[Token]" = deque() self.name = name self.filename = filename self.closed = False self.current = Token(1, TOKEN_INITIAL, "") next(self) - def __iter__(self): + def __iter__(self) -> TokenStreamIterator: return TokenStreamIterator(self) - def __bool__(self): + def __bool__(self) -> bool: return bool(self._pushed) or self.current.type is not TOKEN_EOF - __nonzero__ = __bool__ # py2 - @property - def eos(self): + def eos(self) -> bool: """Are we at the end of the stream?""" return not self - def push(self, token): + def push(self, token: Token) -> None: """Push a token back to the stream.""" self._pushed.append(token) - def look(self): + def look(self) -> Token: """Look at the next token.""" old_token = next(self) result = self.current @@ -358,28 +357,31 @@ self.current = old_token return result - def skip(self, n=1): + def skip(self, n: int = 1) -> None: """Got n tokens ahead.""" for _ in range(n): next(self) - def next_if(self, expr): + def next_if(self, expr: str) -> t.Optional[Token]: """Perform the token test and return the token if it matched. Otherwise the return value is `None`. """ if self.current.test(expr): return next(self) - def skip_if(self, expr): + return None + + def skip_if(self, expr: str) -> bool: """Like :meth:`next_if` but only returns `True` or `False`.""" return self.next_if(expr) is not None - def __next__(self): + def __next__(self) -> Token: """Go one token ahead and return the old one. Use the built-in :func:`next` instead of calling this directly. 
""" rv = self.current + if self._pushed: self.current = self._pushed.popleft() elif self.current.type is not TOKEN_EOF: @@ -387,40 +389,41 @@ self.current = next(self._iter) except StopIteration: self.close() + return rv - def close(self): + def close(self) -> None: """Close the stream.""" self.current = Token(self.current.lineno, TOKEN_EOF, "") - self._iter = None + self._iter = iter(()) self.closed = True - def expect(self, expr): + def expect(self, expr: str) -> Token: """Expect a given token type and return it. This accepts the same argument as :meth:`jinja2.lexer.Token.test`. """ if not self.current.test(expr): expr = describe_token_expr(expr) + if self.current.type is TOKEN_EOF: raise TemplateSyntaxError( - "unexpected end of template, expected %r." % expr, + f"unexpected end of template, expected {expr!r}.", self.current.lineno, self.name, self.filename, ) + raise TemplateSyntaxError( - "expected token %r, got %r" % (expr, describe_token(self.current)), + f"expected token {expr!r}, got {describe_token(self.current)!r}", self.current.lineno, self.name, self.filename, ) - try: - return self.current - finally: - next(self) + + return next(self) -def get_lexer(environment): +def get_lexer(environment: "Environment") -> "Lexer": """Return a lexer which is probably cached.""" key = ( environment.block_start_string, @@ -437,9 +440,10 @@ environment.keep_trailing_newline, ) lexer = _lexer_cache.get(key) + if lexer is None: - lexer = Lexer(environment) - _lexer_cache[key] = lexer + _lexer_cache[key] = lexer = Lexer(environment) + return lexer @@ -452,11 +456,17 @@ # Even though it looks like a no-op, creating instances fails # without this. - def __new__(cls, *members, **kwargs): - return super(OptionalLStrip, cls).__new__(cls, members) + def __new__(cls, *members, **kwargs): # type: ignore + return super().__new__(cls, members) -class Lexer(object): +class _Rule(t.NamedTuple): + pattern: t.Pattern[str] + tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]] + command: t.Optional[str] + + +class Lexer: """Class that implements a lexer for a given environment. Automatically created by the environment class, usually you don't have to do that. @@ -464,21 +474,21 @@ Multiple environments can share the same lexer. """ - def __init__(self, environment): + def __init__(self, environment: "Environment") -> None: # shortcuts e = re.escape - def c(x): + def c(x: str) -> t.Pattern[str]: return re.compile(x, re.M | re.S) # lexing rules for tags - tag_rules = [ - (whitespace_re, TOKEN_WHITESPACE, None), - (float_re, TOKEN_FLOAT, None), - (integer_re, TOKEN_INTEGER, None), - (name_re, TOKEN_NAME, None), - (string_re, TOKEN_STRING, None), - (operator_re, TOKEN_OPERATOR, None), + tag_rules: t.List[_Rule] = [ + _Rule(whitespace_re, TOKEN_WHITESPACE, None), + _Rule(float_re, TOKEN_FLOAT, None), + _Rule(integer_re, TOKEN_INTEGER, None), + _Rule(name_re, TOKEN_NAME, None), + _Rule(string_re, TOKEN_STRING, None), + _Rule(operator_re, TOKEN_OPERATOR, None), ] # assemble the root lexing rule. because "|" is ungreedy @@ -489,70 +499,57 @@ # is required. root_tag_rules = compile_rules(environment) - # block suffix if trimming is enabled - block_suffix_re = environment.trim_blocks and "\\n?" 
or "" + block_start_re = e(environment.block_start_string) + block_end_re = e(environment.block_end_string) + comment_end_re = e(environment.comment_end_string) + variable_end_re = e(environment.variable_end_string) - # If lstrip is enabled, it should not be applied if there is any - # non-whitespace between the newline and block. - self.lstrip_unless_re = c(r"[^ \t]") if environment.lstrip_blocks else None + # block suffix if trimming is enabled + block_suffix_re = "\\n?" if environment.trim_blocks else "" + + self.lstrip_blocks = environment.lstrip_blocks self.newline_sequence = environment.newline_sequence self.keep_trailing_newline = environment.keep_trailing_newline + root_raw_re = ( + rf"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*" + rf"(?:\-{block_end_re}\s*|{block_end_re}))" + ) + root_parts_re = "|".join( + [root_raw_re] + [rf"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules] + ) + # global lexing rules - self.rules = { + self.rules: t.Dict[str, t.List[_Rule]] = { "root": [ # directives - ( - c( - "(.*?)(?:%s)" - % "|".join( - [ - r"(?P<raw_begin>%s(\-|\+|)\s*raw\s*(?:\-%s\s*|%s))" - % ( - e(environment.block_start_string), - e(environment.block_end_string), - e(environment.block_end_string), - ) - ] - + [ - r"(?P<%s>%s(\-|\+|))" % (n, r) - for n, r in root_tag_rules - ] - ) - ), - OptionalLStrip(TOKEN_DATA, "#bygroup"), + _Rule( + c(rf"(.*?)(?:{root_parts_re})"), + OptionalLStrip(TOKEN_DATA, "#bygroup"), # type: ignore "#bygroup", ), # data - (c(".+"), TOKEN_DATA, None), + _Rule(c(".+"), TOKEN_DATA, None), ], # comments TOKEN_COMMENT_BEGIN: [ - ( + _Rule( c( - r"(.*?)((?:\-%s\s*|%s)%s)" - % ( - e(environment.comment_end_string), - e(environment.comment_end_string), - block_suffix_re, - ) + rf"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*" + rf"|{comment_end_re}{block_suffix_re}))" ), (TOKEN_COMMENT, TOKEN_COMMENT_END), "#pop", ), - (c("(.)"), (Failure("Missing end of comment tag"),), None), + _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None), ], # blocks TOKEN_BLOCK_BEGIN: [ - ( + _Rule( c( - r"(?:\-%s\s*|%s)%s" - % ( - e(environment.block_end_string), - e(environment.block_end_string), - block_suffix_re, - ) + rf"(?:\+{block_end_re}|\-{block_end_re}\s*" + rf"|{block_end_re}{block_suffix_re})" ), TOKEN_BLOCK_END, "#pop", @@ -561,14 +558,8 @@ + tag_rules, # variables TOKEN_VARIABLE_BEGIN: [ - ( - c( - r"\-%s\s*|%s" - % ( - e(environment.variable_end_string), - e(environment.variable_end_string), - ) - ), + _Rule( + c(rf"\-{variable_end_re}\s*|{variable_end_re}"), TOKEN_VARIABLE_END, "#pop", ) @@ -576,29 +567,25 @@ + tag_rules, # raw block TOKEN_RAW_BEGIN: [ - ( + _Rule( c( - r"(.*?)((?:%s(\-|\+|))\s*endraw\s*(?:\-%s\s*|%s%s))" - % ( - e(environment.block_start_string), - e(environment.block_end_string), - e(environment.block_end_string), - block_suffix_re, - ) + rf"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*" + rf"(?:\+{block_end_re}|\-{block_end_re}\s*" + rf"|{block_end_re}{block_suffix_re}))" ), - OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END), + OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END), # type: ignore "#pop", ), - (c("(.)"), (Failure("Missing end of raw directive"),), None), + _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None), ], # line statements TOKEN_LINESTATEMENT_BEGIN: [ - (c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop") + _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop") ] + tag_rules, # line comments TOKEN_LINECOMMENT_BEGIN: [ - ( + _Rule( c(r"(.*?)()(?=\n|$)"), (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END), "#pop", @@ -606,23 
+593,39 @@ ], } - def _normalize_newlines(self, value): - """Called for strings and template data to normalize it to unicode.""" + def _normalize_newlines(self, value: str) -> str: + """Replace all newlines with the configured sequence in strings + and template data. + """ return newline_re.sub(self.newline_sequence, value) - def tokenize(self, source, name=None, filename=None, state=None): + def tokenize( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> TokenStream: """Calls tokeniter + tokenize and wraps it in a token stream.""" stream = self.tokeniter(source, name, filename, state) return TokenStream(self.wrap(stream, name, filename), name, filename) - def wrap(self, stream, name=None, filename=None): + def wrap( + self, + stream: t.Iterable[t.Tuple[int, str, str]], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> t.Iterator[Token]: """This is called with the stream as returned by `tokenize` and wraps every token in a :class:`Token` and converts the value. """ - for lineno, token, value in stream: + for lineno, token, value_str in stream: if token in ignored_tokens: continue - elif token == TOKEN_LINESTATEMENT_BEGIN: + + value: t.Any = value_str + + if token == TOKEN_LINESTATEMENT_BEGIN: token = TOKEN_BLOCK_BEGIN elif token == TOKEN_LINESTATEMENT_END: token = TOKEN_BLOCK_END @@ -630,12 +633,13 @@ elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END): continue elif token == TOKEN_DATA: - value = self._normalize_newlines(value) + value = self._normalize_newlines(value_str) elif token == "keyword": - token = value + token = value_str elif token == TOKEN_NAME: - value = str(value) - if check_ident and not value.isidentifier(): + value = value_str + + if not value.isidentifier(): raise TemplateSyntaxError( "Invalid character in identifier", lineno, name, filename ) @@ -643,51 +647,62 @@ # try to unescape string try: value = ( - self._normalize_newlines(value[1:-1]) + self._normalize_newlines(value_str[1:-1]) .encode("ascii", "backslashreplace") .decode("unicode-escape") ) except Exception as e: msg = str(e).split(":")[-1].strip() - raise TemplateSyntaxError(msg, lineno, name, filename) + raise TemplateSyntaxError(msg, lineno, name, filename) from e elif token == TOKEN_INTEGER: - value = int(value.replace("_", "")) + value = int(value_str.replace("_", ""), 0) elif token == TOKEN_FLOAT: # remove all "_" first to support more Python versions - value = literal_eval(value.replace("_", "")) + value = literal_eval(value_str.replace("_", "")) elif token == TOKEN_OPERATOR: - token = operators[value] + token = operators[value_str] + yield Token(lineno, token, value) - def tokeniter(self, source, name, filename=None, state=None): + def tokeniter( + self, + source: str, + name: t.Optional[str], + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> t.Iterator[t.Tuple[int, str, str]]: """This method tokenizes the text and returns the tokens in a - generator. Use this method if you just want to tokenize a template. + generator. Use this method if you just want to tokenize a template. + + .. versionchanged:: 3.0 + Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line + breaks. 
""" - source = text_type(source) - lines = source.splitlines() - if self.keep_trailing_newline and source: - for newline in ("\r\n", "\r", "\n"): - if source.endswith(newline): - lines.append("") - break + lines = newline_re.split(source)[::2] + + if not self.keep_trailing_newline and lines[-1] == "": + del lines[-1] + source = "\n".join(lines) pos = 0 lineno = 1 stack = ["root"] + if state is not None and state != "root": assert state in ("variable", "block"), "invalid state" stack.append(state + "_begin") + statetokens = self.rules[stack[-1]] source_length = len(source) - balancing_stack = [] - lstrip_unless_re = self.lstrip_unless_re + balancing_stack: t.List[str] = [] newlines_stripped = 0 line_starting = True - while 1: + while True: # tokenizer loop for regex, tokens, new_state in statetokens: m = regex.match(source, pos) + # if no match we try again with the next rule if m is None: continue @@ -705,13 +720,12 @@ # tuples support more options if isinstance(tokens, tuple): - groups = m.groups() + groups: t.Sequence[str] = m.groups() if isinstance(tokens, OptionalLStrip): # Rule supports lstrip. Match will look like # text, block type, whitespace control, type, control, ... text = groups[0] - # Skipping the text and first type, every other group is the # whitespace control for each type. One of the groups will be # -, +, or empty string instead of None. @@ -721,22 +735,23 @@ # Strip all whitespace between the text and the tag. stripped = text.rstrip() newlines_stripped = text[len(stripped) :].count("\n") - groups = (stripped,) + groups[1:] + groups = [stripped, *groups[1:]] elif ( # Not marked for preserving whitespace. strip_sign != "+" # lstrip is enabled. - and lstrip_unless_re is not None + and self.lstrip_blocks # Not a variable expression. and not m.groupdict().get(TOKEN_VARIABLE_BEGIN) ): # The start of text between the last newline and the tag. l_pos = text.rfind("\n") + 1 + if l_pos > 0 or line_starting: # If there's only whitespace between the newline and the # tag, strip it. - if not lstrip_unless_re.search(text, l_pos): - groups = (text[:l_pos],) + groups[1:] + if whitespace_re.fullmatch(text, l_pos): + groups = [text[:l_pos], *groups[1:]] for idx, token in enumerate(tokens): # failure group @@ -746,28 +761,30 @@ # yield for the current token the first named # group that matched elif token == "#bygroup": - for key, value in iteritems(m.groupdict()): + for key, value in m.groupdict().items(): if value is not None: yield lineno, key, value lineno += value.count("\n") break else: raise RuntimeError( - "%r wanted to resolve " - "the token dynamically" - " but no group matched" % regex + f"{regex!r} wanted to resolve the token dynamically" + " but no group matched" ) # normal group else: data = groups[idx] + if data or token not in ignore_if_empty: yield lineno, token, data + lineno += data.count("\n") + newlines_stripped newlines_stripped = 0 # strings as token just are yielded as it. 
else: data = m.group() + # update brace/parentheses balance if tokens == TOKEN_OPERATOR: if data == "{": @@ -779,24 +796,26 @@ elif data in ("}", ")", "]"): if not balancing_stack: raise TemplateSyntaxError( - "unexpected '%s'" % data, lineno, name, filename + f"unexpected '{data}'", lineno, name, filename ) + expected_op = balancing_stack.pop() + if expected_op != data: raise TemplateSyntaxError( - "unexpected '%s', " - "expected '%s'" % (data, expected_op), + f"unexpected '{data}', expected '{expected_op}'", lineno, name, filename, ) + # yield items if data or tokens not in ignore_if_empty: yield lineno, tokens, data + lineno += data.count("\n") line_starting = m.group()[-1:] == "\n" - # fetch new position into new variable so that we can check # if there is a internal parsing error which would result # in an infinite loop @@ -809,27 +828,28 @@ stack.pop() # resolve the new state by group checking elif new_state == "#bygroup": - for key, value in iteritems(m.groupdict()): + for key, value in m.groupdict().items(): if value is not None: stack.append(key) break else: raise RuntimeError( - "%r wanted to resolve the " - "new state dynamically but" - " no group matched" % regex + f"{regex!r} wanted to resolve the new state dynamically" + f" but no group matched" ) # direct state name given else: stack.append(new_state) + statetokens = self.rules[stack[-1]] # we are still at the same position and no stack change. # this means a loop without break condition, avoid that and # raise error elif pos2 == pos: raise RuntimeError( - "%r yielded empty string without stack change" % regex + f"{regex!r} yielded empty string without stack change" ) + # publish new function and start again pos = pos2 break @@ -839,10 +859,8 @@ # end of text if pos >= source_length: return + # something went wrong raise TemplateSyntaxError( - "unexpected char %r at %d" % (source[pos], pos), - lineno, - name, - filename, + f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename )
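A usage sketch (not part of the diff), assuming the public ``Environment.lex()`` entry point, which drives the ``Lexer.tokeniter()`` generator rewritten above and yields raw ``(lineno, token_type, value)`` triples.

    from jinja2 import Environment

    env = Environment()
    source = "Hello {{ user.name | upper }}!"

    # Environment.lex() exposes tokeniter() output before it is wrapped in Token objects.
    for lineno, token_type, value in env.lex(source):
        print(lineno, token_type, value)
    # Emits token types such as data, variable_begin, whitespace, name, dot,
    # pipe and variable_end, in source order.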
diff --git a/third_party/jinja2/loaders.py b/third_party/jinja2/loaders.py index 457c4b5..d2f9809 100644 --- a/third_party/jinja2/loaders.py +++ b/third_party/jinja2/loaders.py
@@ -1,33 +1,37 @@ -# -*- coding: utf-8 -*- """API and implementations for loading templates from different data sources. """ +import importlib.util import os +import posixpath import sys +import typing as t import weakref +import zipimport +from collections import abc from hashlib import sha1 -from os import path +from importlib import import_module from types import ModuleType -from ._compat import abc -from ._compat import fspath -from ._compat import iteritems -from ._compat import string_types from .exceptions import TemplateNotFound from .utils import internalcode from .utils import open_if_exists +if t.TYPE_CHECKING: + from .environment import Environment + from .environment import Template -def split_template_path(template): + +def split_template_path(template: str) -> t.List[str]: """Split a path into segments and perform a sanity check. If it detects '..' in the path it will raise a `TemplateNotFound` error. """ pieces = [] for piece in template.split("/"): if ( - path.sep in piece - or (path.altsep and path.altsep in piece) - or piece == path.pardir + os.path.sep in piece + or (os.path.altsep and os.path.altsep in piece) + or piece == os.path.pardir ): raise TemplateNotFound(template) elif piece and piece != ".": @@ -35,7 +39,7 @@ return pieces -class BaseLoader(object): +class BaseLoader: """Baseclass for all loaders. Subclass this and override `get_source` to implement a custom loading mechanism. The environment provides a `get_template` method that calls the loader's `load` method to get the @@ -57,8 +61,8 @@ if not exists(path): raise TemplateNotFound(template) mtime = getmtime(path) - with file(path) as f: - source = f.read().decode('utf-8') + with open(path) as f: + source = f.read() return source, path, lambda: mtime == getmtime(path) """ @@ -68,16 +72,18 @@ #: .. versionadded:: 2.4 has_source_access = True - def get_source(self, environment, template): + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: """Get the template source, filename and reload helper for a template. It's passed the environment and template name and has to return a tuple in the form ``(source, filename, uptodate)`` or raise a `TemplateNotFound` error if it can't locate the template. The source part of the returned tuple must be the source of the - template as unicode string or a ASCII bytestring. The filename should - be the name of the file on the filesystem if it was loaded from there, - otherwise `None`. The filename is used by python for the tracebacks + template as a string. The filename should be the name of the + file on the filesystem if it was loaded from there, otherwise + ``None``. The filename is used by Python for the tracebacks if no loader extension is used. The last item in the tuple is the `uptodate` function. If auto @@ -88,18 +94,23 @@ """ if not self.has_source_access: raise RuntimeError( - "%s cannot provide access to the source" % self.__class__.__name__ + f"{type(self).__name__} cannot provide access to the source" ) raise TemplateNotFound(template) - def list_templates(self): + def list_templates(self) -> t.List[str]: """Iterates over all templates. If the loader does not support that it should raise a :exc:`TypeError` which is the default behavior. 
""" raise TypeError("this loader cannot iterate over all templates") @internalcode - def load(self, environment, name, globals=None): + def load( + self, + environment: "Environment", + name: str, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": """Loads a template. This method looks up the template in the cache or loads one by calling :meth:`get_source`. Subclasses should not override this method as loaders working on collections of other @@ -139,44 +150,53 @@ class FileSystemLoader(BaseLoader): - """Loads templates from the file system. This loader can find templates - in folders on the file system and is the preferred way to load them. + """Load templates from a directory in the file system. - The loader takes the path to the templates as string, or if multiple - locations are wanted a list of them which is then looked up in the - given order:: + The path can be relative or absolute. Relative paths are relative to + the current working directory. - >>> loader = FileSystemLoader('/path/to/templates') - >>> loader = FileSystemLoader(['/path/to/templates', '/other/path']) + .. code-block:: python - Per default the template encoding is ``'utf-8'`` which can be changed - by setting the `encoding` parameter to something else. + loader = FileSystemLoader("templates") - To follow symbolic links, set the *followlinks* parameter to ``True``:: + A list of paths can be given. The directories will be searched in + order, stopping at the first matching template. - >>> loader = FileSystemLoader('/path/to/templates', followlinks=True) + .. code-block:: python + + loader = FileSystemLoader(["/override/templates", "/default/templates"]) + + :param searchpath: A path, or list of paths, to the directory that + contains the templates. + :param encoding: Use this encoding to read the text from template + files. + :param followlinks: Follow symbolic links in the path. .. versionchanged:: 2.8 - The ``followlinks`` parameter was added. + Added the ``followlinks`` parameter. """ - def __init__(self, searchpath, encoding="utf-8", followlinks=False): - if not isinstance(searchpath, abc.Iterable) or isinstance( - searchpath, string_types - ): + def __init__( + self, + searchpath: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]], + encoding: str = "utf-8", + followlinks: bool = False, + ) -> None: + if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath, str): searchpath = [searchpath] - # In Python 3.5, os.path.join doesn't support Path. This can be - # simplified to list(searchpath) when Python 3.5 is dropped. - self.searchpath = [fspath(p) for p in searchpath] - + self.searchpath = [os.fspath(p) for p in searchpath] self.encoding = encoding self.followlinks = followlinks - def get_source(self, environment, template): + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, str, t.Callable[[], bool]]: pieces = split_template_path(template) for searchpath in self.searchpath: - filename = path.join(searchpath, *pieces) + # Use posixpath even on Windows to avoid "drive:" or UNC + # segments breaking out of the search directory. 
+ filename = posixpath.join(searchpath, *pieces) f = open_if_exists(filename) if f is None: continue @@ -185,18 +205,19 @@ finally: f.close() - mtime = path.getmtime(filename) + mtime = os.path.getmtime(filename) - def uptodate(): + def uptodate() -> bool: try: - return path.getmtime(filename) == mtime + return os.path.getmtime(filename) == mtime except OSError: return False - return contents, filename, uptodate + # Use normpath to convert Windows altsep to sep. + return contents, os.path.normpath(filename), uptodate raise TemplateNotFound(template) - def list_templates(self): + def list_templates(self) -> t.List[str]: found = set() for searchpath in self.searchpath: walk_dir = os.walk(searchpath, followlinks=self.followlinks) @@ -215,105 +236,199 @@ class PackageLoader(BaseLoader): - """Load templates from python eggs or packages. It is constructed with - the name of the python package and the path to the templates in that - package:: + """Load templates from a directory in a Python package. - loader = PackageLoader('mypackage', 'views') + :param package_name: Import name of the package that contains the + template directory. + :param package_path: Directory within the imported package that + contains the templates. + :param encoding: Encoding of template files. - If the package path is not given, ``'templates'`` is assumed. + The following example looks up templates in the ``pages`` directory + within the ``project.ui`` package. - Per default the template encoding is ``'utf-8'`` which can be changed - by setting the `encoding` parameter to something else. Due to the nature - of eggs it's only possible to reload templates if the package was loaded - from the file system and not a zip file. + .. code-block:: python + + loader = PackageLoader("project.ui", "pages") + + Only packages installed as directories (standard pip behavior) or + zip/egg files (less common) are supported. The Python API for + introspecting data in packages is too limited to support other + installation methods the way this loader requires. + + There is limited support for :pep:`420` namespace packages. The + template directory is assumed to only be in one namespace + contributor. Zip files contributing to a namespace are not + supported. + + .. versionchanged:: 3.0 + No longer uses ``setuptools`` as a dependency. + + .. versionchanged:: 3.0 + Limited PEP 420 namespace package support. """ - def __init__(self, package_name, package_path="templates", encoding="utf-8"): - from pkg_resources import DefaultProvider - from pkg_resources import get_provider - from pkg_resources import ResourceManager + def __init__( + self, + package_name: str, + package_path: "str" = "templates", + encoding: str = "utf-8", + ) -> None: + package_path = os.path.normpath(package_path).rstrip(os.path.sep) - provider = get_provider(package_name) - self.encoding = encoding - self.manager = ResourceManager() - self.filesystem_bound = isinstance(provider, DefaultProvider) - self.provider = provider + # normpath preserves ".", which isn't valid in zip paths. + if package_path == os.path.curdir: + package_path = "" + elif package_path[:2] == os.path.curdir + os.path.sep: + package_path = package_path[2:] + self.package_path = package_path + self.package_name = package_name + self.encoding = encoding - def get_source(self, environment, template): - pieces = split_template_path(template) - p = "/".join((self.package_path,) + tuple(pieces)) + # Make sure the package exists. This also makes namespace + # packages work, otherwise get_loader returns None. 
+ import_module(package_name) + spec = importlib.util.find_spec(package_name) + assert spec is not None, "An import spec was not found for the package." + loader = spec.loader + assert loader is not None, "A loader was not found for the package." + self._loader = loader + self._archive = None + template_root = None - if not self.provider.has_resource(p): - raise TemplateNotFound(template) + if isinstance(loader, zipimport.zipimporter): + self._archive = loader.archive + pkgdir = next(iter(spec.submodule_search_locations)) # type: ignore + template_root = os.path.join(pkgdir, package_path).rstrip(os.path.sep) + else: + roots: t.List[str] = [] - filename = uptodate = None + # One element for regular packages, multiple for namespace + # packages, or None for single module file. + if spec.submodule_search_locations: + roots.extend(spec.submodule_search_locations) + # A single module file, use the parent directory instead. + elif spec.origin is not None: + roots.append(os.path.dirname(spec.origin)) - if self.filesystem_bound: - filename = self.provider.get_resource_filename(self.manager, p) - mtime = path.getmtime(filename) + for root in roots: + root = os.path.join(root, package_path) - def uptodate(): - try: - return path.getmtime(filename) == mtime - except OSError: - return False + if os.path.isdir(root): + template_root = root + break - source = self.provider.get_resource_string(self.manager, p) - return source.decode(self.encoding), filename, uptodate + if template_root is None: + raise ValueError( + f"The {package_name!r} package was not installed in a" + " way that PackageLoader understands." + ) - def list_templates(self): - path = self.package_path + self._template_root = template_root - if path[:2] == "./": - path = path[2:] - elif path == ".": - path = "" + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, str, t.Optional[t.Callable[[], bool]]]: + # Use posixpath even on Windows to avoid "drive:" or UNC + # segments breaking out of the search directory. Use normpath to + # convert Windows altsep to sep. + p = os.path.normpath( + posixpath.join(self._template_root, *split_template_path(template)) + ) + up_to_date: t.Optional[t.Callable[[], bool]] - offset = len(path) - results = [] + if self._archive is None: + # Package is a directory. + if not os.path.isfile(p): + raise TemplateNotFound(template) - def _walk(path): - for filename in self.provider.resource_listdir(path): - fullname = path + "/" + filename + with open(p, "rb") as f: + source = f.read() - if self.provider.resource_isdir(fullname): - _walk(fullname) - else: - results.append(fullname[offset:].lstrip("/")) + mtime = os.path.getmtime(p) - _walk(path) + def up_to_date() -> bool: + return os.path.isfile(p) and os.path.getmtime(p) == mtime + + else: + # Package is a zip file. + try: + source = self._loader.get_data(p) # type: ignore + except OSError as e: + raise TemplateNotFound(template) from e + + # Could use the zip's mtime for all template mtimes, but + # would need to safely reload the module if it's out of + # date, so just report it as always current. + up_to_date = None + + return source.decode(self.encoding), p, up_to_date + + def list_templates(self) -> t.List[str]: + results: t.List[str] = [] + + if self._archive is None: + # Package is a directory. 
+ offset = len(self._template_root) + + for dirpath, _, filenames in os.walk(self._template_root): + dirpath = dirpath[offset:].lstrip(os.path.sep) + results.extend( + os.path.join(dirpath, name).replace(os.path.sep, "/") + for name in filenames + ) + else: + if not hasattr(self._loader, "_files"): + raise TypeError( + "This zip import does not have the required" + " metadata to list templates." + ) + + # Package is a zip file. + prefix = ( + self._template_root[len(self._archive) :].lstrip(os.path.sep) + + os.path.sep + ) + offset = len(prefix) + + for name in self._loader._files.keys(): # type: ignore + # Find names under the templates directory that aren't directories. + if name.startswith(prefix) and name[-1] != os.path.sep: + results.append(name[offset:].replace(os.path.sep, "/")) + results.sort() return results class DictLoader(BaseLoader): - """Loads a template from a python dict. It's passed a dict of unicode - strings bound to template names. This loader is useful for unittesting: + """Loads a template from a Python dict mapping template names to + template source. This loader is useful for unittesting: >>> loader = DictLoader({'index.html': 'source here'}) Because auto reloading is rarely useful this is disabled per default. """ - def __init__(self, mapping): + def __init__(self, mapping: t.Mapping[str, str]) -> None: self.mapping = mapping - def get_source(self, environment, template): + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, None, t.Callable[[], bool]]: if template in self.mapping: source = self.mapping[template] return source, None, lambda: source == self.mapping.get(template) raise TemplateNotFound(template) - def list_templates(self): + def list_templates(self) -> t.List[str]: return sorted(self.mapping) class FunctionLoader(BaseLoader): """A loader that is passed a function which does the loading. The function receives the name of the template and has to return either - an unicode string with the template source, a tuple in the form ``(source, + a string with the template source, a tuple in the form ``(source, filename, uptodatefunc)`` or `None` if the template does not exist. >>> def load_template(name): @@ -328,15 +443,30 @@ return value. """ - def __init__(self, load_func): + def __init__( + self, + load_func: t.Callable[ + [str], + t.Optional[ + t.Union[ + str, t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]] + ] + ], + ], + ) -> None: self.load_func = load_func - def get_source(self, environment, template): + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: rv = self.load_func(template) + if rv is None: raise TemplateNotFound(template) - elif isinstance(rv, string_types): + + if isinstance(rv, str): return rv, None, None + return rv @@ -355,40 +485,49 @@ by loading ``'app2/index.html'`` the file from the second. 
""" - def __init__(self, mapping, delimiter="/"): + def __init__( + self, mapping: t.Mapping[str, BaseLoader], delimiter: str = "/" + ) -> None: self.mapping = mapping self.delimiter = delimiter - def get_loader(self, template): + def get_loader(self, template: str) -> t.Tuple[BaseLoader, str]: try: prefix, name = template.split(self.delimiter, 1) loader = self.mapping[prefix] - except (ValueError, KeyError): - raise TemplateNotFound(template) + except (ValueError, KeyError) as e: + raise TemplateNotFound(template) from e return loader, name - def get_source(self, environment, template): + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: loader, name = self.get_loader(template) try: return loader.get_source(environment, name) - except TemplateNotFound: + except TemplateNotFound as e: # re-raise the exception with the correct filename here. # (the one that includes the prefix) - raise TemplateNotFound(template) + raise TemplateNotFound(template) from e @internalcode - def load(self, environment, name, globals=None): + def load( + self, + environment: "Environment", + name: str, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": loader, local_name = self.get_loader(name) try: return loader.load(environment, local_name, globals) - except TemplateNotFound: + except TemplateNotFound as e: # re-raise the exception with the correct filename here. # (the one that includes the prefix) - raise TemplateNotFound(name) + raise TemplateNotFound(name) from e - def list_templates(self): + def list_templates(self) -> t.List[str]: result = [] - for prefix, loader in iteritems(self.mapping): + for prefix, loader in self.mapping.items(): for template in loader.list_templates(): result.append(prefix + self.delimiter + template) return result @@ -408,10 +547,12 @@ from a different location. """ - def __init__(self, loaders): + def __init__(self, loaders: t.Sequence[BaseLoader]) -> None: self.loaders = loaders - def get_source(self, environment, template): + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: for loader in self.loaders: try: return loader.get_source(environment, template) @@ -420,7 +561,12 @@ raise TemplateNotFound(template) @internalcode - def load(self, environment, name, globals=None): + def load( + self, + environment: "Environment", + name: str, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": for loader in self.loaders: try: return loader.load(environment, name, globals) @@ -428,7 +574,7 @@ pass raise TemplateNotFound(name) - def list_templates(self): + def list_templates(self) -> t.List[str]: found = set() for loader in self.loaders: found.update(loader.list_templates()) @@ -454,17 +600,19 @@ has_source_access = False - def __init__(self, path): - package_name = "_jinja2_module_templates_%x" % id(self) + def __init__( + self, path: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]] + ) -> None: + package_name = f"_jinja2_module_templates_{id(self):x}" # create a fake module that looks for the templates in the # path given. 
mod = _TemplateModule(package_name) - if not isinstance(path, abc.Iterable) or isinstance(path, string_types): + if not isinstance(path, abc.Iterable) or isinstance(path, str): path = [path] - mod.__path__ = [fspath(p) for p in path] + mod.__path__ = [os.fspath(p) for p in path] sys.modules[package_name] = weakref.proxy( mod, lambda x: sys.modules.pop(package_name, None) @@ -477,28 +625,37 @@ self.package_name = package_name @staticmethod - def get_template_key(name): + def get_template_key(name: str) -> str: return "tmpl_" + sha1(name.encode("utf-8")).hexdigest() @staticmethod - def get_module_filename(name): + def get_module_filename(name: str) -> str: return ModuleLoader.get_template_key(name) + ".py" @internalcode - def load(self, environment, name, globals=None): + def load( + self, + environment: "Environment", + name: str, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": key = self.get_template_key(name) - module = "%s.%s" % (self.package_name, key) + module = f"{self.package_name}.{key}" mod = getattr(self.module, module, None) + if mod is None: try: mod = __import__(module, None, None, ["root"]) - except ImportError: - raise TemplateNotFound(name) + except ImportError as e: + raise TemplateNotFound(name) from e # remove the entry from sys.modules, we only want the attribute # on the module object we have stored on the loader. sys.modules.pop(module, None) + if globals is None: + globals = {} + return environment.template_class.from_module_dict( environment, mod.__dict__, globals )
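The loaders.py hunks above replace the pkg_resources-based PackageLoader with an importlib-based lookup and drop the Python 2 compat shims, while keeping the other loaders' contracts intact. A rough usage sketch (not part of the patch), assuming a hypothetical installed package "myapp" that ships a "templates/" directory:

    import jinja2

    # PackageLoader now resolves the package via importlib; it handles
    # regular packages, namespace packages, and zipimported packages.
    env = jinja2.Environment(loader=jinja2.PackageLoader("myapp", "templates"))

    # FunctionLoader keeps its contract: return template source, a
    # (source, filename, uptodate) tuple, or None for a missing template.
    def load_template(name):
        if name == "index.html":
            return "Hello {{ user }}!"
        return None

    fn_env = jinja2.Environment(loader=jinja2.FunctionLoader(load_template))
    print(fn_env.get_template("index.html").render(user="world"))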
diff --git a/third_party/jinja2/meta.py b/third_party/jinja2/meta.py
index 3795aac..0057d6ea 100644
--- a/third_party/jinja2/meta.py
+++ b/third_party/jinja2/meta.py
@@ -1,32 +1,36 @@ -# -*- coding: utf-8 -*- """Functions that expose information about templates that might be interesting for introspection. """ +import typing as t + from . import nodes -from ._compat import iteritems -from ._compat import string_types from .compiler import CodeGenerator +from .compiler import Frame + +if t.TYPE_CHECKING: + from .environment import Environment class TrackingCodeGenerator(CodeGenerator): """We abuse the code generator for introspection.""" - def __init__(self, environment): - CodeGenerator.__init__(self, environment, "<introspection>", "<introspection>") - self.undeclared_identifiers = set() + def __init__(self, environment: "Environment") -> None: + super().__init__(environment, "<introspection>", "<introspection>") + self.undeclared_identifiers: t.Set[str] = set() - def write(self, x): + def write(self, x: str) -> None: """Don't write.""" - def enter_frame(self, frame): + def enter_frame(self, frame: Frame) -> None: """Remember all undeclared identifiers.""" - CodeGenerator.enter_frame(self, frame) - for _, (action, param) in iteritems(frame.symbols.loads): + super().enter_frame(frame) + + for _, (action, param) in frame.symbols.loads.items(): if action == "resolve" and param not in self.environment.globals: self.undeclared_identifiers.add(param) -def find_undeclared_variables(ast): +def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]: """Returns a set of all variables in the AST that will be looked up from the context at runtime. Because at compile time it's not known which variables will be used depending on the path the execution takes at @@ -35,7 +39,7 @@ >>> from jinja2 import Environment, meta >>> env = Environment() >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}') - >>> meta.find_undeclared_variables(ast) == set(['bar']) + >>> meta.find_undeclared_variables(ast) == {'bar'} True .. admonition:: Implementation @@ -45,12 +49,16 @@ :exc:`TemplateAssertionError` during compilation and as a matter of fact this function can currently raise that exception as well. """ - codegen = TrackingCodeGenerator(ast.environment) + codegen = TrackingCodeGenerator(ast.environment) # type: ignore codegen.visit(ast) return codegen.undeclared_identifiers -def find_referenced_templates(ast): +_ref_types = (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include) +_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include] + + +def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]]: """Finds all the referenced templates from the AST. This will return an iterator over all the hardcoded template extensions, inclusions and imports. If dynamic inheritance or inclusion is used, `None` will be @@ -65,17 +73,19 @@ This function is useful for dependency tracking. For example if you want to rebuild parts of the website after a layout template has changed. 
""" - for node in ast.find_all( - (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include) - ): - if not isinstance(node.template, nodes.Const): + template_name: t.Any + + for node in ast.find_all(_ref_types): + template: nodes.Expr = node.template # type: ignore + + if not isinstance(template, nodes.Const): # a tuple with some non consts in there - if isinstance(node.template, (nodes.Tuple, nodes.List)): - for template_name in node.template.items: + if isinstance(template, (nodes.Tuple, nodes.List)): + for template_name in template.items: # something const, only yield the strings and ignore # non-string consts that really just make no sense if isinstance(template_name, nodes.Const): - if isinstance(template_name.value, string_types): + if isinstance(template_name.value, str): yield template_name.value # something dynamic in there else: @@ -85,16 +95,16 @@ yield None continue # constant is a basestring, direct template name - if isinstance(node.template.value, string_types): - yield node.template.value + if isinstance(template.value, str): + yield template.value # a tuple or list (latter *should* not happen) made of consts, # yield the consts that are strings. We could warn here for # non string values elif isinstance(node, nodes.Include) and isinstance( - node.template.value, (tuple, list) + template.value, (tuple, list) ): - for template_name in node.template.value: - if isinstance(template_name, string_types): + for template_name in template.value: + if isinstance(template_name, str): yield template_name # something else we don't care about, we could warn here else:
diff --git a/third_party/jinja2/nativetypes.py b/third_party/jinja2/nativetypes.py
index a9ead4e2..ac086103 100644
--- a/third_party/jinja2/nativetypes.py
+++ b/third_party/jinja2/nativetypes.py
@@ -1,53 +1,67 @@ +import typing as t from ast import literal_eval +from ast import parse from itertools import chain from itertools import islice +from types import GeneratorType from . import nodes -from ._compat import text_type from .compiler import CodeGenerator +from .compiler import Frame from .compiler import has_safe_repr from .environment import Environment from .environment import Template -def native_concat(nodes): +def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]: """Return a native Python type from the list of compiled nodes. If the result is a single node, its value is returned. Otherwise, the nodes are concatenated as strings. If the result can be parsed with :func:`ast.literal_eval`, the parsed value is returned. Otherwise, the string is returned. - :param nodes: Iterable of nodes to concatenate. + :param values: Iterable of outputs to concatenate. """ - head = list(islice(nodes, 2)) + head = list(islice(values, 2)) if not head: return None if len(head) == 1: raw = head[0] + if not isinstance(raw, str): + return raw else: - raw = u"".join([text_type(v) for v in chain(head, nodes)]) + if isinstance(values, GeneratorType): + values = chain(head, values) + raw = "".join([str(v) for v in values]) try: - return literal_eval(raw) + return literal_eval( + # In Python 3.10+ ast.literal_eval removes leading spaces/tabs + # from the given string. For backwards compatibility we need to + # parse the string ourselves without removing leading spaces/tabs. + parse(raw, mode="eval") + ) except (ValueError, SyntaxError, MemoryError): return raw class NativeCodeGenerator(CodeGenerator): """A code generator which renders Python types by not adding - ``to_string()`` around output nodes. + ``str()`` around output nodes. """ @staticmethod - def _default_finalize(value): + def _default_finalize(value: t.Any) -> t.Any: return value - def _output_const_repr(self, group): - return repr(u"".join([text_type(v) for v in group])) + def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: + return repr("".join([str(v) for v in group])) - def _output_child_to_const(self, node, frame, finalize): + def _output_child_to_const( + self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo + ) -> t.Any: const = node.as_const(frame.eval_ctx) if not has_safe_repr(const): @@ -56,13 +70,17 @@ if isinstance(node, nodes.TemplateData): return const - return finalize.const(const) + return finalize.const(const) # type: ignore - def _output_child_pre(self, node, frame, finalize): + def _output_child_pre( + self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo + ) -> None: if finalize.src is not None: self.write(finalize.src) - def _output_child_post(self, node, frame, finalize): + def _output_child_post( + self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo + ) -> None: if finalize.src is not None: self.write(")") @@ -71,22 +89,40 @@ """An environment that renders templates to native Python types.""" code_generator_class = NativeCodeGenerator + concat = staticmethod(native_concat) # type: ignore class NativeTemplate(Template): environment_class = NativeEnvironment - def render(self, *args, **kwargs): + def render(self, *args: t.Any, **kwargs: t.Any) -> t.Any: """Render the template to produce a native Python type. If the result is a single node, its value is returned. Otherwise, the nodes are concatenated as strings. If the result can be parsed with :func:`ast.literal_eval`, the parsed value is returned. 
Otherwise, the string is returned. """ - vars = dict(*args, **kwargs) + ctx = self.new_context(dict(*args, **kwargs)) try: - return native_concat(self.root_render_func(self.new_context(vars))) + return self.environment_class.concat( # type: ignore + self.root_render_func(ctx) # type: ignore + ) + except Exception: + return self.environment.handle_exception() + + async def render_async(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + if not self.environment.is_async: + raise RuntimeError( + "The environment was not created with async mode enabled." + ) + + ctx = self.new_context(dict(*args, **kwargs)) + + try: + return self.environment_class.concat( # type: ignore + [n async for n in self.root_render_func(ctx)] # type: ignore + ) except Exception: return self.environment.handle_exception()
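The nativetypes.py changes adjust native_concat so a single non-string output keeps its Python type, and they add an async render path. A minimal sketch of the resulting behaviour, assuming the documented NativeEnvironment semantics:

    from jinja2.nativetypes import NativeEnvironment

    env = NativeEnvironment()

    # A single output keeps its native type instead of being stringified.
    result = env.from_string("{{ x + y }}").render(x=4, y=2)
    assert result == 6 and isinstance(result, int)

    # Multiple outputs are joined as a string and parsed with
    # ast.literal_eval when possible.
    result = env.from_string("[{{ a }}, {{ b }}]").render(a=1, b=2)
    assert result == [1, 2]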
diff --git a/third_party/jinja2/nodes.py b/third_party/jinja2/nodes.py
index 95bd614..b2f88d9 100644
--- a/third_party/jinja2/nodes.py
+++ b/third_party/jinja2/nodes.py
@@ -1,19 +1,23 @@ -# -*- coding: utf-8 -*- """AST nodes generated by the parser for the compiler. Also provides some node tree helper functions used by the parser and compiler in order to normalize nodes. """ +import inspect import operator +import typing as t from collections import deque from markupsafe import Markup -from ._compat import izip -from ._compat import PY2 -from ._compat import text_type -from ._compat import with_metaclass +from .utils import _PassArg -_binop_to_func = { +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +_NodeBound = t.TypeVar("_NodeBound", bound="Node") + +_binop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { "*": operator.mul, "/": operator.truediv, "//": operator.floordiv, @@ -23,9 +27,13 @@ "-": operator.sub, } -_uaop_to_func = {"not": operator.not_, "+": operator.pos, "-": operator.neg} +_uaop_to_func: t.Dict[str, t.Callable[[t.Any], t.Any]] = { + "not": operator.not_, + "+": operator.pos, + "-": operator.neg, +} -_cmpop_to_func = { +_cmpop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { "eq": operator.eq, "ne": operator.ne, "gt": operator.gt, @@ -46,24 +54,26 @@ inheritance. fields and attributes from the parent class are automatically forwarded to the child.""" - def __new__(mcs, name, bases, d): + def __new__(mcs, name, bases, d): # type: ignore for attr in "fields", "attributes": storage = [] - storage.extend(getattr(bases[0], attr, ())) + storage.extend(getattr(bases[0] if bases else object, attr, ())) storage.extend(d.get(attr, ())) - assert len(bases) == 1, "multiple inheritance not allowed" + assert len(bases) <= 1, "multiple inheritance not allowed" assert len(storage) == len(set(storage)), "layout conflict" d[attr] = tuple(storage) d.setdefault("abstract", False) return type.__new__(mcs, name, bases, d) -class EvalContext(object): +class EvalContext: """Holds evaluation time information. Custom attributes can be attached to it in extensions. """ - def __init__(self, environment, template_name=None): + def __init__( + self, environment: "Environment", template_name: t.Optional[str] = None + ) -> None: self.environment = environment if callable(environment.autoescape): self.autoescape = environment.autoescape(template_name) @@ -71,27 +81,26 @@ self.autoescape = environment.autoescape self.volatile = False - def save(self): + def save(self) -> t.Mapping[str, t.Any]: return self.__dict__.copy() - def revert(self, old): + def revert(self, old: t.Mapping[str, t.Any]) -> None: self.__dict__.clear() self.__dict__.update(old) -def get_eval_context(node, ctx): +def get_eval_context(node: "Node", ctx: t.Optional[EvalContext]) -> EvalContext: if ctx is None: if node.environment is None: raise RuntimeError( - "if no eval context is passed, the " - "node must have an attached " - "environment." + "if no eval context is passed, the node must have an" + " attached environment." ) return EvalContext(node.environment) return ctx -class Node(with_metaclass(NodeType, object)): +class Node(metaclass=NodeType): """Baseclass for all Jinja nodes. There are a number of nodes available of different types. There are four major types: @@ -108,33 +117,36 @@ all nodes automatically. """ - fields = () - attributes = ("lineno", "environment") + fields: t.Tuple[str, ...] = () + attributes: t.Tuple[str, ...] 
= ("lineno", "environment") abstract = True - def __init__(self, *fields, **attributes): + lineno: int + environment: t.Optional["Environment"] + + def __init__(self, *fields: t.Any, **attributes: t.Any) -> None: if self.abstract: raise TypeError("abstract nodes are not instantiable") if fields: if len(fields) != len(self.fields): if not self.fields: - raise TypeError("%r takes 0 arguments" % self.__class__.__name__) + raise TypeError(f"{type(self).__name__!r} takes 0 arguments") raise TypeError( - "%r takes 0 or %d argument%s" - % ( - self.__class__.__name__, - len(self.fields), - len(self.fields) != 1 and "s" or "", - ) + f"{type(self).__name__!r} takes 0 or {len(self.fields)}" + f" argument{'s' if len(self.fields) != 1 else ''}" ) - for name, arg in izip(self.fields, fields): + for name, arg in zip(self.fields, fields): setattr(self, name, arg) for attr in self.attributes: setattr(self, attr, attributes.pop(attr, None)) if attributes: - raise TypeError("unknown attribute %r" % next(iter(attributes))) + raise TypeError(f"unknown attribute {next(iter(attributes))!r}") - def iter_fields(self, exclude=None, only=None): + def iter_fields( + self, + exclude: t.Optional[t.Container[str]] = None, + only: t.Optional[t.Container[str]] = None, + ) -> t.Iterator[t.Tuple[str, t.Any]]: """This method iterates over all fields that are defined and yields ``(key, value)`` tuples. Per default all fields are returned, but it's possible to limit that to some fields by providing the `only` @@ -143,7 +155,7 @@ """ for name in self.fields: if ( - (exclude is only is None) + (exclude is None and only is None) or (exclude is not None and name not in exclude) or (only is not None and name in only) ): @@ -152,7 +164,11 @@ except AttributeError: pass - def iter_child_nodes(self, exclude=None, only=None): + def iter_child_nodes( + self, + exclude: t.Optional[t.Container[str]] = None, + only: t.Optional[t.Container[str]] = None, + ) -> t.Iterator["Node"]: """Iterates over all direct child nodes of the node. This iterates over all fields and yields the values of they are nodes. If the value of a field is a list all the nodes in that list are returned. @@ -165,24 +181,27 @@ elif isinstance(item, Node): yield item - def find(self, node_type): + def find(self, node_type: t.Type[_NodeBound]) -> t.Optional[_NodeBound]: """Find the first node of a given type. If no such node exists the return value is `None`. """ for result in self.find_all(node_type): return result - def find_all(self, node_type): + return None + + def find_all( + self, node_type: t.Union[t.Type[_NodeBound], t.Tuple[t.Type[_NodeBound], ...]] + ) -> t.Iterator[_NodeBound]: """Find all the nodes of a given type. If the type is a tuple, the check is performed for any of the tuple items. """ for child in self.iter_child_nodes(): if isinstance(child, node_type): - yield child - for result in child.find_all(node_type): - yield result + yield child # type: ignore + yield from child.find_all(node_type) - def set_ctx(self, ctx): + def set_ctx(self, ctx: str) -> "Node": """Reset the context of a node and all child nodes. Per default the parser will all generate nodes that have a 'load' context as it's the most common one. 
This method is used in the parser to set assignment @@ -192,11 +211,11 @@ while todo: node = todo.popleft() if "ctx" in node.fields: - node.ctx = ctx + node.ctx = ctx # type: ignore todo.extend(node.iter_child_nodes()) return self - def set_lineno(self, lineno, override=False): + def set_lineno(self, lineno: int, override: bool = False) -> "Node": """Set the line numbers of the node and children.""" todo = deque([self]) while todo: @@ -207,7 +226,7 @@ todo.extend(node.iter_child_nodes()) return self - def set_environment(self, environment): + def set_environment(self, environment: "Environment") -> "Node": """Set the environment for all nodes.""" todo = deque([self]) while todo: @@ -216,30 +235,25 @@ todo.extend(node.iter_child_nodes()) return self - def __eq__(self, other): - return type(self) is type(other) and tuple(self.iter_fields()) == tuple( - other.iter_fields() - ) + def __eq__(self, other: t.Any) -> bool: + if type(self) is not type(other): + return NotImplemented - def __ne__(self, other): - return not self.__eq__(other) + return tuple(self.iter_fields()) == tuple(other.iter_fields()) - # Restore Python 2 hashing behavior on Python 3 __hash__ = object.__hash__ - def __repr__(self): - return "%s(%s)" % ( - self.__class__.__name__, - ", ".join("%s=%r" % (arg, getattr(self, arg, None)) for arg in self.fields), - ) + def __repr__(self) -> str: + args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields) + return f"{type(self).__name__}({args_str})" - def dump(self): - def _dump(node): + def dump(self) -> str: + def _dump(node: t.Union[Node, t.Any]) -> None: if not isinstance(node, Node): buf.append(repr(node)) return - buf.append("nodes.%s(" % node.__class__.__name__) + buf.append(f"nodes.{type(node).__name__}(") if not node.fields: buf.append(")") return @@ -258,7 +272,7 @@ _dump(value) buf.append(")") - buf = [] + buf: t.List[str] = [] _dump(self) return "".join(buf) @@ -281,6 +295,7 @@ """ fields = ("body",) + body: t.List[Node] class Output(Stmt): @@ -289,12 +304,14 @@ """ fields = ("nodes",) + nodes: t.List["Expr"] class Extends(Stmt): """Represents an extends statement.""" fields = ("template",) + template: "Expr" class For(Stmt): @@ -307,12 +324,22 @@ """ fields = ("target", "iter", "body", "else_", "test", "recursive") + target: Node + iter: Node + body: t.List[Node] + else_: t.List[Node] + test: t.Optional[Node] + recursive: bool class If(Stmt): """If `test` is true, `body` is rendered, else `else_`.""" fields = ("test", "body", "elif_", "else_") + test: Node + body: t.List[Node] + elif_: t.List["If"] + else_: t.List[Node] class Macro(Stmt): @@ -322,6 +349,10 @@ """ fields = ("name", "args", "defaults", "body") + name: str + args: t.List["Name"] + defaults: t.List["Expr"] + body: t.List[Node] class CallBlock(Stmt): @@ -330,12 +361,18 @@ """ fields = ("call", "args", "defaults", "body") + call: "Call" + args: t.List["Name"] + defaults: t.List["Expr"] + body: t.List[Node] class FilterBlock(Stmt): """Node for filter sections.""" fields = ("body", "filter") + body: t.List[Node] + filter: "Filter" class With(Stmt): @@ -346,24 +383,41 @@ """ fields = ("targets", "values", "body") + targets: t.List["Expr"] + values: t.List["Expr"] + body: t.List[Node] class Block(Stmt): - """A node that represents a block.""" + """A node that represents a block. - fields = ("name", "body", "scoped") + .. versionchanged:: 3.0.0 + the `required` field was added. 
+ """ + + fields = ("name", "body", "scoped", "required") + name: str + body: t.List[Node] + scoped: bool + required: bool class Include(Stmt): """A node that represents the include tag.""" fields = ("template", "with_context", "ignore_missing") + template: "Expr" + with_context: bool + ignore_missing: bool class Import(Stmt): """A node that represents the import tag.""" fields = ("template", "target", "with_context") + template: "Expr" + target: str + with_context: bool class FromImport(Stmt): @@ -379,24 +433,33 @@ """ fields = ("template", "names", "with_context") + template: "Expr" + names: t.List[t.Union[str, t.Tuple[str, str]]] + with_context: bool class ExprStmt(Stmt): """A statement that evaluates an expression and discards the result.""" fields = ("node",) + node: Node class Assign(Stmt): """Assigns an expression to a target.""" fields = ("target", "node") + target: "Expr" + node: Node class AssignBlock(Stmt): """Assigns a block to a target.""" fields = ("target", "filter", "body") + target: "Expr" + filter: t.Optional["Filter"] + body: t.List[Node] class Expr(Node): @@ -404,7 +467,7 @@ abstract = True - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: """Return the value of the expression as constant or raise :exc:`Impossible` if this was not possible. @@ -417,7 +480,7 @@ """ raise Impossible() - def can_assign(self): + def can_assign(self) -> bool: """Check if it's possible to assign something to this node.""" return False @@ -426,44 +489,49 @@ """Baseclass for all binary expressions.""" fields = ("left", "right") - operator = None + left: Expr + right: Expr + operator: str abstract = True - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: eval_ctx = get_eval_context(self, eval_ctx) + # intercepted operators cannot be folded at compile time if ( - self.environment.sandboxed - and self.operator in self.environment.intercepted_binops + eval_ctx.environment.sandboxed + and self.operator in eval_ctx.environment.intercepted_binops # type: ignore ): raise Impossible() f = _binop_to_func[self.operator] try: return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx)) - except Exception: - raise Impossible() + except Exception as e: + raise Impossible() from e class UnaryExpr(Expr): """Baseclass for all unary expressions.""" fields = ("node",) - operator = None + node: Expr + operator: str abstract = True - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: eval_ctx = get_eval_context(self, eval_ctx) + # intercepted operators cannot be folded at compile time if ( - self.environment.sandboxed - and self.operator in self.environment.intercepted_unops + eval_ctx.environment.sandboxed + and self.operator in eval_ctx.environment.intercepted_unops # type: ignore ): raise Impossible() f = _uaop_to_func[self.operator] try: return f(self.node.as_const(eval_ctx)) - except Exception: - raise Impossible() + except Exception as e: + raise Impossible() from e class Name(Expr): @@ -476,17 +544,21 @@ """ fields = ("name", "ctx") + name: str + ctx: str - def can_assign(self): - return self.name not in ("true", "false", "none", "True", "False", "None") + def can_assign(self) -> bool: + return self.name not in {"true", "false", "none", "True", "False", "None"} class NSRef(Expr): """Reference to a namespace value assignment""" fields = ("name", "attr") + name: str + attr: str - def can_assign(self): + def can_assign(self) -> 
bool: # We don't need any special checks here; NSRef assignments have a # runtime check to ensure the target is a namespace object which will # have been checked already as it is created using a normal assignment @@ -508,22 +580,18 @@ """ fields = ("value",) + value: t.Any - def as_const(self, eval_ctx=None): - rv = self.value - if ( - PY2 - and type(rv) is text_type - and self.environment.policies["compiler.ascii_str"] - ): - try: - rv = rv.encode("ascii") - except UnicodeError: - pass - return rv + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + return self.value @classmethod - def from_untrusted(cls, value, lineno=None, environment=None): + def from_untrusted( + cls, + value: t.Any, + lineno: t.Optional[int] = None, + environment: "t.Optional[Environment]" = None, + ) -> "Const": """Return a const object if the value is representable as constant value in the generated code, otherwise it will raise an `Impossible` exception. @@ -539,8 +607,9 @@ """A constant template string.""" fields = ("data",) + data: str - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str: eval_ctx = get_eval_context(self, eval_ctx) if eval_ctx.volatile: raise Impossible() @@ -556,12 +625,14 @@ """ fields = ("items", "ctx") + items: t.List[Expr] + ctx: str - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[t.Any, ...]: eval_ctx = get_eval_context(self, eval_ctx) return tuple(x.as_const(eval_ctx) for x in self.items) - def can_assign(self): + def can_assign(self) -> bool: for item in self.items: if not item.can_assign(): return False @@ -572,8 +643,9 @@ """Any list literal such as ``[1, 2, 3]``""" fields = ("items",) + items: t.List[Expr] - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.List[t.Any]: eval_ctx = get_eval_context(self, eval_ctx) return [x.as_const(eval_ctx) for x in self.items] @@ -584,8 +656,11 @@ """ fields = ("items",) + items: t.List["Pair"] - def as_const(self, eval_ctx=None): + def as_const( + self, eval_ctx: t.Optional[EvalContext] = None + ) -> t.Dict[t.Any, t.Any]: eval_ctx = get_eval_context(self, eval_ctx) return dict(x.as_const(eval_ctx) for x in self.items) @@ -594,8 +669,12 @@ """A key, value pair for dicts.""" fields = ("key", "value") + key: Expr + value: Expr - def as_const(self, eval_ctx=None): + def as_const( + self, eval_ctx: t.Optional[EvalContext] = None + ) -> t.Tuple[t.Any, t.Any]: eval_ctx = get_eval_context(self, eval_ctx) return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx) @@ -604,8 +683,10 @@ """A key, value pair for keyword arguments where key is a string.""" fields = ("key", "value") + key: str + value: Expr - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[str, t.Any]: eval_ctx = get_eval_context(self, eval_ctx) return self.key, self.value.as_const(eval_ctx) @@ -616,8 +697,11 @@ """ fields = ("test", "expr1", "expr2") + test: Expr + expr1: Expr + expr2: t.Optional[Expr] - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: eval_ctx = get_eval_context(self, eval_ctx) if self.test.as_const(eval_ctx): return self.expr1.as_const(eval_ctx) @@ -629,93 +713,103 @@ return self.expr2.as_const(eval_ctx) -def args_as_const(node, eval_ctx): +def args_as_const( + node: t.Union["_FilterTestCommon", "Call"], eval_ctx: t.Optional[EvalContext] +) -> 
t.Tuple[t.List[t.Any], t.Dict[t.Any, t.Any]]: args = [x.as_const(eval_ctx) for x in node.args] kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs) if node.dyn_args is not None: try: args.extend(node.dyn_args.as_const(eval_ctx)) - except Exception: - raise Impossible() + except Exception as e: + raise Impossible() from e if node.dyn_kwargs is not None: try: kwargs.update(node.dyn_kwargs.as_const(eval_ctx)) - except Exception: - raise Impossible() + except Exception as e: + raise Impossible() from e return args, kwargs -class Filter(Expr): - """This node applies a filter on an expression. `name` is the name of - the filter, the rest of the fields are the same as for :class:`Call`. - - If the `node` of a filter is `None` the contents of the last buffer are - filtered. Buffers are created by macros and filter blocks. - """ - +class _FilterTestCommon(Expr): fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs") + node: Expr + name: str + args: t.List[Expr] + kwargs: t.List[Pair] + dyn_args: t.Optional[Expr] + dyn_kwargs: t.Optional[Expr] + abstract = True + _is_filter = True - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: eval_ctx = get_eval_context(self, eval_ctx) - if eval_ctx.volatile or self.node is None: + if eval_ctx.volatile: raise Impossible() - # we have to be careful here because we call filter_ below. - # if this variable would be called filter, 2to3 would wrap the - # call in a list because it is assuming we are talking about the - # builtin filter function here which no longer returns a list in - # python 3. because of that, do not rename filter_ to filter! - filter_ = self.environment.filters.get(self.name) + if self._is_filter: + env_map = eval_ctx.environment.filters + else: + env_map = eval_ctx.environment.tests - if filter_ is None or getattr(filter_, "contextfilter", False) is True: + func = env_map.get(self.name) + pass_arg = _PassArg.from_obj(func) # type: ignore + + if func is None or pass_arg is _PassArg.context: raise Impossible() - # We cannot constant handle async filters, so we need to make sure - # to not go down this path. - if eval_ctx.environment.is_async and getattr( - filter_, "asyncfiltervariant", False + if eval_ctx.environment.is_async and ( + getattr(func, "jinja_async_variant", False) is True + or inspect.iscoroutinefunction(func) ): raise Impossible() args, kwargs = args_as_const(self, eval_ctx) args.insert(0, self.node.as_const(eval_ctx)) - if getattr(filter_, "evalcontextfilter", False) is True: + if pass_arg is _PassArg.eval_context: args.insert(0, eval_ctx) - elif getattr(filter_, "environmentfilter", False) is True: - args.insert(0, self.environment) + elif pass_arg is _PassArg.environment: + args.insert(0, eval_ctx.environment) try: - return filter_(*args, **kwargs) - except Exception: - raise Impossible() + return func(*args, **kwargs) + except Exception as e: + raise Impossible() from e -class Test(Expr): - """Applies a test on an expression. `name` is the name of the test, the - rest of the fields are the same as for :class:`Call`. +class Filter(_FilterTestCommon): + """Apply a filter to an expression. ``name`` is the name of the + filter, the other fields are the same as :class:`Call`. + + If ``node`` is ``None``, the filter is being used in a filter block + and is applied to the content of the block. 
""" - fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs") + node: t.Optional[Expr] # type: ignore - def as_const(self, eval_ctx=None): - test = self.environment.tests.get(self.name) - - if test is None: + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + if self.node is None: raise Impossible() - eval_ctx = get_eval_context(self, eval_ctx) - args, kwargs = args_as_const(self, eval_ctx) - args.insert(0, self.node.as_const(eval_ctx)) + return super().as_const(eval_ctx=eval_ctx) - try: - return test(*args, **kwargs) - except Exception: - raise Impossible() + +class Test(_FilterTestCommon): + """Apply a test to an expression. ``name`` is the name of the test, + the other field are the same as :class:`Call`. + + .. versionchanged:: 3.0 + ``as_const`` shares the same logic for filters and tests. Tests + check for volatile, async, and ``@pass_context`` etc. + decorators. + """ + + _is_filter = False class Call(Expr): @@ -727,26 +821,33 @@ """ fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs") + node: Expr + args: t.List[Expr] + kwargs: t.List[Keyword] + dyn_args: t.Optional[Expr] + dyn_kwargs: t.Optional[Expr] class Getitem(Expr): """Get an attribute or item from an expression and prefer the item.""" fields = ("node", "arg", "ctx") + node: Expr + arg: Expr + ctx: str - def as_const(self, eval_ctx=None): - eval_ctx = get_eval_context(self, eval_ctx) + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: if self.ctx != "load": raise Impossible() + + eval_ctx = get_eval_context(self, eval_ctx) + try: - return self.environment.getitem( + return eval_ctx.environment.getitem( self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx) ) - except Exception: - raise Impossible() - - def can_assign(self): - return False + except Exception as e: + raise Impossible() from e class Getattr(Expr): @@ -755,18 +856,20 @@ """ fields = ("node", "attr", "ctx") + node: Expr + attr: str + ctx: str - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: if self.ctx != "load": raise Impossible() - try: - eval_ctx = get_eval_context(self, eval_ctx) - return self.environment.getattr(self.node.as_const(eval_ctx), self.attr) - except Exception: - raise Impossible() - def can_assign(self): - return False + eval_ctx = get_eval_context(self, eval_ctx) + + try: + return eval_ctx.environment.getattr(self.node.as_const(eval_ctx), self.attr) + except Exception as e: + raise Impossible() from e class Slice(Expr): @@ -775,11 +878,14 @@ """ fields = ("start", "stop", "step") + start: t.Optional[Expr] + stop: t.Optional[Expr] + step: t.Optional[Expr] - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> slice: eval_ctx = get_eval_context(self, eval_ctx) - def const(obj): + def const(obj: t.Optional[Expr]) -> t.Optional[t.Any]: if obj is None: return None return obj.as_const(eval_ctx) @@ -788,15 +894,16 @@ class Concat(Expr): - """Concatenates the list of expressions provided after converting them to - unicode. + """Concatenates the list of expressions provided after converting + them to strings. 
""" fields = ("nodes",) + nodes: t.List[Expr] - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str: eval_ctx = get_eval_context(self, eval_ctx) - return "".join(text_type(x.as_const(eval_ctx)) for x in self.nodes) + return "".join(str(x.as_const(eval_ctx)) for x in self.nodes) class Compare(Expr): @@ -805,8 +912,10 @@ """ fields = ("expr", "ops") + expr: Expr + ops: t.List["Operand"] - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: eval_ctx = get_eval_context(self, eval_ctx) result = value = self.expr.as_const(eval_ctx) @@ -819,8 +928,8 @@ return False value = new_value - except Exception: - raise Impossible() + except Exception as e: + raise Impossible() from e return result @@ -829,15 +938,8 @@ """Holds an operator and an expression.""" fields = ("op", "expr") - - -if __debug__: - Operand.__doc__ += "\nThe following operators are available: " + ", ".join( - sorted( - "``%s``" % x - for x in set(_binop_to_func) | set(_uaop_to_func) | set(_cmpop_to_func) - ) - ) + op: str + expr: Expr class Mul(BinExpr): @@ -853,7 +955,7 @@ class FloorDiv(BinExpr): - """Divides the left by the right node and truncates conver the + """Divides the left by the right node and converts the result into an integer by truncating. """ @@ -889,7 +991,7 @@ operator = "and" - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: eval_ctx = get_eval_context(self, eval_ctx) return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx) @@ -899,7 +1001,7 @@ operator = "or" - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: eval_ctx = get_eval_context(self, eval_ctx) return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx) @@ -931,6 +1033,7 @@ """ fields = ("name",) + name: str class ExtensionAttribute(Expr): @@ -942,6 +1045,8 @@ """ fields = ("identifier", "name") + identifier: str + name: str class ImportedName(Expr): @@ -952,6 +1057,7 @@ """ fields = ("importname",) + importname: str class InternalName(Expr): @@ -959,12 +1065,13 @@ yourself but the parser provides a :meth:`~jinja2.parser.Parser.free_identifier` method that creates a new identifier for you. This identifier is not available from the - template and is not threated specially by the compiler. + template and is not treated specially by the compiler. """ fields = ("name",) + name: str - def __init__(self): + def __init__(self) -> None: raise TypeError( "Can't create internal names. Use the " "`free_identifier` method on a parser." @@ -975,8 +1082,9 @@ """Mark the wrapped expression as safe (wrap it as `Markup`).""" fields = ("expr",) + expr: Expr - def as_const(self, eval_ctx=None): + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> Markup: eval_ctx = get_eval_context(self, eval_ctx) return Markup(self.expr.as_const(eval_ctx)) @@ -989,8 +1097,11 @@ """ fields = ("expr",) + expr: Expr - def as_const(self, eval_ctx=None): + def as_const( + self, eval_ctx: t.Optional[EvalContext] = None + ) -> t.Union[Markup, t.Any]: eval_ctx = get_eval_context(self, eval_ctx) if eval_ctx.volatile: raise Impossible() @@ -1012,9 +1123,9 @@ Getattr(ContextReference(), 'name')) This is basically equivalent to using the - :func:`~jinja2.contextfunction` decorator when using the - high-level API, which causes a reference to the context to be passed - as the first argument to a function. 
+ :func:`~jinja2.pass_context` decorator when using the high-level + API, which causes a reference to the context to be passed as the + first argument to a function. """ @@ -1039,6 +1150,7 @@ """An artificial scope.""" fields = ("body",) + body: t.List[Node] class OverlayScope(Stmt): @@ -1056,6 +1168,8 @@ """ fields = ("context", "body") + context: Expr + body: t.List[Node] class EvalContextModifier(Stmt): @@ -1068,6 +1182,7 @@ """ fields = ("options",) + options: t.List[Keyword] class ScopedEvalContextModifier(EvalContextModifier): @@ -1077,12 +1192,13 @@ """ fields = ("body",) + body: t.List[Node] # make sure nobody creates custom nodes -def _failing_new(*args, **kwargs): +def _failing_new(*args: t.Any, **kwargs: t.Any) -> "te.NoReturn": raise TypeError("can't create custom node types") -NodeType.__new__ = staticmethod(_failing_new) +NodeType.__new__ = staticmethod(_failing_new) # type: ignore del _failing_new
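The nodes.py rewrite keeps the as_const()/Impossible constant-folding contract while moving filter and test folding into the shared _FilterTestCommon base and switching to the pass_context-style decorators. A short, hedged sketch of that contract (not part of the patch):

    from jinja2 import Environment, nodes

    env = Environment()

    # Constant subexpressions can be folded at compile time ...
    expr = env.parse("{{ 1 + 2 * 3 }}").find(nodes.Output).nodes[0]
    assert expr.as_const() == 7

    # ... while anything depending on runtime data raises Impossible.
    dyn = env.parse("{{ x + 1 }}").find(nodes.Output).nodes[0]
    try:
        dyn.as_const()
    except nodes.Impossible:
        pass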
diff --git a/third_party/jinja2/optimizer.py b/third_party/jinja2/optimizer.py
index 7bc78c45..fe10107 100644
--- a/third_party/jinja2/optimizer.py
+++ b/third_party/jinja2/optimizer.py
@@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """The optimizer tries to constant fold expressions and modify the AST in place so that it should be faster to evaluate. @@ -8,23 +7,30 @@ would have a different scope. The solution would be a second syntax tree that stored the scoping rules. """ +import typing as t + from . import nodes from .visitor import NodeTransformer +if t.TYPE_CHECKING: + from .environment import Environment -def optimize(node, environment): + +def optimize(node: nodes.Node, environment: "Environment") -> nodes.Node: """The context hint can be used to perform an static optimization based on the context given.""" optimizer = Optimizer(environment) - return optimizer.visit(node) + return t.cast(nodes.Node, optimizer.visit(node)) class Optimizer(NodeTransformer): - def __init__(self, environment): + def __init__(self, environment: "t.Optional[Environment]") -> None: self.environment = environment - def generic_visit(self, node, *args, **kwargs): - node = super(Optimizer, self).generic_visit(node, *args, **kwargs) + def generic_visit( + self, node: nodes.Node, *args: t.Any, **kwargs: t.Any + ) -> nodes.Node: + node = super().generic_visit(node, *args, **kwargs) # Do constant folding. Some other nodes besides Expr have # as_const, but folding them causes errors later on.
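The optimizer keeps its fold-in-place behaviour behind the new annotations: constant expressions in the AST are replaced by Const nodes before code generation, and anything that raises Impossible is left alone. A hedged sketch of what the pass does (illustrative, not from the patch):

    from jinja2 import Environment, nodes
    from jinja2.optimizer import optimize

    env = Environment()
    ast = env.parse("{{ 6 * 7 }}")

    # generic_visit folds Expr nodes whose as_const() succeeds.
    folded = optimize(ast, env)
    out = folded.find(nodes.Output)
    assert isinstance(out.nodes[0], nodes.Const)
    assert out.nodes[0].value == 42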
diff --git a/third_party/jinja2/parser.py b/third_party/jinja2/parser.py
index d5881066..cefce2d 100644
--- a/third_party/jinja2/parser.py
+++ b/third_party/jinja2/parser.py
@@ -1,12 +1,20 @@ -# -*- coding: utf-8 -*- """Parse tokens from the lexer into nodes for the compiler.""" +import typing +import typing as t + from . import nodes -from ._compat import imap from .exceptions import TemplateAssertionError from .exceptions import TemplateSyntaxError from .lexer import describe_token from .lexer import describe_token_expr +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +_ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include) +_MacroCall = t.TypeVar("_MacroCall", nodes.Macro, nodes.CallBlock) + _statement_keywords = frozenset( [ "for", @@ -25,7 +33,7 @@ ) _compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"]) -_math_nodes = { +_math_nodes: t.Dict[str, t.Type[nodes.Expr]] = { "add": nodes.Add, "sub": nodes.Sub, "mul": nodes.Mul, @@ -35,26 +43,40 @@ } -class Parser(object): +class Parser: """This is the central parsing class Jinja uses. It's passed to extensions and can be used to parse expressions or statements. """ - def __init__(self, environment, source, name=None, filename=None, state=None): + def __init__( + self, + environment: "Environment", + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> None: self.environment = environment self.stream = environment._tokenize(source, name, filename, state) self.name = name self.filename = filename self.closed = False - self.extensions = {} + self.extensions: t.Dict[ + str, t.Callable[["Parser"], t.Union[nodes.Node, t.List[nodes.Node]]] + ] = {} for extension in environment.iter_extensions(): for tag in extension.tags: self.extensions[tag] = extension.parse self._last_identifier = 0 - self._tag_stack = [] - self._end_token_stack = [] + self._tag_stack: t.List[str] = [] + self._end_token_stack: t.List[t.Tuple[str, ...]] = [] - def fail(self, msg, lineno=None, exc=TemplateSyntaxError): + def fail( + self, + msg: str, + lineno: t.Optional[int] = None, + exc: t.Type[TemplateSyntaxError] = TemplateSyntaxError, + ) -> "te.NoReturn": """Convenience method that raises `exc` with the message, passed line number or last line number as well as the current name and filename. @@ -63,13 +85,18 @@ lineno = self.stream.current.lineno raise exc(msg, lineno, self.name, self.filename) - def _fail_ut_eof(self, name, end_token_stack, lineno): - expected = [] + def _fail_ut_eof( + self, + name: t.Optional[str], + end_token_stack: t.List[t.Tuple[str, ...]], + lineno: t.Optional[int], + ) -> "te.NoReturn": + expected: t.Set[str] = set() for exprs in end_token_stack: - expected.extend(imap(describe_token_expr, exprs)) + expected.update(map(describe_token_expr, exprs)) if end_token_stack: - currently_looking = " or ".join( - "'%s'" % describe_token_expr(expr) for expr in end_token_stack[-1] + currently_looking: t.Optional[str] = " or ".join( + map(repr, map(describe_token_expr, end_token_stack[-1])) ) else: currently_looking = None @@ -77,59 +104,65 @@ if name is None: message = ["Unexpected end of template."] else: - message = ["Encountered unknown tag '%s'." % name] + message = [f"Encountered unknown tag {name!r}."] if currently_looking: if name is not None and name in expected: message.append( - "You probably made a nesting mistake. Jinja " - "is expecting this tag, but currently looking " - "for %s." % currently_looking + "You probably made a nesting mistake. Jinja is expecting this tag," + f" but currently looking for {currently_looking}." 
) else: message.append( - "Jinja was looking for the following tags: " - "%s." % currently_looking + f"Jinja was looking for the following tags: {currently_looking}." ) if self._tag_stack: message.append( - "The innermost block that needs to be " - "closed is '%s'." % self._tag_stack[-1] + "The innermost block that needs to be closed is" + f" {self._tag_stack[-1]!r}." ) self.fail(" ".join(message), lineno) - def fail_unknown_tag(self, name, lineno=None): + def fail_unknown_tag( + self, name: str, lineno: t.Optional[int] = None + ) -> "te.NoReturn": """Called if the parser encounters an unknown tag. Tries to fail with a human readable error message that could help to identify the problem. """ - return self._fail_ut_eof(name, self._end_token_stack, lineno) + self._fail_ut_eof(name, self._end_token_stack, lineno) - def fail_eof(self, end_tokens=None, lineno=None): + def fail_eof( + self, + end_tokens: t.Optional[t.Tuple[str, ...]] = None, + lineno: t.Optional[int] = None, + ) -> "te.NoReturn": """Like fail_unknown_tag but for end of template situations.""" stack = list(self._end_token_stack) if end_tokens is not None: stack.append(end_tokens) - return self._fail_ut_eof(None, stack, lineno) + self._fail_ut_eof(None, stack, lineno) - def is_tuple_end(self, extra_end_rules=None): + def is_tuple_end( + self, extra_end_rules: t.Optional[t.Tuple[str, ...]] = None + ) -> bool: """Are we at the end of a tuple?""" if self.stream.current.type in ("variable_end", "block_end", "rparen"): return True elif extra_end_rules is not None: - return self.stream.current.test_any(extra_end_rules) + return self.stream.current.test_any(extra_end_rules) # type: ignore return False - def free_identifier(self, lineno=None): + def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName: """Return a new free identifier as :class:`~jinja2.nodes.InternalName`.""" self._last_identifier += 1 rv = object.__new__(nodes.InternalName) - nodes.Node.__init__(rv, "fi%d" % self._last_identifier, lineno=lineno) + nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno) return rv - def parse_statement(self): + def parse_statement(self) -> t.Union[nodes.Node, t.List[nodes.Node]]: """Parse a single statement.""" token = self.stream.current if token.type != "name": @@ -138,7 +171,8 @@ pop_tag = True try: if token.value in _statement_keywords: - return getattr(self, "parse_" + self.stream.current.value)() + f = getattr(self, f"parse_{self.stream.current.value}") + return f() # type: ignore if token.value == "call": return self.parse_call_block() if token.value == "filter": @@ -157,7 +191,9 @@ if pop_tag: self._tag_stack.pop() - def parse_statements(self, end_tokens, drop_needle=False): + def parse_statements( + self, end_tokens: t.Tuple[str, ...], drop_needle: bool = False + ) -> t.List[nodes.Node]: """Parse multiple statements into a list until one of the end tokens is reached. This is used to parse the body of statements as it also parses template data if appropriate. 
The parser checks first if the @@ -184,7 +220,7 @@ next(self.stream) return result - def parse_set(self): + def parse_set(self) -> t.Union[nodes.Assign, nodes.AssignBlock]: """Parse an assign statement.""" lineno = next(self.stream).lineno target = self.parse_assign_target(with_namespace=True) @@ -195,7 +231,7 @@ body = self.parse_statements(("name:endset",), drop_needle=True) return nodes.AssignBlock(target, filter_node, body, lineno=lineno) - def parse_for(self): + def parse_for(self) -> nodes.For: """Parse a for loop.""" lineno = self.stream.expect("name:for").lineno target = self.parse_assign_target(extra_end_rules=("name:in",)) @@ -214,10 +250,10 @@ else_ = self.parse_statements(("name:endfor",), drop_needle=True) return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno) - def parse_if(self): + def parse_if(self) -> nodes.If: """Parse an if construct.""" node = result = nodes.If(lineno=self.stream.expect("name:if").lineno) - while 1: + while True: node.test = self.parse_tuple(with_condexpr=False) node.body = self.parse_statements(("name:elif", "name:else", "name:endif")) node.elif_ = [] @@ -232,10 +268,10 @@ break return result - def parse_with(self): + def parse_with(self) -> nodes.With: node = nodes.With(lineno=next(self.stream).lineno) - targets = [] - values = [] + targets: t.List[nodes.Expr] = [] + values: t.List[nodes.Expr] = [] while self.stream.current.type != "block_end": if targets: self.stream.expect("comma") @@ -249,37 +285,50 @@ node.body = self.parse_statements(("name:endwith",), drop_needle=True) return node - def parse_autoescape(self): + def parse_autoescape(self) -> nodes.Scope: node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno) node.options = [nodes.Keyword("autoescape", self.parse_expression())] node.body = self.parse_statements(("name:endautoescape",), drop_needle=True) return nodes.Scope([node]) - def parse_block(self): + def parse_block(self) -> nodes.Block: node = nodes.Block(lineno=next(self.stream).lineno) node.name = self.stream.expect("name").value node.scoped = self.stream.skip_if("name:scoped") + node.required = self.stream.skip_if("name:required") # common problem people encounter when switching from django # to jinja. we do not support hyphens in block names, so let's # raise a nicer error message in that case. if self.stream.current.type == "sub": self.fail( - "Block names in Jinja have to be valid Python " - "identifiers and may not contain hyphens, use an " - "underscore instead." + "Block names in Jinja have to be valid Python identifiers and may not" + " contain hyphens, use an underscore instead." 
) node.body = self.parse_statements(("name:endblock",), drop_needle=True) + + # enforce that required blocks only contain whitespace or comments + # by asserting that the body, if not empty, is just TemplateData nodes + # with whitespace data + if node.required and not all( + isinstance(child, nodes.TemplateData) and child.data.isspace() + for body in node.body + for child in body.nodes # type: ignore + ): + self.fail("Required blocks can only contain comments or whitespace") + self.stream.skip_if("name:" + node.name) return node - def parse_extends(self): + def parse_extends(self) -> nodes.Extends: node = nodes.Extends(lineno=next(self.stream).lineno) node.template = self.parse_expression() return node - def parse_import_context(self, node, default): + def parse_import_context( + self, node: _ImportInclude, default: bool + ) -> _ImportInclude: if self.stream.current.test_any( "name:with", "name:without" ) and self.stream.look().test("name:context"): @@ -289,7 +338,7 @@ node.with_context = default return node - def parse_include(self): + def parse_include(self) -> nodes.Include: node = nodes.Include(lineno=next(self.stream).lineno) node.template = self.parse_expression() if self.stream.current.test("name:ignore") and self.stream.look().test( @@ -301,30 +350,30 @@ node.ignore_missing = False return self.parse_import_context(node, True) - def parse_import(self): + def parse_import(self) -> nodes.Import: node = nodes.Import(lineno=next(self.stream).lineno) node.template = self.parse_expression() self.stream.expect("name:as") node.target = self.parse_assign_target(name_only=True).name return self.parse_import_context(node, False) - def parse_from(self): + def parse_from(self) -> nodes.FromImport: node = nodes.FromImport(lineno=next(self.stream).lineno) node.template = self.parse_expression() self.stream.expect("name:import") node.names = [] - def parse_context(): - if self.stream.current.value in ( + def parse_context() -> bool: + if self.stream.current.value in { "with", "without", - ) and self.stream.look().test("name:context"): + } and self.stream.look().test("name:context"): node.with_context = next(self.stream).value == "with" self.stream.skip() return True return False - while 1: + while True: if node.names: self.stream.expect("comma") if self.stream.current.type == "name": @@ -350,9 +399,9 @@ node.with_context = False return node - def parse_signature(self, node): - node.args = args = [] - node.defaults = defaults = [] + def parse_signature(self, node: _MacroCall) -> None: + args = node.args = [] + defaults = node.defaults = [] self.stream.expect("lparen") while self.stream.current.type != "rparen": if args: @@ -366,7 +415,7 @@ args.append(arg) self.stream.expect("rparen") - def parse_call_block(self): + def parse_call_block(self) -> nodes.CallBlock: node = nodes.CallBlock(lineno=next(self.stream).lineno) if self.stream.current.type == "lparen": self.parse_signature(node) @@ -374,26 +423,27 @@ node.args = [] node.defaults = [] - node.call = self.parse_expression() - if not isinstance(node.call, nodes.Call): + call_node = self.parse_expression() + if not isinstance(call_node, nodes.Call): self.fail("expected call", node.lineno) + node.call = call_node node.body = self.parse_statements(("name:endcall",), drop_needle=True) return node - def parse_filter_block(self): + def parse_filter_block(self) -> nodes.FilterBlock: node = nodes.FilterBlock(lineno=next(self.stream).lineno) - node.filter = self.parse_filter(None, start_inline=True) + node.filter = self.parse_filter(None, 
start_inline=True) # type: ignore node.body = self.parse_statements(("name:endfilter",), drop_needle=True) return node - def parse_macro(self): + def parse_macro(self) -> nodes.Macro: node = nodes.Macro(lineno=next(self.stream).lineno) node.name = self.parse_assign_target(name_only=True).name self.parse_signature(node) node.body = self.parse_statements(("name:endmacro",), drop_needle=True) return node - def parse_print(self): + def parse_print(self) -> nodes.Output: node = nodes.Output(lineno=next(self.stream).lineno) node.nodes = [] while self.stream.current.type != "block_end": @@ -402,13 +452,29 @@ node.nodes.append(self.parse_expression()) return node + @typing.overload + def parse_assign_target( + self, with_tuple: bool = ..., name_only: "te.Literal[True]" = ... + ) -> nodes.Name: + ... + + @typing.overload def parse_assign_target( self, - with_tuple=True, - name_only=False, - extra_end_rules=None, - with_namespace=False, - ): + with_tuple: bool = True, + name_only: bool = False, + extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, + with_namespace: bool = False, + ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]: + ... + + def parse_assign_target( + self, + with_tuple: bool = True, + name_only: bool = False, + extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, + with_namespace: bool = False, + ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]: """Parse an assignment target. As Jinja allows assignments to tuples, this function can parse all allowed assignment targets. Per default assignments to tuples are parsed, that can be disable however @@ -417,6 +483,8 @@ parameter is forwarded to the tuple parsing function. If `with_namespace` is enabled, a namespace assignment may be parsed. """ + target: nodes.Expr + if with_namespace and self.stream.look().type == "dot": token = self.stream.expect("name") next(self.stream) # dot @@ -432,14 +500,17 @@ ) else: target = self.parse_primary() + target.set_ctx("store") + if not target.can_assign(): self.fail( - "can't assign to %r" % target.__class__.__name__.lower(), target.lineno + f"can't assign to {type(target).__name__.lower()!r}", target.lineno ) - return target - def parse_expression(self, with_condexpr=True): + return target # type: ignore + + def parse_expression(self, with_condexpr: bool = True) -> nodes.Expr: """Parse an expression. Per default all expressions are parsed, if the optional `with_condexpr` parameter is set to `False` conditional expressions are not parsed. 
@@ -448,9 +519,11 @@ return self.parse_condexpr() return self.parse_or() - def parse_condexpr(self): + def parse_condexpr(self) -> nodes.Expr: lineno = self.stream.current.lineno expr1 = self.parse_or() + expr3: t.Optional[nodes.Expr] + while self.stream.skip_if("name:if"): expr2 = self.parse_or() if self.stream.skip_if("name:else"): @@ -461,7 +534,7 @@ lineno = self.stream.current.lineno return expr1 - def parse_or(self): + def parse_or(self) -> nodes.Expr: lineno = self.stream.current.lineno left = self.parse_and() while self.stream.skip_if("name:or"): @@ -470,7 +543,7 @@ lineno = self.stream.current.lineno return left - def parse_and(self): + def parse_and(self) -> nodes.Expr: lineno = self.stream.current.lineno left = self.parse_not() while self.stream.skip_if("name:and"): @@ -479,17 +552,17 @@ lineno = self.stream.current.lineno return left - def parse_not(self): + def parse_not(self) -> nodes.Expr: if self.stream.current.test("name:not"): lineno = next(self.stream).lineno return nodes.Not(self.parse_not(), lineno=lineno) return self.parse_compare() - def parse_compare(self): + def parse_compare(self) -> nodes.Expr: lineno = self.stream.current.lineno expr = self.parse_math1() ops = [] - while 1: + while True: token_type = self.stream.current.type if token_type in _compare_operators: next(self.stream) @@ -508,7 +581,7 @@ return expr return nodes.Compare(expr, ops, lineno=lineno) - def parse_math1(self): + def parse_math1(self) -> nodes.Expr: lineno = self.stream.current.lineno left = self.parse_concat() while self.stream.current.type in ("add", "sub"): @@ -519,7 +592,7 @@ lineno = self.stream.current.lineno return left - def parse_concat(self): + def parse_concat(self) -> nodes.Expr: lineno = self.stream.current.lineno args = [self.parse_math2()] while self.stream.current.type == "tilde": @@ -529,7 +602,7 @@ return args[0] return nodes.Concat(args, lineno=lineno) - def parse_math2(self): + def parse_math2(self) -> nodes.Expr: lineno = self.stream.current.lineno left = self.parse_pow() while self.stream.current.type in ("mul", "div", "floordiv", "mod"): @@ -540,7 +613,7 @@ lineno = self.stream.current.lineno return left - def parse_pow(self): + def parse_pow(self) -> nodes.Expr: lineno = self.stream.current.lineno left = self.parse_unary() while self.stream.current.type == "pow": @@ -550,9 +623,11 @@ lineno = self.stream.current.lineno return left - def parse_unary(self, with_filter=True): + def parse_unary(self, with_filter: bool = True) -> nodes.Expr: token_type = self.stream.current.type lineno = self.stream.current.lineno + node: nodes.Expr + if token_type == "sub": next(self.stream) node = nodes.Neg(self.parse_unary(False), lineno=lineno) @@ -566,8 +641,9 @@ node = self.parse_filter_expr(node) return node - def parse_primary(self): + def parse_primary(self) -> nodes.Expr: token = self.stream.current + node: nodes.Expr if token.type == "name": if token.value in ("true", "false", "True", "False"): node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno) @@ -596,16 +672,16 @@ elif token.type == "lbrace": node = self.parse_dict() else: - self.fail("unexpected '%s'" % describe_token(token), token.lineno) + self.fail(f"unexpected {describe_token(token)!r}", token.lineno) return node def parse_tuple( self, - simplified=False, - with_condexpr=True, - extra_end_rules=None, - explicit_parentheses=False, - ): + simplified: bool = False, + with_condexpr: bool = True, + extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, + explicit_parentheses: bool = False, + ) -> 
t.Union[nodes.Tuple, nodes.Expr]: """Works like `parse_expression` but if multiple expressions are delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created. This method could also return a regular expression instead of a tuple @@ -631,12 +707,13 @@ parse = self.parse_expression else: - def parse(): + def parse() -> nodes.Expr: return self.parse_expression(with_condexpr=False) - args = [] + args: t.List[nodes.Expr] = [] is_tuple = False - while 1: + + while True: if args: self.stream.expect("comma") if self.is_tuple_end(extra_end_rules): @@ -658,15 +735,15 @@ # tuple. if not explicit_parentheses: self.fail( - "Expected an expression, got '%s'" - % describe_token(self.stream.current) + "Expected an expression," + f" got {describe_token(self.stream.current)!r}" ) return nodes.Tuple(args, "load", lineno=lineno) - def parse_list(self): + def parse_list(self) -> nodes.List: token = self.stream.expect("lbracket") - items = [] + items: t.List[nodes.Expr] = [] while self.stream.current.type != "rbracket": if items: self.stream.expect("comma") @@ -676,9 +753,9 @@ self.stream.expect("rbracket") return nodes.List(items, lineno=token.lineno) - def parse_dict(self): + def parse_dict(self) -> nodes.Dict: token = self.stream.expect("lbrace") - items = [] + items: t.List[nodes.Pair] = [] while self.stream.current.type != "rbrace": if items: self.stream.expect("comma") @@ -691,8 +768,8 @@ self.stream.expect("rbrace") return nodes.Dict(items, lineno=token.lineno) - def parse_postfix(self, node): - while 1: + def parse_postfix(self, node: nodes.Expr) -> nodes.Expr: + while True: token_type = self.stream.current.type if token_type == "dot" or token_type == "lbracket": node = self.parse_subscript(node) @@ -704,11 +781,11 @@ break return node - def parse_filter_expr(self, node): - while 1: + def parse_filter_expr(self, node: nodes.Expr) -> nodes.Expr: + while True: token_type = self.stream.current.type if token_type == "pipe": - node = self.parse_filter(node) + node = self.parse_filter(node) # type: ignore elif token_type == "name" and self.stream.current.value == "is": node = self.parse_test(node) # calls are valid both after postfix expressions (getattr @@ -719,8 +796,12 @@ break return node - def parse_subscript(self, node): + def parse_subscript( + self, node: nodes.Expr + ) -> t.Union[nodes.Getattr, nodes.Getitem]: token = next(self.stream) + arg: nodes.Expr + if token.type == "dot": attr_token = self.stream.current next(self.stream) @@ -733,7 +814,7 @@ arg = nodes.Const(attr_token.value, lineno=attr_token.lineno) return nodes.Getitem(node, arg, "load", lineno=token.lineno) if token.type == "lbracket": - args = [] + args: t.List[nodes.Expr] = [] while self.stream.current.type != "rbracket": if args: self.stream.expect("comma") @@ -746,8 +827,9 @@ return nodes.Getitem(node, arg, "load", lineno=token.lineno) self.fail("expected subscript expression", token.lineno) - def parse_subscribed(self): + def parse_subscribed(self) -> nodes.Expr: lineno = self.stream.current.lineno + args: t.List[t.Optional[nodes.Expr]] if self.stream.current.type == "colon": next(self.stream) @@ -777,23 +859,26 @@ return nodes.Slice(lineno=lineno, *args) - def parse_call(self, node): + def parse_call_args(self) -> t.Tuple: token = self.stream.expect("lparen") args = [] kwargs = [] - dyn_args = dyn_kwargs = None + dyn_args = None + dyn_kwargs = None require_comma = False - def ensure(expr): + def ensure(expr: bool) -> None: if not expr: self.fail("invalid syntax for function call expression", token.lineno) while 
self.stream.current.type != "rparen": if require_comma: self.stream.expect("comma") + # support for trailing comma if self.stream.current.type == "rparen": break + if self.stream.current.type == "mul": ensure(dyn_args is None and dyn_kwargs is None) next(self.stream) @@ -819,13 +904,20 @@ args.append(self.parse_expression()) require_comma = True - self.stream.expect("rparen") - if node is None: - return args, kwargs, dyn_args, dyn_kwargs + self.stream.expect("rparen") + return args, kwargs, dyn_args, dyn_kwargs + + def parse_call(self, node: nodes.Expr) -> nodes.Call: + # The lparen will be expected in parse_call_args, but the lineno + # needs to be recorded before the stream is advanced. + token = self.stream.current + args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno) - def parse_filter(self, node, start_inline=False): + def parse_filter( + self, node: t.Optional[nodes.Expr], start_inline: bool = False + ) -> t.Optional[nodes.Expr]: while self.stream.current.type == "pipe" or start_inline: if not start_inline: next(self.stream) @@ -835,7 +927,7 @@ next(self.stream) name += "." + self.stream.expect("name").value if self.stream.current.type == "lparen": - args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None) + args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() else: args = [] kwargs = [] @@ -846,7 +938,7 @@ start_inline = False return node - def parse_test(self, node): + def parse_test(self, node: nodes.Expr) -> nodes.Expr: token = next(self.stream) if self.stream.current.test("name:not"): next(self.stream) @@ -860,8 +952,8 @@ dyn_args = dyn_kwargs = None kwargs = [] if self.stream.current.type == "lparen": - args, kwargs, dyn_args, dyn_kwargs = self.parse_call(None) - elif self.stream.current.type in ( + args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() + elif self.stream.current.type in { "name", "string", "integer", @@ -869,7 +961,7 @@ "lparen", "lbracket", "lbrace", - ) and not self.stream.current.test_any("name:else", "name:or", "name:and"): + } and not self.stream.current.test_any("name:else", "name:or", "name:and"): if self.stream.current.test("name:is"): self.fail("You cannot chain multiple tests with is") arg_node = self.parse_primary() @@ -884,15 +976,17 @@ node = nodes.Not(node, lineno=token.lineno) return node - def subparse(self, end_tokens=None): - body = [] - data_buffer = [] + def subparse( + self, end_tokens: t.Optional[t.Tuple[str, ...]] = None + ) -> t.List[nodes.Node]: + body: t.List[nodes.Node] = [] + data_buffer: t.List[nodes.Node] = [] add_data = data_buffer.append if end_tokens is not None: self._end_token_stack.append(end_tokens) - def flush_data(): + def flush_data() -> None: if data_buffer: lineno = data_buffer[0].lineno body.append(nodes.Output(data_buffer[:], lineno=lineno)) @@ -929,10 +1023,9 @@ finally: if end_tokens is not None: self._end_token_stack.pop() - return body - def parse(self): + def parse(self) -> nodes.Template: """Parse the whole template into a `Template` node.""" result = nodes.Template(self.subparse(), lineno=1) result.set_environment(self.environment)
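A minimal sketch of the required-block rule enforced by the check added above, assuming the jinja2 3.x API vendored by this roll; the template strings are illustrative only:

.. code-block:: python

    from jinja2 import Environment, TemplateSyntaxError

    env = Environment()

    # A "required" block may only contain whitespace or comments; any other
    # content is rejected at parse time with the message added above.
    try:
        env.parse("{% block title required %}not allowed{% endblock %}")
    except TemplateSyntaxError as exc:
        print(exc)  # Required blocks can only contain comments or whitespace

    # Whitespace and comments are still accepted.
    env.parse("{% block title required %}  {# filled by child templates #}{% endblock %}")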
diff --git a/third_party/jinja2/patches/0001-jinja2-make-compiled-template-deterministic-for-pyth.patch b/third_party/jinja2/patches/0001-jinja2-make-compiled-template-deterministic-for-pyth.patch
deleted file mode 100644
index 85dd20d..0000000
--- a/third_party/jinja2/patches/0001-jinja2-make-compiled-template-deterministic-for-pyth.patch
+++ /dev/null
@@ -1,30 +0,0 @@ -From: Takuto Ikuta <tikuta@chromium.org> -Date: Mon, 24 May 2021 17:09:21 +0900 -Subject: [PATCH] jinja2: make compiled template deterministic for python3 - -set() doesn't have deterministic iteration order, so need this to -have deterministic output from jinja2. - -Bug: 1194274 - -diff -Naur a/compiler.py b/compiler.py ---- a/compiler.py 2021-10-05 23:41:45.774333282 +0900 -+++ b/compiler.py 2021-10-06 16:46:08.082078686 +0900 -@@ -468,7 +468,7 @@ - visitor.visit(node) - for dependency in "filters", "tests": - mapping = getattr(self, dependency) -- for name in getattr(visitor, dependency): -+ for name in sorted(getattr(visitor, dependency)): - if name not in mapping: - mapping[name] = self.temporary_identifier() - self.writeline( -@@ -612,7 +612,7 @@ - def dump_local_context(self, frame): - return "{%s}" % ", ".join( - "%r: %s" % (name, target) -- for name, target in iteritems(frame.symbols.dump_stores()) -+ for name, target in sorted(iteritems(frame.symbols.dump_stores())) - ) - - def write_commons(self):
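The rationale in the deleted patch (set iteration order is not deterministic, so the generated module text could differ between runs) can be sketched as follows; the names below are hypothetical and only illustrate the effect the sorted() calls guarded against:

.. code-block:: python

    # Hypothetical illustration: without sorting, a set of filter names can
    # map to temporary identifiers in a different order on each run (string
    # hashing is randomized), so the emitted source text would differ even
    # for an identical template. Sorting first keeps the mapping stable.
    names = {"upper", "trim", "lower"}
    unstable = {name: f"t_{i}" for i, name in enumerate(names)}
    stable = {name: f"t_{i}" for i, name in enumerate(sorted(names))}
    print(stable)  # always {'lower': 't_0', 'trim': 't_1', 'upper': 't_2'}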
diff --git a/third_party/jinja2/py.typed b/third_party/jinja2/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/third_party/jinja2/py.typed
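py.typed is the PEP 561 marker file; with it in place, type checkers consume the inline annotations added throughout this roll. A minimal sketch of what that enables, assuming the annotated 3.x API:

.. code-block:: python

    from jinja2 import Environment

    env = Environment()
    template = env.from_string("Hello {{ name }}!")
    # With py.typed shipped, mypy checks this assignment against the inline
    # annotations (Template.render() is annotated to return str in 3.x).
    greeting: str = template.render(name="typing")
    print(greeting)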
diff --git a/third_party/jinja2/runtime.py b/third_party/jinja2/runtime.py
index 3ad7968..985842b 100644
--- a/third_party/jinja2/runtime.py
+++ b/third_party/jinja2/runtime.py
@@ -1,32 +1,45 @@ -# -*- coding: utf-8 -*- """The runtime functions and state used by compiled templates.""" +import functools import sys +import typing as t +from collections import abc from itertools import chain -from types import MethodType from markupsafe import escape # noqa: F401 from markupsafe import Markup -from markupsafe import soft_unicode +from markupsafe import soft_str -from ._compat import abc -from ._compat import imap -from ._compat import implements_iterator -from ._compat import implements_to_string -from ._compat import iteritems -from ._compat import PY2 -from ._compat import string_types -from ._compat import text_type -from ._compat import with_metaclass +from .async_utils import auto_aiter +from .async_utils import auto_await # noqa: F401 from .exceptions import TemplateNotFound # noqa: F401 from .exceptions import TemplateRuntimeError # noqa: F401 from .exceptions import UndefinedError from .nodes import EvalContext +from .utils import _PassArg from .utils import concat -from .utils import evalcontextfunction from .utils import internalcode from .utils import missing from .utils import Namespace # noqa: F401 from .utils import object_type_repr +from .utils import pass_eval_context + +V = t.TypeVar("V") +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +if t.TYPE_CHECKING: + import logging + import typing_extensions as te + from .environment import Environment + + class LoopRenderFunc(te.Protocol): + def __call__( + self, + reciter: t.Iterable[V], + loop_render_func: "LoopRenderFunc", + depth: int = 0, + ) -> str: + ... + # these variables are exported to the template runtime exported = [ @@ -36,54 +49,54 @@ "Markup", "TemplateRuntimeError", "missing", - "concat", "escape", "markup_join", - "unicode_join", - "to_string", + "str_join", "identity", "TemplateNotFound", "Namespace", "Undefined", + "internalcode", +] +async_exported = [ + "AsyncLoopContext", + "auto_aiter", + "auto_await", ] -#: the name of the function that is used to convert something into -#: a string. We can just use the text type here. -to_string = text_type - -def identity(x): +def identity(x: V) -> V: """Returns its argument. Useful for certain things in the environment. 
""" return x -def markup_join(seq): - """Concatenation that escapes if necessary and converts to unicode.""" +def markup_join(seq: t.Iterable[t.Any]) -> str: + """Concatenation that escapes if necessary and converts to string.""" buf = [] - iterator = imap(soft_unicode, seq) + iterator = map(soft_str, seq) for arg in iterator: buf.append(arg) if hasattr(arg, "__html__"): - return Markup(u"").join(chain(buf, iterator)) + return Markup("").join(chain(buf, iterator)) return concat(buf) -def unicode_join(seq): - """Simple args to unicode conversion and concatenation.""" - return concat(imap(text_type, seq)) +def str_join(seq: t.Iterable[t.Any]) -> str: + """Simple args to string conversion and concatenation.""" + return concat(map(str, seq)) def new_context( - environment, - template_name, - blocks, - vars=None, - shared=None, - globals=None, - locals=None, -): + environment: "Environment", + template_name: t.Optional[str], + blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]], + vars: t.Optional[t.Dict[str, t.Any]] = None, + shared: bool = False, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + locals: t.Optional[t.Mapping[str, t.Any]] = None, +) -> "Context": """Internal helper for context creation.""" if vars is None: vars = {} @@ -96,66 +109,38 @@ # we don't want to modify the dict passed if shared: parent = dict(parent) - for key, value in iteritems(locals): + for key, value in locals.items(): if value is not missing: parent[key] = value - return environment.context_class(environment, parent, template_name, blocks) + return environment.context_class( + environment, parent, template_name, blocks, globals=globals + ) -class TemplateReference(object): +class TemplateReference: """The `self` in templates.""" - def __init__(self, context): + def __init__(self, context: "Context") -> None: self.__context = context - def __getitem__(self, name): + def __getitem__(self, name: str) -> t.Any: blocks = self.__context.blocks[name] return BlockReference(name, self.__context, blocks, 0) - def __repr__(self): - return "<%s %r>" % (self.__class__.__name__, self.__context.name) + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.__context.name!r}>" -def _get_func(x): - return getattr(x, "__func__", x) +def _dict_method_all(dict_method: F) -> F: + @functools.wraps(dict_method) + def f_all(self: "Context") -> t.Any: + return dict_method(self.get_all()) + + return t.cast(F, f_all) -class ContextMeta(type): - def __new__(mcs, name, bases, d): - rv = type.__new__(mcs, name, bases, d) - if bases == (): - return rv - - resolve = _get_func(rv.resolve) - default_resolve = _get_func(Context.resolve) - resolve_or_missing = _get_func(rv.resolve_or_missing) - default_resolve_or_missing = _get_func(Context.resolve_or_missing) - - # If we have a changed resolve but no changed default or missing - # resolve we invert the call logic. - if ( - resolve is not default_resolve - and resolve_or_missing is default_resolve_or_missing - ): - rv._legacy_resolve_mode = True - elif ( - resolve is default_resolve - and resolve_or_missing is default_resolve_or_missing - ): - rv._fast_resolve_mode = True - - return rv - - -def resolve_or_missing(context, key, missing=missing): - if key in context.vars: - return context.vars[key] - if key in context.parent: - return context.parent[key] - return missing - - -class Context(with_metaclass(ContextMeta)): +@abc.Mapping.register +class Context: """The template context holds the variables of a template. 
It stores the values passed to the template and also the names the template exports. Creating instances is neither supported nor useful as it's created @@ -165,7 +150,7 @@ The context is immutable. Modifications on :attr:`parent` **must not** happen and modifications on :attr:`vars` are allowed from generated template code only. Template filters and global functions marked as - :func:`contextfunction`\\s get the active context passed as first argument + :func:`pass_context` get the active context passed as first argument and are allowed to access the context read-only. The template context supports read only dict operations (`get`, @@ -175,30 +160,30 @@ :class:`Undefined` object for missing variables. """ - # XXX: we want to eventually make this be a deprecation warning and - # remove it. - _legacy_resolve_mode = False - _fast_resolve_mode = False - - def __init__(self, environment, parent, name, blocks): + def __init__( + self, + environment: "Environment", + parent: t.Dict[str, t.Any], + name: t.Optional[str], + blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]], + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ): self.parent = parent - self.vars = {} - self.environment = environment + self.vars: t.Dict[str, t.Any] = {} + self.environment: "Environment" = environment self.eval_ctx = EvalContext(self.environment, name) - self.exported_vars = set() + self.exported_vars: t.Set[str] = set() self.name = name + self.globals_keys = set() if globals is None else set(globals) # create the initial mapping of blocks. Whenever template inheritance # takes place the runtime will update this mapping with the new blocks # from the template. - self.blocks = dict((k, [v]) for k, v in iteritems(blocks)) + self.blocks = {k: [v] for k, v in blocks.items()} - # In case we detect the fast resolve mode we can set up an alias - # here that bypasses the legacy code logic. - if self._fast_resolve_mode: - self.resolve_or_missing = MethodType(resolve_or_missing, self) - - def super(self, name, current): + def super( + self, name: str, current: t.Callable[["Context"], t.Iterator[str]] + ) -> t.Union["BlockReference", "Undefined"]: """Render a parent block.""" try: blocks = self.blocks[name] @@ -206,47 +191,62 @@ blocks[index] except LookupError: return self.environment.undefined( - "there is no parent block called %r." % name, name="super" + f"there is no parent block called {name!r}.", name="super" ) return BlockReference(name, self, blocks, index) - def get(self, key, default=None): - """Returns an item from the template context, if it doesn't exist - `default` is returned. + def get(self, key: str, default: t.Any = None) -> t.Any: + """Look up a variable by name, or return a default if the key is + not found. + + :param key: The variable name to look up. + :param default: The value to return if the key is not found. """ try: return self[key] except KeyError: return default - def resolve(self, key): - """Looks up a variable like `__getitem__` or `get` but returns an - :class:`Undefined` object with the name of the name looked up. + def resolve(self, key: str) -> t.Union[t.Any, "Undefined"]: + """Look up a variable by name, or return an :class:`Undefined` + object if the key is not found. + + If you need to add custom behavior, override + :meth:`resolve_or_missing`, not this method. The various lookup + functions use that method, not this one. + + :param key: The variable name to look up. 
""" - if self._legacy_resolve_mode: - rv = resolve_or_missing(self, key) - else: - rv = self.resolve_or_missing(key) + rv = self.resolve_or_missing(key) + if rv is missing: return self.environment.undefined(name=key) + return rv - def resolve_or_missing(self, key): - """Resolves a variable like :meth:`resolve` but returns the - special `missing` value if it cannot be found. + def resolve_or_missing(self, key: str) -> t.Any: + """Look up a variable by name, or return a ``missing`` sentinel + if the key is not found. + + Override this method to add custom lookup behavior. + :meth:`resolve`, :meth:`get`, and :meth:`__getitem__` use this + method. Don't call this method directly. + + :param key: The variable name to look up. """ - if self._legacy_resolve_mode: - rv = self.resolve(key) - if isinstance(rv, Undefined): - rv = missing - return rv - return resolve_or_missing(self, key) + if key in self.vars: + return self.vars[key] - def get_exported(self): + if key in self.parent: + return self.parent[key] + + return missing + + def get_exported(self) -> t.Dict[str, t.Any]: """Get a new dict with the exported variables.""" - return dict((k, self.vars[k]) for k in self.exported_vars) + return {k: self.vars[k] for k in self.exported_vars} - def get_all(self): + def get_all(self) -> t.Dict[str, t.Any]: """Return the complete context as dict including the exported variables. For optimizations reasons this might not return an actual copy so be careful with using it. @@ -258,44 +258,51 @@ return dict(self.parent, **self.vars) @internalcode - def call(__self, __obj, *args, **kwargs): # noqa: B902 + def call( + __self, __obj: t.Callable, *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Union[t.Any, "Undefined"]: """Call the callable with the arguments and keyword arguments provided but inject the active context or environment as first - argument if the callable is a :func:`contextfunction` or - :func:`environmentfunction`. + argument if the callable has :func:`pass_context` or + :func:`pass_environment`. 
""" if __debug__: __traceback_hide__ = True # noqa # Allow callable classes to take a context - if hasattr(__obj, "__call__"): # noqa: B004 - fn = __obj.__call__ - for fn_type in ( - "contextfunction", - "evalcontextfunction", - "environmentfunction", - ): - if hasattr(fn, fn_type): - __obj = fn - break + if ( + hasattr(__obj, "__call__") # noqa: B004 + and _PassArg.from_obj(__obj.__call__) is not None # type: ignore + ): + __obj = __obj.__call__ # type: ignore - if callable(__obj): - if getattr(__obj, "contextfunction", False) is True: - args = (__self,) + args - elif getattr(__obj, "evalcontextfunction", False) is True: - args = (__self.eval_ctx,) + args - elif getattr(__obj, "environmentfunction", False) is True: - args = (__self.environment,) + args + pass_arg = _PassArg.from_obj(__obj) + + if pass_arg is _PassArg.context: + # the active context should have access to variables set in + # loops and blocks without mutating the context itself + if kwargs.get("_loop_vars"): + __self = __self.derived(kwargs["_loop_vars"]) + if kwargs.get("_block_vars"): + __self = __self.derived(kwargs["_block_vars"]) + args = (__self,) + args + elif pass_arg is _PassArg.eval_context: + args = (__self.eval_ctx,) + args + elif pass_arg is _PassArg.environment: + args = (__self.environment,) + args + + kwargs.pop("_block_vars", None) + kwargs.pop("_loop_vars", None) + try: return __obj(*args, **kwargs) except StopIteration: return __self.environment.undefined( - "value was undefined because " - "a callable raised a " - "StopIteration exception" + "value was undefined because a callable raised a" + " StopIteration exception" ) - def derived(self, locals=None): + def derived(self, locals: t.Optional[t.Dict[str, t.Any]] = None) -> "Context": """Internal helper function to create a derived context. This is used in situations where the system needs a new context in the same template that is independent. @@ -304,78 +311,79 @@ self.environment, self.name, {}, self.get_all(), True, None, locals ) context.eval_ctx = self.eval_ctx - context.blocks.update((k, list(v)) for k, v in iteritems(self.blocks)) + context.blocks.update((k, list(v)) for k, v in self.blocks.items()) return context - def _all(meth): # noqa: B902 - def proxy(self): - return getattr(self.get_all(), meth)() + keys = _dict_method_all(dict.keys) + values = _dict_method_all(dict.values) + items = _dict_method_all(dict.items) - proxy.__doc__ = getattr(dict, meth).__doc__ - proxy.__name__ = meth - return proxy - - keys = _all("keys") - values = _all("values") - items = _all("items") - - # not available on python 3 - if PY2: - iterkeys = _all("iterkeys") - itervalues = _all("itervalues") - iteritems = _all("iteritems") - del _all - - def __contains__(self, name): + def __contains__(self, name: str) -> bool: return name in self.vars or name in self.parent - def __getitem__(self, key): - """Lookup a variable or raise `KeyError` if the variable is - undefined. + def __getitem__(self, key: str) -> t.Any: + """Look up a variable by name with ``[]`` syntax, or raise a + ``KeyError`` if the key is not found. 
""" item = self.resolve_or_missing(key) + if item is missing: raise KeyError(key) + return item - def __repr__(self): - return "<%s %s of %r>" % ( - self.__class__.__name__, - repr(self.get_all()), - self.name, - ) + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.get_all()!r} of {self.name!r}>" -abc.Mapping.register(Context) - - -class BlockReference(object): +class BlockReference: """One block on a template reference.""" - def __init__(self, name, context, stack, depth): + def __init__( + self, + name: str, + context: "Context", + stack: t.List[t.Callable[["Context"], t.Iterator[str]]], + depth: int, + ) -> None: self.name = name self._context = context self._stack = stack self._depth = depth @property - def super(self): + def super(self) -> t.Union["BlockReference", "Undefined"]: """Super the block.""" if self._depth + 1 >= len(self._stack): return self._context.environment.undefined( - "there is no parent block called %r." % self.name, name="super" + f"there is no parent block called {self.name!r}.", name="super" ) return BlockReference(self.name, self._context, self._stack, self._depth + 1) @internalcode - def __call__(self): - rv = concat(self._stack[self._depth](self._context)) + async def _async_call(self) -> str: + rv = concat( + [x async for x in self._stack[self._depth](self._context)] # type: ignore + ) + if self._context.eval_ctx.autoescape: - rv = Markup(rv) + return Markup(rv) + + return rv + + @internalcode + def __call__(self) -> str: + if self._context.environment.is_async: + return self._async_call() # type: ignore + + rv = concat(self._stack[self._depth](self._context)) + + if self._context.eval_ctx.autoescape: + return Markup(rv) + return rv -@implements_iterator class LoopContext: """A wrapper iterable for dynamic ``for`` loops, with information about the loop and iteration. @@ -384,13 +392,19 @@ #: Current iteration of the loop, starting at 0. index0 = -1 - _length = None - _after = missing - _current = missing - _before = missing - _last_changed_value = missing + _length: t.Optional[int] = None + _after: t.Any = missing + _current: t.Any = missing + _before: t.Any = missing + _last_changed_value: t.Any = missing - def __init__(self, iterable, undefined, recurse=None, depth0=0): + def __init__( + self, + iterable: t.Iterable[V], + undefined: t.Type["Undefined"], + recurse: t.Optional["LoopRenderFunc"] = None, + depth0: int = 0, + ) -> None: """ :param iterable: Iterable to wrap. :param undefined: :class:`Undefined` class to use for next and @@ -407,11 +421,11 @@ self.depth0 = depth0 @staticmethod - def _to_iterator(iterable): + def _to_iterator(iterable: t.Iterable[V]) -> t.Iterator[V]: return iter(iterable) @property - def length(self): + def length(self) -> int: """Length of the iterable. 
If the iterable is a generator or otherwise does not have a @@ -421,7 +435,7 @@ return self._length try: - self._length = len(self._iterable) + self._length = len(self._iterable) # type: ignore except TypeError: iterable = list(self._iterator) self._iterator = self._to_iterator(iterable) @@ -429,21 +443,21 @@ return self._length - def __len__(self): + def __len__(self) -> int: return self.length @property - def depth(self): + def depth(self) -> int: """How many levels deep a recursive loop currently is, starting at 1.""" return self.depth0 + 1 @property - def index(self): + def index(self) -> int: """Current iteration of the loop, starting at 1.""" return self.index0 + 1 @property - def revindex0(self): + def revindex0(self) -> int: """Number of iterations from the end of the loop, ending at 0. Requires calculating :attr:`length`. @@ -451,7 +465,7 @@ return self.length - self.index @property - def revindex(self): + def revindex(self) -> int: """Number of iterations from the end of the loop, ending at 1. Requires calculating :attr:`length`. @@ -459,11 +473,11 @@ return self.length - self.index0 @property - def first(self): + def first(self) -> bool: """Whether this is the first iteration of the loop.""" return self.index0 == 0 - def _peek_next(self): + def _peek_next(self) -> t.Any: """Return the next element in the iterable, or :data:`missing` if the iterable is exhausted. Only peeks one item ahead, caching the result in :attr:`_last` for use in subsequent checks. The @@ -476,7 +490,7 @@ return self._after @property - def last(self): + def last(self) -> bool: """Whether this is the last iteration of the loop. Causes the iterable to advance early. See @@ -486,7 +500,7 @@ return self._peek_next() is missing @property - def previtem(self): + def previtem(self) -> t.Union[t.Any, "Undefined"]: """The item in the previous iteration. Undefined during the first iteration. """ @@ -496,13 +510,13 @@ return self._before @property - def nextitem(self): + def nextitem(self) -> t.Union[t.Any, "Undefined"]: """The item in the next iteration. Undefined during the last iteration. Causes the iterable to advance early. See :func:`itertools.groupby` for issues this can cause. - The :func:`groupby` filter avoids that issue. + The :func:`jinja-filters.groupby` filter avoids that issue. """ rv = self._peek_next() @@ -511,7 +525,7 @@ return rv - def cycle(self, *args): + def cycle(self, *args: V) -> V: """Return a value from the given args, cycling through based on the current :attr:`index0`. @@ -522,7 +536,7 @@ return args[self.index0 % len(args)] - def changed(self, *value): + def changed(self, *value: t.Any) -> bool: """Return ``True`` if previously called with a different value (including when called for the first time). @@ -534,10 +548,10 @@ return False - def __iter__(self): + def __iter__(self) -> "LoopContext": return self - def __next__(self): + def __next__(self) -> t.Tuple[t.Any, "LoopContext"]: if self._after is not missing: rv = self._after self._after = missing @@ -550,7 +564,7 @@ return rv, self @internalcode - def __call__(self, iterable): + def __call__(self, iterable: t.Iterable[V]) -> str: """When iterating over nested data, render the body of the loop recursively with the given inner iterable data. 
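A minimal sketch of the loop helpers LoopContext exposes to templates (index, last, and friends), assuming the jinja2 3.x API; the template is illustrative only:

.. code-block:: python

    from jinja2 import Environment

    tmpl = Environment().from_string(
        "{% for item in items %}"
        "{{ loop.index }}:{{ item }}{{ ', ' if not loop.last }}"
        "{% endfor %}"
    )
    # loop.last peeks one item ahead, as described for _peek_next() above.
    print(tmpl.render(items=["a", "b", "c"]))  # 1:a, 2:b, 3:c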
@@ -563,23 +577,94 @@ return self._recurse(iterable, self._recurse, depth=self.depth) - def __repr__(self): - return "<%s %d/%d>" % (self.__class__.__name__, self.index, self.length) + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.index}/{self.length}>" -class Macro(object): +class AsyncLoopContext(LoopContext): + _iterator: t.AsyncIterator[t.Any] # type: ignore + + @staticmethod + def _to_iterator( # type: ignore + iterable: t.Union[t.Iterable[V], t.AsyncIterable[V]] + ) -> t.AsyncIterator[V]: + return auto_aiter(iterable) + + @property + async def length(self) -> int: # type: ignore + if self._length is not None: + return self._length + + try: + self._length = len(self._iterable) # type: ignore + except TypeError: + iterable = [x async for x in self._iterator] + self._iterator = self._to_iterator(iterable) + self._length = len(iterable) + self.index + (self._after is not missing) + + return self._length + + @property + async def revindex0(self) -> int: # type: ignore + return await self.length - self.index + + @property + async def revindex(self) -> int: # type: ignore + return await self.length - self.index0 + + async def _peek_next(self) -> t.Any: + if self._after is not missing: + return self._after + + try: + self._after = await self._iterator.__anext__() + except StopAsyncIteration: + self._after = missing + + return self._after + + @property + async def last(self) -> bool: # type: ignore + return await self._peek_next() is missing + + @property + async def nextitem(self) -> t.Union[t.Any, "Undefined"]: + rv = await self._peek_next() + + if rv is missing: + return self._undefined("there is no next item") + + return rv + + def __aiter__(self) -> "AsyncLoopContext": + return self + + async def __anext__(self) -> t.Tuple[t.Any, "AsyncLoopContext"]: + if self._after is not missing: + rv = self._after + self._after = missing + else: + rv = await self._iterator.__anext__() + + self.index0 += 1 + self._before = self._current + self._current = rv + return rv, self + + +class Macro: """Wraps a macro function.""" def __init__( self, - environment, - func, - name, - arguments, - catch_kwargs, - catch_varargs, - caller, - default_autoescape=None, + environment: "Environment", + func: t.Callable[..., str], + name: str, + arguments: t.List[str], + catch_kwargs: bool, + catch_varargs: bool, + caller: bool, + default_autoescape: t.Optional[bool] = None, ): self._environment = environment self._func = func @@ -590,13 +675,18 @@ self.catch_varargs = catch_varargs self.caller = caller self.explicit_caller = "caller" in arguments + if default_autoescape is None: - default_autoescape = environment.autoescape + if callable(environment.autoescape): + default_autoescape = environment.autoescape(None) + else: + default_autoescape = environment.autoescape + self._default_autoescape = default_autoescape @internalcode - @evalcontextfunction - def __call__(self, *args, **kwargs): + @pass_eval_context + def __call__(self, *args: t.Any, **kwargs: t.Any) -> str: # This requires a bit of explanation, In the past we used to # decide largely based on compile-time information if a macro is # safe or unsafe. While there was a volatile mode it was largely @@ -656,40 +746,47 @@ elif kwargs: if "caller" in kwargs: raise TypeError( - "macro %r was invoked with two values for " - "the special caller argument. This is " - "most likely a bug." % self.name + f"macro {self.name!r} was invoked with two values for the special" + " caller argument. This is most likely a bug." 
) raise TypeError( - "macro %r takes no keyword argument %r" - % (self.name, next(iter(kwargs))) + f"macro {self.name!r} takes no keyword argument {next(iter(kwargs))!r}" ) if self.catch_varargs: arguments.append(args[self._argument_count :]) elif len(args) > self._argument_count: raise TypeError( - "macro %r takes not more than %d argument(s)" - % (self.name, len(self.arguments)) + f"macro {self.name!r} takes not more than" + f" {len(self.arguments)} argument(s)" ) return self._invoke(arguments, autoescape) - def _invoke(self, arguments, autoescape): - """This method is being swapped out by the async implementation.""" + async def _async_invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str: + rv = await self._func(*arguments) # type: ignore + + if autoescape: + return Markup(rv) + + return rv # type: ignore + + def _invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str: + if self._environment.is_async: + return self._async_invoke(arguments, autoescape) # type: ignore + rv = self._func(*arguments) + if autoescape: rv = Markup(rv) + return rv - def __repr__(self): - return "<%s %s>" % ( - self.__class__.__name__, - self.name is None and "anonymous" or repr(self.name), - ) + def __repr__(self) -> str: + name = "anonymous" if self.name is None else repr(self.name) + return f"<{type(self).__name__} {name}>" -@implements_to_string -class Undefined(object): +class Undefined: """The default undefined type. This undefined type can be printed and iterated over, but every other access will raise an :exc:`UndefinedError`: @@ -711,14 +808,20 @@ "_undefined_exception", ) - def __init__(self, hint=None, obj=missing, name=None, exc=UndefinedError): + def __init__( + self, + hint: t.Optional[str] = None, + obj: t.Any = missing, + name: t.Optional[str] = None, + exc: t.Type[TemplateRuntimeError] = UndefinedError, + ) -> None: self._undefined_hint = hint self._undefined_obj = obj self._undefined_name = name self._undefined_exception = exc @property - def _undefined_message(self): + def _undefined_message(self) -> str: """Build a message about the undefined value based on how it was accessed. """ @@ -726,107 +829,78 @@ return self._undefined_hint if self._undefined_obj is missing: - return "%r is undefined" % self._undefined_name + return f"{self._undefined_name!r} is undefined" - if not isinstance(self._undefined_name, string_types): - return "%s has no element %r" % ( - object_type_repr(self._undefined_obj), - self._undefined_name, + if not isinstance(self._undefined_name, str): + return ( + f"{object_type_repr(self._undefined_obj)} has no" + f" element {self._undefined_name!r}" ) - return "%r has no attribute %r" % ( - object_type_repr(self._undefined_obj), - self._undefined_name, + return ( + f"{object_type_repr(self._undefined_obj)!r} has no" + f" attribute {self._undefined_name!r}" ) @internalcode - def _fail_with_undefined_error(self, *args, **kwargs): + def _fail_with_undefined_error( + self, *args: t.Any, **kwargs: t.Any + ) -> "te.NoReturn": """Raise an :exc:`UndefinedError` when operations are performed on the undefined value. 
""" raise self._undefined_exception(self._undefined_message) @internalcode - def __getattr__(self, name): + def __getattr__(self, name: str) -> t.Any: if name[:2] == "__": raise AttributeError(name) + return self._fail_with_undefined_error() - __add__ = ( - __radd__ - ) = ( - __mul__ - ) = ( - __rmul__ - ) = ( - __div__ - ) = ( - __rdiv__ - ) = ( - __truediv__ - ) = ( - __rtruediv__ - ) = ( - __floordiv__ - ) = ( - __rfloordiv__ - ) = ( - __mod__ - ) = ( - __rmod__ - ) = ( - __pos__ - ) = ( - __neg__ - ) = ( - __call__ - ) = ( - __getitem__ - ) = ( - __lt__ - ) = ( - __le__ - ) = ( - __gt__ - ) = ( - __ge__ - ) = ( - __int__ - ) = ( - __float__ - ) = ( - __complex__ - ) = __pow__ = __rpow__ = __sub__ = __rsub__ = _fail_with_undefined_error + __add__ = __radd__ = __sub__ = __rsub__ = _fail_with_undefined_error + __mul__ = __rmul__ = __div__ = __rdiv__ = _fail_with_undefined_error + __truediv__ = __rtruediv__ = _fail_with_undefined_error + __floordiv__ = __rfloordiv__ = _fail_with_undefined_error + __mod__ = __rmod__ = _fail_with_undefined_error + __pos__ = __neg__ = _fail_with_undefined_error + __call__ = __getitem__ = _fail_with_undefined_error + __lt__ = __le__ = __gt__ = __ge__ = _fail_with_undefined_error + __int__ = __float__ = __complex__ = _fail_with_undefined_error + __pow__ = __rpow__ = _fail_with_undefined_error - def __eq__(self, other): + def __eq__(self, other: t.Any) -> bool: return type(self) is type(other) - def __ne__(self, other): + def __ne__(self, other: t.Any) -> bool: return not self.__eq__(other) - def __hash__(self): + def __hash__(self) -> int: return id(type(self)) - def __str__(self): - return u"" + def __str__(self) -> str: + return "" - def __len__(self): + def __len__(self) -> int: return 0 - def __iter__(self): - if 0: - yield None + def __iter__(self) -> t.Iterator[t.Any]: + yield from () - def __nonzero__(self): + async def __aiter__(self) -> t.AsyncIterator[t.Any]: + for _ in (): + yield + + def __bool__(self) -> bool: return False - __bool__ = __nonzero__ - - def __repr__(self): + def __repr__(self) -> str: return "Undefined" -def make_logging_undefined(logger=None, base=None): +def make_logging_undefined( + logger: t.Optional["logging.Logger"] = None, base: t.Type[Undefined] = Undefined +) -> t.Type[Undefined]: """Given a logger object this returns a new undefined class that will log certain failures. It will log iterations and printing. If no logger is given a default logger is created. 
@@ -851,70 +925,39 @@ logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler(sys.stderr)) - if base is None: - base = Undefined - def _log_message(undef): - if undef._undefined_hint is None: - if undef._undefined_obj is missing: - hint = "%s is undefined" % undef._undefined_name - elif not isinstance(undef._undefined_name, string_types): - hint = "%s has no element %s" % ( - object_type_repr(undef._undefined_obj), - undef._undefined_name, - ) - else: - hint = "%s has no attribute %s" % ( - object_type_repr(undef._undefined_obj), - undef._undefined_name, - ) - else: - hint = undef._undefined_hint - logger.warning("Template variable warning: %s", hint) + def _log_message(undef: Undefined) -> None: + logger.warning( # type: ignore + "Template variable warning: %s", undef._undefined_message + ) - class LoggingUndefined(base): - def _fail_with_undefined_error(self, *args, **kwargs): + class LoggingUndefined(base): # type: ignore + __slots__ = () + + def _fail_with_undefined_error( # type: ignore + self, *args: t.Any, **kwargs: t.Any + ) -> "te.NoReturn": try: - return base._fail_with_undefined_error(self, *args, **kwargs) + super()._fail_with_undefined_error(*args, **kwargs) except self._undefined_exception as e: - logger.error("Template variable error: %s", str(e)) + logger.error("Template variable error: %s", e) # type: ignore raise e - def __str__(self): - rv = base.__str__(self) + def __str__(self) -> str: _log_message(self) - return rv + return super().__str__() # type: ignore - def __iter__(self): - rv = base.__iter__(self) + def __iter__(self) -> t.Iterator[t.Any]: _log_message(self) - return rv + return super().__iter__() # type: ignore - if PY2: - - def __nonzero__(self): - rv = base.__nonzero__(self) - _log_message(self) - return rv - - def __unicode__(self): - rv = base.__unicode__(self) - _log_message(self) - return rv - - else: - - def __bool__(self): - rv = base.__bool__(self) - _log_message(self) - return rv + def __bool__(self) -> bool: + _log_message(self) + return super().__bool__() # type: ignore return LoggingUndefined -# No @implements_to_string decorator here because __str__ -# is not overwritten from Undefined in this class. -# This would cause a recursion error in Python 2. class ChainableUndefined(Undefined): """An undefined that is chainable, where both ``__getattr__`` and ``__getitem__`` return itself rather than raising an @@ -933,16 +976,15 @@ __slots__ = () - def __html__(self): - return self.__str__() + def __html__(self) -> str: + return str(self) - def __getattr__(self, _): + def __getattr__(self, _: str) -> "ChainableUndefined": return self - __getitem__ = __getattr__ + __getitem__ = __getattr__ # type: ignore -@implements_to_string class DebugUndefined(Undefined): """An undefined that returns the debug info when printed. 
@@ -959,18 +1001,22 @@ __slots__ = () - def __str__(self): - if self._undefined_hint is None: - if self._undefined_obj is missing: - return u"{{ %s }}" % self._undefined_name - return "{{ no such element: %s[%r] }}" % ( - object_type_repr(self._undefined_obj), - self._undefined_name, + def __str__(self) -> str: + if self._undefined_hint: + message = f"undefined value printed: {self._undefined_hint}" + + elif self._undefined_obj is missing: + message = self._undefined_name # type: ignore + + else: + message = ( + f"no such element: {object_type_repr(self._undefined_obj)}" + f"[{self._undefined_name!r}]" ) - return u"{{ undefined value printed: %s }}" % self._undefined_hint + + return f"{{{{ {message} }}}}" -@implements_to_string class StrictUndefined(Undefined): """An undefined that barks on print and iteration as well as boolean tests and all kinds of comparisons. In other words: you can do nothing @@ -992,17 +1038,13 @@ """ __slots__ = () - __iter__ = ( - __str__ - ) = ( - __len__ - ) = ( - __nonzero__ - ) = __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error + __iter__ = __str__ = __len__ = Undefined._fail_with_undefined_error + __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error + __contains__ = Undefined._fail_with_undefined_error -# remove remaining slots attributes, after the metaclass did the magic they -# are unneeded and irritating as they contain wrong data for the subclasses. +# Remove slots attributes, after the metaclass is applied they are +# unneeded and contain wrong data for subclasses. del ( Undefined.__slots__, ChainableUndefined.__slots__,
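A minimal sketch of the pass_context / pass_environment decorators that replace the old contextfunction family and drive the _PassArg dispatch in Context.call above; the function names are illustrative only:

.. code-block:: python

    from jinja2 import Environment, pass_context, pass_environment

    env = Environment()

    @pass_context
    def greet(ctx, punctuation="!"):
        # Receives the active Context as its first argument.
        return f"Hello {ctx.get('name', 'stranger')}{punctuation}"

    @pass_environment
    def first_filters(e, limit=3):
        # Receives the Environment instead of the Context.
        return ", ".join(sorted(e.filters)[:limit])

    env.globals["greet"] = greet
    env.globals["first_filters"] = first_filters

    print(env.from_string("{{ greet() }}").render(name="runtime"))
    print(env.from_string("{{ first_filters() }}").render())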
diff --git a/third_party/jinja2/sandbox.py b/third_party/jinja2/sandbox.py
index cfd7993..06d7414 100644
--- a/third_party/jinja2/sandbox.py
+++ b/third_party/jinja2/sandbox.py
@@ -1,42 +1,32 @@ -# -*- coding: utf-8 -*- """A sandbox layer that ensures unsafe operations cannot be performed. Useful when the template itself comes from an untrusted source. """ import operator import types -import warnings +import typing as t +from _string import formatter_field_name_split # type: ignore +from collections import abc from collections import deque from string import Formatter from markupsafe import EscapeFormatter from markupsafe import Markup -from ._compat import abc -from ._compat import PY2 -from ._compat import range_type -from ._compat import string_types from .environment import Environment from .exceptions import SecurityError +from .runtime import Context +from .runtime import Undefined + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) #: maximum number of items a range may produce MAX_RANGE = 100000 -#: attributes of function objects that are considered unsafe. -if PY2: - UNSAFE_FUNCTION_ATTRIBUTES = { - "func_closure", - "func_code", - "func_dict", - "func_defaults", - "func_globals", - } -else: - # On versions > python 2 the special attributes on functions are gone, - # but they remain on methods and generators for whatever reason. - UNSAFE_FUNCTION_ATTRIBUTES = set() +#: Unsafe function attributes. +UNSAFE_FUNCTION_ATTRIBUTES: t.Set[str] = set() -#: unsafe method attributes. function attributes are unsafe for methods too -UNSAFE_METHOD_ATTRIBUTES = {"im_class", "im_func", "im_self"} +#: Unsafe method attributes. Function attributes are unsafe for methods too. +UNSAFE_METHOD_ATTRIBUTES: t.Set[str] = set() #: unsafe generator attributes. UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"} @@ -47,41 +37,9 @@ #: unsafe attributes on async generators UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"} -# make sure we don't warn in python 2.6 about stuff we don't care about -warnings.filterwarnings( - "ignore", "the sets module", DeprecationWarning, module=__name__ -) - -_mutable_set_types = (set,) -_mutable_mapping_types = (dict,) -_mutable_sequence_types = (list,) - -# on python 2.x we can register the user collection types -try: - from UserDict import UserDict, DictMixin - from UserList import UserList - - _mutable_mapping_types += (UserDict, DictMixin) - _mutable_set_types += (UserList,) -except ImportError: - pass - -# if sets is still available, register the mutable set from there as well -try: - from sets import Set - - _mutable_set_types += (Set,) -except ImportError: - pass - -#: register Python 2.6 abstract base classes -_mutable_set_types += (abc.MutableSet,) -_mutable_mapping_types += (abc.MutableMapping,) -_mutable_sequence_types += (abc.MutableSequence,) - -_mutable_spec = ( +_mutable_spec: t.Tuple[t.Tuple[t.Type, t.FrozenSet[str]], ...] = ( ( - _mutable_set_types, + abc.MutableSet, frozenset( [ "add", @@ -96,11 +54,11 @@ ), ), ( - _mutable_mapping_types, + abc.MutableMapping, frozenset(["clear", "pop", "popitem", "setdefault", "update"]), ), ( - _mutable_sequence_types, + abc.MutableSequence, frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]), ), ( @@ -122,76 +80,49 @@ ) -class _MagicFormatMapping(abc.Mapping): - """This class implements a dummy wrapper to fix a bug in the Python - standard library for string formatting. - - See https://bugs.python.org/issue13598 for information about why - this is necessary. 
- """ - - def __init__(self, args, kwargs): - self._args = args - self._kwargs = kwargs - self._last_index = 0 - - def __getitem__(self, key): - if key == "": - idx = self._last_index - self._last_index += 1 - try: - return self._args[idx] - except LookupError: - pass - key = str(idx) - return self._kwargs[key] - - def __iter__(self): - return iter(self._kwargs) - - def __len__(self): - return len(self._kwargs) - - -def inspect_format_method(callable): +def inspect_format_method(callable: t.Callable) -> t.Optional[str]: if not isinstance( callable, (types.MethodType, types.BuiltinMethodType) ) or callable.__name__ not in ("format", "format_map"): return None + obj = callable.__self__ - if isinstance(obj, string_types): + + if isinstance(obj, str): return obj + return None -def safe_range(*args): + +def safe_range(*args: int) -> range: """A range that can't generate ranges with a length of more than MAX_RANGE items. """ - rng = range_type(*args) + rng = range(*args) if len(rng) > MAX_RANGE: raise OverflowError( "Range too big. The sandbox blocks ranges larger than" - " MAX_RANGE (%d)." % MAX_RANGE + f" MAX_RANGE ({MAX_RANGE})." ) return rng -def unsafe(f): +def unsafe(f: F) -> F: """Marks a function or method as unsafe. - :: + .. code-block: python @unsafe def delete(self): pass """ - f.unsafe_callable = True + f.unsafe_callable = True # type: ignore return f -def is_internal_attribute(obj, attr): +def is_internal_attribute(obj: t.Any, attr: str) -> bool: """Test if the attribute given is an internal python attribute. For example this function returns `True` for the `func_code` attribute of python objects. This is useful if the environment method @@ -228,12 +159,10 @@ return attr.startswith("__") -def modifies_known_mutable(obj, attr): +def modifies_known_mutable(obj: t.Any, attr: str) -> bool: """This function checks if an attribute on a builtin mutable object - (list, dict, set or deque) would modify it if called. It also supports - the "user"-versions of the objects (`sets.Set`, `UserDict.*` etc.) and - with Python 2.6 onwards the abstract base classes `MutableSet`, - `MutableMapping`, and `MutableSequence`. + (list, dict, set or deque) or the corresponding ABCs would modify it + if called. >>> modifies_known_mutable({}, "clear") True @@ -244,8 +173,7 @@ >>> modifies_known_mutable([], "index") False - If called with an unsupported object (such as unicode) `False` is - returned. + If called with an unsupported object, ``False`` is returned. >>> modifies_known_mutable("foo", "upper") False @@ -272,7 +200,7 @@ #: default callback table for the binary operators. A copy of this is #: available on each instance of a sandboxed environment as #: :attr:`binop_table` - default_binop_table = { + default_binop_table: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { "+": operator.add, "-": operator.sub, "*": operator.mul, @@ -285,7 +213,10 @@ #: default callback table for the unary operators. A copy of this is #: available on each instance of a sandboxed environment as #: :attr:`unop_table` - default_unop_table = {"+": operator.pos, "-": operator.neg} + default_unop_table: t.Dict[str, t.Callable[[t.Any], t.Any]] = { + "+": operator.pos, + "-": operator.neg, + } #: a set of binary operators that should be intercepted. Each operator #: that is added to this set (empty by default) is delegated to the @@ -301,7 +232,7 @@ #: interested in. #: #: .. 
versionadded:: 2.6 - intercepted_binops = frozenset() + intercepted_binops: t.FrozenSet[str] = frozenset() #: a set of unary operators that should be intercepted. Each operator #: that is added to this set (empty by default) is delegated to the @@ -316,32 +247,15 @@ #: interested in. #: #: .. versionadded:: 2.6 - intercepted_unops = frozenset() + intercepted_unops: t.FrozenSet[str] = frozenset() - def intercept_unop(self, operator): - """Called during template compilation with the name of a unary - operator to check if it should be intercepted at runtime. If this - method returns `True`, :meth:`call_unop` is executed for this unary - operator. The default implementation of :meth:`call_unop` will use - the :attr:`unop_table` dictionary to perform the operator with the - same logic as the builtin one. - - The following unary operators are interceptable: ``+`` and ``-`` - - Intercepted calls are always slower than the native operator call, - so make sure only to intercept the ones you are interested in. - - .. versionadded:: 2.6 - """ - return False - - def __init__(self, *args, **kwargs): - Environment.__init__(self, *args, **kwargs) + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + super().__init__(*args, **kwargs) self.globals["range"] = safe_range self.binop_table = self.default_binop_table.copy() self.unop_table = self.default_unop_table.copy() - def is_safe_attribute(self, obj, attr, value): + def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool: """The sandboxed environment will call this method to check if the attribute of an object is safe to access. Per default all attributes starting with an underscore are considered private as well as the @@ -350,17 +264,20 @@ """ return not (attr.startswith("_") or is_internal_attribute(obj, attr)) - def is_safe_callable(self, obj): - """Check if an object is safely callable. Per default a function is - considered safe unless the `unsafe_callable` attribute exists and is - True. Override this method to alter the behavior, but this won't - affect the `unsafe` decorator from this module. + def is_safe_callable(self, obj: t.Any) -> bool: + """Check if an object is safely callable. By default callables + are considered safe unless decorated with :func:`unsafe`. + + This also recognizes the Django convention of setting + ``func.alters_data = True``. """ return not ( getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False) ) - def call_binop(self, context, operator, left, right): + def call_binop( + self, context: Context, operator: str, left: t.Any, right: t.Any + ) -> t.Any: """For intercepted binary operator calls (:meth:`intercepted_binops`) this function is executed instead of the builtin operator. This can be used to fine tune the behavior of certain operators. @@ -369,7 +286,7 @@ """ return self.binop_table[operator](left, right) - def call_unop(self, context, operator, arg): + def call_unop(self, context: Context, operator: str, arg: t.Any) -> t.Any: """For intercepted unary operator calls (:meth:`intercepted_unops`) this function is executed instead of the builtin operator. This can be used to fine tune the behavior of certain operators. 
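A minimal sketch of the interception hooks described above (intercepted_binops plus call_binop), assuming the jinja2 3.x sandbox API; the subclass name is hypothetical:

.. code-block:: python

    import typing as t

    from jinja2.exceptions import SecurityError
    from jinja2.runtime import Context
    from jinja2.sandbox import SandboxedEnvironment

    class NoPowerEnvironment(SandboxedEnvironment):
        # Route ** through call_binop() instead of the builtin operator.
        intercepted_binops = frozenset(["**"])

        def call_binop(
            self, context: Context, operator: str, left: t.Any, right: t.Any
        ) -> t.Any:
            if operator == "**":
                raise SecurityError("the power operator is not allowed here")
            return super().call_binop(context, operator, left, right)

    env = NoPowerEnvironment()
    try:
        env.from_string("{{ 2 ** 10 }}").render()
    except SecurityError as exc:
        print(exc)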
@@ -378,12 +295,14 @@ """ return self.unop_table[operator](arg) - def getitem(self, obj, argument): + def getitem( + self, obj: t.Any, argument: t.Union[str, t.Any] + ) -> t.Union[t.Any, Undefined]: """Subscribe an object from sandboxed code.""" try: return obj[argument] except (TypeError, LookupError): - if isinstance(argument, string_types): + if isinstance(argument, str): try: attr = str(argument) except Exception: @@ -399,7 +318,7 @@ return self.unsafe_undefined(obj, argument) return self.undefined(obj=obj, name=argument) - def getattr(self, obj, attribute): + def getattr(self, obj: t.Any, attribute: str) -> t.Union[t.Any, Undefined]: """Subscribe an object from sandboxed code and prefer the attribute. The attribute passed *must* be a bytestring. """ @@ -416,40 +335,52 @@ return self.unsafe_undefined(obj, attribute) return self.undefined(obj=obj, name=attribute) - def unsafe_undefined(self, obj, attribute): + def unsafe_undefined(self, obj: t.Any, attribute: str) -> Undefined: """Return an undefined object for unsafe attributes.""" return self.undefined( - "access to attribute %r of %r " - "object is unsafe." % (attribute, obj.__class__.__name__), + f"access to attribute {attribute!r} of" + f" {type(obj).__name__!r} object is unsafe.", name=attribute, obj=obj, exc=SecurityError, ) - def format_string(self, s, args, kwargs, format_func=None): + def format_string( + self, + s: str, + args: t.Tuple[t.Any, ...], + kwargs: t.Dict[str, t.Any], + format_func: t.Optional[t.Callable] = None, + ) -> str: """If a format call is detected, then this is routed through this method so that our safety sandbox can be used for it. """ + formatter: SandboxedFormatter if isinstance(s, Markup): - formatter = SandboxedEscapeFormatter(self, s.escape) + formatter = SandboxedEscapeFormatter(self, escape=s.escape) else: formatter = SandboxedFormatter(self) if format_func is not None and format_func.__name__ == "format_map": if len(args) != 1 or kwargs: raise TypeError( - "format_map() takes exactly one argument %d given" - % (len(args) + (kwargs is not None)) + "format_map() takes exactly one argument" + f" {len(args) + (kwargs is not None)} given" ) kwargs = args[0] - args = None + args = () - kwargs = _MagicFormatMapping(args, kwargs) rv = formatter.vformat(s, args, kwargs) return type(s)(rv) - def call(__self, __context, __obj, *args, **kwargs): # noqa: B902 + def call( + __self, # noqa: B902 + __context: Context, + __obj: t.Any, + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: """Call an object from sandboxed code.""" fmt = inspect_format_method(__obj) if fmt is not None: @@ -458,7 +389,7 @@ # the double prefixes are to avoid double keyword argument # errors when proxying the call. if not __self.is_safe_callable(__obj): - raise SecurityError("%r is not safely callable" % (__obj,)) + raise SecurityError(f"{__obj!r} is not safely callable") return __context.call(__obj, *args, **kwargs) @@ -468,26 +399,21 @@ `dict` by using the :func:`modifies_known_mutable` function. """ - def is_safe_attribute(self, obj, attr, value): - if not SandboxedEnvironment.is_safe_attribute(self, obj, attr, value): + def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool: + if not super().is_safe_attribute(obj, attr, value): return False + return not modifies_known_mutable(obj, attr) -# This really is not a public API apparently. 
-try: - from _string import formatter_field_name_split -except ImportError: - - def formatter_field_name_split(field_name): - return field_name._formatter_field_name_split() - - -class SandboxedFormatterMixin(object): - def __init__(self, env): +class SandboxedFormatter(Formatter): + def __init__(self, env: Environment, **kwargs: t.Any) -> None: self._env = env + super().__init__(**kwargs) - def get_field(self, field_name, args, kwargs): + def get_field( + self, field_name: str, args: t.Sequence[t.Any], kwargs: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, str]: first, rest = formatter_field_name_split(field_name) obj = self.get_value(first, args, kwargs) for is_attr, i in rest: @@ -498,13 +424,5 @@ return obj, first -class SandboxedFormatter(SandboxedFormatterMixin, Formatter): - def __init__(self, env): - SandboxedFormatterMixin.__init__(self, env) - Formatter.__init__(self) - - -class SandboxedEscapeFormatter(SandboxedFormatterMixin, EscapeFormatter): - def __init__(self, env, escape): - SandboxedFormatterMixin.__init__(self, env) - EscapeFormatter.__init__(self, escape) +class SandboxedEscapeFormatter(SandboxedFormatter, EscapeFormatter): + pass
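The sandbox.py hunks above are mostly annotation-only, but they touch the hooks subclasses are meant to override: intercepted_binops/call_binop and is_safe_callable, whose docstring now also notes the Django-style alters_data convention. A minimal sketch of how those hooks combine, assuming the Jinja 3.x sandbox being rolled here; LimitedPowerEnvironment and drop_tables are invented for illustration:

from jinja2.sandbox import SandboxedEnvironment, SecurityError

class LimitedPowerEnvironment(SandboxedEnvironment):
    # Opt in to interception of ** (the set is empty by default).
    intercepted_binops = frozenset(["**"])

    def call_binop(self, context, operator, left, right):
        # Refuse very large exponents instead of evaluating them.
        if operator == "**" and right > 100:
            raise SecurityError("exponent too large")
        return super().call_binop(context, operator, left, right)

env = LimitedPowerEnvironment()
print(env.from_string("{{ 2 ** 8 }}").render())  # 256
try:
    env.from_string("{{ 2 ** 10000 }}").render()
except SecurityError as exc:
    print(exc)  # exponent too large

def drop_tables():
    pass

# The Django convention recognized by is_safe_callable().
drop_tables.alters_data = True
try:
    env.from_string("{{ fn() }}").render(fn=drop_tables)
except SecurityError as exc:
    print(exc)  # ... is not safely callable
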
diff --git a/third_party/jinja2/tests.py b/third_party/jinja2/tests.py index fabd4ce..a467cf0 100644 --- a/third_party/jinja2/tests.py +++ b/third_party/jinja2/tests.py
@@ -1,36 +1,32 @@ -# -*- coding: utf-8 -*- """Built-in template tests used with the ``is`` operator.""" -import decimal import operator -import re +import typing as t +from collections import abc +from numbers import Number -from ._compat import abc -from ._compat import integer_types -from ._compat import string_types -from ._compat import text_type from .runtime import Undefined +from .utils import pass_environment -number_re = re.compile(r"^-?\d+(\.\d+)?$") -regex_type = type(number_re) -test_callable = callable +if t.TYPE_CHECKING: + from .environment import Environment -def test_odd(value): +def test_odd(value: int) -> bool: """Return true if the variable is odd.""" return value % 2 == 1 -def test_even(value): +def test_even(value: int) -> bool: """Return true if the variable is even.""" return value % 2 == 0 -def test_divisibleby(value, num): +def test_divisibleby(value: int, num: int) -> bool: """Check if a variable is divisible by a number.""" return value % num == 0 -def test_defined(value): +def test_defined(value: t.Any) -> bool: """Return true if the variable is defined: .. sourcecode:: jinja @@ -47,17 +43,57 @@ return not isinstance(value, Undefined) -def test_undefined(value): +def test_undefined(value: t.Any) -> bool: """Like :func:`defined` but the other way round.""" return isinstance(value, Undefined) -def test_none(value): +@pass_environment +def test_filter(env: "Environment", value: str) -> bool: + """Check if a filter exists by name. Useful if a filter may be + optionally available. + + .. code-block:: jinja + + {% if 'markdown' is filter %} + {{ value | markdown }} + {% else %} + {{ value }} + {% endif %} + + .. versionadded:: 3.0 + """ + return value in env.filters + + +@pass_environment +def test_test(env: "Environment", value: str) -> bool: + """Check if a test exists by name. Useful if a test may be + optionally available. + + .. code-block:: jinja + + {% if 'loud' is test %} + {% if value is loud %} + {{ value|upper }} + {% else %} + {{ value|lower }} + {% endif %} + {% else %} + {{ value }} + {% endif %} + + .. versionadded:: 3.0 + """ + return value in env.tests + + +def test_none(value: t.Any) -> bool: """Return true if the variable is none.""" return value is None -def test_boolean(value): +def test_boolean(value: t.Any) -> bool: """Return true if the object is a boolean value. .. versionadded:: 2.11 @@ -65,7 +101,7 @@ return value is True or value is False -def test_false(value): +def test_false(value: t.Any) -> bool: """Return true if the object is False. .. versionadded:: 2.11 @@ -73,7 +109,7 @@ return value is False -def test_true(value): +def test_true(value: t.Any) -> bool: """Return true if the object is True. .. versionadded:: 2.11 @@ -82,16 +118,16 @@ # NOTE: The existing 'number' test matches booleans and floats -def test_integer(value): +def test_integer(value: t.Any) -> bool: """Return true if the object is an integer. .. versionadded:: 2.11 """ - return isinstance(value, integer_types) and value is not True and value is not False + return isinstance(value, int) and value is not True and value is not False # NOTE: The existing 'number' test matches booleans and integers -def test_float(value): +def test_float(value: t.Any) -> bool: """Return true if the object is a float. .. 
versionadded:: 2.11 @@ -99,22 +135,22 @@ return isinstance(value, float) -def test_lower(value): +def test_lower(value: str) -> bool: """Return true if the variable is lowercased.""" - return text_type(value).islower() + return str(value).islower() -def test_upper(value): +def test_upper(value: str) -> bool: """Return true if the variable is uppercased.""" - return text_type(value).isupper() + return str(value).isupper() -def test_string(value): +def test_string(value: t.Any) -> bool: """Return true if the object is a string.""" - return isinstance(value, string_types) + return isinstance(value, str) -def test_mapping(value): +def test_mapping(value: t.Any) -> bool: """Return true if the object is a mapping (dict etc.). .. versionadded:: 2.6 @@ -122,12 +158,12 @@ return isinstance(value, abc.Mapping) -def test_number(value): +def test_number(value: t.Any) -> bool: """Return true if the variable is a number.""" - return isinstance(value, integer_types + (float, complex, decimal.Decimal)) + return isinstance(value, Number) -def test_sequence(value): +def test_sequence(value: t.Any) -> bool: """Return true if the variable is a sequence. Sequences are variables that are iterable. """ @@ -136,10 +172,11 @@ value.__getitem__ except Exception: return False + return True -def test_sameas(value, other): +def test_sameas(value: t.Any, other: t.Any) -> bool: """Check if an object points to the same memory address than another object: @@ -152,21 +189,22 @@ return value is other -def test_iterable(value): +def test_iterable(value: t.Any) -> bool: """Check if it's possible to iterate over an object.""" try: iter(value) except TypeError: return False + return True -def test_escaped(value): +def test_escaped(value: t.Any) -> bool: """Check if the value is escaped.""" return hasattr(value, "__html__") -def test_in(value, seq): +def test_in(value: t.Any, seq: t.Container) -> bool: """Check if value is in seq. .. versionadded:: 2.10 @@ -180,6 +218,8 @@ "divisibleby": test_divisibleby, "defined": test_defined, "undefined": test_undefined, + "filter": test_filter, + "test": test_test, "none": test_none, "boolean": test_boolean, "false": test_false, @@ -193,7 +233,7 @@ "number": test_number, "sequence": test_sequence, "iterable": test_iterable, - "callable": test_callable, + "callable": callable, "sameas": test_sameas, "escaped": test_escaped, "in": test_in,
diff --git a/third_party/jinja2/utils.py b/third_party/jinja2/utils.py index 6afca81..9b5f5a50 100644 --- a/third_party/jinja2/utils.py +++ b/third_party/jinja2/utils.py
@@ -1,80 +1,98 @@ -# -*- coding: utf-8 -*- +import enum import json import os import re -import warnings +import typing as t +from collections import abc from collections import deque from random import choice from random import randrange -from string import ascii_letters as _letters -from string import digits as _digits from threading import Lock +from types import CodeType +from urllib.parse import quote_from_bytes -from markupsafe import escape -from markupsafe import Markup +import markupsafe -from ._compat import abc -from ._compat import string_types -from ._compat import text_type -from ._compat import url_quote +if t.TYPE_CHECKING: + import typing_extensions as te + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) # special singleton representing missing values for the runtime -missing = type("MissingType", (), {"__repr__": lambda x: "missing"})() +missing: t.Any = type("MissingType", (), {"__repr__": lambda x: "missing"})() -# internal code -internal_code = set() +internal_code: t.MutableSet[CodeType] = set() -concat = u"".join - -_slash_escape = "\\/" not in json.dumps("/") +concat = "".join -def contextfunction(f): - """This decorator can be used to mark a function or method context callable. - A context callable is passed the active :class:`Context` as first argument when - called from the template. This is useful if a function wants to get access - to the context or functions provided on the context object. For example - a function that returns a sorted list of template variables the current - template exports could look like this:: +def pass_context(f: F) -> F: + """Pass the :class:`~jinja2.runtime.Context` as the first argument + to the decorated function when called while rendering a template. - @contextfunction - def get_exported_names(context): - return sorted(context.exported_vars) + Can be used on functions, filters, and tests. + + If only ``Context.eval_context`` is needed, use + :func:`pass_eval_context`. If only ``Context.environment`` is + needed, use :func:`pass_environment`. + + .. versionadded:: 3.0.0 + Replaces ``contextfunction`` and ``contextfilter``. """ - f.contextfunction = True + f.jinja_pass_arg = _PassArg.context # type: ignore return f -def evalcontextfunction(f): - """This decorator can be used to mark a function or method as an eval - context callable. This is similar to the :func:`contextfunction` - but instead of passing the context, an evaluation context object is - passed. For more information about the eval context, see - :ref:`eval-context`. +def pass_eval_context(f: F) -> F: + """Pass the :class:`~jinja2.nodes.EvalContext` as the first argument + to the decorated function when called while rendering a template. + See :ref:`eval-context`. - .. versionadded:: 2.4 + Can be used on functions, filters, and tests. + + If only ``EvalContext.environment`` is needed, use + :func:`pass_environment`. + + .. versionadded:: 3.0.0 + Replaces ``evalcontextfunction`` and ``evalcontextfilter``. """ - f.evalcontextfunction = True + f.jinja_pass_arg = _PassArg.eval_context # type: ignore return f -def environmentfunction(f): - """This decorator can be used to mark a function or method as environment - callable. This decorator works exactly like the :func:`contextfunction` - decorator just that the first argument is the active :class:`Environment` - and not context. +def pass_environment(f: F) -> F: + """Pass the :class:`~jinja2.Environment` as the first argument to + the decorated function when called while rendering a template. 
+ + Can be used on functions, filters, and tests. + + .. versionadded:: 3.0.0 + Replaces ``environmentfunction`` and ``environmentfilter``. """ - f.environmentfunction = True + f.jinja_pass_arg = _PassArg.environment # type: ignore return f -def internalcode(f): +class _PassArg(enum.Enum): + context = enum.auto() + eval_context = enum.auto() + environment = enum.auto() + + @classmethod + def from_obj(cls, obj: F) -> t.Optional["_PassArg"]: + if hasattr(obj, "jinja_pass_arg"): + return obj.jinja_pass_arg # type: ignore + + return None + + +def internalcode(f: F) -> F: """Marks the function as internally used""" internal_code.add(f.__code__) return f -def is_undefined(obj): +def is_undefined(obj: t.Any) -> bool: """Check if the object passed is undefined. This does nothing more than performing an instance check against :class:`Undefined` but looks nicer. This can be used for custom filters or tests that want to react to @@ -91,26 +109,26 @@ return isinstance(obj, Undefined) -def consume(iterable): +def consume(iterable: t.Iterable[t.Any]) -> None: """Consumes an iterable without doing anything with it.""" for _ in iterable: pass -def clear_caches(): +def clear_caches() -> None: """Jinja keeps internal caches for environments and lexers. These are used so that Jinja doesn't have to recreate environments and lexers all the time. Normally you don't have to care about that but if you are measuring memory consumption you may want to clean the caches. """ - from .environment import _spontaneous_environments + from .environment import get_spontaneous_environment from .lexer import _lexer_cache - _spontaneous_environments.clear() + get_spontaneous_environment.cache_clear() _lexer_cache.clear() -def import_string(import_name, silent=False): +def import_string(import_name: str, silent: bool = False) -> t.Any: """Imports an object based on a string. This is useful if you want to use import paths as endpoints or something similar. An import path can be specified either in dotted notation (``xml.sax.saxutils.escape``) @@ -134,7 +152,7 @@ raise -def open_if_exists(filename, mode="rb"): +def open_if_exists(filename: str, mode: str = "rb") -> t.Optional[t.IO]: """Returns a file descriptor for the filename if that file exists, otherwise ``None``. """ @@ -144,7 +162,7 @@ return open(filename, mode) -def object_type_repr(obj): +def object_type_repr(obj: t.Any) -> str: """Returns the name of the object's type. For some recognized singletons the name of the object is returned instead. (For example for `None` and `Ellipsis`). @@ -156,51 +174,104 @@ cls = type(obj) - # __builtin__ in 2.x, builtins in 3.x - if cls.__module__ in ("__builtin__", "builtins"): - name = cls.__name__ - else: - name = cls.__module__ + "." + cls.__name__ + if cls.__module__ == "builtins": + return f"{cls.__name__} object" - return "%s object" % name + return f"{cls.__module__}.{cls.__name__} object" -def pformat(obj, verbose=False): - """Prettyprint an object. Either use the `pretty` library or the - builtin `pprint`. - """ - try: - from pretty import pretty +def pformat(obj: t.Any) -> str: + """Format an object using :func:`pprint.pformat`.""" + from pprint import pformat # type: ignore - return pretty(obj, verbose=verbose) - except ImportError: - from pprint import pformat - - return pformat(obj) + return pformat(obj) -def urlize(text, trim_url_limit=None, rel=None, target=None): - """Converts any URLs in text into clickable links. Works on http://, - https:// and www. links. 
Links can have trailing punctuation (periods, - commas, close-parens) and leading punctuation (opening parens) and - it'll still do the right thing. - - If trim_url_limit is not None, the URLs in link text will be limited - to trim_url_limit characters. - - If nofollow is True, the URLs in link text will get a rel="nofollow" - attribute. - - If target is not None, a target attribute will be added to the link. - """ - trim_url = ( - lambda x, limit=trim_url_limit: limit is not None - and (x[:limit] + (len(x) >= limit and "..." or "")) - or x +_http_re = re.compile( + r""" + ^ + ( + (https?://|www\.) # scheme or www + (([\w%-]+\.)+)? # subdomain + ( + [a-z]{2,63} # basic tld + | + xn--[\w%]{2,59} # idna tld + ) + | + ([\w%-]{2,63}\.)+ # basic domain + (com|net|int|edu|gov|org|info|mil) # basic tld + | + (https?://) # scheme + ( + (([\d]{1,3})(\.[\d]{1,3}){3}) # IPv4 + | + (\[([\da-f]{0,4}:){2}([\da-f]{0,4}:?){1,6}]) # IPv6 + ) ) - words = re.split(r"(\s+)", text_type(escape(text))) - rel_attr = rel and ' rel="%s"' % text_type(escape(rel)) or "" - target_attr = target and ' target="%s"' % escape(target) or "" + (?::[\d]{1,5})? # port + (?:[/?#]\S*)? # path, query, and fragment + $ + """, + re.IGNORECASE | re.VERBOSE, +) +_email_re = re.compile(r"^\S+@\w[\w.-]*\.\w+$") + + +def urlize( + text: str, + trim_url_limit: t.Optional[int] = None, + rel: t.Optional[str] = None, + target: t.Optional[str] = None, + extra_schemes: t.Optional[t.Iterable[str]] = None, +) -> str: + """Convert URLs in text into clickable links. + + This may not recognize links in some situations. Usually, a more + comprehensive formatter, such as a Markdown library, is a better + choice. + + Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email + addresses. Links with trailing punctuation (periods, commas, closing + parentheses) and leading punctuation (opening parentheses) are + recognized excluding the punctuation. Email addresses that include + header fields are not recognized (for example, + ``mailto:address@example.com?cc=copy@example.com``). + + :param text: Original text containing URLs to link. + :param trim_url_limit: Shorten displayed URL values to this length. + :param target: Add the ``target`` attribute to links. + :param rel: Add the ``rel`` attribute to links. + :param extra_schemes: Recognize URLs that start with these schemes + in addition to the default behavior. + + .. versionchanged:: 3.0 + The ``extra_schemes`` parameter was added. + + .. versionchanged:: 3.0 + Generate ``https://`` links for URLs without a scheme. + + .. versionchanged:: 3.0 + The parsing rules were updated. Recognize email addresses with + or without the ``mailto:`` scheme. Validate IP addresses. Ignore + parentheses and brackets in more cases. + """ + if trim_url_limit is not None: + + def trim_url(x: str) -> str: + if len(x) > trim_url_limit: # type: ignore + return f"{x[:trim_url_limit]}..." 
+ + return x + + else: + + def trim_url(x: str) -> str: + return x + + words = re.split(r"(\s+)", str(markupsafe.escape(text))) + rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else "" + target_attr = f' target="{markupsafe.escape(target)}"' if target else "" for i, word in enumerate(words): head, middle, tail = "", word, "" @@ -220,47 +291,57 @@ tail = match.group() middle = middle[: match.start()] - if middle.startswith("www.") or ( - "@" not in middle - and not middle.startswith("http://") - and not middle.startswith("https://") - and len(middle) > 0 - and middle[0] in _letters + _digits - and ( - middle.endswith(".org") - or middle.endswith(".net") - or middle.endswith(".com") - ) - ): - middle = '<a href="http://%s"%s%s>%s</a>' % ( - middle, - rel_attr, - target_attr, - trim_url(middle), - ) + # Prefer balancing parentheses in URLs instead of ignoring a + # trailing character. + for start_char, end_char in ("(", ")"), ("<", ">"), ("<", ">"): + start_count = middle.count(start_char) - if middle.startswith("http://") or middle.startswith("https://"): - middle = '<a href="%s"%s%s>%s</a>' % ( - middle, - rel_attr, - target_attr, - trim_url(middle), - ) + if start_count <= middle.count(end_char): + # Balanced, or lighter on the left + continue - if ( + # Move as many as possible from the tail to balance + for _ in range(min(start_count, tail.count(end_char))): + end_index = tail.index(end_char) + len(end_char) + # Move anything in the tail before the end char too + middle += tail[:end_index] + tail = tail[end_index:] + + if _http_re.match(middle): + if middle.startswith("https://") or middle.startswith("http://"): + middle = ( + f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>' + ) + else: + middle = ( + f'<a href="https://{middle}"{rel_attr}{target_attr}>' + f"{trim_url(middle)}</a>" + ) + + elif middle.startswith("mailto:") and _email_re.match(middle[7:]): + middle = f'<a href="{middle}">{middle[7:]}</a>' + + elif ( "@" in middle and not middle.startswith("www.") and ":" not in middle - and re.match(r"^\S+@\w[\w.-]*\.\w+$", middle) + and _email_re.match(middle) ): - middle = '<a href="mailto:%s">%s</a>' % (middle, middle) + middle = f'<a href="mailto:{middle}">{middle}</a>' - words[i] = head + middle + tail + elif extra_schemes is not None: + for scheme in extra_schemes: + if middle != scheme and middle.startswith(scheme): + middle = f'<a href="{middle}"{rel_attr}{target_attr}>{middle}</a>' - return u"".join(words) + words[i] = f"{head}{middle}{tail}" + + return "".join(words) -def generate_lorem_ipsum(n=5, html=True, min=20, max=100): +def generate_lorem_ipsum( + n: int = 5, html: bool = True, min: int = 20, max: int = 100 +) -> str: """Generate some lorem ipsum for the template.""" from .constants import LOREM_IPSUM_WORDS @@ -297,40 +378,38 @@ p.append(word) # ensure that the paragraph ends with a dot. - p = u" ".join(p) - if p.endswith(","): - p = p[:-1] + "." - elif not p.endswith("."): - p += "." - result.append(p) + p_str = " ".join(p) + + if p_str.endswith(","): + p_str = p_str[:-1] + "." + elif not p_str.endswith("."): + p_str += "." 
+ + result.append(p_str) if not html: - return u"\n\n".join(result) - return Markup(u"\n".join(u"<p>%s</p>" % escape(x) for x in result)) + return "\n\n".join(result) + return markupsafe.Markup( + "\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result) + ) -def unicode_urlencode(obj, charset="utf-8", for_qs=False): +def url_quote(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str: """Quote a string for use in a URL using the given charset. - This function is misnamed, it is a wrapper around - :func:`urllib.parse.quote`. - :param obj: String or bytes to quote. Other types are converted to string then encoded to bytes using the given charset. :param charset: Encode text to bytes using this charset. :param for_qs: Quote "/" and use "+" for spaces. """ - if not isinstance(obj, string_types): - obj = text_type(obj) + if not isinstance(obj, bytes): + if not isinstance(obj, str): + obj = str(obj) - if isinstance(obj, text_type): obj = obj.encode(charset) safe = b"" if for_qs else b"/" - rv = url_quote(obj, safe) - - if not isinstance(rv, text_type): - rv = rv.decode("utf-8") + rv = quote_from_bytes(obj, safe) if for_qs: rv = rv.replace("%20", "+") @@ -338,20 +417,21 @@ return rv -class LRUCache(object): +@abc.MutableMapping.register +class LRUCache: """A simple LRU Cache implementation.""" # this is fast for small capacities (something below 1000) but doesn't # scale. But as long as it's only used as storage for templates this # won't do any harm. - def __init__(self, capacity): + def __init__(self, capacity: int) -> None: self.capacity = capacity - self._mapping = {} - self._queue = deque() + self._mapping: t.Dict[t.Any, t.Any] = {} + self._queue: "te.Deque[t.Any]" = deque() self._postinit() - def _postinit(self): + def _postinit(self) -> None: # alias all queue methods for faster lookup self._popleft = self._queue.popleft self._pop = self._queue.pop @@ -359,35 +439,35 @@ self._wlock = Lock() self._append = self._queue.append - def __getstate__(self): + def __getstate__(self) -> t.Mapping[str, t.Any]: return { "capacity": self.capacity, "_mapping": self._mapping, "_queue": self._queue, } - def __setstate__(self, d): + def __setstate__(self, d: t.Mapping[str, t.Any]) -> None: self.__dict__.update(d) self._postinit() - def __getnewargs__(self): + def __getnewargs__(self) -> t.Tuple: return (self.capacity,) - def copy(self): + def copy(self) -> "LRUCache": """Return a shallow copy of the instance.""" rv = self.__class__(self.capacity) rv._mapping.update(self._mapping) rv._queue.extend(self._queue) return rv - def get(self, key, default=None): + def get(self, key: t.Any, default: t.Any = None) -> t.Any: """Return an item from the cache dict or `default`""" try: return self[key] except KeyError: return default - def setdefault(self, key, default=None): + def setdefault(self, key: t.Any, default: t.Any = None) -> t.Any: """Set `default` if the key is not in the cache otherwise leave unchanged. Return the value of this key. 
""" @@ -397,35 +477,32 @@ self[key] = default return default - def clear(self): + def clear(self) -> None: """Clear the cache.""" - self._wlock.acquire() - try: + with self._wlock: self._mapping.clear() self._queue.clear() - finally: - self._wlock.release() - def __contains__(self, key): + def __contains__(self, key: t.Any) -> bool: """Check if a key exists in this cache.""" return key in self._mapping - def __len__(self): + def __len__(self) -> int: """Return the current size of the cache.""" return len(self._mapping) - def __repr__(self): - return "<%s %r>" % (self.__class__.__name__, self._mapping) + def __repr__(self) -> str: + return f"<{type(self).__name__} {self._mapping!r}>" - def __getitem__(self, key): + def __getitem__(self, key: t.Any) -> t.Any: """Get an item from the cache. Moves the item up so that it has the highest priority then. Raise a `KeyError` if it does not exist. """ - self._wlock.acquire() - try: + with self._wlock: rv = self._mapping[key] + if self._queue[-1] != key: try: self._remove(key) @@ -434,100 +511,54 @@ # when we read, ignore the ValueError that we would # get otherwise. pass - self._append(key) - return rv - finally: - self._wlock.release() - def __setitem__(self, key, value): + self._append(key) + + return rv + + def __setitem__(self, key: t.Any, value: t.Any) -> None: """Sets the value for an item. Moves the item up so that it has the highest priority then. """ - self._wlock.acquire() - try: + with self._wlock: if key in self._mapping: self._remove(key) elif len(self._mapping) == self.capacity: del self._mapping[self._popleft()] + self._append(key) self._mapping[key] = value - finally: - self._wlock.release() - def __delitem__(self, key): + def __delitem__(self, key: t.Any) -> None: """Remove an item from the cache dict. Raise a `KeyError` if it does not exist. """ - self._wlock.acquire() - try: + with self._wlock: del self._mapping[key] + try: self._remove(key) except ValueError: pass - finally: - self._wlock.release() - def items(self): + def items(self) -> t.Iterable[t.Tuple[t.Any, t.Any]]: """Return a list of items.""" result = [(key, self._mapping[key]) for key in list(self._queue)] result.reverse() return result - def iteritems(self): - """Iterate over all items.""" - warnings.warn( - "'iteritems()' will be removed in version 3.0. Use" - " 'iter(cache.items())' instead.", - DeprecationWarning, - stacklevel=2, - ) - return iter(self.items()) - - def values(self): + def values(self) -> t.Iterable[t.Any]: """Return a list of all values.""" return [x[1] for x in self.items()] - def itervalue(self): - """Iterate over all values.""" - warnings.warn( - "'itervalue()' will be removed in version 3.0. Use" - " 'iter(cache.values())' instead.", - DeprecationWarning, - stacklevel=2, - ) - return iter(self.values()) - - def itervalues(self): - """Iterate over all values.""" - warnings.warn( - "'itervalues()' will be removed in version 3.0. Use" - " 'iter(cache.values())' instead.", - DeprecationWarning, - stacklevel=2, - ) - return iter(self.values()) - - def keys(self): + def keys(self) -> t.Iterable[t.Any]: """Return a list of all keys ordered by most recent usage.""" return list(self) - def iterkeys(self): - """Iterate over all keys in the cache dict, ordered by - the most recent usage. - """ - warnings.warn( - "'iterkeys()' will be removed in version 3.0. 
Use" - " 'iter(cache.keys())' instead.", - DeprecationWarning, - stacklevel=2, - ) - return iter(self) - - def __iter__(self): + def __iter__(self) -> t.Iterator[t.Any]: return reversed(tuple(self._queue)) - def __reversed__(self): + def __reversed__(self) -> t.Iterator[t.Any]: """Iterate over the keys in the cache dict, oldest items coming first. """ @@ -536,15 +567,12 @@ __copy__ = copy -abc.MutableMapping.register(LRUCache) - - def select_autoescape( - enabled_extensions=("html", "htm", "xml"), - disabled_extensions=(), - default_for_string=True, - default=False, -): + enabled_extensions: t.Collection[str] = ("html", "htm", "xml"), + disabled_extensions: t.Collection[str] = (), + default_for_string: bool = True, + default: bool = False, +) -> t.Callable[[t.Optional[str]], bool]: """Intelligently sets the initial value of autoescaping based on the filename of the template. This is the recommended way to configure autoescaping if you do not want to write a custom function yourself. @@ -579,10 +607,10 @@ .. versionadded:: 2.9 """ - enabled_patterns = tuple("." + x.lstrip(".").lower() for x in enabled_extensions) - disabled_patterns = tuple("." + x.lstrip(".").lower() for x in disabled_extensions) + enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions) + disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions) - def autoescape(template_name): + def autoescape(template_name: t.Optional[str]) -> bool: if template_name is None: return default_for_string template_name = template_name.lower() @@ -595,37 +623,47 @@ return autoescape -def htmlsafe_json_dumps(obj, dumper=None, **kwargs): - """Works exactly like :func:`dumps` but is safe for use in ``<script>`` - tags. It accepts the same arguments and returns a JSON string. Note that - this is available in templates through the ``|tojson`` filter which will - also mark the result as safe. Due to how this function escapes certain - characters this is safe even if used outside of ``<script>`` tags. +def htmlsafe_json_dumps( + obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any +) -> markupsafe.Markup: + """Serialize an object to a string of JSON with :func:`json.dumps`, + then replace HTML-unsafe characters with Unicode escapes and mark + the result safe with :class:`~markupsafe.Markup`. - The following characters are escaped in strings: + This is available in templates as the ``|tojson`` filter. - - ``<`` - - ``>`` - - ``&`` - - ``'`` + The following characters are escaped: ``<``, ``>``, ``&``, ``'``. - This makes it safe to embed such strings in any place in HTML with the - notable exception of double quoted attributes. In that case single - quote your attributes or HTML escape it in addition. + The returned string is safe to render in HTML documents and + ``<script>`` tags. The exception is in HTML attributes that are + double quoted; either use single quotes or the ``|forceescape`` + filter. + + :param obj: The object to serialize to JSON. + :param dumps: The ``dumps`` function to use. Defaults to + ``env.policies["json.dumps_function"]``, which defaults to + :func:`json.dumps`. + :param kwargs: Extra arguments to pass to ``dumps``. Merged onto + ``env.policies["json.dumps_kwargs"]``. + + .. versionchanged:: 3.0 + The ``dumper`` parameter is renamed to ``dumps``. + + .. 
versionadded:: 2.9 """ - if dumper is None: - dumper = json.dumps - rv = ( - dumper(obj, **kwargs) - .replace(u"<", u"\\u003c") - .replace(u">", u"\\u003e") - .replace(u"&", u"\\u0026") - .replace(u"'", u"\\u0027") + if dumps is None: + dumps = json.dumps + + return markupsafe.Markup( + dumps(obj, **kwargs) + .replace("<", "\\u003c") + .replace(">", "\\u003e") + .replace("&", "\\u0026") + .replace("'", "\\u0027") ) - return Markup(rv) -class Cycler(object): +class Cycler: """Cycle through values by yield them one at a time, then restarting once the end is reached. Available as ``cycler`` in templates. @@ -651,24 +689,24 @@ .. versionadded:: 2.1 """ - def __init__(self, *items): + def __init__(self, *items: t.Any) -> None: if not items: raise RuntimeError("at least one item has to be provided") self.items = items self.pos = 0 - def reset(self): + def reset(self) -> None: """Resets the current item to the first item.""" self.pos = 0 @property - def current(self): + def current(self) -> t.Any: """Return the current item. Equivalent to the item that will be returned next time :meth:`next` is called. """ return self.items[self.pos] - def next(self): + def next(self) -> t.Any: """Return the current item, then advance :attr:`current` to the next item. """ @@ -679,59 +717,39 @@ __next__ = next -class Joiner(object): +class Joiner: """A joining helper for templates.""" - def __init__(self, sep=u", "): + def __init__(self, sep: str = ", ") -> None: self.sep = sep self.used = False - def __call__(self): + def __call__(self) -> str: if not self.used: self.used = True - return u"" + return "" return self.sep -class Namespace(object): +class Namespace: """A namespace object that can hold arbitrary attributes. It may be initialized from a dictionary or with keyword arguments.""" - def __init__(*args, **kwargs): # noqa: B902 + def __init__(*args: t.Any, **kwargs: t.Any) -> None: # noqa: B902 self, args = args[0], args[1:] self.__attrs = dict(*args, **kwargs) - def __getattribute__(self, name): + def __getattribute__(self, name: str) -> t.Any: # __class__ is needed for the awaitable check in async mode if name in {"_Namespace__attrs", "__class__"}: return object.__getattribute__(self, name) try: return self.__attrs[name] except KeyError: - raise AttributeError(name) + raise AttributeError(name) from None - def __setitem__(self, name, value): + def __setitem__(self, name: str, value: t.Any) -> None: self.__attrs[name] = value - def __repr__(self): - return "<Namespace %r>" % self.__attrs - - -# does this python version support async for in and async generators? -try: - exec("async def _():\n async for _ in ():\n yield _") - have_async_gen = True -except SyntaxError: - have_async_gen = False - - -def soft_unicode(s): - from markupsafe import soft_unicode - - warnings.warn( - "'jinja2.utils.soft_unicode' will be removed in version 3.0." - " Use 'markupsafe.soft_unicode' instead.", - DeprecationWarning, - stacklevel=2, - ) - return soft_unicode(s) + def __repr__(self) -> str: + return f"<Namespace {self.__attrs!r}>"
diff --git a/third_party/jinja2/visitor.py b/third_party/jinja2/visitor.py index d1365bf..17c6aab 100644 --- a/third_party/jinja2/visitor.py +++ b/third_party/jinja2/visitor.py
@@ -1,11 +1,19 @@ -# -*- coding: utf-8 -*- """API for traversing the AST nodes. Implemented by the compiler and meta introspection. """ +import typing as t + from .nodes import Node +if t.TYPE_CHECKING: + import typing_extensions as te -class NodeVisitor(object): + class VisitCallable(te.Protocol): + def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: + ... + + +class NodeVisitor: """Walks the abstract syntax tree and call visitor functions for every node found. The visitor functions may return values which will be forwarded by the `visit` method. @@ -17,25 +25,26 @@ (return value `None`) the `generic_visit` visitor is used instead. """ - def get_visitor(self, node): + def get_visitor(self, node: Node) -> "t.Optional[VisitCallable]": """Return the visitor function for this node or `None` if no visitor exists for this node. In that case the generic visit function is used instead. """ - method = "visit_" + node.__class__.__name__ - return getattr(self, method, None) + return getattr(self, f"visit_{type(node).__name__}", None) - def visit(self, node, *args, **kwargs): + def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: """Visit a node.""" f = self.get_visitor(node) + if f is not None: return f(node, *args, **kwargs) + return self.generic_visit(node, *args, **kwargs) - def generic_visit(self, node, *args, **kwargs): + def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: """Called if no explicit visitor function exists for a node.""" - for node in node.iter_child_nodes(): - self.visit(node, *args, **kwargs) + for child_node in node.iter_child_nodes(): + self.visit(child_node, *args, **kwargs) class NodeTransformer(NodeVisitor): @@ -49,7 +58,7 @@ replacement takes place. """ - def generic_visit(self, node, *args, **kwargs): + def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> Node: for field, old_value in node.iter_fields(): if isinstance(old_value, list): new_values = [] @@ -71,11 +80,13 @@ setattr(node, field, new_node) return node - def visit_list(self, node, *args, **kwargs): + def visit_list(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.List[Node]: """As transformers may return lists in some places this method can be used to enforce a list as return value. """ rv = self.visit(node, *args, **kwargs) + if not isinstance(rv, list): - rv = [rv] + return [rv] + return rv
diff --git a/tools/json_schema_compiler/feature_compiler.py b/tools/json_schema_compiler/feature_compiler.py index 72fd4b50..7853df4 100644 --- a/tools/json_schema_compiler/feature_compiler.py +++ b/tools/json_schema_compiler/feature_compiler.py
@@ -836,6 +836,13 @@ # Handle complex features, which are lists of simple features. if type(feature_value) is list: + assert len(feature_value) > 1, ( + 'Error parsing feature "%s": A complex feature ' % feature_name + + 'definition is only needed when there are multiple objects ' + + 'specifying different groups of properties for feature ' + + 'availability. You can reduce it down to a single object on the ' + + 'feature key instead of a list.') + feature = ComplexFeature(feature_name) # This doesn't handle nested complex features. I think that's probably for
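The assertion added above rejects feature entries that wrap a single definition in a list; a complex (list-valued) feature is only meaningful when the alternatives differ. Roughly, with invented feature names (the real inputs are *_features.json files, shown here as the Python dicts the compiler consumes):

# Rejected by the assert above: a one-element list adds nothing.
single_entry_as_list = {
    "exampleApiAlpha": [
        {"contexts": ["blessed_extension"], "channel": "stable"},
    ],
}

# Equivalent, accepted form: put the properties directly on the feature key.
single_entry = {
    "exampleApiAlpha": {"contexts": ["blessed_extension"], "channel": "stable"},
}

# A complex feature that still warrants a list: the objects differ.
complex_feature = {
    "exampleApiBeta": [
        {"channel": "beta"},
        {"channel": "stable", "allowlist": ["a" * 32]},
    ],
}
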
diff --git a/tools/json_schema_compiler/feature_compiler_test.py b/tools/json_schema_compiler/feature_compiler_test.py index 027d019..407e9eac 100755 --- a/tools/json_schema_compiler/feature_compiler_test.py +++ b/tools/json_schema_compiler/feature_compiler_test.py
@@ -369,6 +369,21 @@ 'No default parent found for bookmarks'): c._CompileFeature('bookmarks.export', { "allowlist": ["asdf"] }) + def testComplexFeatureWithSinglePropertyBlock(self): + compiler = self._createTestFeatureCompiler('APIFeature') + + error = ('Error parsing feature "feature_alpha": A complex feature ' + 'definition is only needed when there are multiple objects ' + 'specifying different groups of properties for feature ' + 'availability. You can reduce it down to a single object on the ' + 'feature key instead of a list.') + with self.assertRaisesRegex(AssertionError, error): + compiler._CompileFeature('feature_alpha', + [{ + 'contexts': ['blessed_extension'], + 'channel': 'stable', + }]) + def testRealIdsDisallowedInAllowlist(self): fake_id = 'a' * 32; f = self._parseFeature({'allowlist': [fake_id],
diff --git a/tools/metrics/histograms/enums.xml b/tools/metrics/histograms/enums.xml index d21a5d7e..0537c28 100644 --- a/tools/metrics/histograms/enums.xml +++ b/tools/metrics/histograms/enums.xml
@@ -34802,7 +34802,7 @@ <int value="1054" label="SERIAL_CLEARBREAK"/> <int value="1055" label="DELETED_EXTENSIONVIEWINTERNAL_LOADSRC"/> <int value="1056" label="DELETED_EXTENSIONVIEWINTERNAL_PARSESRC"/> - <int value="1057" label="HID_GETUSERSELECTEDDEVICES"/> + <int value="1057" label="DELETED_HID_GETUSERSELECTEDDEVICES"/> <int value="1058" label="FILESYSTEMPROVIDERINTERNAL_GETACTIONSREQUESTEDSUCCESS"/> <int value="1059" @@ -59067,6 +59067,7 @@ <int value="-1027941917" label="SnoopingProtection:enabled"/> <int value="-1027254093" label="LockScreenNotifications:disabled"/> <int value="-1027124889" label="NtlmV2Enabled:enabled"/> + <int value="-1026466717" label="BlockInsecureDownloads:enabled"/> <int value="-1026192558" label="OopRasterizationDDL:enabled"/> <int value="-1025125875" label="AssistantTimersV2:disabled"/> <int value="-1024731815" label="MessagesForAndroidNearOomReduction:enabled"/> @@ -62112,6 +62113,7 @@ <int value="760542355" label="ServiceWorkerScriptFullCodeCache:enabled"/> <int value="761770770" label="OverrideLanguagePrefsForHrefTranslate:disabled"/> + <int value="761815665" label="BlockInsecureDownloads:disabled"/> <int value="762030626" label="ViewTransition:disabled"/> <int value="762324154" label="ZeroSuggestPrefetchingOnWeb:disabled"/> <int value="762700519" label="enable-checker-imaging"/> @@ -92719,7 +92721,7 @@ <enum name="ShoppingSubscriptionsRequestStatus"> <summary>The different statuses of a shopping subscriptions request.</summary> - <int value="0" label="Succeeded"/> + <int value="0" label="Successfully added or removed on server"/> <int value="1" label="Server failed to parse the request"/> <int value="2" label="Server successfully parsed the request, but failed afterwards"/> @@ -92731,6 +92733,9 @@ label="The request was lost somewhere unknown and never came back. This is used for monitoring purpose only and should never happen if the subscriptions work correctly."/> + <int value="7" + label="No action taken because the product is already tracked/untracked + on the server."/> </enum> <enum name="ShortcutsCreationResult">
diff --git a/tools/metrics/histograms/metadata/apps/histograms.xml b/tools/metrics/histograms/metadata/apps/histograms.xml index 5262058..c19ed8f 100644 --- a/tools/metrics/histograms/metadata/apps/histograms.xml +++ b/tools/metrics/histograms/metadata/apps/histograms.xml
@@ -306,7 +306,7 @@ </histogram> <histogram name="Apps.AppList.AppLaunched{TabletOrClamshell}" - enum="AppListLaunchedFrom" expires_after="2022-12-19"> + enum="AppListLaunchedFrom" expires_after="2023-06-19"> <owner>gzadina@google.com</owner> <owner>tbarzic@chromium.org</owner> <summary> @@ -999,7 +999,7 @@ </histogram> <histogram name="Apps.AppList.SearchResultRemovalDecision" - enum="AppListResultRemovalConfirmation" expires_after="2022-12-31"> + enum="AppListResultRemovalConfirmation" expires_after="2023-06-30"> <owner>tbarzic@chromium.org</owner> <owner>gzadina@google.com</owner> <summary> @@ -1063,7 +1063,7 @@ <histogram name="Apps.AppList.SuccessfulFirstUsageByNewUsers{TabletOrClamshell}" - enum="Boolean" expires_after="2022-12-19"> + enum="Boolean" expires_after="2023-06-19"> <owner>gzadina@google.com</owner> <owner>tbarzic@chromium.org</owner> <summary> @@ -1089,7 +1089,7 @@ </histogram> <histogram name="Apps.AppList.TimeToUserAction{TabletOrClamshell}" units="ms" - expires_after="2022-12-19"> + expires_after="2023-06-19"> <owner>gzadina@google.com</owner> <owner>tbarzic@chromium.org</owner> <summary> @@ -1103,7 +1103,7 @@ </histogram> <histogram name="Apps.AppList.UserAction{TabletOrClamshell}" - enum="LauncherUserAction" expires_after="2022-12-19"> + enum="LauncherUserAction" expires_after="2023-06-19"> <owner>gzadina@google.com</owner> <owner>tbarzic@chromium.org</owner> <summary> @@ -1468,7 +1468,7 @@ </histogram> <histogram name="Apps.AppListHide.InputLatency" units="ms" - expires_after="2022-12-01"> + expires_after="2023-06-01"> <owner>newcomer@chromium.org</owner> <owner>tbarzic@chromium.org</owner> <owner>mmourgos@chromium.org</owner> @@ -1511,7 +1511,7 @@ </histogram> <histogram name="Apps.AppListOpenTime.{OpenMethod}" units="ms" - expires_after="2022-12-26"> + expires_after="2023-06-26"> <owner>angusmclean@chromium.org</owner> <owner>tbarzic@chromium.org</owner> <owner>src/ash/app_list/OWNERS</owner> @@ -1775,7 +1775,7 @@ </histogram> <histogram name="Apps.AppListUsageByNewUsers{TabletOrClamshell}" - enum="AppListUsageStateByNewUsers" expires_after="2022-12-19"> + enum="AppListUsageStateByNewUsers" expires_after="2023-06-19"> <owner>andrewxu@chromium.org</owner> <owner>tbarzic@chromium.org</owner> <summary> @@ -1888,7 +1888,7 @@ </histogram> <histogram name="Apps.ContextMenuExecuteCommand{ContextMenuFromApp}" - enum="ChromeOSUICommands" expires_after="2022-12-31"> + enum="ChromeOSUICommands" expires_after="2023-06-30"> <owner>tbarzic@chromium.org</owner> <owner>newcomer@chromium.org</owner> <owner>mmourgos@chromium.org</owner> @@ -2729,7 +2729,7 @@ <histogram name="Apps.ScrollableShelf.AnimationSmoothness{HomeLauncherVisibility}" - units="%" expires_after="2022-12-14"> + units="%" expires_after="2023-06-14"> <owner>tbarzic@chromium.org</owner> <owner>anasalazar@chromium.org</owner> <owner>newcomer@chromium.org</owner> @@ -2747,7 +2747,7 @@ <histogram name="Apps.ScrollableShelf.Drag.PresentationTime.MaxLatency{HomeLauncherVisibility}" - units="ms" expires_after="2022-12-06"> + units="ms" expires_after="2023-06-06"> <owner>tbarzic@chromium.org</owner> <owner>anasalazar@chromium.org</owner> <owner>newcomer@chromium.org</owner> @@ -2760,7 +2760,7 @@ <histogram name="Apps.ScrollableShelf.Drag.PresentationTime{HomeLauncherVisibility}" - units="ms" expires_after="2022-12-02"> + units="ms" expires_after="2023-06-02"> <owner>tbarzic@chromium.org</owner> <owner>anasalazar@chromium.org</owner> <owner>newcomer@chromium.org</owner> @@ -2796,7 +2796,7 @@ </histogram> <histogram 
name="Apps.StateTransition.AnimationSmoothness" units="%" - expires_after="2022-12-30"> + expires_after="2023-06-30"> <owner>tbarzic@chromium.org</owner> <owner>newcomer@chromium.org</owner> <owner>src/ash/app_list/OWNERS</owner>
diff --git a/tools/metrics/histograms/metadata/extensions/histograms.xml b/tools/metrics/histograms/metadata/extensions/histograms.xml index e6903517..d34734c 100644 --- a/tools/metrics/histograms/metadata/extensions/histograms.xml +++ b/tools/metrics/histograms/metadata/extensions/histograms.xml
@@ -216,6 +216,9 @@ <histogram name="Extensions.AttemptedToDowngradeVersionLocation" enum="ExtensionLocation" expires_after="2022-11-18"> + <obsolete> + Code removed 2022/12. + </obsolete> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -302,6 +305,9 @@ <histogram name="Extensions.Bindings.NativeBindingCreationTime" units="microseconds" expires_after="2023-01-22"> + <obsolete> + Code removed 2022/12. + </obsolete> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -834,7 +840,7 @@ </histogram> <histogram name="Extensions.DidCreateScriptContext_Blessed" units="ms" - expires_after="2022-11-18"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -846,7 +852,7 @@ </histogram> <histogram name="Extensions.DidCreateScriptContext_BlessedWebPage" units="ms" - expires_after="2022-11-18"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -857,7 +863,7 @@ </histogram> <histogram name="Extensions.DidCreateScriptContext_ContentScript" units="ms" - expires_after="2022-11-18"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -868,7 +874,7 @@ </histogram> <histogram name="Extensions.DidCreateScriptContext_LockScreenExtension" - units="units" expires_after="2022-11-18"> + units="units" expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -881,7 +887,7 @@ </histogram> <histogram name="Extensions.DidCreateScriptContext_Unblessed" units="ms" - expires_after="2022-11-18"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -892,7 +898,7 @@ </histogram> <histogram name="Extensions.DidCreateScriptContext_Unspecified" units="ms" - expires_after="2022-11-18"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -903,7 +909,7 @@ </histogram> <histogram name="Extensions.DidCreateScriptContext_WebPage" units="ms" - expires_after="2022-11-18"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -914,7 +920,7 @@ </histogram> <histogram name="Extensions.DidCreateScriptContext_WebUI" units="ms" - expires_after="2022-11-18"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -925,7 +931,7 @@ </histogram> <histogram name="Extensions.DidInitializeServiceWorkerContextOnWorkerThread" - units="ms" expires_after="2022-06-26"> + units="ms" expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -1036,7 +1042,7 @@ </histogram> <histogram name="Extensions.EventPageIdleTime" units="ms" - expires_after="2022-08-28"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary>The time an extension's event page has spent unloaded.</summary> @@ -1236,6 +1242,9 @@ <histogram name="Extensions.ExtensionUninstalled" units="units" expires_after="2022-05-01"> + <obsolete> + Code removed 2022/12. 
+ </obsolete> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary>An extension has been uninstalled.</summary> @@ -1803,6 +1812,9 @@ <histogram name="Extensions.Functions.FailedTotalExecutionTime{IsKiosk}" units="ms" expires_after="2022-08-21"> + <obsolete> + Code removed 2022/12. + </obsolete> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -2599,6 +2611,9 @@ <histogram name="Extensions.Messaging.MessageSize" units="bytes" expires_after="2022-11-18"> + <obsolete> + Code removed 2022/12. + </obsolete> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -2622,8 +2637,9 @@ </token> </histogram> -<histogram name="Extensions.NetworkDelay" units="ms" expires_after="2023-03-26"> - <owner>battre@chromium.org</owner> +<histogram name="Extensions.NetworkDelay" units="ms" expires_after="2023-12-15"> + <owner>rdevlin.cronin@chromium.org</owner> + <owner>kelvinjiang@chromium.org</owner> <owner>src/extensions/OWNERS</owner> <summary>Time that network requests were blocked due to extensions.</summary> </histogram> @@ -2751,7 +2767,7 @@ </histogram> <histogram name="Extensions.ProcessManagerStartupHostsTime2" units="ms" - expires_after="2022-10-09"> + expires_after="2023-12-15"> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -2765,6 +2781,9 @@ <histogram name="Extensions.ResetPermissionsIncrease" enum="Boolean" expires_after="2022-12-01"> + <obsolete> + Code removed 2022/12. + </obsolete> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -3020,6 +3039,9 @@ <histogram name="Extensions.SyncGetMessageBundle" units="ms" expires_after="2022-09-11"> + <obsolete> + Code removed 2022/12. + </obsolete> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -3030,6 +3052,9 @@ <histogram name="Extensions.ThrottledNetworkRequestDelay" units="ms" expires_after="2022-06-01"> + <obsolete> + Code removed 2022/12. + </obsolete> <owner>rdevlin.cronin@chromium.org</owner> <owner>extensions-core@chromium.org</owner> <summary> @@ -3154,6 +3179,19 @@ </histogram> <histogram + name="Extensions.WebRequest.BeforeRequestDeclarativeNetRequestEvaluationTimeInMicroseconds" + units="microseconds" expires_after="2023-12-01"> + <owner>rdevlin.cronin@chromium.org</owner> + <owner>src/extensions/OWNERS</owner> + <summary> + The total amount of time taken to evaluate declarativeNetRequest rules in + the onBeforeRequest stage, measured in microseconds. Recorded once per + request if and only if at least one declarativeNetRequest rule is present. + This is only emitted for users with high resolution clocks. + </summary> +</histogram> + +<histogram name="Extensions.WebRequest.BeforeRequestListenerEvaluationTime.{HandlerTypes}" units="ms" expires_after="2023-12-01"> <owner>rdevlin.cronin@chromium.org</owner> @@ -3171,6 +3209,25 @@ </token> </histogram> +<histogram + name="Extensions.WebRequest.BeforeRequestListenerEvaluationTimeInMicroseconds.{HandlerTypes}" + units="microseconds" expires_after="2023-12-01"> + <owner>rdevlin.cronin@chromium.org</owner> + <owner>src/extensions/OWNERS</owner> + <summary> + The total amount of time, measured in milliseconds, between when an event is + dispatched to webRequest listeners and when all responses are received and + handled for an onBeforeRequest event. Emitted when there are {HandlerTypes} + for the request. 
Recorded once per request if the request completes. This is + only emitted for users with high resolution clocks. + </summary> + <token key="HandlerTypes"> + <variant name="WebRequestAndDeclarativeNetRequest" + summary="both webRequest listeners and declarativeNetRequest rules"/> + <variant name="WebRequestOnly" summary="only webRequest listeners"/> + </token> +</histogram> + <histogram name="Extensions.WebRequest.EventListenerFlag" enum="WebRequestEventListenerFlag" expires_after="never"> <!-- expires-never: For monitoring Web Request API usage statistics. -->
diff --git a/tools/metrics/histograms/metadata/privacy/histograms.xml b/tools/metrics/histograms/metadata/privacy/histograms.xml index 17d133bd..99b549f 100644 --- a/tools/metrics/histograms/metadata/privacy/histograms.xml +++ b/tools/metrics/histograms/metadata/privacy/histograms.xml
@@ -224,6 +224,29 @@ <token key="DIPSRedirectType" variants="DIPSRedirectType"/> </histogram> +<histogram name="Privacy.DIPS.ClearedSitesCount{DIPSCookieMode}" units="sites" + expires_after="2023-05-31"> + <owner>bcl@chromium.org</owner> + <owner>src/chrome/browser/dips/OWNERS</owner> + <summary> + The number of sites that may have had their storage cleared by DIPS. + + If DIPS deletion is on, this is the number of sites that have had storage + cleared by DIPS. Otherwise, this number is the sites that would've had their + storage cleared if the DIPS deletion feature was on. + + This is recorded every time the DIPS timer is fired, which is determined by + the `timer_delay` parameter of the DIPS feature. + + This is only logged for site-data clearing that occurs as a result of DIPS. + If a site contributes to this count then it has performed a potential + tracking action (e.g. writing to storage, bouncing the user, or both) + without the user interacting with that site before, or soon after, the + action. + </summary> + <token key="DIPSCookieMode" variants="DIPSCookieMode"/> +</histogram> + <histogram name="Privacy.DIPS.DatabaseEntryCount" units="entries" expires_after="2023-05-30"> <owner>bcl@chromium.org</owner> @@ -286,6 +309,19 @@ </summary> </histogram> +<histogram name="Privacy.DIPS.DeletionLatency" units="ms" + expires_after="2023-05-31"> + <owner>bcl@chromium.org</owner> + <owner>src/chrome/browser/dips/OWNERS</owner> + <summary> + The amount of time it takes to complete the DIPS deletion process. + + This is recorded each time after the DIPS timer fires and triggers deletion + of DIPS-eligible sites. This metric will be the result of |time when DIPS + deletion completed - time when DIPS deletion begins|. + </summary> +</histogram> + <histogram name="Privacy.DIPS.TimeFromInteractionToStorage{DIPSCookieMode}" units="ms" expires_after="2023-05-01"> <owner>bcl@chromium.org</owner>
diff --git a/ui/file_manager/file_manager/foreground/elements/files_quick_view.js b/ui/file_manager/file_manager/foreground/elements/files_quick_view.js index ef57e49..f5dfb10d 100644 --- a/ui/file_manager/file_manager/foreground/elements/files_quick_view.js +++ b/ui/file_manager/file_manager/foreground/elements/files_quick_view.js
@@ -76,48 +76,6 @@ }, /** - * Generate URL for browsable files (Text/PDF). - */ - generateBrowsableUrl_: function(sourceContent, subtype) { - let contentUrl = ''; - switch (sourceContent.dataType) { - case 'url': - contentUrl = sourceContent.data; - break; - case 'blob': - contentUrl = URL.createObjectURL(sourceContent.data); - break; - default: - return contentUrl; - } - - if (subtype === 'PDF') { - // contentUrls are only modified for PDFs, see getQuickViewParameters_ in - // quick_view_controller.js. - contentUrl += '#view=FitH'; - } - - return contentUrl; - }, - - /** - * Apply custom CSS to autogenerated webview content. - * - * @param {!Event} e - */ - applyTextCss: function(e) { - // Don't override the Chrome PDF viewer's CSS: crbug.com/1001034. - if (this.subtype === 'PDF') { - return; - } - - const webview = /** @type {WebView} */ (e.target); - webview.insertCSS({ - 'file': 'untrusted_resources/files_text_content.css', - }); - }, - - /** * Send browsable preview content (i.e. content that can be displayed by the * browser directly as PDF/text/html) to the chrome-untrusted:// <iframe>. */
diff --git a/ui/message_center/views/message_popup_collection.cc b/ui/message_center/views/message_popup_collection.cc index 5e40c08a..75af39ba 100644 --- a/ui/message_center/views/message_popup_collection.cc +++ b/ui/message_center/views/message_popup_collection.cc
@@ -9,9 +9,11 @@ #include "base/containers/cxx20_erase.h" #include "base/ranges/algorithm.h" #include "base/task/single_thread_task_runner.h" +#include "base/time/time.h" #include "base/timer/timer.h" #include "build/chromeos_buildflags.h" #include "ui/compositor/layer.h" +#include "ui/compositor/scoped_animation_duration_scale_mode.h" #include "ui/gfx/animation/linear_animation.h" #include "ui/gfx/animation/tween.h" #include "ui/message_center/message_center_types.h" @@ -80,10 +82,15 @@ if (state_ != State::IDLE) { // If not in IDLE state, start animation. - animation_->SetDuration(state_ == State::MOVE_DOWN || - state_ == State::MOVE_UP_FOR_INVERSE - ? kMoveDownDuration - : kFadeInFadeOutDuration); + base::TimeDelta animation_duration; + if (state_ == State::MOVE_DOWN || state_ == State::MOVE_UP_FOR_INVERSE) { + animation_duration = kMoveDownDuration; + } else { + animation_duration = kFadeInFadeOutDuration; + } + animation_->SetDuration( + animation_duration * + ui::ScopedAnimationDurationScaleMode::duration_multiplier()); animation_->Start(); AnimationStarted(); UpdateByAnimation();
diff --git a/ui/views/controls/button/image_button.h b/ui/views/controls/button/image_button.h index 7b87380..8eb1f73b 100644 --- a/ui/views/controls/button/image_button.h +++ b/ui/views/controls/button/image_button.h
@@ -126,6 +126,8 @@ VIEW_BUILDER_OVERLOAD_METHOD(SetImage, Button::ButtonState, const gfx::ImageSkia&) +VIEW_BUILDER_METHOD(SetImageModel, Button::ButtonState, const ui::ImageModel&) + END_VIEW_BUILDER //////////////////////////////////////////////////////////////////////////////// @@ -205,6 +207,9 @@ VIEW_BUILDER_PROPERTY(std::unique_ptr<Background>, ToggledBackground) VIEW_BUILDER_PROPERTY(std::u16string, ToggledTooltipText) VIEW_BUILDER_PROPERTY(std::u16string, ToggledAccessibleName) +VIEW_BUILDER_METHOD(SetToggledImageModel, + Button::ButtonState, + const ui::ImageModel&) END_VIEW_BUILDER } // namespace views
diff --git a/ui/views/controls/label.cc b/ui/views/controls/label.cc index 35ccc8f7..b392a769 100644 --- a/ui/views/controls/label.cc +++ b/ui/views/controls/label.cc
@@ -170,8 +170,9 @@ void Label::SetTextStyleRange(int style, const gfx::Range& range) { if (style == text_style_ || !range.IsValid() || range.is_empty() || - !gfx::Range(0, GetText().size()).Contains(range)) + !gfx::Range(0, GetText().size()).Contains(range)) { return; + } const auto details = style::GetFontDetails(text_context_, style); // This function is not prepared to handle style requests that vary by @@ -212,6 +213,10 @@ OnPropertyChanged(&requested_enabled_color_, kPropertyEffectsPaint); } +absl::optional<ui::ColorId> Label::GetEnabledColorId() const { + return enabled_color_id_; +} + void Label::SetEnabledColorId(absl::optional<ui::ColorId> enabled_color_id) { if (enabled_color_id_ == enabled_color_id) return; @@ -303,8 +308,9 @@ void Label::SetSkipSubpixelRenderingOpacityCheck( bool skip_subpixel_rendering_opacity_check) { if (skip_subpixel_rendering_opacity_check_ == - skip_subpixel_rendering_opacity_check) + skip_subpixel_rendering_opacity_check) { return; + } skip_subpixel_rendering_opacity_check_ = skip_subpixel_rendering_opacity_check; OnPropertyChanged(&skip_subpixel_rendering_opacity_check_, @@ -690,8 +696,9 @@ View* Label::GetTooltipHandlerForPoint(const gfx::Point& point) { if (!handles_tooltips_ || - (tooltip_text_.empty() && !ShouldShowDefaultTooltip())) + (tooltip_text_.empty() && !ShouldShowDefaultTooltip())) { return nullptr; + } return HitTestPoint(point) ? this : nullptr; } @@ -788,8 +795,9 @@ // cases), refactoring parents to use background() or by fixing // subpixel-rendering issues that the DCHECK detects. if (!display_text_ || display_text_->subpixel_rendering_suppressed() || - skip_subpixel_rendering_opacity_check_) + skip_subpixel_rendering_opacity_check_) { return; + } // Ensure that, if we're using subpixel rendering, we're painted to an opaque // region. Subpixel rendering will sample from the r,g,b color channels of the
diff --git a/ui/views/controls/label.h b/ui/views/controls/label.h index c420695..efe228bb 100644 --- a/ui/views/controls/label.h +++ b/ui/views/controls/label.h
@@ -13,6 +13,7 @@ #include "third_party/abseil-cpp/absl/types/optional.h" #include "ui/base/metadata/metadata_header_macros.h" #include "ui/base/models/simple_menu_model.h" +#include "ui/color/color_id.h" #include "ui/gfx/color_palette.h" #include "ui/gfx/render_text.h" #include "ui/gfx/text_constants.h" @@ -133,6 +134,7 @@ // enabled. SkColor GetEnabledColor() const; virtual void SetEnabledColor(SkColor color); + absl::optional<ui::ColorId> GetEnabledColorId() const; void SetEnabledColorId(absl::optional<ui::ColorId> enabled_color_id); // Gets/Sets the background color. This won't be explicitly drawn, but the