diff --git a/DEPS b/DEPS index 2ab76ed7..2e76392 100644 --- a/DEPS +++ b/DEPS
@@ -39,11 +39,11 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling Skia # and whatever else without interference from each other. - 'skia_revision': '4a339529612a43871d021877e58698e067d6c4cd', + 'skia_revision': '5ea95df02de9cd774d0b84d1341599bbd9c0d8db', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling V8 # and whatever else without interference from each other. - 'v8_revision': '72d6cb322ad62045cea296b1a28d21ce3ec72531', + 'v8_revision': 'f1f171fc634144cb10cec771ccea21bd6774c6cd', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling swarming_client # and whatever else without interference from each other. @@ -51,7 +51,7 @@ # Three lines of non-changing comments so that # the commit queue can handle CLs rolling ANGLE # and whatever else without interference from each other. - 'angle_revision': '8bd4b6c52e8a97093ed6c603287658cb46c334fd', + 'angle_revision': '566273222683314dfc8ac1cdb9bf6deb5d2b4c65', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling build tools # and whatever else without interference from each other. @@ -442,7 +442,7 @@ Var('chromium_git') + '/chromium/deps/findbugs.git' + '@' + '57f05238d3ac77ea0a194813d3065dd780c6e566', 'src/third_party/freetype-android/src': - Var('chromium_git') + '/chromium/src/third_party/freetype2.git' + '@' + 'e186230678ee8e4ea4ac4797ece8125761e3225a', + Var('chromium_git') + '/chromium/src/third_party/freetype2.git' + '@' + '8cabd919ca63f0e6c12e8405e8542a45d910fa62', 'src/third_party/elfutils/src': Var('chromium_git') + '/external/elfutils.git' + '@' + '249673729a7e5dbd5de4f3760bdcaa3d23d154d7',
diff --git a/ash/DEPS b/ash/DEPS index 30f219d..8952c301 100644 --- a/ash/DEPS +++ b/ash/DEPS
@@ -15,6 +15,7 @@ "+ui", "+win8", "-ash/host", + "-content", ] specific_include_rules = {
diff --git a/ash/ash.gyp b/ash/ash.gyp index 8d96a6c..4994964 100644 --- a/ash/ash.gyp +++ b/ash/ash.gyp
@@ -45,10 +45,6 @@ 'cancel_mode.h', 'cast_config_delegate.cc', 'cast_config_delegate.h', - 'content/display/display_color_manager_chromeos.cc', - 'content/display/display_color_manager_chromeos.h', - 'content/display/screen_orientation_controller_chromeos.cc', - 'content/display/screen_orientation_controller_chromeos.h', 'debug.cc', 'debug.h', 'default_accessibility_delegate.cc', @@ -67,6 +63,8 @@ 'display/cursor_window_controller.h', 'display/display_change_observer_chromeos.cc', 'display/display_change_observer_chromeos.h', + 'display/display_color_manager_chromeos.cc', + 'display/display_color_manager_chromeos.h', 'display/display_configurator_animation.cc', 'display/display_configurator_animation.h', 'display/window_tree_host_manager.cc', @@ -103,6 +101,8 @@ 'display/root_window_transformers.h', 'display/screen_ash.cc', 'display/screen_ash.h', + 'display/screen_orientation_controller_chromeos.cc', + 'display/screen_orientation_controller_chromeos.h', 'display/screen_position_controller.cc', 'display/screen_position_controller.h', 'display/shared_display_edge_indicator.cc', @@ -676,12 +676,14 @@ 'content/ash_with_content_export.h', 'content/gpu_support_impl.cc', 'content/gpu_support_impl.h', + 'content/keyboard_overlay/keyboard_overlay_delegate.cc', + 'content/keyboard_overlay/keyboard_overlay_delegate.h', + 'content/keyboard_overlay/keyboard_overlay_view.cc', + 'content/keyboard_overlay/keyboard_overlay_view.h', + 'content/screen_orientation_delegate_chromeos.cc', + 'content/screen_orientation_delegate_chromeos.h', 'content/shell_content_state.cc', 'content/shell_content_state.h', - 'keyboard_overlay/keyboard_overlay_delegate.cc', - 'keyboard_overlay/keyboard_overlay_delegate.h', - 'keyboard_overlay/keyboard_overlay_view.cc', - 'keyboard_overlay/keyboard_overlay_view.h', ], 'ash_test_support_sources': [ 'desktop_background/desktop_background_controller_test_api.cc', @@ -809,6 +811,8 @@ 'ash_touch_exploration_manager_chromeos_unittest.cc', 'autoclick/autoclick_unittest.cc', 'content/display/screen_orientation_controller_chromeos_unittest.cc', + 'content/keyboard_overlay/keyboard_overlay_delegate_unittest.cc', + 'content/keyboard_overlay/keyboard_overlay_view_unittest.cc', 'desktop_background/desktop_background_controller_unittest.cc', 'dip_unittest.cc', 'display/cursor_window_controller_unittest.cc', @@ -835,8 +839,6 @@ 'frame/custom_frame_view_ash_unittest.cc', 'frame/default_header_painter_unittest.cc', 'host/ash_window_tree_host_x11_unittest.cc', - 'keyboard_overlay/keyboard_overlay_delegate_unittest.cc', - 'keyboard_overlay/keyboard_overlay_view_unittest.cc', 'magnifier/magnification_controller_unittest.cc', 'metrics/desktop_task_switch_metric_recorder_unittest.cc', 'metrics/task_switch_metrics_recorder_unittest.cc',
diff --git a/ash/content/DEPS b/ash/content/DEPS index 5ed73eb..726b15c5 100644 --- a/ash/content/DEPS +++ b/ash/content/DEPS
@@ -3,3 +3,13 @@ # in this file, but I'm starting out conservative. "+content/public/browser/gpu_data_manager.h", ] + +specific_include_rules = { + "screen_orientation_delegate_chromeos.cc": [ + "+content/public/browser/screen_orientation_provider.h", + "+content/public/browser/web_contents.h", + ], + "screen_orientation_delegate_chromeos.h": [ + "+content/public/browser/screen_orientation_delegate.h", + ], +}
diff --git a/ash/content/display/DEPS b/ash/content/display/DEPS index 5f741582..8706cc9 100644 --- a/ash/content/display/DEPS +++ b/ash/content/display/DEPS
@@ -1,14 +1,9 @@ -include_rules = [ - "+content/public/browser/screen_orientation_delegate.h", - "+content/public/browser/screen_orientation_provider.h", - "+content/public/browser/browser_context.h", - "+content/public/browser/browser_thread.h", - "+content/public/browser/web_contents.h", - "+third_party/qcms/src/qcms.h", - "+third_party/WebKit/public/platform/modules/screen_orientation/WebScreenOrientationLockType.h", -] - specific_include_rules = { + "screen_orientation_controller_chromeos_unittest.cc": [ + "+content/public/browser/browser_context.h", + "+content/public/browser/web_contents.h", + "+third_party/WebKit/public/platform/modules/screen_orientation/WebScreenOrientationLockType.h", + ], ".*test\.cc": [ "+content/public/test/test_browser_context.h" ],
diff --git a/ash/content/display/screen_orientation_controller_chromeos_unittest.cc b/ash/content/display/screen_orientation_controller_chromeos_unittest.cc index f866c56..f5df053 100644 --- a/ash/content/display/screen_orientation_controller_chromeos_unittest.cc +++ b/ash/content/display/screen_orientation_controller_chromeos_unittest.cc
@@ -5,13 +5,14 @@ #include <vector> #include "ash/ash_switches.h" -#include "ash/content/display/screen_orientation_controller_chromeos.h" #include "ash/content/shell_content_state.h" #include "ash/display/display_info.h" #include "ash/display/display_manager.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/shell.h" #include "ash/test/ash_test_base.h" #include "ash/test/ash_test_helper.h" +#include "ash/test/content/test_shell_content_state.h" #include "ash/test/display_manager_test_api.h" #include "ash/test/test_shell_delegate.h" #include "ash/test/test_system_tray_delegate.h" @@ -103,8 +104,10 @@ ScreenOrientationControllerTest(); ~ScreenOrientationControllerTest() override; - ScreenOrientationController* delegate() { - return screen_orientation_controller_; + content::ScreenOrientationDelegate* delegate() { + return ash_test_helper() + ->test_shell_content_state() + ->screen_orientation_delegate(); } // Creates and initializes and empty content::WebContents that is backed by a @@ -119,8 +122,6 @@ void SetUp() override; private: - ScreenOrientationController* screen_orientation_controller_; - // Optional content::BrowserContext used for two window tests. scoped_ptr<content::BrowserContext> secondary_browser_context_; @@ -156,8 +157,6 @@ base::CommandLine::ForCurrentProcess()->AppendSwitch( switches::kAshEnableTouchViewTesting); test::AshTestBase::SetUp(); - screen_orientation_controller_ = - Shell::GetInstance()->screen_orientation_controller(); } // Tests that a content::WebContents can lock rotation. @@ -647,8 +646,8 @@ ASSERT_NE(kNewRotation, display_manager->GetDisplayInfo(kInternalDisplayId) .GetActiveRotation()); - delegate()->SetDisplayRotation(kNewRotation, - gfx::Display::ROTATION_SOURCE_ACTIVE); + Shell::GetInstance()->screen_orientation_controller()->SetDisplayRotation( + kNewRotation, gfx::Display::ROTATION_SOURCE_ACTIVE); EXPECT_EQ(kNewRotation, display_manager->GetDisplayInfo(kInternalDisplayId) .GetActiveRotation());
diff --git a/ash/keyboard_overlay/DEPS b/ash/content/keyboard_overlay/DEPS similarity index 100% rename from ash/keyboard_overlay/DEPS rename to ash/content/keyboard_overlay/DEPS
diff --git a/ash/keyboard_overlay/keyboard_overlay_delegate.cc b/ash/content/keyboard_overlay/keyboard_overlay_delegate.cc similarity index 78% rename from ash/keyboard_overlay/keyboard_overlay_delegate.cc rename to ash/content/keyboard_overlay/keyboard_overlay_delegate.cc index 76563f5b8..91f299a6 100644 --- a/ash/keyboard_overlay/keyboard_overlay_delegate.cc +++ b/ash/content/keyboard_overlay/keyboard_overlay_delegate.cc
@@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "ash/keyboard_overlay/keyboard_overlay_delegate.h" +#include "ash/content/keyboard_overlay/keyboard_overlay_delegate.h" #include <algorithm> @@ -28,9 +28,8 @@ const int kHorizontalMargin = 28; // A message handler for detecting the timing when the web contents is painted. -class PaintMessageHandler - : public WebUIMessageHandler, - public base::SupportsWeakPtr<PaintMessageHandler> { +class PaintMessageHandler : public WebUIMessageHandler, + public base::SupportsWeakPtr<PaintMessageHandler> { public: explicit PaintMessageHandler(views::Widget* widget) : widget_(widget) {} ~PaintMessageHandler() override {} @@ -48,8 +47,7 @@ void PaintMessageHandler::RegisterMessages() { web_ui()->RegisterMessageCallback( - "didPaint", - base::Bind(&PaintMessageHandler::DidPaint, AsWeakPtr())); + "didPaint", base::Bind(&PaintMessageHandler::DidPaint, AsWeakPtr())); } void PaintMessageHandler::DidPaint(const base::ListValue* args) { @@ -63,13 +61,9 @@ KeyboardOverlayDelegate::KeyboardOverlayDelegate(const base::string16& title, const GURL& url) - : title_(title), - url_(url), - widget_(NULL) { -} + : title_(title), url_(url), widget_(NULL) {} -KeyboardOverlayDelegate::~KeyboardOverlayDelegate() { -} +KeyboardOverlayDelegate::~KeyboardOverlayDelegate() {} views::Widget* KeyboardOverlayDelegate::Show(views::WebDialogView* view) { widget_ = new views::Widget; @@ -82,12 +76,12 @@ // Show the widget at the bottom of the work area. gfx::Size size; GetDialogSize(&size); - const gfx::Rect& rect = Shell::GetScreen()->GetDisplayNearestWindow( - widget_->GetNativeView()).work_area(); + const gfx::Rect& rect = + Shell::GetScreen() + ->GetDisplayNearestWindow(widget_->GetNativeView()) + .work_area(); gfx::Rect bounds(rect.x() + (rect.width() - size.width()) / 2, - rect.bottom() - size.height(), - size.width(), - size.height()); + rect.bottom() - size.height(), size.width(), size.height()); widget_->SetBounds(bounds); // The widget will be shown when the web contents gets ready to display. @@ -111,12 +105,12 @@ handlers->push_back(new PaintMessageHandler(widget_)); } -void KeyboardOverlayDelegate::GetDialogSize( - gfx::Size* size) const { +void KeyboardOverlayDelegate::GetDialogSize(gfx::Size* size) const { using std::min; DCHECK(widget_); - gfx::Rect rect = ash::Shell::GetScreen()->GetDisplayNearestWindow( - widget_->GetNativeView()).work_area(); + gfx::Rect rect = ash::Shell::GetScreen() + ->GetDisplayNearestWindow(widget_->GetNativeView()) + .work_area(); const int width = min(kBaseWidth, rect.width() - kHorizontalMargin); const int height = width * kBaseHeight / kBaseWidth; size->SetSize(width, height); @@ -126,15 +120,13 @@ return "[]"; } -void KeyboardOverlayDelegate::OnDialogClosed( - const std::string& json_retval) { +void KeyboardOverlayDelegate::OnDialogClosed(const std::string& json_retval) { delete this; return; } void KeyboardOverlayDelegate::OnCloseContents(WebContents* source, - bool* out_close_dialog) { -} + bool* out_close_dialog) {} bool KeyboardOverlayDelegate::ShouldShowDialogTitle() const { return false;
diff --git a/ash/keyboard_overlay/keyboard_overlay_delegate.h b/ash/content/keyboard_overlay/keyboard_overlay_delegate.h similarity index 100% rename from ash/keyboard_overlay/keyboard_overlay_delegate.h rename to ash/content/keyboard_overlay/keyboard_overlay_delegate.h
diff --git a/ash/keyboard_overlay/keyboard_overlay_delegate_unittest.cc b/ash/content/keyboard_overlay/keyboard_overlay_delegate_unittest.cc similarity index 96% rename from ash/keyboard_overlay/keyboard_overlay_delegate_unittest.cc rename to ash/content/keyboard_overlay/keyboard_overlay_delegate_unittest.cc index 2605263..ff206b3 100644 --- a/ash/keyboard_overlay/keyboard_overlay_delegate_unittest.cc +++ b/ash/content/keyboard_overlay/keyboard_overlay_delegate_unittest.cc
@@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "ash/keyboard_overlay/keyboard_overlay_delegate.h" +#include "ash/content/keyboard_overlay/keyboard_overlay_delegate.h" #include "ash/shelf/shelf_types.h" #include "ash/shell.h"
diff --git a/ash/keyboard_overlay/keyboard_overlay_view.cc b/ash/content/keyboard_overlay/keyboard_overlay_view.cc similarity index 70% rename from ash/keyboard_overlay/keyboard_overlay_view.cc rename to ash/content/keyboard_overlay/keyboard_overlay_view.cc index fe7c585b..9673df2 100644 --- a/ash/keyboard_overlay/keyboard_overlay_view.cc +++ b/ash/content/keyboard_overlay/keyboard_overlay_view.cc
@@ -2,9 +2,9 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "ash/keyboard_overlay/keyboard_overlay_view.h" +#include "ash/content/keyboard_overlay/keyboard_overlay_view.h" -#include "ash/keyboard_overlay/keyboard_overlay_delegate.h" +#include "ash/content/keyboard_overlay/keyboard_overlay_delegate.h" #include "ash/shell.h" #include "base/strings/utf_string_conversions.h" #include "grit/ash_strings.h" @@ -20,26 +20,22 @@ // Keys to invoke Cancel (Escape, Ctrl+Alt+/, or Shift+Ctrl+Alt+/, Help, F14). const ash::KeyboardOverlayView::KeyEventData kCancelKeys[] = { - { ui::VKEY_ESCAPE, ui::EF_NONE}, - { ui::VKEY_OEM_2, ui::EF_CONTROL_DOWN | ui::EF_ALT_DOWN }, - { ui::VKEY_OEM_2, ui::EF_SHIFT_DOWN | ui::EF_CONTROL_DOWN | ui::EF_ALT_DOWN }, - { ui::VKEY_HELP, ui::EF_NONE }, - { ui::VKEY_F14, ui::EF_NONE }, + {ui::VKEY_ESCAPE, ui::EF_NONE}, + {ui::VKEY_OEM_2, ui::EF_CONTROL_DOWN | ui::EF_ALT_DOWN}, + {ui::VKEY_OEM_2, ui::EF_SHIFT_DOWN | ui::EF_CONTROL_DOWN | ui::EF_ALT_DOWN}, + {ui::VKEY_HELP, ui::EF_NONE}, + {ui::VKEY_F14, ui::EF_NONE}, }; - } namespace ash { -KeyboardOverlayView::KeyboardOverlayView( - content::BrowserContext* context, - WebDialogDelegate* delegate, - WebContentsHandler* handler) - : views::WebDialogView(context, delegate, handler) { -} +KeyboardOverlayView::KeyboardOverlayView(content::BrowserContext* context, + WebDialogDelegate* delegate, + WebContentsHandler* handler) + : views::WebDialogView(context, delegate, handler) {} -KeyboardOverlayView::~KeyboardOverlayView() { -} +KeyboardOverlayView::~KeyboardOverlayView() {} void KeyboardOverlayView::Cancel() { Shell::GetInstance()->overlay_filter()->Deactivate(this); @@ -66,10 +62,9 @@ } // static -void KeyboardOverlayView::ShowDialog( - content::BrowserContext* context, - WebContentsHandler* handler, - const GURL& url) { +void KeyboardOverlayView::ShowDialog(content::BrowserContext* context, + WebContentsHandler* handler, + const GURL& url) { if (Shell::GetInstance()->overlay_filter()->IsActive()) return;
diff --git a/ash/keyboard_overlay/keyboard_overlay_view.h b/ash/content/keyboard_overlay/keyboard_overlay_view.h similarity index 100% rename from ash/keyboard_overlay/keyboard_overlay_view.h rename to ash/content/keyboard_overlay/keyboard_overlay_view.h
diff --git a/ash/keyboard_overlay/keyboard_overlay_view_unittest.cc b/ash/content/keyboard_overlay/keyboard_overlay_view_unittest.cc similarity index 83% rename from ash/keyboard_overlay/keyboard_overlay_view_unittest.cc rename to ash/content/keyboard_overlay/keyboard_overlay_view_unittest.cc index 1b2f297..62cdde7be 100644 --- a/ash/keyboard_overlay/keyboard_overlay_view_unittest.cc +++ b/ash/content/keyboard_overlay/keyboard_overlay_view_unittest.cc
@@ -2,13 +2,13 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "ash/keyboard_overlay/keyboard_overlay_view.h" +#include "ash/content/keyboard_overlay/keyboard_overlay_view.h" #include <algorithm> #include "ash/accelerators/accelerator_table.h" +#include "ash/content/keyboard_overlay/keyboard_overlay_delegate.h" #include "ash/content/shell_content_state.h" -#include "ash/keyboard_overlay/keyboard_overlay_delegate.h" #include "ash/shell.h" #include "ash/shell_delegate.h" #include "ash/test/ash_test_base.h" @@ -34,10 +34,9 @@ if (kAcceleratorData[i].action != SHOW_KEYBOARD_OVERLAY) continue; const AcceleratorData& open_key_data = kAcceleratorData[i]; - ui::KeyEvent open_key(open_key_data.trigger_on_press ? - ui::ET_KEY_PRESSED : ui::ET_KEY_RELEASED, - open_key_data.keycode, - open_key_data.modifiers); + ui::KeyEvent open_key(open_key_data.trigger_on_press ? ui::ET_KEY_PRESSED + : ui::ET_KEY_RELEASED, + open_key_data.keycode, open_key_data.modifiers); EXPECT_TRUE(view.IsCancelingKeyEvent(&open_key)); } } @@ -50,8 +49,7 @@ continue; // Escape is used just for canceling. KeyboardOverlayView::KeyEventData open_key = { - kAcceleratorData[i].keycode, - kAcceleratorData[i].modifiers, + kAcceleratorData[i].keycode, kAcceleratorData[i].modifiers, }; open_keys.push_back(open_key); } @@ -61,7 +59,7 @@ // Escape is used just for canceling, so exclude it from the comparison with // open keys. - KeyboardOverlayView::KeyEventData escape = { ui::VKEY_ESCAPE, ui::EF_NONE }; + KeyboardOverlayView::KeyEventData escape = {ui::VKEY_ESCAPE, ui::EF_NONE}; std::vector<KeyboardOverlayView::KeyEventData>::iterator escape_itr = std::find(canceling_keys.begin(), canceling_keys.end(), escape); canceling_keys.erase(escape_itr);
diff --git a/ash/content/screen_orientation_delegate_chromeos.cc b/ash/content/screen_orientation_delegate_chromeos.cc new file mode 100644 index 0000000..68777e67 --- /dev/null +++ b/ash/content/screen_orientation_delegate_chromeos.cc
@@ -0,0 +1,47 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "ash/content/screen_orientation_delegate_chromeos.h" + +#include "ash/display/screen_orientation_controller_chromeos.h" +#include "ash/shell.h" +#include "content/public/browser/screen_orientation_provider.h" +#include "content/public/browser/web_contents.h" + +namespace ash { + +ScreenOrientationDelegateChromeos::ScreenOrientationDelegateChromeos() { + content::ScreenOrientationProvider::SetDelegate(this); +} +ScreenOrientationDelegateChromeos::~ScreenOrientationDelegateChromeos() { + content::ScreenOrientationProvider::SetDelegate(nullptr); +} + +bool ScreenOrientationDelegateChromeos::FullScreenRequired( + content::WebContents* web_contents) { + return true; +} + +void ScreenOrientationDelegateChromeos::Lock( + content::WebContents* web_contents, + blink::WebScreenOrientationLockType lock_orientation) { + Shell::GetInstance() + ->screen_orientation_controller() + ->LockOrientationForWindow(web_contents->GetNativeView(), + lock_orientation); +} + +bool ScreenOrientationDelegateChromeos::ScreenOrientationProviderSupported() { + return Shell::GetInstance() + ->screen_orientation_controller() + ->ScreenOrientationProviderSupported(); +} +void ScreenOrientationDelegateChromeos::Unlock( + content::WebContents* web_contents) { + Shell::GetInstance() + ->screen_orientation_controller() + ->UnlockOrientationForWindow(web_contents->GetNativeView()); +} + +} // namespace ash
diff --git a/ash/content/screen_orientation_delegate_chromeos.h b/ash/content/screen_orientation_delegate_chromeos.h new file mode 100644 index 0000000..ce96841c --- /dev/null +++ b/ash/content/screen_orientation_delegate_chromeos.h
@@ -0,0 +1,31 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "content/public/browser/screen_orientation_delegate.h" + +#ifndef ASH_CONTENT_SCREEN_ORIENTATION_DELEGATE_CHROMEOS_H_ +#define ASH_CONTENT_SCREEN_ORIENTATION_DELEGATE_CHROMEOS_H_ + +namespace ash { + +class ScreenOrientationDelegateChromeos + : public content::ScreenOrientationDelegate { + public: + ScreenOrientationDelegateChromeos(); + ~ScreenOrientationDelegateChromeos() override; + + private: + // content::ScreenOrientationDelegate: + bool FullScreenRequired(content::WebContents* web_contents) override; + void Lock(content::WebContents* web_contents, + blink::WebScreenOrientationLockType lock_orientation) override; + bool ScreenOrientationProviderSupported() override; + void Unlock(content::WebContents* web_contents) override; + + DISALLOW_COPY_AND_ASSIGN(ScreenOrientationDelegateChromeos); +}; + +} // namespace ash + +#endif // ASH_CONTENT_SCREEN_ORIENTATION_DELEGATE_CHROMEOS_H_ \ No newline at end of file
diff --git a/ash/content/shell_content_state.h b/ash/content/shell_content_state.h index a9a6f8a..a1e79bad 100644 --- a/ash/content/shell_content_state.h +++ b/ash/content/shell_content_state.h
@@ -9,6 +9,10 @@ #include "ash/session/session_types.h" #include "base/macros.h" +#if defined(OS_CHROMEOS) +#include "ash/content/screen_orientation_delegate_chromeos.h" +#endif + namespace aura { class Window; } @@ -46,6 +50,10 @@ ShellContentState(); virtual ~ShellContentState(); +#if defined(OS_CHROMEOS) + ScreenOrientationDelegateChromeos orientation_delegate_; +#endif + private: static ShellContentState* instance_;
diff --git a/ash/display/DEPS b/ash/display/DEPS index e2976071..66bd25d 100644 --- a/ash/display/DEPS +++ b/ash/display/DEPS
@@ -1,4 +1,12 @@ include_rules = [ "+ash/host", ] - + +specific_include_rules = { + "display_color_manager_chromeos.cc": [ + "+third_party/qcms/src/qcms.h", + ], + "screen_orientation_controller_chromeos.h": [ + "+third_party/WebKit/public/platform/modules/screen_orientation/WebScreenOrientationLockType.h", + ], +}
diff --git a/ash/content/display/display_color_manager_chromeos.cc b/ash/display/display_color_manager_chromeos.cc similarity index 94% rename from ash/content/display/display_color_manager_chromeos.cc rename to ash/display/display_color_manager_chromeos.cc index f054bb0b..060ee62 100644 --- a/ash/content/display/display_color_manager_chromeos.cc +++ b/ash/display/display_color_manager_chromeos.cc
@@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "ash/content/display/display_color_manager_chromeos.h" +#include "ash/display/display_color_manager_chromeos.h" #include "base/bind.h" #include "base/bind_helpers.h" @@ -11,13 +11,14 @@ #include "base/files/file_util.h" #include "base/format_macros.h" #include "base/logging.h" +#include "base/message_loop/message_loop.h" #include "base/path_service.h" #include "base/stl_util.h" #include "base/strings/stringprintf.h" +#include "base/task_runner_util.h" #include "base/threading/sequenced_worker_pool.h" #include "chromeos/chromeos_paths.h" #include "chromeos/chromeos_switches.h" -#include "content/public/browser/browser_thread.h" #include "third_party/qcms/src/qcms.h" #include "ui/display/types/display_snapshot.h" #include "ui/display/types/gamma_ramp_rgb_entry.h" @@ -90,8 +91,7 @@ DisplayColorManager::DisplayColorManager( ui::DisplayConfigurator* configurator, base::SequencedWorkerPool* blocking_pool) - : configurator_(configurator), - blocking_pool_(blocking_pool) { + : configurator_(configurator), blocking_pool_(blocking_pool) { configurator_->AddObserver(this); } @@ -148,7 +148,7 @@ int64_t product_id, scoped_ptr<ColorCalibrationData> data, bool success) { - DCHECK_CURRENTLY_ON(content::BrowserThread::UI); + DCHECK_EQ(base::MessageLoop::current()->type(), base::MessageLoop::TYPE_UI); if (success) { // The map takes over ownership of the underlying memory. calibration_map_[product_id] = data.release(); @@ -156,10 +156,8 @@ } } -DisplayColorManager::ColorCalibrationData::ColorCalibrationData() { -} +DisplayColorManager::ColorCalibrationData::ColorCalibrationData() {} -DisplayColorManager::ColorCalibrationData::~ColorCalibrationData() { -} +DisplayColorManager::ColorCalibrationData::~ColorCalibrationData() {} } // namespace ash
diff --git a/ash/content/display/display_color_manager_chromeos.h b/ash/display/display_color_manager_chromeos.h similarity index 100% rename from ash/content/display/display_color_manager_chromeos.h rename to ash/display/display_color_manager_chromeos.h
diff --git a/ash/content/display/screen_orientation_controller_chromeos.cc b/ash/display/screen_orientation_controller_chromeos.cc similarity index 94% rename from ash/content/display/screen_orientation_controller_chromeos.cc rename to ash/display/screen_orientation_controller_chromeos.cc index 974dd651..31583563b 100644 --- a/ash/content/display/screen_orientation_controller_chromeos.cc +++ b/ash/display/screen_orientation_controller_chromeos.cc
@@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "ash/content/display/screen_orientation_controller_chromeos.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/ash_switches.h" #include "ash/display/display_info.h" @@ -14,8 +14,6 @@ #include "base/command_line.h" #include "chromeos/accelerometer/accelerometer_reader.h" #include "chromeos/accelerometer/accelerometer_types.h" -#include "content/public/browser/screen_orientation_provider.h" -#include "content/public/browser/web_contents.h" #include "ui/aura/window.h" #include "ui/aura/window_observer.h" #include "ui/chromeos/accelerometer/accelerometer_util.h" @@ -71,12 +69,10 @@ rotation_locked_orientation_(blink::WebScreenOrientationLockAny), user_rotation_(gfx::Display::ROTATE_0), current_rotation_(gfx::Display::ROTATE_0) { - content::ScreenOrientationProvider::SetDelegate(this); Shell::GetInstance()->AddShellObserver(this); } ScreenOrientationController::~ScreenOrientationController() { - content::ScreenOrientationProvider::SetDelegate(NULL); Shell::GetInstance()->RemoveShellObserver(this); chromeos::AccelerometerReader::GetInstance()->RemoveObserver(this); Shell::GetInstance()->window_tree_host_manager()->RemoveObserver(this); @@ -93,6 +89,36 @@ observers_.RemoveObserver(observer); } +void ScreenOrientationController::LockOrientationForWindow( + aura::Window* requesting_window, + blink::WebScreenOrientationLockType lock_orientation) { + if (locking_windows_.empty()) + Shell::GetInstance()->activation_client()->AddObserver(this); + + if (!requesting_window->HasObserver(this)) + requesting_window->AddObserver(this); + locking_windows_[requesting_window] = lock_orientation; + + ApplyLockForActiveWindow(); +} + +void ScreenOrientationController::UnlockOrientationForWindow( + aura::Window* window) { + locking_windows_.erase(window); + if (locking_windows_.empty()) + Shell::GetInstance()->activation_client()->RemoveObserver(this); + window->RemoveObserver(this); + ApplyLockForActiveWindow(); +} + +bool ScreenOrientationController::ScreenOrientationProviderSupported() const { + return Shell::GetInstance() + ->maximize_mode_controller() + ->IsMaximizeModeWindowManagerEnabled() && + !base::CommandLine::ForCurrentProcess()->HasSwitch( + switches::kAshDisableScreenOrientationLock); +} + void ScreenOrientationController::SetRotationLocked(bool rotation_locked) { if (rotation_locked_ == rotation_locked) return; @@ -151,7 +177,7 @@ } void ScreenOrientationController::OnWindowDestroying(aura::Window* window) { - RemoveLockingWindow(window); + UnlockOrientationForWindow(window); } void ScreenOrientationController::OnAccelerometerUpdated( @@ -168,38 +194,6 @@ } } -bool ScreenOrientationController::FullScreenRequired( - content::WebContents* web_contents) { - return true; -} - -void ScreenOrientationController::Lock( - content::WebContents* web_contents, - blink::WebScreenOrientationLockType lock_orientation) { - if (locking_windows_.empty()) - Shell::GetInstance()->activation_client()->AddObserver(this); - - aura::Window* requesting_window = web_contents->GetNativeView(); - if (!requesting_window->HasObserver(this)) - requesting_window->AddObserver(this); - locking_windows_[requesting_window] = lock_orientation; - - ApplyLockForActiveWindow(); -} - -bool ScreenOrientationController::ScreenOrientationProviderSupported() { - return Shell::GetInstance() - ->maximize_mode_controller() - ->IsMaximizeModeWindowManagerEnabled() && -
!base::CommandLine::ForCurrentProcess()->HasSwitch( - switches::kAshDisableScreenOrientationLock); -} - -void ScreenOrientationController::Unlock(content::WebContents* web_contents) { - aura::Window* requesting_window = web_contents->GetNativeView(); - RemoveLockingWindow(requesting_window); -} - void ScreenOrientationController::OnDisplayConfigurationChanged() { if (ignore_display_configuration_updates_) return; @@ -406,14 +400,6 @@ SetRotationLocked(false); } -void ScreenOrientationController::RemoveLockingWindow(aura::Window* window) { - locking_windows_.erase(window); - if (locking_windows_.empty()) - Shell::GetInstance()->activation_client()->RemoveObserver(this); - window->RemoveObserver(this); - ApplyLockForActiveWindow(); -} - bool ScreenOrientationController::IsRotationAllowedInLockedState( gfx::Display::Rotation rotation) { if (!rotation_locked_)
diff --git a/ash/content/display/screen_orientation_controller_chromeos.h b/ash/display/screen_orientation_controller_chromeos.h similarity index 88% rename from ash/content/display/screen_orientation_controller_chromeos.h rename to ash/display/screen_orientation_controller_chromeos.h index b387198..1157425 100644 --- a/ash/content/display/screen_orientation_controller_chromeos.h +++ b/ash/display/screen_orientation_controller_chromeos.h
@@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#ifndef ASH_CONTENT_DISPLAY_SCREEN_ORIENTATION_CONTROLLER_CHROMEOS_H_ -#define ASH_CONTENT_DISPLAY_SCREEN_ORIENTATION_CONTROLLER_CHROMEOS_H_ +#ifndef ASH_DISPLAY_SCREEN_ORIENTATION_CONTROLLER_CHROMEOS_H_ +#define ASH_DISPLAY_SCREEN_ORIENTATION_CONTROLLER_CHROMEOS_H_ #include <map> @@ -14,7 +14,6 @@ #include "base/observer_list.h" #include "chromeos/accelerometer/accelerometer_reader.h" #include "chromeos/accelerometer/accelerometer_types.h" -#include "content/public/browser/screen_orientation_delegate.h" #include "third_party/WebKit/public/platform/modules/screen_orientation/WebScreenOrientationLockType.h" #include "ui/aura/window_observer.h" #include "ui/gfx/display.h" @@ -24,10 +23,6 @@ class Window; } -namespace content { -class WebContents; -} - namespace ash { // Implements ChromeOS specific functionality for ScreenOrientationProvider. @@ -35,7 +30,6 @@ : public aura::client::ActivationChangeObserver, public aura::WindowObserver, public chromeos::AccelerometerReader::Observer, - public content::ScreenOrientationDelegate, public WindowTreeHostManager::Observer, public ShellObserver { public: @@ -57,6 +51,14 @@ void AddObserver(Observer* observer); void RemoveObserver(Observer* observer); + // Allows/unallows a window to lock the screen orientation. + void LockOrientationForWindow( + aura::Window* requesting_windowwindow, + blink::WebScreenOrientationLockType lock_orientation); + void UnlockOrientationForWindow(aura::Window* window); + + bool ScreenOrientationProviderSupported() const; + bool ignore_display_configuration_updates() const { return ignore_display_configuration_updates_; } @@ -89,13 +91,6 @@ void OnAccelerometerUpdated( scoped_refptr<const chromeos::AccelerometerUpdate> update) override; - // content::ScreenOrientationDelegate: - bool FullScreenRequired(content::WebContents* web_contents) override; - void Lock(content::WebContents* web_contents, - blink::WebScreenOrientationLockType lock_orientation) override; - bool ScreenOrientationProviderSupported() override; - void Unlock(content::WebContents* web_contents) override; - // WindowTreeHostManager::Observer: void OnDisplayConfigurationChanged() override; @@ -144,9 +139,6 @@ // window, and applies it. If there is none, rotation lock will be removed. void ApplyLockForActiveWindow(); - // Removes a window and its locking preference. - void RemoveLockingWindow(aura::Window* window); - // Both |blink::WebScreenOrientationLockLandscape| and // |blink::WebScreenOrientationLockPortrait| allow for rotation between the // two angles of the same screen orientation @@ -192,4 +184,4 @@ } // namespace ash -#endif // ASH_CONTENT_DISPLAY_SCREEN_ORIENTATION_CONTROLLER_CHROMEOS_H_ +#endif // ASH_DISPLAY_SCREEN_ORIENTATION_CONTROLLER_CHROMEOS_H_
diff --git a/ash/shell.cc b/ash/shell.cc index 2b73c98..20f464d 100644 --- a/ash/shell.cc +++ b/ash/shell.cc
@@ -121,13 +121,13 @@ #include "ash/accelerators/magnifier_key_scroller.h" #include "ash/accelerators/spoken_feedback_toggler.h" #include "ash/ash_constants.h" -#include "ash/content/display/display_color_manager_chromeos.h" -#include "ash/content/display/screen_orientation_controller_chromeos.h" #include "ash/display/display_change_observer_chromeos.h" +#include "ash/display/display_color_manager_chromeos.h" #include "ash/display/display_configurator_animation.h" #include "ash/display/display_error_observer_chromeos.h" #include "ash/display/projecting_observer_chromeos.h" #include "ash/display/resolution_notification_controller.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/sticky_keys/sticky_keys_controller.h" #include "ash/system/chromeos/bluetooth/bluetooth_notification_controller.h" #include "ash/system/chromeos/brightness/brightness_controller_chromeos.h"
diff --git a/ash/system/chromeos/rotation/tray_rotation_lock.cc b/ash/system/chromeos/rotation/tray_rotation_lock.cc index 060bc52..4704586 100644 --- a/ash/system/chromeos/rotation/tray_rotation_lock.cc +++ b/ash/system/chromeos/rotation/tray_rotation_lock.cc
@@ -4,7 +4,7 @@ #include "ash/system/chromeos/rotation/tray_rotation_lock.h" -#include "ash/content/display/screen_orientation_controller_chromeos.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/shell.h" #include "ash/system/tray/system_tray.h" #include "ash/system/tray/tray_item_more.h"
diff --git a/ash/system/chromeos/rotation/tray_rotation_lock.h b/ash/system/chromeos/rotation/tray_rotation_lock.h index aa4853e..a1df95a 100644 --- a/ash/system/chromeos/rotation/tray_rotation_lock.h +++ b/ash/system/chromeos/rotation/tray_rotation_lock.h
@@ -5,7 +5,7 @@ #ifndef ASH_SYSTEM_CHROMEOS_ROTATION_TRAY_ROTATION_LOCK_H_ #define ASH_SYSTEM_CHROMEOS_ROTATION_TRAY_ROTATION_LOCK_H_ -#include "ash/content/display/screen_orientation_controller_chromeos.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/shell_observer.h" #include "ash/system/tray/tray_image_item.h"
diff --git a/ash/system/chromeos/rotation/tray_rotation_lock_unittest.cc b/ash/system/chromeos/rotation/tray_rotation_lock_unittest.cc index 6d036e9..6ff259f 100644 --- a/ash/system/chromeos/rotation/tray_rotation_lock_unittest.cc +++ b/ash/system/chromeos/rotation/tray_rotation_lock_unittest.cc
@@ -5,8 +5,8 @@ #include "ash/system/chromeos/rotation/tray_rotation_lock.h" #include "ash/ash_switches.h" -#include "ash/content/display/screen_orientation_controller_chromeos.h" #include "ash/display/display_manager.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/root_window_controller.h" #include "ash/shelf/shelf_widget.h" #include "ash/shell.h"
diff --git a/ash/system/chromeos/tray_display.cc b/ash/system/chromeos/tray_display.cc index 1445d03..f8a0157 100644 --- a/ash/system/chromeos/tray_display.cc +++ b/ash/system/chromeos/tray_display.cc
@@ -6,8 +6,8 @@ #include <vector> -#include "ash/content/display/screen_orientation_controller_chromeos.h" #include "ash/display/display_manager.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/display/window_tree_host_manager.h" #include "ash/shell.h" #include "ash/system/chromeos/devicetype_utils.h"
diff --git a/ash/system/tray/media_security/multi_profile_media_tray_item.cc b/ash/system/tray/media_security/multi_profile_media_tray_item.cc index 468aa647..75c2190 100644 --- a/ash/system/tray/media_security/multi_profile_media_tray_item.cc +++ b/ash/system/tray/media_security/multi_profile_media_tray_item.cc
@@ -5,7 +5,6 @@ #include "ash/system/tray/media_security/multi_profile_media_tray_item.h" #include "ash/ash_view_ids.h" -#include "ash/content/shell_content_state.h" #include "ash/media_delegate.h" #include "ash/session/session_state_delegate.h" #include "ash/shell.h"
diff --git a/ash/system/user/user_card_view.cc b/ash/system/user/user_card_view.cc index e582c8a..6ce0c18 100644 --- a/ash/system/user/user_card_view.cc +++ b/ash/system/user/user_card_view.cc
@@ -7,7 +7,6 @@ #include <algorithm> #include <vector> -#include "ash/content/shell_content_state.h" #include "ash/session/session_state_delegate.h" #include "ash/shell.h" #include "ash/system/tray/system_tray_delegate.h"
diff --git a/ash/test/ash_test_helper.cc b/ash/test/ash_test_helper.cc index a01da66..995b9f50 100644 --- a/ash/test/ash_test_helper.cc +++ b/ash/test/ash_test_helper.cc
@@ -49,7 +49,8 @@ : message_loop_(message_loop), test_shell_delegate_(nullptr), test_screenshot_delegate_(nullptr), - content_state_(nullptr) { + content_state_(nullptr), + test_shell_content_state_(nullptr) { CHECK(message_loop_); #if defined(OS_CHROMEOS) dbus_thread_manager_initialized_ = false; @@ -94,8 +95,12 @@ // created in AshTestBase tests. chromeos::CrasAudioHandler::InitializeForTesting(); #endif - ShellContentState::SetInstance(content_state_ ? content_state_ - : new TestShellContentState); + ShellContentState* content_state = content_state_; + if (!content_state) { + test_shell_content_state_ = new TestShellContentState; + content_state = test_shell_content_state_; + } + ShellContentState::SetInstance(content_state); ShellInitParams init_params; init_params.delegate = test_shell_delegate_;
diff --git a/ash/test/ash_test_helper.h b/ash/test/ash_test_helper.h index eb5bf66..daa3316 100644 --- a/ash/test/ash_test_helper.h +++ b/ash/test/ash_test_helper.h
@@ -29,6 +29,7 @@ namespace test { class TestScreenshotDelegate; +class TestShellContentState; class TestShellDelegate; class TestSessionStateDelegate; @@ -64,6 +65,9 @@ TestScreenshotDelegate* test_screenshot_delegate() { return test_screenshot_delegate_; } + TestShellContentState* test_shell_content_state() { + return test_shell_content_state_; + } void set_content_state(ShellContentState* content_state) { content_state_ = content_state; } @@ -86,7 +90,13 @@ scoped_ptr<views::ViewsDelegate> views_delegate_; + // An implementation of ShellContentState supplied by the user prior to + // SetUp(). ShellContentState* content_state_; + // If |content_state_| is not set prior to SetUp(), this value will be + // set to an instance of TestShellContentState created by this class. If + // |content_state_| is non-null, this will be nullptr. + TestShellContentState* test_shell_content_state_; #if defined(OS_CHROMEOS) // Check if DBus Thread Manager was initialized here.
diff --git a/ash/test/content/test_shell_content_state.cc b/ash/test/content/test_shell_content_state.cc index a46094b..b34200a 100644 --- a/ash/test/content/test_shell_content_state.cc +++ b/ash/test/content/test_shell_content_state.cc
@@ -7,10 +7,8 @@ #include "content/public/test/test_browser_context.h" namespace ash { +namespace test { -// Copyright 2015 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. TestShellContentState::TestShellContentState() {} TestShellContentState::~TestShellContentState() {} @@ -35,4 +33,5 @@ return nullptr; } +} // namespace test } // namespace ash
diff --git a/ash/test/content/test_shell_content_state.h b/ash/test/content/test_shell_content_state.h index 1f967959..9b06dbe 100644 --- a/ash/test/content/test_shell_content_state.h +++ b/ash/test/content/test_shell_content_state.h
@@ -13,11 +13,18 @@ } namespace ash { +namespace test { class TestShellContentState : public ShellContentState { public: TestShellContentState(); +#if defined(OS_CHROMEOS) + content::ScreenOrientationDelegate* screen_orientation_delegate() { + return &orientation_delegate_; + } +#endif + private: ~TestShellContentState() override; @@ -34,6 +41,7 @@ DISALLOW_COPY_AND_ASSIGN(TestShellContentState); }; +} // namespace test } // namespace ash #endif // ASH_TEST_CONTENT_TEST_SHELL_CONTENT_STATE_H_
diff --git a/ash/wm/overview/scoped_transform_overview_window.cc b/ash/wm/overview/scoped_transform_overview_window.cc index a3ebe83..993dcac 100644 --- a/ash/wm/overview/scoped_transform_overview_window.cc +++ b/ash/wm/overview/scoped_transform_overview_window.cc
@@ -225,6 +225,12 @@ gfx::Rect ScopedTransformOverviewWindow::GetTargetBoundsInScreen() const { gfx::Rect bounds; for (const auto& window : GetTransientTreeIterator(window_)) { + // Ignore other window types when computing bounding box of window + // selector target item. + if (window != window_ && window->type() != ui::wm::WINDOW_TYPE_NORMAL && + window->type() != ui::wm::WINDOW_TYPE_PANEL) { + continue; + } bounds.Union(ScreenUtil::ConvertRectToScreen(window->parent(), window->GetTargetBounds())); }
diff --git a/base/files/file_path.h b/base/files/file_path.h index 25b8391..fba2f98a 100644 --- a/base/files/file_path.h +++ b/base/files/file_path.h
@@ -124,6 +124,15 @@ #define FILE_PATH_USES_WIN_SEPARATORS #endif // OS_WIN +// To print path names portably use PRIsFP (based on PRIuS and friends from +// C99 and format_macros.h) like this: +// base::StringPrintf("Path is %" PRIsFP ".\n", path.value().c_str()); +#if defined(OS_POSIX) +#define PRIsFP "s" +#elif defined(OS_WIN) +#define PRIsFP "ls" +#endif // OS_WIN + namespace base { class Pickle;
diff --git a/base/memory/shared_memory_win.cc b/base/memory/shared_memory_win.cc index 3eef9a9..9665a7c2 100644 --- a/base/memory/shared_memory_win.cc +++ b/base/memory/shared_memory_win.cc
@@ -146,7 +146,7 @@ // So, we generate a random name when we need to enforce read-only. uint64_t rand_values[4]; RandBytes(&rand_values, sizeof(rand_values)); - name_ = StringPrintf(L"CrSharedMem_%016x%016x%016x%016x", + name_ = StringPrintf(L"CrSharedMem_%016llx%016llx%016llx%016llx", rand_values[0], rand_values[1], rand_values[2], rand_values[3]); }
diff --git a/base/strings/stringprintf.h b/base/strings/stringprintf.h index 523f7ee..4b2eab1 100644 --- a/base/strings/stringprintf.h +++ b/base/strings/stringprintf.h
@@ -11,15 +11,26 @@ #include "base/base_export.h" #include "base/compiler_specific.h" +#include "build/build_config.h" + +#ifdef COMPILER_MSVC +// For _Printf_format_string_. +#include <sal.h> +#else +// For nacl builds when sal.h is not available. +#define _Printf_format_string_ +#endif namespace base { // Return a C++ string given printf-like input. -BASE_EXPORT std::string StringPrintf(const char* format, ...) +BASE_EXPORT std::string StringPrintf(_Printf_format_string_ const char* format, + ...) PRINTF_FORMAT(1, 2) WARN_UNUSED_RESULT; #if defined(OS_WIN) -BASE_EXPORT std::wstring StringPrintf(const wchar_t* format, ...) - WPRINTF_FORMAT(1, 2) WARN_UNUSED_RESULT; +BASE_EXPORT std::wstring StringPrintf( + _Printf_format_string_ const wchar_t* format, + ...) WPRINTF_FORMAT(1, 2) WARN_UNUSED_RESULT; #endif // Return a C++ string given vprintf-like input. @@ -27,21 +38,25 @@ PRINTF_FORMAT(1, 0) WARN_UNUSED_RESULT; // Store result into a supplied string and return it. -BASE_EXPORT const std::string& SStringPrintf(std::string* dst, - const char* format, ...) - PRINTF_FORMAT(2, 3); +BASE_EXPORT const std::string& SStringPrintf( + std::string* dst, + _Printf_format_string_ const char* format, + ...) PRINTF_FORMAT(2, 3); #if defined(OS_WIN) -BASE_EXPORT const std::wstring& SStringPrintf(std::wstring* dst, - const wchar_t* format, ...) - WPRINTF_FORMAT(2, 3); +BASE_EXPORT const std::wstring& SStringPrintf( + std::wstring* dst, + _Printf_format_string_ const wchar_t* format, + ...) WPRINTF_FORMAT(2, 3); #endif // Append result to a supplied string. -BASE_EXPORT void StringAppendF(std::string* dst, const char* format, ...) - PRINTF_FORMAT(2, 3); +BASE_EXPORT void StringAppendF(std::string* dst, + _Printf_format_string_ const char* format, + ...) PRINTF_FORMAT(2, 3); #if defined(OS_WIN) -BASE_EXPORT void StringAppendF(std::wstring* dst, const wchar_t* format, ...) - WPRINTF_FORMAT(2, 3); +BASE_EXPORT void StringAppendF(std::wstring* dst, + _Printf_format_string_ const wchar_t* format, + ...) WPRINTF_FORMAT(2, 3); #endif // Lower-level routine that takes a va_list and appends to a specified
diff --git a/base/trace_event/process_memory_maps_dump_provider.cc b/base/trace_event/process_memory_maps_dump_provider.cc index 38b2573e..335dd829a 100644 --- a/base/trace_event/process_memory_maps_dump_provider.cc +++ b/base/trace_event/process_memory_maps_dump_provider.cc
@@ -4,11 +4,9 @@ #include "base/trace_event/process_memory_maps_dump_provider.h" -#include <cctype> -#include <fstream> - +#include "base/format_macros.h" #include "base/logging.h" -#include "base/process/process_metrics.h" +#include "base/strings/string_util.h" #include "base/trace_event/process_memory_dump.h" #include "base/trace_event/process_memory_maps.h" @@ -17,22 +15,25 @@ #if defined(OS_LINUX) || defined(OS_ANDROID) // static -std::istream* ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = nullptr; +FILE* ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = nullptr; namespace { const uint32 kMaxLineSize = 4096; -bool ParseSmapsHeader(std::istream* smaps, +bool ParseSmapsHeader(const char* header_line, ProcessMemoryMaps::VMRegion* region) { // e.g., "00400000-00421000 r-xp 00000000 fc:01 1234 /foo.so\n" bool res = true; // Whether this region should be appended or skipped. - uint64 end_addr; - std::string protection_flags; - std::string ignored; - *smaps >> std::hex >> region->start_address; - smaps->ignore(1); - *smaps >> std::hex >> end_addr; + uint64 end_addr = 0; + char protection_flags[5] = {0}; + char mapped_file[kMaxLineSize]; + + if (sscanf(header_line, "%" SCNx64 "-%" SCNx64 " %4c %*s %*s %*s%4095[^\n]\n", &region->start_address, &end_addr, protection_flags, + mapped_file) != 4) + return false; + if (end_addr > region->start_address) { region->size_in_bytes = end_addr - region->start_address; } else { @@ -42,8 +43,6 @@ } region->protection_flags = 0; - *smaps >> protection_flags; - CHECK_EQ(4UL, protection_flags.size()); if (protection_flags[0] == 'r') { region->protection_flags |= ProcessMemoryMaps::VMRegion::kProtectionFlagsRead; @@ -56,84 +55,70 @@ region->protection_flags |= ProcessMemoryMaps::VMRegion::kProtectionFlagsExec; } - *smaps >> ignored; // Ignore mapped file offset. - *smaps >> ignored; // Ignore device maj-min (fc:01 in the example above). - *smaps >> ignored; // Ignore inode number (1234 in the example above). - while (smaps->peek() == ' ') - smaps->ignore(1); - char mapped_file[kMaxLineSize]; - smaps->getline(mapped_file, sizeof(mapped_file)); region->mapped_file = mapped_file; + TrimWhitespaceASCII(region->mapped_file, TRIM_ALL, &region->mapped_file); return res; } -uint64 ReadCounterBytes(std::istream* smaps) { +uint64 ReadCounterBytes(char* counter_line) { uint64 counter_value = 0; - *smaps >> std::dec >> counter_value; + int res = sscanf(counter_line, "%*s %" SCNu64 " kB", &counter_value); + DCHECK_EQ(1, res); return counter_value * 1024; } -uint32 ParseSmapsCounter(std::istream* smaps, +uint32 ParseSmapsCounter(char* counter_line, ProcessMemoryMaps::VMRegion* region) { // A smaps counter lines looks as follows: "RSS: 0 Kb\n" uint32 res = 1; - std::string counter_name; - *smaps >> counter_name; + char counter_name[20]; + int did_read = sscanf(counter_line, "%19[^\n ]", counter_name); + DCHECK_EQ(1, did_read); - // TODO(primiano): "Swap" should also be accounted as resident. Check - whether Rss isn't already counting swapped and fix below if that is - the case.
- if (counter_name == "Pss:") { - region->byte_stats_proportional_resident = ReadCounterBytes(smaps); - } else if (counter_name == "Private_Dirty:") { - region->byte_stats_private_dirty_resident = ReadCounterBytes(smaps); - } else if (counter_name == "Private_Clean:") { - region->byte_stats_private_clean_resident = ReadCounterBytes(smaps); - } else if (counter_name == "Shared_Dirty:") { - region->byte_stats_shared_dirty_resident = ReadCounterBytes(smaps); - } else if (counter_name == "Shared_Clean:") { - region->byte_stats_shared_clean_resident = ReadCounterBytes(smaps); - } else if (counter_name == "Swap:") { - region->byte_stats_swapped = ReadCounterBytes(smaps); + if (strcmp(counter_name, "Pss:") == 0) { + region->byte_stats_proportional_resident = ReadCounterBytes(counter_line); + } else if (strcmp(counter_name, "Private_Dirty:") == 0) { + region->byte_stats_private_dirty_resident = ReadCounterBytes(counter_line); + } else if (strcmp(counter_name, "Private_Clean:") == 0) { + region->byte_stats_private_clean_resident = ReadCounterBytes(counter_line); + } else if (strcmp(counter_name, "Shared_Dirty:") == 0) { + region->byte_stats_shared_dirty_resident = ReadCounterBytes(counter_line); + } else if (strcmp(counter_name, "Shared_Clean:") == 0) { + region->byte_stats_shared_clean_resident = ReadCounterBytes(counter_line); + } else if (strcmp(counter_name, "Swap:") == 0) { + region->byte_stats_swapped = ReadCounterBytes(counter_line); } else { res = 0; } -#ifndef NDEBUG - // Paranoid check against changes of the Kernel /proc interface. - if (res) { - std::string unit; - *smaps >> unit; - DCHECK_EQ("kB", unit); - } -#endif - - smaps->ignore(kMaxLineSize, '\n'); - return res; } -uint32 ReadLinuxProcSmapsFile(std::istream* smaps, ProcessMemoryMaps* pmm) { - if (!smaps->good()) +uint32 ReadLinuxProcSmapsFile(FILE* smaps_file, ProcessMemoryMaps* pmm) { + if (!smaps_file) return 0; + fseek(smaps_file, 0, SEEK_SET); + + char line[kMaxLineSize]; const uint32 kNumExpectedCountersPerRegion = 6; uint32 counters_parsed_for_current_region = 0; uint32 num_valid_regions = 0; ProcessMemoryMaps::VMRegion region; bool should_add_current_region = false; for (;;) { - int next = smaps->peek(); - if (next == std::ifstream::traits_type::eof() || next == '\n') + line[0] = '\0'; + if (fgets(line, kMaxLineSize, smaps_file) == nullptr) break; - if (isxdigit(next) && !isupper(next)) { + DCHECK_GT(strlen(line), 0u); + if (isxdigit(line[0]) && !isupper(line[0])) { region = ProcessMemoryMaps::VMRegion(); counters_parsed_for_current_region = 0; - should_add_current_region = ParseSmapsHeader(smaps, &region); + should_add_current_region = ParseSmapsHeader(line, &region); } else { - counters_parsed_for_current_region += ParseSmapsCounter(smaps, &region); + counters_parsed_for_current_region += ParseSmapsCounter(line, &region); DCHECK_LE(counters_parsed_for_current_region, kNumExpectedCountersPerRegion); if (counters_parsed_for_current_region == kNumExpectedCountersPerRegion) { @@ -177,8 +162,8 @@ if (UNLIKELY(proc_smaps_for_testing)) { res = ReadLinuxProcSmapsFile(proc_smaps_for_testing, pmd->process_mmaps()); } else { - std::ifstream proc_self_smaps("/proc/self/smaps"); - res = ReadLinuxProcSmapsFile(&proc_self_smaps, pmd->process_mmaps()); + ScopedFILE smaps_file(fopen("/proc/self/smaps", "r")); + res = ReadLinuxProcSmapsFile(smaps_file.get(), pmd->process_mmaps()); } #else LOG(ERROR) << "ProcessMemoryMaps dump provider is supported only on Linux";
diff --git a/base/trace_event/process_memory_maps_dump_provider.h b/base/trace_event/process_memory_maps_dump_provider.h index 5a0f84c..74529bd 100644 --- a/base/trace_event/process_memory_maps_dump_provider.h +++ b/base/trace_event/process_memory_maps_dump_provider.h
@@ -5,12 +5,14 @@ #ifndef BASE_TRACE_EVENT_PROCESS_MEMORY_MAPS_DUMP_PROVIDER_H_ #define BASE_TRACE_EVENT_PROCESS_MEMORY_MAPS_DUMP_PROVIDER_H_ -#include <istream> - #include "base/gtest_prod_util.h" #include "base/memory/singleton.h" #include "base/trace_event/memory_dump_provider.h" +#if defined(OS_LINUX) || defined(OS_ANDROID) +#include "base/files/scoped_file.h" +#endif + namespace base { namespace trace_event { @@ -28,7 +30,7 @@ FRIEND_TEST_ALL_PREFIXES(ProcessMemoryMapsDumpProviderTest, ParseProcSmaps); #if defined(OS_LINUX) || defined(OS_ANDROID) - static std::istream* proc_smaps_for_testing; + static FILE* proc_smaps_for_testing; #endif ProcessMemoryMapsDumpProvider();
diff --git a/base/trace_event/process_memory_maps_dump_provider_unittest.cc b/base/trace_event/process_memory_maps_dump_provider_unittest.cc index a73a21c7..8fcc9cc 100644 --- a/base/trace_event/process_memory_maps_dump_provider_unittest.cc +++ b/base/trace_event/process_memory_maps_dump_provider_unittest.cc
@@ -4,9 +4,7 @@ #include "base/trace_event/process_memory_maps_dump_provider.h" -#include <fstream> -#include <sstream> - +#include "base/files/file_util.h" #include "base/trace_event/process_memory_dump.h" #include "base/trace_event/process_memory_maps.h" #include "base/trace_event/trace_event_argument.h" @@ -104,6 +102,18 @@ "MMUPageSize: 4 kB\n" "Locked: 0 kB\n" "VmFlags: rd wr mr mw me ac sd\n"; + +void CreateAndSetSmapsFileForTesting(const char* smaps_string, + ScopedFILE& file) { + FilePath temp_path; + FILE* temp_file = CreateAndOpenTemporaryFile(&temp_path); + file.reset(temp_file); + ASSERT_TRUE(temp_file); + + ASSERT_TRUE(base::WriteFileDescriptor(fileno(temp_file), smaps_string, + strlen(smaps_string))); +} + } // namespace TEST(ProcessMemoryMapsDumpProviderTest, ParseProcSmaps) { @@ -114,25 +124,19 @@ auto pmmdp = ProcessMemoryMapsDumpProvider::GetInstance(); - // Emulate a non-existent /proc/self/smaps. - ProcessMemoryDump pmd_invalid(nullptr /* session_state */); - std::ifstream non_existent_file("/tmp/does-not-exist"); - ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = &non_existent_file; - CHECK_EQ(false, non_existent_file.good()); - pmmdp->OnMemoryDump(dump_args, &pmd_invalid); - ASSERT_FALSE(pmd_invalid.has_process_mmaps()); - // Emulate an empty /proc/self/smaps. - std::ifstream empty_file("/dev/null"); - ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = &empty_file; - CHECK_EQ(true, empty_file.good()); + ProcessMemoryDump pmd_invalid(nullptr /* session_state */); + ScopedFILE empty_file(OpenFile(FilePath("/dev/null"), "r")); + ASSERT_TRUE(empty_file.get()); + ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = empty_file.get(); pmmdp->OnMemoryDump(dump_args, &pmd_invalid); ASSERT_FALSE(pmd_invalid.has_process_mmaps()); // Parse the 1st smaps file. ProcessMemoryDump pmd_1(nullptr /* session_state */); - std::istringstream test_smaps_1(kTestSmaps1); - ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = &test_smaps_1; + ScopedFILE temp_file1; + CreateAndSetSmapsFileForTesting(kTestSmaps1, temp_file1); + ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = temp_file1.get(); pmmdp->OnMemoryDump(dump_args, &pmd_1); ASSERT_TRUE(pmd_1.has_process_mmaps()); const auto& regions_1 = pmd_1.process_mmaps()->vm_regions(); @@ -162,8 +166,9 @@ // Parse the 2nd smaps file. ProcessMemoryDump pmd_2(nullptr /* session_state */); - std::istringstream test_smaps_2(kTestSmaps2); - ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = &test_smaps_2; + ScopedFILE temp_file2; + CreateAndSetSmapsFileForTesting(kTestSmaps2, temp_file2); + ProcessMemoryMapsDumpProvider::proc_smaps_for_testing = temp_file2.get(); pmmdp->OnMemoryDump(dump_args, &pmd_2); ASSERT_TRUE(pmd_2.has_process_mmaps()); const auto& regions_2 = pmd_2.process_mmaps()->vm_regions();
diff --git a/blimp/client/compositor/blimp_compositor.cc b/blimp/client/compositor/blimp_compositor.cc index 3cd8baf5..9086ad5 100644 --- a/blimp/client/compositor/blimp_compositor.cc +++ b/blimp/client/compositor/blimp_compositor.cc
@@ -68,7 +68,6 @@ cc::LayerTreeHost::CreateThreaded(GetCompositorTaskRunner(), &params); host_->SetVisible(true); - host_->SetLayerTreeHostClientReady(); host_->SetViewportSize(viewport_size_); host_->SetDeviceScaleFactor(device_scale_factor_);
diff --git a/build/android/pylib/instrumentation/test_runner.py b/build/android/pylib/instrumentation/test_runner.py index 799d6a7..5266aed 100644 --- a/build/android/pylib/instrumentation/test_runner.py +++ b/build/android/pylib/instrumentation/test_runner.py
@@ -321,7 +321,7 @@ extras['class'] = test return self.device.StartInstrumentation( '%s/%s' % (self.test_pkg.GetPackageName(), self.options.test_runner), - raw=True, extras=extras, timeout=timeout, retries=3) + raw=True, extras=extras, timeout=timeout, retries=0) # pylint: disable=no-self-use def _GenerateTestResult(self, test, instr_result_code, instr_result_bundle,
diff --git a/build/config/nacl/config.gni b/build/config/nacl/config.gni index 887c853a..73e9c4d 100644 --- a/build/config/nacl/config.gni +++ b/build/config/nacl/config.gni
@@ -8,3 +8,5 @@ # is being used instead. is_nacl_glibc = false } + +nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86"
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn index b0c233a..009c701d 100644 --- a/build/toolchain/mac/BUILD.gn +++ b/build/toolchain/mac/BUILD.gn
@@ -83,9 +83,8 @@ tool("objc") { depfile = "{{output}}.d" - command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}" precompiled_header_type = "gcc" - command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} {{cflags_objc}} -c {{source}} -o {{output}}" + command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}" depsformat = "gcc" description = "OBJC {{output}}" outputs = [ @@ -95,8 +94,8 @@ tool("objcxx") { depfile = "{{output}}.d" - command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}" precompiled_header_type = "gcc" + command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}" depsformat = "gcc" description = "OBJCXX {{output}}" outputs = [
diff --git a/build/toolchain/nacl/BUILD.gn b/build/toolchain/nacl/BUILD.gn index 68696029..6345cbc5 100644 --- a/build/toolchain/nacl/BUILD.gn +++ b/build/toolchain/nacl/BUILD.gn
@@ -3,11 +3,9 @@ # found in the LICENSE file. import("//build/config/sysroot.gni") +import("//build/config/nacl/config.gni") import("//build/toolchain/nacl_toolchain.gni") -nacl_toolchain_dir = rebase_path("//native_client/toolchain", root_build_dir) -os_toolchain_dir = "${nacl_toolchain_dir}/${host_os}_x86" - # Add the toolchain revision as a preprocessor define so that sources are # rebuilt when a toolchain is updated. # Idea we could use the toolchain deps feature, but currently that feature is @@ -28,7 +26,9 @@ toolchain_package = "pnacl_newlib" toolchain_revision = pnacl_newlib_rev toolchain_cpu = "pnacl" - toolprefix = "${os_toolchain_dir}/${toolchain_package}/bin/pnacl-" + toolprefix = + rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/pnacl-", + root_build_dir) cc = toolprefix + "clang" cxx = toolprefix + "clang++" @@ -48,7 +48,9 @@ toolchain_package = "nacl_x86_glibc" toolchain_revision = nacl_x86_glibc_rev toolchain_cpu = "x86" - toolprefix = "${os_toolchain_dir}/${toolchain_package}/bin/i686-nacl-" + toolprefix = + rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/i686-nacl-", + root_build_dir) is_clang = false is_nacl_glibc = true @@ -62,7 +64,9 @@ toolchain_package = "nacl_x86_glibc" toolchain_revision = nacl_x86_glibc_rev toolchain_cpu = "x64" - toolprefix = "${os_toolchain_dir}/${toolchain_package}/bin/x86_64-nacl-" + toolprefix = + rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/x86_64-nacl-", + root_build_dir) is_clang = false is_nacl_glibc = true @@ -78,8 +82,9 @@ toolchain_package = "pnacl_newlib" toolchain_revision = pnacl_newlib_rev - toolprefix = "${os_toolchain_dir}/${toolchain_package}/bin/" + - invoker.toolchain_tuple + "-" + toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" + + invoker.toolchain_tuple + "-", + root_build_dir) nacl_toolchain("clang_newlib_" + toolchain_cpu) { is_clang = true @@ -96,8 +101,9 @@ toolchain_package = "pnacl_newlib" toolchain_revision = pnacl_newlib_rev - toolprefix = "${os_toolchain_dir}/${toolchain_package}/bin/" + - invoker.toolchain_tuple + "-" + toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" + + invoker.toolchain_tuple + "-", + root_build_dir) link_irt = rebase_path("//native_client/build/link_irt.py", root_build_dir)
diff --git a/cc/cc.gyp b/cc/cc.gyp index 7d02860..a4f70ff 100644 --- a/cc/cc.gyp +++ b/cc/cc.gyp
@@ -370,7 +370,6 @@ 'playback/raster_source.h', 'playback/raster_source_helper.cc', 'playback/raster_source_helper.h', - 'playback/recording_source.h', 'playback/transform_display_item.cc', 'playback/transform_display_item.h', 'quads/content_draw_quad_base.cc',
diff --git a/cc/debug/rasterize_and_record_benchmark.cc b/cc/debug/rasterize_and_record_benchmark.cc index 9b958d3b..9083fe9 100644 --- a/cc/debug/rasterize_and_record_benchmark.cc +++ b/cc/debug/rasterize_and_record_benchmark.cc
@@ -17,6 +17,7 @@ #include "cc/layers/layer.h" #include "cc/layers/picture_layer.h" #include "cc/playback/display_item_list.h" +#include "cc/playback/display_list_recording_source.h" #include "cc/trees/layer_tree_host.h" #include "cc/trees/layer_tree_host_common.h" #include "skia/ext/analysis_canvas.h" @@ -34,11 +35,8 @@ const int kWarmupRuns = 0; const int kTimeCheckInterval = 1; -const char* kModeSuffixes[RecordingSource::RECORDING_MODE_COUNT] = { - "", - "_sk_null_canvas", - "_painting_disabled", - "_caching_disabled", +const char* kModeSuffixes[DisplayListRecordingSource::RECORDING_MODE_COUNT] = { + "", "_sk_null_canvas", "_painting_disabled", "_caching_disabled", "_construction_disabled"}; } // namespace @@ -77,7 +75,7 @@ results_->SetInteger("picture_memory_usage", static_cast<int>(record_results_.bytes_used)); - for (int i = 0; i < RecordingSource::RECORDING_MODE_COUNT; i++) { + for (int i = 0; i < DisplayListRecordingSource::RECORDING_MODE_COUNT; i++) { std::string name = base::StringPrintf("record_time%s_ms", kModeSuffixes[i]); results_->SetDouble(name, record_results_.total_best_time[i].InMillisecondsF()); @@ -121,24 +119,26 @@ const gfx::Rect& visible_layer_rect) { ContentLayerClient* painter = layer->client(); - for (int mode_index = 0; mode_index < RecordingSource::RECORDING_MODE_COUNT; + for (int mode_index = 0; + mode_index < DisplayListRecordingSource::RECORDING_MODE_COUNT; mode_index++) { ContentLayerClient::PaintingControlSetting painting_control = ContentLayerClient::PAINTING_BEHAVIOR_NORMAL; - switch (static_cast<RecordingSource::RecordingMode>(mode_index)) { - case RecordingSource::RECORD_NORMALLY: + switch ( + static_cast<DisplayListRecordingSource::RecordingMode>(mode_index)) { + case DisplayListRecordingSource::RECORD_NORMALLY: // Already setup for normal recording. break; - case RecordingSource::RECORD_WITH_SK_NULL_CANVAS: + case DisplayListRecordingSource::RECORD_WITH_SK_NULL_CANVAS: // Not supported for Display List recording. continue; - case RecordingSource::RECORD_WITH_PAINTING_DISABLED: + case DisplayListRecordingSource::RECORD_WITH_PAINTING_DISABLED: painting_control = ContentLayerClient::DISPLAY_LIST_PAINTING_DISABLED; break; - case RecordingSource::RECORD_WITH_CACHING_DISABLED: + case DisplayListRecordingSource::RECORD_WITH_CACHING_DISABLED: painting_control = ContentLayerClient::DISPLAY_LIST_CACHING_DISABLED; break; - case RecordingSource::RECORD_WITH_CONSTRUCTION_DISABLED: + case DisplayListRecordingSource::RECORD_WITH_CONSTRUCTION_DISABLED: painting_control = ContentLayerClient::DISPLAY_LIST_CONSTRUCTION_DISABLED; break; @@ -180,7 +180,7 @@ min_time = duration; } - if (mode_index == RecordingSource::RECORD_NORMALLY) { + if (mode_index == DisplayListRecordingSource::RECORD_NORMALLY) { record_results_.bytes_used += memory_used + painter->GetApproximateUnsharedMemoryUsage(); record_results_.pixels_recorded +=
diff --git a/cc/debug/rasterize_and_record_benchmark.h b/cc/debug/rasterize_and_record_benchmark.h index 97cecb1..1e69851 100644 --- a/cc/debug/rasterize_and_record_benchmark.h +++ b/cc/debug/rasterize_and_record_benchmark.h
@@ -13,7 +13,7 @@ #include "base/single_thread_task_runner.h" #include "base/time/time.h" #include "cc/debug/micro_benchmark_controller.h" -#include "cc/playback/recording_source.h" +#include "cc/playback/display_list_recording_source.h" #include "ui/gfx/geometry/rect.h" namespace base { @@ -50,7 +50,8 @@ int pixels_recorded; size_t bytes_used; - base::TimeDelta total_best_time[RecordingSource::RECORDING_MODE_COUNT]; + base::TimeDelta + total_best_time[DisplayListRecordingSource::RECORDING_MODE_COUNT]; }; RecordResults record_results_;
diff --git a/cc/layers/delegated_renderer_layer_impl_unittest.cc b/cc/layers/delegated_renderer_layer_impl_unittest.cc index 5deef6c1..11a0ed36 100644 --- a/cc/layers/delegated_renderer_layer_impl_unittest.cc +++ b/cc/layers/delegated_renderer_layer_impl_unittest.cc
@@ -39,6 +39,7 @@ host_impl_.reset(new FakeLayerTreeHostImpl( settings, &proxy_, &shared_bitmap_manager_, &task_graph_runner_)); + host_impl_->SetVisible(true); host_impl_->InitializeRenderer(output_surface_.get()); host_impl_->SetViewportSize(gfx::Size(10, 10)); }
diff --git a/cc/layers/heads_up_display_layer_impl_unittest.cc b/cc/layers/heads_up_display_layer_impl_unittest.cc index ef37783..6cbc8403 100644 --- a/cc/layers/heads_up_display_layer_impl_unittest.cc +++ b/cc/layers/heads_up_display_layer_impl_unittest.cc
@@ -38,6 +38,7 @@ FakeLayerTreeHostImpl host_impl(&proxy, &shared_bitmap_manager, &task_graph_runner); host_impl.CreatePendingTree(); + host_impl.SetVisible(true); host_impl.InitializeRenderer(output_surface.get()); scoped_ptr<HeadsUpDisplayLayerImpl> layer = HeadsUpDisplayLayerImpl::Create(host_impl.pending_tree(), 1);
diff --git a/cc/layers/layer_impl_unittest.cc b/cc/layers/layer_impl_unittest.cc index 7404e4e2..315544ad 100644 --- a/cc/layers/layer_impl_unittest.cc +++ b/cc/layers/layer_impl_unittest.cc
@@ -92,6 +92,7 @@ scoped_ptr<OutputSurface> output_surface = FakeOutputSurface::Create3d(); FakeLayerTreeHostImpl host_impl(&proxy, &shared_bitmap_manager, &task_graph_runner); + host_impl.SetVisible(true); EXPECT_TRUE(host_impl.InitializeRenderer(output_surface.get())); scoped_ptr<LayerImpl> root_clip = LayerImpl::Create(host_impl.active_tree(), 1); @@ -251,6 +252,7 @@ scoped_ptr<OutputSurface> output_surface = FakeOutputSurface::Create3d(); FakeLayerTreeHostImpl host_impl(&proxy, &shared_bitmap_manager, &task_graph_runner); + host_impl.SetVisible(true); EXPECT_TRUE(host_impl.InitializeRenderer(output_surface.get())); host_impl.active_tree()->SetRootLayer( LayerImpl::Create(host_impl.active_tree(), 1)); @@ -366,6 +368,7 @@ scoped_ptr<OutputSurface> output_surface = FakeOutputSurface::Create3d(); FakeLayerTreeHostImpl host_impl(&proxy, &shared_bitmap_manager, &task_graph_runner); + host_impl.SetVisible(true); EXPECT_TRUE(host_impl.InitializeRenderer(output_surface.get())); scoped_ptr<LayerImpl> layer = LayerImpl::Create(host_impl.active_tree(), 1);
diff --git a/cc/layers/nine_patch_layer_impl_unittest.cc b/cc/layers/nine_patch_layer_impl_unittest.cc index 3405bc54..50162f0 100644 --- a/cc/layers/nine_patch_layer_impl_unittest.cc +++ b/cc/layers/nine_patch_layer_impl_unittest.cc
@@ -49,6 +49,7 @@ scoped_ptr<OutputSurface> output_surface = FakeOutputSurface::Create3d(); FakeUIResourceLayerTreeHostImpl host_impl(&proxy, &shared_bitmap_manager, &task_graph_runner); + host_impl.SetVisible(true); host_impl.InitializeRenderer(output_surface.get()); scoped_ptr<NinePatchLayerImpl> layer =
diff --git a/cc/layers/picture_image_layer_impl_unittest.cc b/cc/layers/picture_image_layer_impl_unittest.cc index 8d36095..57af9c2 100644 --- a/cc/layers/picture_image_layer_impl_unittest.cc +++ b/cc/layers/picture_image_layer_impl_unittest.cc
@@ -49,6 +49,7 @@ &shared_bitmap_manager_, &task_graph_runner_) { host_impl_.CreatePendingTree(); + host_impl_.SetVisible(true); host_impl_.InitializeRenderer(output_surface_.get()); }
diff --git a/cc/layers/picture_layer.cc b/cc/layers/picture_layer.cc index 6c215a3..d92ccc1df 100644 --- a/cc/layers/picture_layer.cc +++ b/cc/layers/picture_layer.cc
@@ -32,7 +32,7 @@ PictureLayer::PictureLayer(const LayerSettings& settings, ContentLayerClient* client, - scoped_ptr<RecordingSource> source) + scoped_ptr<DisplayListRecordingSource> source) : PictureLayer(settings, client) { recording_source_ = source.Pass(); } @@ -150,7 +150,7 @@ DCHECK(client_); updated |= recording_source_->UpdateAndExpandInvalidation( client_, &recording_invalidation_, layer_size, update_rect, - update_source_frame_number_, RecordingSource::RECORD_NORMALLY); + update_source_frame_number_, DisplayListRecordingSource::RECORD_NORMALLY); last_updated_visible_layer_rect_ = visible_layer_rect(); if (updated) { @@ -169,18 +169,19 @@ } skia::RefPtr<SkPicture> PictureLayer::GetPicture() const { - // We could either flatten the RecordingSource into a single SkPicture, - // or paint a fresh one depending on what we intend to do with the + // We could either flatten the DisplayListRecordingSource into a single + // SkPicture, or paint a fresh one depending on what we intend to do with the // picture. For now we just paint a fresh one to get consistent results. if (!DrawsContent()) return skia::RefPtr<SkPicture>(); gfx::Size layer_size = bounds(); - scoped_ptr<RecordingSource> recording_source(new DisplayListRecordingSource); + scoped_ptr<DisplayListRecordingSource> recording_source( + new DisplayListRecordingSource); Region recording_invalidation; recording_source->UpdateAndExpandInvalidation( client_, &recording_invalidation, layer_size, gfx::Rect(layer_size), - update_source_frame_number_, RecordingSource::RECORD_NORMALLY); + update_source_frame_number_, DisplayListRecordingSource::RECORD_NORMALLY); scoped_refptr<RasterSource> raster_source = recording_source->CreateRasterSource(false);
diff --git a/cc/layers/picture_layer.h b/cc/layers/picture_layer.h index 771484c..6f4f17c 100644 --- a/cc/layers/picture_layer.h +++ b/cc/layers/picture_layer.h
@@ -13,7 +13,7 @@ namespace cc { class ContentLayerClient; -class RecordingSource; +class DisplayListRecordingSource; class ResourceUpdateQueue; class CC_EXPORT PictureLayer : public Layer { @@ -39,7 +39,7 @@ ContentLayerClient* client() { return client_; } - RecordingSource* GetRecordingSourceForTesting() { + DisplayListRecordingSource* GetDisplayListRecordingSourceForTesting() { return recording_source_.get(); } @@ -48,7 +48,7 @@ // Allow tests to inject a recording source. PictureLayer(const LayerSettings& settings, ContentLayerClient* client, - scoped_ptr<RecordingSource> source); + scoped_ptr<DisplayListRecordingSource> source); ~PictureLayer() override; bool HasDrawableContent() const override; @@ -57,7 +57,7 @@ private: ContentLayerClient* client_; - scoped_ptr<RecordingSource> recording_source_; + scoped_ptr<DisplayListRecordingSource> recording_source_; devtools_instrumentation:: ScopedLayerObjectTracker instrumentation_object_tracker_; // Invalidation to use the next time update is called.
diff --git a/cc/layers/picture_layer_impl_perftest.cc b/cc/layers/picture_layer_impl_perftest.cc index 6c45b09..ef41efc 100644 --- a/cc/layers/picture_layer_impl_perftest.cc +++ b/cc/layers/picture_layer_impl_perftest.cc
@@ -51,6 +51,7 @@ kTimeCheckInterval) {} void SetUp() override { + host_impl_.SetVisible(true); host_impl_.InitializeRenderer(output_surface_.get()); }
diff --git a/cc/layers/picture_layer_impl_unittest.cc b/cc/layers/picture_layer_impl_unittest.cc index 93c1f1f..6daa34a 100644 --- a/cc/layers/picture_layer_impl_unittest.cc +++ b/cc/layers/picture_layer_impl_unittest.cc
@@ -124,6 +124,7 @@ void SetUp() override { InitializeRenderer(); } virtual void InitializeRenderer() { + host_impl_.SetVisible(true); host_impl_.InitializeRenderer(output_surface_.get()); } @@ -1570,6 +1571,7 @@ host_impl_.DidLoseOutputSurface(); scoped_ptr<OutputSurface> new_output_surface = FakeOutputSurface::Create3d(context.Pass()); + host_impl_.SetVisible(true); host_impl_.InitializeRenderer(new_output_surface.get()); output_surface_ = new_output_surface.Pass(); @@ -1609,6 +1611,7 @@ host_impl_.DidLoseOutputSurface(); scoped_ptr<OutputSurface> new_output_surface = FakeOutputSurface::Create3d(context.Pass()); + host_impl_.SetVisible(true); host_impl_.InitializeRenderer(new_output_surface.get()); output_surface_ = new_output_surface.Pass(); @@ -3816,6 +3819,7 @@ } void InitializeRenderer() override { + host_impl_.SetVisible(true); host_impl_.InitializeRenderer(output_surface_.get()); } }; @@ -4481,7 +4485,8 @@ scoped_ptr<FakeLayerTreeHost> host = FakeLayerTreeHost::Create(&host_client, &task_graph_runner); host->SetRootLayer(layer); - RecordingSource* recording_source = layer->GetRecordingSourceForTesting(); + DisplayListRecordingSource* recording_source = + layer->GetDisplayListRecordingSourceForTesting(); int frame_number = 0; @@ -4490,7 +4495,7 @@ Region invalidation(layer_rect); recording_source->UpdateAndExpandInvalidation( &client, &invalidation, layer_bounds, layer_rect, frame_number++, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); scoped_refptr<RasterSource> pending_raster_source = recording_source->CreateRasterSource(true); @@ -4544,7 +4549,8 @@ scoped_ptr<FakeLayerTreeHost> host = FakeLayerTreeHost::Create(&host_client, &task_graph_runner); host->SetRootLayer(layer); - RecordingSource* recording_source = layer->GetRecordingSourceForTesting(); + DisplayListRecordingSource* recording_source = + layer->GetDisplayListRecordingSourceForTesting(); int frame_number = 0; @@ -4553,7 +4559,7 @@ Region invalidation1(layer_rect); recording_source->UpdateAndExpandInvalidation( &client, &invalidation1, layer_bounds, layer_rect, frame_number++, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); scoped_refptr<RasterSource> raster_source1 = recording_source->CreateRasterSource(true); @@ -4572,7 +4578,7 @@ Region invalidation2(layer_rect); recording_source->UpdateAndExpandInvalidation( &client, &invalidation2, layer_bounds, layer_rect, frame_number++, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); scoped_refptr<RasterSource> raster_source2 = recording_source->CreateRasterSource(true);
diff --git a/cc/layers/picture_layer_unittest.cc b/cc/layers/picture_layer_unittest.cc index cb73d15..2f5b55f 100644 --- a/cc/layers/picture_layer_unittest.cc +++ b/cc/layers/picture_layer_unittest.cc
@@ -101,7 +101,7 @@ Region invalidation(layer_rect); recording_source->UpdateAndExpandInvalidation( &client, &invalidation, layer_bounds, layer_rect, 1, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); // Layer is suitable for gpu rasterization by default. EXPECT_TRUE(recording_source->IsSuitableForGpuRasterization()); @@ -139,11 +139,13 @@ params.main_task_runner = base::ThreadTaskRunnerHandle::Get(); scoped_ptr<LayerTreeHost> host1 = LayerTreeHost::CreateSingleThreaded(&host_client1, &params); + host1->SetVisible(true); host_client1.SetLayerTreeHost(host1.get()); params.client = &host_client2; scoped_ptr<LayerTreeHost> host2 = LayerTreeHost::CreateSingleThreaded(&host_client2, &params); + host2->SetVisible(true); host_client2.SetLayerTreeHost(host2.get()); // The PictureLayer is put in one LayerTreeHost.
diff --git a/cc/layers/scrollbar_layer_unittest.cc b/cc/layers/scrollbar_layer_unittest.cc index f631fcea..2d92f60 100644 --- a/cc/layers/scrollbar_layer_unittest.cc +++ b/cc/layers/scrollbar_layer_unittest.cc
@@ -129,6 +129,7 @@ layer_tree_host_.reset( new FakeResourceTrackingLayerTreeHost(&fake_client_, &params)); + layer_tree_host_->SetVisible(true); fake_client_.SetLayerTreeHost(layer_tree_host_.get()); // Force output surface creation for renderer capabilities. layer_tree_host_->Composite(base::TimeTicks());
diff --git a/cc/layers/texture_layer_unittest.cc b/cc/layers/texture_layer_unittest.cc index 8c138c3..81758641 100644 --- a/cc/layers/texture_layer_unittest.cc +++ b/cc/layers/texture_layer_unittest.cc
@@ -835,6 +835,7 @@ TextureLayerTest::SetUp(); layer_tree_host_ = MockLayerTreeHost::Create(&fake_client_, &task_graph_runner_); + host_impl_.SetVisible(true); EXPECT_TRUE(host_impl_.InitializeRenderer(output_surface_.get())); }
diff --git a/cc/layers/ui_resource_layer_impl_unittest.cc b/cc/layers/ui_resource_layer_impl_unittest.cc index 826c53f..95a5967 100644 --- a/cc/layers/ui_resource_layer_impl_unittest.cc +++ b/cc/layers/ui_resource_layer_impl_unittest.cc
@@ -63,6 +63,7 @@ scoped_ptr<OutputSurface> output_surface = FakeOutputSurface::Create3d(); FakeUIResourceLayerTreeHostImpl host_impl(&proxy, &shared_bitmap_manager, &task_graph_runner); + host_impl.SetVisible(true); host_impl.InitializeRenderer(output_surface.get()); // Make sure we're appending quads when there are valid values. @@ -110,6 +111,7 @@ scoped_ptr<OutputSurface> output_surface = FakeOutputSurface::Create3d(); FakeUIResourceLayerTreeHostImpl host_impl(&proxy, &shared_bitmap_manager, &task_graph_runner); + host_impl.SetVisible(true); host_impl.InitializeRenderer(output_surface.get()); gfx::Size bitmap_size(100, 100); @@ -141,6 +143,7 @@ scoped_ptr<OutputSurface> output_surface = FakeOutputSurface::Create3d(); FakeUIResourceLayerTreeHostImpl host_impl(&proxy, &shared_bitmap_manager, &task_graph_runner); + host_impl.SetVisible(true); host_impl.InitializeRenderer(output_surface.get()); gfx::Size bitmap_size(100, 100);
diff --git a/cc/playback/discardable_image_map_unittest.cc b/cc/playback/discardable_image_map_unittest.cc index 65bf68e..f5bf1ca 100644 --- a/cc/playback/discardable_image_map_unittest.cc +++ b/cc/playback/discardable_image_map_unittest.cc
@@ -66,7 +66,7 @@ recording_source.SetGenerateDiscardableImagesMetadata(true); recording_source.UpdateAndExpandInvalidation( &content_layer_client, &invalidation, visible_rect.size(), visible_rect, - 1, RecordingSource::RECORD_NORMALLY); + 1, DisplayListRecordingSource::RECORD_NORMALLY); DisplayItemList* display_list = recording_source.display_list(); DiscardableImageMap image_map; @@ -147,7 +147,7 @@ recording_source.SetGenerateDiscardableImagesMetadata(true); recording_source.UpdateAndExpandInvalidation( &content_layer_client, &invalidation, layer_size, visible_rect, 1, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); DisplayItemList* display_list = recording_source.display_list(); DiscardableImageMap image_map; @@ -252,7 +252,7 @@ recording_source.SetGenerateDiscardableImagesMetadata(true); recording_source.UpdateAndExpandInvalidation( &content_layer_client, &invalidation, visible_rect.size(), visible_rect, - 1, RecordingSource::RECORD_NORMALLY); + 1, DisplayListRecordingSource::RECORD_NORMALLY); DisplayItemList* display_list = recording_source.display_list(); DiscardableImageMap image_map; @@ -295,7 +295,7 @@ recording_source.SetGenerateDiscardableImagesMetadata(true); recording_source.UpdateAndExpandInvalidation( &content_layer_client, &invalidation, visible_rect.size(), visible_rect, - 1, RecordingSource::RECORD_NORMALLY); + 1, DisplayListRecordingSource::RECORD_NORMALLY); DisplayItemList* display_list = recording_source.display_list(); DiscardableImageMap image_map;
diff --git a/cc/playback/display_list_recording_source.cc b/cc/playback/display_list_recording_source.cc index c790ebe..26d7437 100644 --- a/cc/playback/display_list_recording_source.cc +++ b/cc/playback/display_list_recording_source.cc
@@ -175,8 +175,8 @@ case RECORD_WITH_CONSTRUCTION_DISABLED: painting_control = ContentLayerClient::DISPLAY_LIST_CONSTRUCTION_DISABLED; break; - default: - // case RecordingSource::RECORD_WITH_SK_NULL_CANVAS should not be reached + case RECORD_WITH_SK_NULL_CANVAS: + case RECORDING_MODE_COUNT: NOTREACHED(); }
diff --git a/cc/playback/display_list_recording_source.h b/cc/playback/display_list_recording_source.h index b4250d0b..d544349 100644 --- a/cc/playback/display_list_recording_source.h +++ b/cc/playback/display_list_recording_source.h
@@ -5,34 +5,54 @@ #ifndef CC_PLAYBACK_DISPLAY_LIST_RECORDING_SOURCE_H_ #define CC_PLAYBACK_DISPLAY_LIST_RECORDING_SOURCE_H_ +#include "base/memory/ref_counted.h" #include "base/memory/scoped_ptr.h" -#include "cc/playback/recording_source.h" +#include "cc/base/cc_export.h" +#include "third_party/skia/include/core/SkColor.h" +#include "ui/gfx/geometry/rect.h" +#include "ui/gfx/geometry/size.h" namespace cc { +class ContentLayerClient; class DisplayItemList; class DisplayListRasterSource; +class RasterSource; +class Region; -class CC_EXPORT DisplayListRecordingSource : public RecordingSource { +class CC_EXPORT DisplayListRecordingSource { public: - DisplayListRecordingSource(); - ~DisplayListRecordingSource() override; + // TODO(schenney) Remove RECORD_WITH_SK_NULL_CANVAS when we no longer + // support a non-Slimming Paint path. + enum RecordingMode { + RECORD_NORMALLY, + RECORD_WITH_SK_NULL_CANVAS, + RECORD_WITH_PAINTING_DISABLED, + RECORD_WITH_CACHING_DISABLED, + RECORD_WITH_CONSTRUCTION_DISABLED, + RECORDING_MODE_COUNT, // Must be the last entry. + }; - // RecordingSource overrides. + DisplayListRecordingSource(); + virtual ~DisplayListRecordingSource(); + bool UpdateAndExpandInvalidation(ContentLayerClient* painter, Region* invalidation, const gfx::Size& layer_size, const gfx::Rect& visible_layer_rect, int frame_number, - RecordingMode recording_mode) override; - scoped_refptr<RasterSource> CreateRasterSource( - bool can_use_lcd_text) const override; - gfx::Size GetSize() const final; - void SetEmptyBounds() override; - void SetSlowdownRasterScaleFactor(int factor) override; - void SetGenerateDiscardableImagesMetadata(bool generate_metadata) override; - void SetBackgroundColor(SkColor background_color) override; - void SetRequiresClear(bool requires_clear) override; - bool IsSuitableForGpuRasterization() const override; + RecordingMode recording_mode); + gfx::Size GetSize() const; + void SetEmptyBounds(); + void SetSlowdownRasterScaleFactor(int factor); + void SetGenerateDiscardableImagesMetadata(bool generate_metadata); + void SetBackgroundColor(SkColor background_color); + void SetRequiresClear(bool requires_clear); + + // These functions are virtual for testing. + virtual scoped_refptr<RasterSource> CreateRasterSource( + bool can_use_lcd_text) const; + virtual bool IsSuitableForGpuRasterization() const; + // Returns true if the new recorded viewport exposes enough new area to be // worth re-recording. static bool ExposesEnoughNewArea(
diff --git a/cc/playback/display_list_recording_source_unittest.cc b/cc/playback/display_list_recording_source_unittest.cc index 83d44036..4dcee93 100644 --- a/cc/playback/display_list_recording_source_unittest.cc +++ b/cc/playback/display_list_recording_source_unittest.cc
@@ -231,20 +231,20 @@ recording_source.UpdateAndExpandInvalidation( &client, &invalidation, layer_size, visible_rect, 0, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); EXPECT_EQ(gfx::Rect(0, 0, 4256, 4256), recording_source.recorded_viewport()); visible_rect.Offset(0, 512); recording_source.UpdateAndExpandInvalidation( &client, &invalidation, layer_size, visible_rect, 0, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); EXPECT_EQ(gfx::Rect(0, 0, 4256, 4256), recording_source.recorded_viewport()); // Move past the threshold for enough exposed new area. visible_rect.Offset(0, 1); recording_source.UpdateAndExpandInvalidation( &client, &invalidation, layer_size, visible_rect, 0, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); EXPECT_EQ(gfx::Rect(0, 0, 4256, 4769), recording_source.recorded_viewport()); // Make the bottom of the potential new recorded viewport coincide with the @@ -252,7 +252,7 @@ visible_rect.Offset(0, 231); recording_source.UpdateAndExpandInvalidation( &client, &invalidation, layer_size, visible_rect, 0, - RecordingSource::RECORD_NORMALLY); + DisplayListRecordingSource::RECORD_NORMALLY); EXPECT_EQ(gfx::Rect(0, 0, 4256, 4769), recording_source.recorded_viewport()); }
diff --git a/cc/playback/recording_source.h b/cc/playback/recording_source.h deleted file mode 100644 index 7d4aba1a..0000000 --- a/cc/playback/recording_source.h +++ /dev/null
@@ -1,59 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef CC_PLAYBACK_RECORDING_SOURCE_H_ -#define CC_PLAYBACK_RECORDING_SOURCE_H_ - -#include "base/memory/ref_counted.h" -#include "cc/base/cc_export.h" -#include "third_party/skia/include/core/SkColor.h" -#include "ui/gfx/geometry/rect.h" -#include "ui/gfx/geometry/size.h" - -namespace cc { -class ContentLayerClient; -class Region; -class RasterSource; - -class CC_EXPORT RecordingSource { - public: - // TODO(schenney) Remove RECORD_WITH_SK_NULL_CANVAS when we no longer - // support a non-Slimming Paint path. - enum RecordingMode { - RECORD_NORMALLY, - RECORD_WITH_SK_NULL_CANVAS, - RECORD_WITH_PAINTING_DISABLED, - RECORD_WITH_CACHING_DISABLED, - RECORD_WITH_CONSTRUCTION_DISABLED, - RECORDING_MODE_COUNT, // Must be the last entry. - }; - - virtual ~RecordingSource() {} - // Re-record parts of the picture that are invalid. - // Invalidations are in layer space, and will be expanded to cover everything - // that was either recorded/changed or that has no recording, leaving out only - // pieces that we had a recording for and it was not changed. - // Return true iff the pile was modified. - virtual bool UpdateAndExpandInvalidation(ContentLayerClient* painter, - Region* invalidation, - const gfx::Size& layer_size, - const gfx::Rect& visible_layer_rect, - int frame_number, - RecordingMode recording_mode) = 0; - - virtual scoped_refptr<RasterSource> CreateRasterSource( - bool can_use_lcd_text) const = 0; - - virtual gfx::Size GetSize() const = 0; - virtual void SetEmptyBounds() = 0; - virtual void SetSlowdownRasterScaleFactor(int factor) = 0; - virtual void SetGenerateDiscardableImagesMetadata(bool generate_metadata) = 0; - virtual void SetBackgroundColor(SkColor background_color) = 0; - virtual void SetRequiresClear(bool requires_clear) = 0; - virtual bool IsSuitableForGpuRasterization() const = 0; -}; - -} // namespace cc - -#endif // CC_PLAYBACK_RECORDING_SOURCE_H_
diff --git a/cc/scheduler/scheduler.cc b/cc/scheduler/scheduler.cc index 082fbee3..a78e629d 100644 --- a/cc/scheduler/scheduler.cc +++ b/cc/scheduler/scheduler.cc
@@ -131,11 +131,6 @@ estimated_parent_draw_time_ = draw_time; } -void Scheduler::SetCanStart() { - state_machine_.SetCanStart(); - ProcessScheduledActions(); -} - void Scheduler::SetVisible(bool visible) { state_machine_.SetVisible(visible); UpdateCompositorTimingHistoryRecordingEnabled();
diff --git a/cc/scheduler/scheduler.h b/cc/scheduler/scheduler.h index 8e9abce..089e0bed 100644 --- a/cc/scheduler/scheduler.h +++ b/cc/scheduler/scheduler.h
@@ -75,9 +75,8 @@ base::TimeDelta interval); void SetEstimatedParentDrawTime(base::TimeDelta draw_time); - void SetCanStart(); - void SetVisible(bool visible); + bool visible() { return state_machine_.visible(); } void SetCanDraw(bool can_draw); void NotifyReadyToActivate(); void NotifyReadyToDraw();
diff --git a/cc/scheduler/scheduler_state_machine.cc b/cc/scheduler/scheduler_state_machine.cc index 8c6175618..661dc7a 100644 --- a/cc/scheduler/scheduler_state_machine.cc +++ b/cc/scheduler/scheduler_state_machine.cc
@@ -15,7 +15,7 @@ SchedulerStateMachine::SchedulerStateMachine(const SchedulerSettings& settings) : settings_(settings), - output_surface_state_(OUTPUT_SURFACE_LOST), + output_surface_state_(OUTPUT_SURFACE_NONE), begin_impl_frame_state_(BEGIN_IMPL_FRAME_STATE_IDLE), begin_main_frame_state_(BEGIN_MAIN_FRAME_STATE_IDLE), forced_redraw_state_(FORCED_REDRAW_STATE_IDLE), @@ -40,7 +40,6 @@ needs_prepare_tiles_(false), needs_begin_main_frame_(false), visible_(false), - can_start_(false), can_draw_(false), has_pending_tree_(false), pending_tree_is_ready_for_activation_(false), @@ -60,10 +59,10 @@ const char* SchedulerStateMachine::OutputSurfaceStateToString( OutputSurfaceState state) { switch (state) { + case OUTPUT_SURFACE_NONE: + return "OUTPUT_SURFACE_NONE"; case OUTPUT_SURFACE_ACTIVE: return "OUTPUT_SURFACE_ACTIVE"; - case OUTPUT_SURFACE_LOST: - return "OUTPUT_SURFACE_LOST"; case OUTPUT_SURFACE_CREATING: return "OUTPUT_SURFACE_CREATING"; case OUTPUT_SURFACE_WAITING_FOR_FIRST_COMMIT: @@ -225,7 +224,6 @@ state->SetBoolean("needs_prepare_tiles", needs_prepare_tiles_); state->SetBoolean("needs_begin_main_frame", needs_begin_main_frame_); state->SetBoolean("visible", visible_); - state->SetBoolean("can_start", can_start_); state->SetBoolean("can_draw", can_draw_); state->SetBoolean("has_pending_tree", has_pending_tree_); state->SetBoolean("pending_tree_is_ready_for_activation", @@ -257,7 +255,7 @@ // draws will be aborted. However, when the embedder is Android WebView, // software draws could be scheduled by the Android OS at any time and draws // should not be aborted in this case. - bool is_output_surface_lost = (output_surface_state_ == OUTPUT_SURFACE_LOST); + bool is_output_surface_lost = (output_surface_state_ == OUTPUT_SURFACE_NONE); if (settings_.using_synchronous_renderer_compositor) return is_output_surface_lost || !can_draw_; @@ -274,7 +272,7 @@ // There is no output surface to trigger our activations. // If we do not force activations to make forward progress, we might deadlock // with the main thread. - if (output_surface_state_ == OUTPUT_SURFACE_LOST) + if (output_surface_state_ == OUTPUT_SURFACE_NONE) return true; // If we're not visible, we should force activation. @@ -294,10 +292,6 @@ if (!visible_) return false; - // Don't try to initialize too early. - if (!can_start_) - return false; - // We only want to start output surface initialization after the // previous commit is complete. if (begin_main_frame_state_ != BEGIN_MAIN_FRAME_STATE_IDLE) @@ -317,7 +311,7 @@ // We need to create the output surface if we don't have one and we haven't // started creating one yet. - return output_surface_state_ == OUTPUT_SURFACE_LOST; + return output_surface_state_ == OUTPUT_SURFACE_NONE; } bool SchedulerStateMachine::ShouldDraw() const { @@ -676,7 +670,7 @@ } void SchedulerStateMachine::WillBeginOutputSurfaceCreation() { - DCHECK_EQ(output_surface_state_, OUTPUT_SURFACE_LOST); + DCHECK_EQ(output_surface_state_, OUTPUT_SURFACE_NONE); output_surface_state_ = OUTPUT_SURFACE_CREATING; // The following DCHECKs make sure we are in the proper quiescent state. 
@@ -1063,10 +1057,10 @@ } void SchedulerStateMachine::DidLoseOutputSurface() { - if (output_surface_state_ == OUTPUT_SURFACE_LOST || + if (output_surface_state_ == OUTPUT_SURFACE_NONE || output_surface_state_ == OUTPUT_SURFACE_CREATING) return; - output_surface_state_ = OUTPUT_SURFACE_LOST; + output_surface_state_ = OUTPUT_SURFACE_NONE; needs_redraw_ = false; wait_for_ready_to_draw_ = false; } @@ -1102,7 +1096,7 @@ bool SchedulerStateMachine::HasInitializedOutputSurface() const { switch (output_surface_state_) { - case OUTPUT_SURFACE_LOST: + case OUTPUT_SURFACE_NONE: case OUTPUT_SURFACE_CREATING: return false;
diff --git a/cc/scheduler/scheduler_state_machine.h b/cc/scheduler/scheduler_state_machine.h index d84211a..c188941 100644 --- a/cc/scheduler/scheduler_state_machine.h +++ b/cc/scheduler/scheduler_state_machine.h
@@ -42,8 +42,8 @@ explicit SchedulerStateMachine(const SchedulerSettings& settings); enum OutputSurfaceState { + OUTPUT_SURFACE_NONE, OUTPUT_SURFACE_ACTIVE, - OUTPUT_SURFACE_LOST, OUTPUT_SURFACE_CREATING, OUTPUT_SURFACE_WAITING_FOR_FIRST_COMMIT, OUTPUT_SURFACE_WAITING_FOR_FIRST_ACTIVATION, @@ -216,11 +216,6 @@ // from NextAction if the client rejects the BeginMainFrame message. void BeginMainFrameAborted(CommitEarlyOutReason reason); - // Set that we can create the first OutputSurface and start the scheduler. - void SetCanStart() { can_start_ = true; } - // Allow access of the can_start_ state in tests. - bool CanStartForTesting() const { return can_start_; } - // Indicates production should be skipped to recover latency. void SetSkipNextBeginMainFrameToReduceLatency(); @@ -324,7 +319,6 @@ bool needs_prepare_tiles_; bool needs_begin_main_frame_; bool visible_; - bool can_start_; bool can_draw_; bool has_pending_tree_; bool pending_tree_is_ready_for_activation_;
diff --git a/cc/scheduler/scheduler_state_machine_unittest.cc b/cc/scheduler/scheduler_state_machine_unittest.cc index 6d00594..7d9605f 100644 --- a/cc/scheduler/scheduler_state_machine_unittest.cc +++ b/cc/scheduler/scheduler_state_machine_unittest.cc
@@ -51,7 +51,6 @@ } #define SET_UP_STATE(state) \ - state.SetCanStart(); \ state.SetVisible(true); \ EXPECT_ACTION_UPDATE_STATE( \ SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); \ @@ -205,7 +204,6 @@ TEST(SchedulerStateMachineTest, BeginFrameNeeded) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -254,7 +252,6 @@ // If no commit needed, do nothing. { StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -275,13 +272,12 @@ EXPECT_FALSE(state.NeedsCommit()); } - // If commit requested but can_start is still false, do nothing. + // If commit requested but not visible yet, do nothing. { StateMachine state(default_scheduler_settings); state.SetBeginMainFrameState( SchedulerStateMachine::BEGIN_MAIN_FRAME_STATE_IDLE); state.SetNeedsRedraw(false); - state.SetVisible(true); state.SetNeedsBeginMainFrame(); EXPECT_ACTION_UPDATE_STATE(SchedulerStateMachine::ACTION_NONE); @@ -300,7 +296,6 @@ StateMachine state(default_scheduler_settings); state.SetBeginMainFrameState( SchedulerStateMachine::BEGIN_MAIN_FRAME_STATE_IDLE); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -328,7 +323,6 @@ StateMachine state(default_scheduler_settings); state.SetBeginMainFrameState( SchedulerStateMachine::BEGIN_MAIN_FRAME_STATE_IDLE); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -720,7 +714,6 @@ for (size_t i = 0; i < num_begin_main_frame_states; ++i) { for (size_t j = 0; j < num_begin_impl_frame_states; ++j) { StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -749,7 +742,6 @@ // except if we're ready to commit, in which case we expect a commit first. for (size_t i = 0; i < num_begin_main_frame_states; ++i) { StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -790,7 +782,6 @@ // There shouldn't be any drawing regardless of BeginImplFrame. for (size_t j = 0; j < 2; ++j) { StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -827,7 +818,6 @@ // There shouldn't be any drawing regardless of BeginImplFrame. 
for (size_t j = 0; j < 2; ++j) { StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -850,7 +840,6 @@ TestCanRedrawWithWaitingForFirstDrawMakesProgress) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -1195,7 +1184,6 @@ TEST(SchedulerStateMachineTest, TestNoRequestCommitWhenInvisible) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -1209,7 +1197,6 @@ TEST(SchedulerStateMachineTest, TestNoRequestOutputSurfaceWhenInvisible) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); // We should not request an OutputSurface when we are still invisible. EXPECT_ACTION_UPDATE_STATE(SchedulerStateMachine::ACTION_NONE); state.SetVisible(true); @@ -1283,7 +1270,6 @@ TEST(SchedulerStateMachineTest, TestAbortBeginMainFrameBecauseCommitNotNeeded) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -1331,7 +1317,6 @@ TEST(SchedulerStateMachineTest, TestFirstContextCreation) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); state.SetCanDraw(true); @@ -1388,7 +1373,7 @@ state.NextAction()); state.DidLoseOutputSurface(); EXPECT_EQ(state.output_surface_state(), - SchedulerStateMachine::OUTPUT_SURFACE_LOST); + SchedulerStateMachine::OUTPUT_SURFACE_NONE); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -1695,7 +1680,6 @@ TEST(SchedulerStateMachineTest, TestNoBeginFrameNeededWhenInvisible) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -1716,7 +1700,6 @@ TEST(SchedulerStateMachineTest, TestNoBeginMainFrameWhenInvisible) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION); @@ -1738,7 +1721,6 @@ TEST(SchedulerStateMachineTest, TestFinishCommitWhenCommitInProgress) { SchedulerSettings default_scheduler_settings; StateMachine state(default_scheduler_settings); - state.SetCanStart(); state.SetVisible(true); EXPECT_ACTION_UPDATE_STATE( SchedulerStateMachine::ACTION_BEGIN_OUTPUT_SURFACE_CREATION);
diff --git a/cc/scheduler/scheduler_unittest.cc b/cc/scheduler/scheduler_unittest.cc index 669ed3b5..0d39262 100644 --- a/cc/scheduler/scheduler_unittest.cc +++ b/cc/scheduler/scheduler_unittest.cc
@@ -289,8 +289,6 @@ EXPECT_FALSE(client_->needs_begin_frames()); // Start the initial output surface creation. - EXPECT_FALSE(scheduler_->CanStart()); - scheduler_->SetCanStart(); scheduler_->SetVisible(true); scheduler_->SetCanDraw(true); EXPECT_SINGLE_ACTION("ScheduledActionBeginOutputSurfaceCreation", client_); @@ -411,7 +409,6 @@ TEST_F(SchedulerTest, InitializeOutputSurfaceDoesNotBeginImplFrame) { scheduler_settings_.use_external_begin_frame_source = true; SetUpScheduler(false); - scheduler_->SetCanStart(); scheduler_->SetVisible(true); scheduler_->SetCanDraw(true); @@ -2599,7 +2596,6 @@ scheduler_settings_.use_external_begin_frame_source = true; SetUpScheduler(false); - scheduler_->SetCanStart(); scheduler_->SetVisible(true); scheduler_->SetCanDraw(true);
diff --git a/cc/test/fake_picture_layer.cc b/cc/test/fake_picture_layer.cc index 0570625..8892d4c 100644 --- a/cc/test/fake_picture_layer.cc +++ b/cc/test/fake_picture_layer.cc
@@ -18,9 +18,10 @@ SetIsDrawable(true); } -FakePictureLayer::FakePictureLayer(const LayerSettings& settings, - ContentLayerClient* client, - scoped_ptr<RecordingSource> source) +FakePictureLayer::FakePictureLayer( + const LayerSettings& settings, + ContentLayerClient* client, + scoped_ptr<DisplayListRecordingSource> source) : PictureLayer(settings, client, source.Pass()), update_count_(0), push_properties_count_(0),
diff --git a/cc/test/fake_picture_layer.h b/cc/test/fake_picture_layer.h index 2b26498..aee328f 100644 --- a/cc/test/fake_picture_layer.h +++ b/cc/test/fake_picture_layer.h
@@ -8,7 +8,7 @@ #include "base/memory/ref_counted.h" #include "base/memory/scoped_ptr.h" #include "cc/layers/picture_layer.h" -#include "cc/playback/recording_source.h" +#include "cc/playback/display_list_recording_source.h" namespace cc { class FakePictureLayer : public PictureLayer { @@ -21,7 +21,7 @@ static scoped_refptr<FakePictureLayer> CreateWithRecordingSource( const LayerSettings& settings, ContentLayerClient* client, - scoped_ptr<RecordingSource> source) { + scoped_ptr<DisplayListRecordingSource> source) { return make_scoped_refptr( new FakePictureLayer(settings, client, source.Pass())); } @@ -46,7 +46,7 @@ FakePictureLayer(const LayerSettings& settings, ContentLayerClient* client); FakePictureLayer(const LayerSettings& settings, ContentLayerClient* client, - scoped_ptr<RecordingSource> source); + scoped_ptr<DisplayListRecordingSource> source); ~FakePictureLayer() override; int update_count_;
diff --git a/cc/test/fake_proxy.h b/cc/test/fake_proxy.h index d11d0721..4bd1b45 100644 --- a/cc/test/fake_proxy.h +++ b/cc/test/fake_proxy.h
@@ -26,7 +26,6 @@ bool CommitToActiveTree() const override; void SetOutputSurface(OutputSurface* output_surface) override {} void ReleaseOutputSurface() override; - void SetLayerTreeHostClientReady() override {} void SetVisible(bool visible) override {} void SetThrottleFrameProduction(bool throttle) override {} const RendererCapabilities& GetRendererCapabilities() const override;
diff --git a/cc/test/layer_test_common.cc b/cc/test/layer_test_common.cc index 04c6166..73af7ec1 100644 --- a/cc/test/layer_test_common.cc +++ b/cc/test/layer_test_common.cc
@@ -120,6 +120,7 @@ render_pass_(RenderPass::Create()), layer_impl_id_(2) { root_layer_impl_->SetHasRenderSurface(true); + host_->host_impl()->SetVisible(true); host_->host_impl()->InitializeRenderer(output_surface_.get()); }
diff --git a/cc/test/layer_tree_test.cc b/cc/test/layer_tree_test.cc index 5d37c73..86ba7c1 100644 --- a/cc/test/layer_tree_test.cc +++ b/cc/test/layer_tree_test.cc
@@ -755,7 +755,7 @@ } void LayerTreeTest::WillBeginTest() { - layer_tree_host_->SetLayerTreeHostClientReady(); + layer_tree_host_->SetVisible(true); } void LayerTreeTest::DoBeginTest() {
diff --git a/cc/test/scheduler_test_common.h b/cc/test/scheduler_test_common.h index ce6ccc4..41c21459 100644 --- a/cc/test/scheduler_test_common.h +++ b/cc/test/scheduler_test_common.h
@@ -215,8 +215,6 @@ bool SwapThrottled() const { return state_machine_.SwapThrottled(); } - bool CanStart() const { return state_machine_.CanStartForTesting(); } - bool NeedsBeginMainFrame() const { return state_machine_.needs_begin_main_frame(); }
diff --git a/cc/tiles/tile_manager_perftest.cc b/cc/tiles/tile_manager_perftest.cc index c7337cfbba..bb61942 100644 --- a/cc/tiles/tile_manager_perftest.cc +++ b/cc/tiles/tile_manager_perftest.cc
@@ -124,6 +124,7 @@ } virtual void InitializeRenderer() { + host_impl_.SetVisible(true); host_impl_.InitializeRenderer(output_surface_.get()); tile_manager()->SetTileTaskRunnerForTesting( g_fake_tile_task_runner.Pointer());
diff --git a/cc/tiles/tile_manager_unittest.cc b/cc/tiles/tile_manager_unittest.cc index 0a0cb60..73a3cec2 100644 --- a/cc/tiles/tile_manager_unittest.cc +++ b/cc/tiles/tile_manager_unittest.cc
@@ -37,7 +37,10 @@ class LowResTilingsSettings : public LayerTreeSettings { public: - LowResTilingsSettings() { create_low_res_tiling = true; } + LowResTilingsSettings() { + create_low_res_tiling = true; + verify_property_trees = true; + } }; class TileManagerTilePriorityQueueTest : public testing::Test { @@ -78,6 +81,7 @@ } virtual void InitializeRenderer() { + host_impl_.SetVisible(true); host_impl_.InitializeRenderer(output_surface_.get()); } @@ -143,6 +147,7 @@ // Add tilings/tiles for the layer. bool update_lcd_text = false; + host_impl_.pending_tree()->BuildPropertyTreesForTesting(); host_impl_.pending_tree()->UpdateDrawProperties(update_lcd_text); } @@ -615,6 +620,8 @@ host_impl_.SetViewportSize(gfx::Size(200, 200)); host_impl_.AdvanceToNextFrame(base::TimeDelta::FromMilliseconds(1)); bool update_lcd_text = false; + host_impl_.pending_tree()->property_trees()->needs_rebuild = true; + host_impl_.pending_tree()->BuildPropertyTreesForTesting(); host_impl_.pending_tree()->UpdateDrawProperties(update_lcd_text); host_impl_.SetRequiresHighResToDraw(); @@ -836,6 +843,8 @@ host_impl_.AdvanceToNextFrame(base::TimeDelta::FromMilliseconds(1)); bool update_lcd_text = false; + host_impl_.pending_tree()->property_trees()->needs_rebuild = true; + host_impl_.pending_tree()->BuildPropertyTreesForTesting(); host_impl_.pending_tree()->UpdateDrawProperties(update_lcd_text); ActivateTree(); @@ -951,6 +960,8 @@ host_impl_.AdvanceToNextFrame(base::TimeDelta::FromMilliseconds(1)); bool update_lcd_text = false; + host_impl_.pending_tree()->property_trees()->needs_rebuild = true; + host_impl_.pending_tree()->BuildPropertyTreesForTesting(); host_impl_.pending_tree()->UpdateDrawProperties(update_lcd_text); pending_child_layer->SetOpacity(0.0); @@ -1445,6 +1456,7 @@ host_impl_(new MockLayerTreeHostImpl(&proxy_, &shared_bitmap_manager_, &task_graph_runner_)) { + host_impl_->SetVisible(true); host_impl_->InitializeRenderer(output_surface_.get()); }
diff --git a/cc/trees/channel_main.h b/cc/trees/channel_main.h index 964d1aa..c4cdcc6 100644 --- a/cc/trees/channel_main.h +++ b/cc/trees/channel_main.h
@@ -23,7 +23,6 @@ public: // Interface for commands sent to the ProxyImpl virtual void SetThrottleFrameProductionOnImpl(bool throttle) = 0; - virtual void SetLayerTreeHostClientReadyOnImpl() = 0; virtual ~ChannelMain() {} };
diff --git a/cc/trees/draw_property_utils.cc b/cc/trees/draw_property_utils.cc index 9975fad..b023a999 100644 --- a/cc/trees/draw_property_utils.cc +++ b/cc/trees/draw_property_utils.cc
@@ -25,16 +25,13 @@ const ClipTree& clip_tree, const TransformTree& transform_tree) { for (auto& layer : visible_layer_list) { - // TODO(ajuma): Compute content_scale rather than using it. Note that for - // PictureLayer and PictureImageLayers, content_bounds == bounds and - // content_scale_x == content_scale_y == 1.0, so once impl painting is on - // everywhere, this code will be unnecessary. gfx::Size layer_bounds = layer->bounds(); - const bool has_clip = layer->clip_tree_index() > 0; + const ClipNode* clip_node = clip_tree.Node(layer->clip_tree_index()); + const bool is_unclipped = + clip_node->data.resets_clip && !clip_node->data.applies_local_clip; const TransformNode* transform_node = transform_tree.Node(layer->transform_tree_index()); - if (has_clip) { - const ClipNode* clip_node = clip_tree.Node(layer->clip_tree_index()); + if (!is_unclipped) { const TransformNode* target_node = transform_tree.Node(transform_node->data.content_target_id); @@ -407,19 +404,20 @@ } const TransformNode* transform_node = transform_tree.Node(clip_node->data.transform_id); + ClipNode* parent_clip_node = clip_tree->parent(clip_node); - // Only descendants of a real clipping layer (i.e., not 0) may have their - // clip adjusted due to intersecting with an ancestor clip. - const bool is_clipped = clip_node->parent_id > 0; - if (!is_clipped) { - clip_node->data.clip_in_target_space = MathUtil::MapClippedRect( - transform_node->data.to_target, clip_node->data.clip); - clip_node->data.combined_clip_in_target_space = - clip_node->data.clip_in_target_space; + // Only nodes affected by ancestor clips will have their combined clip + // adjusted due to intersecting with an ancestor clip. + if (clip_node->data.resets_clip) { + if (clip_node->data.applies_local_clip) { + clip_node->data.clip_in_target_space = MathUtil::MapClippedRect( + transform_node->data.to_target, clip_node->data.clip); + clip_node->data.combined_clip_in_target_space = + clip_node->data.clip_in_target_space; + } continue; } - ClipNode* parent_clip_node = clip_tree->parent(clip_node); gfx::Transform parent_to_current; const TransformNode* parent_transform_node = transform_tree.Node(parent_clip_node->data.transform_id); @@ -456,7 +454,8 @@ parent_clip_node->data.combined_clip_in_target_space); } - if (clip_node->data.use_only_parent_clip) { + bool use_only_parent_clip = !clip_node->data.applies_local_clip; + if (use_only_parent_clip) { clip_node->data.combined_clip_in_target_space = parent_combined_clip_in_target_space; if (!clip_node->data.render_surface_is_clipped) {
diff --git a/cc/trees/layer_tree_host.cc b/cc/trees/layer_tree_host.cc index f4231160..ee76c66 100644 --- a/cc/trees/layer_tree_host.cc +++ b/cc/trees/layer_tree_host.cc
@@ -101,7 +101,7 @@ top_controls_shown_ratio_(0.f), hide_pinch_scrollbars_near_min_scale_(false), device_scale_factor_(1.f), - visible_(true), + visible_(false), page_scale_factor_(1.f), min_page_scale_factor_(1.f), max_page_scale_factor_(1.f), @@ -203,10 +203,6 @@ } } -void LayerTreeHost::SetLayerTreeHostClientReady() { - proxy_->SetLayerTreeHostClientReady(); -} - void LayerTreeHost::WillBeginMainFrame() { devtools_instrumentation::WillBeginMainThreadFrame(id(), source_frame_number()); @@ -682,7 +678,6 @@ DCHECK(!settings_.single_thread_proxy_scheduler); SingleThreadProxy* proxy = static_cast<SingleThreadProxy*>(proxy_.get()); - SetLayerTreeHostClientReady(); proxy->LayoutAndUpdateLayers(); } @@ -692,7 +687,6 @@ DCHECK(!settings_.single_thread_proxy_scheduler); SingleThreadProxy* proxy = static_cast<SingleThreadProxy*>(proxy_.get()); - SetLayerTreeHostClientReady(); proxy->CompositeImmediately(frame_begin_time); }
diff --git a/cc/trees/layer_tree_host.h b/cc/trees/layer_tree_host.h index 5c92f976..cbfa014 100644 --- a/cc/trees/layer_tree_host.h +++ b/cc/trees/layer_tree_host.h
@@ -96,8 +96,6 @@ InitParams* params); virtual ~LayerTreeHost(); - void SetLayerTreeHostClientReady(); - // LayerTreeHost interface to Proxy. void WillBeginMainFrame(); void DidBeginMainFrame();
diff --git a/cc/trees/layer_tree_host_common_unittest.cc b/cc/trees/layer_tree_host_common_unittest.cc index 8af5a06..7b8af5ac 100644 --- a/cc/trees/layer_tree_host_common_unittest.cc +++ b/cc/trees/layer_tree_host_common_unittest.cc
@@ -2594,6 +2594,7 @@ root->AddChild(child.Pass()); root->AddChild(occluding_child.Pass()); host_impl.active_tree()->SetRootLayer(root.Pass()); + host_impl.SetVisible(true); host_impl.InitializeRenderer(output_surface.get()); bool update_lcd_text = false; host_impl.active_tree()->UpdateDrawProperties(update_lcd_text); @@ -7714,7 +7715,7 @@ ClipTree clip_tree = root->layer_tree_impl()->property_trees()->clip_tree; ClipNode* clip_node = clip_tree.Node(render_surface->clip_tree_index()); - EXPECT_TRUE(clip_node->data.use_only_parent_clip); + EXPECT_FALSE(clip_node->data.applies_local_clip); EXPECT_EQ(gfx::Rect(30, 21), test_layer->visible_rect_from_property_trees()); } @@ -7793,6 +7794,45 @@ } TEST_F(LayerTreeHostCommonTest, + RenderSurfaceWithUnclippedDescendantsButDoesntApplyOwnClip) { + // Ensure that the visible layer rect of a descendant of a surface with + // unclipped descendants is computed correctly, when the surface doesn't apply + // a clip. + LayerImpl* root = root_layer(); + LayerImpl* clip_parent = AddChildToRoot<LayerImpl>(); + LayerImpl* render_surface = AddChild<LayerImpl>(clip_parent); + LayerImpl* clip_child = AddChild<LayerImpl>(render_surface); + LayerImpl* child = AddChild<LayerImpl>(render_surface); + + const gfx::Transform identity_matrix; + + clip_child->SetDrawsContent(true); + child->SetDrawsContent(true); + clip_child->SetClipParent(clip_parent); + scoped_ptr<std::set<LayerImpl*>> clip_children(new std::set<LayerImpl*>); + clip_children->insert(clip_child); + clip_parent->SetClipChildren(clip_children.release()); + SetLayerPropertiesForTesting(root, identity_matrix, gfx::Point3F(), + gfx::PointF(), gfx::Size(30, 10), true, false, + true); + SetLayerPropertiesForTesting(clip_parent, identity_matrix, gfx::Point3F(), + gfx::PointF(), gfx::Size(30, 30), true, false, + false); + SetLayerPropertiesForTesting(render_surface, identity_matrix, gfx::Point3F(), + gfx::PointF(), gfx::Size(10, 15), true, false, + true); + SetLayerPropertiesForTesting(clip_child, identity_matrix, gfx::Point3F(), + gfx::PointF(), gfx::Size(10, 10), true, false, + false); + SetLayerPropertiesForTesting(child, identity_matrix, gfx::Point3F(), + gfx::PointF(), gfx::Size(40, 40), true, false, + false); + + ExecuteCalculateDrawProperties(root); + EXPECT_EQ(gfx::Rect(40, 40), child->visible_layer_rect()); +} + +TEST_F(LayerTreeHostCommonTest, RenderSurfaceClipsSubtreeAndHasUnclippedDescendants) { LayerImpl* root = root_layer(); LayerImpl* clip_parent = AddChildToRoot<LayerImpl>();
diff --git a/cc/trees/layer_tree_host_impl.cc b/cc/trees/layer_tree_host_impl.cc index 3b10fb7..0f5abf2 100644 --- a/cc/trees/layer_tree_host_impl.cc +++ b/cc/trees/layer_tree_host_impl.cc
@@ -176,7 +176,7 @@ scroll_layer_id_when_mouse_over_scrollbar_(0), tile_priorities_dirty_(false), settings_(settings), - visible_(true), + visible_(false), cached_managed_memory_policy_(settings.memory_policy_), is_synchronous_single_threaded_(!proxy->HasImplThread() && !settings.single_thread_proxy_scheduler),
diff --git a/cc/trees/layer_tree_host_impl_unittest.cc b/cc/trees/layer_tree_host_impl_unittest.cc index 3d47528b..71e7a15 100644 --- a/cc/trees/layer_tree_host_impl_unittest.cc +++ b/cc/trees/layer_tree_host_impl_unittest.cc
@@ -170,6 +170,7 @@ &shared_bitmap_manager_, &gpu_memory_buffer_manager_, &task_graph_runner_, 0); output_surface_ = output_surface.Pass(); + host_impl_->SetVisible(true); bool init = host_impl_->InitializeRenderer(output_surface_.get()); host_impl_->SetViewportSize(gfx::Size(10, 10)); host_impl_->active_tree()->PushPageScaleFromMainThread(1.f, 1.f, 1.f); @@ -2145,6 +2146,7 @@ &task_graph_runner_, &stats_instrumentation_); host_impl_ = make_scoped_ptr(host_impl_override_time); output_surface_ = CreateOutputSurface(); + host_impl_->SetVisible(true); host_impl_->InitializeRenderer(output_surface_.get()); SetupScrollAndContentsLayers(content_size); @@ -5694,6 +5696,7 @@ LayerTreeHostImpl::Create( settings, this, &proxy_, &stats_instrumentation_, &shared_bitmap_manager_, NULL, &task_graph_runner_, 0); + layer_tree_host_impl->SetVisible(true); layer_tree_host_impl->InitializeRenderer(output_surface.get()); layer_tree_host_impl->WillBeginImplFrame( CreateBeginFrameArgsForTesting(BEGINFRAME_FROM_HERE)); @@ -5977,6 +5980,7 @@ scoped_ptr<LayerTreeHostImpl> my_host_impl = LayerTreeHostImpl::Create(settings, client, proxy, stats_instrumentation, manager, nullptr, task_graph_runner, 0); + my_host_impl->SetVisible(true); my_host_impl->InitializeRenderer(output_surface); my_host_impl->WillBeginImplFrame( CreateBeginFrameArgsForTesting(BEGINFRAME_FROM_HERE)); @@ -6497,6 +6501,7 @@ output_surface_ = FakeOutputSurface::Create3d(TestWebGraphicsContext3D::Create()); + host_impl_->SetVisible(true); host_impl_->InitializeRenderer(output_surface_.get()); EXPECT_LT(0ul, host_impl_->memory_allocation_limit_bytes()); } @@ -6559,6 +6564,7 @@ &shared_bitmap_manager_, &task_graph_runner_); host_impl_.reset(fake_host_impl_); output_surface_ = CreateOutputSurface(); + host_impl_->SetVisible(true); host_impl_->InitializeRenderer(output_surface_.get()); host_impl_->SetViewportSize(gfx::Size(10, 10)); } @@ -6567,10 +6573,11 @@ }; TEST_F(LayerTreeHostImplTestPrepareTiles, PrepareTilesWhenInvisible) { - fake_host_impl_->DidModifyTilePriorities(); EXPECT_TRUE(fake_host_impl_->prepare_tiles_needed()); - fake_host_impl_->SetVisible(false); + host_impl_->SetVisible(false); EXPECT_FALSE(fake_host_impl_->prepare_tiles_needed()); + host_impl_->SetVisible(true); + EXPECT_TRUE(fake_host_impl_->prepare_tiles_needed()); } TEST_F(LayerTreeHostImplTest, UIResourceManagement) {
diff --git a/cc/trees/layer_tree_host_unittest.cc b/cc/trees/layer_tree_host_unittest.cc index fe10a47..95b7171 100644 --- a/cc/trees/layer_tree_host_unittest.cc +++ b/cc/trees/layer_tree_host_unittest.cc
@@ -4886,7 +4886,7 @@ // Once invisible, we can go visible again. if (!visible) { PostSetVisibleToMainThread(true); - } else { + } else if (activation_count_) { EXPECT_TRUE(host_impl->RequiresHighResToDraw()); EndTest(); }
diff --git a/cc/trees/layer_tree_host_unittest_context.cc b/cc/trees/layer_tree_host_unittest_context.cc index 34a70b9..0754ef1 100644 --- a/cc/trees/layer_tree_host_unittest_context.cc +++ b/cc/trees/layer_tree_host_unittest_context.cc
@@ -354,14 +354,15 @@ SINGLE_AND_MULTI_THREAD_TEST_F(LayerTreeHostContextTestLostContextSucceeds); -class LayerTreeHostClientNotReadyDoesNotCreateOutputSurface +class LayerTreeHostClientNotVisibleDoesNotCreateOutputSurface : public LayerTreeHostContextTest { public: - LayerTreeHostClientNotReadyDoesNotCreateOutputSurface() + LayerTreeHostClientNotVisibleDoesNotCreateOutputSurface() : LayerTreeHostContextTest() {} void WillBeginTest() override { - // Override and do not signal SetLayerTreeHostClientReady. + // Override to not become visible. + DCHECK(!layer_tree_host()->visible()); } void BeginTest() override { @@ -380,7 +381,7 @@ }; SINGLE_AND_MULTI_THREAD_TEST_F( - LayerTreeHostClientNotReadyDoesNotCreateOutputSurface); + LayerTreeHostClientNotVisibleDoesNotCreateOutputSurface); // This tests the OutputSurface release logic in the following sequence. // SetUp LTH and create and init OutputSurface @@ -1525,8 +1526,8 @@ } void DidSetVisibleOnImplTree(LayerTreeHostImpl* impl, bool visible) override { - TestWebGraphicsContext3D* context = TestContext(); if (!visible) { + TestWebGraphicsContext3D* context = TestContext(); // All resources should have been evicted. ASSERT_EQ(0u, context->NumTextures()); EXPECT_EQ(0u, impl->ResourceIdForUIResource(ui_resource_->id()));
diff --git a/cc/trees/layer_tree_impl_unittest.cc b/cc/trees/layer_tree_impl_unittest.cc index ba5fc7b..2907cc7 100644 --- a/cc/trees/layer_tree_impl_unittest.cc +++ b/cc/trees/layer_tree_impl_unittest.cc
@@ -28,6 +28,7 @@ settings.verify_property_trees = true; host_impl_.reset(new FakeLayerTreeHostImpl( settings, &proxy_, &shared_bitmap_manager_, &task_graph_runner_)); + host_impl_->SetVisible(true); EXPECT_TRUE(host_impl_->InitializeRenderer(output_surface_.get())); } @@ -103,6 +104,7 @@ scoped_ptr<FakeLayerTreeHostImpl> host_impl; host_impl.reset(new FakeLayerTreeHostImpl( settings, &proxy, &shared_bitmap_manager, &task_graph_runner)); + host_impl->SetVisible(true); EXPECT_TRUE(host_impl->InitializeRenderer(output_surface.get())); scoped_ptr<LayerImpl> root = LayerImpl::Create(host_impl->active_tree(), 12345);
diff --git a/cc/trees/occlusion_tracker_perftest.cc b/cc/trees/occlusion_tracker_perftest.cc index 01b63de..d29765a 100644 --- a/cc/trees/occlusion_tracker_perftest.cc +++ b/cc/trees/occlusion_tracker_perftest.cc
@@ -42,6 +42,7 @@ host_impl_ = LayerTreeHostImpl::Create(settings, &client_, &proxy_, &stats_, &shared_bitmap_manager_, nullptr, &task_graph_runner_, 1); + host_impl_->SetVisible(true); host_impl_->InitializeRenderer(output_surface_.get()); scoped_ptr<LayerImpl> root_layer = LayerImpl::Create(active_tree(), 1);
diff --git a/cc/trees/property_tree.cc b/cc/trees/property_tree.cc index d45d065..86a776a 100644 --- a/cc/trees/property_tree.cc +++ b/cc/trees/property_tree.cc
@@ -101,11 +101,12 @@ ClipNodeData::ClipNodeData() : transform_id(-1), target_id(-1), - use_only_parent_clip(false), + applies_local_clip(true), layer_clipping_uses_only_local_clip(false), layer_visibility_uses_only_local_clip(false), render_surface_is_clipped(false), - layers_are_clipped(false) {} + layers_are_clipped(false), + resets_clip(false) {} EffectNodeData::EffectNodeData() : opacity(1.f),
diff --git a/cc/trees/property_tree.h b/cc/trees/property_tree.h index 49d159e..b3568d7 100644 --- a/cc/trees/property_tree.h +++ b/cc/trees/property_tree.h
@@ -155,16 +155,51 @@ struct CC_EXPORT ClipNodeData { ClipNodeData(); + // The clip rect that this node contributes, expressed in the space of its + // transform node. gfx::RectF clip; + + // Clip nodes are used for two reasons. First, they are used for determining + // which parts of each layer are visible. Second, they are used for + // determining whether a clip needs to be applied when drawing a layer, and if + // so, the rect that needs to be used. These can be different since not all + // clips need to be applied directly to each layer. For example, a layer is + // implicitly clipped by the bounds of its target render surface and by clips + // applied to this surface. |combined_clip_in_target_space| is used for + // computing visible rects, and |clip_in_target_space| is used for computing + // clips applied at draw time. Both rects are expressed in the space of the + // target transform node, and may include clips contributed by ancestors. gfx::RectF combined_clip_in_target_space; gfx::RectF clip_in_target_space; + + // The id of the transform node that defines the clip node's local space. int transform_id; + + // The id of the transform node that defines the clip node's target space. int target_id; - bool use_only_parent_clip : 1; + + // Whether this node contributes a new clip (that is, whether |clip| needs to + // be applied), rather than only inheriting ancestor clips. bool applies_local_clip : 1; + + // When true, |clip_in_target_space| does not include clips from ancestor + // nodes. bool layer_clipping_uses_only_local_clip : 1; + + // When true, |combined_clip_in_target_space| does not include clips from + // ancestor nodes. bool layer_visibility_uses_only_local_clip : 1; + + // True if render surfaces with this clip tree node need to be drawn with a + // clip applied. bool render_surface_is_clipped : 1; + + // True if layers with this clip tree node need to be drawn with a clip + // applied. bool layers_are_clipped : 1; + + // Nodes that correspond to unclipped surfaces disregard ancestor clips. bool resets_clip : 1; }; typedef TreeNode<ClipNodeData> ClipNode;
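To make the renamed bits easier to read together, here is a rough sketch, assembled from the builder changes further down rather than quoted from any one hunk, of how two representative clip nodes end up configured under the new scheme:

  // The root clip node both applies a clip of its own (the viewport) and
  // resets inherited clip state, since there is nothing above it to inherit.
  ClipNode root_clip;
  root_clip.data.resets_clip = true;
  root_clip.data.applies_local_clip = true;
  root_clip.data.clip = gfx::RectF(viewport);
  root_clip.data.transform_id = kRootPropertyTreeNodeId;

  // A node created for an unclipped render surface that does not clip its own
  // subtree resets inherited clips without contributing a new one.
  ClipNode unclipped_surface_clip;
  unclipped_surface_clip.data.resets_clip = true;
  unclipped_surface_clip.data.applies_local_clip = false;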
diff --git a/cc/trees/property_tree_builder.cc b/cc/trees/property_tree_builder.cc index a5e209c..e0acdd9 100644 --- a/cc/trees/property_tree_builder.cc +++ b/cc/trees/property_tree_builder.cc
@@ -22,7 +22,6 @@ static const int kInvalidPropertyTreeNodeId = -1; static const int kRootPropertyTreeNodeId = 0; -static const int kUnclippedRootClipTreeNodeId = 0; template <typename LayerType> struct DataForRecursion { @@ -66,10 +65,9 @@ } template <typename LayerType> -static bool RequiresClipNode(LayerType* layer, - const DataForRecursion<LayerType>& data, - int parent_transform_id, - bool is_clipped) { +static bool AppliesClip(LayerType* layer, + const DataForRecursion<LayerType>& data, + bool is_clipped) { const bool render_surface_applies_clip = layer->has_render_surface() && is_clipped; const bool render_surface_may_grow_due_to_clip_children = @@ -100,17 +98,31 @@ int parent_id = parent->id; bool is_root = !layer->parent(); + + // Whether we have an ancestor clip that we might need to apply. bool ancestor_clips_subtree = is_root || parent->data.layers_are_clipped; bool layers_are_clipped = false; bool has_unclipped_surface = false; if (layer->has_render_surface()) { - if (ancestor_clips_subtree && layer->num_unclipped_descendants() > 0) + // Clips can usually be applied to a surface's descendants simply by + // clipping the surface (or applied implicitly by the surface's bounds). + // However, if the surface has unclipped descendants (layers that aren't + // affected by the ancestor clip), we cannot clip the surface itself, and + // must instead apply clips to the clipped descendants. + if (ancestor_clips_subtree && layer->num_unclipped_descendants() > 0) { layers_are_clipped = true; - else if (!ancestor_clips_subtree) + } else if (!ancestor_clips_subtree) { + // When there are no ancestor clips that need to be applied to a render + // surface, we reset clipping state. The surface might contribute a clip + // of its own, but clips from ancestor nodes don't need to be considered + // when computing clip rects or visibility. has_unclipped_surface = true; + } } else { + // Without a new render surface, layer clipping state from ancestors needs + // to continue to propagate. layers_are_clipped = ancestor_clips_subtree; } @@ -118,17 +130,21 @@ if (layer_clips_subtree) layers_are_clipped = true; - if (has_unclipped_surface) { - parent_id = kUnclippedRootClipTreeNodeId; - data_for_children->effect_tree->Node(data_for_children->render_target) - ->data.clip_id = kUnclippedRootClipTreeNodeId; - } - if (!RequiresClipNode(layer, data_from_ancestor, parent->data.transform_id, - ancestor_clips_subtree)) { - // Unclipped surfaces reset the clip rect. + bool applies_clip = + AppliesClip(layer, data_from_ancestor, ancestor_clips_subtree); + bool parent_applies_clip = + !parent->data.resets_clip || parent->data.applies_local_clip; + + // When we have an unclipped surface, all ancestor clips no longer apply. + // However, if our parent already clears ancestor clips and applies no clip of + // its own, there aren't any ancestor clips that need clearing. 
+ bool needs_to_clear_ancestor_clips = + has_unclipped_surface && parent_applies_clip; + bool requires_node = applies_clip || needs_to_clear_ancestor_clips; + + if (!requires_node) { data_for_children->clip_tree_parent = parent_id; - DCHECK_EQ(layers_are_clipped, data_for_children->clip_tree->Node(parent_id) - ->data.layers_are_clipped); + DCHECK_EQ(layers_are_clipped, parent->data.layers_are_clipped); } else { LayerType* transform_parent = data_for_children->transform_tree_parent; if (layer->position_constraint().is_fixed_position() && @@ -151,23 +167,33 @@ } node.owner_id = layer->id(); - if (ancestor_clips_subtree) { - node.data.use_only_parent_clip = !layer_clips_subtree; - // If the layer has render surface, the target has changed and so we use - // only the local clip for layer clipping. + if (ancestor_clips_subtree || layer_clips_subtree) { + node.data.applies_local_clip = layer_clips_subtree; + + // Surfaces reset the rect used for layer clipping. At other nodes, layer + // clipping state from ancestors must continue to get propagated. node.data.layer_clipping_uses_only_local_clip = - layer->has_render_surface(); + layer->has_render_surface() || !ancestor_clips_subtree; + + // A surface with unclipped descendants won't be clipped by ancestor clips + // at draw time (since it may need to expand to include its unclipped + // descendants), so we don't consider these ancestor clips when computing + // visible rects. + node.data.layer_visibility_uses_only_local_clip = + layer->has_render_surface() && layer->num_unclipped_descendants(); } else { - node.data.use_only_parent_clip = false; - node.data.layer_clipping_uses_only_local_clip = true; + // Otherwise, we're either unclipped, or exist only in order to apply our + // parent's clips in our space. + node.data.applies_local_clip = false; + node.data.layer_clipping_uses_only_local_clip = false; + node.data.layer_visibility_uses_only_local_clip = false; } - // If render surface clips subtree and has unclipped descendants, the - // surface isn't clipped and we don't want to use ancestor's clips while - // calculating visible rect. - node.data.layer_visibility_uses_only_local_clip = - layer->has_render_surface() && layer->num_unclipped_descendants() && - layer_clips_subtree; + node.data.resets_clip = has_unclipped_surface || !parent_applies_clip; + + // A surface with unclipped descendants cannot be clipped by its ancestor + // clip at draw time since the unclipped descendants aren't affected by the + // ancestor clip. node.data.render_surface_is_clipped = layer->has_render_surface() && ancestor_clips_subtree && !layer->num_unclipped_descendants(); @@ -441,7 +467,7 @@ // factor) and clip node created from root layer (include viewports) applies // to root render surface's content, but not root render surface itself. 
node.data.transform_id = kRootPropertyTreeNodeId; - node.data.clip_id = kUnclippedRootClipTreeNodeId; + node.data.clip_id = kRootPropertyTreeNodeId; } data_for_children->effect_tree_parent = data_for_children->effect_tree->Insert(node, parent_id); @@ -521,7 +547,7 @@ data_for_recursion.transform_tree_parent = nullptr; data_for_recursion.transform_fixed_parent = nullptr; data_for_recursion.render_target = kRootPropertyTreeNodeId; - data_for_recursion.clip_tree_parent = kUnclippedRootClipTreeNodeId; + data_for_recursion.clip_tree_parent = kRootPropertyTreeNodeId; data_for_recursion.effect_tree_parent = kInvalidPropertyTreeNodeId; data_for_recursion.page_scale_layer = page_scale_layer; data_for_recursion.inner_viewport_scroll_layer = inner_viewport_scroll_layer; @@ -540,10 +566,12 @@ data_for_recursion.sequence_number = property_trees->sequence_number; ClipNode root_clip; + root_clip.data.resets_clip = true; + root_clip.data.applies_local_clip = true; root_clip.data.clip = gfx::RectF(viewport); root_clip.data.transform_id = kRootPropertyTreeNodeId; - data_for_recursion.clip_tree_parent = data_for_recursion.clip_tree->Insert( - root_clip, kUnclippedRootClipTreeNodeId); + data_for_recursion.clip_tree_parent = + data_for_recursion.clip_tree->Insert(root_clip, kRootPropertyTreeNodeId); BuildPropertyTreesInternal(root_layer, data_for_recursion); property_trees->needs_rebuild = false;
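Pulled out of the hunk above for readability, the new rule for when a layer gets its own clip node can be condensed into the following sketch; the variable names match the builder code, and this is a restatement of the logic above, not additional CL code:

  // A clip node is only inserted when the layer contributes a clip itself, or
  // when it owns an unclipped surface and still has ancestor clips to clear.
  bool parent_applies_clip =
      !parent->data.resets_clip || parent->data.applies_local_clip;
  bool needs_to_clear_ancestor_clips =
      has_unclipped_surface && parent_applies_clip;
  bool requires_node = applies_clip || needs_to_clear_ancestor_clips;
  if (!requires_node)
    data_for_children->clip_tree_parent = parent_id;  // Reuse parent's node.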
diff --git a/cc/trees/proxy.h b/cc/trees/proxy.h index b40bb2f..93f1cdb 100644 --- a/cc/trees/proxy.h +++ b/cc/trees/proxy.h
@@ -64,10 +64,6 @@ virtual void ReleaseOutputSurface() = 0; - // Indicates that the compositing surface associated with our context is - // ready to use. - virtual void SetLayerTreeHostClientReady() = 0; - virtual void SetVisible(bool visible) = 0; virtual void SetThrottleFrameProduction(bool throttle) = 0;
diff --git a/cc/trees/proxy_impl.h b/cc/trees/proxy_impl.h index 3a6c82b..998463f 100644 --- a/cc/trees/proxy_impl.h +++ b/cc/trees/proxy_impl.h
@@ -20,7 +20,6 @@ public: // Callback for impl side commands received from the channel. virtual void SetThrottleFrameProductionOnImpl(bool throttle) = 0; - virtual void SetLayerTreeHostClientReadyOnImpl() = 0; // TODO(khushalsagar): Rename as GetWeakPtr() once ThreadProxy is split. virtual base::WeakPtr<ProxyImpl> GetImplWeakPtr() = 0;
diff --git a/cc/trees/single_thread_proxy.cc b/cc/trees/single_thread_proxy.cc index 49f614c..1742582 100644 --- a/cc/trees/single_thread_proxy.cc +++ b/cc/trees/single_thread_proxy.cc
@@ -108,18 +108,6 @@ return true; } -void SingleThreadProxy::SetLayerTreeHostClientReady() { - TRACE_EVENT0("cc", "SingleThreadProxy::SetLayerTreeHostClientReady"); - // Scheduling is controlled by the embedder in the single thread case, so - // nothing to do. - DCHECK(Proxy::IsMainThread()); - DebugScopedSetImplThread impl(this); - if (scheduler_on_impl_thread_) { - scheduler_on_impl_thread_->SetCanStart(); - scheduler_on_impl_thread_->SetVisible(layer_tree_host_impl_->visible()); - } -} - void SingleThreadProxy::SetVisible(bool visible) { TRACE_EVENT1("cc", "SingleThreadProxy::SetVisible", "visible", visible); DebugScopedSetImplThread impl(this);
diff --git a/cc/trees/single_thread_proxy.h b/cc/trees/single_thread_proxy.h index 3caf1399..990b37f 100644 --- a/cc/trees/single_thread_proxy.h +++ b/cc/trees/single_thread_proxy.h
@@ -40,7 +40,6 @@ bool CommitToActiveTree() const override; void SetOutputSurface(OutputSurface* output_surface) override; void ReleaseOutputSurface() override; - void SetLayerTreeHostClientReady() override; void SetVisible(bool visible) override; void SetThrottleFrameProduction(bool throttle) override; const RendererCapabilities& GetRendererCapabilities() const override;
diff --git a/cc/trees/thread_proxy.cc b/cc/trees/thread_proxy.cc index ca6eb73..8f23e5c9 100644 --- a/cc/trees/thread_proxy.cc +++ b/cc/trees/thread_proxy.cc
@@ -158,16 +158,6 @@ return false; } -void ThreadProxy::SetLayerTreeHostClientReady() { - TRACE_EVENT0("cc", "ThreadProxy::SetLayerTreeHostClientReady"); - main().channel_main->SetLayerTreeHostClientReadyOnImpl(); -} - -void ThreadProxy::SetLayerTreeHostClientReadyOnImpl() { - TRACE_EVENT0("cc", "ThreadProxy::SetLayerTreeHostClientReadyOnImplThread"); - impl().scheduler->SetCanStart(); -} - void ThreadProxy::SetVisible(bool visible) { TRACE_EVENT1("cc", "ThreadProxy::SetVisible", "visible", visible); DebugScopedSetMainThreadBlocked main_thread_blocked(this); @@ -1037,7 +1027,8 @@ ImplThreadTaskRunner(), impl().external_begin_frame_source.get(), compositor_timing_history.Pass()); - impl().scheduler->SetVisible(impl().layer_tree_host_impl->visible()); + DCHECK_EQ(impl().scheduler->visible(), + impl().layer_tree_host_impl->visible()); impl_thread_weak_ptr_ = impl().weak_factory.GetWeakPtr(); completion->Signal(); }
diff --git a/cc/trees/thread_proxy.h b/cc/trees/thread_proxy.h index a6b3cf7..303db962 100644 --- a/cc/trees/thread_proxy.h +++ b/cc/trees/thread_proxy.h
@@ -171,7 +171,6 @@ bool IsStarted() const override; bool CommitToActiveTree() const override; void SetOutputSurface(OutputSurface* output_surface) override; - void SetLayerTreeHostClientReady() override; void SetVisible(bool visible) override; void SetThrottleFrameProduction(bool throttle) override; const RendererCapabilities& GetRendererCapabilities() const override; @@ -254,7 +253,6 @@ // ProxyImpl implementation base::WeakPtr<ProxyImpl> GetImplWeakPtr() override; void SetThrottleFrameProductionOnImpl(bool throttle) override; - void SetLayerTreeHostClientReadyOnImpl() override; protected: ThreadProxy(
diff --git a/cc/trees/threaded_channel.cc b/cc/trees/threaded_channel.cc index 3bc44c9..38efdc6 100644 --- a/cc/trees/threaded_channel.cc +++ b/cc/trees/threaded_channel.cc
@@ -33,12 +33,6 @@ proxy_impl_->GetImplWeakPtr(), throttle)); } -void ThreadedChannel::SetLayerTreeHostClientReadyOnImpl() { - ImplThreadTaskRunner()->PostTask( - FROM_HERE, base::Bind(&ProxyImpl::SetLayerTreeHostClientReadyOnImpl, - proxy_impl_->GetImplWeakPtr())); -} - void ThreadedChannel::DidCompleteSwapBuffers() { MainThreadTaskRunner()->PostTask( FROM_HERE, base::Bind(&ProxyMain::DidCompleteSwapBuffers,
diff --git a/cc/trees/threaded_channel.h b/cc/trees/threaded_channel.h index 70238f3..a3f1fa5 100644 --- a/cc/trees/threaded_channel.h +++ b/cc/trees/threaded_channel.h
@@ -76,7 +76,6 @@ // ChannelMain Implementation void SetThrottleFrameProductionOnImpl(bool throttle) override; - void SetLayerTreeHostClientReadyOnImpl() override; // ChannelImpl Implementation void DidCompleteSwapBuffers() override;
diff --git a/chrome/android/java/src/org/chromium/chrome/browser/dom_distiller/OWNERS b/chrome/android/java/src/org/chromium/chrome/browser/dom_distiller/OWNERS new file mode 100644 index 0000000..5ebdab3f --- /dev/null +++ b/chrome/android/java/src/org/chromium/chrome/browser/dom_distiller/OWNERS
@@ -0,0 +1 @@ +mdjones@chromium.org
diff --git a/chrome/android/java/src/org/chromium/chrome/browser/toolbar/ToolbarControlContainer.java b/chrome/android/java/src/org/chromium/chrome/browser/toolbar/ToolbarControlContainer.java index 467ff14..6c7914c 100644 --- a/chrome/android/java/src/org/chromium/chrome/browser/toolbar/ToolbarControlContainer.java +++ b/chrome/android/java/src/org/chromium/chrome/browser/toolbar/ToolbarControlContainer.java
@@ -141,8 +141,12 @@ mToolbarContainer = toolbarContainer; mToolbar = toolbar; + int containerHeightResId = R.dimen.control_container_height; + if (mToolbar instanceof CustomTabToolbar) { + containerHeightResId = R.dimen.custom_tabs_control_container_height; + } mToolbarActualHeightPx = toolbarContainer.getResources().getDimensionPixelSize( - R.dimen.control_container_height); + containerHeightResId); } /**
diff --git a/chrome/android/javatests/src/org/chromium/chrome/browser/dom_distiller/OWNERS b/chrome/android/javatests/src/org/chromium/chrome/browser/dom_distiller/OWNERS new file mode 100644 index 0000000..5ebdab3f --- /dev/null +++ b/chrome/android/javatests/src/org/chromium/chrome/browser/dom_distiller/OWNERS
@@ -0,0 +1 @@ +mdjones@chromium.org
diff --git a/chrome/app/generated_resources.grd b/chrome/app/generated_resources.grd index 6fbd7e5..0a317572 100644 --- a/chrome/app/generated_resources.grd +++ b/chrome/app/generated_resources.grd
@@ -13158,6 +13158,19 @@ </message> </if> + <!-- Media throttle infobar --> + <if expr="is_android"> + <message name="IDS_MEDIA_THROTTLE_INFOBAR_TEXT" desc="Text to display on the info bar whenever user wants to playback a video after being throttled."> + Android is having trouble playing media. + </message> + <message name="IDS_MEDIA_THROTTLE_INFOBAR_BLOCK_BUTTON" desc="Label for choosing 'Block' on media throttle infobar.[CHAR-LIMIT=32]"> + Wait + </message> + <message name="IDS_MEDIA_THROTTLE_INFOBAR_ALLOW_BUTTON" desc="Label for choosing 'Allow' on media throttle infobar. [CHAR-LIMIT=32]"> + Try again + </message> + </if> + <message name="IDS_MANAGE_PASSWORDS_CONFIRM_GENERATED_TITLE" desc="The title text that is used in the manage passwords bubble when the user has generated a password."> Password saved </message>
diff --git a/chrome/app/settings_strings.grdp b/chrome/app/settings_strings.grdp index eb89db2..be08601e 100644 --- a/chrome/app/settings_strings.grdp +++ b/chrome/app/settings_strings.grdp
@@ -452,6 +452,12 @@ <message name="IDS_SETTINGS_SYNC" desc="Name of the settings page which manages syncing data between multiple browser instances with the same Google profile."> Advanced sync settings </message> + <message name="IDS_SETTINGS_SYNC_LOADING" desc="The message shown when waiting for the sync backend to start up."> + Please wait... + </message> + <message name="IDS_SETTINGS_SYNC_TIMEOUT" desc="Text explaining what to do if sync times out."> + Please make sure your network connection is working and if the problem persists, please sign out and sign in again to refresh your credentials. + </message> <message name="IDS_SETTINGS_SYNC_EVERYTHING_MENU_OPTION" desc="Name of the menu option which, when selected, causes all properties to be synced."> Sync everything </message> @@ -497,6 +503,24 @@ <message name="IDS_SETTINGS_ENCRYPT_WITH_SYNC_PASSPHRASE_LEARN_MORE_LINK" desc="Text for the link which explains how synced settings are encrypted with a user-provided password"> Learn more </message> + <message name="IDS_SETTINGS_PASSPHRASE_EXPLANATION_TEXT" desc="Message when explicit passphrase is selected."> + Only someone with your passphrase can read your encrypted data. The passphrase is not sent to or stored by Google. If you forget your passphrase, you will need to reset sync. + </message> + <message name="IDS_SETTINGS_EMPTY_PASSPHRASE_ERROR" desc="Error message when the passphrase is empty."> + Empty passphrase is not allowed. + </message> + <message name="IDS_SETTINGS_MISMATCHED_PASSPHRASE_ERROR" desc="Error message when the passphrase and confirmation don't match."> + You must enter the same passphrase twice. + </message> + <message name="IDS_SETTINGS_INCORRECT_PASSPHRASE_ERROR" desc="Message when the passphrase is incorrect."> + The passphrase you entered is incorrect. + </message> + <message name="IDS_SETTINGS_PASSPHRASE_PLACEHOLDER" desc="Placeholder for the passphrase field."> + Passphrase + </message> + <message name="IDS_SETTINGS_PASSPHRASE_CONFIRMATION_PLACEHOLDER" desc="Placeholder for the confirm-passphrase field."> + Confirm passphrase + </message> <message name="IDS_SETTINGS_USE_DEFAULT_SETTINGS_BUTTON" desc="Text for button which, when clicked, activates the default sync settings."> Use default settings </message>
diff --git a/chrome/app/theme/material_100_percent/common/ash/otr_icon.png b/chrome/app/theme/material_100_percent/common/ash/otr_icon.png index 7afbe50..e5266ba 100644 --- a/chrome/app/theme/material_100_percent/common/ash/otr_icon.png +++ b/chrome/app/theme/material_100_percent/common/ash/otr_icon.png Binary files differ
diff --git a/chrome/app/theme/material_100_percent/common/otr_icon.png b/chrome/app/theme/material_100_percent/common/otr_icon.png new file mode 100644 index 0000000..e5266ba --- /dev/null +++ b/chrome/app/theme/material_100_percent/common/otr_icon.png Binary files differ
diff --git a/chrome/app/theme/material_200_percent/common/ash/otr_icon.png b/chrome/app/theme/material_200_percent/common/ash/otr_icon.png index c5b6e2bf..4847aae 100644 --- a/chrome/app/theme/material_200_percent/common/ash/otr_icon.png +++ b/chrome/app/theme/material_200_percent/common/ash/otr_icon.png Binary files differ
diff --git a/chrome/app/theme/material_200_percent/common/otr_icon.png b/chrome/app/theme/material_200_percent/common/otr_icon.png new file mode 100644 index 0000000..4847aae --- /dev/null +++ b/chrome/app/theme/material_200_percent/common/otr_icon.png Binary files differ
diff --git a/chrome/app/theme/theme_resources.grd b/chrome/app/theme/theme_resources.grd index f75053d3..bc10356 100644 --- a/chrome/app/theme/theme_resources.grd +++ b/chrome/app/theme/theme_resources.grd
@@ -566,6 +566,9 @@ <structure type="chrome_scaled_image" name="IDR_OMNIBOX_TTS_DARK" file="common/omnibox_tts_dark.png" /> <structure type="chrome_scaled_image" name="IDR_OMNIBOX_TTS_SELECTED" file="common/omnibox_tts_selected.png" /> <if expr="toolkit_views and not is_macosx"> + <!-- In Material Design the Ash image is the same as the common one, so + once that mode is always on, these should be collapsed and the + duplicate image removed from the tree. --> <if expr="not use_ash"> <structure type="chrome_scaled_image" name="IDR_OTR_ICON" file="common/otr_icon.png" /> </if>
diff --git a/chrome/browser/BUILD.gn b/chrome/browser/BUILD.gn index dab46f1..867e6d8d 100644 --- a/chrome/browser/BUILD.gn +++ b/chrome/browser/BUILD.gn
@@ -683,7 +683,7 @@ ] } - if (!is_official_build) { + if (!is_chrome_branded) { sources += [ "search/local_files_ntp_source.cc", "search/local_files_ntp_source.h", @@ -1017,8 +1017,6 @@ "notifications/notification_test_util.h", "password_manager/mock_password_store_service.cc", "password_manager/mock_password_store_service.h", - "password_manager/password_manager_test_base.cc", - "password_manager/password_manager_test_base.h", "password_manager/test_password_store_service.cc", "password_manager/test_password_store_service.h", "profile_resetter/profile_resetter_test_base.cc", @@ -1045,8 +1043,6 @@ "sync/profile_sync_components_factory_mock.h", "sync/profile_sync_service_mock.cc", "sync/profile_sync_service_mock.h", - "ui/webui/signin/login_ui_test_utils.cc", - "ui/webui/signin/login_ui_test_utils.h", ] configs += [ "//build/config:precompiled_headers" ] @@ -1084,12 +1080,8 @@ if (is_android) { sources -= [ - "password_manager/password_manager_test_base.cc", - "password_manager/password_manager_test_base.h", "sessions/session_service_test_helper.cc", "sessions/session_service_test_helper.h", - "ui/webui/signin/login_ui_test_utils.cc", - "ui/webui/signin/login_ui_test_utils.h", ] } @@ -1229,6 +1221,29 @@ } } +# In GYP this is part of test_support_ui. +source_set("test_support_ui") { + testonly = true + + # Always include this via the main test support UI target. + visibility = [ "//chrome/test:test_support_ui" ] + + sources = [ + "password_manager/password_manager_test_base.cc", + "password_manager/password_manager_test_base.h", + "ui/webui/signin/login_ui_test_utils.cc", + "ui/webui/signin/login_ui_test_utils.h", + ] + + configs += [ "//build/config:precompiled_headers" ] + + deps = [ + "//components/metrics:test_support", + "//skia", + "//testing/gtest", + ] +} + if (enable_rlz_support) { source_set("rlz") { sources =
diff --git a/chrome/browser/android/media/media_throttle_infobar_delegate.cc b/chrome/browser/android/media/media_throttle_infobar_delegate.cc new file mode 100644 index 0000000..45f8db3 --- /dev/null +++ b/chrome/browser/android/media/media_throttle_infobar_delegate.cc
@@ -0,0 +1,74 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "chrome/browser/android/media/media_throttle_infobar_delegate.h" + +#include "chrome/browser/infobars/infobar_service.h" +#include "chrome/grit/generated_resources.h" +#include "components/infobars/core/infobar.h" +#include "content/public/browser/web_contents.h" +#include "grit/components_strings.h" +#include "grit/theme_resources.h" +#include "ui/base/l10n/l10n_util.h" +#include "url/gurl.h" + +// static +void MediaThrottleInfoBarDelegate::Create( + content::WebContents* web_contents, + const DecodeRequestGrantedCallback& callback) { + InfoBarService* infobar_service = + InfoBarService::FromWebContents(web_contents); + scoped_ptr<infobars::InfoBar> new_infobar( + infobar_service->CreateConfirmInfoBar(scoped_ptr<ConfirmInfoBarDelegate>( + new MediaThrottleInfoBarDelegate(callback)))); + + for (size_t i = 0; i < infobar_service->infobar_count(); ++i) { + infobars::InfoBar* old_infobar = infobar_service->infobar_at(i); + MediaThrottleInfoBarDelegate* delegate = + old_infobar->delegate()->AsMediaThrottleInfoBarDelegate(); + if (delegate != nullptr) { + infobar_service->ReplaceInfoBar(old_infobar, new_infobar.Pass()); + return; + } + } + + infobar_service->AddInfoBar(new_infobar.Pass()); +} + +MediaThrottleInfoBarDelegate::MediaThrottleInfoBarDelegate( + const DecodeRequestGrantedCallback& callback) + : decode_granted_callback_(callback) { +} + +MediaThrottleInfoBarDelegate::~MediaThrottleInfoBarDelegate() {} + +MediaThrottleInfoBarDelegate* + MediaThrottleInfoBarDelegate::AsMediaThrottleInfoBarDelegate() { + return this; +} + +base::string16 MediaThrottleInfoBarDelegate::GetMessageText() const { + return l10n_util::GetStringUTF16(IDS_MEDIA_THROTTLE_INFOBAR_TEXT); +} + +base::string16 MediaThrottleInfoBarDelegate::GetButtonLabel( + InfoBarButton button) const { + return l10n_util::GetStringUTF16((button == BUTTON_OK) ? + IDS_MEDIA_THROTTLE_INFOBAR_BLOCK_BUTTON : + IDS_MEDIA_THROTTLE_INFOBAR_ALLOW_BUTTON); +} + +bool MediaThrottleInfoBarDelegate::Accept() { + decode_granted_callback_.Run(false); + return true; +} + +bool MediaThrottleInfoBarDelegate::Cancel() { + decode_granted_callback_.Run(true); + return true; +} + +void MediaThrottleInfoBarDelegate::InfoBarDismissed() { + decode_granted_callback_.Run(false); +}
diff --git a/chrome/browser/android/media/media_throttle_infobar_delegate.h b/chrome/browser/android/media/media_throttle_infobar_delegate.h new file mode 100644 index 0000000..5a717cb --- /dev/null +++ b/chrome/browser/android/media/media_throttle_infobar_delegate.h
@@ -0,0 +1,44 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef CHROME_BROWSER_ANDROID_MEDIA_MEDIA_THROTTLE_INFOBAR_DELEGATE_H_ +#define CHROME_BROWSER_ANDROID_MEDIA_MEDIA_THROTTLE_INFOBAR_DELEGATE_H_ + +#include <string> + +#include "base/callback.h" +#include "components/infobars/core/confirm_infobar_delegate.h" + +namespace content { +class WebContents; +} + +class MediaThrottleInfoBarDelegate : public ConfirmInfoBarDelegate { + public: + typedef base::Callback<void(bool)> DecodeRequestGrantedCallback; + ~MediaThrottleInfoBarDelegate() override; + + // Static method to create the object + static void Create( + content::WebContents* web_contents, + const DecodeRequestGrantedCallback& callback); + + private: + explicit MediaThrottleInfoBarDelegate( + const DecodeRequestGrantedCallback& callback); + + // ConfirmInfoBarDelegate: + MediaThrottleInfoBarDelegate* AsMediaThrottleInfoBarDelegate() override; + base::string16 GetMessageText() const override; + base::string16 GetButtonLabel(InfoBarButton button) const override; + bool Accept() override; + bool Cancel() override; + void InfoBarDismissed() override; + + DecodeRequestGrantedCallback decode_granted_callback_; + + DISALLOW_COPY_AND_ASSIGN(MediaThrottleInfoBarDelegate); +}; + +#endif // CHROME_BROWSER_ANDROID_MEDIA_MEDIA_THROTTLE_INFOBAR_DELEGATE_H_
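For context, a small caller-side usage sketch of the new delegate; the helper and callback names are hypothetical, but the bool semantics follow the implementation above (Accept/"Wait" and dismissal report false, Cancel/"Try again" reports true):

  // Hypothetical caller sketch; only MediaThrottleInfoBarDelegate::Create and
  // the base::Callback<void(bool)> signature come from the CL itself.
  void OnDecodeRequestGranted(bool granted) {
    if (granted) {
      // User tapped "Try again": re-issue the throttled decode request.
    } else {
      // User tapped "Wait" or dismissed the infobar: keep the request blocked.
    }
  }

  void ShowMediaThrottleInfoBar(content::WebContents* web_contents) {
    MediaThrottleInfoBarDelegate::Create(
        web_contents, base::Bind(&OnDecodeRequestGranted));
  }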
diff --git a/chrome/browser/android/tab_web_contents_delegate_android.cc b/chrome/browser/android/tab_web_contents_delegate_android.cc index 34a1f6de..f5b9d9d 100644 --- a/chrome/browser/android/tab_web_contents_delegate_android.cc +++ b/chrome/browser/android/tab_web_contents_delegate_android.cc
@@ -9,6 +9,7 @@ #include "base/command_line.h" #include "chrome/browser/android/feature_utilities.h" #include "chrome/browser/android/hung_renderer_infobar_delegate.h" +#include "chrome/browser/android/media/media_throttle_infobar_delegate.h" #include "chrome/browser/chrome_notification_types.h" #include "chrome/browser/file_select_helper.h" #include "chrome/browser/infobars/infobar_service.h" @@ -273,6 +274,12 @@ ->CheckMediaAccessPermission(web_contents, security_origin, type); } +void TabWebContentsDelegateAndroid::RequestMediaDecodePermission( + content::WebContents* web_contents, + const base::Callback<void(bool)>& callback) { + MediaThrottleInfoBarDelegate::Create(web_contents, callback); +} + bool TabWebContentsDelegateAndroid::RequestPpapiBrokerPermission( WebContents* web_contents, const GURL& url,
diff --git a/chrome/browser/android/tab_web_contents_delegate_android.h b/chrome/browser/android/tab_web_contents_delegate_android.h index b0405f6..af0795db 100644 --- a/chrome/browser/android/tab_web_contents_delegate_android.h +++ b/chrome/browser/android/tab_web_contents_delegate_android.h
@@ -68,6 +68,9 @@ bool CheckMediaAccessPermission(content::WebContents* web_contents, const GURL& security_origin, content::MediaStreamType type) override; + void RequestMediaDecodePermission( + content::WebContents* web_contents, + const base::Callback<void(bool)>& callback) override; bool RequestPpapiBrokerPermission( content::WebContents* web_contents, const GURL& url,
diff --git a/chrome/browser/chromeos/display/display_preferences_unittest.cc b/chrome/browser/chromeos/display/display_preferences_unittest.cc index b504d7c..51fde15e 100644 --- a/chrome/browser/chromeos/display/display_preferences_unittest.cc +++ b/chrome/browser/chromeos/display/display_preferences_unittest.cc
@@ -7,11 +7,11 @@ #include <string> #include <vector> -#include "ash/content/display/screen_orientation_controller_chromeos.h" #include "ash/display/display_layout_store.h" #include "ash/display/display_manager.h" #include "ash/display/display_util.h" #include "ash/display/resolution_notification_controller.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/display/window_tree_host_manager.h" #include "ash/screen_util.h" #include "ash/shell.h"
diff --git a/chrome/browser/chromeos/first_run/goodies_displayer.cc b/chrome/browser/chromeos/first_run/goodies_displayer.cc index e84cd00..c95401c 100644 --- a/chrome/browser/chromeos/first_run/goodies_displayer.cc +++ b/chrome/browser/chromeos/first_run/goodies_displayer.cc
@@ -4,13 +4,13 @@ #include "chrome/browser/chromeos/first_run/goodies_displayer.h" -#include "base/files/file_util.h" #include "base/prefs/pref_service.h" #include "chrome/browser/browser_process.h" -#include "chrome/browser/chromeos/login/session/user_session_manager.h" #include "chrome/browser/chromeos/login/startup_utils.h" #include "chrome/browser/chromeos/policy/browser_policy_connector_chromeos.h" +#include "chrome/browser/profiles/profile.h" #include "chrome/browser/ui/browser.h" +#include "chrome/browser/ui/browser_list.h" #include "chrome/browser/ui/browser_tabstrip.h" #include "chrome/common/pref_names.h" #include "content/public/browser/browser_thread.h" @@ -20,29 +20,19 @@ namespace { -// ChromeOS Goodies page for device's first New Window. -const char kGoodiesURL[] = "https://www.google.com/chrome/devices/goodies.html"; +GoodiesDisplayer* g_goodies_displayer = nullptr; +GoodiesDisplayerTestInfo* g_test_info = nullptr; -// Max days after initial login that we're willing to show Goodies. -static const int kMaxDaysAfterOobeForGoodies = 14; - -// Checks timestamp on OOBE Complete flag file. kCanShowOobeGoodiesPage -// defaults to |true|; we set it to |false| (return |false|) for any device over -// kMaxDaysAfterOobeForGoodies days old, to avoid showing it after update on -// older devices. +// Checks timestamp on OOBE Complete flag file, or uses a fake device age for +// tests. kCanShowOobeGoodiesPage defaults to |true|; set to |false| (return |false|) +// for any device over kMaxDaysAfterOobeForGoodies days old, to avoid showing +// Goodies after update on older devices. bool CheckGoodiesPrefAgainstOobeTimestamp() { DCHECK(content::BrowserThread::GetBlockingPool()->RunsTasksOnCurrentThread()); - const base::FilePath oobe_timestamp_file = - StartupUtils::GetOobeCompleteFlagPath(); - base::File::Info fileInfo; - if (base::GetFileInfo(oobe_timestamp_file, &fileInfo)) { - const base::TimeDelta time_since_oobe = - base::Time::Now() - fileInfo.creation_time; - if (time_since_oobe > - base::TimeDelta::FromDays(kMaxDaysAfterOobeForGoodies)) - return false; - } - return true; + const int days_since_oobe = + g_test_info ? 
g_test_info->days_since_oobe + : StartupUtils::GetTimeSinceOobeFlagFileCreation().InDays(); + return days_since_oobe <= GoodiesDisplayer::kMaxDaysAfterOobeForGoodies; } // Callback into main thread to set pref to |false| if too long since oobe, or @@ -50,42 +40,84 @@ void UpdateGoodiesPrefCantShow(bool can_show_goodies) { DCHECK_CURRENTLY_ON(content::BrowserThread::UI); if (can_show_goodies) { - UserSessionManager::GetInstance()->CreateGoodiesDisplayer(); + if (!g_goodies_displayer) + g_goodies_displayer = new GoodiesDisplayer(); } else { g_browser_process->local_state()->SetBoolean(prefs::kCanShowOobeGoodiesPage, false); } + + if (g_test_info) { + g_test_info->setup_complete = true; + if (!g_test_info->on_setup_complete_callback.is_null()) + g_test_info->on_setup_complete_callback.Run(); + } } } // namespace +GoodiesDisplayerTestInfo::GoodiesDisplayerTestInfo() + : days_since_oobe(0), setup_complete(false) {} + +GoodiesDisplayerTestInfo::~GoodiesDisplayerTestInfo() {} + +const char GoodiesDisplayer::kGoodiesURL[] = + "https://www.google.com/chrome/devices/goodies.html"; + GoodiesDisplayer::GoodiesDisplayer() { + DCHECK_CURRENTLY_ON(content::BrowserThread::UI); BrowserList::AddObserver(this); } +GoodiesDisplayer::~GoodiesDisplayer() { + DCHECK_CURRENTLY_ON(content::BrowserThread::UI); + BrowserList::RemoveObserver(this); +} + // If Goodies page hasn't been shown yet, and Chromebook isn't too old, create -// GoodiesDisplayer to observe BrowserList. -void GoodiesDisplayer::Init() { - if (g_browser_process->local_state()->GetBoolean( - prefs::kCanShowOobeGoodiesPage)) +// GoodiesDisplayer to observe BrowserList. Return |true| if checking age. +// static +bool GoodiesDisplayer::Init() { + const bool can_show = g_browser_process->local_state()->GetBoolean( + prefs::kCanShowOobeGoodiesPage); + if (can_show) { base::PostTaskAndReplyWithResult( content::BrowserThread::GetBlockingPool(), FROM_HERE, base::Bind(&CheckGoodiesPrefAgainstOobeTimestamp), base::Bind(&UpdateGoodiesPrefCantShow)); + } + return can_show; +} + +// static +void GoodiesDisplayer::InitForTesting(GoodiesDisplayerTestInfo* test_info) { + CHECK(!g_test_info) << "GoodiesDisplayer::InitForTesting called twice"; + g_test_info = test_info; + test_info->setup_complete = !Init(); +} + +// static +void GoodiesDisplayer::Delete() { + delete g_goodies_displayer; + g_goodies_displayer = nullptr; } // If conditions enumerated below are met, this loads the Oobe Goodies page for // new Chromebooks; when appropriate, it uses pref to mark page as shown, // removes itself from BrowserListObservers, and deletes itself. void GoodiesDisplayer::OnBrowserSetLastActive(Browser* browser) { - // 1. Not guest or incognito session (keep observing). + // 1. Must be an actual tabbed browser window. + if (browser->type() != Browser::TYPE_TABBED) + return; + + // 2. Not guest or incognito session (keep observing). if (browser->profile()->IsOffTheRecord()) return; PrefService* local_state = g_browser_process->local_state(); - // 2. Not previously shown, or otherwise marked as unavailable. + // 3. Not previously shown, or otherwise marked as unavailable. if (local_state->GetBoolean(prefs::kCanShowOobeGoodiesPage)) { - // 3. Device not enterprise enrolled. + // 4. Device not enterprise enrolled. if (!g_browser_process->platform_part() ->browser_policy_connector_chromeos() ->IsEnterpriseManaged()) @@ -96,8 +128,7 @@ } // Regardless of how we got here, we don't henceforth need to show Goodies. 
- BrowserList::RemoveObserver(this); - UserSessionManager::GetInstance()->DestroyGoodiesDisplayer(); + base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(&Delete)); } } // namespace first_run
diff --git a/chrome/browser/chromeos/first_run/goodies_displayer.h b/chrome/browser/chromeos/first_run/goodies_displayer.h index c3caddf..cf9a081 100644 --- a/chrome/browser/chromeos/first_run/goodies_displayer.h +++ b/chrome/browser/chromeos/first_run/goodies_displayer.h
@@ -5,20 +5,30 @@ #ifndef CHROME_BROWSER_CHROMEOS_FIRST_RUN_GOODIES_DISPLAYER_H_ #define CHROME_BROWSER_CHROMEOS_FIRST_RUN_GOODIES_DISPLAYER_H_ -#include "chrome/browser/ui/browser_list.h" +#include "base/callback.h" #include "chrome/browser/ui/browser_list_observer.h" namespace chromeos { namespace first_run { +struct GoodiesDisplayerTestInfo; + // Handles display of OOBE Goodies page on first display of browser window on // new Chromebooks. class GoodiesDisplayer : public chrome::BrowserListObserver { public: - GoodiesDisplayer(); - ~GoodiesDisplayer() override {} + // ChromeOS Goodies page for new Chromebook promos. + static const char kGoodiesURL[]; - static void Init(); + // Max days after initial login that we're willing to show Goodies. + static const int kMaxDaysAfterOobeForGoodies = 14; + + GoodiesDisplayer(); + ~GoodiesDisplayer() override; + + static bool Init(); + static void InitForTesting(GoodiesDisplayerTestInfo* test_info); + static void Delete(); private: // Overridden from chrome::BrowserListObserver. @@ -27,6 +37,16 @@ DISALLOW_COPY_AND_ASSIGN(GoodiesDisplayer); }; +// For setup during browser test. +struct GoodiesDisplayerTestInfo { + GoodiesDisplayerTestInfo(); + ~GoodiesDisplayerTestInfo(); + + int days_since_oobe; // Fake age of device. + bool setup_complete; // True when finished, whether GD created or not. + base::Closure on_setup_complete_callback; // Called after multithread setup. +}; + } // namespace first_run } // namespace chromeos
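A hypothetical browser-test sketch of the new testing hook; the test body is illustrative only, but the struct fields, InitForTesting(), and kMaxDaysAfterOobeForGoodies are the ones declared above:

  // Drive setup with a fake device age instead of the OOBE flag file's
  // timestamp, then wait for the blocking-pool check to post back to the UI
  // thread before asserting anything about GoodiesDisplayer creation.
  chromeos::first_run::GoodiesDisplayerTestInfo test_info;
  test_info.days_since_oobe = 7;  // Younger than kMaxDaysAfterOobeForGoodies.
  base::RunLoop run_loop;
  test_info.on_setup_complete_callback = run_loop.QuitClosure();
  chromeos::first_run::GoodiesDisplayer::InitForTesting(&test_info);
  if (!test_info.setup_complete)
    run_loop.Run();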
diff --git a/chrome/browser/chromeos/login/session/user_session_manager.cc b/chrome/browser/chromeos/login/session/user_session_manager.cc index 3695b22..0f5e8cd 100644 --- a/chrome/browser/chromeos/login/session/user_session_manager.cc +++ b/chrome/browser/chromeos/login/session/user_session_manager.cc
@@ -1139,7 +1139,7 @@ // launch browser. bool browser_launched = InitializeUserSession(profile); - // Check whether to, then set up for, display OOBE Goodies page on first run. + // If needed, create browser observer to display first run OOBE Goodies page. first_run::GoodiesDisplayer::Init(); // TODO(nkostylev): This pointer should probably never be NULL, but it looks @@ -1523,15 +1523,6 @@ return auth_request_context; } -void UserSessionManager::CreateGoodiesDisplayer() { - if (goodies_displayer_ == nullptr) - goodies_displayer_.reset(new first_run::GoodiesDisplayer); -} - -void UserSessionManager::DestroyGoodiesDisplayer() { - goodies_displayer_.reset(); -} - void UserSessionManager::AttemptRestart(Profile* profile) { // Restart unconditionally in case if we are stuck somewhere in a session // restore process. http://crbug.com/520346. @@ -1768,7 +1759,7 @@ void UserSessionManager::Shutdown() { token_handle_fetcher_.reset(); token_handle_util_.reset(); - goodies_displayer_.reset(); + first_run::GoodiesDisplayer::Delete(); } void UserSessionManager::CreateTokenUtilIfMissing() {
diff --git a/chrome/browser/chromeos/login/session/user_session_manager.h b/chrome/browser/chromeos/login/session/user_session_manager.h index 6230cde..dee2273 100644 --- a/chrome/browser/chromeos/login/session/user_session_manager.h +++ b/chrome/browser/chromeos/login/session/user_session_manager.h
@@ -40,10 +40,6 @@ namespace chromeos { -namespace first_run { -class GoodiesDisplayer; -} // namespace first_run - namespace test { class UserSessionManagerTestApi; } // namespace test @@ -240,12 +236,6 @@ // Returns the auth request context associated with auth data. net::URLRequestContextGetter* GetAuthRequestContext() const; - // Create observer to display OOBE Goodies page on first run browser start. - void CreateGoodiesDisplayer(); - - // Remove observer for OOBE Goodies page when no longer needed. - void DestroyGoodiesDisplayer(); - // Removes a profile from the per-user input methods states map. void RemoveProfileForTesting(Profile* profile); @@ -485,9 +475,6 @@ // Child account status is necessary for InitializeStartUrls call. bool waiting_for_child_account_status_; - // Used to display OOBE Goodies page when browser is first opened. - scoped_ptr<first_run::GoodiesDisplayer> goodies_displayer_; - base::WeakPtrFactory<UserSessionManager> weak_factory_; DISALLOW_COPY_AND_ASSIGN(UserSessionManager);
diff --git a/chrome/browser/chromeos/login/startup_utils.cc b/chrome/browser/chromeos/login/startup_utils.cc index 9dcfdc0..9ba6ff23 100644 --- a/chrome/browser/chromeos/login/startup_utils.cc +++ b/chrome/browser/chromeos/login/startup_utils.cc
@@ -96,8 +96,7 @@ // completed. // On chrome device, returns /home/chronos/.oobe_completed. // On Linux desktop, returns {DIR_USER_DATA}/.oobe_completed. -// static -base::FilePath StartupUtils::GetOobeCompleteFlagPath() { +static base::FilePath GetOobeCompleteFlagPath() { // The constant is defined here so it won't be referenced directly. const char kOobeCompleteFlagFilePath[] = "/home/chronos/.oobe_completed"; @@ -110,14 +109,22 @@ } } +// static +base::TimeDelta StartupUtils::GetTimeSinceOobeFlagFileCreation() { + const base::FilePath oobe_complete_flag_path = GetOobeCompleteFlagPath(); + base::File::Info file_info; + if (base::GetFileInfo(oobe_complete_flag_path, &file_info)) + return base::Time::Now() - file_info.creation_time; + return base::TimeDelta(); +} + static void CreateOobeCompleteFlagFile() { // Create flag file for boot-time init scripts. - const base::FilePath oobe_complete_path = - StartupUtils::GetOobeCompleteFlagPath(); - if (!base::PathExists(oobe_complete_path)) { - FILE* oobe_flag_file = base::OpenFile(oobe_complete_path, "w+b"); + const base::FilePath oobe_complete_flag_path = GetOobeCompleteFlagPath(); + if (!base::PathExists(oobe_complete_flag_path)) { + FILE* oobe_flag_file = base::OpenFile(oobe_complete_flag_path, "w+b"); if (oobe_flag_file == NULL) - DLOG(WARNING) << oobe_complete_path.value() << " doesn't exist."; + DLOG(WARNING) << oobe_complete_flag_path.value() << " doesn't exist."; else base::CloseFile(oobe_flag_file); } @@ -140,8 +147,8 @@ // Pref is not set. For compatibility check flag file. It causes blocking // IO on UI thread. But it's required for update from old versions. base::ThreadRestrictions::ScopedAllowIO allow_io; - base::FilePath oobe_complete_flag_file_path = GetOobeCompleteFlagPath(); - bool file_exists = base::PathExists(oobe_complete_flag_file_path); + const base::FilePath oobe_complete_flag_path = GetOobeCompleteFlagPath(); + bool file_exists = base::PathExists(oobe_complete_flag_path); SaveIntegerPreferenceForced(prefs::kDeviceRegistered, file_exists ? 1 : 0); return file_exists; }
diff --git a/chrome/browser/chromeos/login/startup_utils.h b/chrome/browser/chromeos/login/startup_utils.h index 3314e02..a4507e7 100644 --- a/chrome/browser/chromeos/login/startup_utils.h +++ b/chrome/browser/chromeos/login/startup_utils.h
@@ -8,10 +8,13 @@ #include <string> #include "base/callback.h" -#include "base/files/file_path.h" class PrefRegistrySimple; +namespace base { +class TimeDelta; +} + namespace chromeos { // Static utility methods used at startup time to get/change bits of device @@ -33,8 +36,8 @@ // Stores the next pending OOBE screen in case it will need to be resumed. static void SaveOobePendingScreen(const std::string& screen); - // Path to flag file indicating oobe completion. - static base::FilePath GetOobeCompleteFlagPath(); + // Returns the time since the Oobe flag file was created. + static base::TimeDelta GetTimeSinceOobeFlagFileCreation(); // Returns device registration completion status, i.e. second part of OOBE. static bool IsDeviceRegistered();
diff --git a/chrome/browser/extensions/display_info_provider_chromeos_unittest.cc b/chrome/browser/extensions/display_info_provider_chromeos_unittest.cc index a1b71ace..6dfd93d 100644 --- a/chrome/browser/extensions/display_info_provider_chromeos_unittest.cc +++ b/chrome/browser/extensions/display_info_provider_chromeos_unittest.cc
@@ -5,8 +5,8 @@ #include "extensions/browser/api/system_display/display_info_provider.h" #include "ash/ash_switches.h" -#include "ash/content/display/screen_orientation_controller_chromeos.h" #include "ash/display/display_manager.h" +#include "ash/display/screen_orientation_controller_chromeos.h" #include "ash/display/window_tree_host_manager.h" #include "ash/screen_util.h" #include "ash/shell.h"
diff --git a/chrome/browser/media/chrome_webrtc_audio_quality_browsertest.cc b/chrome/browser/media/chrome_webrtc_audio_quality_browsertest.cc index 45debc9..a85d556 100644 --- a/chrome/browser/media/chrome_webrtc_audio_quality_browsertest.cc +++ b/chrome/browser/media/chrome_webrtc_audio_quality_browsertest.cc
@@ -137,6 +137,9 @@ // getUserMedia, and the getUserMedia-based tests will play back a file // through the fake device using using --use-file-for-fake-audio-capture. command_line->AppendSwitch(switches::kUseFakeDeviceForMediaStream); + + // Add loopback interface such that there is always connectivity. + command_line->AppendSwitch(switches::kAllowLoopbackInPeerConnection); } void ConfigureFakeDeviceToPlayFile(const base::FilePath& wav_file_path) {
diff --git a/chrome/browser/metrics/perf/cpu_identity.cc b/chrome/browser/metrics/perf/cpu_identity.cc new file mode 100644 index 0000000..476285e --- /dev/null +++ b/chrome/browser/metrics/perf/cpu_identity.cc
@@ -0,0 +1,103 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "chrome/browser/metrics/perf/cpu_identity.h" + +#include <algorithm> // for std::lower_bound() +#include <string.h> + +#include "base/cpu.h" +#include "base/macros.h" +#include "base/strings/string_util.h" +#include "base/strings/stringprintf.h" +#include "base/sys_info.h" + +namespace internal { + +const IntelUarchTableEntry kIntelUarchTable[] = { + // These were found on various sources on the Internet. Main ones are: + // http://instlatx64.atw.hu/ for CPUID to model name and + // http://www.cpu-world.com for model name to microarchitecture + {"06_09", "Banias"}, + {"06_0D", "Dothan"}, + {"06_0F", "Merom"}, + {"06_16", "Merom"}, + {"06_17", "Nehalem"}, + {"06_1A", "Nehalem"}, + {"06_1C", "Bonnell"}, // Atom + {"06_1D", "Nehalem"}, + {"06_1E", "Nehalem"}, + {"06_1F", "Nehalem"}, + {"06_25", "Westmere"}, + {"06_26", "Bonnell"}, // Atom + {"06_2A", "SandyBridge"}, + {"06_2C", "Westmere"}, + {"06_2D", "SandyBridge"}, + {"06_2E", "Nehalem"}, + {"06_2F", "Westmere"}, + {"06_36", "Saltwell"}, // Atom + {"06_37", "Silvermont"}, + {"06_3A", "IvyBridge"}, + {"06_3C", "Haswell"}, + {"06_3D", "Broadwell"}, + {"06_3E", "IvyBridge"}, + {"06_3F", "Haswell"}, + {"06_45", "Haswell"}, + {"06_46", "Haswell"}, + {"06_47", "Broadwell"}, // Broadwell-H + {"06_4C", "Airmont"}, // Braswell + {"06_4E", "Skylake"}, + {"06_56", "Broadwell"}, // Broadwell-DE + {"0F_03", "Prescott"}, + {"0F_04", "Prescott"}, + {"0F_06", "Presler"}, +}; + +const IntelUarchTableEntry* kIntelUarchTableEnd = + kIntelUarchTable + arraysize(kIntelUarchTable); + +bool IntelUarchTableCmp(const IntelUarchTableEntry& a, + const IntelUarchTableEntry& b) { + return strcmp(a.family_model, b.family_model) < 0; +} + +} // namespace internal + +CPUIdentity::CPUIdentity() : family(0), model(0) {} + +CPUIdentity::~CPUIdentity() {} + +std::string GetIntelUarch(const CPUIdentity& cpuid) { + if (cpuid.vendor != "GenuineIntel") + return std::string(); // Non-Intel + + std::string family_model = + base::StringPrintf("%02X_%02X", cpuid.family, cpuid.model); + const internal::IntelUarchTableEntry search_elem = {family_model.c_str(), ""}; + const auto bound = std::lower_bound( + internal::kIntelUarchTable, internal::kIntelUarchTableEnd, + search_elem, internal::IntelUarchTableCmp); + if (bound->family_model != family_model) + return std::string(); // Unknown uarch + return bound->uarch; +} + +CPUIdentity GetCPUIdentity() { + CPUIdentity result = {}; + result.arch = base::SysInfo::OperatingSystemArchitecture(); + base::CPU cpuid; + result.vendor = cpuid.vendor_name(); + result.family = cpuid.family(); + result.model = cpuid.model(); + result.model_name = cpuid.cpu_brand(); + return result; +} + +std::string SimplifyCPUModelName(const std::string& model_name) { + std::string result = model_name; + std::replace(result.begin(), result.end(), ' ', '-'); + base::ReplaceSubstringsAfterOffset(&result, 0, "(R)", ""); + return base::ToLowerASCII(result); +} +
diff --git a/chrome/browser/metrics/perf/cpu_identity.h b/chrome/browser/metrics/perf/cpu_identity.h new file mode 100644 index 0000000..0f63b36 --- /dev/null +++ b/chrome/browser/metrics/perf/cpu_identity.h
@@ -0,0 +1,59 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef CHROME_BROWSER_METRICS_PERF_CPU_IDENTITY_H_ +#define CHROME_BROWSER_METRICS_PERF_CPU_IDENTITY_H_ + +#include <string> + +// Struct containing the CPU identity fields used to choose perf commands. +// These are populated from base::CPU, but having them in a settable struct +// makes things testable. +struct CPUIdentity { + CPUIdentity(); + ~CPUIdentity(); + + // The system architecture based on uname(). + // (Technically, not a property of the CPU.) + std::string arch; + // CPUID fields: + std::string vendor; // e.g. "GenuineIntel" + int family; + int model; + // CPU model name. e.g. "Intel(R) Celeron(R) 2955U @ 1.40GHz" + std::string model_name; +}; + +// Get the CPUIdentity based on the actual system. +CPUIdentity GetCPUIdentity(); + +// Return the Intel microarchitecture based on the family and model derived +// from |cpuid|, and kIntelUarchTable, or the empty string for non-Intel or +// unknown microarchitectures. +std::string GetIntelUarch(const CPUIdentity& cpuid); + +// Simplify a CPU model name. The rules are: +// - Replace spaces with hyphens. +// - Strip all "(R)" symbols. +// - Convert to lower case. +std::string SimplifyCPUModelName(const std::string& model_name); + +namespace internal { + +// Exposed for unit testing. + +struct IntelUarchTableEntry { + const char* family_model; + const char* uarch; +}; + +bool IntelUarchTableCmp(const IntelUarchTableEntry& a, + const IntelUarchTableEntry& b); + +extern const IntelUarchTableEntry kIntelUarchTable[]; + +extern const IntelUarchTableEntry* kIntelUarchTableEnd; + +} // namespace internal + +#endif  // CHROME_BROWSER_METRICS_PERF_CPU_IDENTITY_H_
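A self-contained sketch of the sorted-table lookup that GetIntelUarch() performs, shown with a small stand-in table; the explicit end-of-table guard is an addition in this sketch, not code quoted from the files above:

  #include <algorithm>
  #include <cstring>
  #include <string>

  struct Entry { const char* family_model; const char* uarch; };

  // Must stay sorted by family_model, exactly like kIntelUarchTable.
  const Entry kTable[] = {{"06_2A", "SandyBridge"}, {"06_3A", "IvyBridge"}};
  const Entry* kTableEnd = kTable + sizeof(kTable) / sizeof(kTable[0]);

  std::string LookupUarch(const std::string& family_model) {
    const Entry key = {family_model.c_str(), ""};
    const Entry* it = std::lower_bound(
        kTable, kTableEnd, key, [](const Entry& a, const Entry& b) {
          return strcmp(a.family_model, b.family_model) < 0;
        });
    if (it == kTableEnd || family_model != it->family_model)
      return std::string();  // Unknown or out-of-range family/model.
    return it->uarch;
  }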
diff --git a/chrome/browser/metrics/perf/cpu_identity_unittest.cc b/chrome/browser/metrics/perf/cpu_identity_unittest.cc new file mode 100644 index 0000000..f75320aa --- /dev/null +++ b/chrome/browser/metrics/perf/cpu_identity_unittest.cc
@@ -0,0 +1,66 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "chrome/browser/metrics/perf/cpu_identity.h" + +#include <algorithm> +#include <string> +#include <vector> + +#include "testing/gtest/include/gtest/gtest.h" + +TEST(CpuIdentityTest, IntelUarchTableIsSorted) { + EXPECT_TRUE(std::is_sorted( + internal::kIntelUarchTable, + internal::kIntelUarchTableEnd, + internal::IntelUarchTableCmp)); +} + +TEST(CpuIdentityTest, DefaultCommandsBasedOnUarch_IvyBridge) { + CPUIdentity cpuid; + cpuid.arch = "x86_64"; + cpuid.vendor = "GenuineIntel"; + cpuid.family = 0x06; + cpuid.model = 0x3a; // IvyBridge + cpuid.model_name = ""; + EXPECT_EQ("IvyBridge", GetIntelUarch(cpuid)); +} + +TEST(CpuIdentityTest, DefaultCommandsBasedOnUarch_SandyBridge) { + CPUIdentity cpuid; + cpuid.arch = "x86_64"; + cpuid.vendor = "GenuineIntel"; + cpuid.family = 0x06; + cpuid.model = 0x2a; // SandyBridge + cpuid.model_name = ""; + EXPECT_EQ("SandyBridge", GetIntelUarch(cpuid)); +} + +TEST(CpuIdentityTest, DefaultCommandsBasedOnArch_x86_32) { + CPUIdentity cpuid; + cpuid.arch = "x86"; + cpuid.vendor = "GenuineIntel"; + cpuid.family = 0x06; + cpuid.model = 0x2f; // Westmere + cpuid.model_name = ""; + EXPECT_EQ("Westmere", GetIntelUarch(cpuid)); +} + +TEST(CpuIdentityTest, DefaultCommandsBasedOnArch_Unknown) { + CPUIdentity cpuid; + cpuid.arch = "x86_64"; + cpuid.vendor = "NotIntel"; + cpuid.family = 0; + cpuid.model = 0; + cpuid.model_name = ""; + EXPECT_EQ("", GetIntelUarch(cpuid)); +} + +TEST(CpuIdentityTest, SimplifyCPUModelName) { + EXPECT_EQ("", SimplifyCPUModelName("")); + EXPECT_EQ("intel-celeron-2955u-@-1.40ghz", + SimplifyCPUModelName("Intel(R) Celeron(R) 2955U @ 1.40GHz")); + EXPECT_EQ("armv7-processor-rev-3-(v7l)", + SimplifyCPUModelName("ARMv7 Processor rev 3 (v7l)")); +}
diff --git a/chrome/browser/password_manager/password_manager_test_base.cc b/chrome/browser/password_manager/password_manager_test_base.cc index 275b674a..4c192a3 100644 --- a/chrome/browser/password_manager/password_manager_test_base.cc +++ b/chrome/browser/password_manager/password_manager_test_base.cc
@@ -23,7 +23,6 @@ #include "components/password_manager/core/browser/test_password_store.h" #include "components/password_manager/core/common/password_manager_switches.h" #include "content/public/browser/render_frame_host.h" -#include "content/public/browser/render_view_host.h" #include "content/public/test/browser_test_utils.h" #include "content/public/test/test_utils.h" #include "net/test/embedded_test_server/embedded_test_server.h"
diff --git a/chrome/browser/printing/print_preview_dialog_controller.cc b/chrome/browser/printing/print_preview_dialog_controller.cc index 37058430..6937e1367 100644 --- a/chrome/browser/printing/print_preview_dialog_controller.cc +++ b/chrome/browser/printing/print_preview_dialog_controller.cc
@@ -156,7 +156,7 @@ // static void PrintPreviewDialogController::PrintPreview(WebContents* initiator) { - if (initiator->ShowingInterstitialPage()) + if (initiator->ShowingInterstitialPage() || initiator->IsCrashed()) return; PrintPreviewDialogController* dialog_controller = GetInstance();
diff --git a/chrome/browser/printing/print_view_manager_base.cc b/chrome/browser/printing/print_view_manager_base.cc index 6515b03..84cabec 100644 --- a/chrome/browser/printing/print_view_manager_base.cc +++ b/chrome/browser/printing/print_view_manager_base.cc
@@ -504,8 +504,9 @@ } bool PrintViewManagerBase::PrintNowInternal(IPC::Message* message) { - // Don't print / print preview interstitials. - if (web_contents()->ShowingInterstitialPage()) { + // Don't print / print preview interstitials or crashed tabs. + if (web_contents()->ShowingInterstitialPage() || + web_contents()->IsCrashed()) { delete message; return false; }
diff --git a/chrome/browser/resources/chromeos/chromevox/BUILD.gn b/chrome/browser/resources/chromeos/chromevox/BUILD.gn index 0416c622..105a1db 100644 --- a/chrome/browser/resources/chromeos/chromevox/BUILD.gn +++ b/chrome/browser/resources/chromeos/chromevox/BUILD.gn
@@ -315,6 +315,7 @@ "//chrome/browser", "//chrome/renderer", "//chrome/test:test_support", + "//chrome/test:test_support_ui", "//testing/gmock", "//testing/gtest", ":chromevox_extjs_tests",
diff --git a/chrome/browser/resources/chromeos/chromevox/chromevox_tests.gypi b/chrome/browser/resources/chromeos/chromevox/chromevox_tests.gypi index ca47324..7fb72f1 100644 --- a/chrome/browser/resources/chromeos/chromevox/chromevox_tests.gypi +++ b/chrome/browser/resources/chromeos/chromevox/chromevox_tests.gypi
@@ -98,6 +98,7 @@ '<(DEPTH)/chrome/chrome.gyp:browser', '<(DEPTH)/chrome/chrome.gyp:renderer', '<(DEPTH)/chrome/chrome.gyp:test_support_common', + '<(DEPTH)/chrome/chrome.gyp:test_support_ui', '<(DEPTH)/chrome/chrome_resources.gyp:chrome_resources', '<(DEPTH)/chrome/chrome_resources.gyp:chrome_strings', '<(DEPTH)/chrome/chrome_resources.gyp:packed_extra_resources',
diff --git a/chrome/browser/resources/md_downloads/crisper.js b/chrome/browser/resources/md_downloads/crisper.js index 2f7e3258..d6c6389 100644 --- a/chrome/browser/resources/md_downloads/crisper.js +++ b/chrome/browser/resources/md_downloads/crisper.js
@@ -15345,6 +15345,9 @@ is: 'downloads-toolbar', attached: function() { + // isRTL() only works after i18n_template.js runs to set <html dir>. + this.overflowAlign_ = isRTL() ? 'left' : 'right'; + /** @private {!SearchFieldDelegate} */ this.searchFieldDelegate_ = new ToolbarSearchFieldDelegate(this); this.$['search-input'].setDelegate(this.searchFieldDelegate_); @@ -15357,6 +15360,11 @@ value: false, observer: 'onDownloadsShowingChange_', }, + + overflowAlign_: { + type: String, + value: 'right', + }, }, /** @return {boolean} Whether removal can be undone. */
diff --git a/chrome/browser/resources/md_downloads/toolbar.html b/chrome/browser/resources/md_downloads/toolbar.html index e7d4a9b..5161a21 100644 --- a/chrome/browser/resources/md_downloads/toolbar.html +++ b/chrome/browser/resources/md_downloads/toolbar.html
@@ -8,6 +8,7 @@ <link rel="import" href="chrome://resources/cr_elements/v1_0/cr_search_field/cr_search_field.html"> <link rel="import" href="chrome://resources/html/assert.html"> <link rel="import" href="chrome://resources/html/cr.html"> +<link rel="import" href="chrome://resources/html/util.html"> <dom-module id="downloads-toolbar"> <template> @@ -23,7 +24,7 @@ <div id="search"> <cr-search-field id="search-input" i18n-values="label:search;clear-label:clearSearch"></cr-search-field> - <paper-menu-button id="more" horizontal-align="right"> + <paper-menu-button id="more" horizontal-align="[[overflowAlign_]]"> <paper-icon-button icon="more-vert" i18n-values="title:moreActions" class="dropdown-trigger"></paper-icon-button> <paper-menu class="dropdown-content">
diff --git a/chrome/browser/resources/md_downloads/toolbar.js b/chrome/browser/resources/md_downloads/toolbar.js index 4827868..f9437c02 100644 --- a/chrome/browser/resources/md_downloads/toolbar.js +++ b/chrome/browser/resources/md_downloads/toolbar.js
@@ -7,6 +7,9 @@ is: 'downloads-toolbar', attached: function() { + // isRTL() only works after i18n_template.js runs to set <html dir>. + this.overflowAlign_ = isRTL() ? 'left' : 'right'; + /** @private {!SearchFieldDelegate} */ this.searchFieldDelegate_ = new ToolbarSearchFieldDelegate(this); this.$['search-input'].setDelegate(this.searchFieldDelegate_); @@ -19,6 +22,11 @@ value: false, observer: 'onDownloadsShowingChange_', }, + + overflowAlign_: { + type: String, + value: 'right', + }, }, /** @return {boolean} Whether removal can be undone. */
diff --git a/chrome/browser/resources/md_downloads/vulcanized.html b/chrome/browser/resources/md_downloads/vulcanized.html index 20b005f7f..fe3dd30 100644 --- a/chrome/browser/resources/md_downloads/vulcanized.html +++ b/chrome/browser/resources/md_downloads/vulcanized.html
@@ -3259,7 +3259,7 @@ </div> <div id="search"> <cr-search-field id="search-input" i18n-values="label:search;clear-label:clearSearch"></cr-search-field> - <paper-menu-button id="more" horizontal-align="right"> + <paper-menu-button id="more" horizontal-align="[[overflowAlign_]]"> <paper-icon-button icon="more-vert" i18n-values="title:moreActions" class="dropdown-trigger"></paper-icon-button> <paper-menu class="dropdown-content"> <paper-item class="clear-all" i18n-content="clearAll" on-click="onClearAllClick_"></paper-item>
diff --git a/chrome/browser/search_engines/template_url_scraper_browsertest.cc b/chrome/browser/search_engines/template_url_scraper_browsertest.cc new file mode 100644 index 0000000..b5beb9a --- /dev/null +++ b/chrome/browser/search_engines/template_url_scraper_browsertest.cc
@@ -0,0 +1,107 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/files/file_util.h"
+#include "base/path_service.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/utf_string_conversions.h"
+#include "chrome/browser/profiles/profile.h"
+#include "chrome/browser/search_engines/template_url_service_factory.h"
+#include "chrome/browser/ui/browser.h"
+#include "chrome/browser/ui/tabs/tab_strip_model.h"
+#include "chrome/common/chrome_paths.h"
+#include "chrome/test/base/in_process_browser_test.h"
+#include "chrome/test/base/ui_test_utils.h"
+#include "components/search_engines/template_url_prepopulate_data.h"
+#include "components/search_engines/template_url_service.h"
+#include "content/public/test/browser_test_utils.h"
+#include "content/public/test/test_navigation_observer.h"
+#include "net/base/net_util.h"
+#include "net/dns/mock_host_resolver.h"
+#include "net/test/embedded_test_server/embedded_test_server.h"
+#include "net/test/embedded_test_server/http_response.h"
+
+namespace {
+
+using TemplateURLScraperTest = InProcessBrowserTest;
+
+class TemplateURLServiceLoader {
+ public:
+  explicit TemplateURLServiceLoader(TemplateURLService* model) : model_(model) {
+    if (model_->loaded())
+      return;
+
+    scoped_refptr<content::MessageLoopRunner> message_loop_runner =
+        new content::MessageLoopRunner;
+    scoped_ptr<TemplateURLService::Subscription> subscription =
+        model_->RegisterOnLoadedCallback(
+            message_loop_runner->QuitClosure());
+    model_->Load();
+    message_loop_runner->Run();
+  }
+
+ private:
+  TemplateURLService* model_;
+
+  DISALLOW_COPY_AND_ASSIGN(TemplateURLServiceLoader);
+};
+
+scoped_ptr<net::test_server::HttpResponse> SendResponse(
+    const net::test_server::HttpRequest& request) {
+  base::FilePath test_data_dir;
+  PathService::Get(chrome::DIR_TEST_DATA, &test_data_dir);
+  base::FilePath index_file = test_data_dir.AppendASCII("template_url_scraper")
+                                  .AppendASCII("submit_handler")
+                                  .AppendASCII("index.html");
+  std::string file_contents;
+  EXPECT_TRUE(base::ReadFileToString(index_file, &file_contents));
+  scoped_ptr<net::test_server::BasicHttpResponse> response(
+      new net::test_server::BasicHttpResponse);
+  response->set_content(file_contents);
+  return response.Pass();
+}
+
+}  // namespace
+
+IN_PROC_BROWSER_TEST_F(TemplateURLScraperTest, ScrapeWithOnSubmit) {
+  host_resolver()->AddRule("*.foo.com", "localhost");
+  embedded_test_server()->RegisterRequestHandler(base::Bind(&SendResponse));
+  ASSERT_TRUE(embedded_test_server()->InitializeAndWaitUntilReady());
+
+  TemplateURLService* template_urls =
+      TemplateURLServiceFactory::GetInstance()->GetForProfile(
+          browser()->profile());
+  TemplateURLServiceLoader loader(template_urls);
+
+  TemplateURLService::TemplateURLVector all_urls =
+      template_urls->GetTemplateURLs();
+
+  // We need to subtract the default pre-populated engines that the profile is
+  // set up with.
+  size_t default_index = 0;
+  ScopedVector<TemplateURLData> prepopulate_urls =
+      TemplateURLPrepopulateData::GetPrepopulatedEngines(
+          browser()->profile()->GetPrefs(),
+          &default_index);
+
+  EXPECT_EQ(prepopulate_urls.size(), all_urls.size());
+
+  std::string port(base::IntToString(embedded_test_server()->port()));
+  ui_test_utils::NavigateToURLBlockUntilNavigationsComplete(
+      browser(), GURL("http://www.foo.com:" + port + "/"), 1);
+
+  base::string16 title;
+  ui_test_utils::GetCurrentTabTitle(browser(), &title);
+  ASSERT_EQ(base::ASCIIToUTF16("Submit handler TemplateURL scraping test"),
+            title);
+
+  content::WebContents* web_contents =
+      browser()->tab_strip_model()->GetActiveWebContents();
+  content::TestNavigationObserver observer(web_contents);
+  EXPECT_TRUE(content::ExecuteScript(web_contents, "submit_form()"));
+  observer.Wait();
+
+  all_urls = template_urls->GetTemplateURLs();
+  EXPECT_EQ(prepopulate_urls.size() + 1, all_urls.size());
+}
diff --git a/chrome/browser/search_engines/template_url_scraper_unittest.cc b/chrome/browser/search_engines/template_url_scraper_unittest.cc deleted file mode 100644 index 719a296..0000000 --- a/chrome/browser/search_engines/template_url_scraper_unittest.cc +++ /dev/null
@@ -1,77 +0,0 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "chrome/browser/profiles/profile.h" -#include "chrome/browser/search_engines/template_url_service_factory.h" -#include "chrome/browser/ui/browser.h" -#include "chrome/test/base/in_process_browser_test.h" -#include "chrome/test/base/ui_test_utils.h" -#include "components/search_engines/template_url_prepopulate_data.h" -#include "components/search_engines/template_url_service.h" -#include "net/base/net_util.h" -#include "net/dns/mock_host_resolver.h" - -namespace { -class TemplateURLScraperTest : public InProcessBrowserTest { - public: - TemplateURLScraperTest() { - } - - private: - DISALLOW_COPY_AND_ASSIGN(TemplateURLScraperTest); -}; - -class TemplateURLServiceLoader { - public: - explicit TemplateURLServiceLoader(TemplateURLService* model) : model_(model) { - scoped_refptr<content::MessageLoopRunner> message_loop_runner = - new content::MessageLoopRunner; - scoped_ptr<TemplateURLService::Subscription> subscription = - model_->RegisterOnLoadedCallback( - message_loop_runner->QuitClosure()); - model_->Load(); - message_loop_runner->Run(); - } - - private: - TemplateURLService* model_; - - DISALLOW_COPY_AND_ASSIGN(TemplateURLServiceLoader); -}; - -} // namespace - -/* -IN_PROC_BROWSER_TEST_F(TemplateURLScraperTest, ScrapeWithOnSubmit) { - host_resolver()->AddRule("*.foo.com", "localhost"); - - TemplateURLService* template_urls = - TemplateURLServiceFactory::GetInstance(browser()->profile()); - TemplateURLServiceLoader loader(template_urls); - - TemplateURLService::TemplateURLVector all_urls = - template_urls->GetTemplateURLs(); - - // We need to substract the default pre-populated engines that the profile is - // set up with. - size_t default_index = 0; - std::vector<TemplateURL*> prepopulate_urls; - TemplateURLPrepopulateData::GetPrepopulatedEngines( - browser()->profile()->GetPrefs(), - &prepopulate_urls, - &default_index); - - EXPECT_EQ(prepopulate_urls.size(), all_urls.size()); - - scoped_refptr<HTTPTestServer> server( - HTTPTestServer::CreateServerWithFileRootURL( - L"chrome/test/data/template_url_scraper/submit_handler", L"/", - g_browser_process->io_thread()->message_loop())); - ui_test_utils::NavigateToURLBlockUntilNavigationsComplete( - browser(), GURL("http://www.foo.com:1337/"), 2); - - all_urls = template_urls->GetTemplateURLs(); - EXPECT_EQ(1, all_urls.size() - prepopulate_urls.size()); -} -*/
diff --git a/chrome/browser/spellchecker/spellcheck_custom_dictionary.cc b/chrome/browser/spellchecker/spellcheck_custom_dictionary.cc index a0175c0..84a9b526 100644 --- a/chrome/browser/spellchecker/spellcheck_custom_dictionary.cc +++ b/chrome/browser/spellchecker/spellcheck_custom_dictionary.cc
@@ -87,44 +87,6 @@ base::TrimWhitespaceASCII(word, base::TRIM_ALL, &tmp); } -// Loads the custom spellcheck dictionary from |path| into |custom_words|. If -// the dictionary checksum is not valid, but backup checksum is valid, then -// restores the backup and loads that into |custom_words| instead. If the backup -// is invalid too, then clears |custom_words|. Must be called on the file -// thread. -void LoadDictionaryFileReliably(const base::FilePath& path, - std::set<std::string>* custom_words) { - DCHECK_CURRENTLY_ON(BrowserThread::FILE); - DCHECK(custom_words); - // Load the contents and verify the checksum. - if (LoadFile(path, custom_words) == VALID_CHECKSUM) - return; - // Checksum is not valid. See if there's a backup. - base::FilePath backup = path.AddExtension(BACKUP_EXTENSION); - if (!base::PathExists(backup)) - return; - // Load the backup and verify its checksum. - if (LoadFile(backup, custom_words) != VALID_CHECKSUM) - return; - // Backup checksum is valid. Restore the backup. - base::CopyFile(backup, path); -} - -// Backs up the original dictionary, saves |custom_words| and its checksum into -// the custom spellcheck dictionary at |path|. -void SaveDictionaryFileReliably(const base::FilePath& path, - const std::set<std::string>& custom_words) { - DCHECK_CURRENTLY_ON(BrowserThread::FILE); - std::stringstream content; - for (const std::string& word : custom_words) - content << word << '\n'; - - std::string checksum = base::MD5String(content.str()); - content << CHECKSUM_PREFIX << checksum; - base::CopyFile(path, path.AddExtension(BACKUP_EXTENSION)); - base::ImportantFileWriter::WriteFileAtomically(path, content.str()); -} - // Removes duplicate and invalid words from |to_add| word list. Looks for // duplicates in both |to_add| and |existing| word lists. Returns a bitmap of // |ChangeSanitationResult| values. @@ -150,6 +112,51 @@ return result; } +// Loads and returns the custom spellcheck dictionary from |path|. Must be +// called on the file thread. +scoped_ptr<SpellcheckCustomDictionary::LoadFileResult> +LoadDictionaryFileReliably(const base::FilePath& path) { + DCHECK_CURRENTLY_ON(BrowserThread::FILE); + // Load the contents and verify the checksum. + scoped_ptr<SpellcheckCustomDictionary::LoadFileResult> result( + new SpellcheckCustomDictionary::LoadFileResult); + if (LoadFile(path, &result->words) == VALID_CHECKSUM) { + result->is_valid_file = + VALID_CHANGE == + SanitizeWordsToAdd(std::set<std::string>(), &result->words); + return result; + } + // Checksum is not valid. See if there's a backup. + base::FilePath backup = path.AddExtension(BACKUP_EXTENSION); + if (base::PathExists(backup)) + LoadFile(backup, &result->words); + SanitizeWordsToAdd(std::set<std::string>(), &result->words); + return result; +} + +// Backs up the original dictionary, saves |custom_words| and its checksum into +// the custom spellcheck dictionary at |path|. 
+void SaveDictionaryFileReliably(const base::FilePath& path, + const std::set<std::string>& custom_words) { + DCHECK_CURRENTLY_ON(BrowserThread::FILE); + std::stringstream content; + for (const std::string& word : custom_words) + content << word << '\n'; + + std::string checksum = base::MD5String(content.str()); + content << CHECKSUM_PREFIX << checksum; + base::CopyFile(path, path.AddExtension(BACKUP_EXTENSION)); + base::ImportantFileWriter::WriteFileAtomically(path, content.str()); +} + +void SavePassedWordsToDictionaryFileReliably( + const base::FilePath& path, + scoped_ptr<SpellcheckCustomDictionary::LoadFileResult> load_file_result) { + DCHECK_CURRENTLY_ON(BrowserThread::FILE); + DCHECK(load_file_result); + SaveDictionaryFileReliably(path, load_file_result->words); +} + // Removes word from |to_remove| that are missing from |existing| word list and // sorts |to_remove|. Returns a bitmap of |ChangeSanitationResult| values. int SanitizeWordsToRemove(const std::set<std::string>& existing, @@ -372,19 +379,18 @@ return syncer::SyncError(); } +SpellcheckCustomDictionary::LoadFileResult::LoadFileResult() + : is_valid_file(false) {} + +SpellcheckCustomDictionary::LoadFileResult::~LoadFileResult() {} + // static -scoped_ptr<std::set<std::string>> +scoped_ptr<SpellcheckCustomDictionary::LoadFileResult> SpellcheckCustomDictionary::LoadDictionaryFile(const base::FilePath& path) { DCHECK_CURRENTLY_ON(BrowserThread::FILE); - scoped_ptr<std::set<std::string>> words(new std::set<std::string>); - LoadDictionaryFileReliably(path, words.get()); - if (!words->empty() && - VALID_CHANGE != - SanitizeWordsToAdd(std::set<std::string>(), words.get())) { - SaveDictionaryFileReliably(path, *words); - } - SpellCheckHostMetrics::RecordCustomWordCountStats(words->size()); - return words; + scoped_ptr<LoadFileResult> result = LoadDictionaryFileReliably(path); + SpellCheckHostMetrics::RecordCustomWordCountStats(result->words.size()); + return result; } // static @@ -397,30 +403,38 @@ if (dictionary_change->empty()) return; - std::set<std::string> custom_words; - LoadDictionaryFileReliably(path, &custom_words); + scoped_ptr<LoadFileResult> result = LoadDictionaryFileReliably(path); // Add words. - custom_words.insert(dictionary_change->to_add().begin(), - dictionary_change->to_add().end()); + result->words.insert(dictionary_change->to_add().begin(), + dictionary_change->to_add().end()); // Remove words and save the remainder. - SaveDictionaryFileReliably(path, - base::STLSetDifference<std::set<std::string>>( - custom_words, dictionary_change->to_remove())); + SaveDictionaryFileReliably( + path, base::STLSetDifference<std::set<std::string>>( + result->words, dictionary_change->to_remove())); } -void SpellcheckCustomDictionary::OnLoaded( - scoped_ptr<std::set<std::string>> custom_words) { +void SpellcheckCustomDictionary::OnLoaded(scoped_ptr<LoadFileResult> result) { DCHECK_CURRENTLY_ON(BrowserThread::UI); - DCHECK(custom_words); + DCHECK(result); Change dictionary_change; - dictionary_change.AddWords(*custom_words); + dictionary_change.AddWords(result->words); dictionary_change.Sanitize(GetWords()); Apply(dictionary_change); Sync(dictionary_change); is_loaded_ = true; FOR_EACH_OBSERVER(Observer, observers_, OnCustomDictionaryLoaded()); + if (!result->is_valid_file) { + // Save cleaned up data only after startup. 
+ fix_invalid_file_.Reset( + base::Bind(&SpellcheckCustomDictionary::FixInvalidFile, + weak_ptr_factory_.GetWeakPtr(), base::Passed(&result))); + BrowserThread::PostAfterStartupTask( + FROM_HERE, + BrowserThread::GetMessageLoopProxyForThread(BrowserThread::UI), + fix_invalid_file_.callback()); + } } void SpellcheckCustomDictionary::Apply(const Change& dictionary_change) { @@ -437,8 +451,18 @@ } } +void SpellcheckCustomDictionary::FixInvalidFile( + scoped_ptr<LoadFileResult> load_file_result) { + DCHECK_CURRENTLY_ON(BrowserThread::UI); + BrowserThread::PostTask( + BrowserThread::FILE, FROM_HERE, + base::Bind(&SavePassedWordsToDictionaryFileReliably, + custom_dictionary_path_, base::Passed(&load_file_result))); +} + void SpellcheckCustomDictionary::Save(scoped_ptr<Change> dictionary_change) { DCHECK_CURRENTLY_ON(BrowserThread::UI); + fix_invalid_file_.Cancel(); BrowserThread::PostTask( BrowserThread::FILE, FROM_HERE, base::Bind(&SpellcheckCustomDictionary::UpdateDictionaryFile,
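The dictionary repair above is deliberately deferred: OnLoaded() only parks a fix-up task when the loaded file was invalid, BrowserThread::PostAfterStartupTask() runs it once startup work is done, and Save() cancels it because a normal save rewrites a valid file anyway. A rough sketch of that cancel-on-save pattern using base::CancelableClosure; the class and helper below are illustrative and not part of the CL:

#include "base/bind.h"
#include "base/callback.h"
#include "base/cancelable_callback.h"

class DeferredDictionaryRepair {
 public:
  // Called when the loaded dictionary file turned out to be invalid.
  void ScheduleRepairAfterStartup() {
    fix_invalid_file_.Reset(base::Bind(&DeferredDictionaryRepair::FixFile,
                                       base::Unretained(this)));
    PostAfterStartup(fix_invalid_file_.callback());  // Runs later, if at all.
  }

  // Any explicit save supersedes the pending repair.
  void Save() {
    fix_invalid_file_.Cancel();
    // ... write the dictionary and its checksum to disk ...
  }

 private:
  void FixFile() { /* rewrite the dictionary file with sanitized words */ }
  void PostAfterStartup(const base::Closure& task) { /* post |task| */ }

  base::CancelableClosure fix_invalid_file_;
};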
diff --git a/chrome/browser/spellchecker/spellcheck_custom_dictionary.h b/chrome/browser/spellchecker/spellcheck_custom_dictionary.h index e2717929..8926eb4 100644 --- a/chrome/browser/spellchecker/spellcheck_custom_dictionary.h +++ b/chrome/browser/spellchecker/spellcheck_custom_dictionary.h
@@ -8,6 +8,7 @@ #include <set> #include <string> +#include "base/cancelable_callback.h" #include "base/files/file_path.h" #include "base/macros.h" #include "base/memory/scoped_ptr.h" @@ -91,6 +92,22 @@ virtual void OnCustomDictionaryChanged(const Change& dictionary_change) = 0; }; + struct LoadFileResult { + LoadFileResult(); + ~LoadFileResult(); + + // The contents of the custom dictionary file or its backup. Does not + // contain data that failed checksum. Does not contain invalid words. + std::set<std::string> words; + + // True when the custom dictionary file on disk has a valid checksum and + // contains only valid words. + bool is_valid_file; + + private: + DISALLOW_COPY_AND_ASSIGN(LoadFileResult); + }; + // The dictionary will be saved in |dictionary_directory_name|. explicit SpellcheckCustomDictionary( const base::FilePath& dictionary_directory_name); @@ -144,10 +161,9 @@ friend class SpellcheckCustomDictionaryTest; // Returns the list of words in the custom spellcheck dictionary at |path|. - // Makes sure that the custom dictionary file does not have duplicates and - // contains only valid words. Must be called on the FILE thread. The caller - // owns the result. - static scoped_ptr<std::set<std::string>> LoadDictionaryFile( + // Validates that the custom dictionary file does not have duplicates and + // contains only valid words. Must be called on the FILE thread. + static scoped_ptr<LoadFileResult> LoadDictionaryFile( const base::FilePath& path); // Applies the change in |dictionary_change| to the custom spellcheck @@ -157,13 +173,16 @@ const base::FilePath& path); // The reply point for PostTaskAndReplyWithResult, called when - // LoadDictionaryFile finishes reading the dictionary file. Takes ownership of - // |custom_words|. - void OnLoaded(scoped_ptr<std::set<std::string>> custom_words); + // LoadDictionaryFile finishes reading the dictionary file. + void OnLoaded(scoped_ptr<LoadFileResult> result); // Applies the |dictionary_change| to the in-memory copy of the dictionary. void Apply(const Change& dictionary_change); + // Schedules a write of the words in |load_file_result| to disk when the + // custom dictionary file is invalid. + void FixInvalidFile(scoped_ptr<LoadFileResult> load_file_result); + // Schedules a write of |dictionary_change| to disk. Takes ownership of // |dictionary_change| to pass it to the FILE thread. void Save(scoped_ptr<Change> dictionary_change); @@ -195,6 +214,9 @@ // True if the dictionary has been loaded. Otherwise false. bool is_loaded_; + // A post-startup task to fix the invalid custom dictionary file. + base::CancelableClosure fix_invalid_file_; + // Used to create weak pointers for an instance of this class. base::WeakPtrFactory<SpellcheckCustomDictionary> weak_ptr_factory_;
diff --git a/chrome/browser/spellchecker/spellcheck_custom_dictionary_unittest.cc b/chrome/browser/spellchecker/spellcheck_custom_dictionary_unittest.cc index dbd4da5a..e40d5ec02 100644 --- a/chrome/browser/spellchecker/spellcheck_custom_dictionary_unittest.cc +++ b/chrome/browser/spellchecker/spellcheck_custom_dictionary_unittest.cc
@@ -72,7 +72,7 @@ // A wrapper around SpellcheckCustomDictionary::LoadDictionaryFile private // function to avoid a large number of FRIEND_TEST declarations in // SpellcheckCustomDictionary. - scoped_ptr<std::set<std::string>> LoadDictionaryFile( + scoped_ptr<SpellcheckCustomDictionary::LoadFileResult> LoadDictionaryFile( const base::FilePath& path) { return SpellcheckCustomDictionary::LoadDictionaryFile(path); } @@ -91,8 +91,12 @@ // avoid a large number of FRIEND_TEST declarations in // SpellcheckCustomDictionary. void OnLoaded(SpellcheckCustomDictionary& dictionary, - scoped_ptr<std::set<std::string>> custom_words) { - dictionary.OnLoaded(custom_words.Pass()); + scoped_ptr<std::set<std::string>> words) { + scoped_ptr<SpellcheckCustomDictionary::LoadFileResult> result( + new SpellcheckCustomDictionary::LoadFileResult); + result->is_valid_file = true; + result->words = *words; + dictionary.OnLoaded(result.Pass()); } // A wrapper around SpellcheckCustomDictionary::Apply private method to avoid @@ -161,7 +165,7 @@ profile_.GetPath().Append(chrome::kCustomDictionaryFileName); // The custom word list should be empty now. - EXPECT_TRUE(LoadDictionaryFile(path)->empty()); + EXPECT_TRUE(LoadDictionaryFile(path)->words.empty()); scoped_ptr<SpellcheckCustomDictionary::Change> change( new SpellcheckCustomDictionary::Change); @@ -174,14 +178,14 @@ expected.insert("foo"); // The custom word list should include written words. - EXPECT_EQ(expected, *LoadDictionaryFile(path)); + EXPECT_EQ(expected, LoadDictionaryFile(path)->words); scoped_ptr<SpellcheckCustomDictionary::Change> change2( new SpellcheckCustomDictionary::Change); change2->RemoveWord("bar"); change2->RemoveWord("foo"); UpdateDictionaryFile(change2.Pass(), path); - EXPECT_TRUE(LoadDictionaryFile(path)->empty()); + EXPECT_TRUE(LoadDictionaryFile(path)->words.empty()); } TEST_F(SpellcheckCustomDictionaryTest, MultiProfile) { @@ -224,7 +228,7 @@ std::string content; base::WriteFile(path, content.c_str(), content.length()); - EXPECT_TRUE(LoadDictionaryFile(path)->empty()); + EXPECT_TRUE(LoadDictionaryFile(path)->words.empty()); } // Legacy dictionary with two words should be converted to new format dictionary @@ -239,7 +243,7 @@ std::set<std::string> expected; expected.insert("bar"); expected.insert("foo"); - EXPECT_EQ(expected, *LoadDictionaryFile(path)); + EXPECT_EQ(expected, LoadDictionaryFile(path)->words); } // Illegal words should be removed. Leading and trailing whitespace should be @@ -257,7 +261,7 @@ expected.insert("bar"); expected.insert("foo"); expected.insert("foo bar"); - EXPECT_EQ(expected, *LoadDictionaryFile(path)); + EXPECT_EQ(expected, LoadDictionaryFile(path)->words); } // Write to dictionary should backup previous version and write the word to the @@ -272,7 +276,7 @@ std::set<std::string> expected; expected.insert("bar"); expected.insert("foo"); - EXPECT_EQ(expected, *LoadDictionaryFile(path)); + EXPECT_EQ(expected, LoadDictionaryFile(path)->words); scoped_ptr<SpellcheckCustomDictionary::Change> change( new SpellcheckCustomDictionary::Change); @@ -282,7 +286,7 @@ base::ReadFileToString(path, &content); content.append("corruption"); base::WriteFile(path, content.c_str(), content.length()); - EXPECT_EQ(expected, *LoadDictionaryFile(path)); + EXPECT_EQ(expected, LoadDictionaryFile(path)->words); } TEST_F(SpellcheckCustomDictionaryTest, @@ -1179,7 +1183,7 @@ // Load the dictionary which should be empty. 
base::FilePath path = profile_.GetPath().Append(chrome::kCustomDictionaryFileName); - EXPECT_TRUE(LoadDictionaryFile(path)->empty()); + EXPECT_TRUE(LoadDictionaryFile(path)->words.empty()); // We expect there to be an entry with 0. histogram = @@ -1197,7 +1201,7 @@ UpdateDictionaryFile(change.Pass(), path); // Load the dictionary again and it should have 2 entries. - EXPECT_EQ(2u, LoadDictionaryFile(path)->size()); + EXPECT_EQ(2u, LoadDictionaryFile(path)->words.size()); histogram = StatisticsRecorder::FindHistogram("SpellCheck.CustomWords");
diff --git a/chrome/browser/sync/profile_sync_components_factory_impl.cc b/chrome/browser/sync/profile_sync_components_factory_impl.cc index 442197e9..207a0a7 100644 --- a/chrome/browser/sync/profile_sync_components_factory_impl.cc +++ b/chrome/browser/sync/profile_sync_components_factory_impl.cc
@@ -11,7 +11,6 @@
 #include "chrome/browser/profiles/profile.h"
 #include "chrome/browser/sync/glue/autofill_data_type_controller.h"
 #include "chrome/browser/sync/glue/autofill_profile_data_type_controller.h"
-#include "chrome/browser/sync/glue/autofill_wallet_data_type_controller.h"
 #include "chrome/browser/sync/glue/bookmark_change_processor.h"
 #include "chrome/browser/sync/glue/bookmark_data_type_controller.h"
 #include "chrome/browser/sync/glue/bookmark_model_associator.h"
@@ -29,6 +28,7 @@
 #include "chrome/browser/sync/sessions/session_data_type_controller.h"
 #include "chrome/common/chrome_switches.h"
 #include "chrome/common/pref_names.h"
+#include "components/autofill/core/browser/autofill_wallet_data_type_controller.h"
 #include "components/autofill/core/common/autofill_pref_names.h"
 #include "components/autofill/core/common/autofill_switches.h"
 #include "components/dom_distiller/core/dom_distiller_features.h"
@@ -187,7 +187,10 @@
   if (!wallet_disabled) {
     sync_service->RegisterDataTypeController(
         new browser_sync::AutofillWalletDataTypeController(
-            sync_client, syncer::AUTOFILL_WALLET_DATA));
+            BrowserThread::GetMessageLoopProxyForThread(BrowserThread::UI),
+            BrowserThread::GetMessageLoopProxyForThread(BrowserThread::DB),
+            base::Bind(&ChromeReportUnrecoverableError), sync_client,
+            syncer::AUTOFILL_WALLET_DATA));
   }

   // Wallet metadata sync depends on Wallet data sync and is disabled by
@@ -196,7 +199,10 @@
   if (!wallet_disabled && enabled_types.Has(syncer::AUTOFILL_WALLET_METADATA)) {
     sync_service->RegisterDataTypeController(
         new browser_sync::AutofillWalletDataTypeController(
-            sync_client, syncer::AUTOFILL_WALLET_METADATA));
+            BrowserThread::GetMessageLoopProxyForThread(BrowserThread::UI),
+            BrowserThread::GetMessageLoopProxyForThread(BrowserThread::DB),
+            base::Bind(&ChromeReportUnrecoverableError), sync_client,
+            syncer::AUTOFILL_WALLET_METADATA));
   }

   // Bookmark sync is enabled by default. Register unless explicitly
diff --git a/chrome/browser/sync_file_system/drive_backend/local_to_remote_syncer.cc b/chrome/browser/sync_file_system/drive_backend/local_to_remote_syncer.cc index 7280f5e..220d384b7 100644 --- a/chrome/browser/sync_file_system/drive_backend/local_to_remote_syncer.cc +++ b/chrome/browser/sync_file_system/drive_backend/local_to_remote_syncer.cc
@@ -148,9 +148,9 @@ } else if (active_ancestor_path != path) { if (!active_ancestor_path.AppendRelativePath(path, &missing_entries)) { NOTREACHED(); - token->RecordLog(base::StringPrintf( - "Detected invalid ancestor: %s", - active_ancestor_path.value().c_str())); + token->RecordLog( + base::StringPrintf("Detected invalid ancestor: %" PRIsFP, + active_ancestor_path.value().c_str())); SyncTaskManager::NotifyTaskDone(token.Pass(), SYNC_STATUS_FAILED); return; }
diff --git a/chrome/browser/tracing/chrome_tracing_delegate.cc b/chrome/browser/tracing/chrome_tracing_delegate.cc index bc333821..5126f9222 100644 --- a/chrome/browser/tracing/chrome_tracing_delegate.cc +++ b/chrome/browser/tracing/chrome_tracing_delegate.cc
@@ -15,6 +15,7 @@ #include "chrome/browser/ui/browser_list.h" #include "chrome/browser/ui/browser_otr_state.h" #include "chrome/common/pref_names.h" +#include "components/variations/active_field_trials.h" #include "content/public/browser/background_tracing_config.h" #include "content/public/browser/browser_thread.h" @@ -129,3 +130,16 @@ return true; } + +void ChromeTracingDelegate::GenerateMetadataDict( + base::DictionaryValue* metadata_dict) { + DCHECK(metadata_dict); + std::vector<std::string> variations; + variations::GetFieldTrialActiveGroupIdsAsStrings(&variations); + + scoped_ptr<base::ListValue> variations_list(new base::ListValue()); + for (const auto& it : variations) + variations_list->Append(new base::StringValue(it)); + + metadata_dict->Set("field-trials", variations_list.Pass()); +}
diff --git a/chrome/browser/tracing/chrome_tracing_delegate.h b/chrome/browser/tracing/chrome_tracing_delegate.h index 99964df1..9f5dcb6 100644 --- a/chrome/browser/tracing/chrome_tracing_delegate.h +++ b/chrome/browser/tracing/chrome_tracing_delegate.h
@@ -29,6 +29,8 @@ const content::BackgroundTracingConfig& config, bool requires_anonymized_data) override; + void GenerateMetadataDict(base::DictionaryValue* metadata_dict) override; + private: // chrome::BrowserListObserver implementation. void OnBrowserAdded(Browser* browser) override;
diff --git a/chrome/browser/ui/ash/chrome_new_window_delegate_chromeos.cc b/chrome/browser/ui/ash/chrome_new_window_delegate_chromeos.cc index 8aad0a4..95633f6b 100644 --- a/chrome/browser/ui/ash/chrome_new_window_delegate_chromeos.cc +++ b/chrome/browser/ui/ash/chrome_new_window_delegate_chromeos.cc
@@ -4,7 +4,7 @@ #include "chrome/browser/ui/ash/chrome_new_window_delegate_chromeos.h" -#include "ash/keyboard_overlay/keyboard_overlay_view.h" +#include "ash/content/keyboard_overlay/keyboard_overlay_view.h" #include "chrome/browser/chromeos/file_manager/app_id.h" #include "chrome/browser/extensions/api/terminal/terminal_extension_helper.h" #include "chrome/browser/extensions/extension_service.h"
diff --git a/chrome/browser/ui/ash/multi_user/multi_user_window_manager_chromeos_unittest.cc b/chrome/browser/ui/ash/multi_user/multi_user_window_manager_chromeos_unittest.cc index 8caccfe..7cf0ea87 100644 --- a/chrome/browser/ui/ash/multi_user/multi_user_window_manager_chromeos_unittest.cc +++ b/chrome/browser/ui/ash/multi_user/multi_user_window_manager_chromeos_unittest.cc
@@ -263,7 +263,7 @@ void MultiUserWindowManagerChromeOSTest::SetUp() { ash_test_helper()->set_test_shell_delegate(new TestShellDelegateChromeOS); - ash_test_helper()->set_content_state(new TestShellContentState); + ash_test_helper()->set_content_state(new ::TestShellContentState); AshTestBase::SetUp(); session_state_delegate_ = static_cast<TestSessionStateDelegate*>( ash::Shell::GetInstance()->session_state_delegate());
diff --git a/chrome/browser/ui/cocoa/bookmarks/bookmark_bubble_controller.h b/chrome/browser/ui/cocoa/bookmarks/bookmark_bubble_controller.h index 9da88dfe..e9e73f5 100644 --- a/chrome/browser/ui/cocoa/bookmarks/bookmark_bubble_controller.h +++ b/chrome/browser/ui/cocoa/bookmarks/bookmark_bubble_controller.h
@@ -8,6 +8,7 @@ #include "base/memory/scoped_ptr.h" #import "chrome/browser/ui/cocoa/base_bubble_controller.h" #import "chrome/browser/ui/cocoa/bookmarks/bookmark_model_observer_for_cocoa.h" +#import "chrome/browser/ui/cocoa/has_weak_browser_pointer.h" @class BookmarkBubbleController; @class BookmarkSyncPromoController; @@ -23,7 +24,8 @@ // bubble that pops up when clicking on the STAR next to the URL to // add or remove it as a bookmark. This bubble allows for editing of // the bookmark in various ways (name, folder, etc.) -@interface BookmarkBubbleController : BaseBubbleController { +@interface BookmarkBubbleController + : BaseBubbleController<HasWeakBrowserPointer> { @private // |managed_|, |model_| and |node_| are weak and owned by the current // browser's profile.
diff --git a/chrome/browser/ui/cocoa/bookmarks/bookmark_bubble_controller.mm b/chrome/browser/ui/cocoa/bookmarks/bookmark_bubble_controller.mm index e1d41ee..f7fe68dd 100644 --- a/chrome/browser/ui/cocoa/bookmarks/bookmark_bubble_controller.mm +++ b/chrome/browser/ui/cocoa/bookmarks/bookmark_bubble_controller.mm
@@ -104,6 +104,10 @@ } } +- (void)browserWillBeDestroyed { + bookmarkBubbleObserver_ = nullptr; +} + - (void)notifyBubbleClosed { if (!bookmarkBubbleObserver_) return;
diff --git a/chrome/browser/ui/cocoa/browser_window_controller.mm b/chrome/browser/ui/cocoa/browser_window_controller.mm index 6249bad..a0154399 100644 --- a/chrome/browser/ui/cocoa/browser_window_controller.mm +++ b/chrome/browser/ui/cocoa/browser_window_controller.mm
@@ -455,6 +455,7 @@ [downloadShelfController_ browserWillBeDestroyed]; [bookmarkBarController_ browserWillBeDestroyed]; [avatarButtonController_ browserWillBeDestroyed]; + [bookmarkBubbleController_ browserWillBeDestroyed]; [super dealloc]; }
diff --git a/chrome/browser/ui/search/search_tab_helper_unittest.cc b/chrome/browser/ui/search/search_tab_helper_unittest.cc index af2d27fc..29469cb 100644 --- a/chrome/browser/ui/search/search_tab_helper_unittest.cc +++ b/chrome/browser/ui/search/search_tab_helper_unittest.cc
@@ -29,7 +29,6 @@ #include "chrome/test/base/browser_with_test_window_test.h" #include "chrome/test/base/chrome_render_view_host_test_harness.h" #include "chrome/test/base/testing_profile.h" -#include "chrome/test/base/ui_test_utils.h" #include "components/omnibox/common/omnibox_focus_state.h" #include "components/search_engines/template_url_service.h" #include "content/public/browser/navigation_controller.h"
diff --git a/chrome/browser/ui/tab_helpers.cc b/chrome/browser/ui/tab_helpers.cc index d49e7a2..efe0627 100644 --- a/chrome/browser/ui/tab_helpers.cc +++ b/chrome/browser/ui/tab_helpers.cc
@@ -163,8 +163,11 @@
   ManagePasswordsUIController::CreateForWebContents(web_contents);
   NavigationCorrectionTabObserver::CreateForWebContents(web_contents);
   NavigationMetricsRecorder::CreateForWebContents(web_contents);
+  // g_browser_process->rappor_service() is either null or shares the
+  // lifetime of the BrowserProcess, so it is guaranteed to outlive
+  // |web_contents|.
   page_load_metrics::MetricsWebContentsObserver::CreateForWebContents(
-      web_contents);
+      web_contents, g_browser_process->rappor_service());
   PopupBlockerTabHelper::CreateForWebContents(web_contents);
   PrefsTabHelper::CreateForWebContents(web_contents);
   prerender::PrerenderTabHelper::CreateForWebContents(web_contents);
diff --git a/chrome/browser/ui/views/desktop_media_picker_views.cc b/chrome/browser/ui/views/desktop_media_picker_views.cc index b2975b2..7c438b2 100644 --- a/chrome/browser/ui/views/desktop_media_picker_views.cc +++ b/chrome/browser/ui/views/desktop_media_picker_views.cc
@@ -206,6 +206,7 @@ media_list_(media_list.Pass()), weak_factory_(this) { media_list_->SetThumbnailSize(gfx::Size(kThumbnailWidth, kThumbnailHeight)); + SetFocusable(true); } DesktopMediaListView::~DesktopMediaListView() {} @@ -495,6 +496,10 @@ return true; } +views::View* DesktopMediaPickerDialogView::GetInitiallyFocusedView() { + return list_view_; +} + base::string16 DesktopMediaPickerDialogView::GetDialogButtonLabel( ui::DialogButton button) const { return l10n_util::GetStringUTF16(button == ui::DIALOG_BUTTON_OK ? @@ -543,6 +548,11 @@ GetWidget()->CenterWindow(gfx::Size(widget_bound.width(), new_height)); } +DesktopMediaListView* DesktopMediaPickerDialogView::GetMediaListViewForTesting() + const { + return list_view_; +} + DesktopMediaSourceView* DesktopMediaPickerDialogView::GetMediaSourceViewForTesting(int index) const { if (list_view_->child_count() <= index)
diff --git a/chrome/browser/ui/views/desktop_media_picker_views.h b/chrome/browser/ui/views/desktop_media_picker_views.h index 3552dba..5a978e75 100644 --- a/chrome/browser/ui/views/desktop_media_picker_views.h +++ b/chrome/browser/ui/views/desktop_media_picker_views.h
@@ -133,12 +133,14 @@ ui::ModalType GetModalType() const override; base::string16 GetWindowTitle() const override; bool IsDialogButtonEnabled(ui::DialogButton button) const override; + views::View* GetInitiallyFocusedView() override; base::string16 GetDialogButtonLabel(ui::DialogButton button) const override; bool Accept() override; void DeleteDelegate() override; void OnMediaListRowsChanged(); + DesktopMediaListView* GetMediaListViewForTesting() const; DesktopMediaSourceView* GetMediaSourceViewForTesting(int index) const; private:
diff --git a/chrome/browser/ui/views/desktop_media_picker_views_unittest.cc b/chrome/browser/ui/views/desktop_media_picker_views_unittest.cc index d43e36c9..a5003a2 100644 --- a/chrome/browser/ui/views/desktop_media_picker_views_unittest.cc +++ b/chrome/browser/ui/views/desktop_media_picker_views_unittest.cc
@@ -87,6 +87,38 @@ base::RunLoop().RunUntilIdle(); } +// Verifies that a MediaSourceView is selected with mouse left click and +// original selected MediaSourceView gets unselected. +TEST_F(DesktopMediaPickerViewsTest, SelectMediaSourceViewOnSingleClick) { + media_list_->AddSource(0); + media_list_->AddSource(1); + + DesktopMediaSourceView* source_view_0 = + GetPickerDialogView()->GetMediaSourceViewForTesting(0); + + DesktopMediaSourceView* source_view_1 = + GetPickerDialogView()->GetMediaSourceViewForTesting(1); + + // Both media source views are not selected initially. + EXPECT_FALSE(source_view_0->is_selected()); + EXPECT_FALSE(source_view_1->is_selected()); + + // Source view 0 is selected with mouse click. + ui::MouseEvent press(ui::ET_MOUSE_PRESSED, gfx::Point(), gfx::Point(), + ui::EventTimeForNow(), ui::EF_LEFT_MOUSE_BUTTON, 0); + + GetPickerDialogView()->GetMediaSourceViewForTesting(0)->OnMousePressed(press); + + EXPECT_TRUE(source_view_0->is_selected()); + EXPECT_FALSE(source_view_1->is_selected()); + + // Source view 1 is selected and source view 0 is unselected with mouse click. + GetPickerDialogView()->GetMediaSourceViewForTesting(1)->OnMousePressed(press); + + EXPECT_FALSE(source_view_0->is_selected()); + EXPECT_TRUE(source_view_1->is_selected()); +} + TEST_F(DesktopMediaPickerViewsTest, DoneCallbackCalledOnDoubleClick) { const int kFakeId = 222; EXPECT_CALL(*this, @@ -185,4 +217,9 @@ GetPickerDialogView()->IsDialogButtonEnabled(ui::DIALOG_BUTTON_OK)); } +// Verifies that the MediaListView get the initial focus. +TEST_F(DesktopMediaPickerViewsTest, ListViewHasInitialFocus) { + EXPECT_TRUE(GetPickerDialogView()->GetMediaListViewForTesting()->HasFocus()); +} + } // namespace views
diff --git a/chrome/browser/ui/views/frame/browser_non_client_frame_view_ash.cc b/chrome/browser/ui/views/frame/browser_non_client_frame_view_ash.cc index e8b68fa5..72bfc76 100644 --- a/chrome/browser/ui/views/frame/browser_non_client_frame_view_ash.cc +++ b/chrome/browser/ui/views/frame/browser_non_client_frame_view_ash.cc
@@ -420,7 +420,7 @@ } /////////////////////////////////////////////////////////////////////////////// -// chrome::TabIconViewModel: +// TabIconViewModel: bool BrowserNonClientFrameViewAsh::ShouldTabIconViewAnimate() const { // This function is queried during the creation of the window as the
diff --git a/chrome/browser/ui/views/frame/browser_non_client_frame_view_ash.h b/chrome/browser/ui/views/frame/browser_non_client_frame_view_ash.h index dce0c27..708ada0b 100644 --- a/chrome/browser/ui/views/frame/browser_non_client_frame_view_ash.h +++ b/chrome/browser/ui/views/frame/browser_non_client_frame_view_ash.h
@@ -29,7 +29,7 @@ class BrowserNonClientFrameViewAsh : public BrowserNonClientFrameView, public ash::ShellObserver, - public chrome::TabIconViewModel, + public TabIconViewModel, public views::ButtonListener { public: static const char kViewClassName[]; @@ -70,7 +70,7 @@ void OnMaximizeModeStarted() override; void OnMaximizeModeEnded() override; - // chrome::TabIconViewModel: + // TabIconViewModel: bool ShouldTabIconViewAnimate() const override; gfx::ImageSkia GetFaviconForTabIconView() override;
diff --git a/chrome/browser/ui/views/frame/opaque_browser_frame_view.h b/chrome/browser/ui/views/frame/opaque_browser_frame_view.h index 4e3ef389..c9d896c 100644 --- a/chrome/browser/ui/views/frame/opaque_browser_frame_view.h +++ b/chrome/browser/ui/views/frame/opaque_browser_frame_view.h
@@ -30,7 +30,7 @@ class OpaqueBrowserFrameView : public BrowserNonClientFrameView, public views::ButtonListener, public views::MenuButtonListener, - public chrome::TabIconViewModel, + public TabIconViewModel, public OpaqueBrowserFrameViewLayoutDelegate { public: // Constructs a non-client view for an BrowserFrame. @@ -65,7 +65,7 @@ void OnMenuButtonClicked(views::View* source, const gfx::Point& point) override; - // chrome::TabIconViewModel: + // TabIconViewModel: bool ShouldTabIconViewAnimate() const override; gfx::ImageSkia GetFaviconForTabIconView() override;
diff --git a/chrome/browser/ui/views/layout_constants.cc b/chrome/browser/ui/views/layout_constants.cc index 66e1a1f..2414aba 100644 --- a/chrome/browser/ui/views/layout_constants.cc +++ b/chrome/browser/ui/views/layout_constants.cc
@@ -8,9 +8,10 @@ #include "ui/base/resource/material_design/material_design_controller.h" int GetLayoutConstant(LayoutConstant constant) { + const int kIconLabelViewInternalPadding[] = {3, 2, 2}; const int kIconLabelViewTrailingPadding[] = {2, 8, 8}; const int kLocationBarBubbleHorizontalPadding[] = {1, 5, 5}; - const int kLocationBarBubbleVerticalPadding[] = {1, 3, 3}; + const int kLocationBarBubbleVerticalPadding[] = {1, 5, 5}; const int kLocationBarHeight[] = {0, 28, 32}; const int kLocationBarHorizontalPadding[] = {3, 6, 6}; const int kLocationBarVerticalPadding[] = {2, 2, 2}; @@ -35,6 +36,8 @@ const int mode = ui::MaterialDesignController::GetMode(); switch (constant) { + case ICON_LABEL_VIEW_INTERNAL_PADDING: + return kIconLabelViewInternalPadding[mode]; case ICON_LABEL_VIEW_TRAILING_PADDING: return kIconLabelViewTrailingPadding[mode]; case LOCATION_BAR_BUBBLE_HORIZONTAL_PADDING:
diff --git a/chrome/browser/ui/views/layout_constants.h b/chrome/browser/ui/views/layout_constants.h index 217158f4..3c14b98 100644 --- a/chrome/browser/ui/views/layout_constants.h +++ b/chrome/browser/ui/views/layout_constants.h
@@ -8,6 +8,9 @@ #include "ui/gfx/geometry/insets.h" enum LayoutConstant { + // Horizontal padding applied between items of icon-label views. + ICON_LABEL_VIEW_INTERNAL_PADDING, + // Additional horizontal padding applied on the trailing edge of icon-label // views. ICON_LABEL_VIEW_TRAILING_PADDING,
diff --git a/chrome/browser/ui/views/location_bar/icon_label_bubble_view.cc b/chrome/browser/ui/views/location_bar/icon_label_bubble_view.cc index ed09f59..3e8a3e64 100644 --- a/chrome/browser/ui/views/location_bar/icon_label_bubble_view.cc +++ b/chrome/browser/ui/views/location_bar/icon_label_bubble_view.cc
@@ -105,7 +105,7 @@ GetBubbleOuterPadding(!is_extension_icon_)), 0, image_->GetPreferredSize().width(), height()); - const int padding = GetLayoutConstant(LOCATION_BAR_HORIZONTAL_PADDING); + const int padding = GetLayoutConstant(ICON_LABEL_VIEW_INTERNAL_PADDING); int pre_label_width = GetBubbleOuterPadding(true) + (image_width ? (image_width + padding) : 0); label_->SetBounds(pre_label_width, 0, @@ -117,7 +117,7 @@ gfx::Size size(image_->GetPreferredSize()); if (ShouldShowBackground()) { const int image_width = image_->GetPreferredSize().width(); - const int padding = GetLayoutConstant(LOCATION_BAR_HORIZONTAL_PADDING); + const int padding = GetLayoutConstant(ICON_LABEL_VIEW_INTERNAL_PADDING); const int non_label_width = GetBubbleOuterPadding(true) + (image_width ? (image_width + padding) : 0) +
diff --git a/chrome/browser/ui/views/location_bar/location_bar_view.cc b/chrome/browser/ui/views/location_bar/location_bar_view.cc index 810fac6d..c39d2fc 100644 --- a/chrome/browser/ui/views/location_bar/location_bar_view.cc +++ b/chrome/browser/ui/views/location_bar/location_bar_view.cc
@@ -215,17 +215,20 @@ // Determine the font for use inside the bubbles. The bubble background // images have 1 px thick edges, which we don't want to overlap. const int kBubbleInteriorVerticalPadding = 1; - const int bubble_vertical_padding = - (GetLayoutConstant(LOCATION_BAR_BUBBLE_VERTICAL_PADDING) + - kBubbleInteriorVerticalPadding) * 2; - const gfx::FontList bubble_font_list(font_list.DeriveWithHeightUpperBound( - location_height - bubble_vertical_padding)); + const int bubble_padding = + GetVerticalEdgeThickness() + + GetLayoutConstant(LOCATION_BAR_BUBBLE_VERTICAL_PADDING) + + kBubbleInteriorVerticalPadding; + const int bubble_height = GetPreferredSize().height() - (bubble_padding * 2); + gfx::FontList bubble_font_list = + font_list.DeriveWithHeightUpperBound(bubble_height); const SkColor background_color = GetColor(SecurityStateModel::NONE, LocationBarView::BACKGROUND); - ev_bubble_view_ = new EVBubbleView( - bubble_font_list, GetColor(SecurityStateModel::EV_SECURE, SECURITY_TEXT), - background_color, this); + const SkColor ev_text_color = + GetColor(SecurityStateModel::EV_SECURE, SECURITY_TEXT); + ev_bubble_view_ = + new EVBubbleView(bubble_font_list, ev_text_color, background_color, this); ev_bubble_view_->set_drag_controller(this); AddChildView(ev_bubble_view_); @@ -250,9 +253,12 @@ ime_inline_autocomplete_view_->SetVisible(false); AddChildView(ime_inline_autocomplete_view_); - const SkColor text_color = GetColor(SecurityStateModel::NONE, TEXT); + const SkColor selected_text_color = GetColor( + SecurityStateModel::NONE, ui::MaterialDesignController::IsModeMaterial() + ? KEYWORD_SEARCH_TEXT + : TEXT); selected_keyword_view_ = new SelectedKeywordView( - bubble_font_list, text_color, background_color, profile()); + bubble_font_list, selected_text_color, background_color, profile()); AddChildView(selected_keyword_view_); suggested_text_view_ = new views::Label(base::string16(), font_list); @@ -283,6 +289,7 @@ mic_search_view_->SetVisible(false); AddChildView(mic_search_view_); + const SkColor text_color = GetColor(SecurityStateModel::NONE, TEXT); for (ContentSettingsType type : ContentSettingBubbleModel::GetSupportedBubbleTypes()) { ContentSettingImageView* content_blocked_view = new ContentSettingImageView( @@ -348,7 +355,10 @@ switch (security_level) { case SecurityStateModel::EV_SECURE: case SecurityStateModel::SECURE: - color = SkColorSetRGB(7, 149, 0); + if (ui::MaterialDesignController::IsModeMaterial()) + color = SkColorSetRGB(11, 128, 67); + else + color = SkColorSetRGB(7, 149, 0); break; case SecurityStateModel::SECURITY_POLICY_WARNING: @@ -371,6 +381,9 @@ color, GetColor(security_level, BACKGROUND)); } + case KEYWORD_SEARCH_TEXT: + return SkColorSetRGB(51, 103, 214); + default: NOTREACHED(); return GetColor(security_level, TEXT); @@ -860,6 +873,9 @@ } int LocationBarView::GetVerticalEdgeThickness() const { + // In Material Design vertical layout disregards the border. + if (ui::MaterialDesignController::IsModeMaterial()) + return 0; return is_popup_mode_ ? kPopupEdgeThickness : kNormalEdgeThickness; }
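With Material Design enabled the vertical edge thickness drops to zero, so the bubble font budget comes entirely from the padding constants. A worked example of the new height computation, assuming GetPreferredSize().height() matches LOCATION_BAR_HEIGHT and taking the middle entries of the tables in layout_constants.cc above as the Material Design "normal" values; illustrative only, not part of the CL:

// Material Design "normal" mode values assumed from this CL.
constexpr int kLocationBarHeightMd = 28;     // LOCATION_BAR_HEIGHT
constexpr int kEdgeThicknessMd = 0;          // GetVerticalEdgeThickness()
constexpr int kBubbleVerticalPaddingMd = 5;  // LOCATION_BAR_BUBBLE_VERTICAL_PADDING
constexpr int kBubbleInteriorVerticalPadding = 1;
constexpr int kBubblePadding =
    kEdgeThicknessMd + kBubbleVerticalPaddingMd + kBubbleInteriorVerticalPadding;
// The bubble font is derived with an upper bound of 28 - 2 * 6 = 16px.
static_assert(kLocationBarHeightMd - 2 * kBubblePadding == 16,
              "bubble font height upper bound in MD normal mode");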
diff --git a/chrome/browser/ui/views/location_bar/location_bar_view.h b/chrome/browser/ui/views/location_bar/location_bar_view.h index a5d1127..a88a1d64 100644 --- a/chrome/browser/ui/views/location_bar/location_bar_view.h +++ b/chrome/browser/ui/views/location_bar/location_bar_view.h
@@ -121,6 +121,7 @@ SELECTED_TEXT, DEEMPHASIZED_TEXT, SECURITY_TEXT, + KEYWORD_SEARCH_TEXT, }; LocationBarView(Browser* browser,
diff --git a/chrome/browser/ui/views/panels/panel_frame_view.h b/chrome/browser/ui/views/panels/panel_frame_view.h index 3fc00d34..e3a1143 100644 --- a/chrome/browser/ui/views/panels/panel_frame_view.h +++ b/chrome/browser/ui/views/panels/panel_frame_view.h
@@ -20,7 +20,7 @@ class PanelFrameView : public views::NonClientFrameView, public views::ButtonListener, - public chrome::TabIconViewModel { + public TabIconViewModel { public: enum PaintState { PAINT_AS_INACTIVE, @@ -81,7 +81,7 @@ // Overridden from views::ButtonListener: void ButtonPressed(views::Button* sender, const ui::Event& event) override; - // Overridden from chrome::TabIconViewModel: + // Overridden from TabIconViewModel: bool ShouldTabIconViewAnimate() const override; gfx::ImageSkia GetFaviconForTabIconView() override;
diff --git a/chrome/browser/ui/views/status_bubble_views.cc b/chrome/browser/ui/views/status_bubble_views.cc index bee00e8..2c5526f 100644 --- a/chrome/browser/ui/views/status_bubble_views.cc +++ b/chrome/browser/ui/views/status_bubble_views.cc
@@ -620,6 +620,8 @@ void StatusBubbleViews::RepositionPopup() { if (popup_.get()) { gfx::Point top_left; + // TODO(flackr): Get the non-transformed point so that the status bubble + // popup window's position is consistent with the base_view_'s window. views::View::ConvertPointToScreen(base_view_, &top_left); popup_->SetBounds(gfx::Rect(top_left.x() + position_.x(),
diff --git a/chrome/browser/ui/views/tab_icon_view.cc b/chrome/browser/ui/views/tab_icon_view.cc index 1c0e836..1422d96b 100644 --- a/chrome/browser/ui/views/tab_icon_view.cc +++ b/chrome/browser/ui/views/tab_icon_view.cc
@@ -54,7 +54,7 @@ } } -TabIconView::TabIconView(chrome::TabIconViewModel* model, +TabIconView::TabIconView(TabIconViewModel* model, views::MenuButtonListener* listener) : views::MenuButton(NULL, base::string16(), listener, false), model_(model),
diff --git a/chrome/browser/ui/views/tab_icon_view.h b/chrome/browser/ui/views/tab_icon_view.h index e7de72a54..e5985eda 100644 --- a/chrome/browser/ui/views/tab_icon_view.h +++ b/chrome/browser/ui/views/tab_icon_view.h
@@ -5,15 +5,11 @@ #ifndef CHROME_BROWSER_UI_VIEWS_TAB_ICON_VIEW_H_ #define CHROME_BROWSER_UI_VIEWS_TAB_ICON_VIEW_H_ -#include "base/basictypes.h" -#include "base/compiler_specific.h" +#include "base/macros.h" #include "base/time/time.h" #include "ui/views/controls/button/menu_button.h" -#include "ui/views/view.h" -namespace chrome { class TabIconViewModel; -} namespace gfx { class ImageSkia; @@ -24,7 +20,7 @@ public: static void InitializeIfNeeded(); - TabIconView(chrome::TabIconViewModel* model, + TabIconView(TabIconViewModel* model, views::MenuButtonListener* menu_button_listener); ~TabIconView() override; @@ -44,7 +40,7 @@ void PaintFavicon(gfx::Canvas* canvas, const gfx::ImageSkia& image); // Our model. - chrome::TabIconViewModel* model_; + TabIconViewModel* model_; // Whether we should display our light or dark style. bool is_light_;
diff --git a/chrome/browser/ui/views/tab_icon_view_model.h b/chrome/browser/ui/views/tab_icon_view_model.h index ae3ab2b..2c0a30a 100644 --- a/chrome/browser/ui/views/tab_icon_view_model.h +++ b/chrome/browser/ui/views/tab_icon_view_model.h
@@ -9,8 +9,6 @@ class ImageSkia; } -namespace chrome { - // Classes implement this interface to provide state for the TabIconView. class TabIconViewModel { public: @@ -24,6 +22,4 @@ virtual ~TabIconViewModel() {} }; -} // namespace chrome - #endif // CHROME_BROWSER_UI_VIEWS_TAB_ICON_VIEW_MODEL_H_
diff --git a/chrome/browser/ui/webui/chromeos/login/gaia_screen_handler.cc b/chrome/browser/ui/webui/chromeos/login/gaia_screen_handler.cc index 26251aa..92adcb5 100644 --- a/chrome/browser/ui/webui/chromeos/login/gaia_screen_handler.cc +++ b/chrome/browser/ui/webui/chromeos/login/gaia_screen_handler.cc
@@ -403,16 +403,15 @@ void GaiaScreenHandler::OnPortalDetectionCompleted( const NetworkState* network, const NetworkPortalDetector::CaptivePortalState& state) { - VLOG(1) << "OnPortalDetectionCompleted " << state.status; + VLOG(1) << "OnPortalDetectionCompleted " + << NetworkPortalDetector::CaptivePortalStatusString(state.status); - NetworkPortalDetector::CaptivePortalStatus status = state.status; + const NetworkPortalDetector::CaptivePortalStatus status = state.status; if (status == captive_portal_status_) return; - // Only consider online/portal status. - if (status == NetworkPortalDetector::CAPTIVE_PORTAL_STATUS_ONLINE || - status == NetworkPortalDetector::CAPTIVE_PORTAL_STATUS_PORTAL) { - captive_portal_status_ = status; + captive_portal_status_ = status; + if (signin_screen_handler_->ShouldLoadGaia()) { LoadAuthExtension(true /* force */, true /* silent_load */, false /* offline */); } @@ -841,15 +840,15 @@ } void GaiaScreenHandler::MaybePreloadAuthExtension() { - VLOG(1) << "MaybePreloadAuthExtension() call."; + VLOG(1) << "MaybePreloadAuthExtension"; if (!network_portal_detector_) { NetworkPortalDetectorImpl* detector = new NetworkPortalDetectorImpl( g_browser_process->system_request_context(), false); detector->set_portal_test_url(GURL(kRestrictiveProxyURL)); network_portal_detector_.reset(detector); + network_portal_detector_->AddObserver(this); network_portal_detector_->Enable(true); - network_portal_detector_->AddAndFireObserver(this); } // If cookies clearing was initiated or |dns_clear_task_running_| then auth @@ -878,6 +877,9 @@ void GaiaScreenHandler::LoadAuthExtension(bool force, bool silent_load, bool offline) { + VLOG(1) << "LoadAuthExtension, force: " << force + << ", silent_load: " << silent_load + << ", offline: " << offline; GaiaContext context; context.force_reload = force; context.is_local = offline; @@ -916,7 +918,9 @@ bool GaiaScreenHandler::IsRestrictiveProxy() const { return captive_portal_status_ == - NetworkPortalDetector::CAPTIVE_PORTAL_STATUS_PORTAL; + NetworkPortalDetector::CAPTIVE_PORTAL_STATUS_PORTAL || + captive_portal_status_ == + NetworkPortalDetector::CAPTIVE_PORTAL_STATUS_OFFLINE; } } // namespace chromeos
diff --git a/chrome/browser/ui/webui/settings/md_settings_localized_strings_provider.cc b/chrome/browser/ui/webui/settings/md_settings_localized_strings_provider.cc index edb16cf..ecc1cf6 100644 --- a/chrome/browser/ui/webui/settings/md_settings_localized_strings_provider.cc +++ b/chrome/browser/ui/webui/settings/md_settings_localized_strings_provider.cc
@@ -452,8 +452,9 @@ } void AddSyncStrings(content::WebUIDataSource* html_source) { - html_source->AddLocalizedString("syncPageTitle", - IDS_SETTINGS_SYNC); + html_source->AddLocalizedString("syncPageTitle", IDS_SETTINGS_SYNC); + html_source->AddLocalizedString("syncLoading", IDS_SETTINGS_SYNC_LOADING); + html_source->AddLocalizedString("syncTimeout", IDS_SETTINGS_SYNC_TIMEOUT); html_source->AddLocalizedString("syncEverythingMenuOption", IDS_SETTINGS_SYNC_EVERYTHING_MENU_OPTION); html_source->AddLocalizedString("chooseWhatToSyncMenuOption", @@ -496,6 +497,20 @@ IDS_SETTINGS_CANCEL_BUTTON); html_source->AddLocalizedString("okButton", IDS_SETTINGS_OK_BUTTON); + html_source->AddLocalizedString("passphraseExplanationText", + IDS_SETTINGS_PASSPHRASE_EXPLANATION_TEXT); + html_source->AddLocalizedString("emptyPassphraseError", + IDS_SETTINGS_EMPTY_PASSPHRASE_ERROR); + html_source->AddLocalizedString("mismatchedPassphraseError", + IDS_SETTINGS_MISMATCHED_PASSPHRASE_ERROR); + html_source->AddLocalizedString("incorrectPassphraseError", + IDS_SETTINGS_INCORRECT_PASSPHRASE_ERROR); + html_source->AddLocalizedString("passphrasePlaceholder", + IDS_SETTINGS_PASSPHRASE_PLACEHOLDER); + html_source->AddLocalizedString( + "passphraseConfirmationPlaceholder", + IDS_SETTINGS_PASSPHRASE_CONFIRMATION_PLACEHOLDER); + } void AddUsersStrings(content::WebUIDataSource* html_source) {
diff --git a/chrome/chrome_browser.gypi b/chrome/chrome_browser.gypi index 5a40643..471be68 100644 --- a/chrome/chrome_browser.gypi +++ b/chrome/chrome_browser.gypi
@@ -169,6 +169,8 @@ 'browser/android/logo_bridge.h', 'browser/android/logo_service.cc', 'browser/android/logo_service.h', + 'browser/android/media/media_throttle_infobar_delegate.cc', + 'browser/android/media/media_throttle_infobar_delegate.h', 'browser/android/metrics/launch_metrics.cc', 'browser/android/metrics/launch_metrics.h', 'browser/android/metrics/uma_session_stats.cc', @@ -914,6 +916,8 @@ 'browser/media/protected_media_identifier_permission_context_factory.h', 'browser/metrics/chromeos_metrics_provider.cc', 'browser/metrics/chromeos_metrics_provider.h', + 'browser/metrics/perf/cpu_identity.cc', + 'browser/metrics/perf/cpu_identity.h', 'browser/metrics/perf/random_selector.cc', 'browser/metrics/perf/random_selector.h', 'browser/metrics/perf/windowed_incognito_observer.cc', @@ -2849,8 +2853,6 @@ 'browser/sync/glue/autofill_data_type_controller.h', 'browser/sync/glue/autofill_profile_data_type_controller.cc', 'browser/sync/glue/autofill_profile_data_type_controller.h', - 'browser/sync/glue/autofill_wallet_data_type_controller.cc', - 'browser/sync/glue/autofill_wallet_data_type_controller.h', 'browser/sync/glue/bookmark_change_processor.cc', 'browser/sync/glue/bookmark_change_processor.h', 'browser/sync/glue/bookmark_data_type_controller.cc', @@ -3416,7 +3418,7 @@ 'browser/media/router/media_router.gyp:media_router', ], }], - ['buildtype!="Official"', { + ['branding!="Chrome"', { 'sources': [ 'browser/search/local_files_ntp_source.cc', 'browser/search/local_files_ntp_source.h',
diff --git a/chrome/chrome_tests.gypi b/chrome/chrome_tests.gypi index 1bb47cd..bac25b5 100644 --- a/chrome/chrome_tests.gypi +++ b/chrome/chrome_tests.gypi
@@ -399,6 +399,7 @@ 'browser/safe_json_parser_browsertest.cc', 'browser/search/hotword_installer_browsertest.cc', 'browser/search/suggestions/image_fetcher_impl_browsertest.cc', + 'browser/search_engines/template_url_scraper_browsertest.cc', 'browser/service_process/service_process_control_browsertest.cc', 'browser/services/gcm/fake_gcm_profile_service.cc', 'browser/services/gcm/fake_gcm_profile_service.h', @@ -1551,6 +1552,41 @@ ], 'targets': [ { + # This target contains non-unittest test utilities that don't belong in + # production libraries but are used by more than one test executable. + # + # GN version: //chrome/test:test_support_ui + 'target_name': 'test_support_ui', + 'type': 'static_library', + 'dependencies': [ + '../components/components.gyp:metrics_test_support', + '../skia/skia.gyp:skia', + '../testing/gtest.gyp:gtest', + ], + 'include_dirs': [ + '..', + ], + 'sources': [ + 'browser/password_manager/password_manager_test_base.cc', + 'browser/password_manager/password_manager_test_base.h', + 'browser/ui/webui/signin/login_ui_test_utils.cc', + 'browser/ui/webui/signin/login_ui_test_utils.h', + 'test/base/in_process_browser_test.cc', + 'test/base/in_process_browser_test.h', + 'test/base/in_process_browser_test_mac.cc', + 'test/base/ui_test_utils.cc', + 'test/base/ui_test_utils.h', + ], + 'conditions': [ + ['enable_plugins==1', { + "sources" : [ + 'test/ppapi/ppapi_test.cc', + 'test/ppapi/ppapi_test.h', + ], + }], + ], + }, + { # GN version: //chrome/test:interactive_ui_tests 'target_name': 'interactive_ui_tests', 'type': 'executable', @@ -1563,6 +1599,7 @@ 'debugger', 'renderer', 'test_support_common', + 'test_support_ui', '../components/components.gyp:guest_view_test_support', '../components/components_resources.gyp:components_resources', '../content/app/resources/content_resources.gyp:content_resources', @@ -2055,6 +2092,7 @@ 'renderer', 'test_support_common', 'test_support_sync_integration', + 'test_support_ui', '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../base/base.gyp:test_support_base', @@ -2589,6 +2627,7 @@ 'chrome_resources.gyp:packed_resources', 'renderer', 'test_support_common', + 'test_support_ui', '../base/base.gyp:base', '../base/base.gyp:base_i18n', '../base/base.gyp:test_support_base', @@ -2821,6 +2860,7 @@ 'renderer', 'test_support_common', 'test_support_sync_integration', + 'test_support_ui', '../sync/sync.gyp:sync', '../testing/gmock.gyp:gmock', '../testing/gtest.gyp:gtest', @@ -2905,9 +2945,9 @@ '../ui/views/views.gyp:views', ], }], - ['chromeos == 0', { - 'sources!': [ - # Note: this list is duplicated in the GN build. + ['chromeos == 0', { + 'sources!': [ + # Note: this list is duplicated in the GN build. 'browser/sync/test/integration/single_client_wifi_credentials_sync_test.cc', 'browser/sync/test/integration/two_client_wifi_credentials_sync_test.cc', ], @@ -2936,6 +2976,7 @@ 'type': 'executable', 'dependencies': [ 'test_support_sync_integration', + 'test_support_ui', '../sync/sync.gyp:sync', '../testing/gmock.gyp:gmock', '../testing/gtest.gyp:gtest',
diff --git a/chrome/chrome_tests_unit.gypi b/chrome/chrome_tests_unit.gypi index c5a5794..27fec64 100644 --- a/chrome/chrome_tests_unit.gypi +++ b/chrome/chrome_tests_unit.gypi
@@ -1310,6 +1310,7 @@ 'browser/extensions/api/log_private/syslog_parser_unittest.cc', 'browser/extensions/updater/local_extension_cache_unittest.cc', 'browser/metrics/chromeos_metrics_provider_unittest.cc', + 'browser/metrics/perf/cpu_identity_unittest.cc', 'browser/metrics/perf/random_selector_unittest.cc', 'browser/notifications/login_state_notification_blocker_chromeos_unittest.cc', 'browser/ui/browser_finder_chromeos_unittest.cc', @@ -1392,7 +1393,6 @@ ], 'chrome_unit_tests_win_sources': [ 'app/chrome_dll.rc', - 'browser/search_engines/template_url_scraper_unittest.cc', 'test/data/resource.rc', ], 'chrome_unit_tests_mac_sources': [ @@ -1741,8 +1741,6 @@ 'browser/notifications/notification_test_util.h', 'browser/password_manager/mock_password_store_service.cc', 'browser/password_manager/mock_password_store_service.h', - 'browser/password_manager/password_manager_test_base.cc', - 'browser/password_manager/password_manager_test_base.h', 'browser/password_manager/test_password_store_service.cc', 'browser/password_manager/test_password_store_service.h', 'browser/profile_resetter/profile_resetter_test_base.cc', @@ -1792,8 +1790,6 @@ 'browser/ui/views/toolbar/browser_action_test_util_views.cc', 'browser/ui/website_settings/mock_permission_bubble_request.cc', 'browser/ui/website_settings/mock_permission_bubble_request.h', - 'browser/ui/webui/signin/login_ui_test_utils.cc', - 'browser/ui/webui/signin/login_ui_test_utils.h', 'renderer/chrome_mock_render_thread.cc', 'renderer/chrome_mock_render_thread.h', 'renderer/safe_browsing/mock_feature_extractor_clock.cc', @@ -1817,9 +1813,6 @@ 'test/base/find_in_page_observer.h', 'test/base/history_index_restore_observer.cc', 'test/base/history_index_restore_observer.h', - 'test/base/in_process_browser_test.cc', - 'test/base/in_process_browser_test.h', - 'test/base/in_process_browser_test_mac.cc', 'test/base/profile_mock.cc', 'test/base/profile_mock.h', 'test/base/scoped_browser_locale.cc', @@ -1848,8 +1841,6 @@ 'test/base/testing_profile_manager.h', 'test/base/tracing.cc', 'test/base/tracing.h', - 'test/base/ui_test_utils.cc', - 'test/base/ui_test_utils.h', 'test/logging/win/file_logger.cc', 'test/logging/win/file_logger.h', 'test/logging/win/log_file_printer.cc', @@ -1908,8 +1899,6 @@ }], ['OS=="android"', { 'sources!': [ - 'browser/password_manager/password_manager_test_base.cc', - 'browser/password_manager/password_manager_test_base.h', 'browser/sessions/session_service_test_helper.cc', 'browser/sessions/session_service_test_helper.h', 'browser/ui/exclusive_access/fullscreen_controller_state_test.cc', @@ -1917,16 +1906,10 @@ 'browser/ui/exclusive_access/fullscreen_controller_state_tests.h', 'browser/ui/exclusive_access/fullscreen_controller_test.cc', 'browser/ui/exclusive_access/fullscreen_controller_test.h', - 'browser/ui/webui/signin/login_ui_test_utils.cc', - 'browser/ui/webui/signin/login_ui_test_utils.h', 'test/base/dialog_test_browser_window.cc', 'test/base/dialog_test_browser_window.h', - 'test/base/in_process_browser_test.cc', - 'test/base/in_process_browser_test.h', 'test/base/test_browser_window.cc', 'test/base/test_browser_window.h', - 'test/base/ui_test_utils.cc', - 'test/base/ui_test_utils.h', ], }], ['chromeos==1', { @@ -2127,10 +2110,6 @@ ], }], ['enable_plugins==1', { - "sources" : [ - 'test/ppapi/ppapi_test.cc', - 'test/ppapi/ppapi_test.h', - ], 'dependencies': [ '../pdf/pdf.gyp:pdf', ],
diff --git a/chrome/common/extensions/api/_permission_features.json b/chrome/common/extensions/api/_permission_features.json index 8d19c17d..1525b4b 100644 --- a/chrome/common/extensions/api/_permission_features.json +++ b/chrome/common/extensions/api/_permission_features.json
@@ -42,11 +42,12 @@ "channel": "stable", "extension_types": ["extension", "platform_app"], "whitelist": [ - "EE17C698905F7F2E6DDC87C9C30F11E164C829F4", // Watchdog release - "90113DA9516526D24DAF156C629CC41C049E8882", // Watchdog testing - "A9EFD71948A480C87D3B7C758FD938215F445F00", // Obsolete - "AE27D69DBE571F4B1694F05C89B710C646792231", // Published ADT. - "5107DE9024C329EEA9C9A72D94C16723790C6422" // Apps Developer Tool Dev. + "EE17C698905F7F2E6DDC87C9C30F11E164C829F4", // For unit testing ALP + "90113DA9516526D24DAF156C629CC41C049E8882", // http://crbug.com/520205 + "736F218710D373A285B3A9C70BF90C567B22321A", // http://crbug.com/520205 + "A9EFD71948A480C87D3B7C758FD938215F445F00", // Obsolete + "AE27D69DBE571F4B1694F05C89B710C646792231", // Published ADT. + "5107DE9024C329EEA9C9A72D94C16723790C6422" // Apps Developer Tool Dev. ] }, "audioModem": [
diff --git a/chrome/common/extensions/api/storage/storage_schema_manifest_handler.cc b/chrome/common/extensions/api/storage/storage_schema_manifest_handler.cc index 861bd1b..a8a7588 100644 --- a/chrome/common/extensions/api/storage/storage_schema_manifest_handler.cc +++ b/chrome/common/extensions/api/storage/storage_schema_manifest_handler.cc
@@ -49,13 +49,13 @@ } file = extension->path().AppendASCII(path); if (!base::PathExists(file)) { - *error = - base::StringPrintf("File does not exist: %s", file.value().c_str()); + *error = base::StringPrintf("File does not exist: %" PRIsFP, + file.value().c_str()); return policy::Schema(); } std::string content; if (!base::ReadFileToString(file, &content)) { - *error = base::StringPrintf("Can't read %s", file.value().c_str()); + *error = base::StringPrintf("Can't read %" PRIsFP, file.value().c_str()); return policy::Schema(); } return policy::Schema::Parse(content, error);
diff --git a/chrome/installer/linux/rpm/expected_deps_i386 b/chrome/installer/linux/rpm/expected_deps_i386 index c68d229a..b8bf602 100644 --- a/chrome/installer/linux/rpm/expected_deps_i386 +++ b/chrome/installer/linux/rpm/expected_deps_i386
@@ -59,7 +59,6 @@ libstdc++.so.6(GLIBCXX_3.4.10) libstdc++.so.6(GLIBCXX_3.4.11) libstdc++.so.6(GLIBCXX_3.4.15) -libstdc++.so.6(GLIBCXX_3.4.5) libstdc++.so.6(GLIBCXX_3.4.9) libX11.so.6 libXcomposite.so.1
diff --git a/chrome/installer/linux/rpm/expected_deps_x86_64 b/chrome/installer/linux/rpm/expected_deps_x86_64 index 74f2920..c6caba5 100644 --- a/chrome/installer/linux/rpm/expected_deps_x86_64 +++ b/chrome/installer/linux/rpm/expected_deps_x86_64
@@ -49,7 +49,6 @@ libstdc++.so.6(GLIBCXX_3.4.10)(64bit) libstdc++.so.6(GLIBCXX_3.4.11)(64bit) libstdc++.so.6(GLIBCXX_3.4.15)(64bit) -libstdc++.so.6(GLIBCXX_3.4.5)(64bit) libstdc++.so.6(GLIBCXX_3.4)(64bit) libstdc++.so.6(GLIBCXX_3.4.9)(64bit) libX11.so.6()(64bit)
diff --git a/chrome/test/BUILD.gn b/chrome/test/BUILD.gn index 58706b35..3e159608 100644 --- a/chrome/test/BUILD.gn +++ b/chrome/test/BUILD.gn
@@ -45,9 +45,6 @@ "base/find_in_page_observer.h", "base/history_index_restore_observer.cc", "base/history_index_restore_observer.h", - "base/in_process_browser_test.cc", - "base/in_process_browser_test.h", - "base/in_process_browser_test_mac.cc", "base/profile_mock.cc", "base/profile_mock.h", "base/scoped_browser_locale.cc", @@ -76,8 +73,6 @@ "base/testing_profile_manager.h", "base/tracing.cc", "base/tracing.h", - "base/ui_test_utils.cc", - "base/ui_test_utils.h", "logging/win/file_logger.cc", "logging/win/file_logger.h", "logging/win/log_file_printer.cc", @@ -169,12 +164,8 @@ sources -= [ "base/dialog_test_browser_window.cc", "base/dialog_test_browser_window.h", - "base/in_process_browser_test.cc", - "base/in_process_browser_test.h", "base/test_browser_window.cc", "base/test_browser_window.h", - "base/ui_test_utils.cc", - "base/ui_test_utils.h", ] } @@ -212,10 +203,6 @@ } if (enable_plugins) { - sources += [ - "ppapi/ppapi_test.cc", - "ppapi/ppapi_test.h", - ] deps += [ "//pdf" ] } @@ -234,6 +221,38 @@ } } +# GYP version: chrome/chrome_tests.gypi:test_support_ui +source_set("test_support_ui") { + defines = [] + testonly = true + + sources = [ + "base/in_process_browser_test.cc", + "base/in_process_browser_test.h", + "base/in_process_browser_test_mac.cc", + "base/ui_test_utils.cc", + "base/ui_test_utils.h", + ] + + configs += [ "//build/config:precompiled_headers" ] + + public_deps = [ + "//chrome/browser:test_support_ui", + ] + deps = [ + "//components/metrics:test_support", + "//skia", + "//testing/gtest", + ] + + if (enable_plugins) { + sources += [ + "ppapi/ppapi_test.cc", + "ppapi/ppapi_test.h", + ] + } +} + group("telemetry_binary_deps") { data_deps = [] @@ -305,11 +324,12 @@ ldflags = [] deps = [ + ":test_support", + ":test_support_ui", "//base/allocator", "//chrome/browser", "//chrome/browser/devtools", "//chrome/renderer", - "//chrome/test:test_support", "//chrome:resources", "//chrome:strings", "//chrome:packed_extra_resources", @@ -777,6 +797,7 @@ ":browser_tests_js_webui", ":sync_integration_test_support", ":test_support", + ":test_support_ui", "//base", "//base:i18n", "//base/allocator", @@ -1249,6 +1270,7 @@ deps = [ ":sync_integration_test_support", ":test_support", + ":test_support_ui", "//base/allocator", "//chrome:packed_extra_resources", "//chrome:packed_resources", @@ -1333,6 +1355,7 @@ deps = [ ":sync_integration_test_support", + ":test_support_ui", "//base/allocator", "//crypto:platform", "//sync", @@ -2034,6 +2057,7 @@ deps = [ ":test_support", + ":test_support_ui", "//base", "//base:i18n", "//base/test:test_support",
diff --git a/chrome/test/data/template_url_scraper/submit_handler/index.html b/chrome/test/data/template_url_scraper/submit_handler/index.html index 2e0b04ff..41883dd6 100644 --- a/chrome/test/data/template_url_scraper/submit_handler/index.html +++ b/chrome/test/data/template_url_scraper/submit_handler/index.html
@@ -2,7 +2,6 @@ <head> <title>Submit handler TemplateURL scraping test</title> <script> - function submit_handler() { document.getElementById("data").value = "test_data"; } @@ -10,12 +9,11 @@ function submit_form() { document.forms[0].submit(); } - </script> </head> -<body onload="submit_form();"> -<form action="http://www.foo.com:1337" onsubmit="submit_handler();"> +<body> +<form action="" onsubmit="submit_handler();"> <input type="hidden" id="data" /> <input type="text" name="q" /> <input type="submit" />
diff --git a/components/autofill.gypi b/components/autofill.gypi index c13ad099..348e365 100644 --- a/components/autofill.gypi +++ b/components/autofill.gypi
@@ -95,6 +95,7 @@ 'rappor', 'signin_core_browser', 'signin_core_common', + 'sync_driver', 'variations_http_provider', 'webdata_common', ], @@ -143,6 +144,8 @@ 'autofill/core/browser/autofill_sync_constants.h', 'autofill/core/browser/autofill_type.cc', 'autofill/core/browser/autofill_type.h', + 'autofill/core/browser/autofill_wallet_data_type_controller.cc', + 'autofill/core/browser/autofill_wallet_data_type_controller.h', 'autofill/core/browser/autofill_xml_parser.cc', 'autofill/core/browser/autofill_xml_parser.h', 'autofill/core/browser/card_unmask_delegate.cc',
diff --git a/components/autofill/core/browser/BUILD.gn b/components/autofill/core/browser/BUILD.gn index ce5b85d..9d89783 100644 --- a/components/autofill/core/browser/BUILD.gn +++ b/components/autofill/core/browser/BUILD.gn
@@ -60,6 +60,8 @@ "autofill_sync_constants.h", "autofill_type.cc", "autofill_type.h", + "autofill_wallet_data_type_controller.cc", + "autofill_wallet_data_type_controller.h", "autofill_xml_parser.cc", "autofill_xml_parser.h", "card_unmask_delegate.cc",
diff --git a/chrome/browser/sync/glue/autofill_wallet_data_type_controller.cc b/components/autofill/core/browser/autofill_wallet_data_type_controller.cc similarity index 74% rename from chrome/browser/sync/glue/autofill_wallet_data_type_controller.cc rename to components/autofill/core/browser/autofill_wallet_data_type_controller.cc index 063666f8..3cbf3d7 100644 --- a/chrome/browser/sync/glue/autofill_wallet_data_type_controller.cc +++ b/components/autofill/core/browser/autofill_wallet_data_type_controller.cc
@@ -2,36 +2,34 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "chrome/browser/sync/glue/autofill_wallet_data_type_controller.h" +#include "components/autofill/core/browser/autofill_wallet_data_type_controller.h" #include "base/bind.h" #include "base/prefs/pref_service.h" -#include "chrome/browser/sync/glue/chrome_report_unrecoverable_error.h" -#include "chrome/common/pref_names.h" #include "components/autofill/core/browser/personal_data_manager.h" #include "components/autofill/core/browser/webdata/autofill_webdata_service.h" +#include "components/autofill/core/common/autofill_pref_names.h" #include "components/sync_driver/sync_client.h" #include "components/sync_driver/sync_service.h" -#include "content/public/browser/browser_thread.h" #include "sync/api/sync_error.h" #include "sync/api/syncable_service.h" -using content::BrowserThread; - namespace browser_sync { AutofillWalletDataTypeController::AutofillWalletDataTypeController( + const scoped_refptr<base::SingleThreadTaskRunner>& ui_thread, + const scoped_refptr<base::SingleThreadTaskRunner>& db_thread, + const base::Closure& error_callback, sync_driver::SyncClient* sync_client, syncer::ModelType model_type) - : NonUIDataTypeController( - BrowserThread::GetMessageLoopProxyForThread(BrowserThread::UI), - base::Bind(&ChromeReportUnrecoverableError), - sync_client), + : NonUIDataTypeController(ui_thread, error_callback, sync_client), + ui_thread_(ui_thread), + db_thread_(db_thread), sync_client_(sync_client), callback_registered_(false), model_type_(model_type), currently_enabled_(IsEnabled()) { - DCHECK_CURRENTLY_ON(BrowserThread::UI); + DCHECK(ui_thread_->BelongsToCurrentThread()); DCHECK(model_type_ == syncer::AUTOFILL_WALLET_DATA || model_type_ == syncer::AUTOFILL_WALLET_METADATA); pref_registrar_.Init(sync_client_->GetPrefService()); @@ -45,27 +43,26 @@ base::Unretained(this))); } -AutofillWalletDataTypeController::~AutofillWalletDataTypeController() { -} +AutofillWalletDataTypeController::~AutofillWalletDataTypeController() {} syncer::ModelType AutofillWalletDataTypeController::type() const { return model_type_; } -syncer::ModelSafeGroup - AutofillWalletDataTypeController::model_safe_group() const { +syncer::ModelSafeGroup AutofillWalletDataTypeController::model_safe_group() + const { return syncer::GROUP_DB; } bool AutofillWalletDataTypeController::PostTaskOnBackendThread( const tracked_objects::Location& from_here, const base::Closure& task) { - DCHECK_CURRENTLY_ON(BrowserThread::UI); - return BrowserThread::PostTask(BrowserThread::DB, from_here, task); + DCHECK(ui_thread_->BelongsToCurrentThread()); + return db_thread_->PostTask(from_here, task); } bool AutofillWalletDataTypeController::StartModels() { - DCHECK_CURRENTLY_ON(BrowserThread::UI); + DCHECK(ui_thread_->BelongsToCurrentThread()); DCHECK_EQ(state(), MODEL_STARTING); scoped_refptr<autofill::AutofillWebDataService> web_data_service = @@ -78,16 +75,16 @@ return true; if (!callback_registered_) { - web_data_service->RegisterDBLoadedCallback(base::Bind( - &AutofillWalletDataTypeController::OnModelLoaded, this)); - callback_registered_ = true; + web_data_service->RegisterDBLoadedCallback( + base::Bind(&AutofillWalletDataTypeController::OnModelLoaded, this)); + callback_registered_ = true; } return false; } void AutofillWalletDataTypeController::StopModels() { - DCHECK_CURRENTLY_ON(BrowserThread::UI); + DCHECK(ui_thread_->BelongsToCurrentThread()); // This function is called when shutting down (nothing is 
changing), when // sync is disabled completely, or when wallet sync is disabled. In the @@ -102,8 +99,7 @@ // currently_enabled_ indicates if the other prefs are enabled. All of these // have to be enabled to sync wallet data/metadata. if (!service->HasSyncSetupCompleted() || - !service->GetPreferredDataTypes().Has(type()) || - !currently_enabled_) { + !service->GetPreferredDataTypes().Has(type()) || !currently_enabled_) { autofill::PersonalDataManager* pdm = sync_client_->GetPersonalDataManager(); if (pdm) pdm->ClearAllServerData(); @@ -111,12 +107,12 @@ } bool AutofillWalletDataTypeController::ReadyForStart() const { - DCHECK_CURRENTLY_ON(BrowserThread::UI); + DCHECK(ui_thread_->BelongsToCurrentThread()); return currently_enabled_; } void AutofillWalletDataTypeController::OnSyncPrefChanged() { - DCHECK_CURRENTLY_ON(BrowserThread::UI); + DCHECK(ui_thread_->BelongsToCurrentThread()); bool new_enabled = IsEnabled(); if (currently_enabled_ == new_enabled) @@ -136,23 +132,21 @@ base::Bind(&DataTypeController::OnSingleDataTypeUnrecoverableError, this, syncer::SyncError( - FROM_HERE, - syncer::SyncError::DATATYPE_POLICY_ERROR, - "Wallet syncing is disabled by policy.", - type()))); + FROM_HERE, syncer::SyncError::DATATYPE_POLICY_ERROR, + "Wallet syncing is disabled by policy.", type()))); } } } bool AutofillWalletDataTypeController::IsEnabled() { - DCHECK_CURRENTLY_ON(BrowserThread::UI); + DCHECK(ui_thread_->BelongsToCurrentThread()); // Require both the sync experiment and the user-visible pref to be // enabled to sync Wallet data/metadata. PrefService* ps = sync_client_->GetPrefService(); - return - ps->GetBoolean(autofill::prefs::kAutofillWalletSyncExperimentEnabled) && - ps->GetBoolean(autofill::prefs::kAutofillWalletImportEnabled); + return ps->GetBoolean( + autofill::prefs::kAutofillWalletSyncExperimentEnabled) && + ps->GetBoolean(autofill::prefs::kAutofillWalletImportEnabled); } } // namespace browser_sync
diff --git a/chrome/browser/sync/glue/autofill_wallet_data_type_controller.h b/components/autofill/core/browser/autofill_wallet_data_type_controller.h similarity index 74% rename from chrome/browser/sync/glue/autofill_wallet_data_type_controller.h rename to components/autofill/core/browser/autofill_wallet_data_type_controller.h index f67ba597..b24f3b3 100644 --- a/chrome/browser/sync/glue/autofill_wallet_data_type_controller.h +++ b/components/autofill/core/browser/autofill_wallet_data_type_controller.h
@@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#ifndef CHROME_BROWSER_SYNC_GLUE_AUTOFILL_WALLET_DATA_TYPE_CONTROLLER_H_ -#define CHROME_BROWSER_SYNC_GLUE_AUTOFILL_WALLET_DATA_TYPE_CONTROLLER_H_ +#ifndef COMPONENTS_SYNC_DRIVER_GLUE_AUTOFILL_WALLET_DATA_TYPE_CONTROLLER_H_ +#define COMPONENTS_SYNC_DRIVER_GLUE_AUTOFILL_WALLET_DATA_TYPE_CONTROLLER_H_ #include "base/basictypes.h" #include "base/prefs/pref_change_registrar.h" @@ -17,6 +17,9 @@ public: // |model_type| should be either AUTOFILL_WALLET or AUTOFILL_WALLET_METADATA. AutofillWalletDataTypeController( + const scoped_refptr<base::SingleThreadTaskRunner>& ui_thread, + const scoped_refptr<base::SingleThreadTaskRunner>& db_thread, + const base::Closure& error_callback, sync_driver::SyncClient* sync_client, syncer::ModelType model_type); @@ -41,6 +44,12 @@ // Returns true if the prefs are set such that wallet sync should be enabled. bool IsEnabled(); + // A reference to the UI thread's task runner. + const scoped_refptr<base::SingleThreadTaskRunner> ui_thread_; + + // A reference to the DB thread's task runner. + const scoped_refptr<base::SingleThreadTaskRunner> db_thread_; + sync_driver::SyncClient* const sync_client_; bool callback_registered_; syncer::ModelType model_type_; @@ -57,4 +66,4 @@ } // namespace browser_sync -#endif // CHROME_BROWSER_SYNC_GLUE_AUTOFILL_WALLET_DATA_TYPE_CONTROLLER_H_ +#endif // COMPONENTS_SYNC_DRIVER_GLUE_AUTOFILL_WALLET_DATA_TYPE_CONTROLLER_H_
diff --git a/components/bubble/bubble_manager.cc b/components/bubble/bubble_manager.cc index 4acc8d7..173a5ac5 100644 --- a/components/bubble/bubble_manager.cc +++ b/components/bubble/bubble_manager.cc
@@ -17,6 +17,7 @@ BubbleReference BubbleManager::ShowBubble(scoped_ptr<BubbleDelegate> bubble) { DCHECK(thread_checker_.CalledOnValidThread()); + DCHECK_NE(manager_state_, ITERATING_BUBBLES); DCHECK(bubble); scoped_ptr<BubbleController> controller( @@ -29,13 +30,13 @@ controller->Show(); controllers_.push_back(controller.Pass()); break; - case QUEUE_BUBBLES: - show_queue_.push_back(controller.Pass()); - break; case NO_MORE_BUBBLES: FOR_EACH_OBSERVER(BubbleManagerObserver, observers_, OnBubbleNeverShown(controller->AsWeakPtr())); break; + default: + NOTREACHED(); + break; } return bubble_ref; @@ -44,6 +45,7 @@ bool BubbleManager::CloseBubble(BubbleReference bubble, BubbleCloseReason reason) { DCHECK(thread_checker_.CalledOnValidThread()); + DCHECK_NE(manager_state_, ITERATING_BUBBLES); return CloseAllMatchingBubbles(bubble.get(), reason); } @@ -52,13 +54,20 @@ DCHECK_NE(reason, BUBBLE_CLOSE_ACCEPTED); DCHECK_NE(reason, BUBBLE_CLOSE_CANCELED); DCHECK(thread_checker_.CalledOnValidThread()); + DCHECK_NE(manager_state_, ITERATING_BUBBLES); CloseAllMatchingBubbles(nullptr, reason); } void BubbleManager::UpdateAllBubbleAnchors() { DCHECK(thread_checker_.CalledOnValidThread()); + DCHECK_NE(manager_state_, ITERATING_BUBBLES); + + // Guard against bubbles being added or removed while iterating the bubbles. + ManagerState original_state = manager_state_; + manager_state_ = ITERATING_BUBBLES; for (auto controller : controllers_) controller->UpdateAnchorPosition(); + manager_state_ = original_state; } void BubbleManager::AddBubbleManagerObserver(BubbleManagerObserver* observer) { @@ -79,33 +88,13 @@ CloseAllBubbles(BUBBLE_CLOSE_FORCED); } -void BubbleManager::ShowPendingBubbles() { - if (manager_state_ == QUEUE_BUBBLES) - manager_state_ = SHOW_BUBBLES; - - if (manager_state_ == SHOW_BUBBLES) { - for (auto controller : show_queue_) - controller->Show(); - - controllers_.insert(controllers_.end(), show_queue_.begin(), - show_queue_.end()); - - show_queue_.weak_clear(); - } else { - for (auto controller : show_queue_) { - FOR_EACH_OBSERVER(BubbleManagerObserver, observers_, - OnBubbleNeverShown(controller->AsWeakPtr())); - } - - // Clear the queue if bubbles can't be shown. - show_queue_.clear(); - } -} - bool BubbleManager::CloseAllMatchingBubbles(BubbleController* match, BubbleCloseReason reason) { ScopedVector<BubbleController> close_queue; + // Guard against bubbles being added or removed while iterating the bubbles. + ManagerState original_state = manager_state_; + manager_state_ = ITERATING_BUBBLES; for (auto iter = controllers_.begin(); iter != controllers_.end();) { if ((!match || match == *iter) && (*iter)->ShouldClose(reason)) { close_queue.push_back(*iter); @@ -114,6 +103,7 @@ ++iter; } } + manager_state_ = original_state; for (auto controller : close_queue) { controller->DoClose();
diff --git a/components/bubble/bubble_manager.h b/components/bubble/bubble_manager.h index 08429a5..b89cba88 100644 --- a/components/bubble/bubble_manager.h +++ b/components/bubble/bubble_manager.h
@@ -69,15 +69,12 @@ void FinalizePendingRequests(); private: - enum ManagerStates { + enum ManagerState { SHOW_BUBBLES, - QUEUE_BUBBLES, NO_MORE_BUBBLES, + ITERATING_BUBBLES, }; - // Show any bubbles that were added to |show_queue_|. - void ShowPendingBubbles(); - // All bubbles will get a close event for the specified |reason| if |match| is // nullptr, otherwise only the bubble held by |match| will get a close event. // Any bubble that is closed will also be deleted. @@ -90,14 +87,11 @@ base::ThreadChecker thread_checker_; // Determines what happens to a bubble when |ShowBubble| is called. - ManagerStates manager_state_; + ManagerState manager_state_; // The bubbles that are being managed. ScopedVector<BubbleController> controllers_; - // The bubbles queued to be shown when possible. - ScopedVector<BubbleController> show_queue_; - DISALLOW_COPY_AND_ASSIGN(BubbleManager); };
diff --git a/components/cronet.gypi b/components/cronet.gypi index 6e22078a..5a63b94 100644 --- a/components/cronet.gypi +++ b/components/cronet.gypi
@@ -41,6 +41,14 @@ 'includes': [ '../build/android/java_cpp_enum.gypi' ], }, { + 'target_name': 'network_quality_observations_java', + 'type': 'none', + 'variables': { + 'source_file': '../net/base/network_quality_estimator.h', + }, + 'includes': [ '../build/android/java_cpp_enum.gypi' ], + }, + { 'target_name': 'cronet_url_request_context_config_list', 'type': 'none', 'sources': [ @@ -209,6 +217,7 @@ 'cronet_url_request_context_config_list', 'cronet_version', 'load_states_list', + 'network_quality_observations_java', ], 'variables': { 'java_in_dir': 'cronet/android/java', @@ -219,6 +228,8 @@ '**/HttpUrlConnection*.java', '**/HttpUrlRequest*.java', '**/LoadState.java', + '**/NetworkQualityRttListener.java', + '**/NetworkQualityThroughputListener.java', '**/ResponseInfo.java', '**/ResponseTooLargeException.java', '**/UploadDataProvider.java', @@ -245,6 +256,7 @@ 'cronet_url_request_java', 'libcronet', 'net_request_priority_java', + 'network_quality_observations_java', ], 'variables': { 'java_in_dir': 'cronet/android/java',
diff --git a/components/cronet/android/cronet_url_request_context_adapter.cc b/components/cronet/android/cronet_url_request_context_adapter.cc index 9290441..29e9c8b0 100644 --- a/components/cronet/android/cronet_url_request_context_adapter.cc +++ b/components/cronet/android/cronet_url_request_context_adapter.cc
@@ -4,6 +4,8 @@ #include "components/cronet/android/cronet_url_request_context_adapter.h" +#include <map> + #include "base/android/jni_android.h" #include "base/android/jni_string.h" #include "base/bind.h" @@ -21,6 +23,7 @@ #include "base/values.h" #include "components/cronet/url_request_context_config.h" #include "jni/CronetUrlRequestContext_jni.h" +#include "net/base/external_estimate_provider.h" #include "net/base/load_flags.h" #include "net/base/net_errors.h" #include "net/base/network_delegate_impl.h" @@ -140,6 +143,10 @@ http_server_properties_manager_->ShutdownOnPrefThread(); if (pref_service_) pref_service_->CommitPendingWrite(); + if (network_quality_estimator_) { + network_quality_estimator_->RemoveRTTObserver(this); + network_quality_estimator_->RemoveThroughputObserver(this); + } StopNetLogOnNetworkThread(); } @@ -164,6 +171,73 @@ jcaller_ref)); } +void CronetURLRequestContextAdapter:: + EnableNetworkQualityEstimatorOnNetworkThread(bool use_local_host_requests, + bool use_smaller_responses) { + DCHECK(GetNetworkTaskRunner()->BelongsToCurrentThread()); + DCHECK(!network_quality_estimator_); + network_quality_estimator_.reset(new net::NetworkQualityEstimator( + scoped_ptr<net::ExternalEstimateProvider>(), + std::map<std::string, std::string>(), use_local_host_requests, + use_smaller_responses)); + context_->set_network_quality_estimator(network_quality_estimator_.get()); +} + +void CronetURLRequestContextAdapter::EnableNetworkQualityEstimator( + JNIEnv* env, + jobject jcaller, + jboolean use_local_host_requests, + jboolean use_smaller_responses) { + PostTaskToNetworkThread( + FROM_HERE, base::Bind(&CronetURLRequestContextAdapter:: + EnableNetworkQualityEstimatorOnNetworkThread, + base::Unretained(this), use_local_host_requests, + use_smaller_responses)); +} + +void CronetURLRequestContextAdapter::ProvideRTTObservationsOnNetworkThread( + bool should) { + DCHECK(GetNetworkTaskRunner()->BelongsToCurrentThread()); + if (!network_quality_estimator_) + return; + if (should) { + network_quality_estimator_->AddRTTObserver(this); + } else { + network_quality_estimator_->RemoveRTTObserver(this); + } +} + +void CronetURLRequestContextAdapter::ProvideRTTObservations(JNIEnv* env, + jobject jcaller, + bool should) { + PostTaskToNetworkThread(FROM_HERE, + base::Bind(&CronetURLRequestContextAdapter:: + ProvideRTTObservationsOnNetworkThread, + base::Unretained(this), should)); +} + +void CronetURLRequestContextAdapter:: + ProvideThroughputObservationsOnNetworkThread(bool should) { + DCHECK(GetNetworkTaskRunner()->BelongsToCurrentThread()); + if (!network_quality_estimator_) + return; + if (should) { + network_quality_estimator_->AddThroughputObserver(this); + } else { + network_quality_estimator_->RemoveThroughputObserver(this); + } +} + +void CronetURLRequestContextAdapter::ProvideThroughputObservations( + JNIEnv* env, + jobject jcaller, + bool should) { + PostTaskToNetworkThread( + FROM_HERE, base::Bind(&CronetURLRequestContextAdapter:: + ProvideThroughputObservationsOnNetworkThread, + base::Unretained(this), should)); +} + void CronetURLRequestContextAdapter::InitializeOnNetworkThread( scoped_ptr<URLRequestContextConfig> config, const base::android::ScopedJavaGlobalRef<jobject>& @@ -291,6 +365,7 @@ } JNIEnv* env = base::android::AttachCurrentThread(); + jcronet_url_request_context_.Reset(env, jcronet_url_request_context.obj()); Java_CronetUrlRequestContext_initNetworkThread( env, jcronet_url_request_context.obj()); @@ -409,6 +484,26 @@ return file_thread_.get(); } +void 
CronetURLRequestContextAdapter::OnRTTObservation( + int32_t rtt_ms, + const base::TimeTicks& timestamp, + net::NetworkQualityEstimator::ObservationSource source) { + Java_CronetUrlRequestContext_onRttObservation( + base::android::AttachCurrentThread(), jcronet_url_request_context_.obj(), + rtt_ms, (timestamp - base::TimeTicks::UnixEpoch()).InMilliseconds(), + source); +} + +void CronetURLRequestContextAdapter::OnThroughputObservation( + int32_t throughput_kbps, + const base::TimeTicks& timestamp, + net::NetworkQualityEstimator::ObservationSource source) { + Java_CronetUrlRequestContext_onThroughputObservation( + base::android::AttachCurrentThread(), jcronet_url_request_context_.obj(), + throughput_kbps, + (timestamp - base::TimeTicks::UnixEpoch()).InMilliseconds(), source); +} + // Creates RequestContextAdater if config is valid URLRequestContextConfig, // returns 0 otherwise. static jlong CreateRequestContextAdapter(JNIEnv* env,
diff --git a/components/cronet/android/cronet_url_request_context_adapter.h b/components/cronet/android/cronet_url_request_context_adapter.h index 3d83f01..cbc3e2c 100644 --- a/components/cronet/android/cronet_url_request_context_adapter.h +++ b/components/cronet/android/cronet_url_request_context_adapter.h
@@ -17,11 +17,13 @@ #include "base/memory/scoped_ptr.h" #include "base/prefs/json_pref_store.h" #include "base/threading/thread.h" +#include "net/base/network_quality_estimator.h" class PrefService; namespace base { class SingleThreadTaskRunner; +class TimeTicks; } // namespace base namespace net { @@ -44,12 +46,14 @@ bool CronetUrlRequestContextAdapterRegisterJni(JNIEnv* env); // Adapter between Java CronetUrlRequestContext and net::URLRequestContext. -class CronetURLRequestContextAdapter { +class CronetURLRequestContextAdapter + : public net::NetworkQualityEstimator::RTTObserver, + public net::NetworkQualityEstimator::ThroughputObserver { public: explicit CronetURLRequestContextAdapter( scoped_ptr<URLRequestContextConfig> context_config); - ~CronetURLRequestContextAdapter(); + ~CronetURLRequestContextAdapter() override; // Called on main Java thread to initialize URLRequestContext. void InitRequestContextOnMainThread(JNIEnv* env, jobject jcaller); @@ -78,6 +82,19 @@ // Called on main Java thread to initialize URLRequestContext. void InitRequestContextOnMainThread(); + // Enables the network quality estimator and optionally configures it to + // observe localhost requests, and to consider smaller responses when + // observing throughput. It is recommended that both options be set to false. + void EnableNetworkQualityEstimator(JNIEnv* env, + jobject jcaller, + jboolean use_local_host_requests, + jboolean use_smaller_responses); + + // Request that RTT and/or throughput observations should or should not be + // provided by the network quality estimator. + void ProvideRTTObservations(JNIEnv* env, jobject jcaller, bool should); + void ProvideThroughputObservations(JNIEnv* env, jobject jcaller, bool should); + private: // Initializes |context_| on the Network thread. void InitializeOnNetworkThread(scoped_ptr<URLRequestContextConfig> config, @@ -99,6 +116,29 @@ // Gets the file thread. Create one if there is none. base::Thread* GetFileThread(); + // Instantiate and configure the network quality estimator. For default + // behavior, parameters should be set to false; otherwise the estimator + // can be configured to observe requests to localhost, as well as to use + // smaller responses when estimating throughput. + void EnableNetworkQualityEstimatorOnNetworkThread( + bool use_local_host_requests, + bool use_smaller_responses); + + void ProvideRTTObservationsOnNetworkThread(bool should); + void ProvideThroughputObservationsOnNetworkThread(bool should); + + // net::NetworkQualityEstimator::RTTObserver implementation. + void OnRTTObservation( + int32_t rtt_ms, + const base::TimeTicks& timestamp, + net::NetworkQualityEstimator::ObservationSource source) override; + + // net::NetworkQualityEstimator::ThroughputObserver implementation. + void OnThroughputObservation( + int32_t throughput_kbps, + const base::TimeTicks& timestamp, + net::NetworkQualityEstimator::ObservationSource source) override; + // Network thread is owned by |this|, but is destroyed from java thread. base::Thread* network_thread_; @@ -131,6 +171,12 @@ bool is_context_initialized_; int default_load_flags_; + // A network quality estimator. + scoped_ptr<net::NetworkQualityEstimator> network_quality_estimator_; + + // Java object that owns this CronetURLRequestContextAdapter. + base::android::ScopedJavaGlobalRef<jobject> jcronet_url_request_context_; + #if defined(DATA_REDUCTION_PROXY_SUPPORT) scoped_ptr<CronetDataReductionProxy> data_reduction_proxy_; #endif
diff --git a/components/cronet/android/java/build.xml b/components/cronet/android/java/build.xml index ce5576c..b4893b8 100644 --- a/components/cronet/android/java/build.xml +++ b/components/cronet/android/java/build.xml
@@ -16,6 +16,7 @@ <exclude name="**/Chromium*.java"/> <exclude name="**/ChunkedWritableByteChannel*.java"/> <exclude name="**/HttpUrl*.java"/> + <exclude name="**/NetworkQuality*.java"/> </fileset> </javadoc> </target>
diff --git a/components/cronet/android/java/src/org/chromium/net/CronetUrlRequestContext.java b/components/cronet/android/java/src/org/chromium/net/CronetUrlRequestContext.java index 7bae288..cd4b710 100644 --- a/components/cronet/android/java/src/org/chromium/net/CronetUrlRequestContext.java +++ b/components/cronet/android/java/src/org/chromium/net/CronetUrlRequestContext.java
@@ -12,6 +12,7 @@ import android.os.Process; import android.util.Log; +import org.chromium.base.ObserverList; import org.chromium.base.VisibleForTesting; import org.chromium.base.annotations.CalledByNative; import org.chromium.base.annotations.JNINamespace; @@ -19,8 +20,11 @@ import org.chromium.base.annotations.UsedByReflection; import java.util.concurrent.Executor; +import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.atomic.AtomicInteger; +import javax.annotation.concurrent.GuardedBy; + /** * UrlRequestContext using Chromium HTTP stack implementation. */ @@ -42,6 +46,22 @@ private long mUrlRequestContextAdapter = 0; private Thread mNetworkThread; + private Executor mNetworkQualityExecutor; + private boolean mNetworkQualityEstimatorEnabled; + + /** Locks operations on network quality listeners, because listener + * addition and removal may occur on a different thread from notification. + */ + private final Object mNetworkQualityLock = new Object(); + + @GuardedBy("mNetworkQualityLock") + private final ObserverList<NetworkQualityRttListener> mRttListenerList = + new ObserverList<NetworkQualityRttListener>(); + + @GuardedBy("mNetworkQualityLock") + private final ObserverList<NetworkQualityThroughputListener> mThroughputListenerList = + new ObserverList<NetworkQualityThroughputListener>(); + @UsedByReflection("UrlRequestContext.java") public CronetUrlRequestContext(Context context, UrlRequestContextConfig config) { @@ -148,6 +168,94 @@ } } + @Override + public void enableNetworkQualityEstimator(Executor executor) { + enableNetworkQualityEstimatorForTesting(false, false, executor); + } + + @VisibleForTesting + @Override + void enableNetworkQualityEstimatorForTesting( + boolean useLocalHostRequests, boolean useSmallerResponses, Executor executor) { + if (mNetworkQualityEstimatorEnabled) { + throw new IllegalStateException("Network quality estimator already enabled"); + } + mNetworkQualityEstimatorEnabled = true; + if (executor == null) { + throw new NullPointerException("Network quality estimator requires an executor"); + } + mNetworkQualityExecutor = executor; + synchronized (mLock) { + checkHaveAdapter(); + nativeEnableNetworkQualityEstimator( + mUrlRequestContextAdapter, useLocalHostRequests, useSmallerResponses); + } + } + + @Override + public void addRttListener(NetworkQualityRttListener listener) { + if (!mNetworkQualityEstimatorEnabled) { + throw new IllegalStateException("Network quality estimator must be enabled"); + } + synchronized (mNetworkQualityLock) { + if (mRttListenerList.isEmpty()) { + synchronized (mLock) { + checkHaveAdapter(); + nativeProvideRTTObservations(mUrlRequestContextAdapter, true); + } + } + mRttListenerList.addObserver(listener); + } + } + + @Override + public void removeRttListener(NetworkQualityRttListener listener) { + if (!mNetworkQualityEstimatorEnabled) { + throw new IllegalStateException("Network quality estimator must be enabled"); + } + synchronized (mNetworkQualityLock) { + mRttListenerList.removeObserver(listener); + if (mRttListenerList.isEmpty()) { + synchronized (mLock) { + checkHaveAdapter(); + nativeProvideRTTObservations(mUrlRequestContextAdapter, false); + } + } + } + } + + @Override + public void addThroughputListener(NetworkQualityThroughputListener listener) { + if (!mNetworkQualityEstimatorEnabled) { + throw new IllegalStateException("Network quality estimator must be enabled"); + } + synchronized (mNetworkQualityLock) { + if (mThroughputListenerList.isEmpty()) { + synchronized (mLock) { + checkHaveAdapter(); 
+ nativeProvideThroughputObservations(mUrlRequestContextAdapter, true); + } + } + mThroughputListenerList.addObserver(listener); + } + } + + @Override + public void removeThroughputListener(NetworkQualityThroughputListener listener) { + if (!mNetworkQualityEstimatorEnabled) { + throw new IllegalStateException("Network quality estimator must be enabled"); + } + synchronized (mNetworkQualityLock) { + mThroughputListenerList.removeObserver(listener); + if (mThroughputListenerList.isEmpty()) { + synchronized (mLock) { + checkHaveAdapter(); + nativeProvideThroughputObservations(mUrlRequestContextAdapter, false); + } + } + } + } + /** * Mark request as started to prevent shutdown when there are active * requests. @@ -209,6 +317,48 @@ Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND); } + @SuppressWarnings("unused") + @CalledByNative + private void onRttObservation(final int rttMs, final long whenMs, final int source) { + Runnable task = new Runnable() { + @Override + public void run() { + synchronized (mNetworkQualityLock) { + for (NetworkQualityRttListener listener : mRttListenerList) { + listener.onRttObservation(rttMs, whenMs, source); + } + } + } + }; + postObservationTaskToNetworkQualityExecutor(task); + } + + @SuppressWarnings("unused") + @CalledByNative + private void onThroughputObservation( + final int throughputKbps, final long whenMs, final int source) { + Runnable task = new Runnable() { + @Override + public void run() { + synchronized (mNetworkQualityLock) { + for (NetworkQualityThroughputListener listener : mThroughputListenerList) { + listener.onThroughputObservation(throughputKbps, whenMs, source); + } + } + } + }; + postObservationTaskToNetworkQualityExecutor(task); + } + + void postObservationTaskToNetworkQualityExecutor(Runnable task) { + try { + mNetworkQualityExecutor.execute(task); + } catch (RejectedExecutionException failException) { + Log.e(CronetUrlRequestContext.LOG_TAG, "Exception posting task to executor", + failException); + } + } + // Native methods are implemented in cronet_url_request_context.cc. private static native long nativeCreateRequestContextAdapter(String config); @@ -226,4 +376,14 @@ @NativeClassQualifiedName("CronetURLRequestContextAdapter") private native void nativeInitRequestContextOnMainThread(long nativePtr); + + @NativeClassQualifiedName("CronetURLRequestContextAdapter") + private native void nativeEnableNetworkQualityEstimator( + long nativePtr, boolean useLocalHostRequests, boolean useSmallerResponses); + + @NativeClassQualifiedName("CronetURLRequestContextAdapter") + private native void nativeProvideRTTObservations(long nativePtr, boolean should); + + @NativeClassQualifiedName("CronetURLRequestContextAdapter") + private native void nativeProvideThroughputObservations(long nativePtr, boolean should); }
diff --git a/components/cronet/android/java/src/org/chromium/net/NetworkQualityRttListener.java b/components/cronet/android/java/src/org/chromium/net/NetworkQualityRttListener.java new file mode 100644 index 0000000..381b477 --- /dev/null +++ b/components/cronet/android/java/src/org/chromium/net/NetworkQualityRttListener.java
@@ -0,0 +1,21 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.net; + +/** + * Interface to watch for observations of various round trip times (RTTs) at + * various layers of the network stack. These include RTT estimates by QUIC + * and TCP, as well as the time between when a URL request is sent and when + * the first byte of the response is received. + */ +public interface NetworkQualityRttListener { + /** + * Reports a new round trip time observation. + * @param rttMs the round trip time in milliseconds. + * @param whenMs milliseconds since the Epoch (January 1st 1970, 00:00:00.000). + * @param source the observation source from {@link NetworkQualityObservationSource}. + */ + public void onRttObservation(int rttMs, long whenMs, int source); +} \ No newline at end of file
diff --git a/components/cronet/android/java/src/org/chromium/net/NetworkQualityThroughputListener.java b/components/cronet/android/java/src/org/chromium/net/NetworkQualityThroughputListener.java new file mode 100644 index 0000000..1461644f --- /dev/null +++ b/components/cronet/android/java/src/org/chromium/net/NetworkQualityThroughputListener.java
@@ -0,0 +1,18 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.net; + +/** + * Interface to watch for observations of throughput. + */ +public interface NetworkQualityThroughputListener { + /** + * Reports a new throughput observation. + * @param throughputKbps the downstream throughput in kilobits per second. + * @param whenMs milliseconds since the Epoch (January 1st 1970, 00:00:00.000). + * @param source the observation source from {@link NetworkQualityObservationSource}. + */ + public void onThroughputObservation(int throughputKbps, long whenMs, int source); +} \ No newline at end of file
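The two new listener interfaces above are single-method callbacks, so a client can implement both on one object. Below is a minimal sketch under that assumption; the class name and the logging inside it are illustrative and not part of this change — only the interfaces and their method signatures come from the files added above.

package org.chromium.net;

import android.util.Log;

// Hypothetical example listener; logs every RTT and throughput observation.
class LoggingNetworkQualityListener
        implements NetworkQualityRttListener, NetworkQualityThroughputListener {
    private static final String TAG = "NetworkQuality";

    @Override
    public void onRttObservation(int rttMs, long whenMs, int source) {
        Log.i(TAG, "RTT " + rttMs + " ms, source " + source + ", at " + whenMs);
    }

    @Override
    public void onThroughputObservation(int throughputKbps, long whenMs, int source) {
        Log.i(TAG, "Throughput " + throughputKbps + " kbps, source " + source + ", at " + whenMs);
    }
}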
diff --git a/components/cronet/android/java/src/org/chromium/net/UrlRequestContext.java b/components/cronet/android/java/src/org/chromium/net/UrlRequestContext.java index 3ed4106..30ff080 100644 --- a/components/cronet/android/java/src/org/chromium/net/UrlRequestContext.java +++ b/components/cronet/android/java/src/org/chromium/net/UrlRequestContext.java
@@ -94,6 +94,82 @@ public abstract void stopNetLog(); /** + * Enables the network quality estimator, which collects and reports + * measurements of round trip time (RTT) and downstream throughput at + * various layers of the network stack. After enabling the estimator, + * listeners of RTT and throughput can be added with + * {@link #addRttListener} and {@link #addThroughputListener} and + * removed with {@link #removeRttListener} and + * {@link #removeThroughputListener}. The estimator uses memory and CPU + * only when enabled. + * @param executor an executor that will be used to notify all + * added RTT and throughput listeners. + * @deprecated not really deprecated but hidden for now as it's a prototype. + */ + @Deprecated public abstract void enableNetworkQualityEstimator(Executor executor); + + /** + * Enables the network quality estimator for testing. This must be called + * before round trip time and throughput listeners are added. Set both + * boolean parameters to false for default behavior. + * @param useLocalHostRequests include requests to localhost in estimates. + * @param useSmallerResponses include small responses in throughput estimates. + * @param executor an {@link java.util.concurrent.Executor} on which all + * listeners will be called. + * @deprecated not really deprecated but hidden for now as it's a prototype. + */ + @Deprecated + abstract void enableNetworkQualityEstimatorForTesting( + boolean useLocalHostRequests, boolean useSmallerResponses, Executor executor); + + /** + * Registers a listener that gets called whenever the network quality + * estimator witnesses a sample round trip time. This must be called + * after {@link #enableNetworkQualityEstimator}, and will throw an + * exception otherwise. Round trip times may be recorded at various layers + * of the network stack, including TCP, QUIC, and at the URL request layer. + * The listener is called on the {@link java.util.concurrent.Executor} that + * is passed to {@link #enableNetworkQualityEstimator}. + * @param listener the listener of round trip times. + * @deprecated not really deprecated but hidden for now as it's a prototype. + */ + @Deprecated public abstract void addRttListener(NetworkQualityRttListener listener); + + /** + * Removes a listener of round trip times if previously registered with + * {@link #addRttListener}. This should be called after a + * {@link NetworkQualityRttListener} is added in order to stop receiving + * observations. + * @param listener the listener of round trip times. + * @deprecated not really deprecated but hidden for now as it's a prototype. + */ + @Deprecated public abstract void removeRttListener(NetworkQualityRttListener listener); + + /** + * Registers a listener that gets called whenever the network quality + * estimator witnesses a sample throughput measurement. This must be called + * after {@link #enableNetworkQualityEstimator}. Throughput observations + * are computed by measuring bytes read over the active network interface + * at times when at least one URL response is being received. The listener + * is called on the {@link java.util.concurrent.Executor} that is passed to + * {@link #enableNetworkQualityEstimator}. + * @param listener the listener of throughput. + * @deprecated not really deprecated but hidden for now as it's a prototype. + */ + @Deprecated + public abstract void addThroughputListener(NetworkQualityThroughputListener listener); + + /** + * Removes a listener of throughput. 
This should be called after a + * {@link NetworkQualityThroughputListener} is added with + * {@link #addThroughputListener} in order to stop receiving observations. + * @param listener the listener of throughput. + * @deprecated not really deprecated but hidden for now as it's a prototype. + */ + @Deprecated + public abstract void removeThroughputListener(NetworkQualityThroughputListener listener); + + /** * Creates a {@link UrlRequestContext} with the given * {@link UrlRequestContextConfig}. * @param context Android {@link Context}.
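Taken together, the javadoc added above implies a fixed call order for clients: enable the estimator with an executor first, then register listeners, and unregister them to stop observations. The sketch below illustrates that order under those assumptions; the helper class, method, and parameter names are hypothetical, while the UrlRequestContext methods and listener types come from this change.

package org.chromium.net;

import java.util.concurrent.Executors;

// Hypothetical helper demonstrating the required call order; not part of this change.
final class NetworkQualityDemo {
    static void watch(UrlRequestContext requestContext,
            NetworkQualityRttListener rttListener,
            NetworkQualityThroughputListener throughputListener) {
        // enableNetworkQualityEstimator must be called before add*Listener,
        // which otherwise throw IllegalStateException; observations are
        // delivered on the supplied executor.
        requestContext.enableNetworkQualityEstimator(Executors.newSingleThreadExecutor());
        requestContext.addRttListener(rttListener);
        requestContext.addThroughputListener(throughputListener);

        // ... issue requests through the context; the listeners receive RTT
        // and throughput observations asynchronously ...

        // Unregister to stop receiving observations.
        requestContext.removeRttListener(rttListener);
        requestContext.removeThroughputListener(throughputListener);
    }
}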
diff --git a/components/cronet/android/test/javatests/src/org/chromium/net/CronetUrlRequestContextTest.java b/components/cronet/android/test/javatests/src/org/chromium/net/CronetUrlRequestContextTest.java index d9cc557..395a8b1 100644 --- a/components/cronet/android/test/javatests/src/org/chromium/net/CronetUrlRequestContextTest.java +++ b/components/cronet/android/test/javatests/src/org/chromium/net/CronetUrlRequestContextTest.java
@@ -18,6 +18,9 @@ import java.io.BufferedReader; import java.io.File; import java.io.FileReader; +import java.util.LinkedList; +import java.util.NoSuchElementException; +import java.util.concurrent.Executor; /** * Test CronetUrlRequestContext. @@ -79,6 +82,46 @@ } } + static class TestExecutor implements Executor { + private final LinkedList<Runnable> mTaskQueue = new LinkedList<Runnable>(); + + @Override + public void execute(Runnable task) { + mTaskQueue.add(task); + } + + public void runAllTasks() { + try { + while (mTaskQueue.size() > 0) { + mTaskQueue.remove().run(); + } + } catch (NoSuchElementException e) { + } + } + } + + static class TestNetworkQualityListener + implements NetworkQualityRttListener, NetworkQualityThroughputListener { + int mRttObservationCount; + int mThroughputObservationCount; + + public void onRttObservation(int rttMs, long when, int source) { + mRttObservationCount++; + } + + public void onThroughputObservation(int throughputKbps, long when, int source) { + mThroughputObservationCount++; + } + + public int rttObservationCount() { + return mRttObservationCount; + } + + public int throughputObservationCount() { + return mThroughputObservationCount; + } + } + @SmallTest @Feature({"Cronet"}) public void testConfigUserAgent() throws Exception { @@ -154,6 +197,75 @@ @SmallTest @Feature({"Cronet"}) + public void testRealTimeNetworkQualityObservationsNotEnabled() throws Exception { + mActivity = launchCronetTestApp(); + TestNetworkQualityListener networkQualityListener = new TestNetworkQualityListener(); + try { + mActivity.mUrlRequestContext.addRttListener(networkQualityListener); + fail("Should throw an exception."); + } catch (IllegalStateException e) { + } + try { + mActivity.mUrlRequestContext.addThroughputListener(networkQualityListener); + fail("Should throw an exception."); + } catch (IllegalStateException e) { + } + TestUrlRequestListener listener = new TestUrlRequestListener(); + UrlRequest urlRequest = mActivity.mUrlRequestContext.createRequest( + TEST_URL, listener, listener.getExecutor()); + urlRequest.start(); + listener.blockForDone(); + assertEquals(0, networkQualityListener.rttObservationCount()); + assertEquals(0, networkQualityListener.throughputObservationCount()); + mActivity.mUrlRequestContext.shutdown(); + } + + @SmallTest + @Feature({"Cronet"}) + public void testRealTimeNetworkQualityObservationsListenerRemoved() throws Exception { + mActivity = launchCronetTestApp(); + TestExecutor testExecutor = new TestExecutor(); + TestNetworkQualityListener networkQualityListener = new TestNetworkQualityListener(); + mActivity.mUrlRequestContext.enableNetworkQualityEstimatorForTesting( + true, true, testExecutor); + mActivity.mUrlRequestContext.addRttListener(networkQualityListener); + mActivity.mUrlRequestContext.addThroughputListener(networkQualityListener); + mActivity.mUrlRequestContext.removeRttListener(networkQualityListener); + mActivity.mUrlRequestContext.removeThroughputListener(networkQualityListener); + TestUrlRequestListener listener = new TestUrlRequestListener(); + UrlRequest urlRequest = mActivity.mUrlRequestContext.createRequest( + TEST_URL, listener, listener.getExecutor()); + urlRequest.start(); + listener.blockForDone(); + testExecutor.runAllTasks(); + assertEquals(0, networkQualityListener.rttObservationCount()); + assertEquals(0, networkQualityListener.throughputObservationCount()); + mActivity.mUrlRequestContext.shutdown(); + } + + @SmallTest + @Feature({"Cronet"}) + public void testRealTimeNetworkQualityObservations() throws 
Exception { + mActivity = launchCronetTestApp(); + TestExecutor testExecutor = new TestExecutor(); + TestNetworkQualityListener networkQualityListener = new TestNetworkQualityListener(); + mActivity.mUrlRequestContext.enableNetworkQualityEstimatorForTesting( + true, true, testExecutor); + mActivity.mUrlRequestContext.addRttListener(networkQualityListener); + mActivity.mUrlRequestContext.addThroughputListener(networkQualityListener); + TestUrlRequestListener listener = new TestUrlRequestListener(); + UrlRequest urlRequest = mActivity.mUrlRequestContext.createRequest( + TEST_URL, listener, listener.getExecutor()); + urlRequest.start(); + listener.blockForDone(); + testExecutor.runAllTasks(); + assertTrue(networkQualityListener.rttObservationCount() > 0); + assertTrue(networkQualityListener.throughputObservationCount() > 0); + mActivity.mUrlRequestContext.shutdown(); + } + + @SmallTest + @Feature({"Cronet"}) public void testShutdown() throws Exception { mActivity = launchCronetTestApp(); TestUrlRequestListener listener = new ShutdownTestUrlRequestListener();
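The TestExecutor above is a manually pumped executor: posted Runnables sit in a queue until the test drains them with runAllTasks(), which makes the timing of observer callbacks deterministic. A minimal sketch of the same idea, written in C++ to match the rest of this CL (the class and function names here are illustrative, not part of Cronet):

#include <deque>
#include <functional>

class ManualExecutor {
 public:
  // Tasks are only queued here; nothing runs until the test says so.
  void Post(std::function<void()> task) { queue_.push_back(std::move(task)); }

  void RunAllTasks() {
    while (!queue_.empty()) {
      auto task = std::move(queue_.front());
      queue_.pop_front();
      task();  // observer callbacks fire here, under the test's control
    }
  }

 private:
  std::deque<std::function<void()>> queue_;
};

int main() {
  ManualExecutor executor;
  bool ran = false;
  executor.Post([&] { ran = true; });
  // Nothing has run yet; the test decides when to drain the queue.
  executor.RunAllTasks();
  return ran ? 0 : 1;
}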
diff --git a/components/data_reduction_proxy/core/browser/data_store_impl.cc b/components/data_reduction_proxy/core/browser/data_store_impl.cc index be855bb9..a34407e 100644 --- a/components/data_reduction_proxy/core/browser/data_store_impl.cc +++ b/components/data_reduction_proxy/core/browser/data_store_impl.cc
@@ -83,9 +83,7 @@ leveldb::WriteBatch batch; for (const auto& iter : map) { - leveldb::Slice key_slice(iter.first); - leveldb::Slice value_slice(iter.second); - batch.Put(key_slice, value_slice); + batch.Put(iter.first, iter.second); } leveldb::WriteOptions write_options; @@ -129,6 +127,18 @@ LOG(ERROR) << "Failed to open Data Reduction Proxy DB: " << status; db_.reset(dbptr); + + if (db_) { + leveldb::Range range; + uint64_t size; + // We try to capture the size of the entire DB by using the highest and + // lowest keys. + range.start = ""; + range.limit = "z"; // Keys starting with 'z' will not be included. + dbptr->GetApproximateSizes(&range, 1, &size); + UMA_HISTOGRAM_MEMORY_KB("DataReductionProxy.LevelDBSize", size / 1024); + } + return status; }
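For context on the size histogram added above, this is a minimal standalone sketch (assuming only stock leveldb and an illustrative database path) of how GetApproximateSizes() is queried over the ["", "z") range; the CL feeds the result into UMA_HISTOGRAM_MEMORY_KB instead of printing it:

#include <cstdint>
#include <iostream>
#include "leveldb/db.h"

int main() {
  leveldb::DB* db = nullptr;
  leveldb::Options options;
  options.create_if_missing = true;
  leveldb::Status status = leveldb::DB::Open(options, "/tmp/drp_example", &db);
  if (!status.ok()) return 1;

  // Approximate on-disk size of all keys below "z"; keys starting with 'z'
  // are excluded, matching the range used in the CL.
  leveldb::Range range("", "z");
  uint64_t size_bytes = 0;
  db->GetApproximateSizes(&range, 1, &size_bytes);

  // The CL reports this value as UMA_HISTOGRAM_MEMORY_KB(..., size / 1024).
  std::cout << "Approximate DB size: " << size_bytes / 1024 << " KB\n";
  delete db;
  return 0;
}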
diff --git a/components/data_reduction_proxy/core/browser/data_usage_store.cc b/components/data_reduction_proxy/core/browser/data_usage_store.cc index fa2dda7c..0ea68f0 100644 --- a/components/data_reduction_proxy/core/browser/data_usage_store.cc +++ b/components/data_reduction_proxy/core/browser/data_usage_store.cc
@@ -17,6 +17,7 @@ #include <string> #include <utility> +#include "base/metrics/histogram_macros.h" #include "base/strings/stringprintf.h" #include "base/threading/sequenced_worker_pool.h" #include "base/time/time.h" @@ -73,7 +74,10 @@ } void DataUsageStore::LoadDataUsage(std::vector<DataUsageBucket>* data_usage) { + SCOPED_UMA_HISTOGRAM_TIMER("DataReductionProxy.HistoricalDataUsageLoadTime"); + DCHECK(data_usage); + DataUsageBucket empty_bucket; data_usage->clear(); data_usage->resize(kNumDataUsageBuckets, empty_bucket);
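SCOPED_UMA_HISTOGRAM_TIMER reports the wall time between entering and leaving the enclosing scope. A rough standalone sketch of that RAII behavior, with std::chrono and a print statement standing in for base::TimeTicks and the UMA macro:

#include <chrono>
#include <iostream>
#include <string>

class ScopedTimer {
 public:
  explicit ScopedTimer(std::string name)
      : name_(std::move(name)), start_(std::chrono::steady_clock::now()) {}
  ~ScopedTimer() {
    // Elapsed time is recorded when the scope ends, however it exits.
    auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(
                  std::chrono::steady_clock::now() - start_).count();
    std::cout << name_ << ": " << ms << " ms\n";
  }

 private:
  std::string name_;
  std::chrono::steady_clock::time_point start_;
};

void LoadDataUsage() {
  ScopedTimer timer("DataReductionProxy.HistoricalDataUsageLoadTime");
  // ... load buckets ...
}

int main() {
  LoadDataUsage();
  return 0;
}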
diff --git a/components/html_viewer/web_layer_tree_view_impl.cc b/components/html_viewer/web_layer_tree_view_impl.cc index 2ee1c75..0870aa8 100644 --- a/components/html_viewer/web_layer_tree_view_impl.cc +++ b/components/html_viewer/web_layer_tree_view_impl.cc
@@ -70,7 +70,7 @@ output_surface_.reset( new mus::OutputSurface(context_provider, view_->RequestSurface())); } - layer_tree_host_->SetLayerTreeHostClientReady(); + layer_tree_host_->SetVisible(view_->visible()); } WebLayerTreeViewImpl::~WebLayerTreeViewImpl() {
diff --git a/components/infobars/core/infobar_delegate.cc b/components/infobars/core/infobar_delegate.cc index b22a685..820766b 100644 --- a/components/infobars/core/infobar_delegate.cc +++ b/components/infobars/core/infobar_delegate.cc
@@ -126,10 +126,17 @@ } translate::TranslateInfoBarDelegate* -InfoBarDelegate::AsTranslateInfoBarDelegate() { + InfoBarDelegate::AsTranslateInfoBarDelegate() { return nullptr; } +#if defined(OS_ANDROID) +MediaThrottleInfoBarDelegate* + InfoBarDelegate::AsMediaThrottleInfoBarDelegate() { + return nullptr; +} +#endif + InfoBarDelegate::InfoBarDelegate() : nav_entry_id_(0) { }
diff --git a/components/infobars/core/infobar_delegate.h b/components/infobars/core/infobar_delegate.h index 774c215..40b73a9a 100644 --- a/components/infobars/core/infobar_delegate.h +++ b/components/infobars/core/infobar_delegate.h
@@ -21,6 +21,10 @@ class ThemeInstalledInfoBarDelegate; class ThreeDAPIInfoBarDelegate; +#if defined(OS_ANDROID) +class MediaThrottleInfoBarDelegate; +#endif + namespace translate { class TranslateInfoBarDelegate; } @@ -130,6 +134,9 @@ virtual ThemeInstalledInfoBarDelegate* AsThemePreviewInfobarDelegate(); virtual ThreeDAPIInfoBarDelegate* AsThreeDAPIInfoBarDelegate(); virtual translate::TranslateInfoBarDelegate* AsTranslateInfoBarDelegate(); +#if defined(OS_ANDROID) + virtual MediaThrottleInfoBarDelegate* AsMediaThrottleInfoBarDelegate(); +#endif void set_infobar(InfoBar* infobar) { infobar_ = infobar; } void set_nav_entry_id(int nav_entry_id) { nav_entry_id_ = nav_entry_id; }
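AsMediaThrottleInfoBarDelegate() follows the existing As*InfoBarDelegate() idiom: the base class virtual returns nullptr, and only the concrete delegate overrides it to return itself, giving callers a safe downcast without RTTI. A condensed sketch of the pattern (class names shortened, not the real delegates):

struct MediaThrottleDelegate;

struct Delegate {
  virtual ~Delegate() = default;
  // Base implementation: "I am not a media-throttle delegate."
  virtual MediaThrottleDelegate* AsMediaThrottleDelegate() { return nullptr; }
};

struct MediaThrottleDelegate : Delegate {
  // Only the concrete class answers with itself.
  MediaThrottleDelegate* AsMediaThrottleDelegate() override { return this; }
};

bool IsMediaThrottleInfoBar(Delegate* d) {
  return d->AsMediaThrottleDelegate() != nullptr;
}

int main() {
  MediaThrottleDelegate concrete;
  Delegate* base = &concrete;
  return IsMediaThrottleInfoBar(base) ? 0 : 1;
}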
diff --git a/components/mus/surfaces/surfaces_scheduler.cc b/components/mus/surfaces/surfaces_scheduler.cc index f3752f83..50789e58 100644 --- a/components/mus/surfaces/surfaces_scheduler.cc +++ b/components/mus/surfaces/surfaces_scheduler.cc
@@ -20,7 +20,6 @@ scheduler_ = cc::Scheduler::Create( this, settings, 0, base::MessageLoop::current()->task_runner().get(), nullptr, compositor_timing_history.Pass()); - scheduler_->SetCanStart(); scheduler_->SetVisible(true); scheduler_->SetCanDraw(true); scheduler_->SetNeedsBeginMainFrame();
diff --git a/components/nacl/broker/nacl_broker_listener.cc b/components/nacl/broker/nacl_broker_listener.cc index 1116aadf..a9b8c6b 100644 --- a/components/nacl/broker/nacl_broker_listener.cc +++ b/components/nacl/broker/nacl_broker_listener.cc
@@ -23,10 +23,6 @@ #include "ipc/ipc_switches.h" #include "sandbox/win/src/sandbox_policy.h" -#if defined(OS_WIN) -#include "ipc/attachment_broker_unprivileged_win.h" -#endif - namespace { void SendReply(IPC::Channel* channel, int32 pid, bool result) { @@ -36,10 +32,8 @@ } // namespace NaClBrokerListener::NaClBrokerListener() { -#if defined(OS_WIN) - attachment_broker_.reset(new IPC::AttachmentBrokerUnprivilegedWin); - IPC::AttachmentBroker::SetGlobal(attachment_broker_.get()); -#endif + attachment_broker_.reset( + IPC::AttachmentBrokerUnprivileged::CreateBroker().release()); } NaClBrokerListener::~NaClBrokerListener() {
diff --git a/components/nacl/loader/nacl_listener.cc b/components/nacl/loader/nacl_listener.cc index 918e8c7..d6f8849b 100644 --- a/components/nacl/loader/nacl_listener.cc +++ b/components/nacl/loader/nacl_listener.cc
@@ -45,7 +45,6 @@ #include <io.h> #include "content/public/common/sandbox_init.h" -#include "ipc/attachment_broker_unprivileged_win.h" #endif namespace { @@ -207,10 +206,8 @@ #endif main_loop_(NULL), is_started_(false) { -#if defined(OS_WIN) - attachment_broker_.reset(new IPC::AttachmentBrokerUnprivilegedWin); - IPC::AttachmentBroker::SetGlobal(attachment_broker_.get()); -#endif + attachment_broker_.reset( + IPC::AttachmentBrokerUnprivileged::CreateBroker().release()); io_thread_.StartWithOptions( base::Thread::Options(base::MessageLoop::TYPE_IO, 0)); DCHECK(g_listener == NULL);
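Both NaCl processes now construct their broker through a single cross-platform factory instead of #ifdef'ing the Windows-only class in place. A toy sketch of that factory shape, with illustrative class names rather than the real IPC types:

#include <memory>

struct Broker { virtual ~Broker() = default; };
struct WinBroker : Broker {};   // stands in for the Windows implementation
struct NullBroker : Broker {};  // stands in for the no-op used elsewhere

std::unique_ptr<Broker> CreateBroker() {
  // The platform decision lives in one place; callers just ask for a broker.
#if defined(OS_WIN)
  return std::make_unique<WinBroker>();
#else
  return std::make_unique<NullBroker>();
#endif
}

int main() {
  std::unique_ptr<Broker> broker = CreateBroker();
  return broker ? 0 : 1;
}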
diff --git a/components/page_load_metrics/browser/DEPS b/components/page_load_metrics/browser/DEPS index 9579c84..fc1f5909 100644 --- a/components/page_load_metrics/browser/DEPS +++ b/components/page_load_metrics/browser/DEPS
@@ -1,5 +1,6 @@ include_rules = [ "+content/public/browser", "+components/page_load_metrics/browser", + "+components/rappor", "+net/base", ]
diff --git a/components/page_load_metrics/browser/metrics_web_contents_observer.cc b/components/page_load_metrics/browser/metrics_web_contents_observer.cc index df37a05..aa161f8f 100644 --- a/components/page_load_metrics/browser/metrics_web_contents_observer.cc +++ b/components/page_load_metrics/browser/metrics_web_contents_observer.cc
@@ -8,6 +8,8 @@ #include "base/metrics/histogram.h" #include "components/page_load_metrics/common/page_load_metrics_messages.h" #include "components/page_load_metrics/common/page_load_timing.h" +#include "components/rappor/rappor_service.h" +#include "components/rappor/rappor_utils.h" #include "content/public/browser/browser_thread.h" #include "content/public/browser/navigation_details.h" #include "content/public/browser/navigation_handle.h" @@ -51,11 +53,33 @@ return true; } -base::Time WallTimeFromTimeTicks(base::TimeTicks time) { +base::Time WallTimeFromTimeTicks(const base::TimeTicks& time) { return base::Time::FromDoubleT( (time - base::TimeTicks::UnixEpoch()).InSecondsF()); } +// The number of buckets in the bitfield histogram. These buckets are described +// in rappor.xml in PageLoad.CoarseTiming.NavigationToFirstLayout. The bucket +// flag is defined by 1 << bucket_index, and is the bitfield representing which +// timing bucket the page load falls into, i.e. 000010 would be the bucket flag +// showing that the page took between 2 and 4 seconds to load. +const size_t kNumRapporHistogramBuckets = 6; + +uint64_t RapporHistogramBucketIndex(const base::TimeDelta& time) { + int64 seconds = time.InSeconds(); + if (seconds < 2) + return 0; + if (seconds < 4) + return 1; + if (seconds < 8) + return 2; + if (seconds < 16) + return 3; + if (seconds < 32) + return 4; + return 5; +} + } // namespace #define PAGE_LOAD_HISTOGRAM(name, sample) \ @@ -63,8 +87,11 @@ base::TimeDelta::FromMilliseconds(10), \ base::TimeDelta::FromMinutes(10), 100) -PageLoadTracker::PageLoadTracker(bool in_foreground) - : has_commit_(false), started_in_foreground_(in_foreground) { +PageLoadTracker::PageLoadTracker(bool in_foreground, + rappor::RapporService* const rappor_service) + : has_commit_(false), + started_in_foreground_(in_foreground), + rappor_service_(rappor_service) { RecordEvent(PAGE_LOAD_STARTED); } @@ -74,8 +101,10 @@ if (timing_.first_layout.is_zero()) RecordEvent(PAGE_LOAD_ABORTED_BEFORE_FIRST_LAYOUT); - if (has_commit_) + if (has_commit_) { RecordTimingHistograms(); + RecordRappor(); + } } void PageLoadTracker::WebContentsHidden() { @@ -85,8 +114,9 @@ } } -void PageLoadTracker::Commit() { +void PageLoadTracker::Commit(const GURL& committed_url) { has_commit_ = true; + url_ = committed_url; } bool PageLoadTracker::UpdateTiming(const PageLoadTiming& timing) { @@ -103,17 +133,35 @@ return false; } +const GURL& PageLoadTracker::GetCommittedURL() { + DCHECK(has_commit_); + return url_; +} + +// Blink calculates navigation start using TimeTicks, but converts to epoch time +// in its public API. Thus, to compare time values to navigation start, we +// calculate the current time since the epoch using TimeTicks, and convert to +// Time. This method is similar to how blink converts TimeTicks to epoch time. +// There may be slight inaccuracies due to inter-process timestamps, but +// this solution is the best we have right now. +// +// returns a TimeDelta which is +// - Infinity if we were never backgrounded +// - null (TimeDelta()) if we started backgrounded +// - elapsed time to first background if we started in the foreground and +// backgrounded. 
+base::TimeDelta PageLoadTracker::GetBackgroundDelta() { + if (started_in_foreground_) { + if (background_time_.is_null()) + return base::TimeDelta::Max(); + return WallTimeFromTimeTicks(background_time_) - timing_.navigation_start; + } + return base::TimeDelta(); +} + void PageLoadTracker::RecordTimingHistograms() { DCHECK(has_commit_); - // This method is similar to how blink converts TimeTicks to epoch time. - // There may be slight inaccuracies due to inter-process timestamps, but - // this solution is the best we have right now. - base::TimeDelta background_delta; - if (started_in_foreground_) { - background_delta = background_time_.is_null() - ? base::TimeDelta::Max() - : WallTimeFromTimeTicks(background_time_) - timing_.navigation_start; - } + base::TimeDelta background_delta = GetBackgroundDelta(); if (!timing_.dom_content_loaded_event_start.is_zero()) { if (timing_.dom_content_loaded_event_start < background_delta) { @@ -154,9 +202,44 @@ "PageLoad.EventCounts", event, PAGE_LOAD_LAST_ENTRY); } +void PageLoadTracker::RecordRappor() { + DCHECK(!GetCommittedURL().is_empty()); + if (!rappor_service_) + return; + // Log the eTLD+1 of sites that show poor loading performance. + if (!timing_.first_layout.is_zero() && + timing_.first_layout < GetBackgroundDelta()) { + scoped_ptr<rappor::Sample> sample = + rappor_service_->CreateSample(rappor::UMA_RAPPOR_TYPE); + sample->SetStringField("Domain", rappor::GetDomainAndRegistrySampleFromGURL( + GetCommittedURL())); + sample->SetFlagsField("Bucket", + 1 << RapporHistogramBucketIndex(timing_.first_layout), + kNumRapporHistogramBuckets); + // The IsSlow flag is just a one bit boolean if the first layout was > 10s. + sample->SetFlagsField("IsSlow", timing_.first_layout.InSecondsF() >= 10, 1); + rappor_service_->RecordSampleObj( + "PageLoad.CoarseTiming.NavigationToFirstLayout", sample.Pass()); + } +} + +// static +void MetricsWebContentsObserver::CreateForWebContents( + content::WebContents* web_contents, + rappor::RapporService* rappor_service) { + DCHECK(web_contents); + if (!FromWebContents(web_contents)) { + web_contents->SetUserData(UserDataKey(), new MetricsWebContentsObserver( + web_contents, rappor_service)); + } +} + MetricsWebContentsObserver::MetricsWebContentsObserver( - content::WebContents* web_contents) - : content::WebContentsObserver(web_contents), in_foreground_(false) {} + content::WebContents* web_contents, + rappor::RapporService* rappor_service) + : content::WebContentsObserver(web_contents), + in_foreground_(false), + rappor_service_(rappor_service) {} MetricsWebContentsObserver::~MetricsWebContentsObserver() {} @@ -182,7 +265,8 @@ // from the omnibox. DCHECK_GT(2ul, provisional_loads_.size()); provisional_loads_.insert( - navigation_handle, make_scoped_ptr(new PageLoadTracker(in_foreground_))); + navigation_handle, + make_scoped_ptr(new PageLoadTracker(in_foreground_, rappor_service_))); } void MetricsWebContentsObserver::DidFinishNavigation( @@ -215,7 +299,7 @@ return; committed_load_ = finished_nav.Pass(); - committed_load_->Commit(); + committed_load_->Commit(navigation_handle->GetURL()); } void MetricsWebContentsObserver::WasShown() {
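To make the Rappor bucketing above concrete, here is a standalone sketch of the index-to-flag mapping: the index is effectively log2 of the load time in seconds, clamped to [0, 5], and the recorded value is the one-hot flag 1 << index. No Chromium or rappor dependencies; the thresholds follow RapporHistogramBucketIndex as added in this CL.

#include <cassert>
#include <cstdint>

uint64_t BucketIndexForSeconds(int64_t seconds) {
  if (seconds < 2) return 0;
  if (seconds < 4) return 1;
  if (seconds < 8) return 2;
  if (seconds < 16) return 3;
  if (seconds < 32) return 4;
  return 5;
}

int main() {
  // A 3-second first layout falls in the [2, 4) bucket, so the flag is
  // 0b000010, matching the example given in the comment above.
  assert((uint64_t(1) << BucketIndexForSeconds(3)) == 0x2);
  // A 5-second first layout falls in the [4, 8) bucket: flag 0b000100.
  assert((uint64_t(1) << BucketIndexForSeconds(5)) == 0x4);
  return 0;
}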
diff --git a/components/page_load_metrics/browser/metrics_web_contents_observer.h b/components/page_load_metrics/browser/metrics_web_contents_observer.h index cf39dc9..8149395 100644 --- a/components/page_load_metrics/browser/metrics_web_contents_observer.h +++ b/components/page_load_metrics/browser/metrics_web_contents_observer.h
@@ -23,6 +23,10 @@ class Message; } // namespace IPC +namespace rappor { +class RapporService; +} + namespace page_load_metrics { // If you add elements from this enum, make sure you update the enum @@ -56,11 +60,17 @@ PAGE_LOAD_LAST_ENTRY }; +// This class tracks a given page load, starting from navigation start / +// provisional load, until a new navigation commits or the navigation fails. It +// also records RAPPOR/UMA about the page load. +// MetricsWebContentsObserver manages a set of provisional PageLoadTrackers, as +// well as a committed PageLoadTracker. class PageLoadTracker { public: - explicit PageLoadTracker(bool in_foreground); + PageLoadTracker(bool in_foreground, + rappor::RapporService* const rappor_service); ~PageLoadTracker(); - void Commit(); + void Commit(const GURL& committed_url); void WebContentsHidden(); // Returns true if the timing was successfully updated. @@ -68,7 +78,12 @@ void RecordEvent(PageLoadEvent event); private: + // Only valid to call post-commit. + const GURL& GetCommittedURL(); + + base::TimeDelta GetBackgroundDelta(); void RecordTimingHistograms(); + void RecordRappor(); bool has_commit_; @@ -79,6 +94,12 @@ bool started_in_foreground_; PageLoadTiming timing_; + GURL url_; + + // This RapporService is owned by and shares a lifetime with + // g_browser_process's MetricsServicesManager. It can be NULL. The underlying + // RapporService will be freed when the browser process is killed. + rappor::RapporService* const rappor_service_; DISALLOW_COPY_AND_ASSIGN(PageLoadTracker); }; @@ -89,6 +110,10 @@ : public content::WebContentsObserver, public content::WebContentsUserData<MetricsWebContentsObserver> { public: + // The caller must guarantee that the RapporService (if non-null) will + // outlive the WebContents. + static void CreateForWebContents(content::WebContents* web_contents, + rappor::RapporService* rappor_service); ~MetricsWebContentsObserver() override; // content::WebContentsObserver implementation: @@ -105,7 +130,8 @@ void RenderProcessGone(base::TerminationStatus status) override; private: - explicit MetricsWebContentsObserver(content::WebContents* web_contents); + MetricsWebContentsObserver(content::WebContents* web_contents, + rappor::RapporService* rappor_service); friend class content::WebContentsUserData<MetricsWebContentsObserver>; friend class MetricsWebContentsObserverTest; @@ -124,6 +150,8 @@ provisional_loads_; scoped_ptr<PageLoadTracker> committed_load_; + rappor::RapporService* const rappor_service_; + DISALLOW_COPY_AND_ASSIGN(MetricsWebContentsObserver); };
diff --git a/components/page_load_metrics/browser/metrics_web_contents_observer_unittest.cc b/components/page_load_metrics/browser/metrics_web_contents_observer_unittest.cc index 147f09434..fb4e5f3 100644 --- a/components/page_load_metrics/browser/metrics_web_contents_observer_unittest.cc +++ b/components/page_load_metrics/browser/metrics_web_contents_observer_unittest.cc
@@ -9,6 +9,7 @@ #include "base/test/histogram_tester.h" #include "base/time/time.h" #include "components/page_load_metrics/common/page_load_metrics_messages.h" +#include "components/rappor/test_rappor_service.h" #include "content/public/browser/navigation_handle.h" #include "content/public/browser/render_frame_host.h" #include "content/public/test/test_renderer_host.h" @@ -47,7 +48,9 @@ void SetUp() override { RenderViewHostTestHarness::SetUp(); - observer_ = make_scoped_ptr(new MetricsWebContentsObserver(web_contents())); + observer_ = make_scoped_ptr( + new MetricsWebContentsObserver(web_contents(), + &rappor_tester_)); observer_->WasShown(); } @@ -59,6 +62,7 @@ protected: base::HistogramTester histogram_tester_; + rappor::TestRapporService rappor_tester_; scoped_ptr<MetricsWebContentsObserver> observer_; };
diff --git a/components/scheduler/renderer/renderer_scheduler_impl.cc b/components/scheduler/renderer/renderer_scheduler_impl.cc index 1193339b..f122501 100644 --- a/components/scheduler/renderer/renderer_scheduler_impl.cc +++ b/components/scheduler/renderer/renderer_scheduler_impl.cc
@@ -921,7 +921,7 @@ // Make sure that we don't initially assume there is no idle time. MainThreadOnly().short_idle_period_duration.InsertSample( cc::BeginFrameArgs::DefaultInterval()); - AnyThread().user_model.Reset(); + AnyThread().user_model.Reset(helper_.Now()); MainThreadOnly().have_seen_a_begin_main_frame = false; UpdatePolicyLocked(UpdateType::MAY_EARLY_OUT_IF_POLICY_UNCHANGED); }
diff --git a/components/scheduler/renderer/user_model.cc b/components/scheduler/renderer/user_model.cc index 81abada..168419a 100644 --- a/components/scheduler/renderer/user_model.cc +++ b/components/scheduler/renderer/user_model.cc
@@ -4,9 +4,30 @@ #include "components/scheduler/renderer/user_model.h" +#include "base/metrics/histogram_macros.h" + namespace scheduler { -UserModel::UserModel() : pending_input_event_count_(0) {} +namespace { +// This enum is used to back a histogram, and should therefore be treated as +// append-only. +enum GesturePredictionResult { + GESTURE_OCCURED_WAS_PREDICTED = 0, + GESTURE_OCCURED_BUT_NOT_PREDICTED = 1, + GESTURE_PREDICTED_BUT_DID_NOT_OCCUR = 2, + GESTURE_PREDICTION_RESULT_COUNT = 3 +}; + +void RecordGesturePrediction(GesturePredictionResult result) { + UMA_HISTOGRAM_ENUMERATION( + "RendererScheduler.UserModel.GesturePredictedCorrectly", result, + GESTURE_PREDICTION_RESULT_COUNT); +} + +} // namespace + +UserModel::UserModel() + : pending_input_event_count_(0), is_gesture_expected_(false) {} UserModel::~UserModel() {} void UserModel::DidStartProcessingInputEvent(blink::WebInputEvent::Type type, @@ -16,6 +37,26 @@ type == blink::WebInputEvent::GestureScrollBegin || type == blink::WebInputEvent::GesturePinchBegin) { last_gesture_start_time_ = now; + + RecordGesturePrediction(is_gesture_expected_ + ? GESTURE_OCCURED_WAS_PREDICTED + : GESTURE_OCCURED_BUT_NOT_PREDICTED); + + if (!last_reset_time_.is_null()) { + base::TimeDelta time_since_reset = now - last_reset_time_; + UMA_HISTOGRAM_MEDIUM_TIMES( + "RendererScheduler.UserModel.GestureStartTimeSinceModelReset", + time_since_reset); + } + + // If there has been a previous gesture, record a UMA metric for the time + // interval between then and now. + if (!last_continuous_gesture_time_.is_null()) { + base::TimeDelta time_since_last_gesture = now - last_gesture_start_time_; + UMA_HISTOGRAM_MEDIUM_TIMES( + "RendererScheduler.UserModel.TimeBetweenGestures", + time_since_last_gesture); + } } // We need to track continuous gestures seperatly for scroll detection @@ -31,6 +72,14 @@ last_continuous_gesture_time_ = now; } + // If the gesture has ended, record a UMA metric that tracks its duration. + if (type == blink::WebInputEvent::GestureScrollEnd || + type == blink::WebInputEvent::GesturePinchEnd) { + base::TimeDelta duration = now - last_gesture_start_time_; + UMA_HISTOGRAM_TIMES("RendererScheduler.UserModel.GestureDuration", + duration); + } + pending_input_event_count_++; } @@ -58,6 +107,26 @@ bool UserModel::IsGestureExpectedSoon( RendererScheduler::UseCase use_case, const base::TimeTicks now, + base::TimeDelta* prediction_valid_duration) { + bool was_gesture_expected = is_gesture_expected_; + is_gesture_expected_ = + IsGestureExpectedSoonImpl(use_case, now, prediction_valid_duration); + + // Track when we start expecting a gesture so we can work out later if a + // gesture actually happened. + if (!was_gesture_expected && is_gesture_expected_) + last_gesture_expected_start_time_ = now; + + if (was_gesture_expected && !is_gesture_expected_ && + last_gesture_expected_start_time_ > last_gesture_start_time_) { + RecordGesturePrediction(GESTURE_PREDICTED_BUT_DID_NOT_OCCUR); + } + return is_gesture_expected_; +} + +bool UserModel::IsGestureExpectedSoonImpl( + RendererScheduler::UseCase use_case, + const base::TimeTicks now, base::TimeDelta* prediction_valid_duration) const { if (use_case == RendererScheduler::UseCase::NONE) { // If we've scrolled recently then future scrolling is likely. 
@@ -89,10 +158,13 @@ return false; } -void UserModel::Reset() { +void UserModel::Reset(base::TimeTicks now) { last_input_signal_time_ = base::TimeTicks(); last_gesture_start_time_ = base::TimeTicks(); last_continuous_gesture_time_ = base::TimeTicks(); + last_gesture_expected_start_time_ = base::TimeTicks(); + last_reset_time_ = now; + is_gesture_expected_ = false; } void UserModel::AsValueInto(base::trace_event::TracedValue* state) const { @@ -104,6 +176,10 @@ state->SetDouble( "last_touchstart_time", (last_gesture_start_time_ - base::TimeTicks()).InMillisecondsF()); + state->SetDouble("last_gesture_expected_start_time", + (last_gesture_expected_start_time_ - base::TimeTicks()) + .InMillisecondsF()); + state->SetBoolean("is_gesture_expected", is_gesture_expected_); state->EndDictionary(); }
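The gesture-prediction accounting above boils down to two rules: when a gesture starts, record whether it had been predicted; when an expectation window closes without a gesture having started inside it, record a false positive. A small standalone sketch of that bookkeeping, with plain doubles standing in for base::TimeTicks and prints standing in for the UMA histogram:

#include <iostream>

struct GestureAccounting {
  bool gesture_expected = false;
  double expected_since = -1;
  double last_gesture_start = -1;

  // Mirrors DidStartProcessingInputEvent(): at gesture start we know whether
  // the model had predicted it.
  void GestureStarted(double now) {
    std::cout << (gesture_expected ? "occurred, was predicted\n"
                                   : "occurred, not predicted\n");
    last_gesture_start = now;
  }

  // Mirrors IsGestureExpectedSoon(): remember when the expectation window
  // opens; if it closes without a newer gesture, that is a false positive.
  void SetGestureExpected(bool expected, double now) {
    if (!gesture_expected && expected)
      expected_since = now;
    if (gesture_expected && !expected && expected_since > last_gesture_start)
      std::cout << "predicted, but did not occur\n";
    gesture_expected = expected;
  }
};

int main() {
  GestureAccounting model;
  model.SetGestureExpected(true, 1.0);   // prediction window opens
  model.GestureStarted(1.5);             // counted as correctly predicted
  model.SetGestureExpected(false, 2.0);  // no false positive: a gesture came
  return 0;
}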
diff --git a/components/scheduler/renderer/user_model.h b/components/scheduler/renderer/user_model.h index 4eea8b1..0176843 100644 --- a/components/scheduler/renderer/user_model.h +++ b/components/scheduler/renderer/user_model.h
@@ -37,7 +37,7 @@ // The prediction may change after |prediction_valid_duration| has elapsed. bool IsGestureExpectedSoon(RendererScheduler::UseCase use_case, const base::TimeTicks now, - base::TimeDelta* prediction_valid_duration) const; + base::TimeDelta* prediction_valid_duration); void AsValueInto(base::trace_event::TracedValue* state) const; @@ -53,13 +53,21 @@ static const int kExpectSubsequentGestureMillis = 2000; // Clears input signals. - void Reset(); + void Reset(base::TimeTicks now); private: + bool IsGestureExpectedSoonImpl( + RendererScheduler::UseCase use_case, + const base::TimeTicks now, + base::TimeDelta* prediction_valid_duration) const; + int pending_input_event_count_; base::TimeTicks last_input_signal_time_; base::TimeTicks last_gesture_start_time_; base::TimeTicks last_continuous_gesture_time_; // Doesn't include Taps. + base::TimeTicks last_gesture_expected_start_time_; + base::TimeTicks last_reset_time_; + bool is_gesture_expected_; DISALLOW_COPY_AND_ASSIGN(UserModel); };
diff --git a/components/sync_driver/non_blocking_data_type_controller.cc b/components/sync_driver/non_blocking_data_type_controller.cc index 9b20d31..c7c417cd 100644 --- a/components/sync_driver/non_blocking_data_type_controller.cc +++ b/components/sync_driver/non_blocking_data_type_controller.cc
@@ -8,22 +8,27 @@ #include "base/bind_helpers.h" #include "base/location.h" #include "sync/engine/model_type_processor_impl.h" +#include "sync/internal_api/public/activation_context.h" namespace sync_driver_v2 { NonBlockingDataTypeController::NonBlockingDataTypeController( - syncer::ModelType type, bool is_preferred) - : type_(type), + const scoped_refptr<base::SingleThreadTaskRunner>& ui_thread, + syncer::ModelType type, + bool is_preferred) + : base::RefCountedDeleteOnMessageLoop<NonBlockingDataTypeController>( + ui_thread), + type_(type), current_state_(DISCONNECTED), is_preferred_(is_preferred) {} NonBlockingDataTypeController::~NonBlockingDataTypeController() {} void NonBlockingDataTypeController::InitializeType( - const scoped_refptr<base::SequencedTaskRunner>& task_runner, + const scoped_refptr<base::SingleThreadTaskRunner>& task_runner, const base::WeakPtr<syncer_v2::ModelTypeProcessorImpl>& type_processor) { DCHECK(!IsSyncProxyConnected()); - task_runner_ = task_runner; + model_task_runner_ = task_runner; type_processor_ = type_processor; DCHECK(IsSyncProxyConnected()); @@ -84,19 +89,18 @@ DCHECK_EQ(ENABLED, GetDesiredState()); DVLOG(1) << "Enabling non-blocking sync type " << ModelTypeToString(type_); - task_runner_->PostTask( + model_task_runner_->PostTask( FROM_HERE, - base::Bind(&syncer_v2::ModelTypeProcessorImpl::Enable, type_processor_, - base::Passed(sync_context_proxy_->Clone()))); + base::Bind(&NonBlockingDataTypeController::StartProcessor, this)); current_state_ = ENABLED; } void NonBlockingDataTypeController::SendDisableSignal() { DCHECK_EQ(DISABLED, GetDesiredState()); DVLOG(1) << "Disabling non-blocking sync type " << ModelTypeToString(type_); - task_runner_->PostTask(FROM_HERE, - base::Bind(&syncer_v2::ModelTypeProcessorImpl::Disable, - type_processor_)); + model_task_runner_->PostTask( + FROM_HERE, + base::Bind(&syncer_v2::ModelTypeProcessorImpl::Disable, type_processor_)); current_state_ = DISABLED; } @@ -104,18 +108,36 @@ DCHECK_EQ(DISCONNECTED, GetDesiredState()); DVLOG(1) << "Disconnecting non-blocking sync type " << ModelTypeToString(type_); - task_runner_->PostTask( - FROM_HERE, base::Bind(&syncer_v2::ModelTypeProcessorImpl::Disconnect, - type_processor_)); + model_task_runner_->PostTask( + FROM_HERE, + base::Bind(&NonBlockingDataTypeController::StopProcessor, this)); current_state_ = DISCONNECTED; } +void NonBlockingDataTypeController::StartProcessor() { + DCHECK(model_task_runner_->BelongsToCurrentThread()); + type_processor_->Start( + base::Bind(&NonBlockingDataTypeController::OnProcessorStarted, this)); +} + +void NonBlockingDataTypeController::OnProcessorStarted( + /*syncer::SyncError error,*/ + scoped_ptr<syncer_v2::ActivationContext> activation_context) { + DCHECK(model_task_runner_->BelongsToCurrentThread()); + sync_context_proxy_->ConnectTypeToSync(type_, activation_context.Pass()); +} + +void NonBlockingDataTypeController::StopProcessor() { + DCHECK(model_task_runner_->BelongsToCurrentThread()); + type_processor_->Stop(); +} + bool NonBlockingDataTypeController::IsPreferred() const { return is_preferred_; } bool NonBlockingDataTypeController::IsSyncProxyConnected() const { - return task_runner_.get() != NULL; + return model_task_runner_.get() != NULL; } bool NonBlockingDataTypeController::IsSyncBackendConnected() const {
diff --git a/components/sync_driver/non_blocking_data_type_controller.h b/components/sync_driver/non_blocking_data_type_controller.h index 5faa79f1..eb3a0c45 100644 --- a/components/sync_driver/non_blocking_data_type_controller.h +++ b/components/sync_driver/non_blocking_data_type_controller.h
@@ -5,13 +5,15 @@ #ifndef COMPONENTS_SYNC_DRIVER_NON_BLOCKING_DATA_TYPE_CONTROLLER_H_ #define COMPONENTS_SYNC_DRIVER_NON_BLOCKING_DATA_TYPE_CONTROLLER_H_ +#include "base/memory/ref_counted_delete_on_message_loop.h" #include "base/memory/scoped_ptr.h" #include "base/memory/weak_ptr.h" -#include "base/sequenced_task_runner.h" +#include "base/single_thread_task_runner.h" #include "sync/internal_api/public/base/model_type.h" #include "sync/internal_api/public/sync_context_proxy.h" namespace syncer_v2 { +struct ActivationContext; class ModelTypeProcessorImpl; } @@ -52,17 +54,21 @@ // This class is structured using some state machine patterns. It's a bit // awkward at times, but this seems to be the clearest way to express the // behaviors this class must implement. -class NonBlockingDataTypeController { +class NonBlockingDataTypeController + : public base::RefCountedDeleteOnMessageLoop< + NonBlockingDataTypeController> { public: - NonBlockingDataTypeController(syncer::ModelType type, bool is_preferred); - ~NonBlockingDataTypeController(); + NonBlockingDataTypeController( + const scoped_refptr<base::SingleThreadTaskRunner>& ui_thread, + syncer::ModelType type, + bool is_preferred); // Connects the ModelTypeProcessor to this controller. // // There is no "undo" for this operation. The NonBlockingDataTypeController // will only ever deal with a single type proxy. void InitializeType( - const scoped_refptr<base::SequencedTaskRunner>& task_runner, + const scoped_refptr<base::SingleThreadTaskRunner>& task_runner, const base::WeakPtr<syncer_v2::ModelTypeProcessorImpl>& type_processor); // Initialize the connection to the SyncContextProxy. @@ -82,6 +88,12 @@ void SetIsPreferred(bool is_preferred); private: + friend class base::RefCountedDeleteOnMessageLoop< + NonBlockingDataTypeController>; + friend class base::DeleteHelper<NonBlockingDataTypeController>; + + ~NonBlockingDataTypeController(); + enum TypeState { ENABLED, DISABLED, DISCONNECTED }; // Figures out which signals need to be sent, then sends them. @@ -108,6 +120,20 @@ // Returns the state that the type sync proxy should be in. TypeState GetDesiredState() const; + // Called on the model thread to start the processor. The processor is + // expected to invoke OnProcessorStarted when it is ready to be activated + // with the sync backend. + void StartProcessor(); + + // Callback passed to the processor to be invoked when the processor has + // started. This is called on the model thread. + void OnProcessorStarted( + /*syncer::SyncError error,*/ + scoped_ptr<syncer_v2::ActivationContext> activation_context); + + // Called on the model thread to stop the processor. + void StopProcessor(); + // The ModelType we're controlling. Kept mainly for debugging. const syncer::ModelType type_; @@ -124,7 +150,7 @@ bool is_preferred_; // The ModelTypeProcessorImpl and its associated thread. May be NULL. - scoped_refptr<base::SequencedTaskRunner> task_runner_; + scoped_refptr<base::SingleThreadTaskRunner> model_task_runner_; base::WeakPtr<syncer_v2::ModelTypeProcessorImpl> type_processor_; // The SyncContextProxy that connects to the current sync backend. May be
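The start sequence described in these comments is: the controller posts StartProcessor to the model thread, the processor calls back with an ActivationContext, and the controller forwards it to the sync backend via ConnectTypeToSync. A stripped-down sketch of that handshake, with std::function in place of base::Bind and the task runners elided (the types below are stand-ins, not the real sync classes):

#include <functional>
#include <iostream>
#include <memory>

struct ActivationContext {};  // stand-in for syncer_v2::ActivationContext

struct Processor {
  // The processor reports readiness by handing back an activation context.
  void Start(std::function<void(std::unique_ptr<ActivationContext>)> done) {
    done(std::make_unique<ActivationContext>());  // ready immediately here
  }
};

struct Controller {
  Processor* processor;

  void StartProcessor() {
    processor->Start([this](std::unique_ptr<ActivationContext> ctx) {
      OnProcessorStarted(std::move(ctx));
    });
  }

  void OnProcessorStarted(std::unique_ptr<ActivationContext> ctx) {
    // In the real code this forwards ctx to sync_context_proxy_->ConnectTypeToSync().
    std::cout << "ConnectTypeToSync(type, activation_context)\n";
  }
};

int main() {
  Processor p;
  Controller c{&p};
  c.StartProcessor();
  return 0;
}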
diff --git a/components/sync_driver/non_blocking_data_type_controller_unittest.cc b/components/sync_driver/non_blocking_data_type_controller_unittest.cc index d74c722fb4..692c6a1 100644 --- a/components/sync_driver/non_blocking_data_type_controller_unittest.cc +++ b/components/sync_driver/non_blocking_data_type_controller_unittest.cc
@@ -8,6 +8,7 @@ #include "base/callback.h" #include "base/memory/ref_counted.h" #include "base/memory/weak_ptr.h" +#include "base/message_loop/message_loop.h" #include "base/sequenced_task_runner.h" #include "base/test/test_simple_task_runner.h" #include "base/thread_task_runner_handle.h" @@ -121,18 +122,26 @@ : type_processor_(syncer::DICTIONARY, base::WeakPtr<syncer_v2::ModelTypeStore>()), model_thread_(new base::TestSimpleTaskRunner()), - model_thread_handle_(model_thread_), sync_thread_(new base::TestSimpleTaskRunner()), - controller_(syncer::DICTIONARY, true), mock_context_proxy_(&mock_sync_context_, model_thread_, sync_thread_), auto_run_tasks_(true) {} ~NonBlockingDataTypeControllerTest() override {} + void SetUp() override { + controller_ = new NonBlockingDataTypeController( + base::ThreadTaskRunnerHandle::Get(), syncer::DICTIONARY, true); + } + + void TearDown() override { + controller_ = NULL; + ui_loop_.RunUntilIdle(); + } + // Connects the sync type proxy to the NonBlockingDataTypeController. void InitTypeSyncProxy() { - controller_.InitializeType(model_thread_, - type_processor_.AsWeakPtrForUI()); + controller_->InitializeType(model_thread_, + type_processor_.AsWeakPtrForUI()); if (auto_run_tasks_) { RunAllTasks(); } @@ -140,7 +149,7 @@ // Connects the sync backend to the NonBlockingDataTypeController. void InitSyncBackend() { - controller_.InitializeSyncContext(mock_context_proxy_.Clone()); + controller_->InitializeSyncContext(mock_context_proxy_.Clone()); if (auto_run_tasks_) { RunAllTasks(); } @@ -148,7 +157,7 @@ // Disconnects the sync backend from the NonBlockingDataTypeController. void UninitializeSyncBackend() { - controller_.ClearSyncContext(); + controller_->ClearSyncContext(); if (auto_run_tasks_) { RunAllTasks(); } @@ -156,7 +165,7 @@ // Toggles the user's preference for syncing this type. void SetIsPreferred(bool preferred) { - controller_.SetIsPreferred(preferred); + controller_->SetIsPreferred(preferred); if (auto_run_tasks_) { RunAllTasks(); } @@ -184,15 +193,15 @@ protected: syncer_v2::ModelTypeProcessorImpl type_processor_; scoped_refptr<base::TestSimpleTaskRunner> model_thread_; - base::ThreadTaskRunnerHandle model_thread_handle_; scoped_refptr<base::TestSimpleTaskRunner> sync_thread_; - NonBlockingDataTypeController controller_; + scoped_refptr<NonBlockingDataTypeController> controller_; MockSyncContext mock_sync_context_; MockSyncContextProxy mock_context_proxy_; bool auto_run_tasks_; + base::MessageLoopForUI ui_loop_; }; // Initialization when the user has disabled syncing for this type. 
@@ -201,12 +210,12 @@ InitTypeSyncProxy(); InitSyncBackend(); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); UninitializeSyncBackend(); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); } @@ -214,15 +223,15 @@ TEST_F(NonBlockingDataTypeControllerTest, Enabled_SyncFirst) { SetIsPreferred(true); InitSyncBackend(); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); InitTypeSyncProxy(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_TRUE(type_processor_.IsConnected()); UninitializeSyncBackend(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); } @@ -230,15 +239,15 @@ TEST_F(NonBlockingDataTypeControllerTest, Enabled_ProcessorFirst) { SetIsPreferred(true); InitTypeSyncProxy(); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); InitSyncBackend(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_TRUE(type_processor_.IsConnected()); UninitializeSyncBackend(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); } @@ -248,11 +257,11 @@ InitTypeSyncProxy(); InitSyncBackend(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_TRUE(type_processor_.IsConnected()); SetIsPreferred(false); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); } @@ -261,23 +270,23 @@ SetIsPreferred(false); InitTypeSyncProxy(); InitSyncBackend(); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); SetIsPreferred(true); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_TRUE(type_processor_.IsConnected()); SetIsPreferred(false); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); SetIsPreferred(true); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_TRUE(type_processor_.IsConnected()); SetIsPreferred(false); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); } @@ -286,17 +295,17 @@ SetIsPreferred(true); InitTypeSyncProxy(); InitSyncBackend(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_TRUE(type_processor_.IsConnected()); // Shutting down sync backend should disconnect but not disable the type. UninitializeSyncBackend(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); // Brining the backend back should reconnect the type. InitSyncBackend(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_TRUE(type_processor_.IsConnected()); } @@ -325,17 +334,17 @@ // This should emit a disable signal. 
SetIsPreferred(false); EXPECT_FALSE(type_processor_.IsConnected()); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); // This won't enable us, since we don't have a sync backend. SetIsPreferred(true); EXPECT_FALSE(type_processor_.IsConnected()); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); // Only now do we start sending enable signals. InitSyncBackend(); EXPECT_TRUE(type_processor_.IsConnected()); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); } // Turns off auto-task-running to test the effects of delaying a connection @@ -357,7 +366,7 @@ // That should result in a request to connect, but it won't be // executed right away. EXPECT_FALSE(type_processor_.IsConnected()); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); // Let the sync thread process the request and the model thread handle its // response. @@ -365,7 +374,7 @@ RunQueuedModelThreadTasks(); EXPECT_TRUE(type_processor_.IsConnected()); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); } // Send Disable signal while a connection request is in progress. @@ -382,7 +391,7 @@ // That should result in a request to connect, but it won't be // executed right away. EXPECT_FALSE(type_processor_.IsConnected()); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); // Send and execute a disable signal before the OnConnect callback returns. SetIsPreferred(false); @@ -397,7 +406,7 @@ // from the UI thread earlier. We need to make sure that doesn't happen. RunQueuedModelThreadTasks(); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); EXPECT_FALSE(type_processor_.IsConnected()); } @@ -415,17 +424,17 @@ // That was the first enable. EXPECT_FALSE(type_processor_.IsConnected()); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); // Now disable. SetIsPreferred(false); RunQueuedModelThreadTasks(); - EXPECT_FALSE(type_processor_.IsPreferred()); + EXPECT_FALSE(type_processor_.IsEnabled()); // And re-enable. SetIsPreferred(true); RunQueuedModelThreadTasks(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); // The sync thread has three messages related to those enables and // disables sittin in its queue. Let's allow it to process them. @@ -433,7 +442,7 @@ // Let the model thread process any messages from the sync thread. RunQueuedModelThreadTasks(); - EXPECT_TRUE(type_processor_.IsPreferred()); + EXPECT_TRUE(type_processor_.IsEnabled()); EXPECT_TRUE(type_processor_.IsConnected()); }
diff --git a/components/variations/proto/study.proto b/components/variations/proto/study.proto index 25ff116..f2bae5b 100644 --- a/components/variations/proto/study.proto +++ b/components/variations/proto/study.proto
@@ -44,7 +44,7 @@ // An experiment within the study. // - // Next tag: 12 + // Next tag: 13 message Experiment { // A named parameter value for this experiment. // @@ -109,7 +109,7 @@ // a non-zero |probability_weight|. optional string forcing_feature_off = 4; } - optional FeatureAssociation feature_association = 11; + optional FeatureAssociation feature_association = 12; // Optional name of a Chrome flag that, when present, causes this experiment // to be forced. If the forcing_flag field is set, users will not be @@ -248,6 +248,7 @@ // omitted, the study applies to all hardware classes unless // |exclude_hardware_class| is specified. Mutually exclusive with // |exclude_hardware_class|. + // An example might be "lumpy", "daisy", etc. repeated string hardware_class = 8; // List of ChromeOS hardware classes that will be excluded in this @@ -256,6 +257,7 @@ // "Device FOOBAR". If omitted, the study applies to all hardware classes // unless |hardware_class| is specified. Mutually exclusive with // |hardware_class|. + // An example might be "lumpy", "daisy", etc. repeated string exclude_hardware_class = 9; // List of lowercase ISO 3166-1 alpha-2 country codes that will receive this
diff --git a/components/web_view/BUILD.gn b/components/web_view/BUILD.gn index f86231c..cad4dc7 100644 --- a/components/web_view/BUILD.gn +++ b/components/web_view/BUILD.gn
@@ -59,6 +59,7 @@ "//mojo/application/public/cpp:sources", "//mojo/application/public/interfaces", "//mojo/common:common_base", + "//mojo/common:url_type_converters", "//mojo/converters/geometry", "//mojo/services/network/public/interfaces", "//mojo/services/tracing/public/interfaces",
diff --git a/components/web_view/frame.cc b/components/web_view/frame.cc index 93232af..0670977 100644 --- a/components/web_view/frame.cc +++ b/components/web_view/frame.cc
@@ -17,6 +17,7 @@ #include "components/web_view/frame_user_data.h" #include "components/web_view/frame_utils.h" #include "mojo/application/public/interfaces/shell.mojom.h" +#include "mojo/common/url_type_converters.h" #include "url/gurl.h" using mus::View; @@ -550,7 +551,7 @@ } void Frame::DidNavigateLocally(const mojo::String& url) { - NOTIMPLEMENTED(); + tree_->DidNavigateLocally(this, url.To<GURL>()); } void Frame::DispatchLoadEventToParent() {
diff --git a/components/web_view/frame_tree.cc b/components/web_view/frame_tree.cc index fc0cb3a..e8d071f 100644 --- a/components/web_view/frame_tree.cc +++ b/components/web_view/frame_tree.cc
@@ -82,6 +82,10 @@ delegate_->DidCommitProvisionalLoad(source); } +void FrameTree::DidNavigateLocally(Frame* source, const GURL& url) { + delegate_->DidNavigateLocally(source, url); +} + void FrameTree::ClientPropertyChanged(const Frame* source, const mojo::String& name, const mojo::Array<uint8_t>& value) {
diff --git a/components/web_view/frame_tree.h b/components/web_view/frame_tree.h index a3f50d3..aadcdba 100644 --- a/components/web_view/frame_tree.h +++ b/components/web_view/frame_tree.h
@@ -8,6 +8,7 @@ #include "components/mus/public/interfaces/view_tree.mojom.h" #include "components/web_view/frame.h" #include "third_party/mojo/src/mojo/public/cpp/bindings/array.h" +#include "url/gurl.h" namespace mojo { class String; @@ -66,6 +67,7 @@ void LoadingStateChanged(); void TitleChanged(const mojo::String& title); void DidCommitProvisionalLoad(Frame* source); + void DidNavigateLocally(Frame* source, const GURL& url); void ClientPropertyChanged(const Frame* source, const mojo::String& name, const mojo::Array<uint8_t>& value);
diff --git a/components/web_view/frame_tree_delegate.h b/components/web_view/frame_tree_delegate.h index b1eab678..4b317a79 100644 --- a/components/web_view/frame_tree_delegate.h +++ b/components/web_view/frame_tree_delegate.h
@@ -12,6 +12,7 @@ #include "components/mus/public/interfaces/view_tree.mojom.h" #include "components/web_view/public/interfaces/frame.mojom.h" #include "mojo/services/network/public/interfaces/url_loader.mojom.h" +#include "url/gurl.h" namespace web_view { @@ -70,6 +71,9 @@ // Invoked when blink has started displaying the frame. virtual void DidCommitProvisionalLoad(Frame* frame) = 0; + // Invoked when the frame has changed its own URL. + virtual void DidNavigateLocally(Frame* source, const GURL& url) = 0; + // Notification of various frame state changes. Generally only useful for // tests. virtual void DidCreateFrame(Frame* frame);
diff --git a/components/web_view/navigation_controller.cc b/components/web_view/navigation_controller.cc index b9ffdb9ed..ef6010b 100644 --- a/components/web_view/navigation_controller.cc +++ b/components/web_view/navigation_controller.cc
@@ -157,14 +157,7 @@ // appears that blink can change some of the data during the navigation. Do // it for now for bootstrapping purposes. if (pending_entry_index_ == -1 && pending_entry_) { - int current_size = static_cast<int>(entries_.size()); - if (current_size > 0) { - while (last_committed_entry_index_ < (current_size - 1)) { - entries_.pop_back(); - current_size--; - } - } - + ClearForwardEntries(); entries_.push_back(pending_entry_); last_committed_entry_index_ = static_cast<int>(entries_.size() - 1); pending_entry_ = nullptr; @@ -184,4 +177,33 @@ delegate_->OnDidNavigate(); } +void NavigationController::FrameDidNavigateLocally(Frame* frame, + const GURL& url) { + // If this is a local navigation of a non-top frame, don't try to commit + // it. + if (frame->parent()) + return; + + ClearForwardEntries(); + + // TODO(erg): This is overly cheap handling of local navigations in + // frames. We don't have all the information needed to construct a real + // URLRequest. + + entries_.push_back(new NavigationEntry(url)); + last_committed_entry_index_ = static_cast<int>(entries_.size() - 1); + + delegate_->OnDidNavigate(); +} + +void NavigationController::ClearForwardEntries() { + int current_size = static_cast<int>(entries_.size()); + if (current_size > 0) { + while (last_committed_entry_index_ < (current_size - 1)) { + entries_.pop_back(); + current_size--; + } + } +} + } // namespace web_view
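FrameDidNavigateLocally() reuses the same back/forward bookkeeping as a normal commit: drop any forward entries past the last committed index, append the new entry, and advance the index. A standalone sketch of that pruning logic, with std::vector<std::string> standing in for ScopedVector<NavigationEntry>:

#include <cassert>
#include <string>
#include <vector>

struct Controller {
  std::vector<std::string> entries;
  int last_committed = -1;

  // Equivalent to the pop_back loop factored out as ClearForwardEntries().
  void ClearForwardEntries() {
    if (!entries.empty())
      entries.resize(last_committed + 1);
  }

  void DidNavigateLocally(const std::string& url) {
    ClearForwardEntries();
    entries.push_back(url);
    last_committed = static_cast<int>(entries.size()) - 1;
  }
};

int main() {
  Controller c;
  c.DidNavigateLocally("a.html");
  c.DidNavigateLocally("b.html");
  c.last_committed = 0;            // user went "back" to a.html
  c.DidNavigateLocally("c.html");  // the forward entry b.html is discarded
  assert(c.entries.size() == 2 && c.entries[1] == "c.html");
  return 0;
}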
diff --git a/components/web_view/navigation_controller.h b/components/web_view/navigation_controller.h index 6847a083..1d4a0ae 100644 --- a/components/web_view/navigation_controller.h +++ b/components/web_view/navigation_controller.h
@@ -8,6 +8,7 @@ #include "base/memory/scoped_vector.h" #include "components/web_view/public/interfaces/web_view.mojom.h" +#include "url/gurl.h" namespace web_view { @@ -55,8 +56,15 @@ // Called when a frame is committed. void FrameDidCommitProvisionalLoad(Frame* frame); + // Called when a frame navigated by itself. Adds the new url to the + // back/forward stack. + void FrameDidNavigateLocally(Frame* frame, const GURL& url); + private: using NavigationEntries = ScopedVector<NavigationEntry>; + + void ClearForwardEntries(); + NavigationEntries entries_; // An entry we haven't gotten a response for yet. This will be discarded
diff --git a/components/web_view/navigation_entry.cc b/components/web_view/navigation_entry.cc index 5b3fce43..0b3aaa8 100644 --- a/components/web_view/navigation_entry.cc +++ b/components/web_view/navigation_entry.cc
@@ -9,6 +9,9 @@ NavigationEntry::NavigationEntry(mojo::URLRequestPtr original_request) : url_request_(original_request.Pass()) {} +NavigationEntry::NavigationEntry(const GURL& raw_url) + : url_request_(raw_url) {} + NavigationEntry::~NavigationEntry() {} mojo::URLRequestPtr NavigationEntry::BuildURLRequest() const {
diff --git a/components/web_view/navigation_entry.h b/components/web_view/navigation_entry.h index b6bcde7..db3df6a 100644 --- a/components/web_view/navigation_entry.h +++ b/components/web_view/navigation_entry.h
@@ -7,6 +7,8 @@ #include "components/web_view/url_request_cloneable.h" +#include "url/gurl.h" + namespace web_view { // Contains all information needed about an individual navigation in the @@ -14,6 +16,7 @@ class NavigationEntry { public: explicit NavigationEntry(mojo::URLRequestPtr original_request); + explicit NavigationEntry(const GURL& raw_url); ~NavigationEntry(); // Builds a copy of the URLRequest that generated this navigation. This
diff --git a/components/web_view/test_frame_tree_delegate.cc b/components/web_view/test_frame_tree_delegate.cc index ac95488..2b7ee8cc 100644 --- a/components/web_view/test_frame_tree_delegate.cc +++ b/components/web_view/test_frame_tree_delegate.cc
@@ -81,6 +81,9 @@ void TestFrameTreeDelegate::DidCommitProvisionalLoad(Frame* frame) {} +void TestFrameTreeDelegate::DidNavigateLocally(Frame* source, + const GURL& url) {} + void TestFrameTreeDelegate::DidCreateFrame(Frame* frame) { if (waiting_for_create_frame_) { most_recent_frame_ = frame;
diff --git a/components/web_view/test_frame_tree_delegate.h b/components/web_view/test_frame_tree_delegate.h index 176b9838..1f021ee 100644 --- a/components/web_view/test_frame_tree_delegate.h +++ b/components/web_view/test_frame_tree_delegate.h
@@ -50,6 +50,7 @@ const CanNavigateFrameCallback& callback) override; void DidStartNavigation(Frame* frame) override; void DidCommitProvisionalLoad(Frame* frame) override; + void DidNavigateLocally(Frame* source, const GURL& url) override; void DidCreateFrame(Frame* frame) override; void DidDestroyFrame(Frame* frame) override; void OnViewEmbeddedInFrameDisconnected(Frame* frame) override;
diff --git a/components/web_view/url_request_cloneable.cc b/components/web_view/url_request_cloneable.cc index 106faf1..4ee870aa 100644 --- a/components/web_view/url_request_cloneable.cc +++ b/components/web_view/url_request_cloneable.cc
@@ -6,9 +6,16 @@ #include "base/logging.h" #include "mojo/common/data_pipe_utils.h" +#include "mojo/common/url_type_converters.h" namespace web_view { +// TODO(erg): In the long run, we might not want to have a stack of +// URLRequestPtrs, but another type that captures most of the data. When I saw +// NavigationController the first time, I didn't understand why they made their +// own data structure which kept track of everything in a request. The reason is +// that they have to build requests from multiple different data types. + URLRequestCloneable::URLRequestCloneable(mojo::URLRequestPtr original_request) : url_(original_request->url), method_(original_request->method), @@ -25,6 +32,16 @@ } } +URLRequestCloneable::URLRequestCloneable(const GURL& raw_url) + : url_(mojo::String::From(raw_url)), + method_("GET"), + headers_(), + response_body_buffer_size_(0), + auto_follow_redirects_(false), + bypass_cache_(false), + original_body_null_(true) { +} + URLRequestCloneable::~URLRequestCloneable() {} mojo::URLRequestPtr URLRequestCloneable::Clone() const {
diff --git a/components/web_view/url_request_cloneable.h b/components/web_view/url_request_cloneable.h index f9f6530..b8b3c3d0 100644 --- a/components/web_view/url_request_cloneable.h +++ b/components/web_view/url_request_cloneable.h
@@ -10,6 +10,7 @@ #include "base/basictypes.h" #include "mojo/services/network/public/interfaces/url_loader.mojom.h" +#include "url/gurl.h" namespace web_view { @@ -21,6 +22,7 @@ class URLRequestCloneable { public: explicit URLRequestCloneable(mojo::URLRequestPtr original_request); + explicit URLRequestCloneable(const GURL& raw_url); ~URLRequestCloneable(); // Creates a new URLRequest.
diff --git a/components/web_view/web_view_impl.cc b/components/web_view/web_view_impl.cc index 545fbe6..4bd3485 100644 --- a/components/web_view/web_view_impl.cc +++ b/components/web_view/web_view_impl.cc
@@ -215,6 +215,13 @@ navigation_controller_.FrameDidCommitProvisionalLoad(frame); } +void WebViewImpl::DidNavigateLocally(Frame* source, + const GURL& url) { + navigation_controller_.FrameDidNavigateLocally(source, url); + if (source == frame_tree_->root()) + client_->TopLevelNavigationStarted(url.spec()); +} + void WebViewImpl::DidDestroyFrame(Frame* frame) { find_controller_.DidDestroyFrame(frame); }
diff --git a/components/web_view/web_view_impl.h b/components/web_view/web_view_impl.h index 449fdd8..dd5d785 100644 --- a/components/web_view/web_view_impl.h +++ b/components/web_view/web_view_impl.h
@@ -90,6 +90,7 @@ const CanNavigateFrameCallback& callback) override; void DidStartNavigation(Frame* frame) override; void DidCommitProvisionalLoad(Frame* frame) override; + void DidNavigateLocally(Frame* source, const GURL& url) override; void DidDestroyFrame(Frame* frame) override; void OnFindInFrameCountUpdated(int32_t request_id, Frame* frame,
diff --git a/content/browser/bluetooth/bluetooth_dispatcher_host.cc b/content/browser/bluetooth/bluetooth_dispatcher_host.cc index 7335a7b..30d468b 100644 --- a/content/browser/bluetooth/bluetooth_dispatcher_host.cc +++ b/content/browser/bluetooth/bluetooth_dispatcher_host.cc
@@ -273,6 +273,7 @@ void BluetoothDispatcherHost::set_adapter( scoped_refptr<device::BluetoothAdapter> adapter) { DCHECK_CURRENTLY_ON(BrowserThread::UI); + connections_.clear(); if (adapter_.get()) adapter_->RemoveObserver(this); adapter_ = adapter; @@ -841,8 +842,7 @@ base::TimeTicks start_time, scoped_ptr<device::BluetoothGattConnection> connection) { DCHECK_CURRENTLY_ON(BrowserThread::UI); - // TODO(ortuno): Save the BluetoothGattConnection so we can disconnect - // from it. + connections_.push_back(connection.Pass()); RecordConnectGATTTimeSuccess(base::TimeTicks::Now() - start_time); RecordConnectGATTOutcome(UMAConnectGATTOutcome::SUCCESS); Send(new BluetoothMsg_ConnectGATTSuccess(thread_id, request_id,
diff --git a/content/browser/bluetooth/bluetooth_dispatcher_host.h b/content/browser/bluetooth/bluetooth_dispatcher_host.h index 8a8714f..83e4a45 100644 --- a/content/browser/bluetooth/bluetooth_dispatcher_host.h +++ b/content/browser/bluetooth/bluetooth_dispatcher_host.h
@@ -7,6 +7,7 @@ #include "base/basictypes.h" #include "base/id_map.h" +#include "base/memory/scoped_vector.h" #include "base/memory/weak_ptr.h" #include "content/public/browser/bluetooth_chooser.h" #include "content/public/browser/browser_message_filter.h" @@ -187,6 +188,10 @@ // sessions when other sessions are active. base::Timer discovery_session_timer_; + // Retain BluetoothGattConnection objects to keep connections open. + // TODO(scheib): Destroy as connections are closed. http://crbug.com/539643 + ScopedVector<device::BluetoothGattConnection> connections_; + // |weak_ptr_on_ui_thread_| provides weak pointers, e.g. for callbacks, and // because it exists and has been bound to the UI thread enforces that all // copies verify they are also used on the UI thread.
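The new connections_ member exists purely to own the BluetoothGattConnection objects, since a GATT link stays open only while its connection object is alive; clearing the container in set_adapter() drops every link tied to the old adapter. A small sketch of that ownership pattern, using std::unique_ptr in place of ScopedVector:

#include <memory>
#include <vector>

struct GattConnection {
  // Holds the platform connection open for as long as the object is alive.
};

class DispatcherHost {
 public:
  void OnConnectSuccess(std::unique_ptr<GattConnection> connection) {
    connections_.push_back(std::move(connection));  // keep the link open
  }

  void SetAdapter(/* new adapter */) {
    connections_.clear();  // old adapter's connections are no longer valid
  }

 private:
  std::vector<std::unique_ptr<GattConnection>> connections_;
};

int main() {
  DispatcherHost host;
  host.OnConnectSuccess(std::make_unique<GattConnection>());
  host.SetAdapter();  // drops the retained connection
  return 0;
}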
diff --git a/content/browser/indexed_db/indexed_db_backing_store.cc b/content/browser/indexed_db/indexed_db_backing_store.cc index 7eb73aa..e0db04b 100644 --- a/content/browser/indexed_db/indexed_db_backing_store.cc +++ b/content/browser/indexed_db/indexed_db_backing_store.cc
@@ -325,6 +325,8 @@ *known = true; return true; } + if (db_schema_version < 0) + return false; // Only corruption should cause this. if (db_schema_version > kLatestKnownSchemaVersion) { *known = false; return true; @@ -340,7 +342,8 @@ *known = true; return true; } - + if (db_data_version < 0) + return false; // Only corruption should cause this. if (db_data_version > latest_known_data_version) { *known = false; return true;
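The new checks treat a negative stored version as corruption rather than as an unknown-but-valid schema, so the open path fails instead of proceeding. A tiny sketch of that classification (the return shape is simplified from the bool-plus-out-parameter style used in the real code):

#include <cassert>
#include <cstdint>

enum class VersionStatus { kKnown, kUnknown, kCorrupt };

VersionStatus ClassifyVersion(int64_t stored, int64_t latest_known) {
  if (stored < 0)
    return VersionStatus::kCorrupt;  // only corruption should produce this
  if (stored > latest_known)
    return VersionStatus::kUnknown;  // written by a newer version of Chrome
  return VersionStatus::kKnown;
}

int main() {
  assert(ClassifyVersion(-1, 3) == VersionStatus::kCorrupt);
  assert(ClassifyVersion(5, 3) == VersionStatus::kUnknown);
  assert(ClassifyVersion(2, 3) == VersionStatus::kKnown);
  return 0;
}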
diff --git a/content/browser/media/android/browser_media_player_manager.cc b/content/browser/media/android/browser_media_player_manager.cc index 76c22c19..37e11209 100644 --- a/content/browser/media/android/browser_media_player_manager.cc +++ b/content/browser/media/android/browser_media_player_manager.cc
@@ -9,6 +9,7 @@ #include "content/browser/media/android/browser_demuxer_android.h" #include "content/browser/media/android/media_resource_getter_impl.h" #include "content/browser/media/android/media_session.h" +#include "content/browser/media/android/media_throttler.h" #include "content/browser/media/media_web_contents_observer.h" #include "content/browser/renderer_host/render_view_host_impl.h" #include "content/browser/web_contents/web_contents_view_android.h" @@ -143,16 +144,19 @@ user_agent, hide_url_log, this, - base::Bind(&BrowserMediaPlayerManager::OnMediaResourcesRequested, + base::Bind(&BrowserMediaPlayerManager::OnDecoderResourcesReleased, weak_ptr_factory_.GetWeakPtr()), media_player_params.frame_url, media_player_params.allow_credentials); ContentViewCoreImpl* content_view_core_impl = static_cast<ContentViewCoreImpl*>(ContentViewCore::FromWebContents( web_contents_)); - if (!content_view_core_impl) { - // May reach here due to prerendering. Don't extract the metadata - // since it is expensive. + if (!content_view_core_impl + // Initializing the player will cause MediaMetadataExtractor to decode + // small chunks of data. + || !RequestDecoderResources(media_player_params.player_id, true)) { + // May reach here due to prerendering or throttling. Don't extract the + // metadata since it is expensive. // TODO(qinmin): extract the metadata once the user decided to load // the page. OnMediaMetadataChanged( @@ -169,7 +173,7 @@ return new MediaCodecPlayer( media_player_params.player_id, weak_ptr_factory_.GetWeakPtr(), - base::Bind(&BrowserMediaPlayerManager::OnMediaResourcesRequested, + base::Bind(&BrowserMediaPlayerManager::OnDecoderResourcesReleased, weak_ptr_factory_.GetWeakPtr()), demuxer->CreateDemuxer(media_player_params.demuxer_client_id), media_player_params.frame_url); @@ -177,7 +181,7 @@ return new MediaSourcePlayer( media_player_params.player_id, this, - base::Bind(&BrowserMediaPlayerManager::OnMediaResourcesRequested, + base::Bind(&BrowserMediaPlayerManager::OnDecoderResourcesReleased, weak_ptr_factory_.GetWeakPtr()), demuxer->CreateDemuxer(media_player_params.demuxer_client_id), media_player_params.frame_url); @@ -319,8 +323,10 @@ void BrowserMediaPlayerManager::OnError(int player_id, int error) { Send(new MediaPlayerMsg_MediaError(RoutingID(), player_id, error)); - if (fullscreen_player_id_ == player_id) + if (fullscreen_player_id_ == player_id && + error != MediaPlayerAndroid::MEDIA_ERROR_INVALID_CODE) { video_view_->OnMediaPlayerError(error); + } } void BrowserMediaPlayerManager::OnVideoSizeChanged( @@ -475,6 +481,11 @@ base::Unretained(this))); } } + +void BrowserMediaPlayerManager::ReleaseExternalSurface(int player_id) { + if (external_video_surface_container_) + external_video_surface_container_->ReleaseExternalVideoSurface(player_id); +} #endif // defined(VIDEO_HOLE) void BrowserMediaPlayerManager::OnEnterFullscreen(int player_id) { @@ -533,10 +544,14 @@ MediaPlayerAndroid* player = GetPlayer(player_id); if (!player) return; - player->Start(); - if (fullscreen_player_id_ == player_id && fullscreen_player_is_released_) { - video_view_->OpenVideo(); - fullscreen_player_is_released_ = false; + + if (RequestDecoderResources(player_id, false)) { + StartInternal(player_id); + } else if (WebContentsDelegate* delegate = web_contents_->GetDelegate()) { + delegate->RequestMediaDecodePermission( + web_contents_, + base::Bind(&BrowserMediaPlayerManager::OnPlaybackPermissionGranted, + weak_ptr_factory_.GetWeakPtr(), player_id)); } } @@ -607,13 +622,16 @@ for
(ScopedVector<MediaPlayerAndroid>::iterator it = players_.begin(); it != players_.end(); ++it) { if ((*it)->player_id() == player_id) { - ReleaseMediaResources(player_id); +#if defined(VIDEO_HOLE) + ReleaseExternalSurface(player_id); +#endif (*it)->DeleteOnCorrectThread(); players_.weak_erase(it); MediaSession::Get(web_contents())->RemovePlayer(this, player_id); break; } } + active_players_.erase(player_id); } scoped_ptr<media::MediaPlayerAndroid> BrowserMediaPlayerManager::SwapPlayer( @@ -623,7 +641,9 @@ it != players_.end(); ++it) { if ((*it)->player_id() == player_id) { previous_player = *it; - ReleaseMediaResources(player_id); +#if defined(VIDEO_HOLE) + ReleaseExternalSurface(player_id); +#endif players_.weak_erase(it); players_.push_back(player); break; @@ -632,6 +652,52 @@ return scoped_ptr<media::MediaPlayerAndroid>(previous_player); } +bool BrowserMediaPlayerManager::RequestDecoderResources( + int player_id, bool temporary) { + if (!MediaThrottler::GetInstance()->RequestDecoderResources()) + return false; + + ActivePlayerMap::iterator it; + // The player is already active, ignore it. A long running player should not + // request temporary permissions. + if ((it = active_players_.find(player_id)) != active_players_.end()) { + DCHECK(!temporary || it->second); + return true; + } + + if (!temporary) { + int long_running_player = 0; + for (it = active_players_.begin(); it != active_players_.end(); ++it) { + if (!it->second) + long_running_player++; + } + + // Do nothing if the number of long running players is below the threshold. + if (long_running_player < kMediaPlayerThreshold) + return true; + + for (it = active_players_.begin(); it != active_players_.end(); ++it) { + if (!it->second && !GetPlayer(it->first)->IsPlaying() && + fullscreen_player_id_ != it->first) { + ReleasePlayer(GetPlayer(it->first)); + Send(new MediaPlayerMsg_MediaPlayerReleased(RoutingID(), + (it->first))); + } + } + } + + active_players_[player_id] = temporary; + return true; +} + +void BrowserMediaPlayerManager::OnDecoderResourcesReleased(int player_id) { + if (active_players_.find(player_id) == active_players_.end()) + return; + + active_players_.erase(player_id); + MediaThrottler::GetInstance()->OnDecodeRequestFinished(); +} + int BrowserMediaPlayerManager::RoutingID() { return render_frame_host_->GetRoutingID(); } @@ -645,44 +711,31 @@ ReleasePlayer(player); } -void BrowserMediaPlayerManager::OnMediaResourcesRequested(int player_id) { - int num_active_player = 0; - ScopedVector<MediaPlayerAndroid>::iterator it; - for (it = players_.begin(); it != players_.end(); ++it) { - if (!(*it)->IsPlayerReady()) - continue; - - // The player is already active, ignore it. - if ((*it)->player_id() == player_id) - return; - else - num_active_player++; - } - - // Number of active players are less than the threshold, do nothing.
- if (num_active_player < kMediaPlayerThreshold) - return; - - for (it = players_.begin(); it != players_.end(); ++it) { - if ((*it)->IsPlayerReady() && !(*it)->IsPlaying() && - fullscreen_player_id_ != (*it)->player_id()) { - ReleasePlayer(*it); - Send(new MediaPlayerMsg_MediaPlayerReleased(RoutingID(), - (*it)->player_id())); - } - } -} - -void BrowserMediaPlayerManager::ReleaseMediaResources(int player_id) { -#if defined(VIDEO_HOLE) - if (external_video_surface_container_) - external_video_surface_container_->ReleaseExternalVideoSurface(player_id); -#endif // defined(VIDEO_HOLE) -} - void BrowserMediaPlayerManager::ReleasePlayer(MediaPlayerAndroid* player) { player->Release(); - ReleaseMediaResources(player->player_id()); +#if defined(VIDEO_HOLE) + ReleaseExternalSurface(player->player_id()); +#endif +} + +void BrowserMediaPlayerManager::OnPlaybackPermissionGranted( + int player_id, bool granted) { + if (!granted) + return; + + MediaThrottler::GetInstance()->Reset(); + StartInternal(player_id); +} + +void BrowserMediaPlayerManager::StartInternal(int player_id) { + MediaPlayerAndroid* player = GetPlayer(player_id); + if (!player) + return; + player->Start(); + if (fullscreen_player_id_ == player_id && fullscreen_player_is_released_) { + video_view_->OpenVideo(); + fullscreen_player_is_released_ = false; + } } } // namespace content
diff --git a/content/browser/media/android/browser_media_player_manager.h b/content/browser/media/android/browser_media_player_manager.h index 9571ab1c..450e22d 100644 --- a/content/browser/media/android/browser_media_player_manager.h +++ b/content/browser/media/android/browser_media_player_manager.h
@@ -5,6 +5,8 @@ #ifndef CONTENT_BROWSER_MEDIA_ANDROID_BROWSER_MEDIA_PLAYER_MANAGER_H_ #define CONTENT_BROWSER_MEDIA_ANDROID_BROWSER_MEDIA_PLAYER_MANAGER_H_ +#include <map> + #include "base/basictypes.h" #include "base/callback.h" #include "base/memory/scoped_ptr.h" @@ -148,6 +150,21 @@ int player_id, media::MediaPlayerAndroid* player); + // Called to request decoder resources. Returns true if the request is + // permitted, or false otherwise. The manager maintains a list of active + // MediaPlayerAndroid objects and releases the inactive ones when needed. + // If |temporary| is true, the request is short lived and will not be + // cleaned up when handling other requests. In contrast, requests with + // |temporary| set to false are subject to cleanup when their players + // become idle. + virtual bool RequestDecoderResources(int player_id, bool temporary); + + // MediaPlayerAndroid must call this to inform the manager that it has + // released its decoder resources. This can be triggered by the + // ReleasePlayer() call below, when metadata extraction finishes, or when + // the player gets stuck in an error state. + virtual void OnDecoderResourcesReleased(int player_id); + int RoutingID(); // Helper function to send messages to RenderFrameObserver. @@ -160,22 +177,20 @@ bool hide_url_log, BrowserDemuxerAndroid* demuxer); - // MediaPlayerAndroid must call this before it is going to decode - // media streams. This helps the manager object maintain an array - // of active MediaPlayerAndroid objects and release the resources - // when needed. Currently we only count video resources as they are - // constrained by hardware and memory limits. - virtual void OnMediaResourcesRequested(int player_id); - - // Called when a player releases all decoding resources. - void ReleaseMediaResources(int player_id); - - // Releases the player. However, don't remove it from |players_|. + // Instructs |player| to release its Java player. This will not remove the + // player from |players_|. void ReleasePlayer(media::MediaPlayerAndroid* player); + // Called when the user approves media playback after it was throttled. + void OnPlaybackPermissionGranted(int player_id, bool granted); + + // Helper method to start playback. + void StartInternal(int player_id); + #if defined(VIDEO_HOLE) void ReleasePlayerOfExternalVideoSurfaceIfNeeded(int future_player); void OnRequestExternalSurface(int player_id, const gfx::RectF& rect); + void ReleaseExternalSurface(int player_id); #endif // defined(VIDEO_HOLE) RenderFrameHost* const render_frame_host_; @@ -183,6 +198,12 @@ // An array of managed players. ScopedVector<media::MediaPlayerAndroid> players_; + typedef std::map<int, bool> ActivePlayerMap; + // Players that have requested decoding resources. Even though resources + // have been requested, a player may be in a paused or error state, and + // the manager will release its resources later. + ActivePlayerMap active_players_; + // The fullscreen video view object or NULL if video is not played in // fullscreen. scoped_ptr<ContentVideoView> video_view_;
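The new RequestDecoderResources()/OnDecoderResourcesReleased() pair documented above boils down to a player_id -> |temporary| map plus a reclaim pass over long running entries once a threshold is hit. A minimal, self-contained sketch of that bookkeeping follows; it is illustrative only and not part of the CL, with the MediaThrottler check and the IPC release abstracted into caller-supplied callbacks.

    // Illustrative only: player_id -> |temporary| bookkeeping with the same
    // reclaim rule as RequestDecoderResources() above.
    #include <functional>
    #include <map>

    class ActivePlayerTracker {
     public:
      bool Request(int player_id,
                   bool temporary,
                   int long_running_threshold,
                   const std::function<bool(int)>& is_idle,
                   const std::function<void(int)>& release_player) {
        if (players_.count(player_id))
          return true;  // Already tracked; a re-request changes nothing.
        if (!temporary) {
          int long_running = 0;
          for (const auto& entry : players_) {
            if (!entry.second)
              ++long_running;
          }
          // Only reclaim idle long running players once the threshold is hit.
          if (long_running >= long_running_threshold) {
            for (const auto& entry : players_) {
              if (!entry.second && is_idle(entry.first))
                release_player(entry.first);
            }
          }
        }
        players_[player_id] = temporary;
        return true;
      }

      // Mirrors OnDecoderResourcesReleased(): forget the player once its
      // decoder resources are gone.
      void OnReleased(int player_id) { players_.erase(player_id); }

     private:
      std::map<int, bool> players_;  // player_id -> |temporary| flag.
    };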
diff --git a/content/browser/media/android/media_throttler.cc b/content/browser/media/android/media_throttler.cc new file mode 100644 index 0000000..04c8030 --- /dev/null +++ b/content/browser/media/android/media_throttler.cc
@@ -0,0 +1,48 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "content/browser/media/android/media_throttler.h" + +#include "base/android/jni_android.h" +#include "jni/MediaThrottler_jni.h" + +namespace content { + +// static +MediaThrottler* MediaThrottler::GetInstance() { + return base::Singleton<MediaThrottler>::get(); +} + +// static +bool MediaThrottler::RegisterMediaThrottler(JNIEnv* env) { + return RegisterNativesImpl(env); +} + +MediaThrottler::~MediaThrottler() {} + +bool MediaThrottler::RequestDecoderResources() { + JNIEnv* env = base::android::AttachCurrentThread(); + return Java_MediaThrottler_requestDecoderResources( + env, j_media_throttler_.obj()); +} + +void MediaThrottler::OnDecodeRequestFinished() { + JNIEnv* env = base::android::AttachCurrentThread(); + Java_MediaThrottler_onDecodeRequestFinished(env, j_media_throttler_.obj()); +} + +void MediaThrottler::Reset() { + JNIEnv* env = base::android::AttachCurrentThread(); + Java_MediaThrottler_reset(env, j_media_throttler_.obj()); +} + +MediaThrottler::MediaThrottler() { + JNIEnv* env = base::android::AttachCurrentThread(); + CHECK(env); + + j_media_throttler_.Reset(Java_MediaThrottler_create( + env, base::android::GetApplicationContext())); +} + +} // namespace content
diff --git a/content/browser/media/android/media_throttler.h b/content/browser/media/android/media_throttler.h new file mode 100644 index 0000000..002dcd77 --- /dev/null +++ b/content/browser/media/android/media_throttler.h
@@ -0,0 +1,45 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef CONTENT_BROWSER_MEDIA_ANDROID_MEDIA_THROTTLER_H_ +#define CONTENT_BROWSER_MEDIA_ANDROID_MEDIA_THROTTLER_H_ + +#include <jni.h> + +#include "base/android/scoped_java_ref.h" +#include "base/memory/singleton.h" + +namespace content { + +class MediaThrottler { + public: + // Called to get the singleton MediaThrottler instance. + static MediaThrottler* GetInstance(); + + // JNI registration. + static bool RegisterMediaThrottler(JNIEnv* env); + + virtual ~MediaThrottler(); + + // Called to request the permission to decode media data. Returns true if + // permitted, or false otherwise. + bool RequestDecoderResources(); + + // Called when a decode request finishes. + void OnDecodeRequestFinished(); + + // Resets the throttler to a fresh state. + void Reset(); + + private: + friend struct base::DefaultSingletonTraits<MediaThrottler>; + MediaThrottler(); + + base::android::ScopedJavaGlobalRef<jobject> j_media_throttler_; + DISALLOW_COPY_AND_ASSIGN(MediaThrottler); +}; + +} // namespace content + +#endif // CONTENT_BROWSER_MEDIA_ANDROID_MEDIA_THROTTLER_H_
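A hypothetical call-site sketch for the new singleton is shown below; the browser_media_player_manager.cc and render_message_filter.cc hunks in this CL follow the same request/finish shape. DecodeSomething() is a placeholder, not a real function.

    #include "content/browser/media/android/media_throttler.h"

    void DecodeSomething();  // Placeholder for the actual decode work.

    void MaybeDecode() {
      content::MediaThrottler* throttler =
          content::MediaThrottler::GetInstance();
      // Denied when the media server has crashed repeatedly in a short window.
      if (!throttler->RequestDecoderResources())
        return;
      DecodeSomething();
      // Every granted request must be balanced so the Java-side watch dog
      // player can be released once the outstanding request count drops to
      // zero.
      throttler->OnDecodeRequestFinished();
    }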
diff --git a/content/browser/renderer_host/compositor_impl_android.cc b/content/browser/renderer_host/compositor_impl_android.cc index 1b2eb07..4920cf2e9 100644 --- a/content/browser/renderer_host/compositor_impl_android.cc +++ b/content/browser/renderer_host/compositor_impl_android.cc
@@ -473,9 +473,8 @@ params.main_task_runner = base::ThreadTaskRunnerHandle::Get(); params.settings = &settings; host_ = cc::LayerTreeHost::CreateSingleThreaded(this, ¶ms); - host_->SetVisible(false); + DCHECK(!host_->visible()); host_->SetRootLayer(root_layer_); - host_->SetLayerTreeHostClientReady(); host_->SetViewportSize(size_); host_->set_has_transparent_background(has_transparent_background_); host_->SetDeviceScaleFactor(device_scale_factor_);
diff --git a/content/browser/renderer_host/render_message_filter.cc b/content/browser/renderer_host/render_message_filter.cc index d598288..efe1ce6 100644 --- a/content/browser/renderer_host/render_message_filter.cc +++ b/content/browser/renderer_host/render_message_filter.cc
@@ -73,6 +73,7 @@ #endif #if defined(OS_ANDROID) +#include "content/browser/media/android/media_throttler.h" #include "media/base/android/webaudio_media_codec_bridge.h" #endif @@ -97,6 +98,13 @@ LAZY_INSTANCE_INITIALIZER; #endif +#if defined(OS_ANDROID) +void CloseWebAudioFileDescriptor(int fd) { + if (close(fd)) + VLOG(1) << "Couldn't close output webaudio fd: " << strerror(errno); +} +#endif + } // namespace RenderMessageFilter::RenderMessageFilter( @@ -640,14 +648,24 @@ base::SharedMemoryHandle encoded_data_handle, base::FileDescriptor pcm_output, uint32_t data_size) { - // Let a WorkerPool handle this request since the WebAudio - // MediaCodec bridge is slow and can block while sending the data to - // the renderer. - base::WorkerPool::PostTask( - FROM_HERE, - base::Bind(&media::WebAudioMediaCodecBridge::RunWebAudioMediaCodec, - encoded_data_handle, pcm_output, data_size), - true); + if (!MediaThrottler::GetInstance()->RequestDecoderResources()) { + base::WorkerPool::PostTask( + FROM_HERE, + base::Bind(&CloseWebAudioFileDescriptor, pcm_output.fd), + true); + VLOG(1) << "Cannot decode audio data due to throttling"; + } else { + // Let a WorkerPool handle this request since the WebAudio + // MediaCodec bridge is slow and can block while sending the data to + // the renderer. + base::WorkerPool::PostTask( + FROM_HERE, + base::Bind(&media::WebAudioMediaCodecBridge::RunWebAudioMediaCodec, + encoded_data_handle, pcm_output, data_size, + base::Bind(&MediaThrottler::OnDecodeRequestFinished, + base::Unretained(MediaThrottler::GetInstance()))), + true); + } } #endif
diff --git a/content/browser/service_worker/embedded_worker_instance.cc b/content/browser/service_worker/embedded_worker_instance.cc index 8cfa8bd..6bcafe2 100644 --- a/content/browser/service_worker/embedded_worker_instance.cc +++ b/content/browser/service_worker/embedded_worker_instance.cc
@@ -74,10 +74,11 @@ base::Bind(callback, worker_devtools_agent_route_id, wait_for_debugger)); } -void SetupMojoOnUIThread(int process_id, - int thread_id, - mojo::InterfaceRequest<mojo::ServiceProvider> services, - mojo::ServiceProviderPtr exposed_services) { +void SetupMojoOnUIThread( + int process_id, + int thread_id, + mojo::InterfaceRequest<mojo::ServiceProvider> services, + mojo::InterfacePtrInfo<mojo::ServiceProvider> exposed_services) { RenderProcessHost* rph = RenderProcessHost::FromID(process_id); // |rph| may be NULL in unit tests. if (!rph) @@ -85,7 +86,7 @@ EmbeddedWorkerSetupPtr setup; rph->GetServiceRegistry()->ConnectToRemoteService(mojo::GetProxy(&setup)); setup->ExchangeServiceProviders(thread_id, services.Pass(), - exposed_services.Pass()); + mojo::MakeProxy(exposed_services.Pass())); } } // namespace @@ -390,7 +391,7 @@ BrowserThread::UI, FROM_HERE, base::Bind(SetupMojoOnUIThread, process_id_, thread_id_, base::Passed(&services_request), - base::Passed(&exposed_services))); + base::Passed(exposed_services.PassInterface()))); service_registry_->BindRemoteServiceProvider(services.Pass()); }
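The two hunks above switch the cross-thread hop from passing a bound mojo::ServiceProviderPtr to passing its unbound mojo::InterfacePtrInfo, because an InterfacePtr is tied to the thread it was bound on. Below is a hedged sketch of the same pattern using the APIs this CL itself relies on (PassInterface(), mojo::MakeProxy(), base::Passed()); Frobber/FrobberPtr stand in for a hypothetical generated mojom interface, and the mojo/generated includes are elided.

    #include "base/bind.h"
    #include "base/location.h"
    #include "base/single_thread_task_runner.h"
    // (mojo bindings and generated Frobber headers elided; sketch only)

    void UseOnDestinationThread(mojo::InterfacePtrInfo<Frobber> info) {
      // Re-bind the pipe on the thread that will actually issue calls.
      FrobberPtr frobber = mojo::MakeProxy(info.Pass());
      frobber->DoSomething();
    }

    void HopThreads(scoped_refptr<base::SingleThreadTaskRunner> destination,
                    FrobberPtr frobber) {
      // Unbind before posting; a bound InterfacePtr must not cross threads.
      destination->PostTask(
          FROM_HERE, base::Bind(&UseOnDestinationThread,
                                base::Passed(frobber.PassInterface())));
    }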
diff --git a/content/browser/tracing/background_tracing_manager_browsertest.cc b/content/browser/tracing/background_tracing_manager_browsertest.cc index c8d9b81..959237f4 100644 --- a/content/browser/tracing/background_tracing_manager_browsertest.cc +++ b/content/browser/tracing/background_tracing_manager_browsertest.cc
@@ -270,6 +270,47 @@ EXPECT_TRUE(!upload_config_wrapper.TraceHasMatchingString("this_not_found")); } +// This tests that browser metadata gets included in the trace. +IN_PROC_BROWSER_TEST_F(BackgroundTracingManagerBrowserTest, + TraceMetadataInTrace) { + SetupBackgroundTracingManager(); + + base::trace_event::TraceLog::GetInstance()->SetArgumentFilterPredicate( + base::Bind(&IsTraceEventArgsWhitelisted)); + + base::RunLoop wait_for_upload; + BackgroundTracingManagerUploadConfigWrapper upload_config_wrapper( + wait_for_upload.QuitClosure()); + + scoped_ptr<BackgroundTracingConfig> config = CreatePreemptiveConfig(); + + content::BackgroundTracingManager::TriggerHandle handle = + content::BackgroundTracingManager::GetInstance()->RegisterTriggerType( + "preemptive_test"); + + base::RunLoop wait_for_activated; + BackgroundTracingManager::GetInstance()->SetTracingEnabledCallbackForTesting( + wait_for_activated.QuitClosure()); + EXPECT_TRUE(BackgroundTracingManager::GetInstance()->SetActiveScenario( + config.Pass(), upload_config_wrapper.get_receive_callback(), + BackgroundTracingManager::ANONYMIZE_DATA)); + + wait_for_activated.Run(); + + BackgroundTracingManager::GetInstance()->WhenIdle( + base::Bind(&DisableScenarioWhenIdle)); + + BackgroundTracingManager::GetInstance()->TriggerNamedEvent( + handle, base::Bind(&StartedFinalizingCallback, base::Closure(), true)); + + wait_for_upload.Run(); + + EXPECT_TRUE(upload_config_wrapper.get_receive_count() == 1); + EXPECT_TRUE(upload_config_wrapper.TraceHasMatchingString("cpu-brand")); + EXPECT_TRUE(upload_config_wrapper.TraceHasMatchingString("network-type")); + EXPECT_TRUE(upload_config_wrapper.TraceHasMatchingString("user-agent")); +} + // This tests subprocesses (like a navigating renderer) which gets told to // provide a argument-filtered trace and has no predicate in place to do the // filtering (in this case, only the browser process gets it set), will crash
diff --git a/content/browser/tracing/background_tracing_manager_impl.cc b/content/browser/tracing/background_tracing_manager_impl.cc index 3985a24..e2685cc1 100644 --- a/content/browser/tracing/background_tracing_manager_impl.cc +++ b/content/browser/tracing/background_tracing_manager_impl.cc
@@ -396,6 +396,7 @@ metadata_dict->Set("config", config_dict.Pass()); metadata_dict->SetString("network-type", network_type); metadata_dict->SetString("product-version", product_version); + metadata_dict->SetString("user-agent", GetContentClient()->GetUserAgent()); // OS metadata_dict->SetString("os-name", base::SysInfo::OperatingSystemName()); @@ -440,6 +441,9 @@ metadata_dict->SetString("gpu-gl-renderer", gpu_info.gl_renderer); #endif + if (delegate_) + delegate_->GenerateMetadataDict(metadata_dict.get()); + return metadata_dict.Pass(); }
diff --git a/content/child/child_thread_impl.cc b/content/child/child_thread_impl.cc index 782c5d5..89231ab 100644 --- a/content/child/child_thread_impl.cc +++ b/content/child/child_thread_impl.cc
@@ -72,10 +72,6 @@ #include "ui/ozone/public/client_native_pixmap_factory.h" #endif -#if defined(OS_WIN) -#include "ipc/attachment_broker_unprivileged_win.h" -#endif - using tracked_objects::ThreadData; namespace content { @@ -385,8 +381,8 @@ // The only reason a global would already exist is if the thread is being run // in the browser process because of a command line switch. if (!IPC::AttachmentBroker::GetGlobal()) { - attachment_broker_.reset(new IPC::AttachmentBrokerUnprivilegedWin()); - IPC::AttachmentBroker::SetGlobal(attachment_broker_.get()); + attachment_broker_.reset( + IPC::AttachmentBrokerUnprivileged::CreateBroker().release()); } #endif
diff --git a/content/common/child_process_host_impl.cc b/content/common/child_process_host_impl.cc index dcc52ac99..cc0fc54 100644 --- a/content/common/child_process_host_impl.cc +++ b/content/common/child_process_host_impl.cc
@@ -33,7 +33,6 @@ #include "base/linux_util.h" #elif defined(OS_WIN) #include "content/common/font_cache_dispatcher_win.h" -#include "ipc/attachment_broker_privileged_win.h" #endif // OS_LINUX namespace { @@ -45,15 +44,16 @@ class AttachmentBrokerWrapper { public: AttachmentBrokerWrapper() { - IPC::AttachmentBroker::SetGlobal(&attachment_broker_); + attachment_broker_.reset( + IPC::AttachmentBrokerPrivileged::CreateBroker().release()); } IPC::AttachmentBrokerPrivileged* GetAttachmentBroker() { - return &attachment_broker_; + return attachment_broker_.get(); } private: - IPC::AttachmentBrokerPrivilegedWin attachment_broker_; + scoped_ptr<IPC::AttachmentBrokerPrivileged> attachment_broker_; }; base::LazyInstance<AttachmentBrokerWrapper>::Leaky
diff --git a/content/common/gpu/media/android_copying_backing_strategy.cc b/content/common/gpu/media/android_copying_backing_strategy.cc index 7118335..b0a37fe 100644 --- a/content/common/gpu/media/android_copying_backing_strategy.cc +++ b/content/common/gpu/media/android_copying_backing_strategy.cc
@@ -29,7 +29,8 @@ 0.0f, 0.0f, 0.0f, 1.0f}; AndroidCopyingBackingStrategy::AndroidCopyingBackingStrategy() - : state_provider_(nullptr) {} + : state_provider_(nullptr) + , surface_texture_id_(0) {} AndroidCopyingBackingStrategy::~AndroidCopyingBackingStrategy() {} @@ -42,6 +43,9 @@ DCHECK(state_provider_->ThreadChecker().CalledOnValidThread()); if (copier_) copier_->Destroy(); + + if (surface_texture_id_) + glDeleteTextures(1, &surface_texture_id_); } uint32 AndroidCopyingBackingStrategy::GetNumPictureBuffers() const { @@ -52,7 +56,25 @@ return GL_TEXTURE_2D; } -void AndroidCopyingBackingStrategy::AssignCurrentSurfaceToPictureBuffer( +scoped_refptr<gfx::SurfaceTexture> +AndroidCopyingBackingStrategy::CreateSurfaceTexture() { + glGenTextures(1, &surface_texture_id_); + glActiveTexture(GL_TEXTURE0); + glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); + + glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + state_provider_->GetGlDecoder()->RestoreTextureUnitBindings(0); + state_provider_->GetGlDecoder()->RestoreActiveTexture(); + + surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_); + + return surface_texture_; +} + +void AndroidCopyingBackingStrategy::UseCodecBufferForPictureBuffer( int32 codec_buf_index, const media::PictureBuffer& picture_buffer) { // Make sure that the decoder is available. @@ -83,14 +105,13 @@ true); } - gfx::SurfaceTexture* surface_texture = state_provider_->GetSurfaceTexture(); { TRACE_EVENT0("media", "AVDA::UpdateTexImage"); - surface_texture->UpdateTexImage(); + surface_texture_->UpdateTexImage(); } float transfrom_matrix[16]; - surface_texture->GetTransformMatrix(transfrom_matrix); + surface_texture_->GetTransformMatrix(transfrom_matrix); uint32 picture_buffer_texture_id = picture_buffer.texture_id(); @@ -112,7 +133,7 @@ // instead of using default matrix crbug.com/226218. copier_->DoCopyTextureWithTransform( state_provider_->GetGlDecoder(), GL_TEXTURE_EXTERNAL_OES, - state_provider_->GetSurfaceTextureId(), picture_buffer_texture_id, + surface_texture_id_, picture_buffer_texture_id, state_provider_->GetSize().width(), state_provider_->GetSize().height(), false, false, false, kIdentityMatrix); }
diff --git a/content/common/gpu/media/android_copying_backing_strategy.h b/content/common/gpu/media/android_copying_backing_strategy.h index 354dcfc..7b2fa659 100644 --- a/content/common/gpu/media/android_copying_backing_strategy.h +++ b/content/common/gpu/media/android_copying_backing_strategy.h
@@ -35,15 +35,21 @@ void Cleanup() override; uint32 GetNumPictureBuffers() const override; uint32 GetTextureTarget() const override; - void AssignCurrentSurfaceToPictureBuffer( - int32 codec_buffer_index, - const media::PictureBuffer&) override; + scoped_refptr<gfx::SurfaceTexture> CreateSurfaceTexture() override; + void UseCodecBufferForPictureBuffer(int32 codec_buffer_index, + const media::PictureBuffer&) override; private: // Used for copy the texture from surface texture to picture buffers. scoped_ptr<gpu::CopyTextureCHROMIUMResourceManager> copier_; AndroidVideoDecodeAcceleratorStateProvider* state_provider_; + + // A container of texture. Used to set a texture to |media_codec_|. + scoped_refptr<gfx::SurfaceTexture> surface_texture_; + + // The texture id which is set to |surface_texture_|. + uint32 surface_texture_id_; }; } // namespace content
diff --git a/content/common/gpu/media/android_video_decode_accelerator.cc b/content/common/gpu/media/android_video_decode_accelerator.cc index f033282..e0262ad 100644 --- a/content/common/gpu/media/android_video_decode_accelerator.cc +++ b/content/common/gpu/media/android_video_decode_accelerator.cc
@@ -77,7 +77,6 @@ make_context_current_(make_context_current), codec_(media::kCodecH264), state_(NO_ERROR), - surface_texture_id_(0), picturebuffers_requested_(false), gl_decoder_(decoder), strategy_(strategy.Pass()), @@ -128,20 +127,7 @@ LOG(ERROR) << "Failed to get gles2 decoder instance."; return false; } - glGenTextures(1, &surface_texture_id_); - glActiveTexture(GL_TEXTURE0); - glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); - - glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); - glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameteri(GL_TEXTURE_EXTERNAL_OES, - GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_EXTERNAL_OES, - GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - gl_decoder_->RestoreTextureUnitBindings(0); - gl_decoder_->RestoreActiveTexture(); - - surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_); + surface_texture_ = strategy_->CreateSurfaceTexture(); if (!ConfigureMediaCodec()) { LOG(ERROR) << "Failed to create MediaCodec instance."; @@ -358,7 +344,7 @@ // Connect the PictureBuffer to the decoded frame, via whatever // mechanism the strategy likes. - strategy_->AssignCurrentSurfaceToPictureBuffer(codec_buffer_index, i->second); + strategy_->UseCodecBufferForPictureBuffer(codec_buffer_index, i->second); // TODO(henryhsu): Pass (0, 0) as visible size will cause several test // cases failed. We should make sure |size_| is coded size or visible size. @@ -513,15 +499,12 @@ void AndroidVideoDecodeAccelerator::Destroy() { DCHECK(thread_checker_.CalledOnValidThread()); - strategy_->Cleanup(); - weak_this_factory_.InvalidateWeakPtrs(); if (media_codec_) { io_timer_.Stop(); media_codec_->Stop(); } - if (surface_texture_id_) - glDeleteTextures(1, &surface_texture_id_); + strategy_->Cleanup(); delete this; } @@ -538,14 +521,6 @@ return thread_checker_; } -gfx::SurfaceTexture* AndroidVideoDecodeAccelerator::GetSurfaceTexture() const { - return surface_texture_.get(); -} - -uint32 AndroidVideoDecodeAccelerator::GetSurfaceTextureId() const { - return surface_texture_id_; -} - gpu::gles2::GLES2Decoder* AndroidVideoDecodeAccelerator::GetGlDecoder() const { return gl_decoder_.get(); }
diff --git a/content/common/gpu/media/android_video_decode_accelerator.h b/content/common/gpu/media/android_video_decode_accelerator.h index fec0102b..39d8425 100644 --- a/content/common/gpu/media/android_video_decode_accelerator.h +++ b/content/common/gpu/media/android_video_decode_accelerator.h
@@ -55,8 +55,12 @@ // Return the GL texture target that the PictureBuffer textures use. virtual uint32 GetTextureTarget() const = 0; - // Use the provided PictureBuffer to hold the current surface. - virtual void AssignCurrentSurfaceToPictureBuffer( + // Create and return a surface texture for the MediaCodec to use. + virtual scoped_refptr<gfx::SurfaceTexture> CreateSurfaceTexture() = 0; + + // Make the provided PictureBuffer draw the image that is represented by + // the decoded output buffer at codec_buffer_index. + virtual void UseCodecBufferForPictureBuffer( int32 codec_buffer_index, const media::PictureBuffer&) = 0; }; @@ -82,8 +86,6 @@ // AndroidVideoDecodeStateProvider const gfx::Size& GetSize() const override; const base::ThreadChecker& ThreadChecker() const override; - gfx::SurfaceTexture* GetSurfaceTexture() const override; - uint32 GetSurfaceTextureId() const override; gpu::gles2::GLES2Decoder* GetGlDecoder() const override; media::VideoCodecBridge* GetMediaCodec() override; void PostError(const ::tracked_objects::Location& from_here, @@ -174,9 +176,6 @@ // A container of texture. Used to set a texture to |media_codec_|. scoped_refptr<gfx::SurfaceTexture> surface_texture_; - // The texture id which is set to |surface_texture_|. - uint32 surface_texture_id_; - // Set to true after requesting picture buffers to the client. bool picturebuffers_requested_;
diff --git a/content/common/gpu/media/android_video_decode_accelerator_state_provider.h b/content/common/gpu/media/android_video_decode_accelerator_state_provider.h index 8e10ac4..e97da7e 100644 --- a/content/common/gpu/media/android_video_decode_accelerator_state_provider.h +++ b/content/common/gpu/media/android_video_decode_accelerator_state_provider.h
@@ -35,8 +35,6 @@ // Various handy getters. virtual const gfx::Size& GetSize() const = 0; virtual const base::ThreadChecker& ThreadChecker() const = 0; - virtual gfx::SurfaceTexture* GetSurfaceTexture() const = 0; - virtual uint32 GetSurfaceTextureId() const = 0; virtual gpu::gles2::GLES2Decoder* GetGlDecoder() const = 0; virtual media::VideoCodecBridge* GetMediaCodec() = 0;
diff --git a/content/common/sandbox_win.cc b/content/common/sandbox_win.cc index 5860239..acc71de4 100644 --- a/content/common/sandbox_win.cc +++ b/content/common/sandbox_win.cc
@@ -104,6 +104,8 @@ L"radprlib.dll", // Radiant Naomi Internet Filter. L"rapportnikko.dll", // Trustware Rapport. L"rlhook.dll", // Trustware Bufferzone. + L"rooksbas.dll", // IBM Trusteer Rapport. + L"rooksbas_x64.dll", // IBM Trusteer Rapport. L"rooksdol.dll", // Trustware Rapport. L"rndlpepperbrowserrecordhelper.dll", // RealPlayer. L"rpchromebrowserrecordhelper.dll", // RealPlayer. @@ -224,7 +226,7 @@ // Returns the object path prepended with the current logon session. base::string16 PrependWindowsSessionPath(const base::char16* object) { // Cache this because it can't change after process creation. - static uintptr_t s_session_id = 0; + static DWORD s_session_id = 0; if (s_session_id == 0) { HANDLE token; DWORD session_id_length; @@ -238,7 +240,7 @@ s_session_id = session_id; } - return base::StringPrintf(L"\\Sessions\\%d%ls", s_session_id, object); + return base::StringPrintf(L"\\Sessions\\%lu%ls", s_session_id, object); } // Checks if the sandbox should be let to run without a job object assigned.
diff --git a/content/content_browser.gypi b/content/content_browser.gypi index 1f70d50c..a2b2be9 100644 --- a/content/content_browser.gypi +++ b/content/content_browser.gypi
@@ -408,6 +408,7 @@ 'browser/android/interstitial_page_delegate_android.h', 'browser/android/load_url_params.cc', 'browser/android/load_url_params.h', + 'public/browser/android/download_controller_android.h', 'browser/android/overscroll_controller_android.cc', 'browser/android/overscroll_controller_android.h', 'browser/android/overscroll_glow.cc', @@ -1039,6 +1040,8 @@ 'browser/media/android/media_session.cc', 'browser/media/android/media_session.h', 'browser/media/android/media_session_observer.h', + 'browser/media/android/media_throttler.cc', + 'browser/media/android/media_throttler.h', 'browser/media/audio_stream_monitor.cc', 'browser/media/audio_stream_monitor.h', 'browser/media/capture/audio_mirroring_manager.cc',
diff --git a/content/content_jni.gypi b/content/content_jni.gypi index 3a154d4..2d465d5 100644 --- a/content/content_jni.gypi +++ b/content/content_jni.gypi
@@ -32,6 +32,7 @@ 'public/android/java/src/org/chromium/content/browser/MediaDrmCredentialManager.java', 'public/android/java/src/org/chromium/content/browser/MediaSession.java', 'public/android/java/src/org/chromium/content/browser/MediaResourceGetter.java', + 'public/android/java/src/org/chromium/content/browser/MediaThrottler.java', 'public/android/java/src/org/chromium/content/browser/MotionEventSynthesizer.java', 'public/android/java/src/org/chromium/content/browser/PowerSaveBlocker.java', 'public/android/java/src/org/chromium/content/browser/ServiceRegistrar.java',
diff --git a/content/public/android/java/res/raw/empty.wav b/content/public/android/java/res/raw/empty.wav new file mode 100644 index 0000000..6181d58 --- /dev/null +++ b/content/public/android/java/res/raw/empty.wav Binary files differ
diff --git a/content/public/android/java/src/org/chromium/content/browser/MediaThrottler.java b/content/public/android/java/src/org/chromium/content/browser/MediaThrottler.java new file mode 100644 index 0000000..5ab6d53 --- /dev/null +++ b/content/public/android/java/src/org/chromium/content/browser/MediaThrottler.java
@@ -0,0 +1,204 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.content.browser; + +import android.content.Context; +import android.media.MediaPlayer; +import android.os.AsyncTask; +import android.os.Handler; +import android.os.Looper; +import android.os.SystemClock; + +import org.chromium.base.Log; +import org.chromium.base.annotations.CalledByNative; +import org.chromium.base.annotations.JNINamespace; +import org.chromium.content.R; + +/** + * Class for listening to Android MediaServer crashes to throttle media decoding + * when needed. + */ +@JNINamespace("content") +class MediaThrottler implements MediaPlayer.OnErrorListener { + private static final String TAG = "cr_MediaThrottler"; + private static final long UNKNOWN_LAST_SERVER_CRASH_TIME = -1; + + // Number of active decode requests. + private int mRequestCount; + + // Application context. + private final Context mContext; + + // Watch dog player. Used to listen to all media server crashes. + private MediaPlayer mPlayer; + + // The last media server crash time since Chrome launches. + private long mLastCrashTime = UNKNOWN_LAST_SERVER_CRASH_TIME; + + // Server crash count since last reset() call. + private int mServerCrashCount; + + // Object for synchronized access to member variables. + private final Object mLock = new Object(); + + // Handler for posting delayed tasks. + private Handler mHandler; + + // Intervals between media server crashes that are considered normal. It + // takes about 5 seconds to restart the media server. So this value has to + // be larger than 5 seconds. + private static final long SERVER_CRASH_INTERVAL_THRESHOLD_IN_MILLIS = 60000; + + // Delay to keep the watch dog player alive when there are no decoding + // requests. This is introduced to avoid recreating the watch dog over and + // over if a burst of small decoding requests arrives. + private static final int RELEASE_WATCH_DOG_PLAYER_DELAY_IN_MILLIS = 5000; + + // When |mServerCrashCount| reaches this threshold, throttling will start. + // This is to prevent a page from loading a malformed video over and over + // to crash the media server excessively. + private static final int SERVER_CRASH_COUNT_THRESHOLD_FOR_THROTTLING = 4; + + /** + * A background task to release the watch dog player. + */ + private class ReleaseWatchDogTask extends AsyncTask<Void, Void, Void> { + @Override + protected Void doInBackground(Void... voids) { + synchronized (mLock) { + if (mRequestCount == 0 && mPlayer != null) { + mPlayer.release(); + mPlayer = null; + } + } + return null; + } + } + + private final Runnable mDelayedReleaseRunnable = new Runnable() { + @Override + public void run() { + new ReleaseWatchDogTask().execute(); + } + }; + + @CalledByNative + private static MediaThrottler create(Context context) { + return new MediaThrottler(context); + } + + private MediaThrottler(Context context) { + mContext = context; + mHandler = new Handler(Looper.getMainLooper()); + } + + /** + * A background task to start the watch dog player. + */ + private class StartWatchDogTask extends AsyncTask<Void, Void, Void> { + @Override + protected Void doInBackground(Void...
voids) { + synchronized (mLock) { + if (mPlayer != null || mRequestCount == 0) return null; + mPlayer = MediaPlayer.create(mContext, R.raw.empty); + if (mPlayer == null) { + Log.e(TAG, "Unable to create watch dog player, treat it as server crash."); + onMediaServerCrash(); + } else { + mPlayer.setOnErrorListener(MediaThrottler.this); + } + } + return null; + } + } + + /** + * Called to request the permission to decode media data. + * + * @return true if the request is permitted, or false otherwise. + */ + @CalledByNative + private boolean requestDecoderResources() { + synchronized (mLock) { + long currentTime = SystemClock.elapsedRealtime(); + if (mLastCrashTime != UNKNOWN_LAST_SERVER_CRASH_TIME + && (currentTime - mLastCrashTime < SERVER_CRASH_INTERVAL_THRESHOLD_IN_MILLIS) + && mServerCrashCount >= SERVER_CRASH_COUNT_THRESHOLD_FOR_THROTTLING) { + Log.e(TAG, "Request to decode media data denied due to throttling."); + return false; + } + mRequestCount++; + if (mRequestCount == 1) { + mHandler.removeCallbacks(mDelayedReleaseRunnable); + mHandler.post(new Runnable() { + @Override + public void run() { + new StartWatchDogTask().execute(); + } + }); + } + } + return true; + } + + /** + * Called to signal that a decode request has been completed. + */ + @CalledByNative + private void onDecodeRequestFinished() { + synchronized (mLock) { + mRequestCount--; + if (mRequestCount == 0) { + // Don't release the watch dog immediately, there could be a + // number of small requests coming together. + prepareToStopWatchDog(); + } + } + } + + /** + * Posts a delayed task to stop the watch dog player. + */ + private void prepareToStopWatchDog() { + mHandler.postDelayed(mDelayedReleaseRunnable, RELEASE_WATCH_DOG_PLAYER_DELAY_IN_MILLIS); + } + + @Override + public boolean onError(MediaPlayer mp, int what, int extra) { + if (what == MediaPlayer.MEDIA_ERROR_SERVER_DIED) { + synchronized (mLock) { + onMediaServerCrash(); + } + } + return true; + } + + /** + * Called when media server crashes. + */ + private void onMediaServerCrash() { + assert Thread.holdsLock(mLock); + long currentTime = SystemClock.elapsedRealtime(); + if (mLastCrashTime != UNKNOWN_LAST_SERVER_CRASH_TIME + && (currentTime - mLastCrashTime < SERVER_CRASH_INTERVAL_THRESHOLD_IN_MILLIS)) { + mServerCrashCount++; + } else { + mServerCrashCount = 1; + } + mLastCrashTime = currentTime; + } + + /** + * Resets the MediaThrottler to its initial state so that subsequent requests + * will not be throttled. + */ + @CalledByNative + private void reset() { + synchronized (mLock) { + mServerCrashCount = 0; + mLastCrashTime = UNKNOWN_LAST_SERVER_CRASH_TIME; + } + } +}
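The throttling rule implemented by the Java class above is: deny decode requests while the last media server crash is less than 60 seconds old and at least four crashes have landed inside that rolling window; reset() clears the state once the user explicitly allows playback again. A self-contained C++ restatement of that rule is sketched below (illustrative only; the constants mirror the Java ones).

    #include <cstdint>

    class CrashWindowThrottle {
     public:
      void OnServerCrash(int64_t now_ms) {
        // Crashes that land within the window extend the streak; otherwise
        // the streak restarts at one.
        crash_count_ =
            (last_crash_ms_ >= 0 && now_ms - last_crash_ms_ < kWindowMs)
                ? crash_count_ + 1
                : 1;
        last_crash_ms_ = now_ms;
      }

      bool ShouldThrottle(int64_t now_ms) const {
        return last_crash_ms_ >= 0 && now_ms - last_crash_ms_ < kWindowMs &&
               crash_count_ >= kCrashThreshold;
      }

      void Reset() {
        crash_count_ = 0;
        last_crash_ms_ = -1;
      }

     private:
      // Mirror SERVER_CRASH_INTERVAL_THRESHOLD_IN_MILLIS and
      // SERVER_CRASH_COUNT_THRESHOLD_FOR_THROTTLING above.
      static const int64_t kWindowMs = 60000;
      static const int kCrashThreshold = 4;

      int crash_count_ = 0;
      int64_t last_crash_ms_ = -1;
    };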
diff --git a/content/public/browser/tracing_delegate.h b/content/public/browser/tracing_delegate.h index 2d5ba95..e72e71b2 100644 --- a/content/public/browser/tracing_delegate.h +++ b/content/public/browser/tracing_delegate.h
@@ -9,6 +9,7 @@ #include "content/common/content_export.h" namespace base { +class DictionaryValue; class Time; } @@ -38,6 +39,9 @@ virtual bool IsAllowedToEndBackgroundScenario( const content::BackgroundTracingConfig& config, bool requires_anonymized_data); + + // Used to add any additional metadata to traces. + virtual void GenerateMetadataDict(base::DictionaryValue* metadata_dict) {} }; } // namespace content
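The new GenerateMetadataDict() hook above lets the embedder append its own key/value pairs to the trace metadata assembled in background_tracing_manager_impl.cc. A hedged sketch of an embedder-side override follows; the class and the key/value are made up for illustration.

    #include "base/values.h"
    #include "content/public/browser/tracing_delegate.h"

    class ExampleTracingDelegate : public content::TracingDelegate {
     public:
      void GenerateMetadataDict(base::DictionaryValue* metadata_dict) override {
        // Anything added here ends up next to "network-type", "user-agent",
        // and the other keys written by the manager.
        metadata_dict->SetString("example-channel", "dev");
      }
      // The remaining TracingDelegate virtuals are omitted in this sketch.
    };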
diff --git a/content/public/browser/web_contents_delegate.cc b/content/public/browser/web_contents_delegate.cc index b3d2d70..3d18cee 100644 --- a/content/public/browser/web_contents_delegate.cc +++ b/content/public/browser/web_contents_delegate.cc
@@ -195,6 +195,14 @@ return false; } +#if defined(OS_ANDROID) +void WebContentsDelegate::RequestMediaDecodePermission( + WebContents* web_contents, + const base::Callback<void(bool)>& callback) { + callback.Run(false); +} +#endif + bool WebContentsDelegate::RequestPpapiBrokerPermission( WebContents* web_contents, const GURL& url,
diff --git a/content/public/browser/web_contents_delegate.h b/content/public/browser/web_contents_delegate.h index 76dfd36..9c344f21 100644 --- a/content/public/browser/web_contents_delegate.h +++ b/content/public/browser/web_contents_delegate.h
@@ -453,6 +453,14 @@ const GURL& security_origin, MediaStreamType type); +#if defined(OS_ANDROID) + // Asks for permission to decode a media stream. Once permission is + // determined, |callback| will be called with the result. + virtual void RequestMediaDecodePermission( + WebContents* web_contents, + const base::Callback<void(bool)>& callback); +#endif + // Requests permission to access the PPAPI broker. The delegate should return // true and call the passed in |callback| with the result, or return false // to indicate that it does not support asking for permission.
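A hedged sketch of how an embedder might implement the new Android-only callback is shown below; the class is hypothetical, and a real embedder would typically show a prompt or infobar before answering.

    #include "base/callback.h"
    #include "content/public/browser/web_contents_delegate.h"

    #if defined(OS_ANDROID)
    class ExampleWebContentsDelegate : public content::WebContentsDelegate {
     public:
      void RequestMediaDecodePermission(
          content::WebContents* web_contents,
          const base::Callback<void(bool)>& callback) override {
        // This sketch grants unconditionally; the default implementation in
        // web_contents_delegate.cc answers false instead.
        callback.Run(true);
      }
    };
    #endif  // defined(OS_ANDROID)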
diff --git a/content/renderer/gpu/render_widget_compositor.cc b/content/renderer/gpu/render_widget_compositor.cc index c82b3b1..2e97735 100644 --- a/content/renderer/gpu/render_widget_compositor.cc +++ b/content/renderer/gpu/render_widget_compositor.cc
@@ -214,6 +214,7 @@ : num_failed_recreate_attempts_(0), widget_(widget), compositor_deps_(compositor_deps), + never_visible_(false), layout_and_paint_async_callback_(nullptr), weak_factory_(this) { } @@ -499,6 +500,11 @@ RenderWidgetCompositor::~RenderWidgetCompositor() {} +void RenderWidgetCompositor::SetNeverVisible() { + DCHECK(!layer_tree_host_->visible()); + never_visible_ = true; +} + const base::WeakPtr<cc::InputHandler>& RenderWidgetCompositor::GetInputHandler() { return layer_tree_host_->GetInputHandler(); @@ -573,10 +579,6 @@ return layer_tree_host_->SendMessageToMicroBenchmark(id, value.Pass()); } -void RenderWidgetCompositor::StartCompositor() { - layer_tree_host_->SetLayerTreeHostClientReady(); -} - void RenderWidgetCompositor::setRootLayer(const blink::WebLayer& layer) { layer_tree_host_->SetRootLayer( static_cast<const cc_blink::WebLayerImpl*>(&layer)->layer()); @@ -633,6 +635,9 @@ } void RenderWidgetCompositor::setVisible(bool visible) { + if (never_visible_) + return; + layer_tree_host_->SetVisible(visible); }
diff --git a/content/renderer/gpu/render_widget_compositor.h b/content/renderer/gpu/render_widget_compositor.h index 6e8b8fd..2aecd43 100644 --- a/content/renderer/gpu/render_widget_compositor.h +++ b/content/renderer/gpu/render_widget_compositor.h
@@ -49,6 +49,7 @@ ~RenderWidgetCompositor() override; + void SetNeverVisible(); const base::WeakPtr<cc::InputHandler>& GetInputHandler(); bool BeginMainFrameRequested() const; void SetNeedsDisplayOnAllLayers(); @@ -77,7 +78,6 @@ scoped_ptr<base::Value> value, const base::Callback<void(scoped_ptr<base::Value>)>& callback); bool SendMessageToMicroBenchmark(int id, scoped_ptr<base::Value> value); - void StartCompositor(); void SetSurfaceIdNamespace(uint32_t surface_id_namespace); cc::ManagedMemoryPolicy GetGpuMemoryPolicy( const cc::ManagedMemoryPolicy& policy); @@ -189,6 +189,7 @@ RenderWidget* widget_; CompositorDependencies* compositor_deps_; scoped_ptr<cc::LayerTreeHost> layer_tree_host_; + bool never_visible_; blink::WebLayoutAndPaintAsyncCallback* layout_and_paint_async_callback_; scoped_ptr<cc::CopyOutputRequest> temporary_copy_output_request_;
diff --git a/content/renderer/gpu/render_widget_compositor_unittest.cc b/content/renderer/gpu/render_widget_compositor_unittest.cc index f1d7ec32..6d96d89 100644 --- a/content/renderer/gpu/render_widget_compositor_unittest.cc +++ b/content/renderer/gpu/render_widget_compositor_unittest.cc
@@ -251,7 +251,7 @@ render_widget_compositor_->SetUp( use_null_output_surface, num_failures_before_success, expected_successes, expected_fallback_succeses); - render_widget_compositor_->StartCompositor(); + render_widget_compositor_->setVisible(true); base::ThreadTaskRunnerHandle::Get()->PostTask( FROM_HERE, base::Bind(&RenderWidgetCompositorOutputSurface::SynchronousComposite,
diff --git a/content/renderer/render_thread_impl.cc b/content/renderer/render_thread_impl.cc index 998e05b..bfc8cba 100644 --- a/content/renderer/render_thread_impl.cc +++ b/content/renderer/render_thread_impl.cc
@@ -395,7 +395,7 @@ void SetupEmbeddedWorkerOnWorkerThread( mojo::InterfaceRequest<mojo::ServiceProvider> services, - mojo::ServiceProviderPtr exposed_services) { + mojo::InterfacePtrInfo<mojo::ServiceProvider> exposed_services) { ServiceWorkerContextClient* client = ServiceWorkerContextClient::ThreadSpecificInstance(); // It is possible for client to be null if for some reason the worker died @@ -403,7 +403,8 @@ // nothing and let mojo close the connection. if (!client) return; - client->BindServiceRegistry(services.Pass(), exposed_services.Pass()); + client->BindServiceRegistry(services.Pass(), + mojo::MakeProxy(exposed_services.Pass())); } class EmbeddedWorkerSetupImpl : public EmbeddedWorkerSetup { @@ -416,12 +417,10 @@ int32_t thread_id, mojo::InterfaceRequest<mojo::ServiceProvider> services, mojo::ServiceProviderPtr exposed_services) override { - mojo::ServiceProviderPtr exposed_services_with_id(28); - exposed_services_with_id.Bind(exposed_services.PassInterface()); WorkerTaskRunner::Instance()->GetTaskRunnerFor(thread_id)->PostTask( FROM_HERE, base::Bind(&SetupEmbeddedWorkerOnWorkerThread, base::Passed(&services), - base::Passed(&exposed_services_with_id))); + base::Passed(exposed_services.PassInterface()))); } private:
diff --git a/content/renderer/render_widget.cc b/content/renderer/render_widget.cc index c9a9a41..78e5df11 100644 --- a/content/renderer/render_widget.cc +++ b/content/renderer/render_widget.cc
@@ -492,7 +492,7 @@ need_update_rect_for_auto_resize_(false), did_show_(false), is_hidden_(hidden), - never_visible_(never_visible), + compositor_never_visible_(never_visible), is_fullscreen_granted_(false), display_mode_(blink::WebDisplayModeUndefined), has_focus_(false), @@ -967,7 +967,7 @@ scoped_ptr<cc::OutputSurface> RenderWidget::CreateOutputSurface(bool fallback) { // For widgets that are never visible, we don't start the compositor, so we // never get a request for a cc::OutputSurface. - DCHECK(!never_visible_); + DCHECK(!compositor_never_visible_); #if defined(OS_ANDROID) if (SynchronousCompositorFactory* factory = @@ -1339,6 +1339,12 @@ compositor_ = RenderWidgetCompositor::Create(this, compositor_deps_); compositor_->setViewportSize(size_, physical_backing_size_); + + // For background pages and certain tests, we don't want to trigger + // OutputSurface creation. + if (compositor_never_visible_ || !RenderThreadImpl::current()) + compositor_->SetNeverVisible(); + StartCompositor(); } @@ -2168,15 +2174,8 @@ } void RenderWidget::StartCompositor() { - // For widgets that are never visible, we don't need the compositor to run - // at all. - if (never_visible_) - return; - // In tests without a RenderThreadImpl, don't set ready as this kicks - // off creating output surfaces that the test can't create. - if (!RenderThreadImpl::current()) - return; - compositor_->StartCompositor(); + if (!is_hidden()) + compositor_->setVisible(true); } void RenderWidget::SchedulePluginMove(const WebPluginGeometry& move) {
diff --git a/content/renderer/render_widget.h b/content/renderer/render_widget.h index 4a744ab..68e0432 100644 --- a/content/renderer/render_widget.h +++ b/content/renderer/render_widget.h
@@ -644,7 +644,7 @@ bool is_hidden_; // Indicates that we are never visible, so never produce graphical output. - bool never_visible_; + const bool compositor_never_visible_; // Indicates whether tab-initiated fullscreen was granted. bool is_fullscreen_granted_;
diff --git a/content/test/gpu/gpu_tests/pixel.py b/content/test/gpu/gpu_tests/pixel.py index 52acf4692..9ea1f775 100644 --- a/content/test/gpu/gpu_tests/pixel.py +++ b/content/test/gpu/gpu_tests/pixel.py
@@ -156,7 +156,8 @@ if ref_png is not None: return ref_png - print 'Reference image not found. Writing tab contents as reference.' + print ('Reference image not found. Writing tab contents as reference to: ' + + image_path) self._WriteImage(image_path, screenshot) return screenshot @@ -182,7 +183,8 @@ default=default_reference_image_dir) def CreateStorySet(self, options): - story_set = page_sets.PixelTestsStorySet(self.GetExpectations()) + story_set = page_sets.PixelTestsStorySet(self.GetExpectations(), + try_es3=True) for page in story_set: page.script_to_evaluate_on_commit = test_harness_script return story_set
diff --git a/content/test/gpu/gpu_tests/pixel_expectations.py b/content/test/gpu/gpu_tests/pixel_expectations.py index ac6bf2a..9cdfe0d4 100644 --- a/content/test/gpu/gpu_tests/pixel_expectations.py +++ b/content/test/gpu/gpu_tests/pixel_expectations.py
@@ -14,3 +14,15 @@ self.Fail('Pixel.ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) + + self.Fail('Pixel.ScissorTestWithPreserveDrawingBufferES3', + ['mac'], bug=540039) + + # TODO(kbr): remove these failure expectations once reference + # images are generated. + self.Fail('Pixel.Canvas2DRedBoxES3', + ['mac'], bug=534114) + self.Fail('Pixel.CSS3DBlueBoxES3', + ['mac'], bug=534114) + self.Fail('Pixel.WebGLGreenTriangleES3', + ['mac'], bug=534114)
diff --git a/content/test/gpu/page_sets/pixel_tests.py b/content/test/gpu/page_sets/pixel_tests.py index 77b8e5b..3854df6 100644 --- a/content/test/gpu/page_sets/pixel_tests.py +++ b/content/test/gpu/page_sets/pixel_tests.py
@@ -3,14 +3,18 @@ # found in the LICENSE file. from telemetry.story import story_set as story_set_module +import sys + from gpu_tests import gpu_test_base class PixelTestsPage(gpu_test_base.PageBase): - def __init__(self, url, name, test_rect, revision, story_set, expectations, - expected_colors=None): - super(PixelTestsPage, self).__init__(url=url, page_set=story_set, name=name, - expectations=expectations) + def __init__(self, url, name, test_rect, revision, story_set, + shared_page_state_class, expectations, expected_colors=None): + super(PixelTestsPage, self).__init__( + url=url, page_set=story_set, name=name, + shared_page_state_class=shared_page_state_class, + expectations=expectations) self.test_rect = test_rect self.revision = revision if expected_colors: @@ -22,41 +26,78 @@ 'domAutomationController._finished', timeout_in_seconds=30) +class PixelTestsES3SharedPageState(gpu_test_base.GpuSharedPageState): + def __init__(self, test, finder_options, story_set): + super(PixelTestsES3SharedPageState, self).__init__( + test, finder_options, story_set) + finder_options.browser_options.AppendExtraBrowserArgs( + ['--enable-unsafe-es3-apis']) + + class PixelTestsStorySet(story_set_module.StorySet): """ Some basic test cases for GPU. """ - - def __init__(self, expectations, base_name='Pixel'): + def __init__(self, expectations, base_name='Pixel', try_es3=False): super(PixelTestsStorySet, self).__init__() + self._AddAllPages(expectations, base_name, False) + # Would be better to fetch this from Telemetry. + # TODO(kbr): enable this on all platforms. Don't know what will + # happen on Android right now. + if try_es3 and sys.platform.startswith('darwin'): + # Add all the tests again, this time with the + # --enable-unsafe-es3-apis command line argument. This has the + # side-effect of enabling the Core Profile rendering path on Mac + # OS. + self._AddAllPages(expectations, base_name, True) + + def _AddAllPages(self, expectations, base_name, use_es3): + if use_es3: + es3_suffix = 'ES3' + shared_page_state_class = PixelTestsES3SharedPageState + else: + es3_suffix = '' + shared_page_state_class = gpu_test_base.GpuSharedPageState + self.AddStory(PixelTestsPage( url='file://../../data/gpu/pixel_canvas2d.html', - name=base_name + '.Canvas2DRedBox', + name=base_name + '.Canvas2DRedBox' + es3_suffix, test_rect=[0, 0, 300, 300], revision=7, story_set=self, + shared_page_state_class=shared_page_state_class, expectations=expectations)) self.AddStory(PixelTestsPage( url='file://../../data/gpu/pixel_css3d.html', - name=base_name + '.CSS3DBlueBox', + name=base_name + '.CSS3DBlueBox' + es3_suffix, test_rect=[0, 0, 300, 300], revision=15, story_set=self, + shared_page_state_class=shared_page_state_class, expectations=expectations)) self.AddStory(PixelTestsPage( url='file://../../data/gpu/pixel_webgl.html', - name=base_name + '.WebGLGreenTriangle', + name=base_name + '.WebGLGreenTriangle' + es3_suffix, test_rect=[0, 0, 300, 300], revision=12, story_set=self, + shared_page_state_class=shared_page_state_class, expectations=expectations)) self.AddStory(PixelTestsPage( url='file://../../data/gpu/pixel_scissor.html', - name=base_name + '.ScissorTestWithPreserveDrawingBuffer', + name=base_name + '.ScissorTestWithPreserveDrawingBuffer' + es3_suffix, test_rect=[0, 0, 300, 300], revision=0, # This is not used. 
story_set=self, + shared_page_state_class=shared_page_state_class, expectations=expectations, expected_colors='../../data/gpu/pixel_scissor_expectations.json')) + + @property + def allow_mixed_story_states(self): + # Return True here in order to be able to add the same tests with + # a different SharedPageState on Mac which tests them with the + # Core Profile rendering path. + return True
diff --git a/content/test/web_layer_tree_view_impl_for_testing.cc b/content/test/web_layer_tree_view_impl_for_testing.cc index 86b1fa4..45a4a288 100644 --- a/content/test/web_layer_tree_view_impl_for_testing.cc +++ b/content/test/web_layer_tree_view_impl_for_testing.cc
@@ -127,10 +127,11 @@ } void WebLayerTreeViewImplForTesting::RequestNewOutputSurface() { - bool flipped_output_surface = false; - layer_tree_host_->SetOutputSurface( - make_scoped_ptr(new cc::PixelTestOutputSurface( - cc::TestContextProvider::Create(), flipped_output_surface))); + // TODO(crbug.com/540026): Fix crashes with real OutputSurface + // bool flipped_output_surface = false; + // layer_tree_host_->SetOutputSurface( + // make_scoped_ptr(new cc::PixelTestOutputSurface( + // cc::TestContextProvider::Create(), flipped_output_surface))); } void WebLayerTreeViewImplForTesting::DidFailToInitializeOutputSurface() {
diff --git a/ios/chrome/browser/autofill/form_suggestion_controller.h b/ios/chrome/browser/autofill/form_suggestion_controller.h index 6cf39c7..92d37821 100644 --- a/ios/chrome/browser/autofill/form_suggestion_controller.h +++ b/ios/chrome/browser/autofill/form_suggestion_controller.h
@@ -35,6 +35,13 @@ - (instancetype)initWithWebState:(web::WebState*)webState providers:(NSArray*)providers; +// Finds a FormSuggestionProvider that can supply suggestions for the specified +// form, requests them, and updates the view accordingly. +- (void)retrieveSuggestionsForFormNamed:(const std::string&)formName + fieldName:(const std::string&)fieldName + type:(const std::string&)type + webState:(web::WebState*)webState; + // Instructs the controller to detach itself from the WebState. - (void)detachFromWebState; @@ -55,6 +62,9 @@ // Overrides the web view proxy. - (void)setWebViewProxy:(id<CRWWebViewProxy>)webViewProxy; +// Invoked when an attempt to retrieve suggestions yields no results. +- (void)onNoSuggestionsAvailable; + @end #endif // IOS_CHROME_BROWSER_AUTOFILL_FORM_SUGGESTION_CONTROLLER_H_
diff --git a/ios/chrome/browser/autofill/form_suggestion_controller.mm b/ios/chrome/browser/autofill/form_suggestion_controller.mm index f245760..04ad0d1 100644 --- a/ios/chrome/browser/autofill/form_suggestion_controller.mm +++ b/ios/chrome/browser/autofill/form_suggestion_controller.mm
@@ -81,13 +81,6 @@ // Clears state in between page loads. - (void)resetSuggestionState; -// Finds a FormSuggestionProvider that can supply suggestions for the specified -// form, requests them, and updates the view accordingly. -- (void)retrieveSuggestionsForFormNamed:(const std::string&)formName - fieldName:(const std::string&)fieldName - type:(const std::string&)type - webState:(web::WebState*)webState; - @end @implementation FormSuggestionController { @@ -126,6 +119,9 @@ JsSuggestionManager:jsSuggestionManager]; } +- (void)onNoSuggestionsAvailable { +} + - (void)detachFromWebState { _webStateObserverBridge.reset(); } @@ -211,8 +207,10 @@ // Once a provider is found, use it to retrieve suggestions. passwords::PipelineCompletionBlock completion = ^(NSUInteger providerIndex) { - if (providerIndex == NSNotFound) + if (providerIndex == NSNotFound) { + [weakSelf onNoSuggestionsAvailable]; return; + } base::scoped_nsobject<FormSuggestionController> strongSelf( [weakSelf retain]); if (!strongSelf) @@ -229,7 +227,7 @@ // Run all the blocks in |findProviderBlocks| until one invokes its // completion with YES. The first one to do so will be passed to - // |onProviderFound|. + // |completion|. passwords::RunSearchPipeline(findProviderBlocks, completion); }
diff --git a/ipc/BUILD.gn b/ipc/BUILD.gn index 4327bd18..ae64490 100644 --- a/ipc/BUILD.gn +++ b/ipc/BUILD.gn
@@ -11,10 +11,14 @@ "attachment_broker_messages.h", "attachment_broker_privileged.cc", "attachment_broker_privileged.h", + "attachment_broker_privileged_mac.cc", + "attachment_broker_privileged_mac.h", "attachment_broker_privileged_win.cc", "attachment_broker_privileged_win.h", "attachment_broker_unprivileged.cc", "attachment_broker_unprivileged.h", + "attachment_broker_unprivileged_mac.cc", + "attachment_broker_unprivileged_mac.h", "attachment_broker_unprivileged_win.cc", "attachment_broker_unprivileged_win.h", "brokerable_attachment.cc", @@ -138,6 +142,7 @@ test("ipc_tests") { sources = [ + "attachment_broker_privileged_mac_unittest.cc", "attachment_broker_privileged_win_unittest.cc", "attachment_broker_unprivileged_win_unittest.cc", "ipc_channel_posix_unittest.cc",
diff --git a/ipc/attachment_broker.cc b/ipc/attachment_broker.cc index 2fc4fb4..dac046b 100644 --- a/ipc/attachment_broker.cc +++ b/ipc/attachment_broker.cc
@@ -14,8 +14,9 @@ // static void AttachmentBroker::SetGlobal(AttachmentBroker* broker) { - CHECK(!g_attachment_broker) - << "An attachment broker already exists with memory address: " << broker; + CHECK(!g_attachment_broker || !broker) + << "Global attachment broker address: " << g_attachment_broker + << ". New attachment broker address: " << broker; g_attachment_broker = broker; }
diff --git a/ipc/attachment_broker_messages.h b/ipc/attachment_broker_messages.h index f0e103d..a6301c2 100644 --- a/ipc/attachment_broker_messages.h +++ b/ipc/attachment_broker_messages.h
@@ -14,6 +14,10 @@
 #include "ipc/handle_attachment_win.h"
 #endif  // defined(OS_WIN)
 
+#if defined(OS_MACOSX)
+#include "ipc/mach_port_attachment_mac.h"
+#endif  // defined(OS_MACOSX)
+
 // ----------------------------------------------------------------------------
 // Serialization of structs.
 // ----------------------------------------------------------------------------
@@ -30,6 +34,14 @@
 IPC_STRUCT_TRAITS_END()
 #endif  // defined(OS_WIN)
 
+#if defined(OS_MACOSX)
+IPC_STRUCT_TRAITS_BEGIN(IPC::internal::MachPortAttachmentMac::WireFormat)
+  IPC_STRUCT_TRAITS_MEMBER(mach_port)
+  IPC_STRUCT_TRAITS_MEMBER(destination_process)
+  IPC_STRUCT_TRAITS_MEMBER(attachment_id)
+IPC_STRUCT_TRAITS_END()
+#endif  // defined(OS_MACOSX)
+
 #undef IPC_MESSAGE_EXPORT
 #define IPC_MESSAGE_EXPORT IPC_EXPORT
 #define IPC_MESSAGE_START AttachmentBrokerMsgStart
@@ -47,6 +59,16 @@
     IPC::internal::HandleAttachmentWin::WireFormat /* wire_format */)
 #endif  // defined(OS_WIN)
 
+#if defined(OS_MACOSX)
+// Sent from a broker process to a non-broker process to indicate that an OSX
+// Mach port has been duplicated. Contains all information necessary for the
+// non-broker process to translate a BrokerableAttachment::AttachmentId to a
+// BrokerableAttachment.
+IPC_MESSAGE_CONTROL1(
+    AttachmentBrokerMsg_MachPortHasBeenDuplicated,
+    IPC::internal::MachPortAttachmentMac::WireFormat /* wire_format */)
+#endif  // defined(OS_MACOSX)
+
 // ----------------------------------------------------------------------------
 // Messages sent from a non-broker process to a broker process.
 // ----------------------------------------------------------------------------
@@ -59,3 +81,12 @@
     AttachmentBrokerMsg_DuplicateWinHandle,
     IPC::internal::HandleAttachmentWin::WireFormat /* wire_format */)
 #endif  // defined(OS_WIN)
+
+#if defined(OS_MACOSX)
+// Sent from a non-broker process to a broker process to request the
+// duplication of a Mach port into a different process (possibly the broker
+// process, or even the original process).
+IPC_MESSAGE_CONTROL1(
+    AttachmentBrokerMsg_DuplicateMachPort,
+    IPC::internal::MachPortAttachmentMac::WireFormat /* wire_format */)
+#endif  // defined(OS_MACOSX)
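
Reviewer note (not part of the patch): the round trip that these two new messages implement, with the send calls taken from the broker implementations added later in this CL.

// 1. Non-broker process -> broker process (AttachmentBrokerUnprivilegedMac):
//      get_sender()->Send(new AttachmentBrokerMsg_DuplicateMachPort(format));
// 2. The broker duplicates the port into the destination task, then sends
//    broker process -> destination process (AttachmentBrokerPrivilegedMac):
//      sender->Send(
//          new AttachmentBrokerMsg_MachPortHasBeenDuplicated(wire_format));
// 3. The destination process wraps the translated wire format in a
//    MachPortAttachmentMac and hands it to HandleReceivedAttachment().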
diff --git a/ipc/attachment_broker_privileged.cc b/ipc/attachment_broker_privileged.cc index 0f3ac48..d040392 100644 --- a/ipc/attachment_broker_privileged.cc +++ b/ipc/attachment_broker_privileged.cc
@@ -9,11 +9,30 @@ #include "base/metrics/histogram_macros.h" #include "ipc/ipc_endpoint.h" +#if defined(OS_WIN) +#include "ipc/attachment_broker_privileged_win.h" +#endif + namespace IPC { -AttachmentBrokerPrivileged::AttachmentBrokerPrivileged() {} +AttachmentBrokerPrivileged::AttachmentBrokerPrivileged() { + IPC::AttachmentBroker::SetGlobal(this); +} -AttachmentBrokerPrivileged::~AttachmentBrokerPrivileged() {} +AttachmentBrokerPrivileged::~AttachmentBrokerPrivileged() { + IPC::AttachmentBroker::SetGlobal(nullptr); +} + +// static +scoped_ptr<AttachmentBrokerPrivileged> +AttachmentBrokerPrivileged::CreateBroker() { +#if defined(OS_WIN) + return scoped_ptr<AttachmentBrokerPrivileged>( + new IPC::AttachmentBrokerPrivilegedWin); +#else + return nullptr; +#endif +} void AttachmentBrokerPrivileged::RegisterCommunicationChannel( Endpoint* endpoint) {
diff --git a/ipc/attachment_broker_privileged.h b/ipc/attachment_broker_privileged.h index 7b3975ac..0d5f4a87 100644 --- a/ipc/attachment_broker_privileged.h +++ b/ipc/attachment_broker_privileged.h
@@ -7,6 +7,7 @@ #include <vector> +#include "base/memory/scoped_ptr.h" #include "ipc/attachment_broker.h" #include "ipc/ipc_export.h" @@ -24,6 +25,14 @@ AttachmentBrokerPrivileged(); ~AttachmentBrokerPrivileged() override; + // On platforms that support attachment brokering, returns a new instance of + // a platform-specific attachment broker. Otherwise returns |nullptr|. + // The caller takes ownership of the newly created instance, and is + // responsible for ensuring that the attachment broker lives longer than + // every IPC::Channel. The new instance automatically registers itself as the + // global attachment broker. + static scoped_ptr<AttachmentBrokerPrivileged> CreateBroker(); + // Each unprivileged process should have one IPC channel on which it // communicates attachment information with the broker process. In the broker // process, these channels must be registered and deregistered with the
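
Reviewer note (not part of the patch): a minimal sketch of how a privileged process is expected to hold the object returned by CreateBroker(). The owner class below is hypothetical; the ownership and lifetime rules come from the comment above, and no explicit SetGlobal() call is needed because the constructor registers the instance itself.

#include "base/memory/scoped_ptr.h"
#include "ipc/attachment_broker_privileged.h"

// Hypothetical owner; must be created before and destroyed after every
// IPC::Channel so that the broker outlives them all.
class BrowserProcessSketch {
 public:
  BrowserProcessSketch()
      : broker_(IPC::AttachmentBrokerPrivileged::CreateBroker()) {
    // |broker_| is null on platforms without brokering support; when it is
    // non-null it has already registered itself as the global broker.
  }

 private:
  scoped_ptr<IPC::AttachmentBrokerPrivileged> broker_;
};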
diff --git a/ipc/attachment_broker_privileged_mac.cc b/ipc/attachment_broker_privileged_mac.cc new file mode 100644 index 0000000..7399326 --- /dev/null +++ b/ipc/attachment_broker_privileged_mac.cc
@@ -0,0 +1,275 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "ipc/attachment_broker_privileged_mac.h" + +#include "base/mac/scoped_mach_port.h" +#include "base/memory/shared_memory.h" +#include "base/process/process.h" +#include "ipc/attachment_broker_messages.h" +#include "ipc/brokerable_attachment.h" +#include "ipc/ipc_channel.h" +#include "ipc/mach_port_attachment_mac.h" + +namespace { + +// Struct for sending a complex Mach message. +struct MachSendComplexMessage { + mach_msg_header_t header; + mach_msg_body_t body; + mach_msg_port_descriptor_t data; +}; + +// Sends a Mach port to |endpoint|. Assumes that |endpoint| is a send once +// right. Takes ownership of |endpoint|. +kern_return_t SendMachPort(mach_port_t endpoint, + mach_port_t port_to_send, + int disposition) { + MachSendComplexMessage send_msg; + send_msg.header.msgh_bits = + MACH_MSGH_BITS(MACH_MSG_TYPE_MOVE_SEND_ONCE, 0) | MACH_MSGH_BITS_COMPLEX; + send_msg.header.msgh_size = sizeof(send_msg); + send_msg.header.msgh_remote_port = endpoint; + send_msg.header.msgh_local_port = MACH_PORT_NULL; + send_msg.header.msgh_reserved = 0; + send_msg.header.msgh_id = 0; + send_msg.body.msgh_descriptor_count = 1; + send_msg.data.name = port_to_send; + send_msg.data.disposition = disposition; + send_msg.data.type = MACH_MSG_PORT_DESCRIPTOR; + + return mach_msg(&send_msg.header, + MACH_SEND_MSG | MACH_SEND_TIMEOUT, + send_msg.header.msgh_size, + 0, // receive limit + MACH_PORT_NULL, // receive name + 0, // timeout + MACH_PORT_NULL); // notification port +} + +} // namespace + +namespace IPC { + +AttachmentBrokerPrivilegedMac::AttachmentBrokerPrivilegedMac() + : port_provider_(nullptr) {} + +AttachmentBrokerPrivilegedMac::~AttachmentBrokerPrivilegedMac() {} + +void AttachmentBrokerPrivilegedMac::SetPortProvider( + base::PortProvider* port_provider) { + CHECK(!port_provider_); + port_provider_ = port_provider; +} + +bool AttachmentBrokerPrivilegedMac::SendAttachmentToProcess( + const BrokerableAttachment* attachment, + base::ProcessId destination_process) { + switch (attachment->GetBrokerableType()) { + case BrokerableAttachment::MACH_PORT: { + const internal::MachPortAttachmentMac* mach_port_attachment = + static_cast<const internal::MachPortAttachmentMac*>(attachment); + MachPortWireFormat wire_format = + mach_port_attachment->GetWireFormat(destination_process); + MachPortWireFormat new_wire_format = + DuplicateMachPort(wire_format, base::Process::Current().Pid()); + if (new_wire_format.mach_port == 0) + return false; + RouteDuplicatedMachPort(new_wire_format); + return true; + } + default: + NOTREACHED(); + return false; + } + return false; +} + +bool AttachmentBrokerPrivilegedMac::OnMessageReceived(const Message& msg) { + bool handled = true; + switch (msg.type()) { + IPC_MESSAGE_HANDLER_GENERIC(AttachmentBrokerMsg_DuplicateMachPort, + OnDuplicateMachPort(msg)) + IPC_MESSAGE_UNHANDLED(handled = false) + } + return handled; +} + +void AttachmentBrokerPrivilegedMac::OnDuplicateMachPort( + const IPC::Message& message) { + AttachmentBrokerMsg_DuplicateMachPort::Param param; + if (!AttachmentBrokerMsg_DuplicateMachPort::Read(&message, ¶m)) + return; + IPC::internal::MachPortAttachmentMac::WireFormat wire_format = + base::get<0>(param); + + if (wire_format.destination_process == base::kNullProcessId) { + LogError(NO_DESTINATION); + return; + } + + MachPortWireFormat new_wire_format = + 
DuplicateMachPort(wire_format, message.get_sender_pid()); + RouteDuplicatedMachPort(new_wire_format); +} + +void AttachmentBrokerPrivilegedMac::RouteDuplicatedMachPort( + const MachPortWireFormat& wire_format) { + // This process is the destination. + if (wire_format.destination_process == base::Process::Current().Pid()) { + scoped_refptr<BrokerableAttachment> attachment( + new internal::MachPortAttachmentMac(wire_format)); + HandleReceivedAttachment(attachment); + return; + } + + // Another process is the destination. + base::ProcessId dest = wire_format.destination_process; + Sender* sender = GetSenderWithProcessId(dest); + if (!sender) { + // Assuming that this message was not sent from a malicious process, the + // channel endpoint that would have received this message will block + // forever. + LOG(ERROR) << "Failed to deliver brokerable attachment to process with id: " + << dest; + LogError(DESTINATION_NOT_FOUND); + return; + } + + LogError(DESTINATION_FOUND); + sender->Send(new AttachmentBrokerMsg_MachPortHasBeenDuplicated(wire_format)); +} + +AttachmentBrokerPrivilegedMac::MachPortWireFormat +AttachmentBrokerPrivilegedMac::DuplicateMachPort( + const MachPortWireFormat& wire_format, + base::ProcessId source_pid) { + // If the source is the destination, just increment the ref count. + if (source_pid == wire_format.destination_process) { + mach_port_t task_port = + port_provider_->TaskForPid(wire_format.destination_process); + kern_return_t kr = mach_port_mod_refs(task_port, wire_format.mach_port, + MACH_PORT_RIGHT_SEND, 1); + if (kr != KERN_SUCCESS) { + // TODO(erikchen): UMA metric. + return CopyWireFormat(wire_format, MACH_PORT_NULL); + } + return wire_format; + } + + // Acquire a send right to the memory object. + base::mac::ScopedMachSendRight memory_object( + AcquireSendRight(source_pid, wire_format.mach_port)); + if (!memory_object) + return CopyWireFormat(wire_format, MACH_PORT_NULL); + + mach_port_t task_port = + port_provider_->TaskForPid(wire_format.destination_process); + mach_port_name_t inserted_memory_object = + InsertIndirectMachPort(task_port, memory_object); + return CopyWireFormat(wire_format, inserted_memory_object); +} + +mach_port_name_t AttachmentBrokerPrivilegedMac::InsertIndirectMachPort( + mach_port_t task_port, + mach_port_t port_to_insert) { + DCHECK_NE(mach_task_self(), task_port); + + // Make a port with receive rights in the destination task. + mach_port_name_t endpoint; + kern_return_t kr = + mach_port_allocate(task_port, MACH_PORT_RIGHT_RECEIVE, &endpoint); + if (kr != KERN_SUCCESS) { + // TODO(erikchen): UMA metric. + return MACH_PORT_NULL; + } + + // Change its message queue limit so that it accepts one message. + mach_port_limits limits = {}; + limits.mpl_qlimit = 1; + kr = mach_port_set_attributes(task_port, endpoint, MACH_PORT_LIMITS_INFO, + reinterpret_cast<mach_port_info_t>(&limits), + MACH_PORT_LIMITS_INFO_COUNT); + if (kr != KERN_SUCCESS) { + // TODO(erikchen): UMA metric. + mach_port_deallocate(task_port, endpoint); + return MACH_PORT_NULL; + } + + // Get a send right. + mach_port_t send_once_right; + mach_msg_type_name_t send_right_type; + kr = + mach_port_extract_right(task_port, endpoint, MACH_MSG_TYPE_MAKE_SEND_ONCE, + &send_once_right, &send_right_type); + if (kr != KERN_SUCCESS) { + // TODO(erikchen): UMA metric. + mach_port_deallocate(task_port, endpoint); + return MACH_PORT_NULL; + } + DCHECK_EQ(static_cast<mach_msg_type_name_t>(MACH_MSG_TYPE_PORT_SEND_ONCE), + send_right_type); + + // This call takes ownership of |send_once_right|. 
+ kr = SendMachPort(send_once_right, port_to_insert, MACH_MSG_TYPE_COPY_SEND); + if (kr != KERN_SUCCESS) { + // TODO(erikchen): UMA metric. + mach_port_deallocate(task_port, endpoint); + return MACH_PORT_NULL; + } + + // Endpoint is intentionally leaked into the destination task. An IPC must be + // sent to the destination task so that it can clean up this port. + return endpoint; +} + +base::mac::ScopedMachSendRight AttachmentBrokerPrivilegedMac::AcquireSendRight( + base::ProcessId pid, + mach_port_name_t named_right) { + if (pid == base::GetCurrentProcId()) { + kern_return_t kr = mach_port_mod_refs(mach_task_self(), named_right, + MACH_PORT_RIGHT_SEND, 1); + if (kr != KERN_SUCCESS) + return base::mac::ScopedMachSendRight(MACH_PORT_NULL); + return base::mac::ScopedMachSendRight(named_right); + } + + mach_port_t task_port = port_provider_->TaskForPid(pid); + return ExtractNamedRight(task_port, named_right); +} + +base::mac::ScopedMachSendRight AttachmentBrokerPrivilegedMac::ExtractNamedRight( + mach_port_t task_port, + mach_port_name_t named_right) { + mach_port_t extracted_right = MACH_PORT_NULL; + mach_msg_type_name_t extracted_right_type; + kern_return_t kr = + mach_port_extract_right(task_port, named_right, MACH_MSG_TYPE_COPY_SEND, + &extracted_right, &extracted_right_type); + if (kr != KERN_SUCCESS) + return base::mac::ScopedMachSendRight(MACH_PORT_NULL); + + DCHECK_EQ(static_cast<mach_msg_type_name_t>(MACH_MSG_TYPE_PORT_SEND), + extracted_right_type); + + // Decrement the reference count of the send right from the source process. + kr = mach_port_mod_refs(task_port, named_right, MACH_PORT_RIGHT_SEND, -1); + if (kr != KERN_SUCCESS) { + // TODO(erikchen): UMA metric. + // Failure does not actually affect attachment brokering, so there's no need + // to return |MACH_PORT_NULL|. + } + + return base::mac::ScopedMachSendRight(extracted_right); +} + +AttachmentBrokerPrivilegedMac::MachPortWireFormat +AttachmentBrokerPrivilegedMac::CopyWireFormat( + const MachPortWireFormat& wire_format, + uint32_t mach_port) { + return MachPortWireFormat(mach_port, wire_format.destination_process, + wire_format.attachment_id); +} + +} // namespace IPC
diff --git a/ipc/attachment_broker_privileged_mac.h b/ipc/attachment_broker_privileged_mac.h new file mode 100644 index 0000000..24c8b0c --- /dev/null +++ b/ipc/attachment_broker_privileged_mac.h
@@ -0,0 +1,87 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef IPC_ATTACHMENT_BROKER_PRIVILEGED_MAC_H_ +#define IPC_ATTACHMENT_BROKER_PRIVILEGED_MAC_H_ + +#include <mach/mach.h> + +#include "base/gtest_prod_util.h" +#include "base/mac/scoped_mach_port.h" +#include "base/process/port_provider_mac.h" +#include "ipc/attachment_broker_privileged.h" +#include "ipc/ipc_export.h" +#include "ipc/mach_port_attachment_mac.h" + +namespace IPC { + +// This class is a concrete subclass of AttachmentBrokerPrivileged for the +// OSX platform. +class IPC_EXPORT AttachmentBrokerPrivilegedMac + : public AttachmentBrokerPrivileged { + public: + AttachmentBrokerPrivilegedMac(); + ~AttachmentBrokerPrivilegedMac() override; + + // The port provider must live as long as the AttachmentBrokerPrivilegedMac. A + // port provider must be set before any attachment brokering occurs. + void SetPortProvider(base::PortProvider* port_provider); + + // IPC::AttachmentBroker overrides. + bool SendAttachmentToProcess(const BrokerableAttachment* attachment, + base::ProcessId destination_process) override; + + // IPC::Listener overrides. + bool OnMessageReceived(const Message& message) override; + + private: + FRIEND_TEST_ALL_PREFIXES(AttachmentBrokerPrivilegedMacMultiProcessTest, + InsertRight); + FRIEND_TEST_ALL_PREFIXES(AttachmentBrokerPrivilegedMacMultiProcessTest, + InsertSameRightTwice); + FRIEND_TEST_ALL_PREFIXES(AttachmentBrokerPrivilegedMacMultiProcessTest, + InsertTwoRights); + using MachPortWireFormat = internal::MachPortAttachmentMac::WireFormat; + // IPC message handlers. + void OnDuplicateMachPort(const Message& message); + + // Duplicates the Mach port referenced from |wire_format| from + // |source_process| into |wire_format|'s destination process. + MachPortWireFormat DuplicateMachPort(const MachPortWireFormat& wire_format, + base::ProcessId source_process); + + // Returns the name of the inserted right of a port, which contains a queued + // message with |port_to_insert|. Returns |MACH_PORT_NULL| on failure. + // |task_port| must be for a different task, and |port_to_insert| is a port + // right in the current task. + mach_port_name_t InsertIndirectMachPort(mach_port_t task_port, + mach_port_t port_to_insert); + + // Acquire a send right to a named right in |pid|. + // Returns MACH_PORT_NULL on error. + base::mac::ScopedMachSendRight AcquireSendRight(base::ProcessId pid, + mach_port_name_t named_right); + + // Extracts a copy of the send right to |named_right| from |task_port|. + // Returns MACH_PORT_NULL on error. + base::mac::ScopedMachSendRight ExtractNamedRight( + mach_port_t task_port, + mach_port_name_t named_right); + + // Copies an existing |wire_format|, but substitutes in a different mach port. + MachPortWireFormat CopyWireFormat(const MachPortWireFormat& wire_format, + uint32_t mach_port); + + // If the mach port's destination is this process, queue it and notify the + // observers. Otherwise, send it in an IPC to its destination. + void RouteDuplicatedMachPort(const MachPortWireFormat& wire_format); + + base::PortProvider* port_provider_; + + DISALLOW_COPY_AND_ASSIGN(AttachmentBrokerPrivilegedMac); +}; + +} // namespace IPC + +#endif // IPC_ATTACHMENT_BROKER_PRIVILEGED_MAC_H_
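
Reviewer note (not part of the patch): how the Mac privileged broker is expected to be wired up, based on the comments in this header. |broker|, |port_provider| and |endpoint| are stand-ins supplied by the embedder; SetPortProvider() must run before any brokering, and RegisterCommunicationChannel() comes from the AttachmentBrokerPrivileged base class.

#include "base/process/port_provider_mac.h"
#include "ipc/attachment_broker_privileged_mac.h"
#include "ipc/ipc_endpoint.h"

// Sketch only; assumes |broker| was created at startup, |port_provider| can
// map a child pid to its task port, and |endpoint| is the broker-side end of
// a channel to one unprivileged process.
void WireUpMacBrokerSketch(IPC::AttachmentBrokerPrivilegedMac* broker,
                           base::PortProvider* port_provider,
                           IPC::Endpoint* endpoint) {
  // Set exactly once, before any brokering (SetPortProvider CHECKs on reuse).
  broker->SetPortProvider(port_provider);
  // Repeated for every channel that talks to an unprivileged process.
  broker->RegisterCommunicationChannel(endpoint);
}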
diff --git a/ipc/attachment_broker_privileged_mac_unittest.cc b/ipc/attachment_broker_privileged_mac_unittest.cc new file mode 100644 index 0000000..3c5f3173 --- /dev/null +++ b/ipc/attachment_broker_privileged_mac_unittest.cc
@@ -0,0 +1,477 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "ipc/attachment_broker_privileged_mac.h" + +#include <mach/mach.h> +#include <mach/mach_vm.h> +#include <servers/bootstrap.h> + +#include <map> + +#include "base/command_line.h" +#include "base/mac/mach_logging.h" +#include "base/mac/scoped_mach_port.h" +#include "base/memory/shared_memory.h" +#include "base/process/port_provider_mac.h" +#include "base/process/process_handle.h" +#include "base/rand_util.h" +#include "base/strings/stringprintf.h" +#include "base/sys_info.h" +#include "base/test/multiprocess_test.h" +#include "base/test/test_timeouts.h" +#include "testing/multiprocess_func_list.h" + +namespace IPC { + +namespace { + +static const std::string g_service_switch_name = "service_name"; + +// Structs used to pass a mach port from client to server. +struct MachSendPortMessage { + mach_msg_header_t header; + mach_msg_body_t body; + mach_msg_port_descriptor_t data; +}; +struct MachReceivePortMessage : public MachSendPortMessage { + mach_msg_trailer_t trailer; +}; + +// Makes the current process into a Mach Server with the given |service_name|. +base::mac::ScopedMachSendRight BecomeMachServer(const char* service_name) { + mach_port_t port; + kern_return_t kr = bootstrap_check_in(bootstrap_port, service_name, &port); + MACH_CHECK(kr == KERN_SUCCESS, kr) << "BecomeMachServer"; + return base::mac::ScopedMachSendRight(port); +} + +// Returns the mach port for the Mach Server with the given |service_name|. +base::mac::ScopedMachSendRight LookupServer(const char* service_name) { + mach_port_t server_port; + kern_return_t kr = + bootstrap_look_up(bootstrap_port, service_name, &server_port); + MACH_CHECK(kr == KERN_SUCCESS, kr) << "LookupServer"; + return base::mac::ScopedMachSendRight(server_port); +} + +base::mac::ScopedMachReceiveRight MakeReceivingPort() { + mach_port_t client_port; + kern_return_t kr = + mach_port_allocate(mach_task_self(), // our task is acquiring + MACH_PORT_RIGHT_RECEIVE, // a new receive right + &client_port); // with this name + MACH_CHECK(kr == KERN_SUCCESS, kr) << "MakeReceivingPort"; + return base::mac::ScopedMachReceiveRight(client_port); +} + +// Blocks until a mach message is sent to |server_port|. This mach message +// must contain a mach port. Returns that mach port. +base::mac::ScopedMachSendRight ReceiveMachPort(mach_port_t port_to_listen_on) { + MachReceivePortMessage recv_msg; + mach_msg_header_t* recv_hdr = &recv_msg.header; + recv_hdr->msgh_local_port = port_to_listen_on; + recv_hdr->msgh_size = sizeof(recv_msg); + kern_return_t kr = + mach_msg(recv_hdr, MACH_RCV_MSG, 0, recv_hdr->msgh_size, + port_to_listen_on, MACH_MSG_TIMEOUT_NONE, MACH_PORT_NULL); + MACH_CHECK(kr == KERN_SUCCESS, kr) << "ReceiveMachPort"; + mach_port_t other_task_port = recv_msg.data.name; + return base::mac::ScopedMachSendRight(other_task_port); +} + +// Passes a copy of the send right of |port_to_send| to |receiving_port|. 
+void SendMachPort(mach_port_t receiving_port, + mach_port_t port_to_send, + int disposition) { + MachSendPortMessage send_msg; + send_msg.header.msgh_bits = + MACH_MSGH_BITS(MACH_MSG_TYPE_COPY_SEND, 0) | MACH_MSGH_BITS_COMPLEX; + send_msg.header.msgh_size = sizeof(send_msg); + send_msg.header.msgh_remote_port = receiving_port; + send_msg.header.msgh_local_port = MACH_PORT_NULL; + send_msg.header.msgh_reserved = 0; + send_msg.header.msgh_id = 0; + send_msg.body.msgh_descriptor_count = 1; + send_msg.data.name = port_to_send; + send_msg.data.disposition = disposition; + send_msg.data.type = MACH_MSG_PORT_DESCRIPTOR; + int kr = mach_msg(&send_msg.header, MACH_SEND_MSG, send_msg.header.msgh_size, + 0, MACH_PORT_NULL, MACH_MSG_TIMEOUT_NONE, MACH_PORT_NULL); + MACH_CHECK(kr == KERN_SUCCESS, kr) << "SendMachPort"; +} + +// Sends a uint32_t to a mach port. +void SendUInt32(mach_port_t port, uint32_t message) { + int message_size = sizeof(uint32_t); + int total_size = message_size + sizeof(mach_msg_header_t); + void* buffer = malloc(total_size); + mach_msg_header_t* header = (mach_msg_header_t*)buffer; + header->msgh_remote_port = port; + header->msgh_local_port = MACH_PORT_NULL; + header->msgh_bits = MACH_MSGH_BITS(MACH_MSG_TYPE_COPY_SEND, 0); + header->msgh_reserved = 0; + header->msgh_id = 0; + header->msgh_size = total_size; + memcpy(static_cast<char*>(buffer) + sizeof(mach_msg_header_t), &message, + message_size); + + kern_return_t kr; + kr = mach_msg(static_cast<mach_msg_header_t*>(buffer), MACH_SEND_MSG, + total_size, 0, MACH_PORT_NULL, MACH_MSG_TIMEOUT_NONE, + MACH_PORT_NULL); + MACH_CHECK(kr == KERN_SUCCESS, kr) << "SendUInt32"; + free(buffer); +} + +// Receives a uint32_t from a mach port. +uint32_t ReceiveUInt32(mach_port_t listening_port) { + int message_size = sizeof(uint32_t); + int total_size = + message_size + sizeof(mach_msg_header_t) + sizeof(mach_msg_trailer_t); + int options = MACH_RCV_MSG; + void* buffer = malloc(total_size); + + int kr = + mach_msg(static_cast<mach_msg_header_t*>(buffer), options, 0, total_size, + listening_port, MACH_MSG_TIMEOUT_NONE, MACH_PORT_NULL); + MACH_CHECK(kr == KERN_SUCCESS, kr) << "ReceiveUInt32"; + + uint32_t response; + memcpy(&response, static_cast<char*>(buffer) + sizeof(mach_msg_header_t), + message_size); + + free(buffer); + return response; +} + +std::string CreateRandomServiceName() { + return base::StringPrintf( + "AttachmentBrokerPrivilegedMacMultiProcessTest.%llu", base::RandUint64()); +} + +// The number of active names in the current task's port name space. +mach_msg_type_number_t GetActiveNameCount() { + mach_port_name_array_t name_array; + mach_msg_type_number_t names_count; + mach_port_type_array_t type_array; + mach_msg_type_number_t types_count; + kern_return_t kr = mach_port_names(mach_task_self(), &name_array, + &names_count, &type_array, &types_count); + MACH_CHECK(kr == KERN_SUCCESS, kr) << "GetActiveNameCount"; + return names_count; +} + +// Sets up the mach communication ports with the server. Returns a port to which +// the server will send mach objects. +// |original_name_count| is an output variable that describes the number of +// active names in this task before the task port is shared with the server. 
+base::mac::ScopedMachReceiveRight CommonChildProcessSetUp( + mach_msg_type_number_t* original_name_count) { + base::CommandLine cmd_line = *base::CommandLine::ForCurrentProcess(); + std::string service_name = + cmd_line.GetSwitchValueASCII(g_service_switch_name); + base::mac::ScopedMachSendRight server_port( + LookupServer(service_name.c_str())); + base::mac::ScopedMachReceiveRight client_port(MakeReceivingPort()); + + // |server_port| is a newly allocated right which will be deallocated once + // this method returns. + *original_name_count = GetActiveNameCount() - 1; + + // Send the port that this process is listening on to the server. + SendMachPort(server_port, client_port, MACH_MSG_TYPE_MAKE_SEND); + + // Send the task port for this process. + SendMachPort(server_port, mach_task_self(), MACH_MSG_TYPE_COPY_SEND); + return client_port; +} + +// Creates a new shared memory region populated with 'a'. +scoped_ptr<base::SharedMemory> CreateAndPopulateSharedMemoryHandle( + size_t size) { + base::SharedMemoryHandle shm(size); + scoped_ptr<base::SharedMemory> shared_memory( + new base::SharedMemory(shm, false)); + shared_memory->Map(size); + memset(shared_memory->memory(), 'a', size); + return shared_memory; +} + +// Create a shared memory region from a memory object. The returned object takes +// ownership of |memory_object|. +scoped_ptr<base::SharedMemory> MapMemoryObject(mach_port_t memory_object, + size_t size) { + base::SharedMemoryHandle shm(memory_object, size, base::GetCurrentProcId()); + scoped_ptr<base::SharedMemory> shared_memory( + new base::SharedMemory(shm, false)); + shared_memory->Map(size); + return shared_memory; +} + +} // namespace + +class AttachmentBrokerPrivilegedMacMultiProcessTest + : public base::MultiProcessTest { + public: + AttachmentBrokerPrivilegedMacMultiProcessTest() {} + + base::CommandLine MakeCmdLine(const std::string& procname) override { + base::CommandLine command_line = MultiProcessTest::MakeCmdLine(procname); + // Pass the service name to the child process. + command_line.AppendSwitchASCII(g_service_switch_name, service_name_); + return command_line; + } + + void SetUpChild(const std::string& name) { + // Make a random service name so that this test doesn't conflict with other + // similar tests. + service_name_ = CreateRandomServiceName(); + server_port_.reset(BecomeMachServer(service_name_.c_str()).release()); + child_process_ = SpawnChild(name); + client_port_.reset(ReceiveMachPort(server_port_).release()); + client_task_port_.reset(ReceiveMachPort(server_port_).release()); + } + + static const int s_memory_size = 99999; + + protected: + std::string service_name_; + + // A port on which the main process listens for mach messages from the child + // process. + base::mac::ScopedMachReceiveRight server_port_; + + // A port on which the child process listens for mach messages from the main + // process. + base::mac::ScopedMachSendRight client_port_; + + // Child process's task port. + base::mac::ScopedMachSendRight client_task_port_; + + base::Process child_process_; + DISALLOW_COPY_AND_ASSIGN(AttachmentBrokerPrivilegedMacMultiProcessTest); +}; + +// The attachment broker inserts a right for a memory object into the +// destination task. +TEST_F(AttachmentBrokerPrivilegedMacMultiProcessTest, InsertRight) { + SetUpChild("InsertRightClient"); + mach_msg_type_number_t original_name_count = GetActiveNameCount(); + IPC::AttachmentBrokerPrivilegedMac broker; + + // Create some shared memory. 
+ scoped_ptr<base::SharedMemory> shared_memory = + CreateAndPopulateSharedMemoryHandle(s_memory_size); + ASSERT_TRUE(shared_memory->handle().IsValid()); + + // Insert it indirectly into the destination task. + mach_port_name_t inserted_memory_object = broker.InsertIndirectMachPort( + client_task_port_, shared_memory->handle().GetMemoryObject()); + EXPECT_NE(inserted_memory_object, + static_cast<mach_port_name_t>(MACH_PORT_NULL)); + SendUInt32(client_port_, inserted_memory_object); + + // Check that no names have been leaked. + shared_memory.reset(); + EXPECT_EQ(original_name_count, GetActiveNameCount()); + + int rv = -1; + ASSERT_TRUE(child_process_.WaitForExitWithTimeout( + TestTimeouts::action_timeout(), &rv)); + EXPECT_EQ(0, rv); +} + +MULTIPROCESS_TEST_MAIN(InsertRightClient) { + mach_msg_type_number_t original_name_count = 0; + base::mac::ScopedMachReceiveRight client_port( + CommonChildProcessSetUp(&original_name_count).release()); + base::mac::ScopedMachReceiveRight inserted_port(ReceiveUInt32(client_port)); + base::mac::ScopedMachSendRight memory_object(ReceiveMachPort(inserted_port)); + inserted_port.reset(); + + // The server should have inserted a right into this process. + EXPECT_EQ(original_name_count + 1, GetActiveNameCount()); + + // Map the memory object and check its contents. + scoped_ptr<base::SharedMemory> shared_memory(MapMemoryObject( + memory_object.release(), + AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size)); + const char* start = static_cast<const char*>(shared_memory->memory()); + for (int i = 0; + i < AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size; ++i) { + DCHECK_EQ(start[i], 'a'); + } + + // Check that no names have been leaked. + shared_memory.reset(); + EXPECT_EQ(original_name_count, GetActiveNameCount()); + + return 0; +} + +// The attachment broker inserts the right for a memory object into the +// destination task twice. +TEST_F(AttachmentBrokerPrivilegedMacMultiProcessTest, InsertSameRightTwice) { + SetUpChild("InsertSameRightTwiceClient"); + mach_msg_type_number_t original_name_count = GetActiveNameCount(); + IPC::AttachmentBrokerPrivilegedMac broker; + + // Create some shared memory. + scoped_ptr<base::SharedMemory> shared_memory = + CreateAndPopulateSharedMemoryHandle(s_memory_size); + ASSERT_TRUE(shared_memory->handle().IsValid()); + + // Insert it indirectly into the destination task, twice. + for (int i = 0; i < 2; ++i) { + mach_port_name_t inserted_memory_object = broker.InsertIndirectMachPort( + client_task_port_, shared_memory->handle().GetMemoryObject()); + EXPECT_NE(inserted_memory_object, + static_cast<mach_port_name_t>(MACH_PORT_NULL)); + SendUInt32(client_port_, inserted_memory_object); + } + + // Check that no names have been leaked. + shared_memory.reset(); + EXPECT_EQ(original_name_count, GetActiveNameCount()); + + int rv = -1; + ASSERT_TRUE(child_process_.WaitForExitWithTimeout( + TestTimeouts::action_timeout(), &rv)); + EXPECT_EQ(0, rv); +} + +MULTIPROCESS_TEST_MAIN(InsertSameRightTwiceClient) { + mach_msg_type_number_t original_name_count = 0; + base::mac::ScopedMachReceiveRight client_port( + CommonChildProcessSetUp(&original_name_count).release()); + + // Receive two memory objects. 
+ base::mac::ScopedMachReceiveRight inserted_port(ReceiveUInt32(client_port)); + base::mac::ScopedMachReceiveRight inserted_port2(ReceiveUInt32(client_port)); + base::mac::ScopedMachSendRight memory_object(ReceiveMachPort(inserted_port)); + base::mac::ScopedMachSendRight memory_object2( + ReceiveMachPort(inserted_port2)); + inserted_port.reset(); + inserted_port2.reset(); + + // Both rights are for the same Mach port, so only one new name should appear. + EXPECT_EQ(original_name_count + 1, GetActiveNameCount()); + + // Map both memory objects and check their contents. + scoped_ptr<base::SharedMemory> shared_memory(MapMemoryObject( + memory_object.release(), + AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size)); + char* start = static_cast<char*>(shared_memory->memory()); + for (int i = 0; + i < AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size; ++i) { + DCHECK_EQ(start[i], 'a'); + } + + scoped_ptr<base::SharedMemory> shared_memory2(MapMemoryObject( + memory_object2.release(), + AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size)); + char* start2 = static_cast<char*>(shared_memory2->memory()); + for (int i = 0; + i < AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size; ++i) { + DCHECK_EQ(start2[i], 'a'); + } + + // Check that the contents of both regions are shared. + start[0] = 'b'; + DCHECK_EQ(start2[0], 'b'); + + // After releasing one shared memory region, the name count shouldn't change, + // since another reference exists. + shared_memory.reset(); + EXPECT_EQ(original_name_count + 1, GetActiveNameCount()); + + // After releasing the second shared memory region, the name count should be + // as if no names were ever inserted + shared_memory2.reset(); + EXPECT_EQ(original_name_count, GetActiveNameCount()); + + return 0; +} + +// The attachment broker inserts the rights for two memory objects into the +// destination task. +TEST_F(AttachmentBrokerPrivilegedMacMultiProcessTest, InsertTwoRights) { + SetUpChild("InsertTwoRightsClient"); + mach_msg_type_number_t original_name_count = GetActiveNameCount(); + IPC::AttachmentBrokerPrivilegedMac broker; + + for (int i = 0; i < 2; ++i) { + // Create some shared memory. + scoped_ptr<base::SharedMemory> shared_memory = + CreateAndPopulateSharedMemoryHandle(s_memory_size); + ASSERT_TRUE(shared_memory->handle().IsValid()); + + // Insert it indirectly into the destination task. + mach_port_name_t inserted_memory_object = broker.InsertIndirectMachPort( + client_task_port_, shared_memory->handle().GetMemoryObject()); + EXPECT_NE(inserted_memory_object, + static_cast<mach_port_name_t>(MACH_PORT_NULL)); + SendUInt32(client_port_, inserted_memory_object); + } + + // Check that no names have been leaked. + EXPECT_EQ(original_name_count, GetActiveNameCount()); + + int rv = -1; + ASSERT_TRUE(child_process_.WaitForExitWithTimeout( + TestTimeouts::action_timeout(), &rv)); + EXPECT_EQ(0, rv); +} + +MULTIPROCESS_TEST_MAIN(InsertTwoRightsClient) { + mach_msg_type_number_t original_name_count = 0; + base::mac::ScopedMachReceiveRight client_port( + CommonChildProcessSetUp(&original_name_count).release()); + + // Receive two memory objects. 
+ base::mac::ScopedMachReceiveRight inserted_port(ReceiveUInt32(client_port)); + base::mac::ScopedMachReceiveRight inserted_port2(ReceiveUInt32(client_port)); + base::mac::ScopedMachSendRight memory_object(ReceiveMachPort(inserted_port)); + base::mac::ScopedMachSendRight memory_object2( + ReceiveMachPort(inserted_port2)); + inserted_port.reset(); + inserted_port2.reset(); + + // There should be two new names to reflect the two new shared memory regions. + EXPECT_EQ(original_name_count + 2, GetActiveNameCount()); + + // Map both memory objects and check their contents. + scoped_ptr<base::SharedMemory> shared_memory(MapMemoryObject( + memory_object.release(), + AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size)); + char* start = static_cast<char*>(shared_memory->memory()); + for (int i = 0; + i < AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size; ++i) { + DCHECK_EQ(start[i], 'a'); + } + + scoped_ptr<base::SharedMemory> shared_memory2(MapMemoryObject( + memory_object2.release(), + AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size)); + char* start2 = static_cast<char*>(shared_memory2->memory()); + for (int i = 0; + i < AttachmentBrokerPrivilegedMacMultiProcessTest::s_memory_size; ++i) { + DCHECK_EQ(start2[i], 'a'); + } + + // Check that the contents of both regions are not shared. + start[0] = 'b'; + DCHECK_EQ(start2[0], 'a'); + + // After releasing one shared memory region, the name count should decrement. + shared_memory.reset(); + EXPECT_EQ(original_name_count + 1, GetActiveNameCount()); + shared_memory2.reset(); + EXPECT_EQ(original_name_count, GetActiveNameCount()); + + return 0; +} + +} // namespace IPC
diff --git a/ipc/attachment_broker_privileged_win_unittest.cc b/ipc/attachment_broker_privileged_win_unittest.cc index 9c4a11952..25cadcb6 100644 --- a/ipc/attachment_broker_privileged_win_unittest.cc +++ b/ipc/attachment_broker_privileged_win_unittest.cc
@@ -220,7 +220,6 @@ // Takes ownership of |broker|. Has no effect if called after CommonSetUp(). void set_broker(IPC::AttachmentBrokerUnprivilegedWin* broker) { broker_.reset(broker); - IPC::AttachmentBroker::SetGlobal(broker); } void CommonSetUp() { @@ -427,7 +426,6 @@ // Set up IPC channel. IPC::AttachmentBrokerPrivilegedWin broker; - IPC::AttachmentBroker::SetGlobal(&broker); scoped_ptr<IPC::Channel> channel(IPC::Channel::CreateClient( IPCTestBase::GetChannelName(channel_name), &listener)); broker.RegisterCommunicationChannel(channel.get());
diff --git a/ipc/attachment_broker_unprivileged.cc b/ipc/attachment_broker_unprivileged.cc index ce4e8ab..75f61e4 100644 --- a/ipc/attachment_broker_unprivileged.cc +++ b/ipc/attachment_broker_unprivileged.cc
@@ -8,12 +8,31 @@ #include "ipc/ipc_channel.h" #include "ipc/ipc_endpoint.h" +#if defined(OS_WIN) +#include "ipc/attachment_broker_unprivileged_win.h" +#endif + namespace IPC { AttachmentBrokerUnprivileged::AttachmentBrokerUnprivileged() - : sender_(nullptr) {} + : sender_(nullptr) { + IPC::AttachmentBroker::SetGlobal(this); +} -AttachmentBrokerUnprivileged::~AttachmentBrokerUnprivileged() {} +AttachmentBrokerUnprivileged::~AttachmentBrokerUnprivileged() { + IPC::AttachmentBroker::SetGlobal(nullptr); +} + +// static +scoped_ptr<AttachmentBrokerUnprivileged> +AttachmentBrokerUnprivileged::CreateBroker() { +#if defined(OS_WIN) + return scoped_ptr<AttachmentBrokerUnprivileged>( + new IPC::AttachmentBrokerUnprivilegedWin); +#else + return nullptr; +#endif +} void AttachmentBrokerUnprivileged::DesignateBrokerCommunicationChannel( Endpoint* endpoint) {
diff --git a/ipc/attachment_broker_unprivileged.h b/ipc/attachment_broker_unprivileged.h index 4f847a6..26cc1bf 100644 --- a/ipc/attachment_broker_unprivileged.h +++ b/ipc/attachment_broker_unprivileged.h
@@ -5,6 +5,7 @@ #ifndef IPC_ATTACHMENT_BROKER_UNPRIVILEGED_H_ #define IPC_ATTACHMENT_BROKER_UNPRIVILEGED_H_ +#include "base/memory/scoped_ptr.h" #include "ipc/attachment_broker.h" #include "ipc/ipc_export.h" @@ -20,6 +21,14 @@ AttachmentBrokerUnprivileged(); ~AttachmentBrokerUnprivileged() override; + // On platforms that support attachment brokering, returns a new instance of + // a platform-specific attachment broker. Otherwise returns |nullptr|. + // The caller takes ownership of the newly created instance, and is + // responsible for ensuring that the attachment broker lives longer than + // every IPC::Channel. The new instance automatically registers itself as the + // global attachment broker. + static scoped_ptr<AttachmentBrokerUnprivileged> CreateBroker(); + // In each unprivileged process, exactly one channel should be used to // communicate brokerable attachments with the broker process. void DesignateBrokerCommunicationChannel(Endpoint* endpoint);
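
Reviewer note (not part of the patch): the matching usage on the unprivileged (child) side, with a hypothetical call site. CreateBroker() registers the new instance as the global broker; DesignateBrokerCommunicationChannel() marks the single channel used to exchange attachment information with the broker process.

#include "base/memory/scoped_ptr.h"
#include "ipc/attachment_broker_unprivileged.h"
#include "ipc/ipc_endpoint.h"

// Sketch only; |browser_channel| stands in for the child's channel to the
// broker (browser) process. The returned broker must outlive every channel.
scoped_ptr<IPC::AttachmentBrokerUnprivileged> CreateChildBrokerSketch(
    IPC::Endpoint* browser_channel) {
  scoped_ptr<IPC::AttachmentBrokerUnprivileged> broker =
      IPC::AttachmentBrokerUnprivileged::CreateBroker();
  if (broker)  // Null on platforms without brokering support.
    broker->DesignateBrokerCommunicationChannel(browser_channel);
  return broker.Pass();
}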
diff --git a/ipc/attachment_broker_unprivileged_mac.cc b/ipc/attachment_broker_unprivileged_mac.cc new file mode 100644 index 0000000..b4ced08 --- /dev/null +++ b/ipc/attachment_broker_unprivileged_mac.cc
@@ -0,0 +1,100 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "ipc/attachment_broker_unprivileged_mac.h" + +#include <mach/mach.h> + +#include "base/mac/scoped_mach_port.h" +#include "base/process/process.h" +#include "ipc/attachment_broker_messages.h" +#include "ipc/brokerable_attachment.h" +#include "ipc/ipc_sender.h" +#include "ipc/mach_port_attachment_mac.h" + +namespace { + +// Struct for receiving a complex message. +struct MachReceiveComplexMessage { + mach_msg_header_t header; + mach_msg_body_t body; + mach_msg_port_descriptor_t data; + mach_msg_trailer_t trailer; +}; + +// Receives a Mach port from |port_to_listen_on|, which should have exactly one +// queued message. Returns |MACH_PORT_NULL| on any error. +base::mac::ScopedMachSendRight ReceiveMachPort(mach_port_t port_to_listen_on) { + MachReceiveComplexMessage recv_msg; + mach_msg_header_t* recv_hdr = &recv_msg.header; + recv_hdr->msgh_local_port = port_to_listen_on; + recv_hdr->msgh_size = sizeof(recv_msg); + + kern_return_t kr = + mach_msg(recv_hdr, MACH_RCV_MSG | MACH_RCV_TIMEOUT, 0, + recv_hdr->msgh_size, port_to_listen_on, 0, MACH_PORT_NULL); + if (kr != KERN_SUCCESS) + return base::mac::ScopedMachSendRight(MACH_PORT_NULL); + if (recv_msg.header.msgh_id != 0) + return base::mac::ScopedMachSendRight(MACH_PORT_NULL); + return base::mac::ScopedMachSendRight(recv_msg.data.name); +} + +} // namespace + +namespace IPC { + +AttachmentBrokerUnprivilegedMac::AttachmentBrokerUnprivilegedMac() {} + +AttachmentBrokerUnprivilegedMac::~AttachmentBrokerUnprivilegedMac() {} + +bool AttachmentBrokerUnprivilegedMac::SendAttachmentToProcess( + const BrokerableAttachment* attachment, + base::ProcessId destination_process) { + switch (attachment->GetBrokerableType()) { + case BrokerableAttachment::MACH_PORT: { + const internal::MachPortAttachmentMac* mach_port_attachment = + static_cast<const internal::MachPortAttachmentMac*>(attachment); + internal::MachPortAttachmentMac::WireFormat format = + mach_port_attachment->GetWireFormat(destination_process); + return get_sender()->Send( + new AttachmentBrokerMsg_DuplicateMachPort(format)); + } + default: + NOTREACHED(); + return false; + } + return false; +} + +bool AttachmentBrokerUnprivilegedMac::OnMessageReceived(const Message& msg) { + bool handled = true; + IPC_BEGIN_MESSAGE_MAP(AttachmentBrokerUnprivilegedMac, msg) + IPC_MESSAGE_HANDLER(AttachmentBrokerMsg_MachPortHasBeenDuplicated, + OnMachPortHasBeenDuplicated) + IPC_MESSAGE_UNHANDLED(handled = false) + IPC_END_MESSAGE_MAP() + return handled; +} + +void AttachmentBrokerUnprivilegedMac::OnMachPortHasBeenDuplicated( + const IPC::internal::MachPortAttachmentMac::WireFormat& wire_format) { + // The IPC message was intended for a different process. Ignore it. + if (wire_format.destination_process != base::Process::Current().Pid()) { + // TODO(erikchen): UMA metric. + return; + } + + base::mac::ScopedMachReceiveRight message_port(wire_format.mach_port); + base::mac::ScopedMachSendRight memory_object(ReceiveMachPort(message_port)); + IPC::internal::MachPortAttachmentMac::WireFormat translated_wire_format( + memory_object.release(), wire_format.destination_process, + wire_format.attachment_id); + + scoped_refptr<BrokerableAttachment> attachment( + new IPC::internal::MachPortAttachmentMac(translated_wire_format)); + HandleReceivedAttachment(attachment); +} + +} // namespace IPC
diff --git a/ipc/attachment_broker_unprivileged_mac.h b/ipc/attachment_broker_unprivileged_mac.h new file mode 100644 index 0000000..58c4f87 --- /dev/null +++ b/ipc/attachment_broker_unprivileged_mac.h
@@ -0,0 +1,41 @@ +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef IPC_ATTACHMENT_BROKER_UNPRIVILEGED_MAC_H_ +#define IPC_ATTACHMENT_BROKER_UNPRIVILEGED_MAC_H_ + +#include "ipc/attachment_broker_unprivileged.h" +#include "ipc/ipc_export.h" +#include "ipc/mach_port_attachment_mac.h" + +namespace IPC { + +class BrokerableAttachment; + +// This class is an implementation of AttachmentBroker for the OSX platform +// for non-privileged processes. +class IPC_EXPORT AttachmentBrokerUnprivilegedMac + : public IPC::AttachmentBrokerUnprivileged { + public: + AttachmentBrokerUnprivilegedMac(); + ~AttachmentBrokerUnprivilegedMac() override; + + // IPC::AttachmentBroker overrides. + bool SendAttachmentToProcess(const BrokerableAttachment* attachment, + base::ProcessId destination_process) override; + + // IPC::Listener overrides. + bool OnMessageReceived(const Message& message) override; + + private: + // IPC message handlers. + void OnMachPortHasBeenDuplicated( + const IPC::internal::MachPortAttachmentMac::WireFormat& wire_format); + + DISALLOW_COPY_AND_ASSIGN(AttachmentBrokerUnprivilegedMac); +}; + +} // namespace IPC + +#endif // IPC_ATTACHMENT_BROKER_UNPRIVILEGED_MAC_H_
diff --git a/ipc/handle_win.cc b/ipc/handle_win.cc index 5d25905..51f3f95f 100644 --- a/ipc/handle_win.cc +++ b/ipc/handle_win.cc
@@ -48,7 +48,7 @@ // static void ParamTraits<HandleWin>::Log(const param_type& p, std::string* l) { - l->append(base::StringPrintf("0x%X", p.get_handle())); + l->append(base::StringPrintf("0x%p", p.get_handle())); l->append(base::IntToString(p.get_permissions())); }
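
Reviewer note (not part of the patch): rationale for the format-string change here and in ipc_message_utils.cc below. HANDLE is pointer-sized, so "%X" (which expects an unsigned int) is a printf type mismatch and in practice logs only the low 32 bits in 64-bit builds; "%p" prints the whole value. A minimal illustration, assuming only base::StringPrintf:

#include <windows.h>

#include <string>

#include "base/strings/stringprintf.h"

std::string LogHandleSketch(HANDLE handle) {
  // Old form, truncated on 64-bit Windows:
  //   base::StringPrintf("0x%X", handle);
  // New form, prints the full pointer-sized value:
  return base::StringPrintf("0x%p", handle);
}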
diff --git a/ipc/ipc.gyp b/ipc/ipc.gyp index 7924ec62..ad178ab 100644 --- a/ipc/ipc.gyp +++ b/ipc/ipc.gyp
@@ -52,6 +52,7 @@ '..' ], 'sources': [ + 'attachment_broker_privileged_mac_unittest.cc', 'attachment_broker_privileged_win_unittest.cc', 'attachment_broker_unprivileged_win_unittest.cc', 'ipc_channel_posix_unittest.cc',
diff --git a/ipc/ipc.gypi b/ipc/ipc.gypi index 696b34a..44fcdab 100644 --- a/ipc/ipc.gypi +++ b/ipc/ipc.gypi
@@ -16,10 +16,14 @@ 'attachment_broker_messages.h', 'attachment_broker_privileged.cc', 'attachment_broker_privileged.h', + 'attachment_broker_privileged_mac.cc', + 'attachment_broker_privileged_mac.h', 'attachment_broker_privileged_win.cc', 'attachment_broker_privileged_win.h', 'attachment_broker_unprivileged.cc', 'attachment_broker_unprivileged.h', + 'attachment_broker_unprivileged_mac.cc', + 'attachment_broker_unprivileged_mac.h', 'attachment_broker_unprivileged_win.cc', 'attachment_broker_unprivileged_win.h', 'brokerable_attachment.cc',
diff --git a/ipc/ipc_channel.h b/ipc/ipc_channel.h index ce3a00af..142f3d59 100644 --- a/ipc/ipc_channel.h +++ b/ipc/ipc_channel.h
@@ -239,10 +239,10 @@ ~OutputElement(); size_t size() const { return message_ ? message_->size() : length_; } const void* data() const { return message_ ? message_->data() : buffer_; } - const Message* get_message() const { return message_.get(); } + Message* get_message() const { return message_.get(); } private: - scoped_ptr<const Message> message_; + scoped_ptr<Message> message_; void* buffer_; size_t length_; };
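
Reviewer note (not part of the patch): with this change the channel output queues hold OutputElement rather than raw Message pointers, and get_message() now returns a mutable pointer (apparently so the POSIX channel can close a sent message's descriptors through it). Below is a sketch of the two element forms, mirroring how ChannelPosix::ProcessMessageForDelivery() uses them later in this CL; it assumes OutputElement is visible unqualified, as it is inside the channel implementations.

#include <queue>

#include "ipc/ipc_channel.h"
#include "ipc/ipc_message.h"

// Sketch only: queue a message, followed by a raw buffer holding the
// serialized ids of its brokerable attachments (if any).
void QueueForDeliverySketch(std::queue<OutputElement*>* output_queue,
                            IPC::Message* message) {
  // Form 1: the element owns |message|; get_message() returns it.
  output_queue->push(new OutputElement(message));
  if (message->HasBrokerableAttachments()) {
    // Form 2: a raw (buffer, length) pair; the element takes ownership of
    // |ids.buffer| and get_message() returns null for this form.
    IPC::Message::SerializedAttachmentIds ids =
        message->SerializedIdsOfBrokerableAttachments();
    output_queue->push(new OutputElement(ids.buffer, ids.size));
  }
}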
diff --git a/ipc/ipc_channel_posix.cc b/ipc/ipc_channel_posix.cc index 3f5771b..b9ab794 100644 --- a/ipc/ipc_channel_posix.cc +++ b/ipc/ipc_channel_posix.cc
@@ -21,6 +21,7 @@ #include <sys/un.h> #endif +#include <algorithm> #include <map> #include <string> @@ -383,22 +384,23 @@ } bool ChannelPosix::ProcessOutgoingMessages() { - DCHECK(!waiting_connect_); // Why are we trying to send messages if there's - // no connection? + if (waiting_connect_) + return true; + if (is_blocked_on_write_) + return true; if (output_queue_.empty()) return true; - if (!pipe_.is_valid()) return false; // Write out all the messages we can till the write blocks or there are no // more outgoing messages. while (!output_queue_.empty()) { - Message* msg = output_queue_.front(); + OutputElement* element = output_queue_.front(); - size_t amt_to_write = msg->size() - message_send_bytes_written_; + size_t amt_to_write = element->size() - message_send_bytes_written_; DCHECK_NE(0U, amt_to_write); - const char* out_bytes = reinterpret_cast<const char*>(msg->data()) + + const char* out_bytes = reinterpret_cast<const char*>(element->data()) + message_send_bytes_written_; struct msghdr msgh = {0}; @@ -411,7 +413,9 @@ ssize_t bytes_written = 1; int fd_written = -1; - if (message_send_bytes_written_ == 0 && !msg->attachment_set()->empty()) { + Message* msg = element->get_message(); + if (message_send_bytes_written_ == 0 && msg && + !msg->attachment_set()->empty()) { // This is the first chunk of a message which has descriptors to send struct cmsghdr *cmsg; const unsigned num_fds = msg->attachment_set()->size(); @@ -446,7 +450,7 @@ fd_written = pipe_.get(); bytes_written = HANDLE_EINTR(sendmsg(pipe_.get(), &msgh, MSG_DONTWAIT)); } - if (bytes_written > 0) + if (bytes_written > 0 && msg) CloseFileDescriptors(msg); if (bytes_written < 0 && !SocketWriteErrorIsRecoverable()) { @@ -468,7 +472,7 @@ PLOG(ERROR) << "pipe error on " << fd_written << " Currently writing message of size: " - << msg->size(); + << element->size(); return false; } @@ -491,8 +495,13 @@ message_send_bytes_written_ = 0; // Message sent OK! 
- DVLOG(2) << "sent message @" << msg << " on channel @" << this - << " with type " << msg->type() << " on fd " << pipe_.get(); + if (msg) { + DVLOG(2) << "sent message @" << msg << " on channel @" << this + << " with type " << msg->type() << " on fd " << pipe_.get(); + } else { + DVLOG(2) << "sent buffer @" << element->data() << " on channel @" + << this << " on fd " << pipe_.get(); + } delete output_queue_.front(); output_queue_.pop(); } @@ -506,20 +515,18 @@ << " with type " << message->type() << " (" << output_queue_.size() << " in queue)"; -#ifdef IPC_MESSAGE_LOG_ENABLED - Logging::GetInstance()->OnSendMessage(message, ""); -#endif // IPC_MESSAGE_LOG_ENABLED - - TRACE_EVENT_WITH_FLOW0(TRACE_DISABLED_BY_DEFAULT("ipc.flow"), - "ChannelPosix::Send", - message->flags(), - TRACE_EVENT_FLAG_FLOW_OUT); - output_queue_.push(message); - if (!is_blocked_on_write_ && !waiting_connect_) { - return ProcessOutgoingMessages(); + if (!prelim_queue_.empty()) { + prelim_queue_.push(message); + return true; } - return true; + if (message->HasBrokerableAttachments() && + peer_pid_ == base::kNullProcessId) { + prelim_queue_.push(message); + return true; + } + + return ProcessMessageForDelivery(message); } AttachmentBroker* ChannelPosix::GetAttachmentBroker() { @@ -570,10 +577,11 @@ ResetSafely(&pipe_); while (!output_queue_.empty()) { - Message* m = output_queue_.front(); + OutputElement* element = output_queue_.front(); output_queue_.pop(); - CloseFileDescriptors(m); - delete m; + if (element->get_message()) + CloseFileDescriptors(element->get_message()); + delete element; } // Close any outstanding, received file descriptors. @@ -668,10 +676,8 @@ // only send our handshake message after we've processed the client's. // This gives us a chance to kill the client if the incoming handshake // is invalid. This also flushes any closefd messages. - if (!is_blocked_on_write_) { - if (!ProcessOutgoingMessages()) { - ClosePipeOnError(); - } + if (!ProcessOutgoingMessages()) { + ClosePipeOnError(); } } @@ -684,6 +690,73 @@ } } +bool ChannelPosix::ProcessMessageForDelivery(Message* message) { + // Sending a brokerable attachment requires a call to Channel::Send(), so + // Send() may be re-entrant. Brokered attachments must be sent before the + // Message itself. + if (message->HasBrokerableAttachments()) { + DCHECK(GetAttachmentBroker()); + DCHECK(peer_pid_ != base::kNullProcessId); + for (const BrokerableAttachment* attachment : + message->attachment_set()->PeekBrokerableAttachments()) { + if (!GetAttachmentBroker()->SendAttachmentToProcess(attachment, + peer_pid_)) { + delete message; + return false; + } + } + } + +#ifdef IPC_MESSAGE_LOG_ENABLED + Logging::GetInstance()->OnSendMessage(message, ""); +#endif // IPC_MESSAGE_LOG_ENABLED + + TRACE_EVENT_WITH_FLOW0(TRACE_DISABLED_BY_DEFAULT("ipc.flow"), + "ChannelPosix::Send", + message->flags(), + TRACE_EVENT_FLAG_FLOW_OUT); + + // |output_queue_| takes ownership of |message|. + OutputElement* element = new OutputElement(message); + output_queue_.push(element); + + if (message->HasBrokerableAttachments()) { + // |output_queue_| takes ownership of |ids.buffer|. + Message::SerializedAttachmentIds ids = + message->SerializedIdsOfBrokerableAttachments(); + output_queue_.push(new OutputElement(ids.buffer, ids.size)); + } + + return ProcessOutgoingMessages(); +} + +bool ChannelPosix::FlushPrelimQueue() { + DCHECK_NE(peer_pid_, base::kNullProcessId); + + // Due to the possibly re-entrant nature of ProcessMessageForDelivery(), + // |prelim_queue_| should appear empty. 
+ std::queue<Message*> prelim_queue; + std::swap(prelim_queue_, prelim_queue); + + bool processing_error = false; + while (!prelim_queue.empty()) { + Message* m = prelim_queue.front(); + processing_error = !ProcessMessageForDelivery(m); + prelim_queue.pop(); + if (processing_error) + break; + } + + // Delete any unprocessed messages. + while (!prelim_queue.empty()) { + Message* m = prelim_queue.front(); + delete m; + prelim_queue.pop(); + } + + return !processing_error; +} + bool ChannelPosix::AcceptConnection() { base::MessageLoopForIO::current()->WatchFileDescriptor( pipe_.get(), @@ -747,7 +820,8 @@ if (!msg->WriteInt(GetHelloMessageProcId())) { NOTREACHED() << "Unable to pickle hello message proc id"; } - output_queue_.push(msg.release()); + OutputElement* element = new OutputElement(msg.release()); + output_queue_.push(element); } ChannelPosix::ReadState ChannelPosix::ReadData( @@ -902,8 +976,9 @@ if (!msg->WriteInt(hops - 1) || !msg->WriteInt(fd)) { NOTREACHED() << "Unable to pickle close fd."; } - // Send(msg.release()); - output_queue_.push(msg.release()); + + OutputElement* element = new OutputElement(msg.release()); + output_queue_.push(element); break; } @@ -929,6 +1004,9 @@ peer_pid_ = pid; listener()->OnChannelConnected(pid); + + if (!FlushPrelimQueue()) + ClosePipeOnError(); break; #if defined(OS_MACOSX)
diff --git a/ipc/ipc_channel_posix.h b/ipc/ipc_channel_posix.h index be47705d..d5d3494 100644 --- a/ipc/ipc_channel_posix.h +++ b/ipc/ipc_channel_posix.h
@@ -67,6 +67,13 @@
  private:
   bool CreatePipe(const IPC::ChannelHandle& channel_handle);
 
+  // Returns |false| on channel error.
+  // There are two reasons why this method might leave messages in
+  // |output_queue_|:
+  //   1. |waiting_connect_| is |true|.
+  //   2. |is_blocked_on_write_| is |true|.
+  // If either of these conditions changes, this method should be called
+  // again, as previously blocked messages may no longer be blocked.
   bool ProcessOutgoingMessages();
 
   bool AcceptConnection();
@@ -100,6 +107,18 @@
   void OnFileCanReadWithoutBlocking(int fd) override;
   void OnFileCanWriteWithoutBlocking(int fd) override;
 
+  // Returns |false| on channel error.
+  // If |message| has brokerable attachments, those attachments are passed to
+  // the AttachmentBroker (which in turn invokes Send()), so this method must
+  // be re-entrant.
+  // Adds |message| to |output_queue_| and calls ProcessOutgoingMessages().
+  bool ProcessMessageForDelivery(Message* message);
+
+  // Moves all messages from |prelim_queue_| to |output_queue_| by calling
+  // ProcessMessageForDelivery().
+  // Returns |false| on channel error.
+  bool FlushPrelimQueue();
+
   Mode mode_;
 
   base::ProcessId peer_pid_;
@@ -135,8 +154,18 @@
   // the pipe. On POSIX it's used as a key in a local map of file descriptors.
   std::string pipe_name_;
 
+  // Messages not yet ready to be sent are queued here. Messages removed from
+  // this queue are placed in |output_queue_|. The double queue is
+  // unfortunate, but is necessary because messages with brokerable attachments
+  // can generate multiple messages to be sent (possibly from other channels).
+  // Some of these generated messages cannot be sent until |peer_pid_| has been
+  // configured.
+  // As soon as |peer_pid_| has been configured, there is no longer any need for
+  // |prelim_queue_|. All messages are flushed, and no new messages are added.
+  std::queue<Message*> prelim_queue_;
+
   // Messages to be sent are queued here.
-  std::queue<Message*> output_queue_;
+  std::queue<OutputElement*> output_queue_;
 
   // We assume a worst case: kReadBufferSize bytes of messages, where each
   // message has no payload and a full complement of descriptors.
diff --git a/ipc/ipc_channel_win.cc b/ipc/ipc_channel_win.cc index e758d944..c04f0c6 100644 --- a/ipc/ipc_channel_win.cc +++ b/ipc/ipc_channel_win.cc
@@ -161,7 +161,17 @@ while (!prelim_queue.empty()) { Message* m = prelim_queue.front(); - ProcessMessageForDelivery(m); + bool success = ProcessMessageForDelivery(m); + prelim_queue.pop(); + + if (!success) + break; + } + + // Delete any unprocessed messages. + while (!prelim_queue.empty()) { + Message* m = prelim_queue.front(); + delete m; prelim_queue.pop(); } } @@ -254,9 +264,9 @@ // Validation completed. validate_client_ = false; - FlushPrelimQueue(); - listener()->OnChannelConnected(claimed_pid); + + FlushPrelimQueue(); } base::ProcessId ChannelWin::GetSenderPID() {
diff --git a/ipc/ipc_message_utils.cc b/ipc/ipc_message_utils.cc index 3baf2ee..8341e51 100644 --- a/ipc/ipc_message_utils.cc +++ b/ipc/ipc_message_utils.cc
@@ -966,7 +966,7 @@ } void ParamTraits<HANDLE>::Log(const param_type& p, std::string* l) { - l->append(base::StringPrintf("0x%X", p)); + l->append(base::StringPrintf("0x%p", p)); } void ParamTraits<LOGFONT>::Write(Message* m, const param_type& p) {
diff --git a/media/base/android/java/src/org/chromium/media/MediaPlayerListener.java b/media/base/android/java/src/org/chromium/media/MediaPlayerListener.java index 469e049..0cac2e0 100644 --- a/media/base/android/java/src/org/chromium/media/MediaPlayerListener.java +++ b/media/base/android/java/src/org/chromium/media/MediaPlayerListener.java
@@ -19,7 +19,7 @@ MediaPlayer.OnSeekCompleteListener, MediaPlayer.OnVideoSizeChangedListener, MediaPlayer.OnErrorListener { - // These values are mirrored as enums in media/base/android/media_player_bridge.h. + // These values are mirrored as enums in media/base/android/media_player_android.h. // Please ensure they stay in sync. private static final int MEDIA_ERROR_FORMAT = 0; private static final int MEDIA_ERROR_DECODE = 1; @@ -56,9 +56,6 @@ break; } break; - case MediaPlayer.MEDIA_ERROR_SERVER_DIED: - errorType = MEDIA_ERROR_DECODE; - break; case MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK: errorType = MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK; break;
diff --git a/media/base/android/media_codec_decoder_unittest.cc b/media/base/android/media_codec_decoder_unittest.cc index a667125..322407b 100644 --- a/media/base/android/media_codec_decoder_unittest.cc +++ b/media/base/android/media_codec_decoder_unittest.cc
@@ -302,8 +302,6 @@ base::Bind(&MediaCodecDecoderTest::OnUpdateCurrentTime, base::Unretained(this)), base::Bind(&MediaCodecDecoderTest::OnVideoSizeChanged, - base::Unretained(this)), - base::Bind(&MediaCodecDecoderTest::OnVideoCodecCreated, base::Unretained(this)))); data_available_cb_ = base::Bind(&MediaCodecDecoder::OnDemuxerDataAvailable,
diff --git a/media/base/android/media_codec_player.cc b/media/base/android/media_codec_player.cc index 2fc8c18..4c297f0 100644 --- a/media/base/android/media_codec_player.cc +++ b/media/base/android/media_codec_player.cc
@@ -36,12 +36,12 @@
 MediaCodecPlayer::MediaCodecPlayer(
     int player_id,
     base::WeakPtr<MediaPlayerManager> manager,
-    const RequestMediaResourcesCB& request_media_resources_cb,
+    const OnDecoderResourcesReleasedCB& on_decoder_resources_released_cb,
     scoped_ptr<DemuxerAndroid> demuxer,
     const GURL& frame_url)
     : MediaPlayerAndroid(player_id,
                          manager.get(),
-                         request_media_resources_cb,
+                         on_decoder_resources_released_cb,
                          frame_url),
       ui_task_runner_(base::ThreadTaskRunnerHandle::Get()),
       demuxer_(demuxer.Pass()),
@@ -58,8 +58,6 @@
   DVLOG(1) << "MediaCodecPlayer::MediaCodecPlayer: player_id:" << player_id;
 
-  request_resources_cb_ = base::Bind(request_media_resources_cb_, player_id);
-
   completion_cb_ =
       base::Bind(&MediaPlayerManager::OnPlaybackComplete, manager, player_id);
   waiting_for_decryption_key_cb_ = base::Bind(
@@ -317,6 +315,15 @@
 }
 
 void MediaCodecPlayer::Release() {
+  // TODO(qinmin): the callback should be posted onto the UI thread when
+  // Release() finishes on the media thread. However, the
+  // BrowserMediaPlayerManager could be gone by then, which would leave the
+  // MediaThrottler unable to track the active players. We should pass
+  // MediaThrottler::OnDecodeRequestFinished() to this class in the ctor, but
+  // we also need a way for BrowserMediaPlayerManager to track active players.
+  if (ui_task_runner_->BelongsToCurrentThread())
+    on_decoder_resources_released_cb_.Run(player_id());
+
   RUN_ON_MEDIA_THREAD(Release);
 
   DVLOG(1) << __FUNCTION__;
@@ -900,13 +907,6 @@
   }
 }
 
-void MediaCodecPlayer::OnVideoCodecCreated() {
-  DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
-
-  // This callback requests resources by releasing other players.
-  ui_task_runner_->PostTask(FROM_HERE, request_resources_cb_);
-}
-
 void MediaCodecPlayer::OnVideoResolutionChanged(const gfx::Size& size) {
   DCHECK(GetMediaTaskRunner()->BelongsToCurrentThread());
 
@@ -1380,8 +1380,8 @@
       internal_error_cb_,
      base::Bind(&MediaCodecPlayer::OnTimeIntervalUpdate, media_weak_this_,
                 DemuxerStream::VIDEO),
-      base::Bind(&MediaCodecPlayer::OnVideoResolutionChanged, media_weak_this_),
-      base::Bind(&MediaCodecPlayer::OnVideoCodecCreated, media_weak_this_)));
+      base::Bind(&MediaCodecPlayer::OnVideoResolutionChanged,
+                 media_weak_this_)));
 }
 
 bool MediaCodecPlayer::AudioFinished() const {

diff --git a/media/base/android/media_codec_player.h b/media/base/android/media_codec_player.h index 5662055..376c2e4 100644 --- a/media/base/android/media_codec_player.h +++ b/media/base/android/media_codec_player.h
@@ -186,11 +186,12 @@ // Constructs a player with the given ID and demuxer. |manager| must outlive // the lifetime of this object. - MediaCodecPlayer(int player_id, - base::WeakPtr<MediaPlayerManager> manager, - const RequestMediaResourcesCB& request_media_resources_cb, - scoped_ptr<DemuxerAndroid> demuxer, - const GURL& frame_url); + MediaCodecPlayer( + int player_id, + base::WeakPtr<MediaPlayerManager> manager, + const OnDecoderResourcesReleasedCB& on_decoder_resources_released_cb, + scoped_ptr<DemuxerAndroid> demuxer, + const GURL& frame_url); ~MediaCodecPlayer() override; // A helper method that performs the media thread part of initialization. @@ -297,7 +298,6 @@ bool postpone); // Callbacks from video decoder - void OnVideoCodecCreated(); void OnVideoResolutionChanged(const gfx::Size& size); // Callbacks from CDM @@ -350,7 +350,6 @@ PlayerState state_; // Notification callbacks, they call MediaPlayerManager. - base::Closure request_resources_cb_; TimeUpdateCallback time_update_cb_; base::Closure completion_cb_; base::Closure waiting_for_decryption_key_cb_;
diff --git a/media/base/android/media_codec_video_decoder.cc b/media/base/android/media_codec_video_decoder.cc index 57fc153..f20bbeb 100644 --- a/media/base/android/media_codec_video_decoder.cc +++ b/media/base/android/media_codec_video_decoder.cc
@@ -27,8 +27,7 @@ const base::Closure& waiting_for_decryption_key_cb, const base::Closure& error_cb, const SetTimeCallback& update_current_time_cb, - const VideoSizeChangedCallback& video_size_changed_cb, - const base::Closure& codec_created_cb) + const VideoSizeChangedCallback& video_size_changed_cb) : MediaCodecDecoder("VideoDecoder", media_task_runner, frame_statistics, @@ -40,8 +39,7 @@ error_cb), is_protected_surface_required_(false), update_current_time_cb_(update_current_time_cb), - video_size_changed_cb_(video_size_changed_cb), - codec_created_cb_(codec_created_cb) { + video_size_changed_cb_(video_size_changed_cb) { } MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { @@ -187,8 +185,6 @@ DVLOG(0) << class_name() << "::" << __FUNCTION__ << " succeeded"; - media_task_runner_->PostTask(FROM_HERE, codec_created_cb_); - if (!codec_created_for_tests_cb_.is_null()) media_task_runner_->PostTask(FROM_HERE, codec_created_for_tests_cb_);
diff --git a/media/base/android/media_codec_video_decoder.h b/media/base/android/media_codec_video_decoder.h index d247271c..072a61cd 100644 --- a/media/base/android/media_codec_video_decoder.h +++ b/media/base/android/media_codec_video_decoder.h
@@ -36,8 +36,7 @@ const base::Closure& waiting_for_decryption_key_cb, const base::Closure& error_cb, const SetTimeCallback& update_current_time_cb, - const VideoSizeChangedCallback& video_size_changed_cb, - const base::Closure& codec_created_cb); + const VideoSizeChangedCallback& video_size_changed_cb); ~MediaCodecVideoDecoder() override; const char* class_name() const override; @@ -107,9 +106,6 @@ // Informs the callee that video size is changed. VideoSizeChangedCallback video_size_changed_cb_; - // Informs the callee that the MediaCodec is created. - base::Closure codec_created_cb_; - // Current video size to be sent with |video_size_changed_cb_|. gfx::Size video_size_;
diff --git a/media/base/android/media_player_android.cc b/media/base/android/media_player_android.cc index ed38d971..c08c259 100644 --- a/media/base/android/media_player_android.cc +++ b/media/base/android/media_player_android.cc
@@ -16,9 +16,9 @@ MediaPlayerAndroid::MediaPlayerAndroid( int player_id, MediaPlayerManager* manager, - const RequestMediaResourcesCB& request_media_resources_cb, + const OnDecoderResourcesReleasedCB& on_decoder_resources_released_cb, const GURL& frame_url) - : request_media_resources_cb_(request_media_resources_cb), + : on_decoder_resources_released_cb_(on_decoder_resources_released_cb), player_id_(player_id), manager_(manager), frame_url_(frame_url),
diff --git a/media/base/android/media_player_android.h b/media/base/android/media_player_android.h index 303f8a6..e24ad6c 100644 --- a/media/base/android/media_player_android.h +++ b/media/base/android/media_player_android.h
@@ -37,8 +37,8 @@ MEDIA_ERROR_INVALID_CODE, }; - // Callback when the player needs decoding resources. - typedef base::Callback<void(int player_id)> RequestMediaResourcesCB; + // Callback when the player releases decoding resources. + typedef base::Callback<void(int player_id)> OnDecoderResourcesReleasedCB; // Virtual destructor. // For most subclasses we can delete on the caller thread. @@ -70,10 +70,10 @@ virtual base::TimeDelta GetDuration() = 0; virtual base::TimeDelta GetCurrentTime() = 0; virtual bool IsPlaying() = 0; - virtual bool IsPlayerReady() = 0; virtual bool CanPause() = 0; virtual bool CanSeekForward() = 0; virtual bool CanSeekBackward() = 0; + virtual bool IsPlayerReady() = 0; virtual GURL GetUrl(); virtual GURL GetFirstPartyForCookies(); @@ -103,10 +103,11 @@ void DetachListener(); protected: - MediaPlayerAndroid(int player_id, - MediaPlayerManager* manager, - const RequestMediaResourcesCB& request_media_resources_cb, - const GURL& frame_url); + MediaPlayerAndroid( + int player_id, + MediaPlayerManager* manager, + const OnDecoderResourcesReleasedCB& on_decoder_resources_released_cb, + const GURL& frame_url); // TODO(qinmin): Simplify the MediaPlayerListener class to only listen to // media interrupt events. And have a separate child class to listen to all @@ -129,7 +130,7 @@ base::WeakPtr<MediaPlayerAndroid> WeakPtrForUIThread(); - RequestMediaResourcesCB request_media_resources_cb_; + OnDecoderResourcesReleasedCB on_decoder_resources_released_cb_; private: friend class MediaPlayerListener;
diff --git a/media/base/android/media_player_bridge.cc b/media/base/android/media_player_bridge.cc index 62d589d..e043a787 100644 --- a/media/base/android/media_player_bridge.cc +++ b/media/base/android/media_player_bridge.cc
@@ -28,12 +28,12 @@ const std::string& user_agent, bool hide_url_log, MediaPlayerManager* manager, - const RequestMediaResourcesCB& request_media_resources_cb, + const OnDecoderResourcesReleasedCB& on_decoder_resources_released_cb, const GURL& frame_url, bool allow_credentials) : MediaPlayerAndroid(player_id, manager, - request_media_resources_cb, + on_decoder_resources_released_cb, frame_url), prepared_(false), pending_play_(false), @@ -110,13 +110,14 @@ void MediaPlayerBridge::SetVideoSurface(gfx::ScopedJavaSurface surface) { if (j_media_player_bridge_.is_null()) { - if (surface.IsEmpty()) - return; - Prepare(); + if (!surface.IsEmpty()) + surface_ = surface.Pass(); + return; } JNIEnv* env = base::android::AttachCurrentThread(); CHECK(env); + Java_MediaPlayerBridge_setSurface( env, j_media_player_bridge_.obj(), surface.j_surface().obj()); } @@ -181,7 +182,6 @@ } } - request_media_resources_cb_.Run(player_id()); if (!Java_MediaPlayerBridge_prepareAsync(env, j_media_player_bridge_.obj())) OnMediaError(MEDIA_ERROR_FORMAT); } @@ -212,7 +212,6 @@ return; } - request_media_resources_cb_.Run(player_id()); if (!Java_MediaPlayerBridge_prepareAsync(env, j_media_player_bridge_.obj())) OnMediaError(MEDIA_ERROR_FORMAT); } @@ -240,6 +239,7 @@ void MediaPlayerBridge::ExtractMediaMetadata(const std::string& url) { if (url.empty()) { OnMediaError(MEDIA_ERROR_FORMAT); + on_decoder_resources_released_cb_.Run(player_id()); return; } @@ -268,6 +268,7 @@ } manager()->OnMediaMetadataChanged( player_id(), duration_, width_, height_, success); + on_decoder_resources_released_cb_.Run(player_id()); } void MediaPlayerBridge::Start() { @@ -325,9 +326,7 @@ pending_seek_ = timestamp; should_seek_on_prepare_ = true; - if (j_media_player_bridge_.is_null()) - Prepare(); - else if (prepared_) + if (prepared_) SeekInternal(GetCurrentTime(), timestamp); } @@ -351,6 +350,7 @@ } void MediaPlayerBridge::Release() { + on_decoder_resources_released_cb_.Run(player_id()); if (j_media_player_bridge_.is_null()) return; @@ -413,6 +413,9 @@ should_seek_on_prepare_ = false; } + if (!surface_.IsEmpty()) + SetVideoSurface(surface_.Pass()); + if (pending_play_) { StartInternal(); pending_play_ = false;
diff --git a/media/base/android/media_player_bridge.h b/media/base/android/media_player_bridge.h index f2f53e7..fbc54adf 100644 --- a/media/base/android/media_player_bridge.h +++ b/media/base/android/media_player_bridge.h
@@ -41,15 +41,16 @@ // |manager| to track unused resources and free them when needed. // MediaPlayerBridge also forwards Android MediaPlayer callbacks to // the |manager| when needed. - MediaPlayerBridge(int player_id, - const GURL& url, - const GURL& first_party_for_cookies, - const std::string& user_agent, - bool hide_url_log, - MediaPlayerManager* manager, - const RequestMediaResourcesCB& request_media_resources_cb, - const GURL& frame_url, - bool allow_credentials); + MediaPlayerBridge( + int player_id, + const GURL& url, + const GURL& first_party_for_cookies, + const std::string& user_agent, + bool hide_url_log, + MediaPlayerManager* manager, + const OnDecoderResourcesReleasedCB& on_decoder_resources_released_cb, + const GURL& frame_url, + bool allow_credentials); ~MediaPlayerBridge() override; // Initialize this object and extract the metadata from the media. @@ -174,6 +175,9 @@ // Cookies for |url_|. std::string cookies_; + // The surface object currently owned by the player. + gfx::ScopedJavaSurface surface_; + // Java MediaPlayerBridge instance. base::android::ScopedJavaGlobalRef<jobject> j_media_player_bridge_;
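With the rename, the callback now reports that a player has released its decoding resources rather than that it is requesting them. A hypothetical caller-side sketch of the new constructor signature; MyPlayerManager, weak_factory_, and the surrounding variables are illustrative, not the real BrowserMediaPlayerManager code:

    // The manager binds a member function matching
    // OnDecoderResourcesReleasedCB, i.e. void(int player_id).
    MediaPlayerAndroid::OnDecoderResourcesReleasedCB released_cb =
        base::Bind(&MyPlayerManager::OnDecoderResourcesReleased,
                   weak_factory_.GetWeakPtr());

    scoped_ptr<MediaPlayerBridge> player(new MediaPlayerBridge(
        player_id, url, first_party_for_cookies, user_agent,
        false /* hide_url_log */, this /* manager */, released_cb, frame_url,
        true /* allow_credentials */));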
diff --git a/media/base/android/media_source_player.cc b/media/base/android/media_source_player.cc index 92f08cb..4674191 100644 --- a/media/base/android/media_source_player.cc +++ b/media/base/android/media_source_player.cc
@@ -26,12 +26,12 @@ MediaSourcePlayer::MediaSourcePlayer( int player_id, MediaPlayerManager* manager, - const RequestMediaResourcesCB& request_media_resources_cb, + const OnDecoderResourcesReleasedCB& on_decoder_resources_released_cb, scoped_ptr<DemuxerAndroid> demuxer, const GURL& frame_url) : MediaPlayerAndroid(player_id, manager, - request_media_resources_cb, + on_decoder_resources_released_cb, frame_url), demuxer_(demuxer.Pass()), pending_event_(NO_EVENT_PENDING), @@ -60,7 +60,6 @@ base::Bind(&DemuxerAndroid::RequestDemuxerData, base::Unretained(demuxer_.get()), DemuxerStream::VIDEO), - base::Bind(request_media_resources_cb_, player_id), base::Bind(&MediaSourcePlayer::OnDemuxerConfigsChanged, weak_factory_.GetWeakPtr()), &media_stat_->video_frame_stats())); @@ -193,6 +192,7 @@ decoder_starvation_callback_.Cancel(); DetachListener(); + on_decoder_resources_released_cb_.Run(player_id()); } void MediaSourcePlayer::SetVolume(double volume) { @@ -212,7 +212,7 @@ } bool MediaSourcePlayer::IsPlayerReady() { - return audio_decoder_job_ || video_decoder_job_; + return HasAudio() || HasVideo(); } void MediaSourcePlayer::StartInternal() {
diff --git a/media/base/android/media_source_player.h b/media/base/android/media_source_player.h index d7b6715..eb4e3fc 100644 --- a/media/base/android/media_source_player.h +++ b/media/base/android/media_source_player.h
@@ -39,11 +39,12 @@ public: // Constructs a player with the given ID and demuxer. |manager| must outlive // the lifetime of this object. - MediaSourcePlayer(int player_id, - MediaPlayerManager* manager, - const RequestMediaResourcesCB& request_media_resources_cb, - scoped_ptr<DemuxerAndroid> demuxer, - const GURL& frame_url); + MediaSourcePlayer( + int player_id, + MediaPlayerManager* manager, + const OnDecoderResourcesReleasedCB& on_decoder_resources_released_cb, + scoped_ptr<DemuxerAndroid> demuxer, + const GURL& frame_url); ~MediaSourcePlayer() override; // MediaPlayerAndroid implementation.
diff --git a/media/base/android/media_source_player_unittest.cc b/media/base/android/media_source_player_unittest.cc index 0e5194d..65b40ec 100644 --- a/media/base/android/media_source_player_unittest.cc +++ b/media/base/android/media_source_player_unittest.cc
@@ -45,7 +45,6 @@ explicit MockMediaPlayerManager(base::MessageLoop* message_loop) : message_loop_(message_loop), playback_completed_(false), - num_resources_requested_(0), num_metadata_changes_(0), timestamp_updated_(false), allow_play_(true) {} @@ -80,6 +79,7 @@ void OnWaitingForDecryptionKey(int player_id) override {} MediaPlayerAndroid* GetFullscreenPlayer() override { return NULL; } MediaPlayerAndroid* GetPlayer(int player_id) override { return NULL; } + void OnDecorderResourcesReleased(int player_id) {} bool RequestPlay(int player_id, base::TimeDelta duration) override { return allow_play_; @@ -89,18 +89,10 @@ return playback_completed_; } - int num_resources_requested() const { - return num_resources_requested_; - } - int num_metadata_changes() const { return num_metadata_changes_; } - void OnMediaResourcesRequested(int player_id) { - num_resources_requested_++; - } - bool timestamp_updated() const { return timestamp_updated_; } @@ -116,8 +108,6 @@ private: base::MessageLoop* message_loop_; bool playback_completed_; - // The number of resource requests this object has seen. - int num_resources_requested_; // The number of metadata changes reported by the player. int num_metadata_changes_; // Playback timestamp was updated. @@ -175,7 +165,7 @@ : manager_(&message_loop_), demuxer_(new MockDemuxerAndroid(&message_loop_)), player_(0, &manager_, - base::Bind(&MockMediaPlayerManager::OnMediaResourcesRequested, + base::Bind(&MockMediaPlayerManager::OnDecorderResourcesReleased, base::Unretained(&manager_)), scoped_ptr<DemuxerAndroid>(demuxer_), GURL()), @@ -1066,8 +1056,6 @@ // not be immediately released. CreateNextTextureAndSetVideoSurface(); StartVideoDecoderJob(); - // No resource is requested since there is no data to decode. - EXPECT_EQ(0, manager_.num_resources_requested()); ReleasePlayer(); player_.OnDemuxerDataAvailable(CreateReadFromDemuxerAckForVideo(false)); @@ -1077,7 +1065,6 @@ while (!GetMediaDecoderJob(false)->is_decoding()) message_loop_.RunUntilIdle(); EXPECT_EQ(0, demuxer_->num_browser_seek_requests()); - EXPECT_EQ(1, manager_.num_resources_requested()); ReleasePlayer(); // Wait for the media codec bridge to finish decoding and be reset. while (GetMediaDecoderJob(false)->is_decoding()) @@ -1939,10 +1926,6 @@ EXPECT_TRUE(GetMediaCodecBridge(false)); EXPECT_EQ(3, demuxer_->num_data_requests()); EXPECT_EQ(0, demuxer_->num_seek_requests()); - - // 2 codecs should have been created, one before the config change, and one - // after it. - EXPECT_EQ(2, manager_.num_resources_requested()); WaitForVideoDecodeDone(); } @@ -1958,9 +1941,6 @@ EXPECT_TRUE(GetMediaCodecBridge(false)); EXPECT_EQ(3, demuxer_->num_data_requests()); EXPECT_EQ(0, demuxer_->num_seek_requests()); - - // Only 1 codec should have been created so far. - EXPECT_EQ(1, manager_.num_resources_requested()); WaitForVideoDecodeDone(); }
diff --git a/media/base/android/video_decoder_job.cc b/media/base/android/video_decoder_job.cc index ebb64e1..4c2cc76 100644 --- a/media/base/android/video_decoder_job.cc +++ b/media/base/android/video_decoder_job.cc
@@ -27,7 +27,6 @@ VideoDecoderJob::VideoDecoderJob( const base::Closure& request_data_cb, - const base::Closure& request_resources_cb, const base::Closure& on_demuxer_config_changed_cb, FrameStatistics* frame_statistics) : MediaDecoderJob(g_video_decoder_thread.Pointer()->task_runner(), @@ -38,8 +37,7 @@ config_width_(0), config_height_(0), output_width_(0), - output_height_(0), - request_resources_cb_(request_resources_cb) { + output_height_(0) { } VideoDecoderJob::~VideoDecoderJob() {} @@ -143,7 +141,6 @@ if (!media_codec_bridge_) return STATUS_FAILURE; - request_resources_cb_.Run(); return STATUS_SUCCESS; }
diff --git a/media/base/android/video_decoder_job.h b/media/base/android/video_decoder_job.h index 7700ee4c..372fe1ab 100644 --- a/media/base/android/video_decoder_job.h +++ b/media/base/android/video_decoder_job.h
@@ -18,11 +18,9 @@ public: // Create a new VideoDecoderJob instance. // |request_data_cb| - Callback used to request more data for the decoder. - // |request_resources_cb| - Callback used to request resources. // |on_demuxer_config_changed_cb| - Callback used to inform the caller that // demuxer config has changed. VideoDecoderJob(const base::Closure& request_data_cb, - const base::Closure& request_resources_cb, const base::Closure& on_demuxer_config_changed_cb, FrameStatistics* frame_statistics); ~VideoDecoderJob() override; @@ -69,10 +67,6 @@ // The surface object currently owned by the player. gfx::ScopedJavaSurface surface_; - // Callbacks to inform the caller about decoder resources change. - base::Closure request_resources_cb_; - base::Closure release_resources_cb_; - DISALLOW_COPY_AND_ASSIGN(VideoDecoderJob); };
diff --git a/media/base/android/webaudio_media_codec_bridge.cc b/media/base/android/webaudio_media_codec_bridge.cc index 12861d67..312e9663 100644 --- a/media/base/android/webaudio_media_codec_bridge.cc +++ b/media/base/android/webaudio_media_codec_bridge.cc
@@ -30,10 +30,13 @@ void WebAudioMediaCodecBridge::RunWebAudioMediaCodec( base::SharedMemoryHandle encoded_audio_handle, base::FileDescriptor pcm_output, - uint32_t data_size) { - WebAudioMediaCodecBridge bridge(encoded_audio_handle, pcm_output, data_size); + uint32_t data_size, + base::Closure on_decode_finished_cb) { + WebAudioMediaCodecBridge bridge( + encoded_audio_handle, pcm_output, data_size); bridge.DecodeInMemoryAudioFile(); + on_decode_finished_cb.Run(); } WebAudioMediaCodecBridge::WebAudioMediaCodecBridge(
diff --git a/media/base/android/webaudio_media_codec_bridge.h b/media/base/android/webaudio_media_codec_bridge.h index fda6126..1585c4c 100644 --- a/media/base/android/webaudio_media_codec_bridge.h +++ b/media/base/android/webaudio_media_codec_bridge.h
@@ -7,6 +7,7 @@ #include <jni.h> +#include "base/callback.h" #include "base/file_descriptor_posix.h" #include "base/memory/shared_memory.h" #include "media/base/media_export.h" @@ -36,7 +37,8 @@ static void RunWebAudioMediaCodec( base::SharedMemoryHandle encoded_audio_handle, base::FileDescriptor pcm_output, - uint32_t data_size); + uint32_t data_size, + base::Closure on_decode_finished_cb); void OnChunkDecoded(JNIEnv* env, jobject /*java object*/,
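The extra base::Closure parameter lets the caller learn when the blocking decode has finished. A hypothetical caller-side sketch; only RunWebAudioMediaCodec's signature is taken from the header above, while MyAudioHost, the worker-pool posting, and the variable names are assumptions:

    // Completion callback bounced back to the calling thread.
    base::Closure done = media::BindToCurrentLoop(
        base::Bind(&MyAudioHost::OnDecodeFinished,
                   weak_factory_.GetWeakPtr()));

    base::WorkerPool::PostTask(
        FROM_HERE,
        base::Bind(&media::WebAudioMediaCodecBridge::RunWebAudioMediaCodec,
                   encoded_audio_handle, pcm_output, data_size, done),
        true /* task_is_slow */);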
diff --git a/mojo/fetcher/network_fetcher.cc b/mojo/fetcher/network_fetcher.cc index 603cf50d..c9a37f2 100644 --- a/mojo/fetcher/network_fetcher.cc +++ b/mojo/fetcher/network_fetcher.cc
@@ -90,8 +90,8 @@ base::FilePath map_path = temp_dir.AppendASCII(map_name); // TODO(eseidel): Paths or URLs with spaces will need quoting. - std::string map_entry = - base::StringPrintf("%s %s\n", path.value().c_str(), url.spec().c_str()); + std::string map_entry = base::StringPrintf( + "%" PRIsFP " %s\n", path.value().c_str(), url.spec().c_str()); // TODO(eseidel): AppendToFile is missing O_CREAT, crbug.com/450696 if (!PathExists(map_path)) { base::WriteFile(map_path, map_entry.data(),
diff --git a/net/BUILD.gn b/net/BUILD.gn index d2a4573..0075633 100644 --- a/net/BUILD.gn +++ b/net/BUILD.gn
@@ -11,13 +11,7 @@ import("//testing/test.gni") import("//third_party/icu/config.gni") import("//third_party/protobuf/proto_library.gni") - -# TODO(cjhopman): //build/config/android/rules.gni also imports grit_rule.gni. -# Currently, that file can't be imported multiple times. Make this always -# imported when http://crbug.com/393704 is fixed. -if (!is_android) { - import("//tools/grit/grit_rule.gni") -} +import("//tools/grit/grit_rule.gni") if (is_android) { import("//build/config/android/config.gni")
diff --git a/net/base/network_quality_estimator.cc b/net/base/network_quality_estimator.cc index d192bf0f..c3380958 100644 --- a/net/base/network_quality_estimator.cc +++ b/net/base/network_quality_estimator.cc
@@ -253,16 +253,21 @@ void NetworkQualityEstimator::AddDefaultEstimates() { DCHECK(thread_checker_.CalledOnValidThread()); if (default_observations_[current_network_id_.type].rtt() != InvalidRTT()) { - rtt_msec_observations_.AddObservation(Observation( + Observation rtt_observation( default_observations_[current_network_id_.type].rtt().InMilliseconds(), - base::TimeTicks::Now())); + base::TimeTicks::Now(), DEFAULT_FROM_PLATFORM); + rtt_msec_observations_.AddObservation(rtt_observation); + NotifyObserversOfRTT(rtt_observation); } if (default_observations_[current_network_id_.type] .downstream_throughput_kbps() != kInvalidThroughput) { + Observation throughput_observation( + default_observations_[current_network_id_.type] + .downstream_throughput_kbps(), + base::TimeTicks::Now(), DEFAULT_FROM_PLATFORM); downstream_throughput_kbps_observations_.AddObservation( - Observation(default_observations_[current_network_id_.type] - .downstream_throughput_kbps(), - base::TimeTicks::Now())); + throughput_observation); + NotifyObserversOfThroughput(throughput_observation); } } @@ -310,8 +315,10 @@ observed_rtt, peak_network_quality_.downstream_throughput_kbps()); } - rtt_msec_observations_.AddObservation( - Observation(observed_rtt.InMilliseconds(), now)); + Observation rtt_observation(observed_rtt.InMilliseconds(), now, + URL_REQUEST); + rtt_msec_observations_.AddObservation(rtt_observation); + NotifyObserversOfRTT(rtt_observation); // Compare the RTT observation with the estimated value and record it. if (estimated_median_network_quality_.rtt() != InvalidRTT()) { @@ -378,8 +385,33 @@ peak_network_quality_ = NetworkQuality(peak_network_quality_.rtt(), downstream_kbps_as_integer); + Observation throughput_observation(downstream_kbps_as_integer, now, + URL_REQUEST); downstream_throughput_kbps_observations_.AddObservation( - Observation(downstream_kbps_as_integer, now)); + throughput_observation); + NotifyObserversOfThroughput(throughput_observation); +} + +void NetworkQualityEstimator::AddRTTObserver(RTTObserver* rtt_observer) { + DCHECK(thread_checker_.CalledOnValidThread()); + rtt_observer_list_.AddObserver(rtt_observer); +} + +void NetworkQualityEstimator::RemoveRTTObserver(RTTObserver* rtt_observer) { + DCHECK(thread_checker_.CalledOnValidThread()); + rtt_observer_list_.RemoveObserver(rtt_observer); +} + +void NetworkQualityEstimator::AddThroughputObserver( + ThroughputObserver* throughput_observer) { + DCHECK(thread_checker_.CalledOnValidThread()); + throughput_observer_list_.AddObserver(throughput_observer); +} + +void NetworkQualityEstimator::RemoveThroughputObserver( + ThroughputObserver* throughput_observer) { + DCHECK(thread_checker_.CalledOnValidThread()); + throughput_observer_list_.RemoveObserver(throughput_observer); } void NetworkQualityEstimator::RecordRTTUMA(int32_t estimated_value_msec, @@ -605,8 +637,9 @@ } NetworkQualityEstimator::Observation::Observation(int32_t value, - base::TimeTicks timestamp) - : value(value), timestamp(timestamp) { + base::TimeTicks timestamp, + ObservationSource source) + : value(value), timestamp(timestamp), source(source) { DCHECK_GE(value, 0); DCHECK(!timestamp.is_null()); } @@ -821,10 +854,18 @@ DCHECK_NE(InvalidRTT(), network_quality.rtt()); DCHECK_NE(kInvalidThroughput, network_quality.downstream_throughput_kbps()); - downstream_throughput_kbps_observations_.AddObservation(Observation( - network_quality.downstream_throughput_kbps(), base::TimeTicks::Now())); - rtt_msec_observations_.AddObservation(Observation( - network_quality.rtt().InMilliseconds(), 
base::TimeTicks::Now()));
+  Observation throughput_observation(
+      network_quality.downstream_throughput_kbps(), base::TimeTicks::Now(),
+      CACHED_ESTIMATE);
+  downstream_throughput_kbps_observations_.AddObservation(
+      throughput_observation);
+  NotifyObserversOfThroughput(throughput_observation);
+
+  Observation rtt_observation(network_quality.rtt().InMilliseconds(),
+                              base::TimeTicks::Now(), CACHED_ESTIMATE);
+  rtt_msec_observations_.AddObservation(rtt_observation);
+  NotifyObserversOfRTT(rtt_observation);
+
   return true;
 }
 
@@ -865,15 +906,15 @@
       EXTERNAL_ESTIMATE_PROVIDER_STATUS_QUERY_SUCCESSFUL);
   base::TimeDelta rtt;
   if (external_estimate_provider_->GetRTT(&rtt)) {
-    rtt_msec_observations_.AddObservation(
-        Observation(rtt.InMilliseconds(), base::TimeTicks::Now()));
+    rtt_msec_observations_.AddObservation(Observation(
+        rtt.InMilliseconds(), base::TimeTicks::Now(), EXTERNAL_ESTIMATE));
   }
   int32_t downstream_throughput_kbps;
   if (external_estimate_provider_->GetDownstreamThroughputKbps(
           &downstream_throughput_kbps)) {
-    downstream_throughput_kbps_observations_.AddObservation(
-        Observation(downstream_throughput_kbps, base::TimeTicks::Now()));
+    downstream_throughput_kbps_observations_.AddObservation(Observation(
+        downstream_throughput_kbps, base::TimeTicks::Now(), EXTERNAL_ESTIMATE));
   }
 }
 
@@ -930,6 +971,21 @@
       new SocketPerformanceWatcherUDP());
 }
 
+void NetworkQualityEstimator::NotifyObserversOfRTT(
+    const Observation& observation) {
+  FOR_EACH_OBSERVER(RTTObserver, rtt_observer_list_,
+                    OnRTTObservation(observation.value, observation.timestamp,
+                                     observation.source));
+}
+
+void NetworkQualityEstimator::NotifyObserversOfThroughput(
+    const Observation& observation) {
+  FOR_EACH_OBSERVER(
+      ThroughputObserver, throughput_observer_list_,
+      OnThroughputObservation(observation.value, observation.timestamp,
+                              observation.source));
+}
+
 NetworkQualityEstimator::CachedNetworkQuality::CachedNetworkQuality(
     const NetworkQuality& network_quality)
     : last_update_time_(base::TimeTicks::Now()),
diff --git a/net/base/network_quality_estimator.h b/net/base/network_quality_estimator.h index f951bd4..d14c5cd 100644 --- a/net/base/network_quality_estimator.h +++ b/net/base/network_quality_estimator.h
@@ -13,7 +13,9 @@ #include "base/gtest_prod_util.h" #include "base/macros.h" +#include "base/memory/ref_counted.h" #include "base/memory/scoped_ptr.h" +#include "base/observer_list.h" #include "base/threading/thread_checker.h" #include "base/time/time.h" #include "net/base/external_estimate_provider.h" @@ -39,6 +41,64 @@ public ExternalEstimateProvider::UpdatedEstimateDelegate, public SocketPerformanceWatcherFactory { public: + // On Android, a Java counterpart will be generated for this enum. + // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.net + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: NetworkQualityObservationSource + // GENERATED_JAVA_PREFIX_TO_STRIP: + enum ObservationSource { + // The observation was taken at the request layer, e.g., a round trip time + // is recorded as the time between the request being sent and the first byte + // being received. + URL_REQUEST, + // The observation is taken from TCP statistics maintained by the kernel. + TCP, + // The observation is taken at the QUIC layer. + QUIC, + // The observation is a previously cached estimate of the metric. + CACHED_ESTIMATE, + // The observation is derived from network connection information provided + // by the platform. For example, typical RTT and throughput values are used + // for a given type of network connection. + DEFAULT_FROM_PLATFORM, + // The observation came from a Chromium-external source. + EXTERNAL_ESTIMATE + }; + + // Observes measurements of round trip time. + class NET_EXPORT_PRIVATE RTTObserver { + public: + // Will be called when a new RTT observation is available. The round trip + // time is specified in milliseconds. The time when the observation was + // taken and the source of the observation are provided. + virtual void OnRTTObservation(int32_t rtt_ms, + const base::TimeTicks& timestamp, + ObservationSource source) = 0; + + protected: + RTTObserver() {} + virtual ~RTTObserver() {} + + private: + DISALLOW_COPY_AND_ASSIGN(RTTObserver); + }; + + // Observes measurements of throughput. + class NET_EXPORT_PRIVATE ThroughputObserver { + public: + // Will be called when a new throughput observation is available. + // Throughput is specified in kilobits per second. + virtual void OnThroughputObservation(int32_t throughput_kbps, + const base::TimeTicks& timestamp, + ObservationSource source) = 0; + + protected: + ThroughputObserver() {} + virtual ~ThroughputObserver() {} + + private: + DISALLOW_COPY_AND_ASSIGN(ThroughputObserver); + }; + // Creates a new NetworkQualityEstimator. // |variation_params| is the map containing all field trial parameters // related to NetworkQualityEstimator field trial. @@ -47,6 +107,25 @@ scoped_ptr<ExternalEstimateProvider> external_estimates_provider, const std::map<std::string, std::string>& variation_params); + // Construct a NetworkQualityEstimator instance allowing for test + // configuration. Registers for network type change notifications so estimates + // can be kept network specific. + // |external_estimates_provider| may be NULL. + // |variation_params| is the map containing all field trial parameters for the + // network quality estimator field trial. + // |allow_local_host_requests_for_tests| should only be true when testing + // against local HTTP server and allows the requests to local host to be + // used for network quality estimation. + // |allow_smaller_responses_for_tests| should only be true when testing. 
+ // Allows the responses smaller than |kMinTransferSizeInBytes| or shorter than + // |kMinRequestDurationMicroseconds| to be used for network quality + // estimation. + NetworkQualityEstimator( + scoped_ptr<ExternalEstimateProvider> external_estimates_provider, + const std::map<std::string, std::string>& variation_params, + bool allow_local_host_requests_for_tests, + bool allow_smaller_responses_for_tests); + ~NetworkQualityEstimator() override; // Returns true if RTT is available and sets |rtt| to estimated RTT. @@ -85,6 +164,22 @@ scoped_ptr<SocketPerformanceWatcher> CreateUDPSocketPerformanceWatcher() const override; + // Adds |rtt_observer| to the list of round trip time observers. Must be + // called on the IO thread. + void AddRTTObserver(RTTObserver* rtt_observer); + + // Removes |rtt_observer| from the list of round trip time observers if it + // is on the list of observers. Must be called on the IO thread. + void RemoveRTTObserver(RTTObserver* rtt_observer); + + // Adds |throughput_observer| to the list of throughput observers. Must be + // called on the IO thread. + void AddThroughputObserver(ThroughputObserver* throughput_observer); + + // Removes |throughput_observer| from the list of throughput observers if it + // is on the list of observers. Must be called on the IO thread. + void RemoveThroughputObserver(ThroughputObserver* throughput_observer); + protected: // NetworkID is used to uniquely identify a network. // For the purpose of network quality estimation and caching, a network is @@ -123,25 +218,6 @@ std::string id; }; - // Construct a NetworkQualityEstimator instance allowing for test - // configuration. Registers for network type change notifications so estimates - // can be kept network specific. - // |external_estimates_provider| may be NULL. - // |variation_params| is the map containing all field trial parameters for the - // network quality estimator field trial. - // |allow_local_host_requests_for_tests| should only be true when testing - // against local HTTP server and allows the requests to local host to be - // used for network quality estimation. - // |allow_smaller_responses_for_tests| should only be true when testing. - // Allows the responses smaller than |kMinTransferSizeInBytes| or shorter than - // |kMinRequestDurationMicroseconds| to be used for network quality - // estimation. - NetworkQualityEstimator( - scoped_ptr<ExternalEstimateProvider> external_estimates_provider, - const std::map<std::string, std::string>& variation_params, - bool allow_local_host_requests_for_tests, - bool allow_smaller_responses_for_tests); - // Returns true if the cached network quality estimate was successfully read. bool ReadCachedNetworkQualityEstimate(); @@ -172,6 +248,7 @@ TestExternalEstimateProvider); FRIEND_TEST_ALL_PREFIXES(NetworkQualityEstimatorTest, TestExternalEstimateProviderMergeEstimates); + FRIEND_TEST_ALL_PREFIXES(NetworkQualityEstimatorTest, TestObservers); // NetworkQuality is used to cache the quality of a network connection. class NET_EXPORT_PRIVATE NetworkQuality { @@ -229,9 +306,15 @@ }; // Records the round trip time or throughput observation, along with the time - // the observation was made. + // the observation was made. The units of value are type specific. For round + // trip time observations, the value is in milliseconds. For throughput, + // the value is in kilobits per second. Observations can be made at several + // places in the network stack, thus the observation source is provided as + // well. 
struct NET_EXPORT_PRIVATE Observation { - Observation(int32_t value, base::TimeTicks timestamp); + Observation(int32_t value, + base::TimeTicks timestamp, + ObservationSource source); ~Observation(); // Value of the observation. @@ -239,6 +322,9 @@ // Time when the observation was taken. const base::TimeTicks timestamp; + + // The source of the observation. + const ObservationSource source; }; // Holds an observation and its weight. @@ -400,6 +486,10 @@ // Writes the estimated quality of the current network to the cache. void CacheNetworkQualityEstimate(); + void NotifyObserversOfRTT(const Observation& observation); + + void NotifyObserversOfThroughput(const Observation& observation); + // Records the UMA related to RTT. void RecordRTTUMA(int32_t estimated_value_msec, int32_t actual_value_msec) const; @@ -470,6 +560,10 @@ // system APIs. May be NULL. const scoped_ptr<ExternalEstimateProvider> external_estimate_provider_; + // Observer lists for round trip times and throughput measurements. + base::ObserverList<RTTObserver> rtt_observer_list_; + base::ObserverList<ThroughputObserver> throughput_observer_list_; + base::ThreadChecker thread_checker_; DISALLOW_COPY_AND_ASSIGN(NetworkQualityEstimator);
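A minimal consumer of the new observer interface, assuming a net::NetworkQualityEstimator* used on the IO thread and Chromium logging; the LoggingRTTObserver class itself is hypothetical:

    class LoggingRTTObserver : public net::NetworkQualityEstimator::RTTObserver {
     public:
      void OnRTTObservation(
          int32_t rtt_ms,
          const base::TimeTicks& timestamp,
          net::NetworkQualityEstimator::ObservationSource source) override {
        VLOG(1) << "RTT sample: " << rtt_ms << " ms (source " << source << ")";
      }
    };

    // On the IO thread:
    LoggingRTTObserver rtt_observer;
    estimator->AddRTTObserver(&rtt_observer);
    // ... observations arrive via OnRTTObservation() ...
    estimator->RemoveRTTObserver(&rtt_observer);  // Before the observer dies.

The throughput side is symmetric: derive from ThroughputObserver and use AddThroughputObserver()/RemoveThroughputObserver().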
diff --git a/net/base/network_quality_estimator_unittest.cc b/net/base/network_quality_estimator_unittest.cc index 3f6854b..ba23d4a3 100644 --- a/net/base/network_quality_estimator_unittest.cc +++ b/net/base/network_quality_estimator_unittest.cc
@@ -8,6 +8,7 @@ #include <limits> #include <map> +#include <vector> #include "base/basictypes.h" #include "base/files/file_path.h" @@ -103,6 +104,59 @@ DISALLOW_COPY_AND_ASSIGN(TestNetworkQualityEstimator); }; +class TestRTTObserver : public net::NetworkQualityEstimator::RTTObserver { + public: + struct Observation { + Observation(int32_t ms, + const base::TimeTicks& ts, + net::NetworkQualityEstimator::ObservationSource src) + : rtt_ms(ms), timestamp(ts), source(src) {} + int32_t rtt_ms; + base::TimeTicks timestamp; + net::NetworkQualityEstimator::ObservationSource source; + }; + + std::vector<Observation>& observations() { return observations_; } + + // RttObserver implementation: + void OnRTTObservation( + int32_t rtt_ms, + const base::TimeTicks& timestamp, + net::NetworkQualityEstimator::ObservationSource source) override { + observations_.push_back(Observation(rtt_ms, timestamp, source)); + } + + private: + std::vector<Observation> observations_; +}; + +class TestThroughputObserver + : public net::NetworkQualityEstimator::ThroughputObserver { + public: + struct Observation { + Observation(int32_t kbps, + const base::TimeTicks& ts, + net::NetworkQualityEstimator::ObservationSource src) + : throughput_kbps(kbps), timestamp(ts), source(src) {} + int32_t throughput_kbps; + base::TimeTicks timestamp; + net::NetworkQualityEstimator::ObservationSource source; + }; + + std::vector<Observation>& observations() { return observations_; } + + // ThroughputObserver implementation: + void OnThroughputObservation( + int32_t throughput_kbps, + const base::TimeTicks& timestamp, + net::NetworkQualityEstimator::ObservationSource source) override { + observations_.push_back(Observation(throughput_kbps, timestamp, source)); + } + + private: + std::vector<Observation> observations_; +}; + } // namespace namespace net { @@ -250,18 +304,22 @@ // samples. This helps in verifying that the order of samples does not matter. for (int i = 1; i <= 99; i += 2) { estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(i, now)); + NetworkQualityEstimator::Observation( + i, now, NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(i, now)); + NetworkQualityEstimator::Observation( + i, now, NetworkQualityEstimator::URL_REQUEST)); EXPECT_TRUE(estimator.GetRTTEstimate(&rtt)); EXPECT_TRUE(estimator.GetDownlinkThroughputKbpsEstimate(&kbps)); } for (int i = 2; i <= 100; i += 2) { estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(i, now)); + NetworkQualityEstimator::Observation( + i, now, NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(i, now)); + NetworkQualityEstimator::Observation( + i, now, NetworkQualityEstimator::URL_REQUEST)); EXPECT_TRUE(estimator.GetRTTEstimate(&rtt)); EXPECT_TRUE(estimator.GetDownlinkThroughputKbpsEstimate(&kbps)); } @@ -302,17 +360,21 @@ // First 50 samples have very old timestamp. 
for (int i = 1; i <= 50; ++i) { estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(i, very_old)); + NetworkQualityEstimator::Observation( + i, very_old, NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(i, very_old)); + NetworkQualityEstimator::Observation( + i, very_old, NetworkQualityEstimator::URL_REQUEST)); } // Next 50 (i.e., from 51 to 100) have recent timestamp. for (int i = 51; i <= 100; ++i) { estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(i, now)); + NetworkQualityEstimator::Observation( + i, now, NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(i, now)); + NetworkQualityEstimator::Observation( + i, now, NetworkQualityEstimator::URL_REQUEST)); } // Older samples have very little weight. So, all percentiles are >= 51 @@ -513,9 +575,11 @@ // Cache entry will not be added for (NONE, ""). estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(1, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 1, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(1000, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 1000, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.SimulateNetworkChangeTo( NetworkChangeNotifier::ConnectionType::CONNECTION_2G, "test-1"); EXPECT_EQ(expected_cache_size, estimator.cached_network_qualities_.size()); @@ -524,9 +588,11 @@ // Also, set the network quality for (2G, "test1") so that it is stored in // the cache. estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(1, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 1, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(1000, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 1000, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.SimulateNetworkChangeTo( NetworkChangeNotifier::ConnectionType::CONNECTION_3G, "test-1"); @@ -537,9 +603,11 @@ // Also, set the network quality for (3G, "test1") so that it is stored in // the cache. 
estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(2, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 2, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(500, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 500, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.SimulateNetworkChangeTo( NetworkChangeNotifier::ConnectionType::CONNECTION_3G, "test-2"); ++expected_cache_size; @@ -603,9 +671,11 @@ base::TimeTicks update_time_of_network_100; for (size_t i = 0; i < network_count; ++i) { estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(2, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 2, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(500, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 500, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); if (i == 100) update_time_of_network_100 = base::TimeTicks::Now(); @@ -621,9 +691,11 @@ } // One more call so that the last network is also written to cache. estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(2, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 2, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(500, base::TimeTicks::Now())); + NetworkQualityEstimator::Observation( + 500, base::TimeTicks::Now(), NetworkQualityEstimator::URL_REQUEST)); estimator.SimulateNetworkChangeTo( net::NetworkChangeNotifier::ConnectionType::CONNECTION_WIFI, base::SizeTToString(network_count - 1)); @@ -649,14 +721,18 @@ // First sample has very old timestamp. 
estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(1, old)); + NetworkQualityEstimator::Observation( + 1, old, NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(1, old)); + NetworkQualityEstimator::Observation( + 1, old, NetworkQualityEstimator::URL_REQUEST)); estimator.downstream_throughput_kbps_observations_.AddObservation( - NetworkQualityEstimator::Observation(100, now)); + NetworkQualityEstimator::Observation( + 100, now, NetworkQualityEstimator::URL_REQUEST)); estimator.rtt_msec_observations_.AddObservation( - NetworkQualityEstimator::Observation(100, now)); + NetworkQualityEstimator::Observation( + 100, now, NetworkQualityEstimator::URL_REQUEST)); base::TimeDelta rtt; EXPECT_FALSE(estimator.GetRecentMedianRTT( @@ -934,4 +1010,54 @@ EXPECT_EQ(2U, estimator.downstream_throughput_kbps_observations_.Size()); } +TEST(NetworkQualityEstimatorTest, TestObservers) { + TestRTTObserver rtt_observer; + TestThroughputObserver throughput_observer; + std::map<std::string, std::string> variation_params; + TestNetworkQualityEstimator estimator(variation_params); + estimator.AddRTTObserver(&rtt_observer); + estimator.AddThroughputObserver(&throughput_observer); + + TestDelegate test_delegate; + TestURLRequestContext context(true); + context.set_network_quality_estimator(&estimator); + context.Init(); + + EXPECT_EQ(0U, rtt_observer.observations().size()); + EXPECT_EQ(0U, throughput_observer.observations().size()); + base::TimeTicks then = base::TimeTicks::Now(); + + scoped_ptr<URLRequest> request(context.CreateRequest( + estimator.GetEchoURL(), DEFAULT_PRIORITY, &test_delegate)); + request->SetLoadFlags(request->load_flags() | LOAD_MAIN_FRAME); + request->Start(); + base::RunLoop().Run(); + + scoped_ptr<URLRequest> request2(context.CreateRequest( + estimator.GetEchoURL(), DEFAULT_PRIORITY, &test_delegate)); + request2->SetLoadFlags(request->load_flags() | LOAD_MAIN_FRAME); + request2->Start(); + base::RunLoop().Run(); + + // Both RTT and downstream throughput should be updated. + EXPECT_NE(NetworkQualityEstimator::InvalidRTT(), + estimator.GetRTTEstimateInternal(base::TimeTicks(), 100)); + EXPECT_NE(NetworkQualityEstimator::kInvalidThroughput, + estimator.GetDownlinkThroughputKbpsEstimateInternal( + base::TimeTicks(), 100)); + + EXPECT_EQ(2U, rtt_observer.observations().size()); + EXPECT_EQ(2U, throughput_observer.observations().size()); + for (auto observation : rtt_observer.observations()) { + EXPECT_LE(0, observation.rtt_ms); + EXPECT_LE(0, (observation.timestamp - then).InMilliseconds()); + EXPECT_EQ(NetworkQualityEstimator::URL_REQUEST, observation.source); + } + for (auto observation : throughput_observer.observations()) { + EXPECT_LE(0, observation.throughput_kbps); + EXPECT_LE(0, (observation.timestamp - then).InMilliseconds()); + EXPECT_EQ(NetworkQualityEstimator::URL_REQUEST, observation.source); + } +} + } // namespace net
diff --git a/ppapi/BUILD.gn b/ppapi/BUILD.gn index 2a982027..6023bbf 100644 --- a/ppapi/BUILD.gn +++ b/ppapi/BUILD.gn
@@ -209,78 +209,140 @@ } } + if (current_cpu == "pnacl") { + action("translate_pexe_to_nexe") { + pexe = "${root_out_dir}/ppapi_nacl_tests.pexe" + nexe = "${root_out_dir}/ppapi_nacl_tests.nexe" + + script = "${nacl_toolchain_dir}/pnacl_newlib/bin/pydir/loader.py" + sources = [ + pexe, + ] + outputs = [ + nexe, + ] + + # TODO(phosek): remove the following once change 1360243003 is rolled + # into Chrome and use $target_cpu directly. + if (target_cpu == "x86") { + arch = "i686" + } else if (target_cpu == "x64") { + arch = "x86-64" + } else if (target_cpu == "arm") { + arch = "armv7" + } + + # The pre-translated object file has to be linked with an IRT shim to + # get a runnable nexe. This is handled by pnacl-translate, which passes + # -l:libpnacl_irt_shim.a to native linker, and we need to ensure the + # linker can find the correct library. + pnacl_irt_shim = "//ppapi/native_client/src/untrusted/pnacl_irt_shim:aot(//build/toolchain/nacl:clang_newlib_${target_cpu})" + + args = [ + "pnacl-translate", + rebase_path(pexe, root_build_dir), + "-o", + rebase_path(nexe, root_build_dir), + "-arch", + arch, + "-Wl,-L" + + rebase_path(get_label_info(pnacl_irt_shim, "target_out_dir")), + ] + deps = [ + ":ppapi_nacl_tests", + ] + data_deps = [ + pnacl_irt_shim, + ] + } + } + copy("nacl_tests_copy") { sources = [ "${root_out_dir}/ppapi_nacl_tests.nexe", ] - if (is_nacl_glibc) { - suffix = "glibc" - } else { - suffix = "newlib" - } # The CPU names used in tests/ppapi_nacl_tests_newlib.nmf # are the ones used in GYP (x32 for x86). if (target_cpu == "x86") { nmf_cpu = "x32" - } else { + } else if (current_cpu != "pnacl") { nmf_cpu = target_cpu } + if (is_nacl_glibc) { + suffix = "glibc_${nmf_cpu}" + } else if (current_cpu == "pnacl") { + suffix = "pnacl" + } else { + suffix = "newlib_${nmf_cpu}" + } outputs = [ - "${root_build_dir}/{{source_name_part}}_${suffix}_${nmf_cpu}.nexe", + "${root_build_dir}/{{source_name_part}}_${suffix}.nexe", ] - deps = [ - ":ppapi_nacl_tests", - ] + if (current_cpu == "pnacl") { + deps = [ + ":translate_pexe_to_nexe", + ] + } else { + deps = [ + ":ppapi_nacl_tests", + ] + } } action("generate_nmf") { - nacl_toolchain_dir = rebase_path("//native_client/toolchain") - os_toolchain_dir = "${nacl_toolchain_dir}/${host_os}_x86" if (is_nacl_glibc) { - toolchain_dir = "${os_toolchain_dir}/nacl_x86_glibc" + toolchain_dir = "${nacl_toolchain_dir}/nacl_x86_glibc" nmf = "${root_build_dir}/ppapi_nacl_tests_glibc.nmf" + } else if (current_cpu == "pnacl") { + toolchain_dir = "${nacl_toolchain_dir}/pnacl_newlib" + nmf = "${root_build_dir}/ppapi_nacl_tests_pnacl.nmf" } else { - toolchain_dir = "${os_toolchain_dir}/nacl_x86_newlib" + toolchain_dir = "${nacl_toolchain_dir}/nacl_x86_newlib" nmf = "${root_build_dir}/ppapi_nacl_tests_newlib.nmf" } + nexe = "${root_out_dir}/ppapi_nacl_tests.nexe" + script = "//native_client_sdk/src/tools/create_nmf.py" - sources = get_target_outputs(":nacl_tests_copy") + sources = [ + nexe, + ] outputs = [ nmf, ] + data = [ + nexe, + ] nmf_flags = [] if (is_nacl_glibc) { nmf_flags += [ "--library-path=" + rebase_path(root_out_dir) ] if (current_cpu == "x86") { nmf_flags += [ "--library-path=" + - rebase_path("${toolchain_dir}/x86_64-nacl/lib32", - root_build_dir) ] - data = [ - "$root_build_dir/lib32/", - ] + rebase_path("${toolchain_dir}/x86_64-nacl/lib32") ] + data += [ "$root_build_dir/lib32/" ] } if (target_cpu == "x64" || (target_cpu == "x86" && is_win)) { nmf_flags += [ "--library-path=" + - rebase_path("${toolchain_dir}/x86_64-nacl/lib", - root_build_dir) ] - 
data = [ - "$root_build_dir/lib64/", - ] + rebase_path("${toolchain_dir}/x86_64-nacl/lib") ] + data += [ "$root_build_dir/lib64/" ] } } args = [ "--no-default-libpath", - "--objdump=${toolchain_dir}/bin/x86_64-nacl-objdump", + "--objdump=" + + rebase_path("${toolchain_dir}/bin/x86_64-nacl-objdump"), "--output=" + rebase_path(nmf, root_build_dir), "--stage-dependencies=" + rebase_path(root_build_dir), ] + nmf_flags + rebase_path(sources, root_build_dir) - deps = [ - ":nacl_tests_copy", - ] - data_deps = [ - ":nacl_tests_copy", - ] + if (current_cpu == "pnacl") { + deps = [ + ":translate_pexe_to_nexe", + ] + } else { + deps = [ + ":ppapi_nacl_tests", + ] + } } } @@ -291,6 +353,7 @@ ":copy_test_files", ":nacl_tests_copy(//build/toolchain/nacl:clang_newlib_${target_cpu})", ":generate_nmf(//build/toolchain/nacl:glibc_${target_cpu})", + ":generate_nmf(//build/toolchain/nacl:newlib_pnacl)", ] } }
diff --git a/remoting/host/linux/linux_me2me_host.py b/remoting/host/linux/linux_me2me_host.py index 703c2c9..998ee57 100755 --- a/remoting/host/linux/linux_me2me_host.py +++ b/remoting/host/linux/linux_me2me_host.py
@@ -9,6 +9,8 @@ # This script is intended to run continuously as a background daemon # process, running under an ordinary (non-root) user account. +from __future__ import print_function + import atexit import errno import fcntl @@ -139,7 +141,7 @@ """ if not self.changed: return - old_umask = os.umask(0066) + old_umask = os.umask(0o066) try: settings_file = open(self.path, 'w') settings_file.write(json.dumps(self.data, indent=2)) @@ -262,7 +264,7 @@ "USER", "USERNAME", LOG_FILE_ENV_VAR]: - if os.environ.has_key(key): + if key in os.environ: self.child_env[key] = os.environ[key] # Ensure that the software-rendering GL drivers are loaded by the desktop @@ -313,7 +315,7 @@ try: if not os.path.exists(pulse_path): os.mkdir(pulse_path) - except IOError, e: + except IOError as e: logging.error("Failed to create pulseaudio pipe: " + str(e)) return False @@ -331,7 +333,7 @@ "rate=48000 channels=2 format=s16le\n") % (sink_name, pipe_name)) pulse_script.close() - except IOError, e: + except IOError as e: logging.error("Failed to write pulseaudio config: " + str(e)) return False @@ -350,7 +352,7 @@ def _launch_x_server(self, extra_x_args): x_auth_file = os.path.expanduser("~/.Xauthority") self.child_env["XAUTHORITY"] = x_auth_file - devnull = open(os.devnull, "rw") + devnull = open(os.devnull, "r+") display = self.get_unused_display_number() # Run "xauth add" with |child_env| so that it modifies the same XAUTHORITY @@ -518,7 +520,7 @@ raise Exception("Could not start Chrome Remote Desktop host") try: - self.host_proc.stdin.write(json.dumps(host_config.data)) + self.host_proc.stdin.write(json.dumps(host_config.data).encode('UTF-8')) self.host_proc.stdin.flush() except IOError as e: # This can occur in rare situations, for example, if the machine is @@ -671,7 +673,7 @@ old_flags = fcntl.fcntl(write_pipe, fcntl.F_GETFD) fcntl.fcntl(write_pipe, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC) self._read_file = os.fdopen(read_pipe, 'r') - self._write_file = os.fdopen(write_pipe, 'a') + self._write_file = os.fdopen(write_pipe, 'w') self._logging_handler = None ParentProcessLogger.__instance = self @@ -704,8 +706,8 @@ # This signal will cause the read loop below to stop with an EINTR IOError. def sigint_handler(signum, frame): _ = signum, frame - print >> sys.stderr, ("Interrupted. The daemon is still running in the " - "background.") + print("Interrupted. The daemon is still running in the background.", + file=sys.stderr) signal.signal(signal.SIGINT, sigint_handler) @@ -714,8 +716,8 @@ # This signal will cause the read loop below to stop with an EINTR IOError. def sigalrm_handler(signum, frame): _ = signum, frame - print >> sys.stderr, ("No response from daemon. It may have crashed, or " - "may still be running in the background.") + print("No response from daemon. It may have crashed, or may still be " + "running in the background.", file=sys.stderr) signal.signal(signal.SIGALRM, sigalrm_handler) signal.alarm(30) @@ -730,7 +732,7 @@ except IOError as e: if e.errno != errno.EINTR: raise - print >> sys.stderr, "Log file: %s" % os.environ[LOG_FILE_ENV_VAR] + print("Log file: %s" % os.environ[LOG_FILE_ENV_VAR], file=sys.stderr) @staticmethod def instance(): @@ -755,7 +757,7 @@ # The mode is provided, since Python otherwise sets a default mode of 0777, # which would result in the new file having permissions of 0777 & ~umask, # possibly leaving the executable bits set. 
- if not os.environ.has_key(LOG_FILE_ENV_VAR): + if not LOG_FILE_ENV_VAR in os.environ: log_file_prefix = "chrome_remote_desktop_%s_" % time.strftime( '%Y%m%d_%H%M%S', time.localtime(time.time())) log_file = tempfile.NamedTemporaryFile(prefix=log_file_prefix, delete=False) @@ -763,7 +765,7 @@ log_fd = log_file.file.fileno() else: log_fd = os.open(os.environ[LOG_FILE_ENV_VAR], - os.O_WRONLY | os.O_CREAT | os.O_APPEND, 0600) + os.O_WRONLY | os.O_CREAT | os.O_APPEND, 0o600) devnull_fd = os.open(os.devnull, os.O_RDONLY) @@ -963,7 +965,7 @@ else: pid_result, status = waitpid_with_timeout(pid, deadline) return (pid_result, status) - except OSError, e: + except OSError as e: if e.errno == errno.EINTR: continue elif e.errno == errno.ECHILD: @@ -1075,11 +1077,11 @@ if options.get_status: proc = get_daemon_proc() if proc is not None: - print "STARTED" + print("STARTED") elif is_supported_platform(): - print "STOPPED" + print("STOPPED") else: - print "NOT_IMPLEMENTED" + print("NOT_IMPLEMENTED") return 0 # TODO(sergeyu): Remove --check-running once NPAPI plugin and NM host are @@ -1091,14 +1093,14 @@ if options.stop: proc = get_daemon_proc() if proc is None: - print "The daemon is not currently running" + print("The daemon is not currently running") else: - print "Killing process %s" % proc.pid + print("Killing process %s" % proc.pid) proc.terminate() try: proc.wait(timeout=30) except psutil.TimeoutExpired: - print "Timed out trying to kill daemon process" + print("Timed out trying to kill daemon process") return 1 return 0 @@ -1167,7 +1169,7 @@ if not options.start: # If no modal command-line options specified, print an error and exit. - print >> sys.stderr, EPILOG + print(EPILOG, file=sys.stderr) return 1 # If a RANDR-supporting Xvfb is not available, limit the default size to @@ -1179,7 +1181,7 @@ # Collate the list of sizes that XRANDR should support. if not options.size: - if os.environ.has_key(DEFAULT_SIZES_ENV_VAR): + if DEFAULT_SIZES_ENV_VAR in os.environ: default_sizes = os.environ[DEFAULT_SIZES_ENV_VAR] options.size = default_sizes.split(",") @@ -1210,7 +1212,7 @@ try: host_config.load() except (IOError, ValueError) as e: - print >> sys.stderr, "Failed to load config: " + str(e) + print("Failed to load config: " + str(e), file=sys.stderr) return 1 # Register handler to re-load the configuration in response to signals. @@ -1232,7 +1234,7 @@ if proc is not None: # Debian policy requires that services should "start" cleanly and return 0 # if they are already running. - print "Service already running." + print("Service already running.") return 0 # Detach a separate "daemon" process to run the session, unless specifically
diff --git a/remoting/remoting_host_win.gypi b/remoting/remoting_host_win.gypi index 9c0b3e9..d4cdfd62 100644 --- a/remoting/remoting_host_win.gypi +++ b/remoting/remoting_host_win.gypi
@@ -104,6 +104,8 @@ '-Wno-incompatible-pointer-types', # Generated code contains unused variables. '-Wno-unused-variable', + # PROXYFILE_LIST_START is an extern with initializer. + '-Wno-extern-initializer', ], }, }, # end of target 'remoting_lib_ps'
diff --git a/sandbox/linux/sandbox_linux_nacl_nonsfi.gyp b/sandbox/linux/sandbox_linux_nacl_nonsfi.gyp index 0b40d1c..50e637c3 100644 --- a/sandbox/linux/sandbox_linux_nacl_nonsfi.gyp +++ b/sandbox/linux/sandbox_linux_nacl_nonsfi.gyp
@@ -23,7 +23,9 @@ 'build_irt': 0, 'build_pnacl_newlib': 0, 'build_nonsfi_helper': 1, - + 'compile_flags': [ + '-fgnu-inline-asm', + ], 'sources': [ # This is the subset of linux build target, needed for # nacl_helper_nonsfi's sandbox implementation.
diff --git a/sync/BUILD.gn b/sync/BUILD.gn index 9e449bd..5dbcc46 100644 --- a/sync/BUILD.gn +++ b/sync/BUILD.gn
@@ -473,8 +473,6 @@ "test/engine/fake_model_worker.h", "test/engine/fake_sync_scheduler.cc", "test/engine/fake_sync_scheduler.h", - "test/engine/injectable_sync_context_proxy.cc", - "test/engine/injectable_sync_context_proxy.h", "test/engine/mock_commit_queue.cc", "test/engine/mock_commit_queue.h", "test/engine/mock_connection_manager.cc",
diff --git a/sync/engine/model_type_processor_impl.cc b/sync/engine/model_type_processor_impl.cc index 94e5c631..2b02906 100644 --- a/sync/engine/model_type_processor_impl.cc +++ b/sync/engine/model_type_processor_impl.cc
@@ -10,7 +10,6 @@ #include "sync/engine/commit_queue.h" #include "sync/engine/model_type_entity.h" #include "sync/internal_api/public/activation_context.h" -#include "sync/internal_api/public/sync_context_proxy.h" #include "sync/syncable/syncable_util.h" namespace syncer_v2 { @@ -19,7 +18,7 @@ syncer::ModelType type, base::WeakPtr<ModelTypeStore> store) : type_(type), - is_preferred_(false), + is_enabled_(false), is_connected_(false), store_(store), weak_ptr_factory_for_ui_(this), @@ -28,29 +27,13 @@ ModelTypeProcessorImpl::~ModelTypeProcessorImpl() { } -bool ModelTypeProcessorImpl::IsPreferred() const { +void ModelTypeProcessorImpl::Start(StartCallback callback) { DCHECK(CalledOnValidThread()); - return is_preferred_; -} + DVLOG(1) << "Starting " << ModelTypeToString(type_); -bool ModelTypeProcessorImpl::IsConnected() const { - DCHECK(CalledOnValidThread()); - return is_connected_; -} + is_enabled_ = true; -syncer::ModelType ModelTypeProcessorImpl::GetModelType() const { - DCHECK(CalledOnValidThread()); - return type_; -} - -void ModelTypeProcessorImpl::Enable( - scoped_ptr<SyncContextProxy> sync_context_proxy) { - DCHECK(CalledOnValidThread()); - DVLOG(1) << "Asked to enable " << ModelTypeToString(type_); - - is_preferred_ = true; - - // TODO(rlarocque): At some point, this should be loaded from storage. + // TODO: At some point, this should be loaded from storage. data_type_state_.progress_marker.set_data_type_id( GetSpecificsFieldNumberFromModelType(type_)); @@ -61,29 +44,32 @@ activation_context->type_task_runner = base::ThreadTaskRunnerHandle::Get(); activation_context->type_processor = weak_ptr_factory_for_sync_.GetWeakPtr(); - sync_context_proxy_ = sync_context_proxy.Pass(); - sync_context_proxy_->ConnectTypeToSync(GetModelType(), - activation_context.Pass()); + callback.Run(/*syncer::SyncError(), */ activation_context.Pass()); } +bool ModelTypeProcessorImpl::IsEnabled() const { + DCHECK(CalledOnValidThread()); + return is_enabled_; +} + +bool ModelTypeProcessorImpl::IsConnected() const { + DCHECK(CalledOnValidThread()); + return is_connected_; +} + +// TODO(stanisc): crbug.com/537027: This needs to be called from +// DataTypeController when the type is disabled void ModelTypeProcessorImpl::Disable() { DCHECK(CalledOnValidThread()); - is_preferred_ = false; - Disconnect(); - + is_enabled_ = false; + Stop(); ClearSyncState(); } -void ModelTypeProcessorImpl::Disconnect() { +void ModelTypeProcessorImpl::Stop() { DCHECK(CalledOnValidThread()); - DVLOG(1) << "Asked to disconnect " << ModelTypeToString(type_); + DVLOG(1) << "Stopping " << ModelTypeToString(type_); is_connected_ = false; - - if (sync_context_proxy_) { - sync_context_proxy_->Disconnect(GetModelType()); - sync_context_proxy_.reset(); - } - weak_ptr_factory_for_sync_.InvalidateWeakPtrs(); worker_.reset();
diff --git a/sync/engine/model_type_processor_impl.h b/sync/engine/model_type_processor_impl.h index 04a3141..80dc15df 100644 --- a/sync/engine/model_type_processor_impl.h +++ b/sync/engine/model_type_processor_impl.h
@@ -16,10 +16,10 @@ #include "sync/protocol/sync.pb.h" namespace syncer_v2 { +struct ActivationContext; class CommitQueue; class ModelTypeEntity; class ModelTypeStore; -class SyncContextProxy; // A sync component embedded on the synced type's thread that helps to handle // communication between sync and model type threads. @@ -30,38 +30,38 @@ base::WeakPtr<ModelTypeStore> store); ~ModelTypeProcessorImpl() override; - // Returns true if this object believes that sync is preferred for this type. - // - // By "preferred", we mean that a policy decision has been made that this - // type should be synced. Most of the time this is controlled by a user - // clicking a checkbox on the settings page. - // - // The canonical preferred state is based on SyncPrefs on the UI thread. At - // best, this value is stale and may lag behind the one set on the UI thread. - // Before this type has registered with the UI thread, it's mostly just an - // informed guess. - bool IsPreferred() const; + typedef base::Callback<void( + /*syncer::SyncError,*/ scoped_ptr<ActivationContext>)> StartCallback; - // Returns true if the handshake with sync thread is complete. - bool IsConnected() const; + // Called by DataTypeController to begins asynchronous operation of preparing + // the model to sync. Once the model is ready to be activated with Sync the + // callback will be invoked with the activation context. If the model is + // already ready it is safe to call the callback right away. Otherwise the + // callback needs to be stored and called when the model is ready. + void Start(StartCallback callback); - // Returns the model type handled by this type sync proxy. - syncer::ModelType GetModelType() const; + // Called by DataTypeController to inform the model that the sync is + // stopping for the model type. + void Stop(); - // Starts the handshake with the sync thread. - void Enable(scoped_ptr<SyncContextProxy> context_proxy); + // Returns true if the datatype is enabled. + // TODO(stanisc): crbug.com/537027: There is no explicit call to indicate + // that the datatype is enabled. The flag is set to true when Start is called + // and reset to false when Disable is called. + bool IsEnabled() const; + // TODO(stanisc): crbug.com/537027: This needs to be called from + // DataTypeController when the type is disabled // Severs all ties to the sync thread and may delete local sync state. // Another call to Enable() can be used to re-establish this connection. void Disable(); - // Severs all ties to the sync thread. - // Another call to Enable() can be used to re-establish this connection. - void Disconnect(); - - // Callback used to process the handshake response. + // Callback used to process the handshake response from the sync thread. void OnConnect(scoped_ptr<CommitQueue> worker) override; + // Returns true if the handshake with sync thread is complete. + bool IsConnected() const; + // Requests that an item be stored in sync. void Put(const std::string& client_tag, const sync_pb::EntitySpecifics& specifics); @@ -113,21 +113,13 @@ syncer::ModelType type_; DataTypeState data_type_state_; - // Whether or not sync is preferred for this type. This is a cached copy of - // the canonical copy information on the UI thread. - bool is_preferred_; + // Whether or not sync is enabled by this type's DataTypeController. + bool is_enabled_; // Whether or not this object has completed its initial handshake with the // SyncContextProxy. bool is_connected_; - // Our link to data type management on the sync thread. 
- // Used for enabling and disabling sync for this type. - // - // Beware of NULL pointers: This object is uninitialized when we are not - // connected to sync. - scoped_ptr<SyncContextProxy> sync_context_proxy_; - // Reference to the CommitQueue. // // The interface hides the posting of tasks across threads as well as the
diff --git a/sync/engine/model_type_processor_impl_unittest.cc b/sync/engine/model_type_processor_impl_unittest.cc index 5e0bc68e..31fd29f 100644 --- a/sync/engine/model_type_processor_impl_unittest.cc +++ b/sync/engine/model_type_processor_impl_unittest.cc
@@ -4,14 +4,14 @@ #include "sync/engine/model_type_processor_impl.h" +#include "base/bind.h" #include "base/message_loop/message_loop.h" #include "sync/engine/commit_queue.h" +#include "sync/internal_api/public/activation_context.h" #include "sync/internal_api/public/base/model_type.h" #include "sync/internal_api/public/non_blocking_sync_common.h" -#include "sync/internal_api/public/sync_context_proxy.h" #include "sync/protocol/sync.pb.h" #include "sync/syncable/syncable_util.h" -#include "sync/test/engine/injectable_sync_context_proxy.h" #include "sync/test/engine/mock_commit_queue.h" #include "testing/gtest/include/gtest/gtest.h" @@ -45,22 +45,22 @@ ModelTypeProcessorImplTest(); ~ModelTypeProcessorImplTest() override; - // Initialize with no local state. The type sync proxy will be unable to - // commit until it receives notification that initial sync has completed. - void FirstTimeInitialize(); - // Initialize to a "ready-to-commit" state. void InitializeToReadyState(); - // Disconnect the CommitQueue from our ModelTypeProcessorImpl. - void Disconnect(); + // Start our ModelTypeProcessorImpl, which will be unable to commit until it + // receives notification that initial sync has completed. + void Start(); + + // Stop and disconnect the CommitQueue from our ModelTypeProcessorImpl. + void Stop(); // Disable sync for this ModelTypeProcessorImpl. Should cause sync state to // be discarded. void Disable(); - // Re-enable sync after Disconnect() or Disable(). - void ReEnable(); + // Restart sync after Stop() or Disable(). + void Restart(); // Local data modification. Emulates signals from the model thread. void WriteItem(const std::string& tag, const std::string& value); @@ -125,8 +125,12 @@ int64 GetServerVersion(const std::string& tag); void SetServerVersion(const std::string& tag, int64 version); + void StartDone(/*syncer::SyncError,*/ scoped_ptr<ActivationContext> context); + + // The current mock queue which might be owned by either |mock_queue_ptr_| or + // |type_processor_|. MockCommitQueue* mock_queue_; - scoped_ptr<InjectableSyncContextProxy> injectable_sync_context_proxy_; + scoped_ptr<MockCommitQueue> mock_queue_ptr_; scoped_ptr<ModelTypeProcessorImpl> type_processor_; DataTypeState data_type_state_; @@ -136,8 +140,7 @@ ModelTypeProcessorImplTest::ModelTypeProcessorImplTest() : mock_queue_(new MockCommitQueue()), - injectable_sync_context_proxy_( - new InjectableSyncContextProxy(mock_queue_)), + mock_queue_ptr_(mock_queue_), type_processor_( new ModelTypeProcessorImpl(kModelType, base::WeakPtr<ModelTypeStore>())) {} @@ -145,42 +148,49 @@ ModelTypeProcessorImplTest::~ModelTypeProcessorImplTest() { } -void ModelTypeProcessorImplTest::FirstTimeInitialize() { - type_processor_->Enable(injectable_sync_context_proxy_->Clone()); -} - void ModelTypeProcessorImplTest::InitializeToReadyState() { // TODO(rlarocque): This should be updated to inject on-disk state. // At the time this code was written, there was no support for on-disk // state so this was the only way to inject a data_type_state into // the |type_processor_|. 
- FirstTimeInitialize(); + Start(); OnInitialSyncDone(); } -void ModelTypeProcessorImplTest::Disconnect() { - type_processor_->Disconnect(); - injectable_sync_context_proxy_.reset(); +void ModelTypeProcessorImplTest::Start() { + type_processor_->Start(base::Bind(&ModelTypeProcessorImplTest::StartDone, + base::Unretained(this))); +} + +void ModelTypeProcessorImplTest::Stop() { + type_processor_->Stop(); mock_queue_ = NULL; + mock_queue_ptr_.reset(); } void ModelTypeProcessorImplTest::Disable() { type_processor_->Disable(); - injectable_sync_context_proxy_.reset(); mock_queue_ = NULL; + mock_queue_ptr_.reset(); } -void ModelTypeProcessorImplTest::ReEnable() { +void ModelTypeProcessorImplTest::Restart() { DCHECK(!type_processor_->IsConnected()); // Prepare a new MockCommitQueue instance, just as we would // if this happened in the real world. - mock_queue_ = new MockCommitQueue(); - injectable_sync_context_proxy_.reset( - new InjectableSyncContextProxy(mock_queue_)); + mock_queue_ptr_.reset(new MockCommitQueue()); + mock_queue_ = mock_queue_ptr_.get(); + // Restart sync with the new CommitQueue. + Start(); +} - // Re-enable sync with the new CommitQueue. - type_processor_->Enable(injectable_sync_context_proxy_->Clone()); +void ModelTypeProcessorImplTest::StartDone( + /*syncer::SyncError,*/ scoped_ptr<ActivationContext> context) { + // Hand off ownership of |mock_queue_ptr_|, while keeping + // an unsafe pointer to it. This is why we can only connect once. + DCHECK(mock_queue_ptr_); + context->type_processor->OnConnect(mock_queue_ptr_.Pass()); } void ModelTypeProcessorImplTest::WriteItem(const std::string& tag, @@ -482,7 +492,7 @@ // Verify that it waits until initial sync is complete before requesting // commits. TEST_F(ModelTypeProcessorImplTest, NoCommitsUntilInitialSyncDone) { - FirstTimeInitialize(); + Start(); WriteItem("tag1", "value1"); EXPECT_EQ(0U, GetNumCommitRequestLists()); @@ -496,7 +506,7 @@ // // Creates items in various states of commit and verifies they re-attempt to // commit on reconnect. -TEST_F(ModelTypeProcessorImplTest, Disconnect) { +TEST_F(ModelTypeProcessorImplTest, Stop) { InitializeToReadyState(); // The first item is fully committed. @@ -508,12 +518,12 @@ WriteItem("tag2", "value2"); EXPECT_TRUE(HasCommitRequestForTag("tag2")); - Disconnect(); + Stop(); - // The third item is added after disconnection. + // The third item is added after stopping. WriteItem("tag3", "value3"); - ReEnable(); + Restart(); EXPECT_EQ(1U, GetNumCommitRequestLists()); EXPECT_EQ(2U, GetNthCommitRequestList(0).size()); @@ -550,7 +560,7 @@ WriteItem("tag3", "value3"); // Now we re-enable. - ReEnable(); + Restart(); // There should be nothing to commit right away, since we need to // re-initialize the client state first. @@ -606,22 +616,22 @@ ASSERT_TRUE(HasPendingUpdate("tag1")); Disable(); - ReEnable(); + Restart(); EXPECT_EQ(0U, GetNumPendingUpdates()); EXPECT_FALSE(HasPendingUpdate("tag1")); } -// Test that Disconnect does not clear pending update state. -TEST_F(ModelTypeProcessorImplTest, DisconnectWithPendingUpdates) { +// Test that Stop does not clear pending update state. +TEST_F(ModelTypeProcessorImplTest, StopWithPendingUpdates) { InitializeToReadyState(); PendingUpdateFromServer(5, "tag1", "value1", "key1"); EXPECT_EQ(1U, GetNumPendingUpdates()); ASSERT_TRUE(HasPendingUpdate("tag1")); - Disconnect(); - ReEnable(); + Stop(); + Restart(); EXPECT_EQ(1U, GetNumPendingUpdates()); EXPECT_TRUE(HasPendingUpdate("tag1"));
diff --git a/sync/engine/model_type_worker.cc b/sync/engine/model_type_worker.cc index 7e67dc8..2ffb33d 100644 --- a/sync/engine/model_type_worker.cc +++ b/sync/engine/model_type_worker.cc
@@ -178,9 +178,9 @@ } DVLOG(1) << ModelTypeToString(type_) << ": " - << base::StringPrintf( - "Delivering %zd applicable and %zd pending updates.", - response_datas.size(), pending_updates.size()); + << base::StringPrintf("Delivering %" PRIuS " applicable and %" PRIuS + " pending updates.", + response_datas.size(), pending_updates.size()); // Forward these updates to the model thread so it can do the rest. model_type_processor_->OnUpdateReceived(data_type_state_, response_datas, @@ -411,9 +411,9 @@ if (new_encryption_key || response_datas.size() > 0) { DVLOG(1) << ModelTypeToString(type_) << ": " - << base::StringPrintf( - "Delivering encryption key and %zd decrypted updates.", - response_datas.size()); + << base::StringPrintf("Delivering encryption key and %" PRIuS + " decrypted updates.", + response_datas.size()); model_type_processor_->OnUpdateReceived(data_type_state_, response_datas, UpdateResponseDataList()); }
diff --git a/sync/internal_api/public/activation_context.h b/sync/internal_api/public/activation_context.h index 824aa9c7..313546c 100644 --- a/sync/internal_api/public/activation_context.h +++ b/sync/internal_api/public/activation_context.h
@@ -5,8 +5,8 @@ #ifndef SYNC_INTERNAL_API_PUBLIC_ACTIVATION_CONTEXT_H_ #define SYNC_INTERNAL_API_PUBLIC_ACTIVATION_CONTEXT_H_ -#include "base/memory/weak_ptr.h" #include "base/memory/ref_counted.h" +#include "base/memory/weak_ptr.h" #include "base/sequenced_task_runner.h" #include "sync/base/sync_export.h" #include "sync/internal_api/public/non_blocking_sync_common.h"
diff --git a/sync/internal_api/sync_context_proxy_impl_unittest.cc b/sync/internal_api/sync_context_proxy_impl_unittest.cc index f4981d7d..48dafc9e 100644 --- a/sync/internal_api/sync_context_proxy_impl_unittest.cc +++ b/sync/internal_api/sync_context_proxy_impl_unittest.cc
@@ -2,11 +2,13 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#include "base/bind.h" #include "base/message_loop/message_loop.h" #include "base/run_loop.h" #include "base/sequenced_task_runner.h" #include "base/thread_task_runner_handle.h" #include "sync/engine/model_type_processor_impl.h" +#include "sync/internal_api/public/activation_context.h" #include "sync/internal_api/public/base/model_type.h" #include "sync/internal_api/public/sync_context.h" #include "sync/internal_api/sync_context_proxy_impl.h" @@ -41,7 +43,15 @@ // function simulates such an event. void DisableSync() { registry_.reset(); } - scoped_ptr<SyncContextProxy> GetProxy() { return context_proxy_->Clone(); } + void Start(ModelTypeProcessorImpl* processor) { + processor->Start(base::Bind(&SyncContextProxyImplTest::StartDone, + base::Unretained(this))); + } + + void StartDone( + /*syncer::SyncError,*/ scoped_ptr<ActivationContext> context) { + context_proxy_->ConnectTypeToSync(syncer::THEMES, context.Pass()); + } private: base::MessageLoop loop_; @@ -58,53 +68,51 @@ // Try to connect a type to a SyncContext that has already shut down. TEST_F(SyncContextProxyImplTest, FailToConnect1) { - ModelTypeProcessorImpl themes_sync_proxy(syncer::THEMES, - base::WeakPtr<ModelTypeStore>()); + ModelTypeProcessorImpl processor(syncer::THEMES, + base::WeakPtr<ModelTypeStore>()); DisableSync(); - themes_sync_proxy.Enable(GetProxy()); + Start(&processor); base::RunLoop run_loop_; run_loop_.RunUntilIdle(); - EXPECT_FALSE(themes_sync_proxy.IsConnected()); + EXPECT_FALSE(processor.IsConnected()); } // Try to connect a type to a SyncContext as it shuts down. TEST_F(SyncContextProxyImplTest, FailToConnect2) { - ModelTypeProcessorImpl themes_sync_proxy(syncer::THEMES, - base::WeakPtr<ModelTypeStore>()); - themes_sync_proxy.Enable(GetProxy()); + ModelTypeProcessorImpl processor(syncer::THEMES, + base::WeakPtr<ModelTypeStore>()); + Start(&processor); DisableSync(); base::RunLoop run_loop_; run_loop_.RunUntilIdle(); - EXPECT_FALSE(themes_sync_proxy.IsConnected()); + EXPECT_FALSE(processor.IsConnected()); } // Tests the case where the type's sync proxy shuts down first. TEST_F(SyncContextProxyImplTest, TypeDisconnectsFirst) { - scoped_ptr<ModelTypeProcessorImpl> themes_sync_proxy( - new ModelTypeProcessorImpl(syncer::THEMES, - base::WeakPtr<ModelTypeStore>())); - themes_sync_proxy->Enable(GetProxy()); + scoped_ptr<ModelTypeProcessorImpl> processor(new ModelTypeProcessorImpl( + syncer::THEMES, base::WeakPtr<ModelTypeStore>())); + Start(processor.get()); base::RunLoop run_loop_; run_loop_.RunUntilIdle(); - EXPECT_TRUE(themes_sync_proxy->IsConnected()); - themes_sync_proxy.reset(); + EXPECT_TRUE(processor->IsConnected()); + processor.reset(); } // Tests the case where the sync thread shuts down first. TEST_F(SyncContextProxyImplTest, SyncDisconnectsFirst) { - scoped_ptr<ModelTypeProcessorImpl> themes_sync_proxy( - new ModelTypeProcessorImpl(syncer::THEMES, - base::WeakPtr<ModelTypeStore>())); - themes_sync_proxy->Enable(GetProxy()); + scoped_ptr<ModelTypeProcessorImpl> processor(new ModelTypeProcessorImpl( + syncer::THEMES, base::WeakPtr<ModelTypeStore>())); + Start(processor.get()); base::RunLoop run_loop_; run_loop_.RunUntilIdle(); - EXPECT_TRUE(themes_sync_proxy->IsConnected()); + EXPECT_TRUE(processor->IsConnected()); DisableSync(); }
diff --git a/sync/sync_tests.gypi b/sync/sync_tests.gypi index 6ea7b6c..c374da2 100644 --- a/sync/sync_tests.gypi +++ b/sync/sync_tests.gypi
@@ -41,8 +41,6 @@ 'test/engine/fake_model_worker.h', 'test/engine/fake_sync_scheduler.cc', 'test/engine/fake_sync_scheduler.h', - 'test/engine/injectable_sync_context_proxy.cc', - 'test/engine/injectable_sync_context_proxy.h', 'test/engine/mock_commit_queue.cc', 'test/engine/mock_commit_queue.h', 'test/engine/mock_connection_manager.cc',
diff --git a/sync/test/engine/injectable_sync_context_proxy.cc b/sync/test/engine/injectable_sync_context_proxy.cc deleted file mode 100644 index 0999caf..0000000 --- a/sync/test/engine/injectable_sync_context_proxy.cc +++ /dev/null
@@ -1,49 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "sync/test/engine/injectable_sync_context_proxy.h" - -#include "sync/engine/commit_queue.h" -#include "sync/engine/model_type_processor_impl.h" -#include "sync/internal_api/public/activation_context.h" - -namespace syncer_v2 { - -InjectableSyncContextProxy::InjectableSyncContextProxy( - CommitQueue* queue) - : is_worker_connected_(false), queue_(queue) { -} - -InjectableSyncContextProxy::~InjectableSyncContextProxy() { -} - -void InjectableSyncContextProxy::ConnectTypeToSync( - syncer::ModelType type, - scoped_ptr<ActivationContext> activation_context) { - // This class is allowed to participate in only one connection. - DCHECK(!is_worker_connected_); - is_worker_connected_ = true; - - // Hands off ownership of our member to the type_processor, while keeping - // an unsafe pointer to it. This is why we can only connect once. - scoped_ptr<CommitQueue> queue(queue_); - - activation_context->type_processor->OnConnect(queue.Pass()); -} - -void InjectableSyncContextProxy::Disconnect(syncer::ModelType type) { - // This should delete the queue, but we don't own it. - queue_ = NULL; -} - -scoped_ptr<SyncContextProxy> InjectableSyncContextProxy::Clone() const { - // This confuses ownership. We trust that our callers are well-behaved. - return scoped_ptr<SyncContextProxy>(new InjectableSyncContextProxy(queue_)); -} - -CommitQueue* InjectableSyncContextProxy::GetQueue() { - return queue_; -} - -} // namespace syncer
diff --git a/sync/test/engine/injectable_sync_context_proxy.h b/sync/test/engine/injectable_sync_context_proxy.h deleted file mode 100644 index 53c8f04..0000000 --- a/sync/test/engine/injectable_sync_context_proxy.h +++ /dev/null
@@ -1,46 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef SYNC_TEST_ENGINE_INJECTABLE_SYNC_CONTEXT_PROXY_H_ -#define SYNC_TEST_ENGINE_INJECTABLE_SYNC_CONTEXT_PROXY_H_ - -#include "sync/internal_api/public/base/model_type.h" -#include "sync/internal_api/public/non_blocking_sync_common.h" -#include "sync/internal_api/public/sync_context_proxy.h" - -namespace syncer_v2 { - -class ModelTypeProcessor; -class CommitQueue; - -// A SyncContextProxy implementation that, when a connection request is made, -// initalizes a connection to a previously injected ModelTypeProcessor. -class InjectableSyncContextProxy : public SyncContextProxy { - public: - explicit InjectableSyncContextProxy(CommitQueue* queue); - ~InjectableSyncContextProxy() override; - - void ConnectTypeToSync( - syncer::ModelType type, - scoped_ptr<ActivationContext> activation_context) override; - void Disconnect(syncer::ModelType type) override; - scoped_ptr<SyncContextProxy> Clone() const override; - - CommitQueue* GetQueue(); - - private: - // A flag to ensure ConnectTypeToSync is called at most once. - bool is_worker_connected_; - - // The ModelTypeProcessor's contract expects that it gets to own this object, - // so we can retain only a non-owned pointer to it. - // - // This is very unsafe, but we can get away with it since these tests are not - // exercising the proxy <-> worker connection code. - CommitQueue* queue_; -}; - -} // namespace syncer - -#endif // SYNC_TEST_ENGINE_INJECTABLE_SYNC_CONTEXT_PROXY_H_
diff --git a/third_party/Python-Markdown/LICENSE.md b/third_party/Python-Markdown/LICENSE.md new file mode 100644 index 0000000..4cd8b14 --- /dev/null +++ b/third_party/Python-Markdown/LICENSE.md
@@ -0,0 +1,30 @@ +Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later) +Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b) +Copyright 2004 Manfred Stienstra (the original version) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +* Neither the name of the <organization> nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE PYTHON MARKDOWN PROJECT ''AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL ANY CONTRIBUTORS TO THE PYTHON MARKDOWN PROJECT +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. +
diff --git a/third_party/Python-Markdown/OWNERS b/third_party/Python-Markdown/OWNERS new file mode 100644 index 0000000..3fc266c --- /dev/null +++ b/third_party/Python-Markdown/OWNERS
@@ -0,0 +1,2 @@ +dpranke@chromium.org +nodir@chromium.org
diff --git a/third_party/Python-Markdown/README.chromium b/third_party/Python-Markdown/README.chromium new file mode 100644 index 0000000..d3b1653 --- /dev/null +++ b/third_party/Python-Markdown/README.chromium
@@ -0,0 +1,34 @@ +Name: Python-Markdown +URL: https://pypi.python.org/packages/source/M/Markdown/Markdown-2.6.2.tar.gz#md5=256d19afcc564dc4ce4c229bb762f7ae +Version: 2.6.2 +Revision: None +Security Critical: no +License: BSD +License File: NOT_SHIPPED + +Description: + +Python-Markdown is a pure Python parser for Markdown. + +//tools/md_browser uses it to provide a local previewer for our +Markdown-based documentation. + +This code is not considered security critical since it is only used by +developer utilities. This should never be linked into chrome or any production +code. + +To update this, do something roughly along the lines of: + + cd .. + wget $URL # A newer version of the above URL + tar xvzf Markdown-$VERSION.tar.gz + cd Markdown-$VERSION + rm -fr bin docs INSTALL.md makefile MANIFEST PKG-INFO \ + run-tests.py setup.py tests tox.ini + cp ../Python-Markdown/README.chromium . + + # update the version numbers in README.chromium + + cd .. + rm -fr Python-Markdown + mv Markdown-$VERSION Python-Markdown
diff --git a/third_party/Python-Markdown/README.md b/third_party/Python-Markdown/README.md new file mode 100644 index 0000000..a7f8550 --- /dev/null +++ b/third_party/Python-Markdown/README.md
@@ -0,0 +1,35 @@ +[Python-Markdown][] +=================== + +[](https://travis-ci.org/waylan/Python-Markdown) +[](https://coveralls.io/r/waylan/Python-Markdown?branch=master) +[](https://pypi.python.org/pypi/Markdown#downloads) +[](http://pypi.python.org/pypi/Markdown) +[](http://opensource.org/licenses/BSD-3-Clause) + +This is a Python implementation of John Gruber's [Markdown][]. +It is almost completely compliant with the reference implementation, +though there are a few known issues. See [Features][] for information +on what exactly is supported and what is not. Additional features are +supported by the [Available Extensions][]. + +[Python-Markdown]: https://pythonhosted.org/Markdown/ +[Markdown]: http://daringfireball.net/projects/markdown/ +[Features]: https://pythonhosted.org/Markdown/index.html#Features +[Available Extensions]: https://pythonhosted.org/Markdown/extensions/index.html + + +Documentation +------------- + +Installation and usage documentation is available in the `docs/` directory +of the distribution and on the project website at +<https://pythonhosted.org/Markdown/>. + +Support +------- + +You may ask for help and discuss various other issues on the [mailing list][] and report bugs on the [bug tracker][]. + +[mailing list]: http://lists.sourceforge.net/lists/listinfo/python-markdown-discuss +[bug tracker]: http://github.com/waylan/Python-Markdown/issues
diff --git a/third_party/Python-Markdown/markdown/__init__.py b/third_party/Python-Markdown/markdown/__init__.py new file mode 100644 index 0000000..1b865531 --- /dev/null +++ b/third_party/Python-Markdown/markdown/__init__.py
@@ -0,0 +1,529 @@ +""" +Python Markdown +=============== + +Python Markdown converts Markdown to HTML and can be used as a library or +called from the command line. + +## Basic usage as a module: + + import markdown + html = markdown.markdown(your_text_string) + +See <https://pythonhosted.org/Markdown/> for more +information and instructions on how to extend the functionality of +Python Markdown. Read that before you try modifying this file. + +## Authors and License + +Started by [Manfred Stienstra](http://www.dwerg.net/). Continued and +maintained by [Yuri Takhteyev](http://www.freewisdom.org), [Waylan +Limberg](http://achinghead.com/) and [Artem Yunusov](http://blog.splyer.com). + +Contact: markdown@freewisdom.org + +Copyright 2007-2013 The Python Markdown Project (v. 1.7 and later) +Copyright 200? Django Software Foundation (OrderedDict implementation) +Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b) +Copyright 2004 Manfred Stienstra (the original version) + +License: BSD (see LICENSE for details). +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from .__version__ import version, version_info # noqa +import codecs +import sys +import logging +import warnings +import importlib +from . import util +from .preprocessors import build_preprocessors +from .blockprocessors import build_block_parser +from .treeprocessors import build_treeprocessors +from .inlinepatterns import build_inlinepatterns +from .postprocessors import build_postprocessors +from .extensions import Extension +from .serializers import to_html_string, to_xhtml_string + +__all__ = ['Markdown', 'markdown', 'markdownFromFile'] + + +logger = logging.getLogger('MARKDOWN') + + +class Markdown(object): + """Convert Markdown to HTML.""" + + doc_tag = "div" # Element used to wrap document - later removed + + option_defaults = { + 'html_replacement_text': '[HTML_REMOVED]', + 'tab_length': 4, + 'enable_attributes': True, + 'smart_emphasis': True, + 'lazy_ol': True, + } + + output_formats = { + 'html': to_html_string, + 'html4': to_html_string, + 'html5': to_html_string, + 'xhtml': to_xhtml_string, + 'xhtml1': to_xhtml_string, + 'xhtml5': to_xhtml_string, + } + + ESCAPED_CHARS = ['\\', '`', '*', '_', '{', '}', '[', ']', + '(', ')', '>', '#', '+', '-', '.', '!'] + + def __init__(self, *args, **kwargs): + """ + Creates a new Markdown instance. + + Keyword arguments: + + * extensions: A list of extensions. + If they are of type string, the module mdx_name.py will be loaded. + If they are a subclass of markdown.Extension, they will be used + as-is. + * extension_configs: Configuration settings for extensions. + * output_format: Format of output. Supported formats are: + * "xhtml1": Outputs XHTML 1.x. Default. + * "xhtml5": Outputs XHTML style tags of HTML 5 + * "xhtml": Outputs latest supported version of XHTML + (currently XHTML 1.1). + * "html4": Outputs HTML 4 + * "html5": Outputs HTML style tags of HTML 5 + * "html": Outputs latest supported version of HTML + (currently HTML 4). + Note that it is suggested that the more specific formats ("xhtml1" + and "html4") be used as "xhtml" or "html" may change in the future + if it makes sense at that time. + * safe_mode: Deprecated! Disallow raw html. One of "remove", "replace" + or "escape". + * html_replacement_text: Deprecated! Text used when safe_mode is set + to "replace". + * tab_length: Length of tabs in the source. Default: 4 + * enable_attributes: Enable the conversion of attributes. 
Default: True + * smart_emphasis: Treat `_connected_words_` intelligently Default: True + * lazy_ol: Ignore number of first item of ordered lists. Default: True + + """ + + # For backward compatibility, loop through old positional args + pos = ['extensions', 'extension_configs', 'safe_mode', 'output_format'] + for c, arg in enumerate(args): + if pos[c] not in kwargs: + kwargs[pos[c]] = arg + if c+1 == len(pos): # pragma: no cover + # ignore any additional args + break + if len(args): + warnings.warn('Positional arguments are deprecated in Markdown. ' + 'Use keyword arguments only.', + DeprecationWarning) + + # Loop through kwargs and assign defaults + for option, default in self.option_defaults.items(): + setattr(self, option, kwargs.get(option, default)) + + self.safeMode = kwargs.get('safe_mode', False) + if self.safeMode and 'enable_attributes' not in kwargs: + # Disable attributes in safeMode when not explicitly set + self.enable_attributes = False + + if 'safe_mode' in kwargs: + warnings.warn('"safe_mode" is deprecated in Python-Markdown. ' + 'Use an HTML sanitizer (like ' + 'Bleach http://bleach.readthedocs.org/) ' + 'if you are parsing untrusted markdown text. ' + 'See the 2.6 release notes for more info', + DeprecationWarning) + + if 'html_replacement_text' in kwargs: + warnings.warn('The "html_replacement_text" keyword is ' + 'deprecated along with "safe_mode".', + DeprecationWarning) + + self.registeredExtensions = [] + self.docType = "" + self.stripTopLevelTags = True + + self.build_parser() + + self.references = {} + self.htmlStash = util.HtmlStash() + self.registerExtensions(extensions=kwargs.get('extensions', []), + configs=kwargs.get('extension_configs', {})) + self.set_output_format(kwargs.get('output_format', 'xhtml1')) + self.reset() + + def build_parser(self): + """ Build the parser from the various parts. """ + self.preprocessors = build_preprocessors(self) + self.parser = build_block_parser(self) + self.inlinePatterns = build_inlinepatterns(self) + self.treeprocessors = build_treeprocessors(self) + self.postprocessors = build_postprocessors(self) + return self + + def registerExtensions(self, extensions, configs): + """ + Register extensions with this instance of Markdown. + + Keyword arguments: + + * extensions: A list of extensions, which can either + be strings or objects. See the docstring on Markdown. + * configs: A dictionary mapping module names to config options. + + """ + for ext in extensions: + if isinstance(ext, util.string_type): + ext = self.build_extension(ext, configs.get(ext, {})) + if isinstance(ext, Extension): + ext.extendMarkdown(self, globals()) + logger.debug( + 'Successfully loaded extension "%s.%s".' + % (ext.__class__.__module__, ext.__class__.__name__) + ) + elif ext is not None: + raise TypeError( + 'Extension "%s.%s" must be of type: "markdown.Extension"' + % (ext.__class__.__module__, ext.__class__.__name__)) + + return self + + def build_extension(self, ext_name, configs): + """Build extension by name, then return the module. 
+ + The extension name may contain arguments as part of the string in the + following format: "extname(key1=value1,key2=value2)" + + """ + + configs = dict(configs) + + # Parse extensions config params (ignore the order) + pos = ext_name.find("(") # find the first "(" + if pos > 0: + ext_args = ext_name[pos+1:-1] + ext_name = ext_name[:pos] + pairs = [x.split("=") for x in ext_args.split(",")] + configs.update([(x.strip(), y.strip()) for (x, y) in pairs]) + warnings.warn('Setting configs in the Named Extension string is ' + 'deprecated. It is recommended that you ' + 'pass an instance of the extension class to ' + 'Markdown or use the "extension_configs" keyword. ' + 'The current behavior will raise an error in version 2.7. ' + 'See the Release Notes for Python-Markdown version ' + '2.6 for more info.', DeprecationWarning) + + # Get class name (if provided): `path.to.module:ClassName` + ext_name, class_name = ext_name.split(':', 1) \ + if ':' in ext_name else (ext_name, '') + + # Try loading the extension first from one place, then another + try: + # Assume string uses dot syntax (`path.to.some.module`) + module = importlib.import_module(ext_name) + logger.debug( + 'Successfuly imported extension module "%s".' % ext_name + ) + # For backward compat (until deprecation) + # check that this is an extension. + if ('.' not in ext_name and not (hasattr(module, 'makeExtension') or + (class_name and hasattr(module, class_name)))): + # We have a name conflict + # eg: extensions=['tables'] and PyTables is installed + raise ImportError + except ImportError: + # Preppend `markdown.extensions.` to name + module_name = '.'.join(['markdown.extensions', ext_name]) + try: + module = importlib.import_module(module_name) + logger.debug( + 'Successfuly imported extension module "%s".' % + module_name + ) + warnings.warn('Using short names for Markdown\'s builtin ' + 'extensions is deprecated. Use the ' + 'full path to the extension with Python\'s dot ' + 'notation (eg: "%s" instead of "%s"). The ' + 'current behavior will raise an error in version ' + '2.7. See the Release Notes for ' + 'Python-Markdown version 2.6 for more info.' % + (module_name, ext_name), + DeprecationWarning) + except ImportError: + # Preppend `mdx_` to name + module_name_old_style = '_'.join(['mdx', ext_name]) + try: + module = importlib.import_module(module_name_old_style) + logger.debug( + 'Successfuly imported extension module "%s".' % + module_name_old_style) + warnings.warn('Markdown\'s behavior of prepending "mdx_" ' + 'to an extension name is deprecated. ' + 'Use the full path to the ' + 'extension with Python\'s dot notation ' + '(eg: "%s" instead of "%s"). The current ' + 'behavior will raise an error in version 2.7. ' + 'See the Release Notes for Python-Markdown ' + 'version 2.6 for more info.' % + (module_name_old_style, ext_name), + DeprecationWarning) + except ImportError as e: + message = "Failed loading extension '%s' from '%s', '%s' " \ + "or '%s'" % (ext_name, ext_name, module_name, + module_name_old_style) + e.args = (message,) + e.args[1:] + raise + + if class_name: + # Load given class name from module. + return getattr(module, class_name)(**configs) + else: + # Expect makeExtension() function to return a class. 
+ try: + return module.makeExtension(**configs) + except AttributeError as e: + message = e.args[0] + message = "Failed to initiate extension " \ + "'%s': %s" % (ext_name, message) + e.args = (message,) + e.args[1:] + raise + + def registerExtension(self, extension): + """ This gets called by the extension """ + self.registeredExtensions.append(extension) + return self + + def reset(self): + """ + Resets all state variables so that we can start with a new text. + """ + self.htmlStash.reset() + self.references.clear() + + for extension in self.registeredExtensions: + if hasattr(extension, 'reset'): + extension.reset() + + return self + + def set_output_format(self, format): + """ Set the output format for the class instance. """ + self.output_format = format.lower() + try: + self.serializer = self.output_formats[self.output_format] + except KeyError as e: + valid_formats = list(self.output_formats.keys()) + valid_formats.sort() + message = 'Invalid Output Format: "%s". Use one of %s.' \ + % (self.output_format, + '"' + '", "'.join(valid_formats) + '"') + e.args = (message,) + e.args[1:] + raise + return self + + def convert(self, source): + """ + Convert markdown to serialized XHTML or HTML. + + Keyword arguments: + + * source: Source text as a Unicode string. + + Markdown processing takes place in five steps: + + 1. A bunch of "preprocessors" munge the input text. + 2. BlockParser() parses the high-level structural elements of the + pre-processed text into an ElementTree. + 3. A bunch of "treeprocessors" are run against the ElementTree. One + such treeprocessor runs InlinePatterns against the ElementTree, + detecting inline markup. + 4. Some post-processors are run against the text after the ElementTree + has been serialized into text. + 5. The output is written to a string. + + """ + + # Fixup the source text + if not source.strip(): + return '' # a blank unicode string + + try: + source = util.text_type(source) + except UnicodeDecodeError as e: + # Customise error message while maintaining original trackback + e.reason += '. -- Note: Markdown only accepts unicode input!' + raise + + # Split into lines and run the line preprocessors. + self.lines = source.split("\n") + for prep in self.preprocessors.values(): + self.lines = prep.run(self.lines) + + # Parse the high-level elements. + root = self.parser.parseDocument(self.lines).getroot() + + # Run the tree-processors + for treeprocessor in self.treeprocessors.values(): + newRoot = treeprocessor.run(root) + if newRoot is not None: + root = newRoot + + # Serialize _properly_. Strip top-level tags. + output = self.serializer(root) + if self.stripTopLevelTags: + try: + start = output.index( + '<%s>' % self.doc_tag) + len(self.doc_tag) + 2 + end = output.rindex('</%s>' % self.doc_tag) + output = output[start:end].strip() + except ValueError: # pragma: no cover + if output.strip().endswith('<%s />' % self.doc_tag): + # We have an empty document + output = '' + else: + # We have a serious problem + raise ValueError('Markdown failed to strip top-level ' + 'tags. Document=%r' % output.strip()) + + # Run the text post-processors + for pp in self.postprocessors.values(): + output = pp.run(output) + + return output.strip() + + def convertFile(self, input=None, output=None, encoding=None): + """Converts a Markdown file and returns the HTML as a Unicode string. 
+ + Decodes the file using the provided encoding (defaults to utf-8), + passes the file content to markdown, and outputs the html to either + the provided stream or the file with provided name, using the same + encoding as the source file. The 'xmlcharrefreplace' error handler is + used when encoding the output. + + **Note:** This is the only place that decoding and encoding of Unicode + takes place in Python-Markdown. (All other code is Unicode-in / + Unicode-out.) + + Keyword arguments: + + * input: File object or path. Reads from stdin if `None`. + * output: File object or path. Writes to stdout if `None`. + * encoding: Encoding of input and output files. Defaults to utf-8. + + """ + + encoding = encoding or "utf-8" + + # Read the source + if input: + if isinstance(input, util.string_type): + input_file = codecs.open(input, mode="r", encoding=encoding) + else: + input_file = codecs.getreader(encoding)(input) + text = input_file.read() + input_file.close() + else: + text = sys.stdin.read() + if not isinstance(text, util.text_type): + text = text.decode(encoding) + + text = text.lstrip('\ufeff') # remove the byte-order mark + + # Convert + html = self.convert(text) + + # Write to file or stdout + if output: + if isinstance(output, util.string_type): + output_file = codecs.open(output, "w", + encoding=encoding, + errors="xmlcharrefreplace") + output_file.write(html) + output_file.close() + else: + writer = codecs.getwriter(encoding) + output_file = writer(output, errors="xmlcharrefreplace") + output_file.write(html) + # Don't close here. User may want to write more. + else: + # Encode manually and write bytes to stdout. + html = html.encode(encoding, "xmlcharrefreplace") + try: + # Write bytes directly to buffer (Python 3). + sys.stdout.buffer.write(html) + except AttributeError: + # Probably Python 2, which works with bytes by default. + sys.stdout.write(html) + + return self + + +""" +EXPORTED FUNCTIONS +============================================================================= + +Those are the two functions we really mean to export: markdown() and +markdownFromFile(). +""" + + +def markdown(text, *args, **kwargs): + """Convert a Markdown string to HTML and return HTML as a Unicode string. + + This is a shortcut function for `Markdown` class to cover the most + basic use case. It initializes an instance of Markdown, loads the + necessary extensions and runs the parser on the given text. + + Keyword arguments: + + * text: Markdown formatted text as Unicode or ASCII string. + * Any arguments accepted by the Markdown class. + + Returns: An HTML document as a string. + + """ + md = Markdown(*args, **kwargs) + return md.convert(text) + + +def markdownFromFile(*args, **kwargs): + """Read markdown code from a file and write it to a file or a stream. + + This is a shortcut function which initializes an instance of Markdown, + and calls the convertFile method rather than convert. + + Keyword arguments: + + * input: a file name or readable object. + * output: a file name or writable object. + * encoding: Encoding of input and output. + * Any arguments accepted by the Markdown class. + + """ + # For backward compatibility loop through positional args + pos = ['input', 'output', 'extensions', 'encoding'] + c = 0 + for arg in args: + if pos[c] not in kwargs: + kwargs[pos[c]] = arg + c += 1 + if c == len(pos): + break + if len(args): + warnings.warn('Positional arguments are depreacted in ' + 'Markdown and will raise an error in version 2.7. 
' + 'Use keyword arguments only.', + DeprecationWarning) + + md = Markdown(**kwargs) + md.convertFile(kwargs.get('input', None), + kwargs.get('output', None), + kwargs.get('encoding', None))
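A quick illustration (not part of the imported sources) of the module-level API the docstrings above describe, assuming the vendored copy is importable as markdown; the input string is a placeholder:

import markdown

text = u"# Hello\n\nSome *Markdown* text."   # placeholder input

# One-shot helper: builds a Markdown instance and converts one string.
html = markdown.markdown(text, output_format='html5')

# Reusable instance: reset() clears per-document state (references, HTML stash)
# between conversions.
md = markdown.Markdown(output_format='html5')
html_again = md.convert(text)
md.reset()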
diff --git a/third_party/Python-Markdown/markdown/__main__.py b/third_party/Python-Markdown/markdown/__main__.py new file mode 100644 index 0000000..17bfa9f --- /dev/null +++ b/third_party/Python-Markdown/markdown/__main__.py
@@ -0,0 +1,136 @@ +""" +COMMAND-LINE SPECIFIC STUFF +============================================================================= + +""" + +import sys +import optparse +import codecs +import warnings +import markdown +try: + import yaml +except ImportError: # pragma: no cover + import json as yaml + +import logging +from logging import DEBUG, WARNING, CRITICAL + +logger = logging.getLogger('MARKDOWN') + + +def parse_options(args=None, values=None): + """ + Define and parse `optparse` options for command-line usage. + """ + usage = """%prog [options] [INPUTFILE] + (STDIN is assumed if no INPUTFILE is given)""" + desc = "A Python implementation of John Gruber's Markdown. " \ + "https://pythonhosted.org/Markdown/" + ver = "%%prog %s" % markdown.version + + parser = optparse.OptionParser(usage=usage, description=desc, version=ver) + parser.add_option("-f", "--file", dest="filename", default=None, + help="Write output to OUTPUT_FILE. Defaults to STDOUT.", + metavar="OUTPUT_FILE") + parser.add_option("-e", "--encoding", dest="encoding", + help="Encoding for input and output files.",) + parser.add_option("-s", "--safe", dest="safe", default=False, + metavar="SAFE_MODE", + help="Deprecated! 'replace', 'remove' or 'escape' HTML " + "tags in input") + parser.add_option("-o", "--output_format", dest="output_format", + default='xhtml1', metavar="OUTPUT_FORMAT", + help="'xhtml1' (default), 'html4' or 'html5'.") + parser.add_option("-n", "--no_lazy_ol", dest="lazy_ol", + action='store_false', default=True, + help="Observe number of first item of ordered lists.") + parser.add_option("-x", "--extension", action="append", dest="extensions", + help="Load extension EXTENSION.", metavar="EXTENSION") + parser.add_option("-c", "--extension_configs", + dest="configfile", default=None, + help="Read extension configurations from CONFIG_FILE. " + "CONFIG_FILE must be of JSON or YAML format. YAML" + "format requires that a python YAML library be " + "installed. The parsed JSON or YAML must result in a " + "python dictionary which would be accepted by the " + "'extension_configs' keyword on the markdown.Markdown " + "class. 
The extensions must also be loaded with the " + "`--extension` option.", + metavar="CONFIG_FILE") + parser.add_option("-q", "--quiet", default=CRITICAL, + action="store_const", const=CRITICAL+10, dest="verbose", + help="Suppress all warnings.") + parser.add_option("-v", "--verbose", + action="store_const", const=WARNING, dest="verbose", + help="Print all warnings.") + parser.add_option("--noisy", + action="store_const", const=DEBUG, dest="verbose", + help="Print debug messages.") + + (options, args) = parser.parse_args(args, values) + + if len(args) == 0: + input_file = None + else: + input_file = args[0] + + if not options.extensions: + options.extensions = [] + + extension_configs = {} + if options.configfile: + with codecs.open( + options.configfile, mode="r", encoding=options.encoding + ) as fp: + try: + extension_configs = yaml.load(fp) + except Exception as e: + message = "Failed parsing extension config file: %s" % \ + options.configfile + e.args = (message,) + e.args[1:] + raise + + opts = { + 'input': input_file, + 'output': options.filename, + 'extensions': options.extensions, + 'extension_configs': extension_configs, + 'encoding': options.encoding, + 'output_format': options.output_format, + 'lazy_ol': options.lazy_ol + } + + if options.safe: + # Avoid deprecation warning if user didn't set option + opts['safe_mode'] = options.safe + + return opts, options.verbose + + +def run(): # pragma: no cover + """Run Markdown from the command line.""" + + # Parse options and adjust logging level if necessary + options, logging_level = parse_options() + if not options: + sys.exit(2) + logger.setLevel(logging_level) + console_handler = logging.StreamHandler() + logger.addHandler(console_handler) + if logging_level <= WARNING: + # Ensure deprecation warnings get displayed + warnings.filterwarnings('default') + logging.captureWarnings(True) + warn_logger = logging.getLogger('py.warnings') + warn_logger.addHandler(console_handler) + + # Run + markdown.markdownFromFile(**options) + + +if __name__ == '__main__': # pragma: no cover + # Support running module as a commandline command. + # Python 2.7 & 3.x do: `python -m markdown [options] [args]`. + run()
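A hedged sketch of the programmatic equivalent of the command line handled above, roughly "python -m markdown -o html5 -x markdown.extensions.toc -c cfg.json doc.md -f doc.html"; the file names, the extension choice, and the config contents are assumptions, not something the patch ships:

import json
import markdown

# parse_options() accepts JSON or YAML for -c; plain JSON needs no extra library.
with open('cfg.json') as fp:                 # placeholder config file
    extension_configs = json.load(fp)

markdown.markdownFromFile(
    input='doc.md',                          # placeholder paths
    output='doc.html',
    encoding='utf-8',
    extensions=['markdown.extensions.toc'],  # assumes this extension is present in the import
    extension_configs=extension_configs,
    output_format='html5')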
diff --git a/third_party/Python-Markdown/markdown/__version__.py b/third_party/Python-Markdown/markdown/__version__.py new file mode 100644 index 0000000..34425040 --- /dev/null +++ b/third_party/Python-Markdown/markdown/__version__.py
@@ -0,0 +1,29 @@ +# +# markdown/__version__.py +# +# version_info should conform to PEP 386 +# (major, minor, micro, alpha/beta/rc/final, #) +# (1, 1, 2, 'alpha', 0) => "1.1.2.dev" +# (1, 2, 0, 'beta', 2) => "1.2b2" +version_info = (2, 6, 2, 'final', 0) + + +def _get_version(): + " Returns a PEP 386-compliant version number from version_info. " + assert len(version_info) == 5 + assert version_info[3] in ('alpha', 'beta', 'rc', 'final') + + parts = 2 if version_info[2] == 0 else 3 + main = '.'.join(map(str, version_info[:parts])) + + sub = '' + if version_info[3] == 'alpha' and version_info[4] == 0: + # TODO: maybe append some sort of git info here?? + sub = '.dev' + elif version_info[3] != 'final': + mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} + sub = mapping[version_info[3]] + str(version_info[4]) + + return str(main + sub) + +version = _get_version()
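The header comments above already give the mapping from version_info to the version string; spelled out against _get_version() (illustrative only, assuming the vendored copy is importable as markdown):

import markdown

# (2, 6, 2, 'final', 0) -> "2.6.2"       micro != 0, final release
# (1, 2, 0, 'beta', 2)  -> "1.2b2"       micro == 0 is dropped, beta serial kept
# (1, 1, 2, 'alpha', 0) -> "1.1.2.dev"   alpha with serial 0 marks a dev build
print(markdown.version_info)   # (2, 6, 2, 'final', 0) for this import
print(markdown.version)        # "2.6.2"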
diff --git a/third_party/Python-Markdown/markdown/blockparser.py b/third_party/Python-Markdown/markdown/blockparser.py new file mode 100644 index 0000000..32d3254 --- /dev/null +++ b/third_party/Python-Markdown/markdown/blockparser.py
@@ -0,0 +1,100 @@ +from __future__ import unicode_literals +from __future__ import absolute_import +from . import util +from . import odict + + +class State(list): + """ Track the current and nested state of the parser. + + This utility class is used to track the state of the BlockParser and + support multiple levels if nesting. It's just a simple API wrapped around + a list. Each time a state is set, that state is appended to the end of the + list. Each time a state is reset, that state is removed from the end of + the list. + + Therefore, each time a state is set for a nested block, that state must be + reset when we back out of that level of nesting or the state could be + corrupted. + + While all the methods of a list object are available, only the three + defined below need be used. + + """ + + def set(self, state): + """ Set a new state. """ + self.append(state) + + def reset(self): + """ Step back one step in nested state. """ + self.pop() + + def isstate(self, state): + """ Test that top (current) level is of given state. """ + if len(self): + return self[-1] == state + else: + return False + + +class BlockParser: + """ Parse Markdown blocks into an ElementTree object. + + A wrapper class that stitches the various BlockProcessors together, + looping through them and creating an ElementTree object. + """ + + def __init__(self, markdown): + self.blockprocessors = odict.OrderedDict() + self.state = State() + self.markdown = markdown + + def parseDocument(self, lines): + """ Parse a markdown document into an ElementTree. + + Given a list of lines, an ElementTree object (not just a parent + Element) is created and the root element is passed to the parser + as the parent. The ElementTree object is returned. + + This should only be called on an entire document, not pieces. + + """ + # Create a ElementTree from the lines + self.root = util.etree.Element(self.markdown.doc_tag) + self.parseChunk(self.root, '\n'.join(lines)) + return util.etree.ElementTree(self.root) + + def parseChunk(self, parent, text): + """ Parse a chunk of markdown text and attach to given etree node. + + While the ``text`` argument is generally assumed to contain multiple + blocks which will be split on blank lines, it could contain only one + block. Generally, this method would be called by extensions when + block parsing is required. + + The ``parent`` etree Element passed in is altered in place. + Nothing is returned. + + """ + self.parseBlocks(parent, text.split('\n\n')) + + def parseBlocks(self, parent, blocks): + """ Process blocks of markdown text and attach to given etree node. + + Given a list of ``blocks``, each blockprocessor is stepped through + until there are no blocks left. While an extension could potentially + call this method directly, it's generally expected to be used + internally. + + This is a public method as an extension may need to add/alter + additional BlockProcessors which call this method to recursively + parse a nested block. + + """ + while blocks: + for processor in self.blockprocessors.values(): + if processor.test(parent, blocks[0]): + if processor.run(parent, blocks) is not False: + # run returns True or None + break
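A rough sketch (assuming the vendored package is importable as markdown) of driving the BlockParser above directly, which Markdown.convert() normally does after the preprocessors run; the input lines are placeholders:

import markdown
from markdown import util

md = markdown.Markdown()
lines = ["# A heading", "", "A paragraph of text."]   # placeholder input

tree = md.parser.parseDocument(lines)   # ElementTree rooted at md.doc_tag ("div")
root = tree.getroot()                   # <div><h1>A heading</h1><p>...</p></div>
print(util.etree.tostring(root))        # inline patterns have not run at this stage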
diff --git a/third_party/Python-Markdown/markdown/blockprocessors.py b/third_party/Python-Markdown/markdown/blockprocessors.py new file mode 100644 index 0000000..29db022 --- /dev/null +++ b/third_party/Python-Markdown/markdown/blockprocessors.py
@@ -0,0 +1,563 @@ +""" +CORE MARKDOWN BLOCKPARSER +=========================================================================== + +This parser handles basic parsing of Markdown blocks. It doesn't concern +itself with inline elements such as **bold** or *italics*, but rather just +catches blocks, lists, quotes, etc. + +The BlockParser is made up of a bunch of BlockProssors, each handling a +different type of block. Extensions may add/replace/remove BlockProcessors +as they need to alter how markdown blocks are parsed. +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals +import logging +import re +from . import util +from .blockparser import BlockParser + +logger = logging.getLogger('MARKDOWN') + + +def build_block_parser(md_instance, **kwargs): + """ Build the default block parser used by Markdown. """ + parser = BlockParser(md_instance) + parser.blockprocessors['empty'] = EmptyBlockProcessor(parser) + parser.blockprocessors['indent'] = ListIndentProcessor(parser) + parser.blockprocessors['code'] = CodeBlockProcessor(parser) + parser.blockprocessors['hashheader'] = HashHeaderProcessor(parser) + parser.blockprocessors['setextheader'] = SetextHeaderProcessor(parser) + parser.blockprocessors['hr'] = HRProcessor(parser) + parser.blockprocessors['olist'] = OListProcessor(parser) + parser.blockprocessors['ulist'] = UListProcessor(parser) + parser.blockprocessors['quote'] = BlockQuoteProcessor(parser) + parser.blockprocessors['paragraph'] = ParagraphProcessor(parser) + return parser + + +class BlockProcessor: + """ Base class for block processors. + + Each subclass will provide the methods below to work with the source and + tree. Each processor will need to define it's own ``test`` and ``run`` + methods. The ``test`` method should return True or False, to indicate + whether the current block should be processed by this processor. If the + test passes, the parser will call the processors ``run`` method. + + """ + + def __init__(self, parser): + self.parser = parser + self.tab_length = parser.markdown.tab_length + + def lastChild(self, parent): + """ Return the last child of an etree element. """ + if len(parent): + return parent[-1] + else: + return None + + def detab(self, text): + """ Remove a tab from the front of each line of the given text. """ + newtext = [] + lines = text.split('\n') + for line in lines: + if line.startswith(' '*self.tab_length): + newtext.append(line[self.tab_length:]) + elif not line.strip(): + newtext.append('') + else: + break + return '\n'.join(newtext), '\n'.join(lines[len(newtext):]) + + def looseDetab(self, text, level=1): + """ Remove a tab from front of lines but allowing dedented lines. """ + lines = text.split('\n') + for i in range(len(lines)): + if lines[i].startswith(' '*self.tab_length*level): + lines[i] = lines[i][self.tab_length*level:] + return '\n'.join(lines) + + def test(self, parent, block): + """ Test for block type. Must be overridden by subclasses. + + As the parser loops through processors, it will call the ``test`` + method on each to determine if the given block of text is of that + type. This method must return a boolean ``True`` or ``False``. The + actual method of testing is left to the needs of that particular + block type. It could be as simple as ``block.startswith(some_string)`` + or a complex regular expression. As the block type may be different + depending on the parent of the block (i.e. 
inside a list), the parent + etree element is also provided and may be used as part of the test. + + Keywords: + + * ``parent``: A etree element which will be the parent of the block. + * ``block``: A block of text from the source which has been split at + blank lines. + """ + pass # pragma: no cover + + def run(self, parent, blocks): + """ Run processor. Must be overridden by subclasses. + + When the parser determines the appropriate type of a block, the parser + will call the corresponding processor's ``run`` method. This method + should parse the individual lines of the block and append them to + the etree. + + Note that both the ``parent`` and ``etree`` keywords are pointers + to instances of the objects which should be edited in place. Each + processor must make changes to the existing objects as there is no + mechanism to return new/different objects to replace them. + + This means that this method should be adding SubElements or adding text + to the parent, and should remove (``pop``) or add (``insert``) items to + the list of blocks. + + Keywords: + + * ``parent``: A etree element which is the parent of the current block. + * ``blocks``: A list of all remaining blocks of the document. + """ + pass # pragma: no cover + + +class ListIndentProcessor(BlockProcessor): + """ Process children of list items. + + Example: + * a list item + process this part + + or this part + + """ + + ITEM_TYPES = ['li'] + LIST_TYPES = ['ul', 'ol'] + + def __init__(self, *args): + BlockProcessor.__init__(self, *args) + self.INDENT_RE = re.compile(r'^(([ ]{%s})+)' % self.tab_length) + + def test(self, parent, block): + return block.startswith(' '*self.tab_length) and \ + not self.parser.state.isstate('detabbed') and \ + (parent.tag in self.ITEM_TYPES or + (len(parent) and parent[-1] is not None and + (parent[-1].tag in self.LIST_TYPES))) + + def run(self, parent, blocks): + block = blocks.pop(0) + level, sibling = self.get_level(parent, block) + block = self.looseDetab(block, level) + + self.parser.state.set('detabbed') + if parent.tag in self.ITEM_TYPES: + # It's possible that this parent has a 'ul' or 'ol' child list + # with a member. If that is the case, then that should be the + # parent. This is intended to catch the edge case of an indented + # list whose first member was parsed previous to this point + # see OListProcessor + if len(parent) and parent[-1].tag in self.LIST_TYPES: + self.parser.parseBlocks(parent[-1], [block]) + else: + # The parent is already a li. Just parse the child block. + self.parser.parseBlocks(parent, [block]) + elif sibling.tag in self.ITEM_TYPES: + # The sibling is a li. Use it as parent. + self.parser.parseBlocks(sibling, [block]) + elif len(sibling) and sibling[-1].tag in self.ITEM_TYPES: + # The parent is a list (``ol`` or ``ul``) which has children. + # Assume the last child li is the parent of this block. + if sibling[-1].text: + # If the parent li has text, that text needs to be moved to a p + # The p must be 'inserted' at beginning of list in the event + # that other children already exist i.e.; a nested sublist. + p = util.etree.Element('p') + p.text = sibling[-1].text + sibling[-1].text = '' + sibling[-1].insert(0, p) + self.parser.parseChunk(sibling[-1], block) + else: + self.create_item(sibling, block) + self.parser.state.reset() + + def create_item(self, parent, block): + """ Create a new li and parse the block with it as the parent. 
""" + li = util.etree.SubElement(parent, 'li') + self.parser.parseBlocks(li, [block]) + + def get_level(self, parent, block): + """ Get level of indent based on list level. """ + # Get indent level + m = self.INDENT_RE.match(block) + if m: + indent_level = len(m.group(1))/self.tab_length + else: + indent_level = 0 + if self.parser.state.isstate('list'): + # We're in a tightlist - so we already are at correct parent. + level = 1 + else: + # We're in a looselist - so we need to find parent. + level = 0 + # Step through children of tree to find matching indent level. + while indent_level > level: + child = self.lastChild(parent) + if (child is not None and + (child.tag in self.LIST_TYPES or child.tag in self.ITEM_TYPES)): + if child.tag in self.LIST_TYPES: + level += 1 + parent = child + else: + # No more child levels. If we're short of indent_level, + # we have a code block. So we stop here. + break + return level, parent + + +class CodeBlockProcessor(BlockProcessor): + """ Process code blocks. """ + + def test(self, parent, block): + return block.startswith(' '*self.tab_length) + + def run(self, parent, blocks): + sibling = self.lastChild(parent) + block = blocks.pop(0) + theRest = '' + if (sibling is not None and sibling.tag == "pre" and + len(sibling) and sibling[0].tag == "code"): + # The previous block was a code block. As blank lines do not start + # new code blocks, append this block to the previous, adding back + # linebreaks removed from the split into a list. + code = sibling[0] + block, theRest = self.detab(block) + code.text = util.AtomicString( + '%s\n%s\n' % (code.text, block.rstrip()) + ) + else: + # This is a new codeblock. Create the elements and insert text. + pre = util.etree.SubElement(parent, 'pre') + code = util.etree.SubElement(pre, 'code') + block, theRest = self.detab(block) + code.text = util.AtomicString('%s\n' % block.rstrip()) + if theRest: + # This block contained unindented line(s) after the first indented + # line. Insert these lines as the first block of the master blocks + # list for future processing. + blocks.insert(0, theRest) + + +class BlockQuoteProcessor(BlockProcessor): + + RE = re.compile(r'(^|\n)[ ]{0,3}>[ ]?(.*)') + + def test(self, parent, block): + return bool(self.RE.search(block)) + + def run(self, parent, blocks): + block = blocks.pop(0) + m = self.RE.search(block) + if m: + before = block[:m.start()] # Lines before blockquote + # Pass lines before blockquote in recursively for parsing forst. + self.parser.parseBlocks(parent, [before]) + # Remove ``> `` from begining of each line. + block = '\n'.join( + [self.clean(line) for line in block[m.start():].split('\n')] + ) + sibling = self.lastChild(parent) + if sibling is not None and sibling.tag == "blockquote": + # Previous block was a blockquote so set that as this blocks parent + quote = sibling + else: + # This is a new blockquote. Create a new parent element. + quote = util.etree.SubElement(parent, 'blockquote') + # Recursively parse block with blockquote as parent. + # change parser state so blockquotes embedded in lists use p tags + self.parser.state.set('blockquote') + self.parser.parseChunk(quote, block) + self.parser.state.reset() + + def clean(self, line): + """ Remove ``>`` from beginning of a line. """ + m = self.RE.match(line) + if line.strip() == ">": + return "" + elif m: + return m.group(2) + else: + return line + + +class OListProcessor(BlockProcessor): + """ Process ordered list blocks. """ + + TAG = 'ol' + # Detect an item (``1. item``). ``group(1)`` contains contents of item. 
+ RE = re.compile(r'^[ ]{0,3}\d+\.[ ]+(.*)') + # Detect items on secondary lines. they can be of either list type. + CHILD_RE = re.compile(r'^[ ]{0,3}((\d+\.)|[*+-])[ ]+(.*)') + # Detect indented (nested) items of either type + INDENT_RE = re.compile(r'^[ ]{4,7}((\d+\.)|[*+-])[ ]+.*') + # The integer (python string) with which the lists starts (default=1) + # Eg: If list is intialized as) + # 3. Item + # The ol tag will get starts="3" attribute + STARTSWITH = '1' + # List of allowed sibling tags. + SIBLING_TAGS = ['ol', 'ul'] + + def test(self, parent, block): + return bool(self.RE.match(block)) + + def run(self, parent, blocks): + # Check fr multiple items in one block. + items = self.get_items(blocks.pop(0)) + sibling = self.lastChild(parent) + + if sibling is not None and sibling.tag in self.SIBLING_TAGS: + # Previous block was a list item, so set that as parent + lst = sibling + # make sure previous item is in a p- if the item has text, + # then it isn't in a p + if lst[-1].text: + # since it's possible there are other children for this + # sibling, we can't just SubElement the p, we need to + # insert it as the first item. + p = util.etree.Element('p') + p.text = lst[-1].text + lst[-1].text = '' + lst[-1].insert(0, p) + # if the last item has a tail, then the tail needs to be put in a p + # likely only when a header is not followed by a blank line + lch = self.lastChild(lst[-1]) + if lch is not None and lch.tail: + p = util.etree.SubElement(lst[-1], 'p') + p.text = lch.tail.lstrip() + lch.tail = '' + + # parse first block differently as it gets wrapped in a p. + li = util.etree.SubElement(lst, 'li') + self.parser.state.set('looselist') + firstitem = items.pop(0) + self.parser.parseBlocks(li, [firstitem]) + self.parser.state.reset() + elif parent.tag in ['ol', 'ul']: + # this catches the edge case of a multi-item indented list whose + # first item is in a blank parent-list item: + # * * subitem1 + # * subitem2 + # see also ListIndentProcessor + lst = parent + else: + # This is a new list so create parent with appropriate tag. + lst = util.etree.SubElement(parent, self.TAG) + # Check if a custom start integer is set + if not self.parser.markdown.lazy_ol and self.STARTSWITH != '1': + lst.attrib['start'] = self.STARTSWITH + + self.parser.state.set('list') + # Loop through items in block, recursively parsing each with the + # appropriate parent. + for item in items: + if item.startswith(' '*self.tab_length): + # Item is indented. Parse with last item as parent + self.parser.parseBlocks(lst[-1], [item]) + else: + # New item. Create li and parse with it as parent + li = util.etree.SubElement(lst, 'li') + self.parser.parseBlocks(li, [item]) + self.parser.state.reset() + + def get_items(self, block): + """ Break a block into list items. """ + items = [] + for line in block.split('\n'): + m = self.CHILD_RE.match(line) + if m: + # This is a new list item + # Check first item for the start index + if not items and self.TAG == 'ol': + # Detect the integer value of first list item + INTEGER_RE = re.compile('(\d+)') + self.STARTSWITH = INTEGER_RE.match(m.group(1)).group() + # Append to the list + items.append(m.group(3)) + elif self.INDENT_RE.match(line): + # This is an indented (possibly nested) item. + if items[-1].startswith(' '*self.tab_length): + # Previous item was indented. Append to that item. + items[-1] = '%s\n%s' % (items[-1], line) + else: + items.append(line) + else: + # This is another line of previous item. Append to that item. 
+ items[-1] = '%s\n%s' % (items[-1], line) + return items + + +class UListProcessor(OListProcessor): + """ Process unordered list blocks. """ + + TAG = 'ul' + RE = re.compile(r'^[ ]{0,3}[*+-][ ]+(.*)') + + +class HashHeaderProcessor(BlockProcessor): + """ Process Hash Headers. """ + + # Detect a header at start of any line in block + RE = re.compile(r'(^|\n)(?P<level>#{1,6})(?P<header>.*?)#*(\n|$)') + + def test(self, parent, block): + return bool(self.RE.search(block)) + + def run(self, parent, blocks): + block = blocks.pop(0) + m = self.RE.search(block) + if m: + before = block[:m.start()] # All lines before header + after = block[m.end():] # All lines after header + if before: + # As the header was not the first line of the block and the + # lines before the header must be parsed first, + # recursively parse this lines as a block. + self.parser.parseBlocks(parent, [before]) + # Create header using named groups from RE + h = util.etree.SubElement(parent, 'h%d' % len(m.group('level'))) + h.text = m.group('header').strip() + if after: + # Insert remaining lines as first block for future parsing. + blocks.insert(0, after) + else: # pragma: no cover + # This should never happen, but just in case... + logger.warn("We've got a problem header: %r" % block) + + +class SetextHeaderProcessor(BlockProcessor): + """ Process Setext-style Headers. """ + + # Detect Setext-style header. Must be first 2 lines of block. + RE = re.compile(r'^.*?\n[=-]+[ ]*(\n|$)', re.MULTILINE) + + def test(self, parent, block): + return bool(self.RE.match(block)) + + def run(self, parent, blocks): + lines = blocks.pop(0).split('\n') + # Determine level. ``=`` is 1 and ``-`` is 2. + if lines[1].startswith('='): + level = 1 + else: + level = 2 + h = util.etree.SubElement(parent, 'h%d' % level) + h.text = lines[0].strip() + if len(lines) > 2: + # Block contains additional lines. Add to master blocks for later. + blocks.insert(0, '\n'.join(lines[2:])) + + +class HRProcessor(BlockProcessor): + """ Process Horizontal Rules. """ + + RE = r'^[ ]{0,3}((-+[ ]{0,2}){3,}|(_+[ ]{0,2}){3,}|(\*+[ ]{0,2}){3,})[ ]*' + # Detect hr on any line of a block. + SEARCH_RE = re.compile(RE, re.MULTILINE) + + def test(self, parent, block): + m = self.SEARCH_RE.search(block) + # No atomic grouping in python so we simulate it here for performance. + # The regex only matches what would be in the atomic group - the HR. + # Then check if we are at end of block or if next char is a newline. + if m and (m.end() == len(block) or block[m.end()] == '\n'): + # Save match object on class instance so we can use it later. + self.match = m + return True + return False + + def run(self, parent, blocks): + block = blocks.pop(0) + # Check for lines in block before hr. + prelines = block[:self.match.start()].rstrip('\n') + if prelines: + # Recursively parse lines before hr so they get parsed first. + self.parser.parseBlocks(parent, [prelines]) + # create hr + util.etree.SubElement(parent, 'hr') + # check for lines in block after hr. + postlines = block[self.match.end():].lstrip('\n') + if postlines: + # Add lines after hr to master blocks for later parsing. + blocks.insert(0, postlines) + + +class EmptyBlockProcessor(BlockProcessor): + """ Process blocks that are empty or start with an empty line. """ + + def test(self, parent, block): + return not block or block.startswith('\n') + + def run(self, parent, blocks): + block = blocks.pop(0) + filler = '\n\n' + if block: + # Starts with empty line + # Only replace a single line. 
+ filler = '\n' + # Save the rest for later. + theRest = block[1:] + if theRest: + # Add remaining lines to master blocks for later. + blocks.insert(0, theRest) + sibling = self.lastChild(parent) + if (sibling is not None and sibling.tag == 'pre' and + len(sibling) and sibling[0].tag == 'code'): + # Last block is a codeblock. Append to preserve whitespace. + sibling[0].text = util.AtomicString( + '%s%s' % (sibling[0].text, filler) + ) + + +class ParagraphProcessor(BlockProcessor): + """ Process Paragraph blocks. """ + + def test(self, parent, block): + return True + + def run(self, parent, blocks): + block = blocks.pop(0) + if block.strip(): + # Not a blank block. Add to parent, otherwise throw it away. + if self.parser.state.isstate('list'): + # The parent is a tight-list. + # + # Check for any children. This will likely only happen in a + # tight-list when a header isn't followed by a blank line. + # For example: + # + # * # Header + # Line 2 of list item - not part of header. + sibling = self.lastChild(parent) + if sibling is not None: + # Insetrt after sibling. + if sibling.tail: + sibling.tail = '%s\n%s' % (sibling.tail, block) + else: + sibling.tail = '\n%s' % block + else: + # Append to parent.text + if parent.text: + parent.text = '%s\n%s' % (parent.text, block) + else: + parent.text = block.lstrip() + else: + # Create a regular paragraph + p = util.etree.SubElement(parent, 'p') + p.text = block.lstrip()
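For context while reviewing: a minimal sketch of the test/run contract and the registration API described in the docstrings above. It is illustrative only and not part of this patch; the AsideProcessor and AsideExtension names are invented, and it assumes the vendored package is importable as markdown with its usual top-level markdown.markdown() helper.

import re

import markdown
from markdown.blockprocessors import BlockProcessor
from markdown.extensions import Extension
from markdown.util import etree


class AsideProcessor(BlockProcessor):
    """ Turn blocks whose first line starts with '%% ' into an <aside>. """

    RE = re.compile(r'^%% ')

    def test(self, parent, block):
        # Claim the block when its first line carries the marker.
        return bool(self.RE.match(block))

    def run(self, parent, blocks):
        block = blocks.pop(0)
        aside = etree.SubElement(parent, 'aside')
        # Drop the marker and hand the remainder back to the parser.
        self.parser.parseChunk(aside, self.RE.sub('', block, count=1))


class AsideExtension(Extension):
    def extendMarkdown(self, md, md_globals):
        # Run just ahead of the stock paragraph processor.
        md.parser.blockprocessors.add(
            'aside', AsideProcessor(md.parser), '<paragraph')


print(markdown.markdown('%% Take note of this.',
                        extensions=[AsideExtension()]))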
diff --git a/third_party/Python-Markdown/markdown/extensions/__init__.py b/third_party/Python-Markdown/markdown/extensions/__init__.py
new file mode 100644
index 0000000..6e7a08a
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/__init__.py
@@ -0,0 +1,100 @@ +""" +Extensions +----------------------------------------------------------------------------- +""" + +from __future__ import unicode_literals +from ..util import parseBoolValue +import warnings + + +class Extension(object): + """ Base class for extensions to subclass. """ + + # Default config -- to be overriden by a subclass + # Must be of the following format: + # { + # 'key': ['value', 'description'] + # } + # Note that Extension.setConfig will raise a KeyError + # if a default is not set here. + config = {} + + def __init__(self, *args, **kwargs): + """ Initiate Extension and set up configs. """ + + # check for configs arg for backward compat. + # (there only ever used to be one so we use arg[0]) + if len(args): + if args[0] is not None: + self.setConfigs(args[0]) + warnings.warn('Extension classes accepting positional args is ' + 'pending Deprecation. Each setting should be ' + 'passed into the Class as a keyword. Positional ' + 'args are deprecated and will raise ' + 'an error in version 2.7. See the Release Notes for ' + 'Python-Markdown version 2.6 for more info.', + DeprecationWarning) + # check for configs kwarg for backward compat. + if 'configs' in kwargs.keys(): + if kwargs['configs'] is not None: + self.setConfigs(kwargs.pop('configs', {})) + warnings.warn('Extension classes accepting a dict on the single ' + 'keyword "config" is pending Deprecation. Each ' + 'setting should be passed into the Class as a ' + 'keyword directly. The "config" keyword is ' + 'deprecated and raise an error in ' + 'version 2.7. See the Release Notes for ' + 'Python-Markdown version 2.6 for more info.', + DeprecationWarning) + # finally, use kwargs + self.setConfigs(kwargs) + + def getConfig(self, key, default=''): + """ Return a setting for the given key or an empty string. """ + if key in self.config: + return self.config[key][0] + else: + return default + + def getConfigs(self): + """ Return all configs settings as a dict. """ + return dict([(key, self.getConfig(key)) for key in self.config.keys()]) + + def getConfigInfo(self): + """ Return all config descriptions as a list of tuples. """ + return [(key, self.config[key][1]) for key in self.config.keys()] + + def setConfig(self, key, value): + """ Set a config setting for `key` with the given `value`. """ + if isinstance(self.config[key][0], bool): + value = parseBoolValue(value) + if self.config[key][0] is None: + value = parseBoolValue(value, preserve_none=True) + self.config[key][0] = value + + def setConfigs(self, items): + """ Set multiple config settings given a dict or list of tuples. """ + if hasattr(items, 'items'): + # it's a dict + items = items.items() + for key, value in items: + self.setConfig(key, value) + + def extendMarkdown(self, md, md_globals): + """ + Add the various proccesors and patterns to the Markdown Instance. + + This method must be overriden by every extension. + + Keyword arguments: + + * md: The Markdown instance. + + * md_globals: Global variables in the markdown module namespace. + + """ + raise NotImplementedError( + 'Extension "%s.%s" must define an "extendMarkdown"' + 'method.' % (self.__class__.__module__, self.__class__.__name__) + )
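A hypothetical illustration of the config contract documented above; the ShoutExtension class and its options are invented, but the coercion behaviour follows setConfig() and getConfigs() as defined in this file.

from markdown.extensions import Extension


class ShoutExtension(Extension):
    # Every option needs a [default, description] pair; setConfig()
    # raises KeyError for keys that are not declared here.
    config = {
        'loud': [True, 'Upper-case all output. Default: True'],
        'suffix': ['!', 'String appended to every paragraph.'],
    }

    def extendMarkdown(self, md, md_globals):
        pass  # processors would be registered here using self.getConfig()


# Keyword arguments are routed through setConfigs()/setConfig(); because the
# 'loud' default is a bool, the string 'false' is coerced via parseBoolValue().
ext = ShoutExtension(loud='false', suffix='?')
print(ext.getConfigs())  # {'loud': False, 'suffix': '?'} (key order may vary)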
diff --git a/third_party/Python-Markdown/markdown/extensions/abbr.py b/third_party/Python-Markdown/markdown/extensions/abbr.py
new file mode 100644
index 0000000..353d126
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/abbr.py
@@ -0,0 +1,91 @@ +''' +Abbreviation Extension for Python-Markdown +========================================== + +This extension adds abbreviation handling to Python-Markdown. + +See <https://pythonhosted.org/Markdown/extensions/abbreviations.html> +for documentation. + +Oringinal code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/) and + [Seemant Kulleen](http://www.kulleen.org/) + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +''' + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..preprocessors import Preprocessor +from ..inlinepatterns import Pattern +from ..util import etree, AtomicString +import re + +# Global Vars +ABBR_REF_RE = re.compile(r'[*]\[(?P<abbr>[^\]]*)\][ ]?:\s*(?P<title>.*)') + + +class AbbrExtension(Extension): + """ Abbreviation Extension for Python-Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Insert AbbrPreprocessor before ReferencePreprocessor. """ + md.preprocessors.add('abbr', AbbrPreprocessor(md), '<reference') + + +class AbbrPreprocessor(Preprocessor): + """ Abbreviation Preprocessor - parse text for abbr references. """ + + def run(self, lines): + ''' + Find and remove all Abbreviation references from the text. + Each reference is set as a new AbbrPattern in the markdown instance. + + ''' + new_text = [] + for line in lines: + m = ABBR_REF_RE.match(line) + if m: + abbr = m.group('abbr').strip() + title = m.group('title').strip() + self.markdown.inlinePatterns['abbr-%s' % abbr] = \ + AbbrPattern(self._generate_pattern(abbr), title) + else: + new_text.append(line) + return new_text + + def _generate_pattern(self, text): + ''' + Given a string, returns an regex pattern to match that string. + + 'HTML' -> r'(?P<abbr>[H][T][M][L])' + + Note: we force each char as a literal match (in brackets) as we don't + know what they will be beforehand. + + ''' + chars = list(text) + for i in range(len(chars)): + chars[i] = r'[%s]' % chars[i] + return r'(?P<abbr>\b%s\b)' % (r''.join(chars)) + + +class AbbrPattern(Pattern): + """ Abbreviation inline pattern. """ + + def __init__(self, pattern, title): + super(AbbrPattern, self).__init__(pattern) + self.title = title + + def handleMatch(self, m): + abbr = etree.Element('abbr') + abbr.text = AtomicString(m.group('abbr')) + abbr.set('title', self.title) + return abbr + + +def makeExtension(*args, **kwargs): + return AbbrExtension(*args, **kwargs)
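A short usage sketch of the reference syntax matched by ABBR_REF_RE; the sample text and the expected markup are illustrative, and the snippet assumes the top-level markdown.markdown() helper.

import markdown

text = """\
The HTML spec is maintained by the W3C.

*[HTML]: Hyper Text Markup Language
*[W3C]: World Wide Web Consortium
"""
print(markdown.markdown(text, extensions=['markdown.extensions.abbr']))
# Roughly: <p>The <abbr title="Hyper Text Markup Language">HTML</abbr> spec
# is maintained by the <abbr title="World Wide Web Consortium">W3C</abbr>.</p>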
diff --git a/third_party/Python-Markdown/markdown/extensions/admonition.py b/third_party/Python-Markdown/markdown/extensions/admonition.py
new file mode 100644
index 0000000..76e0fb5
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/admonition.py
@@ -0,0 +1,96 @@ +""" +Admonition extension for Python-Markdown +======================================== + +Adds rST-style admonitions. Inspired by [rST][] feature with the same name. + +[rST]: http://docutils.sourceforge.net/docs/ref/rst/directives.html#specific-admonitions # noqa + +See <https://pythonhosted.org/Markdown/extensions/admonition.html> +for documentation. + +Original code Copyright [Tiago Serafim](http://www.tiagoserafim.com/). + +All changes Copyright The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..blockprocessors import BlockProcessor +from ..util import etree +import re + + +class AdmonitionExtension(Extension): + """ Admonition extension for Python-Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Add Admonition to Markdown instance. """ + md.registerExtension(self) + + md.parser.blockprocessors.add('admonition', + AdmonitionProcessor(md.parser), + '_begin') + + +class AdmonitionProcessor(BlockProcessor): + + CLASSNAME = 'admonition' + CLASSNAME_TITLE = 'admonition-title' + RE = re.compile(r'(?:^|\n)!!!\ ?([\w\-]+)(?:\ "(.*?)")?') + + def test(self, parent, block): + sibling = self.lastChild(parent) + return self.RE.search(block) or \ + (block.startswith(' ' * self.tab_length) and sibling is not None and + sibling.get('class', '').find(self.CLASSNAME) != -1) + + def run(self, parent, blocks): + sibling = self.lastChild(parent) + block = blocks.pop(0) + m = self.RE.search(block) + + if m: + block = block[m.end() + 1:] # removes the first line + + block, theRest = self.detab(block) + + if m: + klass, title = self.get_class_and_title(m) + div = etree.SubElement(parent, 'div') + div.set('class', '%s %s' % (self.CLASSNAME, klass)) + if title: + p = etree.SubElement(div, 'p') + p.text = title + p.set('class', self.CLASSNAME_TITLE) + else: + div = sibling + + self.parser.parseChunk(div, block) + + if theRest: + # This block contained unindented line(s) after the first indented + # line. Insert these lines as the first block of the master blocks + # list for future processing. + blocks.insert(0, theRest) + + def get_class_and_title(self, match): + klass, title = match.group(1).lower(), match.group(2) + if title is None: + # no title was provided, use the capitalized classname as title + # e.g.: `!!! note` will render + # `<p class="admonition-title">Note</p>` + title = klass.capitalize() + elif title == '': + # an explicit blank title should not be rendered + # e.g.: `!!! warning ""` will *not* render `p` with a title + title = None + return klass, title + + +def makeExtension(*args, **kwargs): + return AdmonitionExtension(*args, **kwargs)
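A usage sketch of the admonition syntax handled by AdmonitionProcessor (illustrative; assumes the top-level markdown.markdown() helper, and the output shown is approximate).

import markdown

text = """\
!!! note "Heads up"
    The indented lines form the body of the admonition and are parsed
    recursively, so they may contain further Markdown.
"""
print(markdown.markdown(text, extensions=['markdown.extensions.admonition']))
# Roughly:
# <div class="admonition note">
# <p class="admonition-title">Heads up</p>
# <p>The indented lines form the body of the admonition and are parsed
# recursively, so they may contain further Markdown.</p>
# </div>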
diff --git a/third_party/Python-Markdown/markdown/extensions/attr_list.py b/third_party/Python-Markdown/markdown/extensions/attr_list.py
new file mode 100644
index 0000000..683bdf83
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/attr_list.py
@@ -0,0 +1,177 @@ +""" +Attribute List Extension for Python-Markdown +============================================ + +Adds attribute list syntax. Inspired by +[maruku](http://maruku.rubyforge.org/proposal.html#attribute_lists)'s +feature of the same name. + +See <https://pythonhosted.org/Markdown/extensions/attr_list.html> +for documentation. + +Original code Copyright 2011 [Waylan Limberg](http://achinghead.com/). + +All changes Copyright 2011-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..treeprocessors import Treeprocessor +from ..util import isBlockLevel +import re + +try: + Scanner = re.Scanner +except AttributeError: # pragma: no cover + # must be on Python 2.4 + from sre import Scanner + + +def _handle_double_quote(s, t): + k, v = t.split('=') + return k, v.strip('"') + + +def _handle_single_quote(s, t): + k, v = t.split('=') + return k, v.strip("'") + + +def _handle_key_value(s, t): + return t.split('=') + + +def _handle_word(s, t): + if t.startswith('.'): + return '.', t[1:] + if t.startswith('#'): + return 'id', t[1:] + return t, t + +_scanner = Scanner([ + (r'[^ ]+=".*?"', _handle_double_quote), + (r"[^ ]+='.*?'", _handle_single_quote), + (r'[^ ]+=[^ =]+', _handle_key_value), + (r'[^ =]+', _handle_word), + (r' ', None) +]) + + +def get_attrs(str): + """ Parse attribute list and return a list of attribute tuples. """ + return _scanner.scan(str)[0] + + +def isheader(elem): + return elem.tag in ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'] + + +class AttrListTreeprocessor(Treeprocessor): + + BASE_RE = r'\{\:?([^\}]*)\}' + HEADER_RE = re.compile(r'[ ]+%s[ ]*$' % BASE_RE) + BLOCK_RE = re.compile(r'\n[ ]*%s[ ]*$' % BASE_RE) + INLINE_RE = re.compile(r'^%s' % BASE_RE) + NAME_RE = re.compile(r'[^A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff' + r'\u0370-\u037d\u037f-\u1fff\u200c-\u200d' + r'\u2070-\u218f\u2c00-\u2fef\u3001-\ud7ff' + r'\uf900-\ufdcf\ufdf0-\ufffd' + r'\:\-\.0-9\u00b7\u0300-\u036f\u203f-\u2040]+') + + def run(self, doc): + for elem in doc.getiterator(): + if isBlockLevel(elem.tag): + # Block level: check for attrs on last line of text + RE = self.BLOCK_RE + if isheader(elem) or elem.tag == 'dt': + # header or def-term: check for attrs at end of line + RE = self.HEADER_RE + if len(elem) and elem.tag == 'li': + # special case list items. children may include a ul or ol. + pos = None + # find the ul or ol position + for i, child in enumerate(elem): + if child.tag in ['ul', 'ol']: + pos = i + break + if pos is None and elem[-1].tail: + # use tail of last child. no ul or ol. + m = RE.search(elem[-1].tail) + if m: + self.assign_attrs(elem, m.group(1)) + elem[-1].tail = elem[-1].tail[:m.start()] + elif pos is not None and pos > 0 and elem[pos-1].tail: + # use tail of last child before ul or ol + m = RE.search(elem[pos-1].tail) + if m: + self.assign_attrs(elem, m.group(1)) + elem[pos-1].tail = elem[pos-1].tail[:m.start()] + elif elem.text: + # use text. ul is first child. + m = RE.search(elem.text) + if m: + self.assign_attrs(elem, m.group(1)) + elem.text = elem.text[:m.start()] + elif len(elem) and elem[-1].tail: + # has children. Get from tail of last child + m = RE.search(elem[-1].tail) + if m: + self.assign_attrs(elem, m.group(1)) + elem[-1].tail = elem[-1].tail[:m.start()] + if isheader(elem): + # clean up trailing #s + elem[-1].tail = elem[-1].tail.rstrip('#').rstrip() + elif elem.text: + # no children. 
Get from text. + m = RE.search(elem.text) + if not m and elem.tag == 'td': + m = re.search(self.BASE_RE, elem.text) + if m: + self.assign_attrs(elem, m.group(1)) + elem.text = elem.text[:m.start()] + if isheader(elem): + # clean up trailing #s + elem.text = elem.text.rstrip('#').rstrip() + else: + # inline: check for attrs at start of tail + if elem.tail: + m = self.INLINE_RE.match(elem.tail) + if m: + self.assign_attrs(elem, m.group(1)) + elem.tail = elem.tail[m.end():] + + def assign_attrs(self, elem, attrs): + """ Assign attrs to element. """ + for k, v in get_attrs(attrs): + if k == '.': + # add to class + cls = elem.get('class') + if cls: + elem.set('class', '%s %s' % (cls, v)) + else: + elem.set('class', v) + else: + # assign attr k with v + elem.set(self.sanitize_name(k), v) + + def sanitize_name(self, name): + """ + Sanitize name as 'an XML Name, minus the ":"'. + See http://www.w3.org/TR/REC-xml-names/#NT-NCName + """ + return self.NAME_RE.sub('_', name) + + +class AttrListExtension(Extension): + def extendMarkdown(self, md, md_globals): + md.treeprocessors.add( + 'attr_list', AttrListTreeprocessor(md), '>prettify' + ) + + +def makeExtension(*args, **kwargs): + return AttrListExtension(*args, **kwargs)
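A usage sketch of the attribute-list syntax handled above (illustrative; assumes the top-level markdown.markdown() helper, and attribute ordering in the serialized output may differ).

import markdown

text = '### Pricing {: #pricing .plans }'
print(markdown.markdown(text, extensions=['markdown.extensions.attr_list']))
# Roughly: <h3 class="plans" id="pricing">Pricing</h3>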
diff --git a/third_party/Python-Markdown/markdown/extensions/codehilite.py b/third_party/Python-Markdown/markdown/extensions/codehilite.py
new file mode 100644
index 0000000..0657c37
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/codehilite.py
@@ -0,0 +1,265 @@ +""" +CodeHilite Extension for Python-Markdown +======================================== + +Adds code/syntax highlighting to standard Python-Markdown code blocks. + +See <https://pythonhosted.org/Markdown/extensions/code_hilite.html> +for documentation. + +Original code Copyright 2006-2008 [Waylan Limberg](http://achinghead.com/). + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..treeprocessors import Treeprocessor + +try: + from pygments import highlight + from pygments.lexers import get_lexer_by_name, guess_lexer + from pygments.formatters import get_formatter_by_name + pygments = True +except ImportError: + pygments = False + + +def parse_hl_lines(expr): + """Support our syntax for emphasizing certain lines of code. + + expr should be like '1 2' to emphasize lines 1 and 2 of a code block. + Returns a list of ints, the line numbers to emphasize. + """ + if not expr: + return [] + + try: + return list(map(int, expr.split())) + except ValueError: + return [] + + +# ------------------ The Main CodeHilite Class ---------------------- +class CodeHilite(object): + """ + Determine language of source code, and pass it into pygments hilighter. + + Basic Usage: + >>> code = CodeHilite(src = 'some text') + >>> html = code.hilite() + + * src: Source string or any object with a .readline attribute. + + * linenums: (Boolean) Set line numbering to 'on' (True), + 'off' (False) or 'auto'(None). Set to 'auto' by default. + + * guess_lang: (Boolean) Turn language auto-detection + 'on' or 'off' (on by default). + + * css_class: Set class name of wrapper div ('codehilite' by default). + + * hl_lines: (List of integers) Lines to emphasize, 1-indexed. + + Low Level Usage: + >>> code = CodeHilite() + >>> code.src = 'some text' # String or anything with a .readline attr. + >>> code.linenos = True # Turns line numbering on or of. + >>> html = code.hilite() + + """ + + def __init__(self, src=None, linenums=None, guess_lang=True, + css_class="codehilite", lang=None, style='default', + noclasses=False, tab_length=4, hl_lines=None, use_pygments=True): + self.src = src + self.lang = lang + self.linenums = linenums + self.guess_lang = guess_lang + self.css_class = css_class + self.style = style + self.noclasses = noclasses + self.tab_length = tab_length + self.hl_lines = hl_lines or [] + self.use_pygments = use_pygments + + def hilite(self): + """ + Pass code to the [Pygments](http://pygments.pocoo.org/) highliter with + optional line numbers. The output should then be styled with css to + your liking. No styles are applied by default - only styling hooks + (i.e.: <span class="k">). + + returns : A string of html. 
+ + """ + + self.src = self.src.strip('\n') + + if self.lang is None: + self._parseHeader() + + if pygments and self.use_pygments: + try: + lexer = get_lexer_by_name(self.lang) + except ValueError: + try: + if self.guess_lang: + lexer = guess_lexer(self.src) + else: + lexer = get_lexer_by_name('text') + except ValueError: + lexer = get_lexer_by_name('text') + formatter = get_formatter_by_name('html', + linenos=self.linenums, + cssclass=self.css_class, + style=self.style, + noclasses=self.noclasses, + hl_lines=self.hl_lines) + return highlight(self.src, lexer, formatter) + else: + # just escape and build markup usable by JS highlighting libs + txt = self.src.replace('&', '&') + txt = txt.replace('<', '<') + txt = txt.replace('>', '>') + txt = txt.replace('"', '"') + classes = [] + if self.lang: + classes.append('language-%s' % self.lang) + if self.linenums: + classes.append('linenums') + class_str = '' + if classes: + class_str = ' class="%s"' % ' '.join(classes) + return '<pre class="%s"><code%s>%s</code></pre>\n' % \ + (self.css_class, class_str, txt) + + def _parseHeader(self): + """ + Determines language of a code block from shebang line and whether said + line should be removed or left in place. If the sheband line contains a + path (even a single /) then it is assumed to be a real shebang line and + left alone. However, if no path is given (e.i.: #!python or :::python) + then it is assumed to be a mock shebang for language identifitation of + a code fragment and removed from the code block prior to processing for + code highlighting. When a mock shebang (e.i: #!python) is found, line + numbering is turned on. When colons are found in place of a shebang + (e.i.: :::python), line numbering is left in the current state - off + by default. + + Also parses optional list of highlight lines, like: + + :::python hl_lines="1 3" + """ + + import re + + # split text into lines + lines = self.src.split("\n") + # pull first line to examine + fl = lines.pop(0) + + c = re.compile(r''' + (?:(?:^::+)|(?P<shebang>^[#]!)) # Shebang or 2 or more colons + (?P<path>(?:/\w+)*[/ ])? # Zero or 1 path + (?P<lang>[\w+-]*) # The language + \s* # Arbitrary whitespace + # Optional highlight lines, single- or double-quote-delimited + (hl_lines=(?P<quot>"|')(?P<hl_lines>.*?)(?P=quot))? + ''', re.VERBOSE) + # search first line for shebang + m = c.search(fl) + if m: + # we have a match + try: + self.lang = m.group('lang').lower() + except IndexError: + self.lang = None + if m.group('path'): + # path exists - restore first line + lines.insert(0, fl) + if self.linenums is None and m.group('shebang'): + # Overridable and Shebang exists - use line numbers + self.linenums = True + + self.hl_lines = parse_hl_lines(m.group('hl_lines')) + else: + # No match + lines.insert(0, fl) + + self.src = "\n".join(lines).strip("\n") + + +# ------------------ The Markdown Extension ------------------------------- + + +class HiliteTreeprocessor(Treeprocessor): + """ Hilight source code in code blocks. """ + + def run(self, root): + """ Find code blocks and store in htmlStash. 
""" + blocks = root.iter('pre') + for block in blocks: + if len(block) == 1 and block[0].tag == 'code': + code = CodeHilite( + block[0].text, + linenums=self.config['linenums'], + guess_lang=self.config['guess_lang'], + css_class=self.config['css_class'], + style=self.config['pygments_style'], + noclasses=self.config['noclasses'], + tab_length=self.markdown.tab_length, + use_pygments=self.config['use_pygments'] + ) + placeholder = self.markdown.htmlStash.store(code.hilite(), + safe=True) + # Clear codeblock in etree instance + block.clear() + # Change to p element which will later + # be removed when inserting raw html + block.tag = 'p' + block.text = placeholder + + +class CodeHiliteExtension(Extension): + """ Add source code hilighting to markdown codeblocks. """ + + def __init__(self, *args, **kwargs): + # define default configs + self.config = { + 'linenums': [None, + "Use lines numbers. True=yes, False=no, None=auto"], + 'guess_lang': [True, + "Automatic language detection - Default: True"], + 'css_class': ["codehilite", + "Set class name for wrapper <div> - " + "Default: codehilite"], + 'pygments_style': ['default', + 'Pygments HTML Formatter Style ' + '(Colorscheme) - Default: default'], + 'noclasses': [False, + 'Use inline styles instead of CSS classes - ' + 'Default false'], + 'use_pygments': [True, + 'Use Pygments to Highlight code blocks. ' + 'Disable if using a JavaScript library. ' + 'Default: True'] + } + + super(CodeHiliteExtension, self).__init__(*args, **kwargs) + + def extendMarkdown(self, md, md_globals): + """ Add HilitePostprocessor to Markdown instance. """ + hiliter = HiliteTreeprocessor(md) + hiliter.config = self.getConfigs() + md.treeprocessors.add("hilite", hiliter, "<inline") + + md.registerExtension(self) + + +def makeExtension(*args, **kwargs): + return CodeHiliteExtension(*args, **kwargs)
diff --git a/third_party/Python-Markdown/markdown/extensions/def_list.py b/third_party/Python-Markdown/markdown/extensions/def_list.py
new file mode 100644
index 0000000..77cca6eb
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/def_list.py
@@ -0,0 +1,115 @@ +""" +Definition List Extension for Python-Markdown +============================================= + +Adds parsing of Definition Lists to Python-Markdown. + +See <https://pythonhosted.org/Markdown/extensions/definition_lists.html> +for documentation. + +Original code Copyright 2008 [Waylan Limberg](http://achinghead.com) + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..blockprocessors import BlockProcessor, ListIndentProcessor +from ..util import etree +import re + + +class DefListProcessor(BlockProcessor): + """ Process Definition Lists. """ + + RE = re.compile(r'(^|\n)[ ]{0,3}:[ ]{1,3}(.*?)(\n|$)') + NO_INDENT_RE = re.compile(r'^[ ]{0,3}[^ :]') + + def test(self, parent, block): + return bool(self.RE.search(block)) + + def run(self, parent, blocks): + + raw_block = blocks.pop(0) + m = self.RE.search(raw_block) + terms = [l.strip() for l in + raw_block[:m.start()].split('\n') if l.strip()] + block = raw_block[m.end():] + no_indent = self.NO_INDENT_RE.match(block) + if no_indent: + d, theRest = (block, None) + else: + d, theRest = self.detab(block) + if d: + d = '%s\n%s' % (m.group(2), d) + else: + d = m.group(2) + sibling = self.lastChild(parent) + if not terms and sibling is None: + # This is not a definition item. Most likely a paragraph that + # starts with a colon at the begining of a document or list. + blocks.insert(0, raw_block) + return False + if not terms and sibling.tag == 'p': + # The previous paragraph contains the terms + state = 'looselist' + terms = sibling.text.split('\n') + parent.remove(sibling) + # Aquire new sibling + sibling = self.lastChild(parent) + else: + state = 'list' + + if sibling is not None and sibling.tag == 'dl': + # This is another item on an existing list + dl = sibling + if not terms and len(dl) and dl[-1].tag == 'dd' and len(dl[-1]): + state = 'looselist' + else: + # This is a new list + dl = etree.SubElement(parent, 'dl') + # Add terms + for term in terms: + dt = etree.SubElement(dl, 'dt') + dt.text = term + # Add definition + self.parser.state.set(state) + dd = etree.SubElement(dl, 'dd') + self.parser.parseBlocks(dd, [d]) + self.parser.state.reset() + + if theRest: + blocks.insert(0, theRest) + + +class DefListIndentProcessor(ListIndentProcessor): + """ Process indented children of definition list items. """ + + ITEM_TYPES = ['dd'] + LIST_TYPES = ['dl'] + + def create_item(self, parent, block): + """ Create a new dd and parse the block with it as the parent. """ + dd = etree.SubElement(parent, 'dd') + self.parser.parseBlocks(dd, [block]) + + +class DefListExtension(Extension): + """ Add definition lists to Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Add an instance of DefListProcessor to BlockParser. """ + md.parser.blockprocessors.add('defindent', + DefListIndentProcessor(md.parser), + '>indent') + md.parser.blockprocessors.add('deflist', + DefListProcessor(md.parser), + '>ulist') + + +def makeExtension(*args, **kwargs): + return DefListExtension(*args, **kwargs)
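A usage sketch of the definition-list syntax handled by DefListProcessor (illustrative; assumes the top-level markdown.markdown() helper).

import markdown

text = """\
Apple
:   A fruit with red or green skin.

Orange
:   A citrus fruit.
"""
print(markdown.markdown(text, extensions=['markdown.extensions.def_list']))
# Roughly: <dl><dt>Apple</dt><dd>A fruit with red or green skin.</dd>
# <dt>Orange</dt><dd>A citrus fruit.</dd></dl>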
diff --git a/third_party/Python-Markdown/markdown/extensions/extra.py b/third_party/Python-Markdown/markdown/extensions/extra.py
new file mode 100644
index 0000000..de5db03
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/extra.py
@@ -0,0 +1,132 @@ +""" +Python-Markdown Extra Extension +=============================== + +A compilation of various Python-Markdown extensions that imitates +[PHP Markdown Extra](http://michelf.com/projects/php-markdown/extra/). + +Note that each of the individual extensions still need to be available +on your PYTHONPATH. This extension simply wraps them all up as a +convenience so that only one extension needs to be listed when +initiating Markdown. See the documentation for each individual +extension for specifics about that extension. + +There may be additional extensions that are distributed with +Python-Markdown that are not included here in Extra. Those extensions +are not part of PHP Markdown Extra, and therefore, not part of +Python-Markdown Extra. If you really would like Extra to include +additional extensions, we suggest creating your own clone of Extra +under a differant name. You could also edit the `extensions` global +variable defined below, but be aware that such changes may be lost +when you upgrade to any future version of Python-Markdown. + +See <https://pythonhosted.org/Markdown/extensions/extra.html> +for documentation. + +Copyright The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..blockprocessors import BlockProcessor +from .. import util +import re + +extensions = [ + 'markdown.extensions.smart_strong', + 'markdown.extensions.fenced_code', + 'markdown.extensions.footnotes', + 'markdown.extensions.attr_list', + 'markdown.extensions.def_list', + 'markdown.extensions.tables', + 'markdown.extensions.abbr' +] + + +class ExtraExtension(Extension): + """ Add various extensions to Markdown class.""" + + def __init__(self, *args, **kwargs): + """ config is a dumb holder which gets passed to actual ext later. """ + self.config = kwargs.pop('configs', {}) + self.config.update(kwargs) + + def extendMarkdown(self, md, md_globals): + """ Register extension instances. """ + md.registerExtensions(extensions, self.config) + if not md.safeMode: + # Turn on processing of markdown text within raw html + md.preprocessors['html_block'].markdown_in_raw = True + md.parser.blockprocessors.add('markdown_block', + MarkdownInHtmlProcessor(md.parser), + '_begin') + md.parser.blockprocessors.tag_counter = -1 + md.parser.blockprocessors.contain_span_tags = re.compile( + r'^(p|h[1-6]|li|dd|dt|td|th|legend|address)$', re.IGNORECASE) + + +def makeExtension(*args, **kwargs): + return ExtraExtension(*args, **kwargs) + + +class MarkdownInHtmlProcessor(BlockProcessor): + """Process Markdown Inside HTML Blocks.""" + def test(self, parent, block): + return block == util.TAG_PLACEHOLDER % \ + str(self.parser.blockprocessors.tag_counter + 1) + + def _process_nests(self, element, block): + """Process the element's child elements in self.run.""" + # Build list of indexes of each nest within the parent element. + nest_index = [] # a list of tuples: (left index, right index) + i = self.parser.blockprocessors.tag_counter + 1 + while len(self._tag_data) > i and self._tag_data[i]['left_index']: + left_child_index = self._tag_data[i]['left_index'] + right_child_index = self._tag_data[i]['right_index'] + nest_index.append((left_child_index - 1, right_child_index)) + i += 1 + + # Create each nest subelement. 
+ for i, (left_index, right_index) in enumerate(nest_index[:-1]): + self.run(element, block[left_index:right_index], + block[right_index:nest_index[i + 1][0]], True) + self.run(element, block[nest_index[-1][0]:nest_index[-1][1]], # last + block[nest_index[-1][1]:], True) # nest + + def run(self, parent, blocks, tail=None, nest=False): + self._tag_data = self.parser.markdown.htmlStash.tag_data + + self.parser.blockprocessors.tag_counter += 1 + tag = self._tag_data[self.parser.blockprocessors.tag_counter] + + # Create Element + markdown_value = tag['attrs'].pop('markdown') + element = util.etree.SubElement(parent, tag['tag'], tag['attrs']) + + # Slice Off Block + if nest: + self.parser.parseBlocks(parent, tail) # Process Tail + block = blocks[1:] + else: # includes nests since a third level of nesting isn't supported + block = blocks[tag['left_index'] + 1: tag['right_index']] + del blocks[:tag['right_index']] + + # Process Text + if (self.parser.blockprocessors.contain_span_tags.match( # Span Mode + tag['tag']) and markdown_value != 'block') or \ + markdown_value == 'span': + element.text = '\n'.join(block) + else: # Block Mode + i = self.parser.blockprocessors.tag_counter + 1 + if len(self._tag_data) > i and self._tag_data[i]['left_index']: + first_subelement_index = self._tag_data[i]['left_index'] - 1 + self.parser.parseBlocks( + element, block[:first_subelement_index]) + if not nest: + block = self._process_nests(element, block) + else: + self.parser.parseBlocks(element, block)
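A usage sketch of the markdown-in-raw-HTML behaviour that ExtraExtension enables outside safe mode; it relies on the markdown="1" attribute convention inherited from PHP Markdown Extra and assumes the top-level markdown.markdown() helper.

import markdown

text = """\
<div markdown="1">
A *Markdown* paragraph inside a raw block-level HTML element.
</div>
"""
print(markdown.markdown(text, extensions=['markdown.extensions.extra']))
# Roughly: <div><p>A <em>Markdown</em> paragraph inside a raw block-level
# HTML element.</p></div>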
diff --git a/third_party/Python-Markdown/markdown/extensions/fenced_code.py b/third_party/Python-Markdown/markdown/extensions/fenced_code.py
new file mode 100644
index 0000000..4af8891a
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/fenced_code.py
@@ -0,0 +1,112 @@ +""" +Fenced Code Extension for Python Markdown +========================================= + +This extension adds Fenced Code Blocks to Python-Markdown. + +See <https://pythonhosted.org/Markdown/extensions/fenced_code_blocks.html> +for documentation. + +Original code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/). + + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..preprocessors import Preprocessor +from .codehilite import CodeHilite, CodeHiliteExtension, parse_hl_lines +import re + + +class FencedCodeExtension(Extension): + + def extendMarkdown(self, md, md_globals): + """ Add FencedBlockPreprocessor to the Markdown instance. """ + md.registerExtension(self) + + md.preprocessors.add('fenced_code_block', + FencedBlockPreprocessor(md), + ">normalize_whitespace") + + +class FencedBlockPreprocessor(Preprocessor): + FENCED_BLOCK_RE = re.compile(r''' +(?P<fence>^(?:~{3,}|`{3,}))[ ]* # Opening ``` or ~~~ +(\{?\.?(?P<lang>[a-zA-Z0-9_+-]*))?[ ]* # Optional {, and lang +# Optional highlight lines, single- or double-quote-delimited +(hl_lines=(?P<quot>"|')(?P<hl_lines>.*?)(?P=quot))?[ ]* +}?[ ]*\n # Optional closing } +(?P<code>.*?)(?<=\n) +(?P=fence)[ ]*$''', re.MULTILINE | re.DOTALL | re.VERBOSE) + CODE_WRAP = '<pre><code%s>%s</code></pre>' + LANG_TAG = ' class="%s"' + + def __init__(self, md): + super(FencedBlockPreprocessor, self).__init__(md) + + self.checked_for_codehilite = False + self.codehilite_conf = {} + + def run(self, lines): + """ Match and store Fenced Code Blocks in the HtmlStash. """ + + # Check for code hilite extension + if not self.checked_for_codehilite: + for ext in self.markdown.registeredExtensions: + if isinstance(ext, CodeHiliteExtension): + self.codehilite_conf = ext.config + break + + self.checked_for_codehilite = True + + text = "\n".join(lines) + while 1: + m = self.FENCED_BLOCK_RE.search(text) + if m: + lang = '' + if m.group('lang'): + lang = self.LANG_TAG % m.group('lang') + + # If config is not empty, then the codehighlite extension + # is enabled, so we call it to highlight the code + if self.codehilite_conf: + highliter = CodeHilite( + m.group('code'), + linenums=self.codehilite_conf['linenums'][0], + guess_lang=self.codehilite_conf['guess_lang'][0], + css_class=self.codehilite_conf['css_class'][0], + style=self.codehilite_conf['pygments_style'][0], + lang=(m.group('lang') or None), + noclasses=self.codehilite_conf['noclasses'][0], + hl_lines=parse_hl_lines(m.group('hl_lines')) + ) + + code = highliter.hilite() + else: + code = self.CODE_WRAP % (lang, + self._escape(m.group('code'))) + + placeholder = self.markdown.htmlStash.store(code, safe=True) + text = '%s\n%s\n%s' % (text[:m.start()], + placeholder, + text[m.end():]) + else: + break + return text.split("\n") + + def _escape(self, txt): + """ basic html escaping """ + txt = txt.replace('&', '&') + txt = txt.replace('<', '<') + txt = txt.replace('>', '>') + txt = txt.replace('"', '"') + return txt + + +def makeExtension(*args, **kwargs): + return FencedCodeExtension(*args, **kwargs)
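A usage sketch of the fence syntax matched by FENCED_BLOCK_RE (illustrative; assumes the top-level markdown.markdown() helper; the fallback markup shown applies when codehilite is not also enabled).

import markdown

text = '''\
```python hl_lines="2"
def add(a, b):
    return a + b
```
'''
print(markdown.markdown(text, extensions=['markdown.extensions.fenced_code']))
# Roughly: <pre><code class="python">def add(a, b):
#     return a + b
# </code></pre>
# With the codehilite extension enabled as well, the block is routed through
# CodeHilite and line 2 is emphasised.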
diff --git a/third_party/Python-Markdown/markdown/extensions/footnotes.py b/third_party/Python-Markdown/markdown/extensions/footnotes.py
new file mode 100644
index 0000000..d8caae27
--- /dev/null
+++ b/third_party/Python-Markdown/markdown/extensions/footnotes.py
@@ -0,0 +1,319 @@ +""" +Footnotes Extension for Python-Markdown +======================================= + +Adds footnote handling to Python-Markdown. + +See <https://pythonhosted.org/Markdown/extensions/footnotes.html> +for documentation. + +Copyright The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..preprocessors import Preprocessor +from ..inlinepatterns import Pattern +from ..treeprocessors import Treeprocessor +from ..postprocessors import Postprocessor +from ..util import etree, text_type +from ..odict import OrderedDict +import re + +FN_BACKLINK_TEXT = "zz1337820767766393qq" +NBSP_PLACEHOLDER = "qq3936677670287331zz" +DEF_RE = re.compile(r'[ ]{0,3}\[\^([^\]]*)\]:\s*(.*)') +TABBED_RE = re.compile(r'((\t)|( ))(.*)') + + +class FootnoteExtension(Extension): + """ Footnote Extension. """ + + def __init__(self, *args, **kwargs): + """ Setup configs. """ + + self.config = { + 'PLACE_MARKER': + ["///Footnotes Go Here///", + "The text string that marks where the footnotes go"], + 'UNIQUE_IDS': + [False, + "Avoid name collisions across " + "multiple calls to reset()."], + "BACKLINK_TEXT": + ["↩", + "The text string that links from the footnote " + "to the reader's place."] + } + super(FootnoteExtension, self).__init__(*args, **kwargs) + + # In multiple invocations, emit links that don't get tangled. + self.unique_prefix = 0 + + self.reset() + + def extendMarkdown(self, md, md_globals): + """ Add pieces to Markdown. """ + md.registerExtension(self) + self.parser = md.parser + self.md = md + # Insert a preprocessor before ReferencePreprocessor + md.preprocessors.add( + "footnote", FootnotePreprocessor(self), "<reference" + ) + # Insert an inline pattern before ImageReferencePattern + FOOTNOTE_RE = r'\[\^([^\]]*)\]' # blah blah [^1] blah + md.inlinePatterns.add( + "footnote", FootnotePattern(FOOTNOTE_RE, self), "<reference" + ) + # Insert a tree-processor that would actually add the footnote div + # This must be before all other treeprocessors (i.e., inline and + # codehilite) so they can run on the the contents of the div. + md.treeprocessors.add( + "footnote", FootnoteTreeprocessor(self), "_begin" + ) + # Insert a postprocessor after amp_substitute oricessor + md.postprocessors.add( + "footnote", FootnotePostprocessor(self), ">amp_substitute" + ) + + def reset(self): + """ Clear footnotes on reset, and prepare for distinct document. """ + self.footnotes = OrderedDict() + self.unique_prefix += 1 + + def findFootnotesPlaceholder(self, root): + """ Return ElementTree Element that contains Footnote placeholder. """ + def finder(element): + for child in element: + if child.text: + if child.text.find(self.getConfig("PLACE_MARKER")) > -1: + return child, element, True + if child.tail: + if child.tail.find(self.getConfig("PLACE_MARKER")) > -1: + return child, element, False + finder(child) + return None + + res = finder(root) + return res + + def setFootnote(self, id, text): + """ Store a footnote for later retrieval. """ + self.footnotes[id] = text + + def get_separator(self): + if self.md.output_format in ['html5', 'xhtml5']: + return '-' + return ':' + + def makeFootnoteId(self, id): + """ Return footnote link id. 
""" + if self.getConfig("UNIQUE_IDS"): + return 'fn%s%d-%s' % (self.get_separator(), self.unique_prefix, id) + else: + return 'fn%s%s' % (self.get_separator(), id) + + def makeFootnoteRefId(self, id): + """ Return footnote back-link id. """ + if self.getConfig("UNIQUE_IDS"): + return 'fnref%s%d-%s' % (self.get_separator(), + self.unique_prefix, id) + else: + return 'fnref%s%s' % (self.get_separator(), id) + + def makeFootnotesDiv(self, root): + """ Return div of footnotes as et Element. """ + + if not list(self.footnotes.keys()): + return None + + div = etree.Element("div") + div.set('class', 'footnote') + etree.SubElement(div, "hr") + ol = etree.SubElement(div, "ol") + + for id in self.footnotes.keys(): + li = etree.SubElement(ol, "li") + li.set("id", self.makeFootnoteId(id)) + self.parser.parseChunk(li, self.footnotes[id]) + backlink = etree.Element("a") + backlink.set("href", "#" + self.makeFootnoteRefId(id)) + if self.md.output_format not in ['html5', 'xhtml5']: + backlink.set("rev", "footnote") # Invalid in HTML5 + backlink.set("class", "footnote-backref") + backlink.set( + "title", + "Jump back to footnote %d in the text" % + (self.footnotes.index(id)+1) + ) + backlink.text = FN_BACKLINK_TEXT + + if li.getchildren(): + node = li[-1] + if node.tag == "p": + node.text = node.text + NBSP_PLACEHOLDER + node.append(backlink) + else: + p = etree.SubElement(li, "p") + p.append(backlink) + return div + + +class FootnotePreprocessor(Preprocessor): + """ Find all footnote references and store for later use. """ + + def __init__(self, footnotes): + self.footnotes = footnotes + + def run(self, lines): + """ + Loop through lines and find, set, and remove footnote definitions. + + Keywords: + + * lines: A list of lines of text + + Return: A list of lines of text with footnote definitions removed. + + """ + newlines = [] + i = 0 + while True: + m = DEF_RE.match(lines[i]) + if m: + fn, _i = self.detectTabbed(lines[i+1:]) + fn.insert(0, m.group(2)) + i += _i-1 # skip past footnote + self.footnotes.setFootnote(m.group(1), "\n".join(fn)) + else: + newlines.append(lines[i]) + if len(lines) > i+1: + i += 1 + else: + break + return newlines + + def detectTabbed(self, lines): + """ Find indented text and remove indent before further proccesing. + + Keyword arguments: + + * lines: an array of strings + + Returns: a list of post processed items and the index of last line. + + """ + items = [] + blank_line = False # have we encountered a blank line yet? + i = 0 # to keep track of where we are + + def detab(line): + match = TABBED_RE.match(line) + if match: + return match.group(4) + + for line in lines: + if line.strip(): # Non-blank line + detabbed_line = detab(line) + if detabbed_line: + items.append(detabbed_line) + i += 1 + continue + elif not blank_line and not DEF_RE.match(line): + # not tabbed but still part of first par. + items.append(line) + i += 1 + continue + else: + return items, i+1 + + else: # Blank line: _maybe_ we are done. + blank_line = True + i += 1 # advance + + # Find the next non-blank line + for j in range(i, len(lines)): + if lines[j].strip(): + next_line = lines[j] + break + else: + break # There is no more text; we are done. + + # Check if the next non-blank line is tabbed + if detab(next_line): # Yes, more work to do. + items.append("") + continue + else: + break # No, we are done. + else: + i += 1 + + return items, i + + +class FootnotePattern(Pattern): + """ InlinePattern for footnote markers in a document's body text. 
""" + + def __init__(self, pattern, footnotes): + super(FootnotePattern, self).__init__(pattern) + self.footnotes = footnotes + + def handleMatch(self, m): + id = m.group(2) + if id in self.footnotes.footnotes.keys(): + sup = etree.Element("sup") + a = etree.SubElement(sup, "a") + sup.set('id', self.footnotes.makeFootnoteRefId(id)) + a.set('href', '#' + self.footnotes.makeFootnoteId(id)) + if self.footnotes.md.output_format not in ['html5', 'xhtml5']: + a.set('rel', 'footnote') # invalid in HTML5 + a.set('class', 'footnote-ref') + a.text = text_type(self.footnotes.footnotes.index(id) + 1) + return sup + else: + return None + + +class FootnoteTreeprocessor(Treeprocessor): + """ Build and append footnote div to end of document. """ + + def __init__(self, footnotes): + self.footnotes = footnotes + + def run(self, root): + footnotesDiv = self.footnotes.makeFootnotesDiv(root) + if footnotesDiv is not None: + result = self.footnotes.findFootnotesPlaceholder(root) + if result: + child, parent, isText = result + ind = parent.getchildren().index(child) + if isText: + parent.remove(child) + parent.insert(ind, footnotesDiv) + else: + parent.insert(ind + 1, footnotesDiv) + child.tail = None + else: + root.append(footnotesDiv) + + +class FootnotePostprocessor(Postprocessor): + """ Replace placeholders with html entities. """ + def __init__(self, footnotes): + self.footnotes = footnotes + + def run(self, text): + text = text.replace( + FN_BACKLINK_TEXT, self.footnotes.getConfig("BACKLINK_TEXT") + ) + return text.replace(NBSP_PLACEHOLDER, " ") + + +def makeExtension(*args, **kwargs): + """ Return an instance of the FootnoteExtension """ + return FootnoteExtension(*args, **kwargs)
diff --git a/third_party/Python-Markdown/markdown/extensions/headerid.py b/third_party/Python-Markdown/markdown/extensions/headerid.py new file mode 100644 index 0000000..2cb20b97 --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/headerid.py
@@ -0,0 +1,97 @@ +""" +HeaderID Extension for Python-Markdown +====================================== + +Auto-generate id attributes for HTML headers. + +See <https://pythonhosted.org/Markdown/extensions/header_id.html> +for documentation. + +Original code Copyright 2007-2011 [Waylan Limberg](http://achinghead.com/). + +All changes Copyright 2011-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..treeprocessors import Treeprocessor +from ..util import parseBoolValue +from .toc import slugify, unique, stashedHTML2text +import warnings + + +class HeaderIdTreeprocessor(Treeprocessor): + """ Assign IDs to headers. """ + + IDs = set() + + def run(self, doc): + start_level, force_id = self._get_meta() + slugify = self.config['slugify'] + sep = self.config['separator'] + for elem in doc: + if elem.tag in ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']: + if force_id: + if "id" in elem.attrib: + id = elem.get('id') + else: + id = stashedHTML2text(''.join(elem.itertext()), self.md) + id = slugify(id, sep) + elem.set('id', unique(id, self.IDs)) + if start_level: + level = int(elem.tag[-1]) + start_level + if level > 6: + level = 6 + elem.tag = 'h%d' % level + + def _get_meta(self): + """ Return meta data suported by this ext as a tuple """ + level = int(self.config['level']) - 1 + force = parseBoolValue(self.config['forceid']) + if hasattr(self.md, 'Meta'): + if 'header_level' in self.md.Meta: + level = int(self.md.Meta['header_level'][0]) - 1 + if 'header_forceid' in self.md.Meta: + force = parseBoolValue(self.md.Meta['header_forceid'][0]) + return level, force + + +class HeaderIdExtension(Extension): + def __init__(self, *args, **kwargs): + # set defaults + self.config = { + 'level': ['1', 'Base level for headers.'], + 'forceid': ['True', 'Force all headers to have an id.'], + 'separator': ['-', 'Word separator.'], + 'slugify': [slugify, 'Callable to generate anchors'] + } + + super(HeaderIdExtension, self).__init__(*args, **kwargs) + + warnings.warn( + 'The HeaderId Extension is pending deprecation. Use the TOC Extension instead.', + PendingDeprecationWarning + ) + + def extendMarkdown(self, md, md_globals): + md.registerExtension(self) + self.processor = HeaderIdTreeprocessor() + self.processor.md = md + self.processor.config = self.getConfigs() + if 'attr_list' in md.treeprocessors.keys(): + # insert after attr_list treeprocessor + md.treeprocessors.add('headerid', self.processor, '>attr_list') + else: + # insert after 'prettify' treeprocessor. + md.treeprocessors.add('headerid', self.processor, '>prettify') + + def reset(self): + self.processor.IDs = set() + + +def makeExtension(*args, **kwargs): + return HeaderIdExtension(*args, **kwargs)
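headerid assigns slugified id attributes to h1-h6 elements; note that the PendingDeprecationWarning above already points users at the toc extension instead. A rough behavioural sketch, with the import path assumed:

    import markdown

    # "# My Header" becomes roughly '<h1 id="my-header">My Header</h1>',
    # using the slugify/unique helpers shared with the toc extension.
    html = markdown.markdown('# My Header',
                             extensions=['markdown.extensions.headerid'])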
diff --git a/third_party/Python-Markdown/markdown/extensions/meta.py b/third_party/Python-Markdown/markdown/extensions/meta.py new file mode 100644 index 0000000..711235e --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/meta.py
@@ -0,0 +1,78 @@ +""" +Meta Data Extension for Python-Markdown +======================================= + +This extension adds Meta Data handling to markdown. + +See <https://pythonhosted.org/Markdown/extensions/meta_data.html> +for documentation. + +Original code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com). + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..preprocessors import Preprocessor +import re +import logging + +log = logging.getLogger('MARKDOWN') + +# Global Vars +META_RE = re.compile(r'^[ ]{0,3}(?P<key>[A-Za-z0-9_-]+):\s*(?P<value>.*)') +META_MORE_RE = re.compile(r'^[ ]{4,}(?P<value>.*)') +BEGIN_RE = re.compile(r'^-{3}(\s.*)?') +END_RE = re.compile(r'^(-{3}|\.{3})(\s.*)?') + + +class MetaExtension (Extension): + """ Meta-Data extension for Python-Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Add MetaPreprocessor to Markdown instance. """ + md.preprocessors.add("meta", + MetaPreprocessor(md), + ">normalize_whitespace") + + +class MetaPreprocessor(Preprocessor): + """ Get Meta-Data. """ + + def run(self, lines): + """ Parse Meta-Data and store in Markdown.Meta. """ + meta = {} + key = None + if lines and BEGIN_RE.match(lines[0]): + lines.pop(0) + while lines: + line = lines.pop(0) + m1 = META_RE.match(line) + if line.strip() == '' or END_RE.match(line): + break # blank line or end of YAML header - done + if m1: + key = m1.group('key').lower().strip() + value = m1.group('value').strip() + try: + meta[key].append(value) + except KeyError: + meta[key] = [value] + else: + m2 = META_MORE_RE.match(line) + if m2 and key: + # Add another line to existing key + meta[key].append(m2.group('value').strip()) + else: + lines.insert(0, line) + break # no meta data - done + self.markdown.Meta = meta + return lines + + +def makeExtension(*args, **kwargs): + return MetaExtension(*args, **kwargs)
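The meta extension strips a leading key/value block from the source and exposes it on the Markdown instance as `Meta`, a dict mapping lowercased keys to lists of values. A small sketch with illustrative keys:

    import markdown

    src = "Title: Release notes\nAuthor: Someone\n\nBody text starts here."
    md = markdown.Markdown(extensions=['markdown.extensions.meta'])
    body_html = md.convert(src)
    # md.Meta == {'title': ['Release notes'], 'author': ['Someone']}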
diff --git a/third_party/Python-Markdown/markdown/extensions/nl2br.py b/third_party/Python-Markdown/markdown/extensions/nl2br.py new file mode 100644 index 0000000..8acd60c --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/nl2br.py
@@ -0,0 +1,35 @@ +""" +NL2BR Extension +=============== + +A Python-Markdown extension to treat newlines as hard breaks; like +GitHub-flavored Markdown does. + +See <https://pythonhosted.org/Markdown/extensions/nl2br.html> +for documentation. + +Oringinal code Copyright 2011 [Brian Neal](http://deathofagremmie.com/) + +All changes Copyright 2011-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..inlinepatterns import SubstituteTagPattern + +BR_RE = r'\n' + + +class Nl2BrExtension(Extension): + + def extendMarkdown(self, md, md_globals): + br_tag = SubstituteTagPattern(BR_RE, 'br') + md.inlinePatterns.add('nl', br_tag, '_end') + + +def makeExtension(*args, **kwargs): + return Nl2BrExtension(*args, **kwargs)
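nl2br registers a single substitution pattern so that every newline inside a paragraph becomes a line break, mirroring GitHub-flavored Markdown. For example (import path assumed):

    import markdown

    # Without the extension the two lines join into one paragraph;
    # with it, a <br /> is emitted between them.
    html = markdown.markdown("line one\nline two",
                             extensions=['markdown.extensions.nl2br'])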
diff --git a/third_party/Python-Markdown/markdown/extensions/sane_lists.py b/third_party/Python-Markdown/markdown/extensions/sane_lists.py new file mode 100644 index 0000000..213c8a6 --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/sane_lists.py
@@ -0,0 +1,47 @@ +""" +Sane List Extension for Python-Markdown +======================================= + +Modify the behavior of Lists in Python-Markdown to act in a sane manor. + +See <https://pythonhosted.org/Markdown/extensions/sane_lists.html> +for documentation. + +Original code Copyright 2011 [Waylan Limberg](http://achinghead.com) + +All changes Copyright 2011-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..blockprocessors import OListProcessor, UListProcessor +import re + + +class SaneOListProcessor(OListProcessor): + + CHILD_RE = re.compile(r'^[ ]{0,3}((\d+\.))[ ]+(.*)') + SIBLING_TAGS = ['ol'] + + +class SaneUListProcessor(UListProcessor): + + CHILD_RE = re.compile(r'^[ ]{0,3}(([*+-]))[ ]+(.*)') + SIBLING_TAGS = ['ul'] + + +class SaneListExtension(Extension): + """ Add sane lists to Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Override existing Processors. """ + md.parser.blockprocessors['olist'] = SaneOListProcessor(md.parser) + md.parser.blockprocessors['ulist'] = SaneUListProcessor(md.parser) + + +def makeExtension(*args, **kwargs): + return SaneListExtension(*args, **kwargs)
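sane_lists swaps in stricter block processors so that ordered and unordered items are no longer folded into a single list. A quick illustration (the sample text is made up):

    import markdown

    text = "1. ordered item\n\n* unordered item"
    # With sane lists enabled this yields separate <ol> and <ul> blocks
    # instead of appending the bulleted item to the numbered list.
    html = markdown.markdown(text, extensions=['markdown.extensions.sane_lists'])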
diff --git a/third_party/Python-Markdown/markdown/extensions/smart_strong.py b/third_party/Python-Markdown/markdown/extensions/smart_strong.py new file mode 100644 index 0000000..58570bb5 --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/smart_strong.py
@@ -0,0 +1,41 @@ +''' +Smart_Strong Extension for Python-Markdown +========================================== + +This extention adds smarter handling of double underscores within words. + +See <https://pythonhosted.org/Markdown/extensions/smart_strong.html> +for documentation. + +Original code Copyright 2011 [Waylan Limberg](http://achinghead.com) + +All changes Copyright 2011-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +''' + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..inlinepatterns import SimpleTagPattern + +SMART_STRONG_RE = r'(?<!\w)(_{2})(?!_)(.+?)(?<!_)\2(?!\w)' +STRONG_RE = r'(\*{2})(.+?)\2' + + +class SmartEmphasisExtension(Extension): + """ Add smart_emphasis extension to Markdown class.""" + + def extendMarkdown(self, md, md_globals): + """ Modify inline patterns. """ + md.inlinePatterns['strong'] = SimpleTagPattern(STRONG_RE, 'strong') + md.inlinePatterns.add( + 'strong2', + SimpleTagPattern(SMART_STRONG_RE, 'strong'), + '>emphasis2' + ) + + +def makeExtension(*args, **kwargs): + return SmartEmphasisExtension(*args, **kwargs)
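Despite the class name (SmartEmphasisExtension), this file provides the smart_strong extension: double underscores inside a word are left alone, while `__strong__` around whole words still works. A sketch, import path assumed:

    import markdown

    # 'double__underscore__words' keeps its literal underscores;
    # '__bold__' still becomes <strong>bold</strong>.
    html = markdown.markdown("double__underscore__words and __bold__",
                             extensions=['markdown.extensions.smart_strong'])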
diff --git a/third_party/Python-Markdown/markdown/extensions/smarty.py b/third_party/Python-Markdown/markdown/extensions/smarty.py new file mode 100644 index 0000000..46e54c1 --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/smarty.py
@@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +''' +Smarty extension for Python-Markdown +==================================== + +Adds conversion of ASCII dashes, quotes and ellipses to their HTML +entity equivalents. + +See <https://pythonhosted.org/Markdown/extensions/smarty.html> +for documentation. + +Author: 2013, Dmitry Shachnev <mitya57@gmail.com> + +All changes Copyright 2013-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +SmartyPants license: + + Copyright (c) 2003 John Gruber <http://daringfireball.net/> + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + * Neither the name "SmartyPants" nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + This software is provided by the copyright holders and contributors "as + is" and any express or implied warranties, including, but not limited + to, the implied warranties of merchantability and fitness for a + particular purpose are disclaimed. In no event shall the copyright + owner or contributors be liable for any direct, indirect, incidental, + special, exemplary, or consequential damages (including, but not + limited to, procurement of substitute goods or services; loss of use, + data, or profits; or business interruption) however caused and on any + theory of liability, whether in contract, strict liability, or tort + (including negligence or otherwise) arising in any way out of the use + of this software, even if advised of the possibility of such damage. + + +smartypants.py license: + + smartypants.py is a derivative work of SmartyPants. + Copyright (c) 2004, 2007 Chad Miller <http://web.chad.org/> + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + This software is provided by the copyright holders and contributors "as + is" and any express or implied warranties, including, but not limited + to, the implied warranties of merchantability and fitness for a + particular purpose are disclaimed. In no event shall the copyright + owner or contributors be liable for any direct, indirect, incidental, + special, exemplary, or consequential damages (including, but not + limited to, procurement of substitute goods or services; loss of use, + data, or profits; or business interruption) however caused and on any + theory of liability, whether in contract, strict liability, or tort + (including negligence or otherwise) arising in any way out of the use + of this software, even if advised of the possibility of such damage. + +''' + + +from __future__ import unicode_literals +from . 
import Extension +from ..inlinepatterns import HtmlPattern +from ..odict import OrderedDict +from ..treeprocessors import InlineProcessor + + +# Constants for quote education. +punctClass = r"""[!"#\$\%'()*+,-.\/:;<=>?\@\[\\\]\^_`{|}~]""" +endOfWordClass = r"[\s.,;:!?)]" +closeClass = "[^\ \t\r\n\[\{\(\-\u0002\u0003]" + +openingQuotesBase = ( + '(\s' # a whitespace char + '| ' # or a non-breaking space entity + '|--' # or dashes + '|–|—' # or unicode + '|&[mn]dash;' # or named dash entities + '|–|—' # or decimal entities + ')' +) + +substitutions = { + 'mdash': '—', + 'ndash': '–', + 'ellipsis': '…', + 'left-angle-quote': '«', + 'right-angle-quote': '»', + 'left-single-quote': '‘', + 'right-single-quote': '’', + 'left-double-quote': '“', + 'right-double-quote': '”', +} + + +# Special case if the very first character is a quote +# followed by punctuation at a non-word-break. Close the quotes by brute force: +singleQuoteStartRe = r"^'(?=%s\B)" % punctClass +doubleQuoteStartRe = r'^"(?=%s\B)' % punctClass + +# Special case for double sets of quotes, e.g.: +# <p>He said, "'Quoted' words in a larger quote."</p> +doubleQuoteSetsRe = r""""'(?=\w)""" +singleQuoteSetsRe = r"""'"(?=\w)""" + +# Special case for decade abbreviations (the '80s): +decadeAbbrRe = r"(?<!\w)'(?=\d{2}s)" + +# Get most opening double quotes: +openingDoubleQuotesRegex = r'%s"(?=\w)' % openingQuotesBase + +# Double closing quotes: +closingDoubleQuotesRegex = r'"(?=\s)' +closingDoubleQuotesRegex2 = '(?<=%s)"' % closeClass + +# Get most opening single quotes: +openingSingleQuotesRegex = r"%s'(?=\w)" % openingQuotesBase + +# Single closing quotes: +closingSingleQuotesRegex = r"(?<=%s)'(?!\s|s\b|\d)" % closeClass +closingSingleQuotesRegex2 = r"(?<=%s)'(\s|s\b)" % closeClass + +# All remaining quotes should be opening ones +remainingSingleQuotesRegex = "'" +remainingDoubleQuotesRegex = '"' + + +class SubstituteTextPattern(HtmlPattern): + def __init__(self, pattern, replace, markdown_instance): + """ Replaces matches with some text. 
""" + HtmlPattern.__init__(self, pattern) + self.replace = replace + self.markdown = markdown_instance + + def handleMatch(self, m): + result = '' + for part in self.replace: + if isinstance(part, int): + result += m.group(part) + else: + result += self.markdown.htmlStash.store(part, safe=True) + return result + + +class SmartyExtension(Extension): + def __init__(self, *args, **kwargs): + self.config = { + 'smart_quotes': [True, 'Educate quotes'], + 'smart_angled_quotes': [False, 'Educate angled quotes'], + 'smart_dashes': [True, 'Educate dashes'], + 'smart_ellipses': [True, 'Educate ellipses'], + 'substitutions': [{}, 'Overwrite default substitutions'], + } + super(SmartyExtension, self).__init__(*args, **kwargs) + self.substitutions = dict(substitutions) + self.substitutions.update(self.getConfig('substitutions', default={})) + + def _addPatterns(self, md, patterns, serie): + for ind, pattern in enumerate(patterns): + pattern += (md,) + pattern = SubstituteTextPattern(*pattern) + after = ('>smarty-%s-%d' % (serie, ind - 1) if ind else '_begin') + name = 'smarty-%s-%d' % (serie, ind) + self.inlinePatterns.add(name, pattern, after) + + def educateDashes(self, md): + emDashesPattern = SubstituteTextPattern( + r'(?<!-)---(?!-)', (self.substitutions['mdash'],), md + ) + enDashesPattern = SubstituteTextPattern( + r'(?<!-)--(?!-)', (self.substitutions['ndash'],), md + ) + self.inlinePatterns.add('smarty-em-dashes', emDashesPattern, '_begin') + self.inlinePatterns.add( + 'smarty-en-dashes', enDashesPattern, '>smarty-em-dashes' + ) + + def educateEllipses(self, md): + ellipsesPattern = SubstituteTextPattern( + r'(?<!\.)\.{3}(?!\.)', (self.substitutions['ellipsis'],), md + ) + self.inlinePatterns.add('smarty-ellipses', ellipsesPattern, '_begin') + + def educateAngledQuotes(self, md): + leftAngledQuotePattern = SubstituteTextPattern( + r'\<\<', (self.substitutions['left-angle-quote'],), md + ) + rightAngledQuotePattern = SubstituteTextPattern( + r'\>\>', (self.substitutions['right-angle-quote'],), md + ) + self.inlinePatterns.add( + 'smarty-left-angle-quotes', leftAngledQuotePattern, '_begin' + ) + self.inlinePatterns.add( + 'smarty-right-angle-quotes', + rightAngledQuotePattern, + '>smarty-left-angle-quotes' + ) + + def educateQuotes(self, md): + lsquo = self.substitutions['left-single-quote'] + rsquo = self.substitutions['right-single-quote'] + ldquo = self.substitutions['left-double-quote'] + rdquo = self.substitutions['right-double-quote'] + patterns = ( + (singleQuoteStartRe, (rsquo,)), + (doubleQuoteStartRe, (rdquo,)), + (doubleQuoteSetsRe, (ldquo + lsquo,)), + (singleQuoteSetsRe, (lsquo + ldquo,)), + (decadeAbbrRe, (rsquo,)), + (openingSingleQuotesRegex, (2, lsquo)), + (closingSingleQuotesRegex, (rsquo,)), + (closingSingleQuotesRegex2, (rsquo, 2)), + (remainingSingleQuotesRegex, (lsquo,)), + (openingDoubleQuotesRegex, (2, ldquo)), + (closingDoubleQuotesRegex, (rdquo,)), + (closingDoubleQuotesRegex2, (rdquo,)), + (remainingDoubleQuotesRegex, (ldquo,)) + ) + self._addPatterns(md, patterns, 'quotes') + + def extendMarkdown(self, md, md_globals): + configs = self.getConfigs() + self.inlinePatterns = OrderedDict() + if configs['smart_ellipses']: + self.educateEllipses(md) + if configs['smart_quotes']: + self.educateQuotes(md) + if configs['smart_angled_quotes']: + self.educateAngledQuotes(md) + if configs['smart_dashes']: + self.educateDashes(md) + inlineProcessor = InlineProcessor(md) + inlineProcessor.inlinePatterns = self.inlinePatterns + md.treeprocessors.add('smarty', inlineProcessor, 
'_end') + md.ESCAPED_CHARS.extend(['"', "'"]) + + +def makeExtension(*args, **kwargs): + return SmartyExtension(*args, **kwargs)
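smarty runs its own InlineProcessor at the end of tree processing to replace straight quotes, double/triple dashes and `...` with the configured entities; the individual educators can be switched off through the config keys listed above. A sketch with illustrative values:

    import markdown

    html = markdown.markdown(
        '"Hello" -- world...',
        extensions=['markdown.extensions.smarty'],
        extension_configs={'markdown.extensions.smarty': {'smart_dashes': False}}
    )
    # Quotes and the ellipsis are educated; the double dash is left untouched.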
diff --git a/third_party/Python-Markdown/markdown/extensions/tables.py b/third_party/Python-Markdown/markdown/extensions/tables.py new file mode 100644 index 0000000..368321d --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/tables.py
@@ -0,0 +1,102 @@ +""" +Tables Extension for Python-Markdown +==================================== + +Added parsing of tables to Python-Markdown. + +See <https://pythonhosted.org/Markdown/extensions/tables.html> +for documentation. + +Original code Copyright 2009 [Waylan Limberg](http://achinghead.com) + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..blockprocessors import BlockProcessor +from ..util import etree + + +class TableProcessor(BlockProcessor): + """ Process Tables. """ + + def test(self, parent, block): + rows = block.split('\n') + return (len(rows) > 1 and '|' in rows[0] and + '|' in rows[1] and '-' in rows[1] and + rows[1].strip()[0] in ['|', ':', '-']) + + def run(self, parent, blocks): + """ Parse a table block and build table. """ + block = blocks.pop(0).split('\n') + header = block[0].strip() + seperator = block[1].strip() + rows = [] if len(block) < 3 else block[2:] + # Get format type (bordered by pipes or not) + border = False + if header.startswith('|'): + border = True + # Get alignment of columns + align = [] + for c in self._split_row(seperator, border): + if c.startswith(':') and c.endswith(':'): + align.append('center') + elif c.startswith(':'): + align.append('left') + elif c.endswith(':'): + align.append('right') + else: + align.append(None) + # Build table + table = etree.SubElement(parent, 'table') + thead = etree.SubElement(table, 'thead') + self._build_row(header, thead, align, border) + tbody = etree.SubElement(table, 'tbody') + for row in rows: + self._build_row(row.strip(), tbody, align, border) + + def _build_row(self, row, parent, align, border): + """ Given a row of text, build table cells. """ + tr = etree.SubElement(parent, 'tr') + tag = 'td' + if parent.tag == 'thead': + tag = 'th' + cells = self._split_row(row, border) + # We use align here rather than cells to ensure every row + # contains the same number of columns. + for i, a in enumerate(align): + c = etree.SubElement(tr, tag) + try: + c.text = cells[i].strip() + except IndexError: # pragma: no cover + c.text = "" + if a: + c.set('align', a) + + def _split_row(self, row, border): + """ split a row of text into list of cells. """ + if border: + if row.startswith('|'): + row = row[1:] + if row.endswith('|'): + row = row[:-1] + return row.split('|') + + +class TableExtension(Extension): + """ Add tables to Markdown. """ + + def extendMarkdown(self, md, md_globals): + """ Add an instance of TableProcessor to BlockParser. """ + md.parser.blockprocessors.add('table', + TableProcessor(md.parser), + '<hashheader') + + +def makeExtension(*args, **kwargs): + return TableExtension(*args, **kwargs)
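The tables extension recognizes a pipe-delimited header row followed by a separator row of dashes, with optional ':' markers controlling alignment. A minimal table source, content illustrative:

    import markdown

    text = (
        "| First | Second |\n"
        "| :---- | -----: |\n"
        "| a     | b      |"
    )
    # Produces <table><thead>...</thead><tbody>...</tbody></table> with
    # align="left" / align="right" set on the cells per the separator row.
    html = markdown.markdown(text, extensions=['markdown.extensions.tables'])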
diff --git a/third_party/Python-Markdown/markdown/extensions/toc.py b/third_party/Python-Markdown/markdown/extensions/toc.py new file mode 100644 index 0000000..b3cf898f --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/toc.py
@@ -0,0 +1,309 @@ +""" +Table of Contents Extension for Python-Markdown +=============================================== + +See <https://pythonhosted.org/Markdown/extensions/toc.html> +for documentation. + +Oringinal code Copyright 2008 [Jack Miller](http://codezen.org) + +All changes Copyright 2008-2014 The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..treeprocessors import Treeprocessor +from ..util import etree, parseBoolValue, AMP_SUBSTITUTE, HTML_PLACEHOLDER_RE, string_type +import re +import unicodedata + + +def slugify(value, separator): + """ Slugify a string, to make it URL friendly. """ + value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') + value = re.sub('[^\w\s-]', '', value.decode('ascii')).strip().lower() + return re.sub('[%s\s]+' % separator, separator, value) + + +IDCOUNT_RE = re.compile(r'^(.*)_([0-9]+)$') + + +def unique(id, ids): + """ Ensure id is unique in set of ids. Append '_1', '_2'... if not """ + while id in ids or not id: + m = IDCOUNT_RE.match(id) + if m: + id = '%s_%d' % (m.group(1), int(m.group(2))+1) + else: + id = '%s_%d' % (id, 1) + ids.add(id) + return id + + +def stashedHTML2text(text, md): + """ Extract raw HTML from stash, reduce to plain text and swap with placeholder. """ + def _html_sub(m): + """ Substitute raw html with plain text. """ + try: + raw, safe = md.htmlStash.rawHtmlBlocks[int(m.group(1))] + except (IndexError, TypeError): # pragma: no cover + return m.group(0) + if md.safeMode and not safe: # pragma: no cover + return '' + # Strip out tags and entities - leaveing text + return re.sub(r'(<[^>]+>)|(&[\#a-zA-Z0-9]+;)', '', raw) + + return HTML_PLACEHOLDER_RE.sub(_html_sub, text) + + +def nest_toc_tokens(toc_list): + """Given an unsorted list with errors and skips, return a nested one. 
+ [{'level': 1}, {'level': 2}] + => + [{'level': 1, 'children': [{'level': 2, 'children': []}]}] + + A wrong list is also converted: + [{'level': 2}, {'level': 1}] + => + [{'level': 2, 'children': []}, {'level': 1, 'children': []}] + """ + + ordered_list = [] + if len(toc_list): + # Initialize everything by processing the first entry + last = toc_list.pop(0) + last['children'] = [] + levels = [last['level']] + ordered_list.append(last) + parents = [] + + # Walk the rest nesting the entries properly + while toc_list: + t = toc_list.pop(0) + current_level = t['level'] + t['children'] = [] + + # Reduce depth if current level < last item's level + if current_level < levels[-1]: + # Pop last level since we know we are less than it + levels.pop() + + # Pop parents and levels we are less than or equal to + to_pop = 0 + for p in reversed(parents): + if current_level <= p['level']: + to_pop += 1 + else: # pragma: no cover + break + if to_pop: + levels = levels[:-to_pop] + parents = parents[:-to_pop] + + # Note current level as last + levels.append(current_level) + + # Level is the same, so append to + # the current parent (if available) + if current_level == levels[-1]: + (parents[-1]['children'] if parents + else ordered_list).append(t) + + # Current level is > last item's level, + # So make last item a parent and append current as child + else: + last['children'].append(t) + parents.append(last) + levels.append(current_level) + last = t + + return ordered_list + + +class TocTreeprocessor(Treeprocessor): + def __init__(self, md, config): + super(TocTreeprocessor, self).__init__(md) + + self.marker = config["marker"] + self.title = config["title"] + self.base_level = int(config["baselevel"]) - 1 + self.slugify = config["slugify"] + self.sep = config["separator"] + self.use_anchors = parseBoolValue(config["anchorlink"]) + self.use_permalinks = parseBoolValue(config["permalink"], False) + if self.use_permalinks is None: + self.use_permalinks = config["permalink"] + + self.header_rgx = re.compile("[Hh][123456]") + + def iterparent(self, root): + ''' Iterator wrapper to get parent and child all at once. ''' + for parent in root.iter(): + for child in parent: + yield parent, child + + def replace_marker(self, root, elem): + ''' Replace marker with elem. ''' + for (p, c) in self.iterparent(root): + text = ''.join(c.itertext()).strip() + if not text: + continue + + # To keep the output from screwing up the + # validation by putting a <div> inside of a <p> + # we actually replace the <p> in its entirety. + # We do not allow the marker inside a header as that + # would causes an enless loop of placing a new TOC + # inside previously generated TOC. + if c.text and c.text.strip() == self.marker and \ + not self.header_rgx.match(c.tag) and c.tag not in ['pre', 'code']: + for i in range(len(p)): + if p[i] == c: + p[i] = elem + break + + def set_level(self, elem): + ''' Adjust header level according to base level. 
''' + level = int(elem.tag[-1]) + self.base_level + if level > 6: + level = 6 + elem.tag = 'h%d' % level + + def add_anchor(self, c, elem_id): # @ReservedAssignment + anchor = etree.Element("a") + anchor.text = c.text + anchor.attrib["href"] = "#" + elem_id + anchor.attrib["class"] = "toclink" + c.text = "" + for elem in c: + anchor.append(elem) + c.remove(elem) + c.append(anchor) + + def add_permalink(self, c, elem_id): + permalink = etree.Element("a") + permalink.text = ("%spara;" % AMP_SUBSTITUTE + if self.use_permalinks is True + else self.use_permalinks) + permalink.attrib["href"] = "#" + elem_id + permalink.attrib["class"] = "headerlink" + permalink.attrib["title"] = "Permanent link" + c.append(permalink) + + def build_toc_div(self, toc_list): + """ Return a string div given a toc list. """ + div = etree.Element("div") + div.attrib["class"] = "toc" + + # Add title to the div + if self.title: + header = etree.SubElement(div, "span") + header.attrib["class"] = "toctitle" + header.text = self.title + + def build_etree_ul(toc_list, parent): + ul = etree.SubElement(parent, "ul") + for item in toc_list: + # List item link, to be inserted into the toc div + li = etree.SubElement(ul, "li") + link = etree.SubElement(li, "a") + link.text = item.get('name', '') + link.attrib["href"] = '#' + item.get('id', '') + if item['children']: + build_etree_ul(item['children'], li) + return ul + + build_etree_ul(toc_list, div) + prettify = self.markdown.treeprocessors.get('prettify') + if prettify: + prettify.run(div) + return div + + def run(self, doc): + # Get a list of id attributes + used_ids = set() + for el in doc.iter(): + if "id" in el.attrib: + used_ids.add(el.attrib["id"]) + + toc_tokens = [] + for el in doc.iter(): + if isinstance(el.tag, string_type) and self.header_rgx.match(el.tag): + self.set_level(el) + text = ''.join(el.itertext()).strip() + + # Do not override pre-existing ids + if "id" not in el.attrib: + innertext = stashedHTML2text(text, self.markdown) + el.attrib["id"] = unique(self.slugify(innertext, self.sep), used_ids) + + toc_tokens.append({ + 'level': int(el.tag[-1]), + 'id': el.attrib["id"], + 'name': text + }) + + if self.use_anchors: + self.add_anchor(el, el.attrib["id"]) + if self.use_permalinks: + self.add_permalink(el, el.attrib["id"]) + + div = self.build_toc_div(nest_toc_tokens(toc_tokens)) + if self.marker: + self.replace_marker(doc, div) + + # serialize and attach to markdown instance. + toc = self.markdown.serializer(div) + for pp in self.markdown.postprocessors.values(): + toc = pp.run(toc) + self.markdown.toc = toc + + +class TocExtension(Extension): + + TreeProcessorClass = TocTreeprocessor + + def __init__(self, *args, **kwargs): + self.config = { + "marker": ['[TOC]', + 'Text to find and replace with Table of Contents - ' + 'Set to an empty string to disable. Defaults to "[TOC]"'], + "title": ["", + "Title to insert into TOC <div> - " + "Defaults to an empty string"], + "anchorlink": [False, + "True if header should be a self link - " + "Defaults to False"], + "permalink": [0, + "True or link text if a Sphinx-style permalink should " + "be added - Defaults to False"], + "baselevel": ['1', 'Base level for headers.'], + "slugify": [slugify, + "Function to generate anchors based on header text - " + "Defaults to the headerid ext's slugify function."], + 'separator': ['-', 'Word separator. 
Defaults to "-".'] + } + + super(TocExtension, self).__init__(*args, **kwargs) + + def extendMarkdown(self, md, md_globals): + md.registerExtension(self) + self.md = md + self.reset() + tocext = self.TreeProcessorClass(md, self.getConfigs()) + # Headerid ext is set to '>prettify'. With this set to '_end', + # it should always come after headerid ext (and honor ids assinged + # by the header id extension) if both are used. Same goes for + # attr_list extension. This must come last because we don't want + # to redefine ids after toc is created. But we do want toc prettified. + md.treeprocessors.add("toc", tocext, "_end") + + def reset(self): + self.md.toc = '' + + +def makeExtension(*args, **kwargs): + return TocExtension(*args, **kwargs)
diff --git a/third_party/Python-Markdown/markdown/extensions/wikilinks.py b/third_party/Python-Markdown/markdown/extensions/wikilinks.py new file mode 100644 index 0000000..94e1b67 --- /dev/null +++ b/third_party/Python-Markdown/markdown/extensions/wikilinks.py
@@ -0,0 +1,89 @@ +''' +WikiLinks Extension for Python-Markdown +====================================== + +Converts [[WikiLinks]] to relative links. + +See <https://pythonhosted.org/Markdown/extensions/wikilinks.html> +for documentation. + +Original code Copyright [Waylan Limberg](http://achinghead.com/). + +All changes Copyright The Python Markdown Project + +License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + +''' + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import Extension +from ..inlinepatterns import Pattern +from ..util import etree +import re + + +def build_url(label, base, end): + """ Build a url from the label, a base, and an end. """ + clean_label = re.sub(r'([ ]+_)|(_[ ]+)|([ ]+)', '_', label) + return '%s%s%s' % (base, clean_label, end) + + +class WikiLinkExtension(Extension): + + def __init__(self, *args, **kwargs): + self.config = { + 'base_url': ['/', 'String to append to beginning or URL.'], + 'end_url': ['/', 'String to append to end of URL.'], + 'html_class': ['wikilink', 'CSS hook. Leave blank for none.'], + 'build_url': [build_url, 'Callable formats URL from label.'], + } + + super(WikiLinkExtension, self).__init__(*args, **kwargs) + + def extendMarkdown(self, md, md_globals): + self.md = md + + # append to end of inline patterns + WIKILINK_RE = r'\[\[([\w0-9_ -]+)\]\]' + wikilinkPattern = WikiLinks(WIKILINK_RE, self.getConfigs()) + wikilinkPattern.md = md + md.inlinePatterns.add('wikilink', wikilinkPattern, "<not_strong") + + +class WikiLinks(Pattern): + def __init__(self, pattern, config): + super(WikiLinks, self).__init__(pattern) + self.config = config + + def handleMatch(self, m): + if m.group(2).strip(): + base_url, end_url, html_class = self._getMeta() + label = m.group(2).strip() + url = self.config['build_url'](label, base_url, end_url) + a = etree.Element('a') + a.text = label + a.set('href', url) + if html_class: + a.set('class', html_class) + else: + a = '' + return a + + def _getMeta(self): + """ Return meta data or config data. """ + base_url = self.config['base_url'] + end_url = self.config['end_url'] + html_class = self.config['html_class'] + if hasattr(self.md, 'Meta'): + if 'wiki_base_url' in self.md.Meta: + base_url = self.md.Meta['wiki_base_url'][0] + if 'wiki_end_url' in self.md.Meta: + end_url = self.md.Meta['wiki_end_url'][0] + if 'wiki_html_class' in self.md.Meta: + html_class = self.md.Meta['wiki_html_class'][0] + return base_url, end_url, html_class + + +def makeExtension(*args, **kwargs): + return WikiLinkExtension(*args, **kwargs)
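wikilinks turns `[[Page Name]]` into a relative link built from base_url, the underscore-joined label and end_url; the defaults can be overridden via config or via wiki_* metadata when the meta extension is also loaded. Sketch:

    import markdown

    # With the default config this yields:
    #   <a class="wikilink" href="/Some_Page/">Some Page</a>
    html = markdown.markdown("See [[Some Page]] for details.",
                             extensions=['markdown.extensions.wikilinks'])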
diff --git a/third_party/Python-Markdown/markdown/inlinepatterns.py b/third_party/Python-Markdown/markdown/inlinepatterns.py new file mode 100644 index 0000000..95d358d7 --- /dev/null +++ b/third_party/Python-Markdown/markdown/inlinepatterns.py
@@ -0,0 +1,529 @@ +""" +INLINE PATTERNS +============================================================================= + +Inline patterns such as *emphasis* are handled by means of auxiliary +objects, one per pattern. Pattern objects must be instances of classes +that extend markdown.Pattern. Each pattern object uses a single regular +expression and needs support the following methods: + + pattern.getCompiledRegExp() # returns a regular expression + + pattern.handleMatch(m) # takes a match object and returns + # an ElementTree element or just plain text + +All of python markdown's built-in patterns subclass from Pattern, +but you can add additional patterns that don't. + +Also note that all the regular expressions used by inline must +capture the whole block. For this reason, they all start with +'^(.*)' and end with '(.*)!'. In case with built-in expression +Pattern takes care of adding the "^(.*)" and "(.*)!". + +Finally, the order in which regular expressions are applied is very +important - e.g. if we first replace http://.../ links with <a> tags +and _then_ try to replace inline html, we would end up with a mess. +So, we apply the expressions in the following order: + +* escape and backticks have to go before everything else, so + that we can preempt any markdown patterns by escaping them. + +* then we handle auto-links (must be done before inline html) + +* then we handle inline HTML. At this point we will simply + replace all inline HTML strings with a placeholder and add + the actual HTML to a hash. + +* then inline images (must be done before links) + +* then bracketed links, first regular then reference-style + +* finally we apply strong and emphasis +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import util +from . import odict +import re +try: # pragma: no cover + from urllib.parse import urlparse, urlunparse +except ImportError: # pragma: no cover + from urlparse import urlparse, urlunparse +try: # pragma: no cover + from html import entities +except ImportError: # pragma: no cover + import htmlentitydefs as entities + + +def build_inlinepatterns(md_instance, **kwargs): + """ Build the default set of inline patterns for Markdown. 
""" + inlinePatterns = odict.OrderedDict() + inlinePatterns["backtick"] = BacktickPattern(BACKTICK_RE) + inlinePatterns["escape"] = EscapePattern(ESCAPE_RE, md_instance) + inlinePatterns["reference"] = ReferencePattern(REFERENCE_RE, md_instance) + inlinePatterns["link"] = LinkPattern(LINK_RE, md_instance) + inlinePatterns["image_link"] = ImagePattern(IMAGE_LINK_RE, md_instance) + inlinePatterns["image_reference"] = ImageReferencePattern( + IMAGE_REFERENCE_RE, md_instance + ) + inlinePatterns["short_reference"] = ReferencePattern( + SHORT_REF_RE, md_instance + ) + inlinePatterns["autolink"] = AutolinkPattern(AUTOLINK_RE, md_instance) + inlinePatterns["automail"] = AutomailPattern(AUTOMAIL_RE, md_instance) + inlinePatterns["linebreak"] = SubstituteTagPattern(LINE_BREAK_RE, 'br') + if md_instance.safeMode != 'escape': + inlinePatterns["html"] = HtmlPattern(HTML_RE, md_instance) + inlinePatterns["entity"] = HtmlPattern(ENTITY_RE, md_instance) + inlinePatterns["not_strong"] = SimpleTextPattern(NOT_STRONG_RE) + inlinePatterns["em_strong"] = DoubleTagPattern(EM_STRONG_RE, 'strong,em') + inlinePatterns["strong_em"] = DoubleTagPattern(STRONG_EM_RE, 'em,strong') + inlinePatterns["strong"] = SimpleTagPattern(STRONG_RE, 'strong') + inlinePatterns["emphasis"] = SimpleTagPattern(EMPHASIS_RE, 'em') + if md_instance.smart_emphasis: + inlinePatterns["emphasis2"] = SimpleTagPattern(SMART_EMPHASIS_RE, 'em') + else: + inlinePatterns["emphasis2"] = SimpleTagPattern(EMPHASIS_2_RE, 'em') + return inlinePatterns + +""" +The actual regular expressions for patterns +----------------------------------------------------------------------------- +""" + +NOBRACKET = r'[^\]\[]*' +BRK = ( + r'\[(' + + (NOBRACKET + r'(\[')*6 + + (NOBRACKET + r'\])*')*6 + + NOBRACKET + r')\]' +) +NOIMG = r'(?<!\!)' + +# `e=f()` or ``e=f("`")`` +BACKTICK_RE = r'(?<!\\)(`+)(.+?)(?<!`)\2(?!`)' + +# \< +ESCAPE_RE = r'\\(.)' + +# *emphasis* +EMPHASIS_RE = r'(\*)([^\*]+)\2' + +# **strong** +STRONG_RE = r'(\*{2}|_{2})(.+?)\2' + +# ***strongem*** or ***em*strong** +EM_STRONG_RE = r'(\*|_)\2{2}(.+?)\2(.*?)\2{2}' + +# ***strong**em* +STRONG_EM_RE = r'(\*|_)\2{2}(.+?)\2{2}(.*?)\2' + +# _smart_emphasis_ +SMART_EMPHASIS_RE = r'(?<!\w)(_)(?!_)(.+?)(?<!_)\2(?!\w)' + +# _emphasis_ +EMPHASIS_2_RE = r'(_)(.+?)\2' + +# [text](url) or [text](<url>) or [text](url "title") +LINK_RE = NOIMG + BRK + \ + r'''\(\s*(<.*?>|((?:(?:\(.*?\))|[^\(\)]))*?)\s*((['"])(.*?)\12\s*)?\)''' + +#  or  +IMAGE_LINK_RE = r'\!' + BRK + r'\s*\((<.*?>|([^")]+"[^"]*"|[^\)]*))\)' + +# [Google][3] +REFERENCE_RE = NOIMG + BRK + r'\s?\[([^\]]*)\]' + +# [Google] +SHORT_REF_RE = NOIMG + r'\[([^\]]+)\]' + +# ![alt text][2] +IMAGE_REFERENCE_RE = r'\!' 
+ BRK + '\s?\[([^\]]*)\]' + +# stand-alone * or _ +NOT_STRONG_RE = r'((^| )(\*|_)( |$))' + +# <http://www.123.com> +AUTOLINK_RE = r'<((?:[Ff]|[Hh][Tt])[Tt][Pp][Ss]?://[^>]*)>' + +# <me@example.com> +AUTOMAIL_RE = r'<([^> \!]*@[^> ]*)>' + +# <...> +HTML_RE = r'(\<([a-zA-Z/][^\>]*?|\!--.*?--)\>)' + +# & +ENTITY_RE = r'(&[\#a-zA-Z0-9]*;)' + +# two spaces at end of line +LINE_BREAK_RE = r' \n' + + +def dequote(string): + """Remove quotes from around a string.""" + if ((string.startswith('"') and string.endswith('"')) or + (string.startswith("'") and string.endswith("'"))): + return string[1:-1] + else: + return string + + +ATTR_RE = re.compile("\{@([^\}]*)=([^\}]*)}") # {@id=123} + + +def handleAttributes(text, parent): + """Set values of an element based on attribute definitions ({@id=123}).""" + def attributeCallback(match): + parent.set(match.group(1), match.group(2).replace('\n', ' ')) + return ATTR_RE.sub(attributeCallback, text) + + +""" +The pattern classes +----------------------------------------------------------------------------- +""" + + +class Pattern(object): + """Base class that inline patterns subclass. """ + + def __init__(self, pattern, markdown_instance=None): + """ + Create an instant of an inline pattern. + + Keyword arguments: + + * pattern: A regular expression that matches a pattern + + """ + self.pattern = pattern + self.compiled_re = re.compile("^(.*?)%s(.*?)$" % pattern, + re.DOTALL | re.UNICODE) + + # Api for Markdown to pass safe_mode into instance + self.safe_mode = False + if markdown_instance: + self.markdown = markdown_instance + + def getCompiledRegExp(self): + """ Return a compiled regular expression. """ + return self.compiled_re + + def handleMatch(self, m): + """Return a ElementTree element from the given match. + + Subclasses should override this method. + + Keyword arguments: + + * m: A re match object containing a match of the pattern. + + """ + pass # pragma: no cover + + def type(self): + """ Return class name, to define pattern type """ + return self.__class__.__name__ + + def unescape(self, text): + """ Return unescaped text given text with an inline placeholder. """ + try: + stash = self.markdown.treeprocessors['inline'].stashed_nodes + except KeyError: # pragma: no cover + return text + + def itertext(el): # pragma: no cover + ' Reimplement Element.itertext for older python versions ' + tag = el.tag + if not isinstance(tag, util.string_type) and tag is not None: + return + if el.text: + yield el.text + for e in el: + for s in itertext(e): + yield s + if e.tail: + yield e.tail + + def get_stash(m): + id = m.group(1) + if id in stash: + value = stash.get(id) + if isinstance(value, util.string_type): + return value + else: + # An etree Element - return text content only + return ''.join(itertext(value)) + return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text) + + +class SimpleTextPattern(Pattern): + """ Return a simple text of group(2) of a Pattern. """ + def handleMatch(self, m): + return m.group(2) + + +class EscapePattern(Pattern): + """ Return an escaped character. """ + + def handleMatch(self, m): + char = m.group(2) + if char in self.markdown.ESCAPED_CHARS: + return '%s%s%s' % (util.STX, ord(char), util.ETX) + else: + return None + + +class SimpleTagPattern(Pattern): + """ + Return element of type `tag` with a text attribute of group(3) + of a Pattern. 
+ + """ + def __init__(self, pattern, tag): + Pattern.__init__(self, pattern) + self.tag = tag + + def handleMatch(self, m): + el = util.etree.Element(self.tag) + el.text = m.group(3) + return el + + +class SubstituteTagPattern(SimpleTagPattern): + """ Return an element of type `tag` with no children. """ + def handleMatch(self, m): + return util.etree.Element(self.tag) + + +class BacktickPattern(Pattern): + """ Return a `<code>` element containing the matching text. """ + def __init__(self, pattern): + Pattern.__init__(self, pattern) + self.tag = "code" + + def handleMatch(self, m): + el = util.etree.Element(self.tag) + el.text = util.AtomicString(m.group(3).strip()) + return el + + +class DoubleTagPattern(SimpleTagPattern): + """Return a ElementTree element nested in tag2 nested in tag1. + + Useful for strong emphasis etc. + + """ + def handleMatch(self, m): + tag1, tag2 = self.tag.split(",") + el1 = util.etree.Element(tag1) + el2 = util.etree.SubElement(el1, tag2) + el2.text = m.group(3) + if len(m.groups()) == 5: + el2.tail = m.group(4) + return el1 + + +class HtmlPattern(Pattern): + """ Store raw inline html and return a placeholder. """ + def handleMatch(self, m): + rawhtml = self.unescape(m.group(2)) + place_holder = self.markdown.htmlStash.store(rawhtml) + return place_holder + + def unescape(self, text): + """ Return unescaped text given text with an inline placeholder. """ + try: + stash = self.markdown.treeprocessors['inline'].stashed_nodes + except KeyError: # pragma: no cover + return text + + def get_stash(m): + id = m.group(1) + value = stash.get(id) + if value is not None: + try: + return self.markdown.serializer(value) + except: + return '\%s' % value + + return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text) + + +class LinkPattern(Pattern): + """ Return a link element from the given match. """ + def handleMatch(self, m): + el = util.etree.Element("a") + el.text = m.group(2) + title = m.group(13) + href = m.group(9) + + if href: + if href[0] == "<": + href = href[1:-1] + el.set("href", self.sanitize_url(self.unescape(href.strip()))) + else: + el.set("href", "") + + if title: + title = dequote(self.unescape(title)) + el.set("title", title) + return el + + def sanitize_url(self, url): + """ + Sanitize a url against xss attacks in "safe_mode". + + Rather than specifically blacklisting `javascript:alert("XSS")` and all + its aliases (see <http://ha.ckers.org/xss.html>), we whitelist known + safe url formats. Most urls contain a network location, however some + are known not to (i.e.: mailto links). Script urls do not contain a + location. Additionally, for `javascript:...`, the scheme would be + "javascript" but some aliases will appear to `urlparse()` to have no + scheme. On top of that relative links (i.e.: "foo/bar.html") have no + scheme. Therefore we must check "path", "parameters", "query" and + "fragment" for any literal colons. We don't check "scheme" for colons + because it *should* never have any and "netloc" must allow the form: + `username:password@host:port`. + + """ + if not self.markdown.safeMode: + # Return immediately bipassing parsing. + return url + + try: + scheme, netloc, path, params, query, fragment = url = urlparse(url) + except ValueError: # pragma: no cover + # Bad url - so bad it couldn't be parsed. + return '' + + locless_schemes = ['', 'mailto', 'news'] + allowed_schemes = locless_schemes + ['http', 'https', 'ftp', 'ftps'] + if scheme not in allowed_schemes: + # Not a known (allowed) scheme. Not safe. 
+ return '' + + if netloc == '' and scheme not in locless_schemes: # pragma: no cover + # This should not happen. Treat as suspect. + return '' + + for part in url[2:]: + if ":" in part: + # A colon in "path", "parameters", "query" + # or "fragment" is suspect. + return '' + + # Url passes all tests. Return url as-is. + return urlunparse(url) + + +class ImagePattern(LinkPattern): + """ Return a img element from the given match. """ + def handleMatch(self, m): + el = util.etree.Element("img") + src_parts = m.group(9).split() + if src_parts: + src = src_parts[0] + if src[0] == "<" and src[-1] == ">": + src = src[1:-1] + el.set('src', self.sanitize_url(self.unescape(src))) + else: + el.set('src', "") + if len(src_parts) > 1: + el.set('title', dequote(self.unescape(" ".join(src_parts[1:])))) + + if self.markdown.enable_attributes: + truealt = handleAttributes(m.group(2), el) + else: + truealt = m.group(2) + + el.set('alt', self.unescape(truealt)) + return el + + +class ReferencePattern(LinkPattern): + """ Match to a stored reference and return link element. """ + + NEWLINE_CLEANUP_RE = re.compile(r'[ ]?\n', re.MULTILINE) + + def handleMatch(self, m): + try: + id = m.group(9).lower() + except IndexError: + id = None + if not id: + # if we got something like "[Google][]" or "[Goggle]" + # we'll use "google" as the id + id = m.group(2).lower() + + # Clean up linebreaks in id + id = self.NEWLINE_CLEANUP_RE.sub(' ', id) + if id not in self.markdown.references: # ignore undefined refs + return None + href, title = self.markdown.references[id] + + text = m.group(2) + return self.makeTag(href, title, text) + + def makeTag(self, href, title, text): + el = util.etree.Element('a') + + el.set('href', self.sanitize_url(href)) + if title: + el.set('title', title) + + el.text = text + return el + + +class ImageReferencePattern(ReferencePattern): + """ Match to a stored reference and return img element. """ + def makeTag(self, href, title, text): + el = util.etree.Element("img") + el.set("src", self.sanitize_url(href)) + if title: + el.set("title", title) + + if self.markdown.enable_attributes: + text = handleAttributes(text, el) + + el.set("alt", self.unescape(text)) + return el + + +class AutolinkPattern(Pattern): + """ Return a link Element given an autolink (`<http://example/com>`). """ + def handleMatch(self, m): + el = util.etree.Element("a") + el.set('href', self.unescape(m.group(2))) + el.text = util.AtomicString(m.group(2)) + return el + + +class AutomailPattern(Pattern): + """ + Return a mailto link Element given an automail link (`<foo@example.com>`). + """ + def handleMatch(self, m): + el = util.etree.Element('a') + email = self.unescape(m.group(2)) + if email.startswith("mailto:"): + email = email[len("mailto:"):] + + def codepoint2name(code): + """Return entity definition by code, or the code if not defined.""" + entity = entities.codepoint2name.get(code) + if entity: + return "%s%s;" % (util.AMP_SUBSTITUTE, entity) + else: + return "%s#%d;" % (util.AMP_SUBSTITUTE, code) + + letters = [codepoint2name(ord(letter)) for letter in email] + el.text = util.AtomicString(''.join(letters)) + + mailto = "mailto:" + email + mailto = "".join([util.AMP_SUBSTITUTE + '#%d;' % + ord(letter) for letter in mailto]) + el.set('href', mailto) + return el
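The Pattern classes above are the extension point for new inline syntax: each regex is wrapped as `^(.*?)pattern(.*?)$`, so group(2) is the pattern's own first group and SimpleTagPattern wraps group(3) in the given tag. A hypothetical strike-through extension as a sketch (the `~~` syntax and all names here are made up for illustration):

    import markdown
    from markdown.extensions import Extension
    from markdown.inlinepatterns import SimpleTagPattern

    DEL_RE = r'(~~)(.*?)~~'  # group(3) of the wrapped regex is the inner text

    class DelExtension(Extension):
        def extendMarkdown(self, md, md_globals):
            # Register after the built-in emphasis pattern.
            md.inlinePatterns.add('del', SimpleTagPattern(DEL_RE, 'del'), '>emphasis')

    html = markdown.markdown("this is ~~gone~~", extensions=[DelExtension()])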
diff --git a/third_party/Python-Markdown/markdown/odict.py b/third_party/Python-Markdown/markdown/odict.py new file mode 100644 index 0000000..584ad7c1 --- /dev/null +++ b/third_party/Python-Markdown/markdown/odict.py
@@ -0,0 +1,191 @@ +from __future__ import unicode_literals +from __future__ import absolute_import +from . import util +from copy import deepcopy + + +class OrderedDict(dict): + """ + A dictionary that keeps its keys in the order in which they're inserted. + + Copied from Django's SortedDict with some modifications. + + """ + def __new__(cls, *args, **kwargs): + instance = super(OrderedDict, cls).__new__(cls, *args, **kwargs) + instance.keyOrder = [] + return instance + + def __init__(self, data=None): + if data is None or isinstance(data, dict): + data = data or [] + super(OrderedDict, self).__init__(data) + self.keyOrder = list(data) if data else [] + else: + super(OrderedDict, self).__init__() + super_set = super(OrderedDict, self).__setitem__ + for key, value in data: + # Take the ordering from first key + if key not in self: + self.keyOrder.append(key) + # But override with last value in data (dict() does this) + super_set(key, value) + + def __deepcopy__(self, memo): + return self.__class__([(key, deepcopy(value, memo)) + for key, value in self.items()]) + + def __copy__(self): + # The Python's default copy implementation will alter the state + # of self. The reason for this seems complex but is likely related to + # subclassing dict. + return self.copy() + + def __setitem__(self, key, value): + if key not in self: + self.keyOrder.append(key) + super(OrderedDict, self).__setitem__(key, value) + + def __delitem__(self, key): + super(OrderedDict, self).__delitem__(key) + self.keyOrder.remove(key) + + def __iter__(self): + return iter(self.keyOrder) + + def __reversed__(self): + return reversed(self.keyOrder) + + def pop(self, k, *args): + result = super(OrderedDict, self).pop(k, *args) + try: + self.keyOrder.remove(k) + except ValueError: + # Key wasn't in the dictionary in the first place. No problem. + pass + return result + + def popitem(self): + result = super(OrderedDict, self).popitem() + self.keyOrder.remove(result[0]) + return result + + def _iteritems(self): + for key in self.keyOrder: + yield key, self[key] + + def _iterkeys(self): + for key in self.keyOrder: + yield key + + def _itervalues(self): + for key in self.keyOrder: + yield self[key] + + if util.PY3: # pragma: no cover + items = _iteritems + keys = _iterkeys + values = _itervalues + else: # pragma: no cover + iteritems = _iteritems + iterkeys = _iterkeys + itervalues = _itervalues + + def items(self): + return [(k, self[k]) for k in self.keyOrder] + + def keys(self): + return self.keyOrder[:] + + def values(self): + return [self[k] for k in self.keyOrder] + + def update(self, dict_): + for k in dict_: + self[k] = dict_[k] + + def setdefault(self, key, default): + if key not in self: + self.keyOrder.append(key) + return super(OrderedDict, self).setdefault(key, default) + + def value_for_index(self, index): + """Returns the value of the item at the given zero-based index.""" + return self[self.keyOrder[index]] + + def insert(self, index, key, value): + """Inserts the key, value pair before the item with the given index.""" + if key in self.keyOrder: + n = self.keyOrder.index(key) + del self.keyOrder[n] + if n < index: + index -= 1 + self.keyOrder.insert(index, key) + super(OrderedDict, self).__setitem__(key, value) + + def copy(self): + """Returns a copy of this object.""" + # This way of initializing the copy means it works for subclasses, too. + return self.__class__(self) + + def __repr__(self): + """ + Replaces the normal dict.__repr__ with a version that returns the keys + in their Ordered order. 
+ """ + return '{%s}' % ', '.join( + ['%r: %r' % (k, v) for k, v in self._iteritems()] + ) + + def clear(self): + super(OrderedDict, self).clear() + self.keyOrder = [] + + def index(self, key): + """ Return the index of a given key. """ + try: + return self.keyOrder.index(key) + except ValueError: + raise ValueError("Element '%s' was not found in OrderedDict" % key) + + def index_for_location(self, location): + """ Return index or None for a given location. """ + if location == '_begin': + i = 0 + elif location == '_end': + i = None + elif location.startswith('<') or location.startswith('>'): + i = self.index(location[1:]) + if location.startswith('>'): + if i >= len(self): + # last item + i = None + else: + i += 1 + else: + raise ValueError('Not a valid location: "%s". Location key ' + 'must start with a ">" or "<".' % location) + return i + + def add(self, key, value, location): + """ Insert by key location. """ + i = self.index_for_location(location) + if i is not None: + self.insert(i, key, value) + else: + self.__setitem__(key, value) + + def link(self, key, location): + """ Change location of an existing item. """ + n = self.keyOrder.index(key) + del self.keyOrder[n] + try: + i = self.index_for_location(location) + if i is not None: + self.keyOrder.insert(i, key) + else: + self.keyOrder.append(key) + except Exception as e: + # restore to prevent data loss and reraise + self.keyOrder.insert(n, key) + raise e
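This OrderedDict is what gives the preprocessor, inline pattern and treeprocessor registries their '<name', '>name', '_begin' and '_end' placement syntax used throughout the extensions above. A small standalone sketch (import path of the vendored module assumed):

    from markdown.odict import OrderedDict

    od = OrderedDict()
    od['first'] = 1
    od['last'] = 3
    od.add('middle', 2, '>first')   # insert directly after 'first'
    od.add('start', 0, '_begin')    # insert at the very beginning
    assert list(od.keys()) == ['start', 'first', 'middle', 'last']
    od.link('last', '<first')       # move an existing key before 'first'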
diff --git a/third_party/Python-Markdown/markdown/postprocessors.py b/third_party/Python-Markdown/markdown/postprocessors.py new file mode 100644 index 0000000..2d4dcb5 --- /dev/null +++ b/third_party/Python-Markdown/markdown/postprocessors.py
@@ -0,0 +1,108 @@ +""" +POST-PROCESSORS +============================================================================= + +Markdown also allows post-processors, which are similar to preprocessors in +that they need to implement a "run" method. However, they are run after core +processing. + +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import util +from . import odict +import re + + +def build_postprocessors(md_instance, **kwargs): + """ Build the default postprocessors for Markdown. """ + postprocessors = odict.OrderedDict() + postprocessors["raw_html"] = RawHtmlPostprocessor(md_instance) + postprocessors["amp_substitute"] = AndSubstitutePostprocessor() + postprocessors["unescape"] = UnescapePostprocessor() + return postprocessors + + +class Postprocessor(util.Processor): + """ + Postprocessors are run after the ElementTree it converted back into text. + + Each Postprocessor implements a "run" method that takes a pointer to a + text string, modifies it as necessary and returns a text string. + + Postprocessors must extend markdown.Postprocessor. + + """ + + def run(self, text): + """ + Subclasses of Postprocessor should implement a `run` method, which + takes the html document as a single text string and returns a + (possibly modified) string. + + """ + pass # pragma: no cover + + +class RawHtmlPostprocessor(Postprocessor): + """ Restore raw html to the document. """ + + def run(self, text): + """ Iterate over html stash and restore "safe" html. """ + for i in range(self.markdown.htmlStash.html_counter): + html, safe = self.markdown.htmlStash.rawHtmlBlocks[i] + if self.markdown.safeMode and not safe: + if str(self.markdown.safeMode).lower() == 'escape': + html = self.escape(html) + elif str(self.markdown.safeMode).lower() == 'remove': + html = '' + else: + html = self.markdown.html_replacement_text + if (self.isblocklevel(html) and + (safe or not self.markdown.safeMode)): + text = text.replace( + "<p>%s</p>" % + (self.markdown.htmlStash.get_placeholder(i)), + html + "\n" + ) + text = text.replace( + self.markdown.htmlStash.get_placeholder(i), html + ) + return text + + def escape(self, html): + """ Basic html escaping """ + html = html.replace('&', '&') + html = html.replace('<', '<') + html = html.replace('>', '>') + return html.replace('"', '"') + + def isblocklevel(self, html): + m = re.match(r'^\<\/?([^ >]+)', html) + if m: + if m.group(1)[0] in ('!', '?', '@', '%'): + # Comment, php etc... + return True + return util.isBlockLevel(m.group(1)) + return False + + +class AndSubstitutePostprocessor(Postprocessor): + """ Restore valid entities """ + + def run(self, text): + text = text.replace(util.AMP_SUBSTITUTE, "&") + return text + + +class UnescapePostprocessor(Postprocessor): + """ Restore escaped chars """ + + RE = re.compile('%s(\d+)%s' % (util.STX, util.ETX)) + + def unescape(self, m): + return util.int2str(int(m.group(1))) + + def run(self, text): + return self.RE.sub(self.unescape, text)
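Postprocessors operate on the serialized HTML string after the tree has been rendered; the footnotes extension above registers one to swap its placeholders for the final text. A hypothetical custom postprocessor as a sketch (the class names and the `{{version}}` marker are made up):

    import markdown
    from markdown.extensions import Extension
    from markdown.postprocessors import Postprocessor

    class MarkerPostprocessor(Postprocessor):
        """Replace a literal marker once the HTML string is assembled."""
        def run(self, text):
            return text.replace('{{version}}', '1.0')

    class MarkerExtension(Extension):
        def extendMarkdown(self, md, md_globals):
            md.postprocessors.add('marker', MarkerPostprocessor(md), '_end')

    html = markdown.markdown("Built with version {{version}}.",
                             extensions=[MarkerExtension()])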
diff --git a/third_party/Python-Markdown/markdown/preprocessors.py b/third_party/Python-Markdown/markdown/preprocessors.py new file mode 100644 index 0000000..7fd38d33 --- /dev/null +++ b/third_party/Python-Markdown/markdown/preprocessors.py
@@ -0,0 +1,345 @@ +""" +PRE-PROCESSORS +============================================================================= + +Preprocessors work on source text before we start doing anything too +complicated. +""" + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import util +from . import odict +import re + + +def build_preprocessors(md_instance, **kwargs): + """ Build the default set of preprocessors used by Markdown. """ + preprocessors = odict.OrderedDict() + preprocessors['normalize_whitespace'] = NormalizeWhitespace(md_instance) + if md_instance.safeMode != 'escape': + preprocessors["html_block"] = HtmlBlockPreprocessor(md_instance) + preprocessors["reference"] = ReferencePreprocessor(md_instance) + return preprocessors + + +class Preprocessor(util.Processor): + """ + Preprocessors are run after the text is broken into lines. + + Each preprocessor implements a "run" method that takes a pointer to a + list of lines of the document, modifies it as necessary and returns + either the same pointer or a pointer to a new list. + + Preprocessors must extend markdown.Preprocessor. + + """ + def run(self, lines): + """ + Each subclass of Preprocessor should override the `run` method, which + takes the document as a list of strings split by newlines and returns + the (possibly modified) list of lines. + + """ + pass # pragma: no cover + + +class NormalizeWhitespace(Preprocessor): + """ Normalize whitespace for consistant parsing. """ + + def run(self, lines): + source = '\n'.join(lines) + source = source.replace(util.STX, "").replace(util.ETX, "") + source = source.replace("\r\n", "\n").replace("\r", "\n") + "\n\n" + source = source.expandtabs(self.markdown.tab_length) + source = re.sub(r'(?<=\n) +\n', '\n', source) + return source.split('\n') + + +class HtmlBlockPreprocessor(Preprocessor): + """Remove html blocks from the text and store them for later retrieval.""" + + right_tag_patterns = ["</%s>", "%s>"] + attrs_pattern = r""" + \s+(?P<attr>[^>"'/= ]+)=(?P<q>['"])(?P<value>.*?)(?P=q) # attr="value" + | # OR + \s+(?P<attr1>[^>"'/= ]+)=(?P<value1>[^> ]+) # attr=value + | # OR + \s+(?P<attr2>[^>"'/= ]+) # attr + """ + left_tag_pattern = r'^\<(?P<tag>[^> ]+)(?P<attrs>(%s)*)\s*\/?\>?' 
% \ + attrs_pattern + attrs_re = re.compile(attrs_pattern, re.VERBOSE) + left_tag_re = re.compile(left_tag_pattern, re.VERBOSE) + markdown_in_raw = False + + def _get_left_tag(self, block): + m = self.left_tag_re.match(block) + if m: + tag = m.group('tag') + raw_attrs = m.group('attrs') + attrs = {} + if raw_attrs: + for ma in self.attrs_re.finditer(raw_attrs): + if ma.group('attr'): + if ma.group('value'): + attrs[ma.group('attr').strip()] = ma.group('value') + else: + attrs[ma.group('attr').strip()] = "" + elif ma.group('attr1'): + if ma.group('value1'): + attrs[ma.group('attr1').strip()] = ma.group( + 'value1' + ) + else: + attrs[ma.group('attr1').strip()] = "" + elif ma.group('attr2'): + attrs[ma.group('attr2').strip()] = "" + return tag, len(m.group(0)), attrs + else: + tag = block[1:].split(">", 1)[0].lower() + return tag, len(tag)+2, {} + + def _recursive_tagfind(self, ltag, rtag, start_index, block): + while 1: + i = block.find(rtag, start_index) + if i == -1: + return -1 + j = block.find(ltag, start_index) + # if no ltag, or rtag found before another ltag, return index + if (j > i or j == -1): + return i + len(rtag) + # another ltag found before rtag, use end of ltag as starting + # point and search again + j = block.find('>', j) + start_index = self._recursive_tagfind(ltag, rtag, j + 1, block) + if start_index == -1: + # HTML potentially malformed- ltag has no corresponding + # rtag + return -1 + + def _get_right_tag(self, left_tag, left_index, block): + for p in self.right_tag_patterns: + tag = p % left_tag + i = self._recursive_tagfind( + "<%s" % left_tag, tag, left_index, block + ) + if i > 2: + return tag.lstrip("<").rstrip(">"), i + return block.rstrip()[-left_index:-1].lower(), len(block) + + def _equal_tags(self, left_tag, right_tag): + if left_tag[0] in ['?', '@', '%']: # handle PHP, etc. + return True + if ("/" + left_tag) == right_tag: + return True + if (right_tag == "--" and left_tag == "--"): + return True + elif left_tag == right_tag[1:] and right_tag[0] == "/": + return True + else: + return False + + def _is_oneliner(self, tag): + return (tag in ['hr', 'hr/']) + + def _stringindex_to_listindex(self, stringindex, items): + """ + Same effect as concatenating the strings in items, + finding the character to which stringindex refers in that string, + and returning the index of the item in which that character resides. 
+ """ + items.append('dummy') + i, count = 0, 0 + while count <= stringindex: + count += len(items[i]) + i += 1 + return i - 1 + + def _nested_markdown_in_html(self, items): + """Find and process html child elements of the given element block.""" + for i, item in enumerate(items): + if self.left_tag_re.match(item): + left_tag, left_index, attrs = \ + self._get_left_tag(''.join(items[i:])) + right_tag, data_index = self._get_right_tag( + left_tag, left_index, ''.join(items[i:])) + right_listindex = \ + self._stringindex_to_listindex(data_index, items[i:]) + i + if 'markdown' in attrs.keys(): + items[i] = items[i][left_index:] # remove opening tag + placeholder = self.markdown.htmlStash.store_tag( + left_tag, attrs, i + 1, right_listindex + 1) + items.insert(i, placeholder) + if len(items) - right_listindex <= 1: # last nest, no tail + right_listindex -= 1 + items[right_listindex] = items[right_listindex][ + :-len(right_tag) - 2] # remove closing tag + else: # raw html + if len(items) - right_listindex <= 1: # last element + right_listindex -= 1 + offset = 1 if i == right_listindex else 0 + placeholder = self.markdown.htmlStash.store('\n\n'.join( + items[i:right_listindex + offset])) + del items[i:right_listindex + offset] + items.insert(i, placeholder) + return items + + def run(self, lines): + text = "\n".join(lines) + new_blocks = [] + text = text.rsplit("\n\n") + items = [] + left_tag = '' + right_tag = '' + in_tag = False # flag + + while text: + block = text[0] + if block.startswith("\n"): + block = block[1:] + text = text[1:] + + if block.startswith("\n"): + block = block[1:] + + if not in_tag: + if block.startswith("<") and len(block.strip()) > 1: + + if block[1:4] == "!--": + # is a comment block + left_tag, left_index, attrs = "--", 2, {} + else: + left_tag, left_index, attrs = self._get_left_tag(block) + right_tag, data_index = self._get_right_tag(left_tag, + left_index, + block) + # keep checking conditions below and maybe just append + + if data_index < len(block) and (util.isBlockLevel(left_tag) or left_tag == '--'): + text.insert(0, block[data_index:]) + block = block[:data_index] + + if not (util.isBlockLevel(left_tag) or block[1] in ["!", "?", "@", "%"]): + new_blocks.append(block) + continue + + if self._is_oneliner(left_tag): + new_blocks.append(block.strip()) + continue + + if block.rstrip().endswith(">") \ + and self._equal_tags(left_tag, right_tag): + if self.markdown_in_raw and 'markdown' in attrs.keys(): + block = block[left_index:-len(right_tag) - 2] + new_blocks.append(self.markdown.htmlStash. 
+ store_tag(left_tag, attrs, 0, 2)) + new_blocks.extend([block]) + else: + new_blocks.append( + self.markdown.htmlStash.store(block.strip())) + continue + else: + # if is block level tag and is not complete + if (not self._equal_tags(left_tag, right_tag)) and \ + (util.isBlockLevel(left_tag) or left_tag == "--"): + items.append(block.strip()) + in_tag = True + else: + new_blocks.append( + self.markdown.htmlStash.store(block.strip()) + ) + continue + + else: + new_blocks.append(block) + + else: + items.append(block) + + right_tag, data_index = self._get_right_tag(left_tag, 0, block) + + if self._equal_tags(left_tag, right_tag): + # if find closing tag + + if data_index < len(block): + # we have more text after right_tag + items[-1] = block[:data_index] + text.insert(0, block[data_index:]) + + in_tag = False + if self.markdown_in_raw and 'markdown' in attrs.keys(): + items[0] = items[0][left_index:] + items[-1] = items[-1][:-len(right_tag) - 2] + if items[len(items) - 1]: # not a newline/empty string + right_index = len(items) + 3 + else: + right_index = len(items) + 2 + new_blocks.append(self.markdown.htmlStash.store_tag( + left_tag, attrs, 0, right_index)) + placeholderslen = len(self.markdown.htmlStash.tag_data) + new_blocks.extend( + self._nested_markdown_in_html(items)) + nests = len(self.markdown.htmlStash.tag_data) - \ + placeholderslen + self.markdown.htmlStash.tag_data[-1 - nests][ + 'right_index'] += nests - 2 + else: + new_blocks.append( + self.markdown.htmlStash.store('\n\n'.join(items))) + items = [] + + if items: + if self.markdown_in_raw and 'markdown' in attrs.keys(): + items[0] = items[0][left_index:] + items[-1] = items[-1][:-len(right_tag) - 2] + if items[len(items) - 1]: # not a newline/empty string + right_index = len(items) + 3 + else: + right_index = len(items) + 2 + new_blocks.append( + self.markdown.htmlStash.store_tag( + left_tag, attrs, 0, right_index)) + placeholderslen = len(self.markdown.htmlStash.tag_data) + new_blocks.extend(self._nested_markdown_in_html(items)) + nests = len(self.markdown.htmlStash.tag_data) - placeholderslen + self.markdown.htmlStash.tag_data[-1 - nests][ + 'right_index'] += nests - 2 + else: + new_blocks.append( + self.markdown.htmlStash.store('\n\n'.join(items))) + new_blocks.append('\n') + + new_text = "\n\n".join(new_blocks) + return new_text.split("\n") + + +class ReferencePreprocessor(Preprocessor): + """ Remove reference definitions from text and store for later use. """ + + TITLE = r'[ ]*(\"(.*)\"|\'(.*)\'|\((.*)\))[ ]*' + RE = re.compile( + r'^[ ]{0,3}\[([^\]]*)\]:\s*([^ ]*)[ ]*(%s)?$' % TITLE, re.DOTALL + ) + TITLE_RE = re.compile(r'^%s$' % TITLE) + + def run(self, lines): + new_text = [] + while lines: + line = lines.pop(0) + m = self.RE.match(line) + if m: + id = m.group(1).strip().lower() + link = m.group(2).lstrip('<').rstrip('>') + t = m.group(5) or m.group(6) or m.group(7) + if not t: + # Check next line for title + tm = self.TITLE_RE.match(lines[0]) + if tm: + lines.pop(0) + t = tm.group(2) or tm.group(3) or tm.group(4) + self.markdown.references[id] = (link, t) + else: + new_text.append(line) + + return new_text # + "\n"
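A sketch of the `ReferencePreprocessor` behaviour above: reference definitions are removed from the line list and stashed on the Markdown instance's `references` dict. `FakeMd` below is a hypothetical stub standing in for the real Markdown instance, and the package is assumed to be importable as `markdown`:

```python
# Reference definitions are stripped and stored; ordinary lines pass through.
from markdown.preprocessors import ReferencePreprocessor

class FakeMd(object):
    references = {}

pre = ReferencePreprocessor(FakeMd())
lines = ['[home]: http://example.com/ "Example"',
         'See [the site][home].']
print(pre.run(lines))       # ['See [the site][home].']
print(FakeMd.references)    # {'home': ('http://example.com/', 'Example')}
```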
diff --git a/third_party/Python-Markdown/markdown/serializers.py b/third_party/Python-Markdown/markdown/serializers.py new file mode 100644 index 0000000..1e8d9dd --- /dev/null +++ b/third_party/Python-Markdown/markdown/serializers.py
@@ -0,0 +1,282 @@ +# markdown/searializers.py +# +# Add x/html serialization to Elementree +# Taken from ElementTree 1.3 preview with slight modifications +# +# Copyright (c) 1999-2007 by Fredrik Lundh. All rights reserved. +# +# fredrik@pythonware.com +# http://www.pythonware.com +# +# -------------------------------------------------------------------- +# The ElementTree toolkit is +# +# Copyright (c) 1999-2007 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. +# -------------------------------------------------------------------- + + +from __future__ import absolute_import +from __future__ import unicode_literals +from . import util +ElementTree = util.etree.ElementTree +QName = util.etree.QName +if hasattr(util.etree, 'test_comment'): # pragma: no cover + Comment = util.etree.test_comment +else: # pragma: no cover + Comment = util.etree.Comment +PI = util.etree.PI +ProcessingInstruction = util.etree.ProcessingInstruction + +__all__ = ['to_html_string', 'to_xhtml_string'] + +HTML_EMPTY = ("area", "base", "basefont", "br", "col", "frame", "hr", + "img", "input", "isindex", "link", "meta" "param") + +try: + HTML_EMPTY = set(HTML_EMPTY) +except NameError: # pragma: no cover + pass + +_namespace_map = { + # "well-known" namespace prefixes + "http://www.w3.org/XML/1998/namespace": "xml", + "http://www.w3.org/1999/xhtml": "html", + "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf", + "http://schemas.xmlsoap.org/wsdl/": "wsdl", + # xml schema + "http://www.w3.org/2001/XMLSchema": "xs", + "http://www.w3.org/2001/XMLSchema-instance": "xsi", + # dublic core + "http://purl.org/dc/elements/1.1/": "dc", +} + + +def _raise_serialization_error(text): # pragma: no cover + raise TypeError( + "cannot serialize %r (type %s)" % (text, type(text).__name__) + ) + + +def _encode(text, encoding): + try: + return text.encode(encoding, "xmlcharrefreplace") + except (TypeError, AttributeError): # pragma: no cover + _raise_serialization_error(text) + + +def _escape_cdata(text): + # escape character data + try: + # it's worth avoiding do-nothing calls for strings that are + # shorter than 500 character, or so. assume that's, by far, + # the most common case in most applications. 
+ if "&" in text: + text = text.replace("&", "&") + if "<" in text: + text = text.replace("<", "<") + if ">" in text: + text = text.replace(">", ">") + return text + except (TypeError, AttributeError): # pragma: no cover + _raise_serialization_error(text) + + +def _escape_attrib(text): + # escape attribute value + try: + if "&" in text: + text = text.replace("&", "&") + if "<" in text: + text = text.replace("<", "<") + if ">" in text: + text = text.replace(">", ">") + if "\"" in text: + text = text.replace("\"", """) + if "\n" in text: + text = text.replace("\n", " ") + return text + except (TypeError, AttributeError): # pragma: no cover + _raise_serialization_error(text) + + +def _escape_attrib_html(text): + # escape attribute value + try: + if "&" in text: + text = text.replace("&", "&") + if "<" in text: + text = text.replace("<", "<") + if ">" in text: + text = text.replace(">", ">") + if "\"" in text: + text = text.replace("\"", """) + return text + except (TypeError, AttributeError): # pragma: no cover + _raise_serialization_error(text) + + +def _serialize_html(write, elem, qnames, namespaces, format): + tag = elem.tag + text = elem.text + if tag is Comment: + write("<!--%s-->" % _escape_cdata(text)) + elif tag is ProcessingInstruction: + write("<?%s?>" % _escape_cdata(text)) + else: + tag = qnames[tag] + if tag is None: + if text: + write(_escape_cdata(text)) + for e in elem: + _serialize_html(write, e, qnames, None, format) + else: + write("<" + tag) + items = elem.items() + if items or namespaces: + items = sorted(items) # lexical order + for k, v in items: + if isinstance(k, QName): + k = k.text + if isinstance(v, QName): + v = qnames[v.text] + else: + v = _escape_attrib_html(v) + if qnames[k] == v and format == 'html': + # handle boolean attributes + write(" %s" % v) + else: + write(" %s=\"%s\"" % (qnames[k], v)) + if namespaces: + items = namespaces.items() + items.sort(key=lambda x: x[1]) # sort on prefix + for v, k in items: + if k: + k = ":" + k + write(" xmlns%s=\"%s\"" % (k, _escape_attrib(v))) + if format == "xhtml" and tag.lower() in HTML_EMPTY: + write(" />") + else: + write(">") + if text: + if tag.lower() in ["script", "style"]: + write(text) + else: + write(_escape_cdata(text)) + for e in elem: + _serialize_html(write, e, qnames, None, format) + if tag.lower() not in HTML_EMPTY: + write("</" + tag + ">") + if elem.tail: + write(_escape_cdata(elem.tail)) + + +def _write_html(root, + encoding=None, + default_namespace=None, + format="html"): + assert root is not None + data = [] + write = data.append + qnames, namespaces = _namespaces(root, default_namespace) + _serialize_html(write, root, qnames, namespaces, format) + if encoding is None: + return "".join(data) + else: + return _encode("".join(data)) + + +# -------------------------------------------------------------------- +# serialization support + +def _namespaces(elem, default_namespace=None): + # identify namespaces used in this tree + + # maps qnames to *encoded* prefix:local names + qnames = {None: None} + + # maps uri:s to prefixes + namespaces = {} + if default_namespace: + namespaces[default_namespace] = "" + + def add_qname(qname): + # calculate serialized qname representation + try: + if qname[:1] == "{": + uri, tag = qname[1:].split("}", 1) + prefix = namespaces.get(uri) + if prefix is None: + prefix = _namespace_map.get(uri) + if prefix is None: + prefix = "ns%d" % len(namespaces) + if prefix != "xml": + namespaces[uri] = prefix + if prefix: + qnames[qname] = "%s:%s" % (prefix, tag) + else: + 
qnames[qname] = tag # default element + else: + if default_namespace: + raise ValueError( + "cannot use non-qualified names with " + "default_namespace option" + ) + qnames[qname] = qname + except TypeError: # pragma: no cover + _raise_serialization_error(qname) + + # populate qname and namespaces table + try: + iterate = elem.iter + except AttributeError: + iterate = elem.getiterator # cET compatibility + for elem in iterate(): + tag = elem.tag + if isinstance(tag, QName) and tag.text not in qnames: + add_qname(tag.text) + elif isinstance(tag, util.string_type): + if tag not in qnames: + add_qname(tag) + elif tag is not None and tag is not Comment and tag is not PI: + _raise_serialization_error(tag) + for key, value in elem.items(): + if isinstance(key, QName): + key = key.text + if key not in qnames: + add_qname(key) + if isinstance(value, QName) and value.text not in qnames: + add_qname(value.text) + text = elem.text + if isinstance(text, QName) and text.text not in qnames: + add_qname(text.text) + return qnames, namespaces + + +def to_html_string(element): + return _write_html(ElementTree(element).getroot(), format="html") + + +def to_xhtml_string(element): + return _write_html(ElementTree(element).getroot(), format="xhtml")
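A sketch of the two public helpers above; the difference between the formats shows up in how void elements such as `<br>` are closed, and character data is entity-escaped on the way out. It assumes the vendored package is importable as `markdown` (whose `util.etree` is the same ElementTree the serializer expects):

```python
# HTML vs. XHTML serialization of a tiny tree.
from markdown import util
from markdown.serializers import to_html_string, to_xhtml_string

p = util.etree.Element('p')
p.text = 'fish & chips'
util.etree.SubElement(p, 'br')

print(to_html_string(p))    # <p>fish &amp; chips<br></p>
print(to_xhtml_string(p))   # <p>fish &amp; chips<br /></p>
```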
diff --git a/third_party/Python-Markdown/markdown/treeprocessors.py b/third_party/Python-Markdown/markdown/treeprocessors.py new file mode 100644 index 0000000..d06f192 --- /dev/null +++ b/third_party/Python-Markdown/markdown/treeprocessors.py
@@ -0,0 +1,371 @@ +from __future__ import unicode_literals +from __future__ import absolute_import +from . import util +from . import odict +from . import inlinepatterns + + +def build_treeprocessors(md_instance, **kwargs): + """ Build the default treeprocessors for Markdown. """ + treeprocessors = odict.OrderedDict() + treeprocessors["inline"] = InlineProcessor(md_instance) + treeprocessors["prettify"] = PrettifyTreeprocessor(md_instance) + return treeprocessors + + +def isString(s): + """ Check if it's string """ + if not isinstance(s, util.AtomicString): + return isinstance(s, util.string_type) + return False + + +class Treeprocessor(util.Processor): + """ + Treeprocessors are run on the ElementTree object before serialization. + + Each Treeprocessor implements a "run" method that takes a pointer to an + ElementTree, modifies it as necessary and returns an ElementTree + object. + + Treeprocessors must extend markdown.Treeprocessor. + + """ + def run(self, root): + """ + Subclasses of Treeprocessor should implement a `run` method, which + takes a root ElementTree. This method can return another ElementTree + object, and the existing root ElementTree will be replaced, or it can + modify the current tree and return None. + """ + pass # pragma: no cover + + +class InlineProcessor(Treeprocessor): + """ + A Treeprocessor that traverses a tree, applying inline patterns. + """ + + def __init__(self, md): + self.__placeholder_prefix = util.INLINE_PLACEHOLDER_PREFIX + self.__placeholder_suffix = util.ETX + self.__placeholder_length = 4 + len(self.__placeholder_prefix) \ + + len(self.__placeholder_suffix) + self.__placeholder_re = util.INLINE_PLACEHOLDER_RE + self.markdown = md + self.inlinePatterns = md.inlinePatterns + + def __makePlaceholder(self, type): + """ Generate a placeholder """ + id = "%04d" % len(self.stashed_nodes) + hash = util.INLINE_PLACEHOLDER % id + return hash, id + + def __findPlaceholder(self, data, index): + """ + Extract id from data string, start from index + + Keyword arguments: + + * data: string + * index: index, from which we start search + + Returns: placeholder id and string index, after the found placeholder. + + """ + m = self.__placeholder_re.search(data, index) + if m: + return m.group(1), m.end() + else: + return None, index + 1 + + def __stashNode(self, node, type): + """ Add node to stash """ + placeholder, id = self.__makePlaceholder(type) + self.stashed_nodes[id] = node + return placeholder + + def __handleInline(self, data, patternIndex=0): + """ + Process string with inline patterns and replace it + with placeholders + + Keyword arguments: + + * data: A line of Markdown text + * patternIndex: The index of the inlinePattern to start with + + Returns: String with placeholders. + + """ + if not isinstance(data, util.AtomicString): + startIndex = 0 + while patternIndex < len(self.inlinePatterns): + data, matched, startIndex = self.__applyPattern( + self.inlinePatterns.value_for_index(patternIndex), + data, patternIndex, startIndex) + if not matched: + patternIndex += 1 + return data + + def __processElementText(self, node, subnode, isText=True): + """ + Process placeholders in Element.text or Element.tail + of Elements popped from self.stashed_nodes. 
+ + Keywords arguments: + + * node: parent node + * subnode: processing node + * isText: bool variable, True - it's text, False - it's tail + + Returns: None + + """ + if isText: + text = subnode.text + subnode.text = None + else: + text = subnode.tail + subnode.tail = None + + childResult = self.__processPlaceholders(text, subnode, isText) + + if not isText and node is not subnode: + pos = list(node).index(subnode) + 1 + else: + pos = 0 + + childResult.reverse() + for newChild in childResult: + node.insert(pos, newChild) + + def __processPlaceholders(self, data, parent, isText=True): + """ + Process string with placeholders and generate ElementTree tree. + + Keyword arguments: + + * data: string with placeholders instead of ElementTree elements. + * parent: Element, which contains processing inline data + + Returns: list with ElementTree elements with applied inline patterns. + + """ + def linkText(text): + if text: + if result: + if result[-1].tail: + result[-1].tail += text + else: + result[-1].tail = text + elif not isText: + if parent.tail: + parent.tail += text + else: + parent.tail = text + else: + if parent.text: + parent.text += text + else: + parent.text = text + result = [] + strartIndex = 0 + while data: + index = data.find(self.__placeholder_prefix, strartIndex) + if index != -1: + id, phEndIndex = self.__findPlaceholder(data, index) + + if id in self.stashed_nodes: + node = self.stashed_nodes.get(id) + + if index > 0: + text = data[strartIndex:index] + linkText(text) + + if not isString(node): # it's Element + for child in [node] + list(node): + if child.tail: + if child.tail.strip(): + self.__processElementText( + node, child, False + ) + if child.text: + if child.text.strip(): + self.__processElementText(child, child) + else: # it's just a string + linkText(node) + strartIndex = phEndIndex + continue + + strartIndex = phEndIndex + result.append(node) + + else: # wrong placeholder + end = index + len(self.__placeholder_prefix) + linkText(data[strartIndex:end]) + strartIndex = end + else: + text = data[strartIndex:] + if isinstance(data, util.AtomicString): + # We don't want to loose the AtomicString + text = util.AtomicString(text) + linkText(text) + data = "" + + return result + + def __applyPattern(self, pattern, data, patternIndex, startIndex=0): + """ + Check if the line fits the pattern, create the necessary + elements, add it to stashed_nodes. + + Keyword arguments: + + * data: the text to be processed + * pattern: the pattern to be checked + * patternIndex: index of current pattern + * startIndex: string index, from which we start searching + + Returns: String with placeholders instead of ElementTree elements. + + """ + match = pattern.getCompiledRegExp().match(data[startIndex:]) + leftData = data[:startIndex] + + if not match: + return data, False, 0 + + node = pattern.handleMatch(match) + + if node is None: + return data, True, len(leftData)+match.span(len(match.groups()))[0] + + if not isString(node): + if not isinstance(node.text, util.AtomicString): + # We need to process current node too + for child in [node] + list(node): + if not isString(node): + if child.text: + child.text = self.__handleInline( + child.text, patternIndex + 1 + ) + if child.tail: + child.tail = self.__handleInline( + child.tail, patternIndex + ) + + placeholder = self.__stashNode(node, pattern.type()) + + return "%s%s%s%s" % (leftData, + match.group(1), + placeholder, match.groups()[-1]), True, 0 + + def run(self, tree): + """Apply inline patterns to a parsed Markdown tree. 
+ + Iterate over ElementTree, find elements with inline tag, apply inline + patterns and append newly created Elements to tree. If you don't + want to process your data with inline paterns, instead of normal + string, use subclass AtomicString: + + node.text = markdown.AtomicString("This will not be processed.") + + Arguments: + + * tree: ElementTree object, representing Markdown tree. + + Returns: ElementTree object with applied inline patterns. + + """ + self.stashed_nodes = {} + + stack = [tree] + + while stack: + currElement = stack.pop() + insertQueue = [] + for child in currElement: + if child.text and not isinstance( + child.text, util.AtomicString + ): + text = child.text + child.text = None + lst = self.__processPlaceholders( + self.__handleInline(text), child + ) + stack += lst + insertQueue.append((child, lst)) + if child.tail: + tail = self.__handleInline(child.tail) + dumby = util.etree.Element('d') + child.tail = None + tailResult = self.__processPlaceholders(tail, dumby, False) + if dumby.tail: + child.tail = dumby.tail + pos = list(currElement).index(child) + 1 + tailResult.reverse() + for newChild in tailResult: + currElement.insert(pos, newChild) + if len(child): + stack.append(child) + + for element, lst in insertQueue: + if self.markdown.enable_attributes: + if element.text and isString(element.text): + element.text = inlinepatterns.handleAttributes( + element.text, element + ) + i = 0 + for newChild in lst: + if self.markdown.enable_attributes: + # Processing attributes + if newChild.tail and isString(newChild.tail): + newChild.tail = inlinepatterns.handleAttributes( + newChild.tail, element + ) + if newChild.text and isString(newChild.text): + newChild.text = inlinepatterns.handleAttributes( + newChild.text, newChild + ) + element.insert(i, newChild) + i += 1 + return tree + + +class PrettifyTreeprocessor(Treeprocessor): + """ Add linebreaks to the html document. """ + + def _prettifyETree(self, elem): + """ Recursively add linebreaks to ElementTree children. """ + + i = "\n" + if util.isBlockLevel(elem.tag) and elem.tag not in ['code', 'pre']: + if (not elem.text or not elem.text.strip()) \ + and len(elem) and util.isBlockLevel(elem[0].tag): + elem.text = i + for e in elem: + if util.isBlockLevel(e.tag): + self._prettifyETree(e) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + if not elem.tail or not elem.tail.strip(): + elem.tail = i + + def run(self, root): + """ Add linebreaks to ElementTree root object. """ + + self._prettifyETree(root) + # Do <br />'s seperately as they are often in the middle of + # inline content and missed by _prettifyETree. + brs = root.getiterator('br') + for br in brs: + if not br.tail or not br.tail.strip(): + br.tail = '\n' + else: + br.tail = '\n%s' % br.tail + # Clean up extra empty lines at end of code blocks. + pres = root.getiterator('pre') + for pre in pres: + if len(pre) and pre[0].tag == 'code': + pre[0].text = util.AtomicString(pre[0].text.rstrip() + '\n')
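The `AtomicString` escape hatch mentioned in `InlineProcessor.run()` above hinges on `isString()`: it deliberately answers False for `AtomicString`, so text wrapped that way is skipped by the inline patterns. A minimal sketch, assuming the vendored package is importable as `markdown`:

```python
# isString() treats AtomicString as "not a plain string" on purpose.
from markdown import util
from markdown.treeprocessors import isString

print(isString('*will be processed*'))                   # True
print(isString(util.AtomicString('*left untouched*')))   # False
```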
diff --git a/third_party/Python-Markdown/markdown/util.py b/third_party/Python-Markdown/markdown/util.py new file mode 100644 index 0000000..d3d48f0 --- /dev/null +++ b/third_party/Python-Markdown/markdown/util.py
@@ -0,0 +1,177 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +import re +import sys + + +""" +Python 3 Stuff +============================================================================= +""" +PY3 = sys.version_info[0] == 3 + +if PY3: # pragma: no cover + string_type = str + text_type = str + int2str = chr +else: # pragma: no cover + string_type = basestring # noqa + text_type = unicode # noqa + int2str = unichr # noqa + + +""" +Constants you might want to modify +----------------------------------------------------------------------------- +""" + + +BLOCK_LEVEL_ELEMENTS = re.compile( + "^(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul" + "|script|noscript|form|fieldset|iframe|math" + "|hr|hr/|style|li|dt|dd|thead|tbody" + "|tr|th|td|section|footer|header|group|figure" + "|figcaption|aside|article|canvas|output" + "|progress|video|nav)$", + re.IGNORECASE +) +# Placeholders +STX = '\u0002' # Use STX ("Start of text") for start-of-placeholder +ETX = '\u0003' # Use ETX ("End of text") for end-of-placeholder +INLINE_PLACEHOLDER_PREFIX = STX+"klzzwxh:" +INLINE_PLACEHOLDER = INLINE_PLACEHOLDER_PREFIX + "%s" + ETX +INLINE_PLACEHOLDER_RE = re.compile(INLINE_PLACEHOLDER % r'([0-9]+)') +AMP_SUBSTITUTE = STX+"amp"+ETX +HTML_PLACEHOLDER = STX + "wzxhzdk:%s" + ETX +HTML_PLACEHOLDER_RE = re.compile(HTML_PLACEHOLDER % r'([0-9]+)') +TAG_PLACEHOLDER = STX + "hzzhzkh:%s" + ETX + + +""" +Constants you probably do not need to change +----------------------------------------------------------------------------- +""" + +RTL_BIDI_RANGES = ( + ('\u0590', '\u07FF'), + # Hebrew (0590-05FF), Arabic (0600-06FF), + # Syriac (0700-074F), Arabic supplement (0750-077F), + # Thaana (0780-07BF), Nko (07C0-07FF). + ('\u2D30', '\u2D7F') # Tifinagh +) + +# Extensions should use "markdown.util.etree" instead of "etree" (or do `from +# markdown.util import etree`). Do not import it by yourself. + +try: # pragma: no cover + # Is the C implementation of ElementTree available? + import xml.etree.cElementTree as etree + from xml.etree.ElementTree import Comment + # Serializers (including ours) test with non-c Comment + etree.test_comment = Comment + if etree.VERSION < "1.0.5": + raise RuntimeError("cElementTree version 1.0.5 or higher is required.") +except (ImportError, RuntimeError): # pragma: no cover + # Use the Python implementation of ElementTree? + import xml.etree.ElementTree as etree + if etree.VERSION < "1.1": + raise RuntimeError("ElementTree version 1.1 or higher is required") + + +""" +AUXILIARY GLOBAL FUNCTIONS +============================================================================= +""" + + +def isBlockLevel(tag): + """Check if the tag is a block level HTML tag.""" + if isinstance(tag, string_type): + return BLOCK_LEVEL_ELEMENTS.match(tag) + # Some ElementTree tags are not strings, so return False. + return False + + +def parseBoolValue(value, fail_on_errors=True, preserve_none=False): + """Parses a string representing bool value. If parsing was successful, + returns True or False. If preserve_none=True, returns True, False, + or None. 
If parsing was not successful, raises ValueError, or, if + fail_on_errors=False, returns None.""" + if not isinstance(value, string_type): + if preserve_none and value is None: + return value + return bool(value) + elif preserve_none and value.lower() == 'none': + return None + elif value.lower() in ('true', 'yes', 'y', 'on', '1'): + return True + elif value.lower() in ('false', 'no', 'n', 'off', '0', 'none'): + return False + elif fail_on_errors: + raise ValueError('Cannot parse bool value: %r' % value) + + +""" +MISC AUXILIARY CLASSES +============================================================================= +""" + + +class AtomicString(text_type): + """A string which should not be further processed.""" + pass + + +class Processor(object): + def __init__(self, markdown_instance=None): + if markdown_instance: + self.markdown = markdown_instance + + +class HtmlStash(object): + """ + This class is used for stashing HTML objects that we extract + in the beginning and replace with place-holders. + """ + + def __init__(self): + """ Create a HtmlStash. """ + self.html_counter = 0 # for counting inline html segments + self.rawHtmlBlocks = [] + self.tag_counter = 0 + self.tag_data = [] # list of dictionaries in the order tags appear + + def store(self, html, safe=False): + """ + Saves an HTML segment for later reinsertion. Returns a + placeholder string that needs to be inserted into the + document. + + Keyword arguments: + + * html: an html segment + * safe: label an html segment as safe for safemode + + Returns : a placeholder string + + """ + self.rawHtmlBlocks.append((html, safe)) + placeholder = self.get_placeholder(self.html_counter) + self.html_counter += 1 + return placeholder + + def reset(self): + self.html_counter = 0 + self.rawHtmlBlocks = [] + + def get_placeholder(self, key): + return HTML_PLACEHOLDER % key + + def store_tag(self, tag, attrs, left_index, right_index): + """Store tag data and return a placeholder.""" + self.tag_data.append({'tag': tag, 'attrs': attrs, + 'left_index': left_index, + 'right_index': right_index}) + placeholder = TAG_PLACEHOLDER % str(self.tag_counter) + self.tag_counter += 1 # equal to the tag's index in self.tag_data + return placeholder
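A short sketch of `parseBoolValue()` and the `HtmlStash` placeholder round trip described above, assuming the vendored package is importable as `markdown`:

```python
# Boolean option parsing and the HtmlStash store/placeholder pairing.
from markdown import util

print(util.parseBoolValue('yes'))                        # True
print(util.parseBoolValue('None', preserve_none=True))   # None

stash = util.HtmlStash()
placeholder = stash.store('<div>raw</div>', safe=True)
print(repr(placeholder))          # '\x02wzxhzdk:0\x03'
print(stash.rawHtmlBlocks[0])     # ('<div>raw</div>', True)
```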
diff --git a/third_party/WebKit/LayoutTests/TestExpectations b/third_party/WebKit/LayoutTests/TestExpectations index 6f37f3f..e0d33d4 100644 --- a/third_party/WebKit/LayoutTests/TestExpectations +++ b/third_party/WebKit/LayoutTests/TestExpectations
@@ -4,7 +4,7 @@ # TODO(wangxianzhu): Triage the failures crbug.com/524134 virtual/spv2/paint/invalidation/invalidate-after-composited-scroll.html [ Failure ] crbug.com/524134 crbug.com/539546 virtual/spv2/paint/selection/text-selection-drag.html [ ImageOnlyFailure Pass Failure ] -crbug.com/524134 virtual/spv2/paint/deprecatedpaintlayer/non-self-painting-layer-overrides-visibility.html [ Crash ImageOnlyFailure Timeout ] +crbug.com/524134 virtual/spv2/paint/paintlayer/non-self-painting-layer-overrides-visibility.html [ Crash ImageOnlyFailure Timeout ] crbug.com/524134 virtual/spv2/paint/invalidation/spv2/focus-ring-on-continuation-move.html [ ImageOnlyFailure ] crbug.com/524236 virtual/spv2/paint/invalidation/spv2/clipping-should-not-repaint-composited-descendants-as-text.html [ Failure ] @@ -753,8 +753,6 @@ crbug.com/400829 media/video-object-fit.html [ ImageOnlyFailure ] crbug.com/400829 virtual/stable/media/stable/video-object-fit-stable.html [ ImageOnlyFailure ] -crbug.com/539546 [ Debug ] css3/filters/effect-reference-tile-hw.html [ Pass Failure ImageOnlyFailure ] - # We only want to run one of the web-animations-api tests in stable mode. crbug.com/441553 virtual/stable/web-animations-api [ Skip ] crbug.com/368946 virtual/stable/web-animations-api/eased-keyframes.html [ Pass ] @@ -1131,6 +1129,10 @@ # Unclear semantics of ToString (actually ToPrimitive) across iframes. crbug.com/532469 http/tests/security/cross-frame-access-custom.html [ NeedsManualRebaseline ] +crbug.com/538692 css3/filters/effect-reference-hw.html [ NeedsRebaseline ] +crbug.com/538692 css3/filters/effect-reference-subregion-hw.html [ NeedsRebaseline ] +crbug.com/538692 css3/filters/effect-reference-tile-hw.html [ NeedsRebaseline ] + # Win10 specific failures that still need triaging. crbug.com/521730 [ Win10 ] fast/dom/Window/property-access-on-cached-properties-after-frame-navigated.html [ Failure ] crbug.com/521730 [ Win10 ] fast/dom/Window/property-access-on-cached-properties-after-frame-removed-and-gced.html [ Failure ] @@ -1607,10 +1609,11 @@ crbug.com/535478 [ Win ] virtual/threaded/inspector/tracing/decode-resize.html [ Slow Pass Failure ] crbug.com/538522 css3/filters/effect-reference-colorspace-hw.html [ Crash ImageOnlyFailure ] -crbug.com/539227 [ Linux Mac Win Debug ] css3/filters/effect-reference-hw.html [ Crash ] -crbug.com/539227 [ Linux Mac Win Debug ] css3/filters/effect-reference-subregion-hw.html [ Crash ] crbug.com/524646 [ Yosemite ] fast/dom/shadow/shadowdom-for-button.html [ ImageOnlyFailure ] crbug.com/538717 [ Win ] http/tests/permissions/chromium/test-request-multiple-window.html [ Failure Pass ] crbug.com/538717 [ Win ] http/tests/permissions/chromium/test-request-multiple-worker.html [ Failure Pass ] + +crbug.com/502267 fast/css/image-orientation/image-orientation-from-image-composited-dynamic.html [ NeedsRebaseline ] +crbug.com/502267 fast/css/image-orientation/image-orientation-default.html [ NeedsRebaseline ]
diff --git a/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/generateKey-failures-expected.txt b/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/generateKey-failures-expected.txt index 0acf798..de684b6e 100644 --- a/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/generateKey-failures-expected.txt +++ b/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/generateKey-failures-expected.txt
@@ -5,16 +5,16 @@ generateKey() without length... -error is: TypeError: AesKeyGenParams: length: Missing required property +SUCCESS (rejected): TypeError: AesKeyGenParams: length: Missing required property generateKey() with a length of 70000... -error is: TypeError: AesKeyGenParams: length: Outside of numeric range +SUCCESS (rejected): TypeError: AesKeyGenParams: length: Outside of numeric range generateKey() with a length of -3... -error is: TypeError: AesKeyGenParams: length: Outside of numeric range +SUCCESS (rejected): TypeError: AesKeyGenParams: length: Outside of numeric range generateKey() with length that is minus Infinity... -error is: TypeError: AesKeyGenParams: length: Outside of numeric range +SUCCESS (rejected): TypeError: AesKeyGenParams: length: Outside of numeric range PASS successfullyParsed is true TEST COMPLETE
diff --git a/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/generateKey-failures.html b/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/generateKey-failures.html index 1adf310..da36e3cd 100644 --- a/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/generateKey-failures.html +++ b/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/generateKey-failures.html
@@ -15,27 +15,40 @@ extractable = true; keyUsages = ['wrapKey', 'unwrapKey']; -Promise.resolve(null).then(function() { - debug("\ngenerateKey() without length..."); - return crypto.subtle.generateKey({ name: 'aes-kw' }, extractable, keyUsages); -}).then(failAndFinishJSTest, function(result) { - logError(result); +function testNoLength() +{ + return expectFailure( + "generateKey() without length...", + crypto.subtle.generateKey({ name: 'aes-kw' }, extractable, keyUsages)); +} - debug("\ngenerateKey() with a length of 70000..."); - return crypto.subtle.generateKey({ name: 'aes-kw', length: 70000 }, extractable, keyUsages); -}).then(failAndFinishJSTest, function(result) { - logError(result); +function testBigLength() +{ + return expectFailure( + "generateKey() with a length of 70000...", + crypto.subtle.generateKey({ name: 'aes-kw', length: 70000 }, extractable, keyUsages)); +} - debug("\ngenerateKey() with a length of -3..."); - return crypto.subtle.generateKey({ name: 'aes-kw', length: -3 }, extractable, keyUsages); -}).then(failAndFinishJSTest, function(result) { - logError(result); +function testNegativeLength() +{ + return expectFailure( + "generateKey() with a length of -3...", + crypto.subtle.generateKey({ name: 'aes-kw', length: -3 }, extractable, keyUsages)); +} - debug("\ngenerateKey() with length that is minus Infinity..."); - return crypto.subtle.generateKey({ name: 'aes-kw', length: -Infinity }, extractable, keyUsages); -}).then(failAndFinishJSTest, function(result) { - logError(result); -}).then(finishJSTest, failAndFinishJSTest); +function testMinusInfinity() +{ + return expectFailure( + "generateKey() with length that is minus Infinity...", + crypto.subtle.generateKey({ name: 'aes-kw', length: -Infinity }, extractable, keyUsages)); +} + +Promise.resolve(null) + .then(testNoLength) + .then(testBigLength) + .then(testNegativeLength) + .then(testMinusInfinity) + .then(finishJSTest, failAndFinishJSTest); </script>
diff --git a/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/key-manipulation-expected.txt b/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/key-manipulation-expected.txt index b011139..05285fb 100644 --- a/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/key-manipulation-expected.txt +++ b/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/key-manipulation-expected.txt
@@ -3,18 +3,23 @@ On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE". + Generating a key... +SUCCESS PASS key.toString() is '[object CryptoKey]' PASS key.type is 'secret' PASS key.algorithm.name is 'AES-KW' PASS key.algorithm.length is 256 Testing that the key can't be used with AES-CBC... -error is: InvalidAccessError: key.algorithm does not match that of operation +SUCCESS (rejected): InvalidAccessError: key.algorithm does not match that of operation Exporting the key to raw... +SUCCESS PASS exportedKey.toString() is '[object ArrayBuffer]' + Importing it back... +SUCCESS PASS importedKey.toString() is '[object CryptoKey]' PASS importedKey.type is 'secret' PASS importedKey.algorithm.name is 'AES-KW'
diff --git a/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/key-manipulation.html b/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/key-manipulation.html index 94ff417..dc15de1 100644 --- a/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/key-manipulation.html +++ b/third_party/WebKit/LayoutTests/crypto/subtle/aes-kw/key-manipulation.html
@@ -15,36 +15,66 @@ var extractable = true; -debug("Generating a key..."); -crypto.subtle.generateKey({name: "aes-kw", length: 256}, extractable, ["wrapKey", "unwrapKey"]).then(function(result) { - key = result; - shouldBe("key.toString()", "'[object CryptoKey]'"); - shouldBe("key.type", "'secret'"); - shouldBe("key.algorithm.name", "'AES-KW'"); - shouldBe("key.algorithm.length", "256"); +// Globals used by tests +var key = null; +var exportedKey = null; - debug("\nTesting that the key can't be used with AES-CBC..."); - iv = hexStringToUint8Array("000102030405060708090a0b0c0d0e0f"); +// Assigns the global |key| which is used by subsequent tests. +function generateTestKey() +{ + return expectSuccess( + "Generating a key...", + crypto.subtle.generateKey({name: "aes-kw", length: 256}, extractable, ["wrapKey", "unwrapKey"]) + ).then(function(result) { + key = result; + shouldBe("key.toString()", "'[object CryptoKey]'"); + shouldBe("key.type", "'secret'"); + shouldBe("key.algorithm.name", "'AES-KW'"); + shouldBe("key.algorithm.length", "256"); + }); +} - return crypto.subtle.wrapKey("raw", key, key, {name: "AES-CBC", iv: iv}); -}).then(failAndFinishJSTest, function(result) { - logError(result); +function testWrongAlgorithm() +{ + var iv = hexStringToUint8Array("000102030405060708090a0b0c0d0e0f"); - debug("\nExporting the key to raw..."); - return crypto.subtle.exportKey('raw', key); -}).then(function(result) { - exportedKey = result; - shouldBe("exportedKey.toString()", "'[object ArrayBuffer]'"); - debug("Importing it back..."); - return crypto.subtle.importKey('raw', exportedKey, {name: "aes-kw"}, extractable, ["wrapKey", "unwrapKey"]); -}).then(function(result) { - importedKey = result; + return expectFailure( + "Testing that the key can't be used with AES-CBC...", + crypto.subtle.wrapKey("raw", key, key, {name: "AES-CBC", iv: iv})); +} - shouldBe("importedKey.toString()", "'[object CryptoKey]'"); - shouldBe("importedKey.type", "'secret'"); - shouldBe("importedKey.algorithm.name", "'AES-KW'"); - shouldBe("importedKey.algorithm.length", "256"); -}).then(finishJSTest, failAndFinishJSTest); +function testExportKey() +{ + return expectSuccess( + "Exporting the key to raw...", + crypto.subtle.exportKey('raw', key) + ).then(function(result) { + exportedKey = result; + shouldBe("exportedKey.toString()", "'[object ArrayBuffer]'"); + }); +} + +function testImportExportedKey() +{ + return expectSuccess( + "Importing it back...", + crypto.subtle.importKey('raw', exportedKey, {name: "aes-kw"}, extractable, ["wrapKey", "unwrapKey"]) + ).then(function(result) { + importedKey = result; + + shouldBe("importedKey.toString()", "'[object CryptoKey]'"); + shouldBe("importedKey.type", "'secret'"); + shouldBe("importedKey.algorithm.name", "'AES-KW'"); + shouldBe("importedKey.algorithm.length", "256"); + }); +} + +Promise.resolve(null) + .then(generateTestKey) + .then(testWrongAlgorithm) + .then(testExportKey) + .then(testImportExportedKey) + .then(finishJSTest, failAndFinishJSTest); </script>
diff --git a/third_party/WebKit/LayoutTests/fast/css/image-orientation/image-orientation-default.html b/third_party/WebKit/LayoutTests/fast/css/image-orientation/image-orientation-default.html new file mode 100644 index 0000000..8ba9c7ce --- /dev/null +++ b/third_party/WebKit/LayoutTests/fast/css/image-orientation/image-orientation-default.html
@@ -0,0 +1,49 @@ +<html> +<head> +<script> +if (window.testRunner) { + testRunner.dumpAsTextWithPixelResults(); +} + +function log(str) { + var li = document.createElement("li"); + li.appendChild(document.createTextNode(str)); + var console = document.getElementById("console"); + console.appendChild(li); +} + +function imageSize(el) { + var computedStyle = window.getComputedStyle(el); + return computedStyle.width + " by " + computedStyle.height; +} + + +function load() { + for (var i = 1; i <= 9; i++) + log("img" + i + " size = " + imageSize(document.getElementById("img" + i))) +} + +</script> +<style> +body { overflow: hidden; } +img { border: 1px solid black; } +div { display: inline-block; margin-right: 20px; margin-bottom: 10px; width: 100px; vertical-align: top; } +</style> +</head> +<body onload="load()"> +<b>The images should not rotate respecting their EXIF orientation since no image-orientation is specified.</b><br><br> +<div><img id="img1" src="../../images/resources/exif-orientation-1-ul.jpg"><br>Normal</div> +<div><img id="img2" src="../../images/resources/exif-orientation-2-ur.jpg"><br>Flipped horizontally</div> +<div><img id="img3" src="../../images/resources/exif-orientation-3-lr.jpg"><br>Rotated 180°</div> +<div><img id="img4" src="../../images/resources/exif-orientation-4-lol.jpg"><br>Flipped vertically</div> +<br> +<div><img id="img5" src="../../images/resources/exif-orientation-5-lu.jpg"><br>Rotated 90° CCW and flipped vertically</div> +<div><img id="img6" src="../../images/resources/exif-orientation-6-ru.jpg"><br>Rotated 90° CCW</div> +<div><img id="img7" src="../../images/resources/exif-orientation-7-rl.jpg"><br>Rotated 90° CW and flipped vertically </div> +<div><img id="img8" src="../../images/resources/exif-orientation-8-llo.jpg"><br>Rotated 90° CW</div> +<br> +<div><img id="img9" src="../../images/resources/exif-orientation-9-u.jpg"><br>Undefined (invalid value)</div> +<br> +<ul id="console"></ul> +</body> +</html>
diff --git a/third_party/WebKit/LayoutTests/fast/css/image-orientation/image-orientation-from-image-composited-dynamic.html b/third_party/WebKit/LayoutTests/fast/css/image-orientation/image-orientation-from-image-composited-dynamic.html new file mode 100644 index 0000000..78f29de --- /dev/null +++ b/third_party/WebKit/LayoutTests/fast/css/image-orientation/image-orientation-from-image-composited-dynamic.html
@@ -0,0 +1,51 @@ +<html> +<head> +<script> +if (window.testRunner) { + testRunner.dumpAsTextWithPixelResults(); +} + +function log(str) { + var li = document.createElement("li"); + li.appendChild(document.createTextNode(str)); + var console = document.getElementById("console"); + console.appendChild(li); +} + +function imageSize(el) { + var computedStyle = window.getComputedStyle(el); + return computedStyle.width + " by " + computedStyle.height; +} + + +function load() { + for (var i = 1; i <= 9; i++) { + document.getElementById("img" + i).style.imageOrientation = "from-image"; + log("img" + i + " size = " + imageSize(document.getElementById("img" + i))) + } +} + +</script> +<style> +body { overflow: hidden; } +img { transform: translateZ(0); } +div { display: inline-block; margin-right: 20px; margin-bottom: 10px; width: 100px; vertical-align: top; } +</style> +</head> +<body onload="load()"> +<b>The images should be rotated respecting their EXIF orientation by use of image-orientation: from-image.</b><br><br> +<div><img id="img1" src="../../images/resources/exif-orientation-1-ul.jpg"><br>Normal</div> +<div><img id="img2" src="../../images/resources/exif-orientation-2-ur.jpg"><br>Flipped horizontally</div> +<div><img id="img3" src="../../images/resources/exif-orientation-3-lr.jpg"><br>Rotated 180°</div> +<div><img id="img4" src="../../images/resources/exif-orientation-4-lol.jpg"><br>Flipped vertically</div> +<br> +<div><img id="img5" src="../../images/resources/exif-orientation-5-lu.jpg"><br>Rotated 90° CCW and flipped vertically</div> +<div><img id="img6" src="../../images/resources/exif-orientation-6-ru.jpg"><br>Rotated 90° CCW</div> +<div><img id="img7" src="../../images/resources/exif-orientation-7-rl.jpg"><br>Rotated 90° CW and flipped vertically </div> +<div><img id="img8" src="../../images/resources/exif-orientation-8-llo.jpg"><br>Rotated 90° CW</div> +<br> +<div><img id="img9" src="../../images/resources/exif-orientation-9-u.jpg"><br>Undefined (invalid value)</div> +<br> +<ul id="console"></ul> +</body> +</html>
diff --git a/third_party/WebKit/LayoutTests/fast/forms/text/text-font-height-mismatch.html b/third_party/WebKit/LayoutTests/fast/forms/text/text-font-height-mismatch.html deleted file mode 100644 index a813340..0000000 --- a/third_party/WebKit/LayoutTests/fast/forms/text/text-font-height-mismatch.html +++ /dev/null
@@ -1,4 +0,0 @@ -<!DOCTYPE html> -<p>Editable text should be centered vertically.<p> -<input style="border: solid 1px black; line-height:16px; height:16px; font-size:24px;" value="ABCgjy"></div> -<input style="border: solid 1px black; height:32px; font-size:16px; " value="ABCgjy"></div>
diff --git a/third_party/WebKit/LayoutTests/http/tests/inspector/elements/styles/edit-css-with-source-url.html b/third_party/WebKit/LayoutTests/http/tests/inspector/elements/styles/edit-css-with-source-url.html index 6bda7f5..b7639653 100644 --- a/third_party/WebKit/LayoutTests/http/tests/inspector/elements/styles/edit-css-with-source-url.html +++ b/third_party/WebKit/LayoutTests/http/tests/inspector/elements/styles/edit-css-with-source-url.html
@@ -33,15 +33,15 @@ InspectorTest.addResult("Adding file system."); var fs = new InspectorTest.TestFileSystem(fileSystemPath); fs.root.addFile("foo.css", "#inspected {\n color: red;\n}\n"); + InspectorTest.addResult("Adding file system mapping."); + fs.addFileMapping("http://localhost:8000/inspector/elements/styles/", "/"); fs.reportCreated(fileSystemCreated); var uiSourceCode; function fileSystemCreated() { - InspectorTest.addResult("Adding file system mapping."); var fileSystemProjectId = WebInspector.FileSystemWorkspaceBinding.projectId(fileSystemPath); - WebInspector.fileSystemMapping.addFileMapping(fileSystemPath, "http://localhost:8000/inspector/elements/styles/", "/"); uiSourceCode = WebInspector.workspace.uiSourceCode(fileSystemProjectId, "foo.css"); InspectorTest.showUISourceCode(uiSourceCode, didShowScriptSource); } @@ -54,8 +54,10 @@ InspectorTest.evaluateInPage("loadStylesheet()"); } - function stylesheetLoaded() + function stylesheetLoaded(event) { + if (!event.data.sourceURL.includes("foo.css")) + return; InspectorTest.cssModel.removeEventListener(WebInspector.CSSStyleModel.Events.StyleSheetAdded, stylesheetLoaded); InspectorTest.addResult("Stylesheet loaded."); InspectorTest.selectNodeAndWaitForStyles("inspected", nodeSelected);
diff --git a/third_party/WebKit/LayoutTests/http/tests/inspector/search/sources-search-scope-in-files-expected.txt b/third_party/WebKit/LayoutTests/http/tests/inspector/search/sources-search-scope-in-files-expected.txt index c5d0178c..a2c9a3a 100644 --- a/third_party/WebKit/LayoutTests/http/tests/inspector/search/sources-search-scope-in-files-expected.txt +++ b/third_party/WebKit/LayoutTests/http/tests/inspector/search/sources-search-scope-in-files-expected.txt
@@ -1,7 +1,7 @@ Tests that ScriptSearchScope performs search across all sources correctly. See bug 41350. -Total uiSourceCodes: 15 +Total uiSourceCodes: 4 Running: testIgnoreCase Search result #1: uiSourceCode.uri = filesystem:/var/www/search.css
diff --git a/third_party/WebKit/LayoutTests/http/tests/inspector/workspace-test.js b/third_party/WebKit/LayoutTests/http/tests/inspector/workspace-test.js index b9b6468e..71103d2 100644 --- a/third_party/WebKit/LayoutTests/http/tests/inspector/workspace-test.js +++ b/third_party/WebKit/LayoutTests/http/tests/inspector/workspace-test.js
@@ -4,12 +4,14 @@ { if (InspectorTest.testFileSystemWorkspaceBinding) InspectorTest.testFileSystemWorkspaceBinding.dispose(); + if (InspectorTest.testNetworkMapping) + InspectorTest.testNetworkMapping.dispose(); WebInspector.fileSystemMapping.resetForTesting(); InspectorTest.testTargetManager = new WebInspector.TargetManager(); InspectorTest.testWorkspace = new WebInspector.Workspace(); InspectorTest.testFileSystemWorkspaceBinding = new WebInspector.FileSystemWorkspaceBinding(WebInspector.isolatedFileSystemManager, InspectorTest.testWorkspace); - InspectorTest.testNetworkMapping = new WebInspector.NetworkMapping(InspectorTest.testWorkspace, InspectorTest.testFileSystemWorkspaceBinding, WebInspector.fileSystemMapping); + InspectorTest.testNetworkMapping = new WebInspector.NetworkMapping(InspectorTest.testTargetManager, InspectorTest.testWorkspace, InspectorTest.testFileSystemWorkspaceBinding, WebInspector.fileSystemMapping); InspectorTest.testNetworkProjectManager = new WebInspector.NetworkProjectManager(InspectorTest.testTargetManager, InspectorTest.testWorkspace, InspectorTest.testNetworkMapping); InspectorTest.testDebuggerWorkspaceBinding = new WebInspector.DebuggerWorkspaceBinding(InspectorTest.testTargetManager, InspectorTest.testWorkspace, InspectorTest.testNetworkMapping); InspectorTest.testCSSWorkspaceBinding = new WebInspector.CSSWorkspaceBinding(InspectorTest.testTargetManager, InspectorTest.testWorkspace, InspectorTest.testNetworkMapping); @@ -37,9 +39,12 @@ { var MockTarget = function(name, connection, callback) { - WebInspector.Target.call(this, name, WebInspector.Target.Type.Page, connection, null, callback); + WebInspector.Target.call(this, InspectorTest.testTargetManager, name, WebInspector.Target.Type.Page, connection, null, callback); this.debuggerModel = debuggerModelConstructor ? new debuggerModelConstructor(this) : new WebInspector.DebuggerModel(this); - this.resourceTreeModel = WebInspector.targetManager.mainTarget().resourceTreeModel; + this.networkManager = new WebInspector.NetworkManager(this); + this.consoleModel = new WebInspector.ConsoleModel(this); + this.resourceTreeModel = new WebInspector.ResourceTreeModel(this); + this.resourceTreeModel._inspectedPageURL = InspectorTest.resourceTreeModel._inspectedPageURL; this.domModel = new WebInspector.DOMModel(this); this.cssModel = new WebInspector.CSSStyleModel(this); this.runtimeModel = new WebInspector.RuntimeModel(this);
diff --git a/third_party/WebKit/LayoutTests/inspector/file-system-mapping-overrides-expected.txt b/third_party/WebKit/LayoutTests/inspector/file-system-mapping-overrides-expected.txt new file mode 100644 index 0000000..2c0e9bc --- /dev/null +++ b/third_party/WebKit/LayoutTests/inspector/file-system-mapping-overrides-expected.txt
@@ -0,0 +1,15 @@ +Tests FileSystemMapping overrides + + +Running: testFileSystemClashDirectOrder +http://localhost:1234/right_url/file.txt + +Running: testFileSystemClashReversedOrder +http://localhost:1234/right_url/file.txt + +Running: testNetworkClashDirectOrder +right/file.txt + +Running: testNetworkClashReversedOrder +right/file.txt +
diff --git a/third_party/WebKit/LayoutTests/inspector/file-system-mapping-overrides.html b/third_party/WebKit/LayoutTests/inspector/file-system-mapping-overrides.html new file mode 100644 index 0000000..8c814e5d --- /dev/null +++ b/third_party/WebKit/LayoutTests/inspector/file-system-mapping-overrides.html
@@ -0,0 +1,62 @@ +<html> +<head> +<script src="../http/tests/inspector/inspector-test.js"></script> +<script> +function test() +{ + InspectorTest.runTestSuite([ + function testFileSystemClashDirectOrder(next) + { + var fileSystemMapping = new WebInspector.FileSystemMapping(); + fileSystemMapping.addFileSystem("//Source/devtools"); + + fileSystemMapping.addNonConfigurableFileMapping("//Source/devtools", "chrome-devtools://devtools/bundled/wrong_url", "/"); + fileSystemMapping.addFileMapping("//Source/devtools", "http://localhost:1234/right_url", "/"); + + InspectorTest.addResult(fileSystemMapping.urlForPath("//Source/devtools", "/file.txt")); + next(); + }, + + function testFileSystemClashReversedOrder(next) + { + var fileSystemMapping = new WebInspector.FileSystemMapping(); + fileSystemMapping.addFileSystem("//Source/devtools"); + + fileSystemMapping.addFileMapping("//Source/devtools", "http://localhost:1234/right_url", "/"); + fileSystemMapping.addNonConfigurableFileMapping("//Source/devtools", "chrome-devtools://devtools/wrong_url", "/"); + + InspectorTest.addResult(fileSystemMapping.urlForPath("//Source/devtools", "/file.txt")); + next(); + }, + + function testNetworkClashDirectOrder(next) + { + var fileSystemMapping = new WebInspector.FileSystemMapping(); + fileSystemMapping.addFileSystem("//Source/devtools"); + + fileSystemMapping.addNonConfigurableFileMapping("//Source/devtools", "http://localhost:1234/front_end", "/wrong"); + fileSystemMapping.addFileMapping("//Source/devtools", "http://localhost:1234/front_end", "/right"); + + InspectorTest.addResult(fileSystemMapping.fileForURL("http://localhost:1234/front_end/file.txt").filePath); + next(); + }, + + function testNetworkClashReversedOrder(next) + { + var fileSystemMapping = new WebInspector.FileSystemMapping(); + fileSystemMapping.addFileSystem("//Source/devtools"); + + fileSystemMapping.addFileMapping("//Source/devtools", "http://localhost:1234/front_end", "/right"); + fileSystemMapping.addNonConfigurableFileMapping("//Source/devtools", "http://localhost:1234/front_end", "/wrong"); + + InspectorTest.addResult(fileSystemMapping.fileForURL("http://localhost:1234/front_end/file.txt").filePath); + next(); + }, + ]); +} +</script> +</head> +<body onload="runTest()"> +<p>Tests FileSystemMapping overrides</p> +</body> +</html>
diff --git a/third_party/WebKit/LayoutTests/inspector/sources/debugger/file-system-project-mapping-expected.txt b/third_party/WebKit/LayoutTests/inspector/sources/debugger/file-system-project-mapping-expected.txt index 3b5a9fdb..3705223 100644 --- a/third_party/WebKit/LayoutTests/inspector/sources/debugger/file-system-project-mapping-expected.txt +++ b/third_party/WebKit/LayoutTests/inspector/sources/debugger/file-system-project-mapping-expected.txt
@@ -105,7 +105,11 @@ Adding file system. Workspace event: UISourceCodeAdded: filesystem:/var/www/html/foo.js. Workspace event: UISourceCodeAdded: filesystem:/var/www/html2/bar.js. +(suspend state changed: true) +(suspend state changed: false) UISourceCode uri to url mappings: filesystem:/var/www/html/foo.js -> http://localhost/h1/foo.js filesystem:/var/www/html2/bar.js -> http://localhost/h2/bar.js +(suspend state changed: true) +(suspend state changed: false)
diff --git a/third_party/WebKit/LayoutTests/inspector/sources/debugger/file-system-project-mapping.html b/third_party/WebKit/LayoutTests/inspector/sources/debugger/file-system-project-mapping.html index 5d73f7b..9e87005 100644 --- a/third_party/WebKit/LayoutTests/inspector/sources/debugger/file-system-project-mapping.html +++ b/third_party/WebKit/LayoutTests/inspector/sources/debugger/file-system-project-mapping.html
@@ -12,6 +12,7 @@ var resourceScriptMapping; var defaultScriptMapping; var fileSystemProjectId = WebInspector.FileSystemWorkspaceBinding.projectId("/var/www"); + WebInspector.networkMapping.dispose(); function createWorkspaceWithTarget() { @@ -21,6 +22,11 @@ defaultScriptMapping = entry._defaultMapping; } + function suspendStateChanged() + { + InspectorTest.addResult("(suspend state changed: " + InspectorTest.testTargetManager.allTargetsSuspended() + ")"); + } + function dumpFileSystemUISourceCodesMappings() { var uiSourceCodes = InspectorTest.testWorkspace.project(fileSystemProjectId).uiSourceCodes(); @@ -52,7 +58,6 @@ var uiSourceCode = InspectorTest.testWorkspace.uiSourceCode(fileSystemProjectId, "html/foo.js"); networkUISourceCode = InspectorTest.testWorkspace.uiSourceCode(WebInspector.NetworkProject.projectId(target, "http://localhost"), "html/foo.js"); - InspectorTest.override(WebInspector.SourcesPanel.prototype, "_suggestReload", function() { }); InspectorTest.addResult("Adding mapping between network and file system resources."); InspectorTest.testNetworkMapping.addMapping(networkUISourceCode, uiSourceCode); var setting = JSON.stringify(WebInspector.fileSystemMapping._fileSystemMappingSetting.get()); @@ -309,6 +314,7 @@ function testProjectBasedMapping(next) { createWorkspaceWithTarget(); + InspectorTest.testTargetManager.addEventListener(WebInspector.TargetManager.Events.SuspendStateChanged, suspendStateChanged); InspectorTest.addResult("Adding file system."); var fs = new InspectorTest.TestFileSystem("/var/www");
diff --git a/third_party/WebKit/LayoutTests/inspector/workspace-mapping.html b/third_party/WebKit/LayoutTests/inspector/workspace-mapping.html index cddb07d..fccbc7a 100644 --- a/third_party/WebKit/LayoutTests/inspector/workspace-mapping.html +++ b/third_party/WebKit/LayoutTests/inspector/workspace-mapping.html
@@ -42,7 +42,7 @@ fileSystemMapping.addFileMapping("/var/www", "http://localhost/", "/localhost/"); var workspace = new WebInspector.Workspace(); var fileSystemWorkspaceBinding = new WebInspector.FileSystemWorkspaceBinding(WebInspector.isolatedFileSystemManager, workspace); - var networkMapping = new WebInspector.NetworkMapping(workspace, fileSystemWorkspaceBinding, fileSystemMapping); + var networkMapping = new WebInspector.NetworkMapping(WebInspector.targetManager, workspace, fileSystemWorkspaceBinding, fileSystemMapping); function dumpHasMappingForURL(url) {
diff --git a/third_party/WebKit/LayoutTests/paint/clipath/clip-path-with-background-and-box-behind-expected.html b/third_party/WebKit/LayoutTests/paint/clipath/clip-path-with-background-and-box-behind-expected.html new file mode 100644 index 0000000..1a5bc64 --- /dev/null +++ b/third_party/WebKit/LayoutTests/paint/clipath/clip-path-with-background-and-box-behind-expected.html
@@ -0,0 +1,23 @@ +<!DOCTYPE html> +<style> +#boxpos { + position: absolute; + background-color: green; +} +.box { + position: relative; + -webkit-clip-path: url(#equitri); + height: 100px; + width: 100px; + background: red; +} +</style> +<div id="boxpos"> + <div class="box"></div> +</div> + +<svg id="tri"> + <clipPath id="equitri" clipPathUnits="objectBoundingBox"> + <polygon points=".5 0, 0 .86, 1 .86, .5 0" /> + </clipPath> +</svg>
diff --git a/third_party/WebKit/LayoutTests/paint/clipath/clip-path-with-background-and-box-behind.html b/third_party/WebKit/LayoutTests/paint/clipath/clip-path-with-background-and-box-behind.html new file mode 100644 index 0000000..04a9d6ab --- /dev/null +++ b/third_party/WebKit/LayoutTests/paint/clipath/clip-path-with-background-and-box-behind.html
@@ -0,0 +1,24 @@ +<!DOCTYPE html> +<style> +#boxpos { + position: absolute; + background-color: green; +} +.box { + will-change: transform; + position: relative; + -webkit-clip-path: url(#equitri); + height: 100px; + width: 100px; + background: red; +} +</style> +<div id="boxpos"> + <div class="box"></div> +</div> + +<svg id="tri"> + <clipPath id="equitri" clipPathUnits="objectBoundingBox"> + <polygon points=".5 0, 0 .86, 1 .86, .5 0" /> + </clipPath> +</svg>
diff --git a/third_party/WebKit/LayoutTests/paint/deprecatedpaintlayer/non-self-painting-layer-overrides-visibility-expected.html b/third_party/WebKit/LayoutTests/paint/paintlayer/non-self-painting-layer-overrides-visibility-expected.html similarity index 100% rename from third_party/WebKit/LayoutTests/paint/deprecatedpaintlayer/non-self-painting-layer-overrides-visibility-expected.html rename to third_party/WebKit/LayoutTests/paint/paintlayer/non-self-painting-layer-overrides-visibility-expected.html
diff --git a/third_party/WebKit/LayoutTests/paint/deprecatedpaintlayer/non-self-painting-layer-overrides-visibility.html b/third_party/WebKit/LayoutTests/paint/paintlayer/non-self-painting-layer-overrides-visibility.html similarity index 100% rename from third_party/WebKit/LayoutTests/paint/deprecatedpaintlayer/non-self-painting-layer-overrides-visibility.html rename to third_party/WebKit/LayoutTests/paint/paintlayer/non-self-painting-layer-overrides-visibility.html
diff --git a/third_party/WebKit/LayoutTests/platform/linux/fast/forms/text/text-font-height-mismatch-expected.png b/third_party/WebKit/LayoutTests/platform/linux/fast/forms/text/text-font-height-mismatch-expected.png deleted file mode 100644 index 3b9cf26d..0000000 --- a/third_party/WebKit/LayoutTests/platform/linux/fast/forms/text/text-font-height-mismatch-expected.png +++ /dev/null Binary files differ
diff --git a/third_party/WebKit/LayoutTests/platform/linux/fast/forms/text/text-font-height-mismatch-expected.txt b/third_party/WebKit/LayoutTests/platform/linux/fast/forms/text/text-font-height-mismatch-expected.txt deleted file mode 100644 index 97213e9..0000000 --- a/third_party/WebKit/LayoutTests/platform/linux/fast/forms/text/text-font-height-mismatch-expected.txt +++ /dev/null
@@ -1,22 +0,0 @@ -layer at (0,0) size 800x600 - LayoutView at (0,0) size 800x600 -layer at (0,0) size 800x104 - LayoutBlockFlow {HTML} at (0,0) size 800x104 - LayoutBlockFlow {BODY} at (8,16) size 784x72 - LayoutBlockFlow {P} at (0,0) size 784x20 - LayoutText {#text} at (0,0) size 254x19 - text run at (0,0) width 254: "Editable text should be centered vertically." - LayoutBlockFlow {P} at (0,36) size 784x36 - LayoutTextControl {INPUT} at (0,6) size 276x20 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (276,8) size 4x19 - text run at (276,8) width 4: " " - LayoutTextControl {INPUT} at (280,0) size 178x36 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (0,0) size 0x0 -layer at (9,60) size 274x16 scrollHeight 21 - LayoutBlockFlow {DIV} at (1,2) size 274x16 - LayoutText {#text} at (0,-6) size 80x27 - text run at (0,-6) width 80: "ABCgjy" -layer at (289,61) size 176x19 - LayoutBlockFlow {DIV} at (1,8.50) size 176x19 - LayoutText {#text} at (0,0) size 53x18 - text run at (0,0) width 53: "ABCgjy"
diff --git a/third_party/WebKit/LayoutTests/platform/mac-mavericks/fast/forms/text/text-font-height-mismatch-expected.png b/third_party/WebKit/LayoutTests/platform/mac-mavericks/fast/forms/text/text-font-height-mismatch-expected.png deleted file mode 100644 index c9e2eec0..0000000 --- a/third_party/WebKit/LayoutTests/platform/mac-mavericks/fast/forms/text/text-font-height-mismatch-expected.png +++ /dev/null Binary files differ
diff --git a/third_party/WebKit/LayoutTests/platform/mac-mavericks/fast/forms/text/text-font-height-mismatch-expected.txt b/third_party/WebKit/LayoutTests/platform/mac-mavericks/fast/forms/text/text-font-height-mismatch-expected.txt deleted file mode 100644 index 91b225134..0000000 --- a/third_party/WebKit/LayoutTests/platform/mac-mavericks/fast/forms/text/text-font-height-mismatch-expected.txt +++ /dev/null
@@ -1,22 +0,0 @@ -layer at (0,0) size 800x600 - LayoutView at (0,0) size 800x600 -layer at (0,0) size 800x102 - LayoutBlockFlow {HTML} at (0,0) size 800x102 - LayoutBlockFlow {BODY} at (8,16) size 784x70 - LayoutBlockFlow {P} at (0,0) size 784x18 - LayoutText {#text} at (0,0) size 274x18 - text run at (0,0) width 274: "Editable text should be centered vertically." - LayoutBlockFlow {P} at (0,34) size 784x36 - LayoutTextControl {INPUT} at (0,5) size 260x20 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (260,10) size 4x18 - text run at (260,10) width 4: " " - LayoutTextControl {INPUT} at (264,0) size 168x36 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (0,0) size 0x0 -layer at (10,57) size 256x16 scrollHeight 22 - LayoutBlockFlow {DIV} at (2,2) size 256x16 - LayoutText {#text} at (0,-6) size 82x28 - text run at (0,-6) width 82: "ABCgjy" -layer at (274,59) size 164x18 - LayoutBlockFlow {DIV} at (2,9) size 164x18 - LayoutText {#text} at (0,0) size 55x18 - text run at (0,0) width 55: "ABCgjy"
diff --git a/third_party/WebKit/LayoutTests/platform/mac/fast/forms/text/text-font-height-mismatch-expected.png b/third_party/WebKit/LayoutTests/platform/mac/fast/forms/text/text-font-height-mismatch-expected.png deleted file mode 100644 index c5e1ea8..0000000 --- a/third_party/WebKit/LayoutTests/platform/mac/fast/forms/text/text-font-height-mismatch-expected.png +++ /dev/null Binary files differ
diff --git a/third_party/WebKit/LayoutTests/platform/mac/fast/forms/text/text-font-height-mismatch-expected.txt b/third_party/WebKit/LayoutTests/platform/mac/fast/forms/text/text-font-height-mismatch-expected.txt deleted file mode 100644 index 28c7f2dba..0000000 --- a/third_party/WebKit/LayoutTests/platform/mac/fast/forms/text/text-font-height-mismatch-expected.txt +++ /dev/null
@@ -1,22 +0,0 @@ -layer at (0,0) size 800x600 - LayoutView at (0,0) size 800x600 -layer at (0,0) size 800x102 - LayoutBlockFlow {HTML} at (0,0) size 800x102 - LayoutBlockFlow {BODY} at (8,16) size 784x70 - LayoutBlockFlow {P} at (0,0) size 784x18 - LayoutText {#text} at (0,0) size 274x18 - text run at (0,0) width 274: "Editable text should be centered vertically." - LayoutBlockFlow {P} at (0,34) size 784x36 - LayoutTextControl {INPUT} at (0,5) size 274x20 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (274,10) size 4x18 - text run at (274,10) width 4: " " - LayoutTextControl {INPUT} at (278,0) size 171x36 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (0,0) size 0x0 -layer at (10,57) size 270x16 scrollHeight 22 - LayoutBlockFlow {DIV} at (2,2) size 270x16 - LayoutText {#text} at (0,-6) size 82x28 - text run at (0,-6) width 82: "ABCgjy" -layer at (288,59) size 167x18 - LayoutBlockFlow {DIV} at (2,9) size 167x18 - LayoutText {#text} at (0,0) size 55x18 - text run at (0,0) width 55: "ABCgjy"
diff --git a/third_party/WebKit/LayoutTests/platform/win-xp/fast/forms/text/text-font-height-mismatch-expected.png b/third_party/WebKit/LayoutTests/platform/win-xp/fast/forms/text/text-font-height-mismatch-expected.png deleted file mode 100644 index 3cfec07..0000000 --- a/third_party/WebKit/LayoutTests/platform/win-xp/fast/forms/text/text-font-height-mismatch-expected.png +++ /dev/null Binary files differ
diff --git a/third_party/WebKit/LayoutTests/platform/win-xp/fast/forms/text/text-font-height-mismatch-expected.txt b/third_party/WebKit/LayoutTests/platform/win-xp/fast/forms/text/text-font-height-mismatch-expected.txt deleted file mode 100644 index 654ac9f..0000000 --- a/third_party/WebKit/LayoutTests/platform/win-xp/fast/forms/text/text-font-height-mismatch-expected.txt +++ /dev/null
@@ -1,22 +0,0 @@ -layer at (0,0) size 800x600 - LayoutView at (0,0) size 800x600 -layer at (0,0) size 800x104 - LayoutBlockFlow {HTML} at (0,0) size 800x104 - LayoutBlockFlow {BODY} at (8,16) size 784x72 - LayoutBlockFlow {P} at (0,0) size 784x20 - LayoutText {#text} at (0,0) size 254x19 - text run at (0,0) width 254: "Editable text should be centered vertically." - LayoutBlockFlow {P} at (0,36) size 784x36 - LayoutTextControl {INPUT} at (0,6) size 275x20 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (275,8) size 4x19 - text run at (275,8) width 4: " " - LayoutTextControl {INPUT} at (279,0) size 178x36 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (0,0) size 0x0 -layer at (9,60) size 273x16 scrollHeight 21 - LayoutBlockFlow {DIV} at (1,2) size 273x16 - LayoutText {#text} at (0,-6) size 80x27 - text run at (0,-6) width 80: "ABCgjy" -layer at (288,61) size 176x19 - LayoutBlockFlow {DIV} at (1,8.50) size 176x19 - LayoutText {#text} at (0,0) size 53x18 - text run at (0,0) width 53: "ABCgjy"
diff --git a/third_party/WebKit/LayoutTests/platform/win/fast/forms/text/text-font-height-mismatch-expected.png b/third_party/WebKit/LayoutTests/platform/win/fast/forms/text/text-font-height-mismatch-expected.png deleted file mode 100644 index ae577479..0000000 --- a/third_party/WebKit/LayoutTests/platform/win/fast/forms/text/text-font-height-mismatch-expected.png +++ /dev/null Binary files differ
diff --git a/third_party/WebKit/LayoutTests/platform/win/fast/forms/text/text-font-height-mismatch-expected.txt b/third_party/WebKit/LayoutTests/platform/win/fast/forms/text/text-font-height-mismatch-expected.txt deleted file mode 100644 index 0e3cab4a..0000000 --- a/third_party/WebKit/LayoutTests/platform/win/fast/forms/text/text-font-height-mismatch-expected.txt +++ /dev/null
@@ -1,22 +0,0 @@ -layer at (0,0) size 800x600 - LayoutView at (0,0) size 800x600 -layer at (0,0) size 800x102 - LayoutBlockFlow {HTML} at (0,0) size 800x102 - LayoutBlockFlow {BODY} at (8,16) size 784x70 - LayoutBlockFlow {P} at (0,0) size 784x18 - LayoutText {#text} at (0,0) size 274x17 - text run at (0,0) width 274: "Editable text should be centered vertically." - LayoutBlockFlow {P} at (0,34) size 784x36 - LayoutTextControl {INPUT} at (0,5) size 294x20 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (294,9) size 4x17 - text run at (294,9) width 4: " " - LayoutTextControl {INPUT} at (298,0) size 197x36 [bgcolor=#FFFFFF] [border: (1px solid #000000)] - LayoutText {#text} at (0,0) size 0x0 -layer at (9,57) size 292x16 scrollHeight 21 - LayoutBlockFlow {DIV} at (1,2) size 292x16 - LayoutText {#text} at (0,-6) size 81x27 - text run at (0,-6) width 81: "ABCgjy" -layer at (307,59) size 195x18 - LayoutBlockFlow {DIV} at (1,9) size 195x18 - LayoutText {#text} at (0,0) size 54x17 - text run at (0,0) width 54: "ABCgjy"
diff --git a/third_party/WebKit/LayoutTests/svg/custom/use-nested-extern-href-non-existent-crash-expected.txt b/third_party/WebKit/LayoutTests/svg/custom/use-nested-extern-href-non-existent-crash-expected.txt new file mode 100644 index 0000000..c2541f4 --- /dev/null +++ b/third_party/WebKit/LayoutTests/svg/custom/use-nested-extern-href-non-existent-crash-expected.txt
@@ -0,0 +1 @@ +PASS if no crash.
diff --git a/third_party/WebKit/LayoutTests/svg/custom/use-nested-extern-href-non-existent-crash.html b/third_party/WebKit/LayoutTests/svg/custom/use-nested-extern-href-non-existent-crash.html new file mode 100644 index 0000000..5b5998e --- /dev/null +++ b/third_party/WebKit/LayoutTests/svg/custom/use-nested-extern-href-non-existent-crash.html
@@ -0,0 +1,15 @@ +<!DOCTYPE html> +<svg> + <use xlink:href="nonexistent.svg#nonexistent" id="a"/> + <use xlink:href="#a"/> + + <g id="b"> + <use xlink:href="nonexistent.svg#nonexistent"/> + </g> + <use xlink:href="#b"/> +</svg> +<p>PASS if no crash.</p> +<script> +if (window.testRunner) + testRunner.dumpAsText(); +</script>
diff --git a/third_party/WebKit/LayoutTests/webaudio/audiosource-onended-expected.txt b/third_party/WebKit/LayoutTests/webaudio/audiosource-onended-expected.txt new file mode 100644 index 0000000..3d364ff --- /dev/null +++ b/third_party/WebKit/LayoutTests/webaudio/audiosource-onended-expected.txt
@@ -0,0 +1,13 @@ +Test onended event listener + +On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE". + + +PASS AudioBufferSource.onended called when ended set directly. +PASS AudioBufferSource.onended called when using addEventListener. +PASS Oscillator.onended called when ended set directly. +PASS Oscillator.onended called when using addEventListener. +PASS successfullyParsed is true + +TEST COMPLETE +
diff --git a/third_party/WebKit/LayoutTests/webaudio/audiosource-onended.html b/third_party/WebKit/LayoutTests/webaudio/audiosource-onended.html new file mode 100644 index 0000000..f0f447e8 --- /dev/null +++ b/third_party/WebKit/LayoutTests/webaudio/audiosource-onended.html
@@ -0,0 +1,93 @@ +<!doctype html> +<html> + <head> + <title>Test Onended Event Listener</title> + <script src="../resources/js-test.js"></script> + <script src="resources/compatibility.js"></script> + <script src="resources/audio-testing.js"></script> + </head> + + <body> + <script> + description("Test onended event listener"); + window.jsTestIsAsync = true; + + var sampleRate = 44100; + var renderLengthSeconds = 1; + var renderLengthFrames = renderLengthSeconds * sampleRate; + + // Length of the source buffer. Anything less than the render length is fine. + var sourceBufferLengthFrames = renderLengthFrames / 8; + // When to stop the oscillator. Anything less than the render time is fine. + var stopTime = renderLengthSeconds / 8; + + var audit = Audit.createTaskRunner(); + + audit.defineTask("absn-set-onended", function (done) { + // Test that the onended event for an AudioBufferSourceNode is fired when it is set + // directly. + var context = new OfflineAudioContext(1, renderLengthFrames, sampleRate); + var buffer = context.createBuffer(1, sourceBufferLengthFrames, context.sampleRate); + var source = context.createBufferSource(); + source.buffer = buffer; + source.connect(context.destination); + source.onended = function (e) { + testPassed("AudioBufferSource.onended called when ended set directly."); + }; + source.start(); + context.startRendering().then(done); + }); + + audit.defineTask("absn-add-listener", function (done) { + // Test that the onended event for an AudioBufferSourceNode is fired when + // addEventListener is used to set the handler. + var context = new OfflineAudioContext(1, renderLengthFrames, sampleRate); + var buffer = context.createBuffer(1, sourceBufferLengthFrames, context.sampleRate); + var source = context.createBufferSource(); + source.buffer = buffer; + source.connect(context.destination); + source.addEventListener("ended", function (e) { + testPassed("AudioBufferSource.onended called when using addEventListener."); + }); + source.start(); + context.startRendering().then(done); + }); + + audit.defineTask("osc-set-onended", function (done) { + // Test that the onended event for an OscillatorNode is fired when it is set + // directly. + var context = new OfflineAudioContext(1, renderLengthFrames, sampleRate); + var source = context.createOscillator(); + source.connect(context.destination); + source.onended = function (e) { + testPassed("Oscillator.onended called when ended set directly."); + }; + source.start(); + source.stop(stopTime); + context.startRendering().then(done); + }); + + audit.defineTask("osc-add-listener", function (done) { + // Test that the onended event for an OscillatorNode is fired when + // addEventListener is used to set the handler. + var context = new OfflineAudioContext(1, renderLengthFrames, sampleRate); + var source = context.createOscillator(); + source.connect(context.destination); + source.addEventListener("ended", function (e) { + testPassed("Oscillator.onended called when using addEventListener."); + }); + source.start(); + source.stop(stopTime); + context.startRendering().then(done); + }); + + audit.defineTask("finish", function (done) { + finishJSTest(); + done(); + }); + + audit.runTasks(); + succesfullyParsed = true; + </script> + </body> +</html>
diff --git a/third_party/WebKit/Source/core/core.gypi b/third_party/WebKit/Source/core/core.gypi index b0ecb8e..2e2fbf9 100644 --- a/third_party/WebKit/Source/core/core.gypi +++ b/third_party/WebKit/Source/core/core.gypi
@@ -1906,18 +1906,6 @@ 'paint/ClipScope.h', 'paint/CompositingRecorder.cpp', 'paint/CompositingRecorder.h', - 'paint/PaintLayer.cpp', - 'paint/PaintLayerClipper.cpp', - 'paint/PaintLayerFilterInfo.cpp', - 'paint/PaintLayerFilterInfo.h', - 'paint/PaintLayerFragment.h', - 'paint/PaintLayerReflectionInfo.cpp', - 'paint/PaintLayerPainter.cpp', - 'paint/PaintLayerPainter.h', - 'paint/PaintLayerPaintingInfo.h', - 'paint/PaintLayerScrollableArea.cpp', - 'paint/PaintLayerStackingNode.cpp', - 'paint/PaintLayerStackingNodeIterator.cpp', 'paint/DetailsMarkerPainter.cpp', 'paint/DetailsMarkerPainter.h', 'paint/EllipsisBoxPainter.cpp', @@ -1973,6 +1961,18 @@ 'paint/ObjectPainter.cpp', 'paint/ObjectPainter.h', 'paint/PaintInfo.h', + 'paint/PaintLayer.cpp', + 'paint/PaintLayerClipper.cpp', + 'paint/PaintLayerFilterInfo.cpp', + 'paint/PaintLayerFilterInfo.h', + 'paint/PaintLayerFragment.h', + 'paint/PaintLayerReflectionInfo.cpp', + 'paint/PaintLayerPainter.cpp', + 'paint/PaintLayerPainter.h', + 'paint/PaintLayerPaintingInfo.h', + 'paint/PaintLayerScrollableArea.cpp', + 'paint/PaintLayerStackingNode.cpp', + 'paint/PaintLayerStackingNodeIterator.cpp', 'paint/PaintPhase.cpp', 'paint/PaintPhase.h', 'paint/PartPainter.cpp', @@ -3880,12 +3880,12 @@ 'page/PagePopupClientTest.cpp', 'page/PrintContextTest.cpp', 'page/scrolling/ScrollStateTest.cpp', - 'paint/PaintLayerPainterTest.cpp', 'paint/DisplayItemListPaintTest.cpp', 'paint/DisplayItemListPaintTest.h', 'paint/LayerClipRecorderTest.cpp', 'paint/LayoutObjectDrawingRecorderTest.cpp', 'paint/NinePieceImageGridTest.cpp', + 'paint/PaintLayerPainterTest.cpp', 'paint/TableCellPainterTest.cpp', 'paint/TextPainterTest.cpp', 'streams/ReadableStreamReaderTest.cpp',
diff --git a/third_party/WebKit/Source/core/html/ImageDocument.cpp b/third_party/WebKit/Source/core/html/ImageDocument.cpp index 3776a2b..6a16b3077 100644 --- a/third_party/WebKit/Source/core/html/ImageDocument.cpp +++ b/third_party/WebKit/Source/core/html/ImageDocument.cpp
@@ -156,8 +156,10 @@ { if (!isStopped() && document()->imageElement() && document()->cachedImage()) { ImageResource* cachedImage = document()->cachedImage(); + DocumentLoader* loader = document()->loader(); + cachedImage->setResponse(loader->response()); + cachedImage->setLoadFinishTime(loader->timing().responseEnd()); cachedImage->finish(); - cachedImage->setResponse(document()->frame()->loader().documentLoader()->response()); // Report the natural image size in the page title, regardless of zoom level. // At a zoom level of 1 the image is guaranteed to have an integer size.
diff --git a/third_party/WebKit/Source/core/html/ImageDocument.h b/third_party/WebKit/Source/core/html/ImageDocument.h index 73211d1e..4599ac4 100644 --- a/third_party/WebKit/Source/core/html/ImageDocument.h +++ b/third_party/WebKit/Source/core/html/ImageDocument.h
@@ -26,14 +26,14 @@ #define ImageDocument_h #include "core/html/HTMLDocument.h" +#include "core/html/HTMLImageElement.h" #include "wtf/RefPtr.h" namespace blink { class ImageResource; -class HTMLImageElement; -class ImageDocument final : public HTMLDocument { +class CORE_EXPORT ImageDocument final : public HTMLDocument { public: static PassRefPtrWillBeRawPtr<ImageDocument> create(const DocumentInit& initializer = DocumentInit()) {
diff --git a/third_party/WebKit/Source/core/layout/HitTestResult.cpp b/third_party/WebKit/Source/core/layout/HitTestResult.cpp index 6284c77..fcb509d 100644 --- a/third_party/WebKit/Source/core/layout/HitTestResult.cpp +++ b/third_party/WebKit/Source/core/layout/HitTestResult.cpp
@@ -336,20 +336,20 @@ if (!innerNodeOrImageMapImage) return KURL(); - LayoutObject* layoutObject = innerNodeOrImageMapImage->layoutObject(); - if (!(layoutObject && layoutObject->isImage())) - return KURL(); - AtomicString urlString; - if (isHTMLEmbedElement(*innerNodeOrImageMapImage) - || isHTMLImageElement(*innerNodeOrImageMapImage) - || isHTMLInputElement(*innerNodeOrImageMapImage) - || isHTMLObjectElement(*innerNodeOrImageMapImage) - || isSVGImageElement(*innerNodeOrImageMapImage)) { + // Always return a url for image elements and input elements with type=image, even if they + // don't have a LayoutImage (e.g. because the image didn't load and we are using an alt container). + // For other elements we don't create alt containers so ensure they contain a loaded image. + if (isHTMLImageElement(*innerNodeOrImageMapImage) + || (isHTMLInputElement(*innerNodeOrImageMapImage) && toHTMLInputElement(innerNodeOrImageMapImage)->isImage())) urlString = toElement(*innerNodeOrImageMapImage).imageSourceURL(); - } else { + else if ((innerNodeOrImageMapImage->layoutObject() && innerNodeOrImageMapImage->layoutObject()->isImage()) + && (isHTMLEmbedElement(*innerNodeOrImageMapImage) + || isHTMLObjectElement(*innerNodeOrImageMapImage) + || isSVGImageElement(*innerNodeOrImageMapImage))) + urlString = toElement(*innerNodeOrImageMapImage).imageSourceURL(); + if (urlString.isEmpty()) return KURL(); - } return innerNodeOrImageMapImage->document().completeURL(stripLeadingAndTrailingHTMLSpaces(urlString)); }
diff --git a/third_party/WebKit/Source/core/layout/LayoutBox.cpp b/third_party/WebKit/Source/core/layout/LayoutBox.cpp index e8f4a0e..22db7193 100644 --- a/third_party/WebKit/Source/core/layout/LayoutBox.cpp +++ b/third_party/WebKit/Source/core/layout/LayoutBox.cpp
@@ -1232,6 +1232,8 @@ // FIXME: Use rounded rect if border radius is present. if (style()->hasBorderRadius()) return false; + if (hasClipPath()) + return false; // FIXME: The background color clip is defined by the last layer. if (style()->backgroundLayers().next()) return false;
diff --git a/third_party/WebKit/Source/core/layout/LayoutObject.cpp b/third_party/WebKit/Source/core/layout/LayoutObject.cpp index 8e74d156..dabbee1 100644 --- a/third_party/WebKit/Source/core/layout/LayoutObject.cpp +++ b/third_party/WebKit/Source/core/layout/LayoutObject.cpp
@@ -76,6 +76,7 @@ #include "core/page/AutoscrollController.h" #include "core/page/Page.h" #include "core/paint/ObjectPainter.h" +#include "core/paint/PaintInfo.h" #include "core/paint/PaintLayer.h" #include "core/style/ContentData.h" #include "core/style/ShadowList.h" @@ -1434,6 +1435,29 @@ return invalidationReason; } +void LayoutObject::invalidatePaintIfNeededForSynchronizedPainting(const PaintInfo& paintInfo) +{ + ASSERT(RuntimeEnabledFeatures::slimmingPaintSynchronizedPaintingEnabled()); + ASSERT(document().lifecycle().state() == DocumentLifecycle::InPaint); + ASSERT(paintInfo.paintInvalidationState); + ASSERT(paintInfo.paintContainer()); + + DisplayItemList* displayItemList = paintInfo.context->displayItemList(); + if (displayItemList->clientHasCheckedPaintInvalidation(displayItemClient())) { + ASSERT(displayItemList->clientCacheIsValid(displayItemClient()) + == (invalidatePaintIfNeeded(*paintInfo.paintInvalidationState, *paintInfo.paintContainer()) == PaintInvalidationNone)); + return; + } + + PaintInvalidationReason reason = invalidatePaintIfNeeded(*paintInfo.paintInvalidationState, *paintInfo.paintContainer()); + clearPaintInvalidationState(*paintInfo.paintInvalidationState); + + if (reason == PaintInvalidationDelayedFull) + paintInfo.paintInvalidationState->pushDelayedPaintInvalidationTarget(*this); + + displayItemList->setClientHasCheckedPaintInvalidation(displayItemClient()); +} + PaintInvalidationReason LayoutObject::paintInvalidationReason(const LayoutBoxModelObject& paintInvalidationContainer, const LayoutRect& oldBounds, const LayoutPoint& oldPositionFromPaintInvalidationBacking, const LayoutRect& newBounds, const LayoutPoint& newPositionFromPaintInvalidationBacking) const
diff --git a/third_party/WebKit/Source/core/layout/LayoutObject.h b/third_party/WebKit/Source/core/layout/LayoutObject.h index 3bfe0c2..77815983 100644 --- a/third_party/WebKit/Source/core/layout/LayoutObject.h +++ b/third_party/WebKit/Source/core/layout/LayoutObject.h
@@ -1172,7 +1172,7 @@ ASSERT(RuntimeEnabledFeatures::slimmingPaintOffsetCachingEnabled()); return m_previousPositionFromPaintInvalidationBacking != uninitializedPaintOffset() && m_previousPositionFromPaintInvalidationBacking != newPaintOffset; } - void setPreviousPaintOffset(const LayoutPoint& paintOffset) const + void setPreviousPaintOffset(const LayoutPoint& paintOffset) { ASSERT(RuntimeEnabledFeatures::slimmingPaintOffsetCachingEnabled()); m_previousPositionFromPaintInvalidationBacking = paintOffset; @@ -1227,6 +1227,20 @@ // Called before anonymousChild.setStyle(). Override to set custom styles for the child. virtual void updateAnonymousChildStyle(const LayoutObject& anonymousChild, ComputedStyle& style) const { } + // Painters can use const methods only, except for these explicitly declared methods. + class MutableForPainting { + public: + void setPreviousPaintOffset(const LayoutPoint& paintOffset) { m_layoutObject.setPreviousPaintOffset(paintOffset); } + void invalidatePaintIfNeeded(const PaintInfo& paintInfo) { m_layoutObject.invalidatePaintIfNeededForSynchronizedPainting(paintInfo); } + + private: + friend class LayoutObject; + MutableForPainting(const LayoutObject& layoutObject) : m_layoutObject(const_cast<LayoutObject&>(layoutObject)) { } + + LayoutObject& m_layoutObject; + }; + MutableForPainting mutableForPainting() const { return MutableForPainting(*this); } + protected: enum LayoutObjectType { LayoutObjectBr, @@ -1358,6 +1372,7 @@ virtual void invalidatePaintOfSubtreesIfNeeded(PaintInvalidationState& childPaintInvalidationState); virtual PaintInvalidationReason invalidatePaintIfNeeded(PaintInvalidationState&, const LayoutBoxModelObject& paintInvalidationContainer); + void invalidatePaintIfNeededForSynchronizedPainting(const PaintInfo&); // When this object is invalidated for paint, this method is called to invalidate any DisplayItemClients // owned by this object, including the object itself, LayoutText/LayoutInline line boxes, etc., @@ -1722,8 +1737,7 @@ // This point does *not* account for composited scrolling. See adjustInvalidationRectForCompositedScrolling(). // For slimmingPaintOffsetCaching, this stores the previous paint offset. // TODO(wangxianzhu): Rename this to m_previousPaintOffset when we enable slimmingPaintOffsetCaching. - // TODO(wangxianzhu): Better mutation control for painting. - mutable LayoutPoint m_previousPositionFromPaintInvalidationBacking; + LayoutPoint m_previousPositionFromPaintInvalidationBacking; }; // FIXME: remove this once the layout object lifecycle ASSERTS are no longer hit.
diff --git a/third_party/WebKit/Source/core/layout/LayoutTextControlSingleLine.cpp b/third_party/WebKit/Source/core/layout/LayoutTextControlSingleLine.cpp index 160a666..775a5a37 100644 --- a/third_party/WebKit/Source/core/layout/LayoutTextControlSingleLine.cpp +++ b/third_party/WebKit/Source/core/layout/LayoutTextControlSingleLine.cpp
@@ -363,9 +363,8 @@ if (m_desiredInnerEditorLogicalHeight >= 0) textBlockStyle->setLogicalHeight(Length(m_desiredInnerEditorLogicalHeight, Fixed)); - // Do not allow line-height to be smaller than our default. - if (textBlockStyle->fontMetrics().lineSpacing() > lineHeight(true, HorizontalLine, PositionOfInteriorLineBoxes) && startStyle.height().isIntrinsicOrAuto()) + if (textBlockStyle->fontMetrics().lineSpacing() > lineHeight(true, HorizontalLine, PositionOfInteriorLineBoxes)) textBlockStyle->setLineHeight(ComputedStyle::initialLineHeight()); textBlockStyle->setDisplay(BLOCK);
diff --git a/third_party/WebKit/Source/core/layout/PaintInvalidationState.h b/third_party/WebKit/Source/core/layout/PaintInvalidationState.h index eb418f9..781d03d 100644 --- a/third_party/WebKit/Source/core/layout/PaintInvalidationState.h +++ b/third_party/WebKit/Source/core/layout/PaintInvalidationState.h
@@ -18,7 +18,7 @@ class LayoutView; class PaintInvalidationState { - STACK_ALLOCATED(); + ALLOW_ONLY_INLINE_ALLOCATION(); WTF_MAKE_NONCOPYABLE(PaintInvalidationState); public: PaintInvalidationState(PaintInvalidationState& next, LayoutBoxModelObject& layoutObject, const LayoutBoxModelObject& paintInvalidationContainer);
diff --git a/third_party/WebKit/Source/core/layout/compositing/CompositedLayerMapping.cpp b/third_party/WebKit/Source/core/layout/compositing/CompositedLayerMapping.cpp index 2d0b4b88..c947bb9 100644 --- a/third_party/WebKit/Source/core/layout/compositing/CompositedLayerMapping.cpp +++ b/third_party/WebKit/Source/core/layout/compositing/CompositedLayerMapping.cpp
@@ -1871,10 +1871,7 @@ if (!image->isBitmapImage()) return false; - // FIXME: We should be able to handle bitmap images using direct compositing - // no matter what image-orientation value. See crbug.com/502267 - if (imageLayoutObject->style()->respectImageOrientation() != RespectImageOrientation) - return true; + return true; } return false; @@ -1907,7 +1904,7 @@ return; // This is a no-op if the layer doesn't have an inner layer for the image. - m_graphicsLayer->setContentsToImage(image); + m_graphicsLayer->setContentsToImage(image, imageLayoutObject->shouldRespectImageOrientation()); m_graphicsLayer->setFilterQuality(layoutObject()->style()->imageRendering() == ImageRenderingPixelated ? kNone_SkFilterQuality : kLow_SkFilterQuality); @@ -2153,12 +2150,7 @@ PaintLayerPainter(*paintInfo.paintLayer).paintLayer(context, paintingInfo, paintLayerFlags); { ASSERT(context->displayItemList()); - if (!context->displayItemList()->displayItemConstructionIsDisabled()) { - if (context->displayItemList()->lastDisplayItemIsNoopBegin()) - context->displayItemList()->removeLastDisplayItem(); - else - context->displayItemList()->createAndAppend<EndClipDisplayItem>(*this, DisplayItem::clipTypeToEndClipType(DisplayItem::ClipLayerOverflowControls)); - } + context->displayItemList()->endItem<EndClipDisplayItem>(*this, DisplayItem::clipTypeToEndClipType(DisplayItem::ClipLayerOverflowControls)); } } }
diff --git a/third_party/WebKit/Source/core/page/DragController.cpp b/third_party/WebKit/Source/core/page/DragController.cpp index bef7db43..70493d6 100644 --- a/third_party/WebKit/Source/core/page/DragController.cpp +++ b/third_party/WebKit/Source/core/page/DragController.cpp
@@ -71,6 +71,7 @@ #include "core/layout/LayoutImage.h" #include "platform/DragImage.h" #include "platform/geometry/IntRect.h" +#include "platform/graphics/BitmapImage.h" #include "platform/graphics/Image.h" #include "platform/graphics/ImageOrientation.h" #include "platform/network/ResourceRequest.h" @@ -799,11 +800,16 @@ IntPoint origin; InterpolationQuality interpolationQuality = element->ensureComputedStyle()->imageRendering() == ImageRenderingPixelated ? InterpolationNone : InterpolationHigh; + RespectImageOrientationEnum shouldRespectImageOrientation = element->layoutObject() ? element->layoutObject()->shouldRespectImageOrientation() : DoNotRespectImageOrientation; + ImageOrientation orientation; + + if (shouldRespectImageOrientation == RespectImageOrientation && image->isBitmapImage()) + orientation = toBitmapImage(image)->currentFrameOrientation(); + if (image->size().height() * image->size().width() <= MaxOriginalImageArea - && (dragImage = DragImage::create(image, - element->layoutObject() ? element->layoutObject()->shouldRespectImageOrientation() : DoNotRespectImageOrientation, + && (dragImage = DragImage::create(image, shouldRespectImageOrientation, 1 /* deviceScaleFactor */, interpolationQuality, DragImageAlpha, - DragImage::clampedImageScale(*image, imageRect.size(), maxDragImageSize())))) { + DragImage::clampedImageScale(orientation.usesWidthAsHeight() ? image->size().transposedSize() : image->size(), imageRect.size(), maxDragImageSize())))) { IntSize originalSize = imageRect.size(); origin = imageRect.location();
diff --git a/third_party/WebKit/Source/core/paint/BlockPainter.cpp b/third_party/WebKit/Source/core/paint/BlockPainter.cpp index 2b80cc07..4c08b5e 100644 --- a/third_party/WebKit/Source/core/paint/BlockPainter.cpp +++ b/third_party/WebKit/Source/core/paint/BlockPainter.cpp
@@ -151,7 +151,7 @@ // Set previousPaintOffset here in case that m_layoutBlock paints nothing and no // LayoutObjectDrawingRecorder updates its previousPaintOffset. // TODO(wangxianzhu): Integrate paint offset checking into new paint invalidation. - m_layoutBlock.setPreviousPaintOffset(paintOffset); + m_layoutBlock.mutableForPainting().setPreviousPaintOffset(paintOffset); } const PaintPhase paintPhase = paintInfo.phase;
diff --git a/third_party/WebKit/Source/core/paint/BoxClipper.cpp b/third_party/WebKit/Source/core/paint/BoxClipper.cpp index 0e29877..e72609af 100644 --- a/third_party/WebKit/Source/core/paint/BoxClipper.cpp +++ b/third_party/WebKit/Source/core/paint/BoxClipper.cpp
@@ -67,12 +67,7 @@ ASSERT(m_box.hasControlClip() || (m_box.hasOverflowClip() && !m_box.layer()->isSelfPaintingLayer())); ASSERT(m_paintInfo.context->displayItemList()); - if (!m_paintInfo.context->displayItemList()->displayItemConstructionIsDisabled()) { - if (m_paintInfo.context->displayItemList()->lastDisplayItemIsNoopBegin()) - m_paintInfo.context->displayItemList()->removeLastDisplayItem(); - else - m_paintInfo.context->displayItemList()->createAndAppend<EndClipDisplayItem>(m_box, DisplayItem::clipTypeToEndClipType(m_clipType)); - } + m_paintInfo.context->displayItemList()->endItem<EndClipDisplayItem>(m_box, DisplayItem::clipTypeToEndClipType(m_clipType)); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/CompositingRecorder.cpp b/third_party/WebKit/Source/core/paint/CompositingRecorder.cpp index cc2b30a..dea972f 100644 --- a/third_party/WebKit/Source/core/paint/CompositingRecorder.cpp +++ b/third_party/WebKit/Source/core/paint/CompositingRecorder.cpp
@@ -36,12 +36,7 @@ void CompositingRecorder::endCompositing(GraphicsContext& graphicsContext, const DisplayItemClientWrapper& client) { ASSERT(graphicsContext.displayItemList()); - if (!graphicsContext.displayItemList()->displayItemConstructionIsDisabled()) { - if (graphicsContext.displayItemList()->lastDisplayItemIsNoopBegin()) - graphicsContext.displayItemList()->removeLastDisplayItem(); - else - graphicsContext.displayItemList()->createAndAppend<EndCompositingDisplayItem>(client); - } + graphicsContext.displayItemList()->endItem<EndCompositingDisplayItem>(client); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/FilterPainter.cpp b/third_party/WebKit/Source/core/paint/FilterPainter.cpp index b38d952..ddd978b 100644 --- a/third_party/WebKit/Source/core/paint/FilterPainter.cpp +++ b/third_party/WebKit/Source/core/paint/FilterPainter.cpp
@@ -80,12 +80,7 @@ return; ASSERT(m_context->displayItemList()); - if (!m_context->displayItemList()->displayItemConstructionIsDisabled()) { - if (m_context->displayItemList()->lastDisplayItemIsNoopBegin()) - m_context->displayItemList()->removeLastDisplayItem(); - else - m_context->displayItemList()->createAndAppend<EndFilterDisplayItem>(*m_layoutObject); - } + m_context->displayItemList()->endItem<EndFilterDisplayItem>(*m_layoutObject); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/FloatClipRecorder.cpp b/third_party/WebKit/Source/core/paint/FloatClipRecorder.cpp index 8e2a9b2..22c079cb 100644 --- a/third_party/WebKit/Source/core/paint/FloatClipRecorder.cpp +++ b/third_party/WebKit/Source/core/paint/FloatClipRecorder.cpp
@@ -26,12 +26,7 @@ { DisplayItem::Type endType = DisplayItem::floatClipTypeToEndFloatClipType(m_clipType); ASSERT(m_context.displayItemList()); - if (!m_context.displayItemList()->displayItemConstructionIsDisabled()) { - if (m_context.displayItemList()->lastDisplayItemIsNoopBegin()) - m_context.displayItemList()->removeLastDisplayItem(); - else - m_context.displayItemList()->createAndAppend<EndFloatClipDisplayItem>(m_client, endType); - } + m_context.displayItemList()->endItem<EndFloatClipDisplayItem>(m_client, endType); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/InlinePainter.cpp b/third_party/WebKit/Source/core/paint/InlinePainter.cpp index cad1e83..07874e78 100644 --- a/third_party/WebKit/Source/core/paint/InlinePainter.cpp +++ b/third_party/WebKit/Source/core/paint/InlinePainter.cpp
@@ -29,7 +29,7 @@ // Set previousPaintOffset here in case that m_layoutInline paints nothing and no // LayoutObjectDrawingRecorder updates its previousPaintOffset. // TODO(wangxianzhu): Integrate paint offset checking into new paint invalidation. - m_layoutInline.setPreviousPaintOffset(paintOffset); + m_layoutInline.mutableForPainting().setPreviousPaintOffset(paintOffset); } // FIXME: When Skia supports annotation rect covering (https://code.google.com/p/skia/issues/detail?id=3872),
diff --git a/third_party/WebKit/Source/core/paint/LayerClipRecorder.cpp b/third_party/WebKit/Source/core/paint/LayerClipRecorder.cpp index 5743fe4..afd686d 100644 --- a/third_party/WebKit/Source/core/paint/LayerClipRecorder.cpp +++ b/third_party/WebKit/Source/core/paint/LayerClipRecorder.cpp
@@ -76,12 +76,7 @@ LayerClipRecorder::~LayerClipRecorder() { ASSERT(m_graphicsContext.displayItemList()); - if (!m_graphicsContext.displayItemList()->displayItemConstructionIsDisabled()) { - if (m_graphicsContext.displayItemList()->lastDisplayItemIsNoopBegin()) - m_graphicsContext.displayItemList()->removeLastDisplayItem(); - else - m_graphicsContext.displayItemList()->createAndAppend<EndClipDisplayItem>(m_layoutObject, DisplayItem::clipTypeToEndClipType(m_clipType)); - } + m_graphicsContext.displayItemList()->endItem<EndClipDisplayItem>(m_layoutObject, DisplayItem::clipTypeToEndClipType(m_clipType)); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/LayerFixedPositionRecorder.cpp b/third_party/WebKit/Source/core/paint/LayerFixedPositionRecorder.cpp index 7581a1f5..41c086e 100644 --- a/third_party/WebKit/Source/core/paint/LayerFixedPositionRecorder.cpp +++ b/third_party/WebKit/Source/core/paint/LayerFixedPositionRecorder.cpp
@@ -41,22 +41,11 @@ if (!RuntimeEnabledFeatures::slimmingPaintV2Enabled()) return; - if (m_graphicsContext.displayItemList()->displayItemConstructionIsDisabled()) - return; - - if (m_isFixedPositionContainer) { - if (m_graphicsContext.displayItemList()->lastDisplayItemIsNoopBegin()) - m_graphicsContext.displayItemList()->removeLastDisplayItem(); - else - m_graphicsContext.displayItemList()->createAndAppend<EndFixedPositionDisplayItem>(m_layoutObject); - } - - if (m_isFixedPosition) { - if (m_graphicsContext.displayItemList()->lastDisplayItemIsNoopBegin()) - m_graphicsContext.displayItemList()->removeLastDisplayItem(); - else - m_graphicsContext.displayItemList()->createAndAppend<EndFixedPositionDisplayItem>(m_layoutObject); - } + ASSERT(m_graphicsContext.displayItemList()); + if (m_isFixedPositionContainer) + m_graphicsContext.displayItemList()->endItem<EndFixedPositionDisplayItem>(m_layoutObject); + if (m_isFixedPosition) + m_graphicsContext.displayItemList()->endItem<EndFixedPositionDisplayItem>(m_layoutObject); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/LayoutObjectDrawingRecorder.h b/third_party/WebKit/Source/core/paint/LayoutObjectDrawingRecorder.h index f9c754d..394bd7c 100644 --- a/third_party/WebKit/Source/core/paint/LayoutObjectDrawingRecorder.h +++ b/third_party/WebKit/Source/core/paint/LayoutObjectDrawingRecorder.h
@@ -72,7 +72,7 @@ else ASSERT(!displayItemList->paintOffsetWasInvalidated(layoutObject.displayItemClient()) || !displayItemList->clientCacheIsValid(layoutObject.displayItemClient())); - layoutObject.setPreviousPaintOffset(paintOffset); + layoutObject.mutableForPainting().setPreviousPaintOffset(paintOffset); } Optional<DisplayItemCacheSkipper> m_cacheSkipper;
diff --git a/third_party/WebKit/Source/core/paint/PaintInfo.h b/third_party/WebKit/Source/core/paint/PaintInfo.h index 9b2f138..2172564 100644 --- a/third_party/WebKit/Source/core/paint/PaintInfo.h +++ b/third_party/WebKit/Source/core/paint/PaintInfo.h
@@ -55,6 +55,7 @@ , rect(newRect) , phase(newPhase) , paintingRoot(newPaintingRoot) + , paintInvalidationState(nullptr) , m_paintContainer(newPaintContainer) , m_paintFlags(paintFlags) , m_globalPaintFlags(globalPaintFlags) @@ -109,6 +110,8 @@ IntRect rect; // dirty rect used for culling non-intersecting layoutObjects PaintPhase phase; LayoutObject* paintingRoot; // used to draw just one element and its visual kids + // TODO(wangxianzhu): Populate it. + PaintInvalidationState* paintInvalidationState; private: const LayoutBoxModelObject* m_paintContainer; // the box model object that originates the current painting
diff --git a/third_party/WebKit/Source/core/paint/PaintLayerScrollableArea.cpp b/third_party/WebKit/Source/core/paint/PaintLayerScrollableArea.cpp index dd79e0c..d3cc3ff 100644 --- a/third_party/WebKit/Source/core/paint/PaintLayerScrollableArea.cpp +++ b/third_party/WebKit/Source/core/paint/PaintLayerScrollableArea.cpp
@@ -1020,7 +1020,19 @@ bool shouldUseCustom = actualLayoutObject.isBox() && actualLayoutObject.styleRef().hasPseudoStyle(SCROLLBAR); bool hasAnyScrollbar = hasScrollbar(); bool hasCustom = (hasHorizontalScrollbar() && horizontalScrollbar()->isCustomScrollbar()) || (hasVerticalScrollbar() && verticalScrollbar()->isCustomScrollbar()); - return hasAnyScrollbar && (shouldUseCustom != hasCustom); + bool didCustomScrollbarOwnerChanged = false; + + if (hasHorizontalScrollbar() && horizontalScrollbar()->isCustomScrollbar()) { + if (actualLayoutObject != toLayoutScrollbar(horizontalScrollbar())->owningLayoutObject()) + didCustomScrollbarOwnerChanged = true; + } + + if (hasVerticalScrollbar() && verticalScrollbar()->isCustomScrollbar()) { + if (actualLayoutObject != toLayoutScrollbar(verticalScrollbar())->owningLayoutObject()) + didCustomScrollbarOwnerChanged = true; + } + + return hasAnyScrollbar && ((shouldUseCustom != hasCustom) || (shouldUseCustom && didCustomScrollbarOwnerChanged)); } void PaintLayerScrollableArea::setHasHorizontalScrollbar(bool hasScrollbar)
diff --git a/third_party/WebKit/Source/core/paint/ReplacedPainter.cpp b/third_party/WebKit/Source/core/paint/ReplacedPainter.cpp index f6a2a562..e9c025a 100644 --- a/third_party/WebKit/Source/core/paint/ReplacedPainter.cpp +++ b/third_party/WebKit/Source/core/paint/ReplacedPainter.cpp
@@ -84,12 +84,12 @@ // The selection tint never gets clipped by border-radius rounding, since we want it to run right up to the edges of // surrounding content. bool drawSelectionTint = paintInfo.phase == PaintPhaseForeground && m_layoutReplaced.selectionState() != SelectionNone && !paintInfo.isPrinting(); - if (drawSelectionTint && !LayoutObjectDrawingRecorder::useCachedDrawingIfPossible(*paintInfo.context, m_layoutReplaced, DisplayItem::SelectionTint, paintOffset)) { + if (drawSelectionTint && !LayoutObjectDrawingRecorder::useCachedDrawingIfPossible(*paintInfo.context, m_layoutReplaced, DisplayItem::SelectionTint, adjustedPaintOffset)) { LayoutRect selectionPaintingRect = m_layoutReplaced.localSelectionRect(); selectionPaintingRect.moveBy(adjustedPaintOffset); IntRect selectionPaintingIntRect = pixelSnappedIntRect(selectionPaintingRect); - LayoutObjectDrawingRecorder drawingRecorder(*paintInfo.context, m_layoutReplaced, DisplayItem::SelectionTint, selectionPaintingIntRect, paintOffset); + LayoutObjectDrawingRecorder drawingRecorder(*paintInfo.context, m_layoutReplaced, DisplayItem::SelectionTint, selectionPaintingIntRect, adjustedPaintOffset); paintInfo.context->fillRect(selectionPaintingIntRect, m_layoutReplaced.selectionBackgroundColor()); } }
diff --git a/third_party/WebKit/Source/core/paint/RoundedInnerRectClipper.cpp b/third_party/WebKit/Source/core/paint/RoundedInnerRectClipper.cpp index fb42cf1e..e7b454d 100644 --- a/third_party/WebKit/Source/core/paint/RoundedInnerRectClipper.cpp +++ b/third_party/WebKit/Source/core/paint/RoundedInnerRectClipper.cpp
@@ -64,12 +64,7 @@ DisplayItem::Type endType = DisplayItem::clipTypeToEndClipType(m_clipType); if (m_useDisplayItemList) { ASSERT(m_paintInfo.context->displayItemList()); - if (!m_paintInfo.context->displayItemList()->displayItemConstructionIsDisabled()) { - if (m_paintInfo.context->displayItemList()->lastDisplayItemIsNoopBegin()) - m_paintInfo.context->displayItemList()->removeLastDisplayItem(); - else - m_paintInfo.context->displayItemList()->createAndAppend<EndClipDisplayItem>(m_layoutObject, endType); - } + m_paintInfo.context->displayItemList()->endItem<EndClipDisplayItem>(m_layoutObject, endType); } else { EndClipDisplayItem endClipDisplayItem(m_layoutObject, endType); endClipDisplayItem.replay(*m_paintInfo.context);
diff --git a/third_party/WebKit/Source/core/paint/SVGClipPainter.cpp b/third_party/WebKit/Source/core/paint/SVGClipPainter.cpp index 19922f8a..a34eb298 100644 --- a/third_party/WebKit/Source/core/paint/SVGClipPainter.cpp +++ b/third_party/WebKit/Source/core/paint/SVGClipPainter.cpp
@@ -101,12 +101,7 @@ case ClipperAppliedPath: // Path-only clipping, no layers to restore but we need to emit an end to the clip path display item. ASSERT(context->displayItemList()); - if (!context->displayItemList()->displayItemConstructionIsDisabled()) { - if (context->displayItemList()->lastDisplayItemIsNoopBegin()) - context->displayItemList()->removeLastDisplayItem(); - else - context->displayItemList()->createAndAppend<EndClipPathDisplayItem>(target); - } + context->displayItemList()->endItem<EndClipPathDisplayItem>(target); break; case ClipperAppliedMask: // Transfer content -> clip mask (SrcIn)
diff --git a/third_party/WebKit/Source/core/paint/SVGMaskPainter.cpp b/third_party/WebKit/Source/core/paint/SVGMaskPainter.cpp index da28c2c..f8494be 100644 --- a/third_party/WebKit/Source/core/paint/SVGMaskPainter.cpp +++ b/third_party/WebKit/Source/core/paint/SVGMaskPainter.cpp
@@ -51,12 +51,7 @@ } ASSERT(context->displayItemList()); - if (!context->displayItemList()->displayItemConstructionIsDisabled()) { - if (context->displayItemList()->lastDisplayItemIsNoopBegin()) - context->displayItemList()->removeLastDisplayItem(); - else - context->displayItemList()->createAndAppend<EndCompositingDisplayItem>(object); - } + context->displayItemList()->endItem<EndCompositingDisplayItem>(object); } void SVGMaskPainter::drawMaskForLayoutObject(GraphicsContext* context, const LayoutObject& layoutObject, const FloatRect& targetBoundingBox, const FloatRect& targetPaintInvalidationRect)
diff --git a/third_party/WebKit/Source/core/paint/ScrollRecorder.cpp b/third_party/WebKit/Source/core/paint/ScrollRecorder.cpp index 7900c2c..323b89f 100644 --- a/third_party/WebKit/Source/core/paint/ScrollRecorder.cpp +++ b/third_party/WebKit/Source/core/paint/ScrollRecorder.cpp
@@ -25,12 +25,7 @@ ScrollRecorder::~ScrollRecorder() { ASSERT(m_context.displayItemList()); - if (!m_context.displayItemList()->displayItemConstructionIsDisabled()) { - if (m_context.displayItemList()->lastDisplayItemIsNoopBegin()) - m_context.displayItemList()->removeLastDisplayItem(); - else - m_context.displayItemList()->createAndAppend<EndScrollDisplayItem>(m_client, DisplayItem::scrollTypeToEndScrollType(m_beginItemType)); - } + m_context.displayItemList()->endItem<EndScrollDisplayItem>(m_client, DisplayItem::scrollTypeToEndScrollType(m_beginItemType)); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/TableCellPainter.cpp b/third_party/WebKit/Source/core/paint/TableCellPainter.cpp index 08fad67..7e5a0d7 100644 --- a/third_party/WebKit/Source/core/paint/TableCellPainter.cpp +++ b/third_party/WebKit/Source/core/paint/TableCellPainter.cpp
@@ -96,6 +96,7 @@ int rightWidth = rightBorderValue.width(); LayoutRect paintRect = paintBounds(paintOffset, AddOffsetFromParent); + LayoutPoint adjustedPaintOffset = paintRect.location(); IntRect borderRect = pixelSnappedIntRect(paintRect.x() - leftWidth / 2, paintRect.y() - topWidth / 2, paintRect.width() + leftWidth / 2 + (rightWidth + 1) / 2, @@ -105,10 +106,10 @@ return; GraphicsContext* graphicsContext = paintInfo.context; - if (LayoutObjectDrawingRecorder::useCachedDrawingIfPossible(*graphicsContext, m_layoutTableCell, static_cast<DisplayItem::Type>(displayItemType), paintOffset)) + if (LayoutObjectDrawingRecorder::useCachedDrawingIfPossible(*graphicsContext, m_layoutTableCell, static_cast<DisplayItem::Type>(displayItemType), adjustedPaintOffset)) return; - LayoutObjectDrawingRecorder recorder(*graphicsContext, m_layoutTableCell, static_cast<DisplayItem::Type>(displayItemType), borderRect, paintOffset); + LayoutObjectDrawingRecorder recorder(*graphicsContext, m_layoutTableCell, static_cast<DisplayItem::Type>(displayItemType), borderRect, adjustedPaintOffset); Color cellColor = m_layoutTableCell.resolveColor(CSSPropertyColor); // We never paint diagonals at the joins. We simply let the border with the highest @@ -131,7 +132,7 @@ } } -void TableCellPainter::paintBackgroundsBehindCell(const PaintInfo& paintInfo, const LayoutPoint& paintOffset, const LayoutObject* backgroundObject) +void TableCellPainter::paintBackgroundsBehindCell(const PaintInfo& paintInfo, const LayoutPoint& paintOffset, const LayoutObject* backgroundObject, DisplayItem::Type type) { if (!paintInfo.shouldPaintWithinRoot(&m_layoutTableCell)) return; @@ -146,11 +147,21 @@ if (!tableElt->collapseBorders() && m_layoutTableCell.style()->emptyCells() == HIDE && !m_layoutTableCell.firstChild()) return; - Color c = backgroundObject->resolveColor(CSSPropertyBackgroundColor); - const FillLayer& bgLayer = backgroundObject->style()->backgroundLayers(); - LayoutRect paintRect = paintBounds(paintOffset, backgroundObject != &m_layoutTableCell ? AddOffsetFromParent : DoNotAddOffsetFromParent); + // Record drawing only if the cell is painting background from containers. + Optional<LayoutObjectDrawingRecorder> recorder; + if (backgroundObject != &m_layoutTableCell) { + LayoutPoint adjustedPaintOffset = paintRect.location(); + if (LayoutObjectDrawingRecorder::useCachedDrawingIfPossible(*paintInfo.context, m_layoutTableCell, type, adjustedPaintOffset)) + return; + recorder.emplace(*paintInfo.context, m_layoutTableCell, type, paintRect, adjustedPaintOffset); + } else { + ASSERT(paintRect.location() == paintOffset); + } + + Color c = backgroundObject->resolveColor(CSSPropertyBackgroundColor); + const FillLayer& bgLayer = backgroundObject->style()->backgroundLayers(); if (bgLayer.hasImage() || c.alpha()) { // We have to clip here because the background would paint // on top of the borders otherwise. This only matters for cells and rows. @@ -191,7 +202,7 @@ BoxPainter::paintBoxShadow(paintInfo, paintRect, m_layoutTableCell.styleRef(), Normal); // Paint our cell background. - paintBackgroundsBehindCell(paintInfo, paintOffset, &m_layoutTableCell); + paintBackgroundsBehindCell(paintInfo, paintOffset, &m_layoutTableCell, DisplayItem::BoxDecorationBackground); BoxPainter::paintBoxShadow(paintInfo, paintRect, m_layoutTableCell.styleRef(), Inset);
diff --git a/third_party/WebKit/Source/core/paint/TableCellPainter.h b/third_party/WebKit/Source/core/paint/TableCellPainter.h index bc88c7d..ff3111b 100644 --- a/third_party/WebKit/Source/core/paint/TableCellPainter.h +++ b/third_party/WebKit/Source/core/paint/TableCellPainter.h
@@ -6,6 +6,7 @@ #define TableCellPainter_h #include "core/style/CollapsedBorderValue.h" +#include "platform/graphics/paint/DisplayItem.h" #include "wtf/Allocator.h" namespace blink { @@ -26,7 +27,7 @@ void paint(const PaintInfo&, const LayoutPoint&); void paintCollapsedBorders(const PaintInfo&, const LayoutPoint&, const CollapsedBorderValue&); - void paintBackgroundsBehindCell(const PaintInfo&, const LayoutPoint&, const LayoutObject* backgroundObject); + void paintBackgroundsBehindCell(const PaintInfo&, const LayoutPoint&, const LayoutObject* backgroundObject, DisplayItem::Type); void paintBoxDecorationBackground(const PaintInfo&, const LayoutPoint& paintOffset); void paintMask(const PaintInfo&, const LayoutPoint& paintOffset);
diff --git a/third_party/WebKit/Source/core/paint/TableCellPainterTest.cpp b/third_party/WebKit/Source/core/paint/TableCellPainterTest.cpp index be4b6128..5fce23a 100644 --- a/third_party/WebKit/Source/core/paint/TableCellPainterTest.cpp +++ b/third_party/WebKit/Source/core/paint/TableCellPainterTest.cpp
@@ -38,7 +38,7 @@ EXPECT_DISPLAY_LIST(rootDisplayItemList().displayItems(), 2, TestDisplayItem(layoutView, DisplayItem::BoxDecorationBackground), - TestDisplayItem(cell1, DisplayItem::TableCellBackgroundFromContainers)); + TestDisplayItem(cell1, DisplayItem::TableCellBackgroundFromRow)); PaintLayerPaintingInfo paintingInfo1(&rootLayer, LayoutRect(0, 300, 200, 200), GlobalPaintNormalPhase, LayoutSize()); PaintLayerPainter(rootLayer).paintLayerContents(&context, paintingInfo1, PaintLayerPaintingCompositingAllPhases); @@ -46,7 +46,7 @@ EXPECT_DISPLAY_LIST(rootDisplayItemList().displayItems(), 2, TestDisplayItem(layoutView, DisplayItem::BoxDecorationBackground), - TestDisplayItem(cell2, DisplayItem::TableCellBackgroundFromContainers)); + TestDisplayItem(cell2, DisplayItem::TableCellBackgroundFromRow)); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/TableRowPainter.cpp b/third_party/WebKit/Source/core/paint/TableRowPainter.cpp index d7d605bb..630ae29 100644 --- a/third_party/WebKit/Source/core/paint/TableRowPainter.cpp +++ b/third_party/WebKit/Source/core/paint/TableRowPainter.cpp
@@ -22,13 +22,8 @@ for (LayoutTableCell* cell = m_layoutTableRow.firstCell(); cell; cell = cell->nextCell()) { // Paint the row background behind the cell. if (paintInfo.phase == PaintPhaseBlockBackground || paintInfo.phase == PaintPhaseChildBlockBackground) { - if (m_layoutTableRow.hasBackground()) { - TableCellPainter tableCellPainter(*cell); - if (!LayoutObjectDrawingRecorder::useCachedDrawingIfPossible(*paintInfo.context, *cell, DisplayItem::TableCellBackgroundFromSelfPaintingRow, paintOffset)) { - LayoutObjectDrawingRecorder recorder(*paintInfo.context, *cell, DisplayItem::TableCellBackgroundFromSelfPaintingRow, tableCellPainter.paintBounds(paintOffset, TableCellPainter::AddOffsetFromParent), paintOffset); - tableCellPainter.paintBackgroundsBehindCell(paintInfo, paintOffset, &m_layoutTableRow); - } - } + if (m_layoutTableRow.hasBackground()) + TableCellPainter(*cell).paintBackgroundsBehindCell(paintInfo, paintOffset, &m_layoutTableRow, DisplayItem::TableCellBackgroundFromRow); } if (!cell->hasSelfPaintingLayer())
diff --git a/third_party/WebKit/Source/core/paint/TableSectionPainter.cpp b/third_party/WebKit/Source/core/paint/TableSectionPainter.cpp index 4bc637671..3f9bd94 100644 --- a/third_party/WebKit/Source/core/paint/TableSectionPainter.cpp +++ b/third_party/WebKit/Source/core/paint/TableSectionPainter.cpp
@@ -179,36 +179,26 @@ // the column group, column, row group, row, and then the cell. LayoutTableCol* column = m_layoutTableSection.table()->colElement(cell.col()); LayoutTableCol* columnGroup = column ? column->enclosingColumnGroup() : 0; + TableCellPainter tableCellPainter(cell); - bool columnHasBackground = column && column->hasBackground(); - bool columnGroupHasBackground = columnGroup && columnGroup->hasBackground(); - bool sectionHasBackground = m_layoutTableSection.hasBackground(); - bool rowHasBackground = row->hasBackground(); + // Column groups and columns first. + // FIXME: Columns and column groups do not currently support opacity, and they are being painted "too late" in + // the stack, since we have already opened a transparency layer (potentially) for the table row group. + // Note that we deliberately ignore whether or not the cell has a layer, since these backgrounds paint "behind" the + // cell. + if (columnGroup && columnGroup->hasBackground()) + tableCellPainter.paintBackgroundsBehindCell(paintInfo, cellPoint, columnGroup, DisplayItem::TableCellBackgroundFromColumnGroup); + if (column && column->hasBackground()) + tableCellPainter.paintBackgroundsBehindCell(paintInfo, cellPoint, column, DisplayItem::TableCellBackgroundFromColumn); - if (columnHasBackground || columnGroupHasBackground || sectionHasBackground || rowHasBackground) { - TableCellPainter tableCellPainter(cell); - if (!LayoutObjectDrawingRecorder::useCachedDrawingIfPossible(*paintInfo.context, cell, DisplayItem::TableCellBackgroundFromContainers, paintOffset)) { - LayoutObjectDrawingRecorder recorder(*paintInfo.context, cell, DisplayItem::TableCellBackgroundFromContainers, tableCellPainter.paintBounds(cellPoint, TableCellPainter::AddOffsetFromParent), paintOffset); - // Column groups and columns first. - // FIXME: Columns and column groups do not currently support opacity, and they are being painted "too late" in - // the stack, since we have already opened a transparency layer (potentially) for the table row group. - // Note that we deliberately ignore whether or not the cell has a layer, since these backgrounds paint "behind" the - // cell. - if (columnGroupHasBackground) - tableCellPainter.paintBackgroundsBehindCell(paintInfo, cellPoint, columnGroup); - if (columnHasBackground) - tableCellPainter.paintBackgroundsBehindCell(paintInfo, cellPoint, column); + // Paint the row group next. + if (m_layoutTableSection.hasBackground()) + tableCellPainter.paintBackgroundsBehindCell(paintInfo, cellPoint, &m_layoutTableSection, DisplayItem::TableCellBackgroundFromSection); - // Paint the row group next. - if (sectionHasBackground) - tableCellPainter.paintBackgroundsBehindCell(paintInfo, cellPoint, &m_layoutTableSection); - - // Paint the row next, but only if it doesn't have a layer. If a row has a layer, it will be responsible for - // painting the row background for the cell. - if (rowHasBackground && !row->hasSelfPaintingLayer()) - tableCellPainter.paintBackgroundsBehindCell(paintInfo, cellPoint, row); - } - } + // Paint the row next, but only if it doesn't have a layer. If a row has a layer, it will be responsible for + // painting the row background for the cell. + if (row->hasBackground() && !row->hasSelfPaintingLayer()) + tableCellPainter.paintBackgroundsBehindCell(paintInfo, cellPoint, row, DisplayItem::TableCellBackgroundFromRow); } if ((!cell.hasSelfPaintingLayer() && !row->hasSelfPaintingLayer())) cell.paint(paintInfo, cellPoint);
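With one display item type per source container, a cell whose column group, column, section and row all have backgrounds gets four container-background items recorded against it, in the paint order the hunk above establishes (column group, column, section, row), ahead of the cell's own decoration background. A purely hypothetical expectation in the EXPECT_DISPLAY_LIST style used by TableCellPainterTest.cpp earlier in this patch; no such test is added by the change:

    // Hypothetical: all four containers carry a background for |cell|.
    EXPECT_DISPLAY_LIST(rootDisplayItemList().displayItems(), 6,
        TestDisplayItem(layoutView, DisplayItem::BoxDecorationBackground),
        TestDisplayItem(cell, DisplayItem::TableCellBackgroundFromColumnGroup),
        TestDisplayItem(cell, DisplayItem::TableCellBackgroundFromColumn),
        TestDisplayItem(cell, DisplayItem::TableCellBackgroundFromSection),
        TestDisplayItem(cell, DisplayItem::TableCellBackgroundFromRow),
        TestDisplayItem(cell, DisplayItem::BoxDecorationBackground));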
diff --git a/third_party/WebKit/Source/core/paint/Transform3DRecorder.cpp b/third_party/WebKit/Source/core/paint/Transform3DRecorder.cpp index e204f68..3ac856e4 100644 --- a/third_party/WebKit/Source/core/paint/Transform3DRecorder.cpp +++ b/third_party/WebKit/Source/core/paint/Transform3DRecorder.cpp
@@ -39,12 +39,7 @@ return; ASSERT(m_context.displayItemList()); - if (!m_context.displayItemList()->displayItemConstructionIsDisabled()) { - if (m_context.displayItemList()->lastDisplayItemIsNoopBegin()) - m_context.displayItemList()->removeLastDisplayItem(); - else - m_context.displayItemList()->createAndAppend<EndTransform3DDisplayItem>(m_client, DisplayItem::transform3DTypeToEndTransform3DType(m_type)); - } + m_context.displayItemList()->endItem<EndTransform3DDisplayItem>(m_client, DisplayItem::transform3DTypeToEndTransform3DType(m_type)); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/paint/TransformRecorder.cpp b/third_party/WebKit/Source/core/paint/TransformRecorder.cpp index 052f326c..e8f27f1 100644 --- a/third_party/WebKit/Source/core/paint/TransformRecorder.cpp +++ b/third_party/WebKit/Source/core/paint/TransformRecorder.cpp
@@ -32,12 +32,7 @@ return; ASSERT(m_context.displayItemList()); - if (!m_context.displayItemList()->displayItemConstructionIsDisabled()) { - if (m_context.displayItemList()->lastDisplayItemIsNoopBegin()) - m_context.displayItemList()->removeLastDisplayItem(); - else - m_context.displayItemList()->createAndAppend<EndTransformDisplayItem>(m_client); - } + m_context.displayItemList()->endItem<EndTransformDisplayItem>(m_client); } } // namespace blink
diff --git a/third_party/WebKit/Source/core/svg/SVGUseElement.cpp b/third_party/WebKit/Source/core/svg/SVGUseElement.cpp index 555b2cd..43a30d1 100644 --- a/third_party/WebKit/Source/core/svg/SVGUseElement.cpp +++ b/third_party/WebKit/Source/core/svg/SVGUseElement.cpp
@@ -797,19 +797,13 @@ bool SVGUseElement::resourceIsStillLoading() const { - if (m_resource && m_resource->isLoading()) - return true; - return false; + return m_resource && m_resource->isLoading(); } bool SVGUseElement::instanceTreeIsLoading(const SVGElement* targetInstance) { - for (const SVGElement* element = Traversal<SVGElement>::firstChild(*targetInstance); element; element = Traversal<SVGElement>::nextSibling(*element)) { - if (const SVGUseElement* use = element->correspondingUseElement()) { - if (use->resourceIsStillLoading()) - return true; - } - if (element->hasChildren() && instanceTreeIsLoading(element)) + for (const SVGElement* element = targetInstance; element; element = Traversal<SVGElement>::next(*element, targetInstance)) { + if (isSVGUseElement(*element) && toSVGUseElement(*element).resourceIsStillLoading()) return true; } return false;
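The rewritten instanceTreeIsLoading replaces the explicit recursion over child subtrees with a single pre-order walk: Traversal<SVGElement>::next(*element, targetInstance) visits every SVGElement descendant at any depth within the subtree rooted at targetInstance and returns nullptr once that subtree is exhausted. A small generic sketch of the same pattern, with an illustrative function name:

    // Walks the subtree rooted at |root| (root included) in document order and
    // reports whether any <use> in it is still waiting on its resource.
    static bool anyUseElementStillLoading(const SVGElement* root)
    {
        for (const SVGElement* element = root; element; element = Traversal<SVGElement>::next(*element, root)) {
            if (isSVGUseElement(*element) && toSVGUseElement(*element).resourceIsStillLoading())
                return true;
        }
        return false;
    }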
diff --git a/third_party/WebKit/Source/devtools/front_end/bindings/ContentProviderBasedProjectDelegate.js b/third_party/WebKit/Source/devtools/front_end/bindings/ContentProviderBasedProjectDelegate.js index 5e67a41a..e1e5135 100644 --- a/third_party/WebKit/Source/devtools/front_end/bindings/ContentProviderBasedProjectDelegate.js +++ b/third_party/WebKit/Source/devtools/front_end/bindings/ContentProviderBasedProjectDelegate.js
@@ -44,11 +44,19 @@ this._contentProviders = {}; this._workspace = workspace; this._id = id; - workspace.addProject(id, this); + this._project = workspace.addProject(id, this); } WebInspector.ContentProviderBasedProjectDelegate.prototype = { /** + * @return {!WebInspector.Project} + */ + project: function() + { + return this._project; + }, + + /** * @override * @return {string} */
diff --git a/third_party/WebKit/Source/devtools/front_end/bindings/NetworkMapping.js b/third_party/WebKit/Source/devtools/front_end/bindings/NetworkMapping.js index 4bcb4a0a..9f802786 100644 --- a/third_party/WebKit/Source/devtools/front_end/bindings/NetworkMapping.js +++ b/third_party/WebKit/Source/devtools/front_end/bindings/NetworkMapping.js
@@ -4,18 +4,34 @@ /** * @constructor + * @param {!WebInspector.TargetManager} targetManager * @param {!WebInspector.Workspace} workspace * @param {!WebInspector.FileSystemWorkspaceBinding} fileSystemWorkspaceBinding * @param {!WebInspector.FileSystemMapping} fileSystemMapping */ -WebInspector.NetworkMapping = function(workspace, fileSystemWorkspaceBinding, fileSystemMapping) +WebInspector.NetworkMapping = function(targetManager, workspace, fileSystemWorkspaceBinding, fileSystemMapping) { + this._targetManager = targetManager; this._workspace = workspace; this._fileSystemWorkspaceBinding = fileSystemWorkspaceBinding; this._fileSystemMapping = fileSystemMapping; InspectorFrontendHost.events.addEventListener(InspectorFrontendHostAPI.Events.RevealSourceLine, this._revealSourceLine, this); - fileSystemWorkspaceBinding.fileSystemManager().addEventListener(WebInspector.IsolatedFileSystemManager.Events.FileSystemAdded, this._fileSystemAdded, this); - fileSystemWorkspaceBinding.fileSystemManager().addEventListener(WebInspector.IsolatedFileSystemManager.Events.FileSystemRemoved, this._fileSystemRemoved, this); + + // For now, following block is here primarily for testing since in the real life, network manager is created early enough to capture those events. + var fileSystemManager = fileSystemWorkspaceBinding.fileSystemManager(); + for (var path of fileSystemManager.fileSystemPaths()) { + var fileSystem = fileSystemManager.fileSystem(path); + this._fileSystemAdded(new WebInspector.Event(fileSystemManager, WebInspector.IsolatedFileSystemManager.Events.FileSystemAdded, fileSystem)); + } + if (fileSystemManager.fileSystemsLoaded()) + this._fileSystemsLoaded(); + + fileSystemManager.addEventListener(WebInspector.IsolatedFileSystemManager.Events.FileSystemAdded, this._fileSystemAdded, this); + fileSystemManager.addEventListener(WebInspector.IsolatedFileSystemManager.Events.FileSystemRemoved, this._fileSystemRemoved, this); + fileSystemManager.addEventListener(WebInspector.IsolatedFileSystemManager.Events.FileSystemsLoaded, this._fileSystemsLoaded, this); + + this._fileSystemMapping.addEventListener(WebInspector.FileSystemMapping.Events.FileMappingAdded, this._fileSystemMappingChanged, this); + this._fileSystemMapping.addEventListener(WebInspector.FileSystemMapping.Events.FileMappingRemoved, this._fileSystemMappingChanged, this); } WebInspector.NetworkMapping.prototype = { @@ -24,6 +40,7 @@ */ _fileSystemAdded: function(event) { + this._addingFileSystem = true; var fileSystem = /** @type {!WebInspector.IsolatedFileSystem} */ (event.data); this._fileSystemMapping.addFileSystem(fileSystem.path()); @@ -38,6 +55,8 @@ continue; this._fileSystemMapping.addNonConfigurableFileMapping(fileSystem.path(), url, folder); } + this._addingFileSystem = false; + this._fileSystemMappingChanged(); }, /** @@ -47,6 +66,7 @@ { var fileSystem = /** @type {!WebInspector.IsolatedFileSystem} */ (event.data); this._fileSystemMapping.removeFileSystem(fileSystem.path()); + this._fileSystemMappingChanged(); }, /** @@ -189,6 +209,26 @@ this._workspace.addEventListener(WebInspector.Workspace.Events.UISourceCodeAdded, listener, this); }, + + _fileSystemsLoaded: function() + { + this._fileSystemsReady = true; + }, + + _fileSystemMappingChanged: function() + { + if (!this._fileSystemsReady || this._addingFileSystem) + return; + this._targetManager.suspendAndResumeAllTargets(); + }, + + dispose: function() + { + 
this._fileSystemWorkspaceBinding.fileSystemManager().removeEventListener(WebInspector.IsolatedFileSystemManager.Events.FileSystemAdded, this._fileSystemAdded, this); + this._fileSystemWorkspaceBinding.fileSystemManager().removeEventListener(WebInspector.IsolatedFileSystemManager.Events.FileSystemRemoved, this._fileSystemRemoved, this); + this._fileSystemMapping.removeEventListener(WebInspector.FileSystemMapping.Events.FileMappingAdded, this._fileSystemMappingChanged, this); + this._fileSystemMapping.removeEventListener(WebInspector.FileSystemMapping.Events.FileMappingRemoved, this._fileSystemMappingChanged, this); + } } /**
diff --git a/third_party/WebKit/Source/devtools/front_end/bindings/NetworkProject.js b/third_party/WebKit/Source/devtools/front_end/bindings/NetworkProject.js index 2f5fbe9..2c49bb4 100644 --- a/third_party/WebKit/Source/devtools/front_end/bindings/NetworkProject.js +++ b/third_party/WebKit/Source/devtools/front_end/bindings/NetworkProject.js
@@ -43,6 +43,7 @@ this._target = target; this._id = projectId; WebInspector.ContentProviderBasedProjectDelegate.call(this, workspace, projectId, projectType); + this.project()[WebInspector.NetworkProject._targetSymbol] = target; } WebInspector.NetworkProjectDelegate.prototype = { @@ -169,10 +170,11 @@ cssModel.addEventListener(WebInspector.CSSStyleModel.Events.StyleSheetAdded, this._styleSheetAdded, this); cssModel.addEventListener(WebInspector.CSSStyleModel.Events.StyleSheetRemoved, this._styleSheetRemoved, this); } - WebInspector.targetManager.addEventListener(WebInspector.TargetManager.Events.SuspendStateChanged, this._suspendStateChanged, this); + target.targetManager().addEventListener(WebInspector.TargetManager.Events.SuspendStateChanged, this._suspendStateChanged, this); } WebInspector.NetworkProject._networkProjectSymbol = Symbol("networkProject"); +WebInspector.NetworkProject._targetSymbol = Symbol("target"); WebInspector.NetworkProject._contentTypeSymbol = Symbol("networkContentType"); /** @@ -192,8 +194,7 @@ */ WebInspector.NetworkProject._targetForProject = function(project) { - var targetId = parseInt(project.id(), 10); - return WebInspector.targetManager.targetById(targetId); + return project[WebInspector.NetworkProject._targetSymbol]; } /** @@ -366,7 +367,7 @@ _suspendStateChanged: function() { - if (WebInspector.targetManager.allTargetsSuspended()) + if (this.target().targetManager().allTargetsSuspended()) this._reset(); else this._populate();
diff --git a/third_party/WebKit/Source/devtools/front_end/elements/Spectrum.js b/third_party/WebKit/Source/devtools/front_end/elements/Spectrum.js index b23d667f..67ed818f 100644 --- a/third_party/WebKit/Source/devtools/front_end/elements/Spectrum.js +++ b/third_party/WebKit/Source/devtools/front_end/elements/Spectrum.js
@@ -129,6 +129,7 @@ addColorButton.addEventListener("click", this._addColorToCustomPalette.bind(this)); this._addColorToolbar.appendToolbarItem(addColorButton); + this._loadPalettes(); new WebInspector.Spectrum.PaletteGenerator(this._generatedPaletteLoaded.bind(this)); /** @@ -266,7 +267,6 @@ var numItems = palette.colors.length; if (palette.mutable) numItems++; - var rowsNeeded = Math.max(1, Math.ceil(numItems / WebInspector.Spectrum._itemsPerPaletteRow)); if (palette.mutable) { this._paletteContainer.appendChild(this._addColorToolbar.element); this._paletteContainer.appendChild(this._deleteIconToolbar.element); @@ -276,10 +276,6 @@ } this._togglePalettePanel(false); - var paletteColorHeight = 12; - var paletteMargin = 12; - this.element.style.height = (this._paletteContainer.offsetTop + paletteMargin + (paletteColorHeight + paletteMargin) * rowsNeeded) + "px"; - this.dispatchEventToListeners(WebInspector.Spectrum.Events.SizeChanged); }, /** @@ -385,13 +381,8 @@ this._deleteButton.setToggled(false); }, - /** - * @param {!WebInspector.Spectrum.Palette} generatedPalette - */ - _generatedPaletteLoaded: function(generatedPalette) + _loadPalettes: function() { - if (generatedPalette.colors.length) - this._palettes.set(generatedPalette.title, generatedPalette); this._palettes.set(WebInspector.Spectrum.MaterialPalette.title, WebInspector.Spectrum.MaterialPalette); /** @type {!WebInspector.Spectrum.Palette} */ var defaultCustomPalette = { title: "Custom", colors: [], mutable: true }; @@ -399,10 +390,28 @@ this._palettes.set(this._customPaletteSetting.get().title, this._customPaletteSetting.get()); this._selectedColorPalette = WebInspector.settings.createSetting("selectedColorPalette", WebInspector.Spectrum.GeneratedPaletteTitle); - var paletteToShow = this._palettes.get(this._selectedColorPalette.get() || WebInspector.Spectrum.GeneratedPaletteTitle) - || this._palettes.get("Material"); - if (paletteToShow) - this._showPalette(paletteToShow, true); + var palette = this._palettes.get(this._selectedColorPalette.get()); + if (palette) { + this._resizeForSelectedPalette(); + this._showPalette(palette, true); + } + }, + + /** + * @param {!WebInspector.Spectrum.Palette} generatedPalette + */ + _generatedPaletteLoaded: function(generatedPalette) + { + if (generatedPalette.colors.length) + this._palettes.set(generatedPalette.title, generatedPalette); + if (this._selectedColorPalette.get() !== generatedPalette.title) { + return; + } else if (!generatedPalette.colors.length) { + this._paletteSelected(WebInspector.Spectrum.MaterialPalette); + return; + } + this._resizeForSelectedPalette(); + this._showPalette(generatedPalette, true); }, /** @@ -429,9 +438,26 @@ _paletteSelected: function(palette) { this._selectedColorPalette.set(palette.title); + this._resizeForSelectedPalette(); this._showPalette(palette, true); }, + _resizeForSelectedPalette: function() + { + var palette = this._palettes.get(this._selectedColorPalette.get()); + if (!palette) + return; + var rowsNeeded = Math.max(1, Math.ceil(palette.colors.length / WebInspector.Spectrum._itemsPerPaletteRow)); + if (this._numPaletteRowsShown === rowsNeeded) + return; + this._numPaletteRowsShown = rowsNeeded; + var paletteColorHeight = 12; + var paletteMargin = 12; + var paletteTop = 235; + this.element.style.height = (paletteTop + paletteMargin + (paletteColorHeight + paletteMargin) * rowsNeeded) + "px"; + this.dispatchEventToListeners(WebInspector.Spectrum.Events.SizeChanged); + }, + /** * @param {string} colorText * @param {boolean} matchUserFormat
diff --git a/third_party/WebKit/Source/devtools/front_end/elements/spectrum.css b/third_party/WebKit/Source/devtools/front_end/elements/spectrum.css index 0dab2233..0f16172b 100644 --- a/third_party/WebKit/Source/devtools/front_end/elements/spectrum.css +++ b/third_party/WebKit/Source/devtools/front_end/elements/spectrum.css
@@ -6,7 +6,7 @@ } :host(.palettes-enabled) { - height: 271px; + height: 319px; } .spectrum-color {
diff --git a/third_party/WebKit/Source/devtools/front_end/main/Main.js b/third_party/WebKit/Source/devtools/front_end/main/Main.js index 2817d09..e7e7175 100644 --- a/third_party/WebKit/Source/devtools/front_end/main/Main.js +++ b/third_party/WebKit/Source/devtools/front_end/main/Main.js
@@ -210,7 +210,7 @@ WebInspector.fileSystemMapping = new WebInspector.FileSystemMapping(); var fileSystemWorkspaceBinding = new WebInspector.FileSystemWorkspaceBinding(WebInspector.isolatedFileSystemManager, WebInspector.workspace); - WebInspector.networkMapping = new WebInspector.NetworkMapping(WebInspector.workspace, fileSystemWorkspaceBinding, WebInspector.fileSystemMapping); + WebInspector.networkMapping = new WebInspector.NetworkMapping(WebInspector.targetManager, WebInspector.workspace, fileSystemWorkspaceBinding, WebInspector.fileSystemMapping); WebInspector.networkProjectManager = new WebInspector.NetworkProjectManager(WebInspector.targetManager, WebInspector.workspace, WebInspector.networkMapping); WebInspector.presentationConsoleMessageHelper = new WebInspector.PresentationConsoleMessageHelper(WebInspector.workspace); WebInspector.cssWorkspaceBinding = new WebInspector.CSSWorkspaceBinding(WebInspector.targetManager, WebInspector.workspace, WebInspector.networkMapping);
diff --git a/third_party/WebKit/Source/devtools/front_end/sdk/CSSStyleModel.js b/third_party/WebKit/Source/devtools/front_end/sdk/CSSStyleModel.js index 403bc68..d7d3797 100644 --- a/third_party/WebKit/Source/devtools/front_end/sdk/CSSStyleModel.js +++ b/third_party/WebKit/Source/devtools/front_end/sdk/CSSStyleModel.js
@@ -570,8 +570,7 @@ _suspendStateChanged: function() { if (WebInspector.targetManager.allTargetsSuspended()) { - this._resetStyleSheets(); - this._agent.disable(); + this._agent.disable(this._resetStyleSheets.bind(this)); this._isEnabled = false; } else { this._agent.enable().then(this._wasEnabled.bind(this));
diff --git a/third_party/WebKit/Source/devtools/front_end/sdk/DOMModel.js b/third_party/WebKit/Source/devtools/front_end/sdk/DOMModel.js index 8f95ac4a..26eefa9 100644 --- a/third_party/WebKit/Source/devtools/front_end/sdk/DOMModel.js +++ b/third_party/WebKit/Source/devtools/front_end/sdk/DOMModel.js
@@ -1938,10 +1938,9 @@ _suspendStateChanged: function() { if (WebInspector.targetManager.allTargetsSuspended()) { - this._agent.disable(); + this._agent.disable(this._setDocument.bind(this, null)); } else { this._agent.enable(); - this._setDocument(null); } },
diff --git a/third_party/WebKit/Source/devtools/front_end/sdk/InspectorBackend.js b/third_party/WebKit/Source/devtools/front_end/sdk/InspectorBackend.js index 7ccd25c..12c79815 100644 --- a/third_party/WebKit/Source/devtools/front_end/sdk/InspectorBackend.js +++ b/third_party/WebKit/Source/devtools/front_end/sdk/InspectorBackend.js
@@ -40,6 +40,7 @@ } InspectorBackendClass._DevToolsErrorCode = -32000; +InspectorBackendClass._DevToolsStubErrorCode = -32015; /** * @param {string} error @@ -461,7 +462,8 @@ return; } else { if (messageObject.error) { - InspectorBackendClass.reportProtocolError("Generic message format error", messageObject); + if (messageObject.error.code !== InspectorBackendClass._DevToolsStubErrorCode) + InspectorBackendClass.reportProtocolError("Generic message format error", messageObject); return; } var method = messageObject.method.split("."); @@ -702,15 +704,16 @@ */ sendMessage: function(messageObject) { - setTimeout(this._echoResponse.bind(this, messageObject), 0); + setTimeout(this._respondWithError.bind(this, messageObject), 0); }, /** * @param {!Object} messageObject */ - _echoResponse: function(messageObject) + _respondWithError: function(messageObject) { - this.dispatch(messageObject); + var error = { message: "This is a stub connection, can't dispatch message.", code: InspectorBackendClass._DevToolsStubErrorCode, data: messageObject }; + this.dispatch({error: error}); }, __proto__: InspectorBackendClass.Connection.prototype
diff --git a/third_party/WebKit/Source/devtools/front_end/sdk/Target.js b/third_party/WebKit/Source/devtools/front_end/sdk/Target.js index 08b517430..83c461f 100644 --- a/third_party/WebKit/Source/devtools/front_end/sdk/Target.js +++ b/third_party/WebKit/Source/devtools/front_end/sdk/Target.js
@@ -7,15 +7,17 @@ /** * @constructor * @extends {Protocol.Agents} + * @param {!WebInspector.TargetManager} targetManager * @param {string} name * @param {number} type * @param {!InspectorBackendClass.Connection} connection * @param {?WebInspector.Target} parentTarget * @param {function(?WebInspector.Target)=} callback */ -WebInspector.Target = function(name, type, connection, parentTarget, callback) +WebInspector.Target = function(targetManager, name, type, connection, parentTarget, callback) { Protocol.Agents.call(this, connection.agentsMap()); + this._targetManager = targetManager; this._name = name; this._type = type; this._connection = connection; @@ -68,6 +70,15 @@ }, /** + * + * @return {!WebInspector.TargetManager} + */ + targetManager: function() + { + return this._targetManager; + }, + + /** * @param {string} label * @return {string} */ @@ -206,13 +217,13 @@ _onDisconnect: function() { - WebInspector.targetManager.removeTarget(this); + this._targetManager.removeTarget(this); this._dispose(); }, _dispose: function() { - WebInspector.targetManager.dispatchEventToListeners(WebInspector.TargetManager.Events.TargetDisposed, this); + this._targetManager.dispatchEventToListeners(WebInspector.TargetManager.Events.TargetDisposed, this); this.networkManager.dispose(); this.cpuProfilerModel.dispose(); WebInspector.ServiceWorkerCacheModel.fromTarget(this).dispose(); @@ -323,6 +334,12 @@ this.dispatchEventToListeners(WebInspector.TargetManager.Events.SuspendStateChanged); }, + suspendAndResumeAllTargets: function() + { + this.suspendAllTargets(); + this.resumeAllTargets(); + }, + /** * @return {boolean} */ @@ -446,7 +463,7 @@ */ createTarget: function(name, type, connection, parentTarget, callback) { - new WebInspector.Target(name, type, connection, parentTarget, callbackWrapper.bind(this)); + new WebInspector.Target(this, name, type, connection, parentTarget, callbackWrapper.bind(this)); /** * @this {WebInspector.TargetManager}
diff --git a/third_party/WebKit/Source/devtools/front_end/settings/EditFileSystemDialog.js b/third_party/WebKit/Source/devtools/front_end/settings/EditFileSystemDialog.js index 8d53be3..de2647b 100644 --- a/third_party/WebKit/Source/devtools/front_end/settings/EditFileSystemDialog.js +++ b/third_party/WebKit/Source/devtools/front_end/settings/EditFileSystemDialog.js
@@ -110,14 +110,6 @@ } WebInspector.EditFileSystemDialog.prototype = { - willHide: function() - { - if (!this._hasMappingChanges) - return; - if (window.confirm(WebInspector.UIString("It is recommended to restart DevTools after making these changes. Would you like to restart it?"))) - WebInspector.reload(); - }, - _fileMappingAdded: function(event) { var entry = /** @type {!WebInspector.FileSystemMapping.Entry} */ (event.data); @@ -249,6 +241,9 @@ return prefix + (prefix[prefix.length - 1] === "/" ? "" : "/"); }, + /** + * @param {!WebInspector.FileSystemMapping.Entry} entry + */ _addMappingRow: function(entry) { var key = this._entryKey(entry);
diff --git a/third_party/WebKit/Source/devtools/front_end/sources/SourcesPanel.js b/third_party/WebKit/Source/devtools/front_end/sources/SourcesPanel.js index b6e3d68..991ba12 100644 --- a/third_party/WebKit/Source/devtools/front_end/sources/SourcesPanel.js +++ b/third_party/WebKit/Source/devtools/front_end/sources/SourcesPanel.js
@@ -828,12 +828,6 @@ this._appendNetworkRequestItems(contextMenu, target); }, - _suggestReload: function() - { - if (window.confirm(WebInspector.UIString("It is recommended to restart inspector after making these changes. Would you like to restart it?"))) - WebInspector.reload(); - }, - /** * @param {!WebInspector.UISourceCode} uiSourceCode */ @@ -850,7 +844,6 @@ if (!networkUISourceCode) return; this._networkMapping.addMapping(networkUISourceCode, uiSourceCode); - this._suggestReload(); } }, @@ -870,7 +863,6 @@ if (!uiSourceCode) return; this._networkMapping.addMapping(networkUISourceCode, uiSourceCode); - this._suggestReload(); } }, @@ -879,10 +871,7 @@ */ _removeNetworkMapping: function(uiSourceCode) { - if (confirm(WebInspector.UIString("Are you sure you want to remove network mapping?"))) { - this._networkMapping.removeMapping(uiSourceCode); - this._suggestReload(); - } + this._networkMapping.removeMapping(uiSourceCode); }, /**
diff --git a/third_party/WebKit/Source/devtools/front_end/timeline/TimelineTreeView.js b/third_party/WebKit/Source/devtools/front_end/timeline/TimelineTreeView.js index 0b768ff..97eedf5 100644 --- a/third_party/WebKit/Source/devtools/front_end/timeline/TimelineTreeView.js +++ b/third_party/WebKit/Source/devtools/front_end/timeline/TimelineTreeView.js
@@ -311,9 +311,8 @@ */ WebInspector.TimelineTreeView.eventStackFrame = function(event) { - var data = event.args["data"] || event.args["beginData"]; - if (data) - return data; + if (event.name == WebInspector.TimelineModel.RecordType.JSFrame) + return event.args["data"]; var topFrame = event.stackTrace && event.stackTrace[0]; if (topFrame) return topFrame;
diff --git a/third_party/WebKit/Source/devtools/front_end/ui/Tooltip.js b/third_party/WebKit/Source/devtools/front_end/ui/Tooltip.js index c01b3008..48f3326d 100644 --- a/third_party/WebKit/Source/devtools/front_end/ui/Tooltip.js +++ b/third_party/WebKit/Source/devtools/front_end/ui/Tooltip.js
@@ -15,7 +15,7 @@ this._tooltipElement = this._shadowRoot.createChild("div", "tooltip"); doc.addEventListener("mousemove", this._mouseMove.bind(this), true); doc.addEventListener("mousedown", this._hide.bind(this, true), true); - doc.addEventListener("mouseout", this._hide.bind(this, true), true); + doc.addEventListener("mouseleave", this._hide.bind(this, true), true); doc.addEventListener("keydown", this._hide.bind(this, true), true); }
diff --git a/third_party/WebKit/Source/devtools/front_end/workspace/FileSystemMapping.js b/third_party/WebKit/Source/devtools/front_end/workspace/FileSystemMapping.js index ee9ec0e..c5cf47e 100644 --- a/third_party/WebKit/Source/devtools/front_end/workspace/FileSystemMapping.js +++ b/third_party/WebKit/Source/devtools/front_end/workspace/FileSystemMapping.js
@@ -94,7 +94,8 @@ if (this._mappingForURLPrefix[entry.urlPrefix] && !entry.configurable) continue; this._mappingForURLPrefix[entry.urlPrefix] = entry; - this._urlPrefixes.push(entry.urlPrefix); + if (this._urlPrefixes.indexOf(entry.urlPrefix) === -1) + this._urlPrefixes.push(entry.urlPrefix); } } this._urlPrefixes.sort(); @@ -154,7 +155,6 @@ _innerAddFileMapping: function(fileSystemPath, urlPrefix, pathPrefix, configurable) { var entry = new WebInspector.FileSystemMapping.Entry(fileSystemPath, urlPrefix, pathPrefix, configurable); - var existingEntry = this._mappingForURLPrefix[entry.urlPrefix]; this._fileSystemMappings[fileSystemPath].push(entry); this._rebuildIndexes(); this.dispatchEventToListeners(WebInspector.FileSystemMapping.Events.FileMappingAdded, entry); @@ -167,7 +167,7 @@ */ removeFileMapping: function(fileSystemPath, urlPrefix, pathPrefix) { - var entry = this._mappingEntryForPathPrefix(fileSystemPath, pathPrefix); + var entry = this._configurableMappingEntryForPathPrefix(fileSystemPath, pathPrefix); if (!entry) return; this._fileSystemMappings[fileSystemPath].remove(entry); @@ -204,6 +204,8 @@ var entry = null; for (var i = 0; i < entries.length; ++i) { var pathPrefix = entries[i].pathPrefix; + if (entry && entry.configurable && !entries[i].configurable) + continue; // We are looking for the longest pathPrefix match. if (entry && entry.pathPrefix.length > pathPrefix.length) continue; @@ -218,7 +220,7 @@ * @param {string} pathPrefix * @return {?WebInspector.FileSystemMapping.Entry} */ - _mappingEntryForPathPrefix: function(fileSystemPath, pathPrefix) + _configurableMappingEntryForPathPrefix: function(fileSystemPath, pathPrefix) { var entries = this._fileSystemMappings[fileSystemPath]; for (var i = 0; i < entries.length; ++i) {
diff --git a/third_party/WebKit/Source/devtools/front_end/workspace/IsolatedFileSystem.js b/third_party/WebKit/Source/devtools/front_end/workspace/IsolatedFileSystem.js index be04317f..5208e4a 100644 --- a/third_party/WebKit/Source/devtools/front_end/workspace/IsolatedFileSystem.js +++ b/third_party/WebKit/Source/devtools/front_end/workspace/IsolatedFileSystem.js
@@ -512,9 +512,9 @@ function innerCallback(results) { - if (!results.length) + if (!results.length) { callback(entries.sort()); - else { + } else { entries = entries.concat(toArray(results)); dirReader.readEntries(innerCallback, errorHandler); }
diff --git a/third_party/WebKit/Source/devtools/front_end/workspace/IsolatedFileSystemManager.js b/third_party/WebKit/Source/devtools/front_end/workspace/IsolatedFileSystemManager.js index 9983eaa..78030c6e 100644 --- a/third_party/WebKit/Source/devtools/front_end/workspace/IsolatedFileSystemManager.js +++ b/third_party/WebKit/Source/devtools/front_end/workspace/IsolatedFileSystemManager.js
@@ -50,6 +50,7 @@ WebInspector.IsolatedFileSystemManager.Events = { FileSystemAdded: "FileSystemAdded", FileSystemRemoved: "FileSystemRemoved", + FileSystemsLoaded: "FileSystemsLoaded", ExcludedFolderAdded: "ExcludedFolderAdded", ExcludedFolderRemoved: "ExcludedFolderRemoved" } @@ -86,7 +87,25 @@ var promises = []; for (var i = 0; i < fileSystems.length; ++i) promises.push(this._innerAddFileSystem(fileSystems[i])); - Promise.all(promises).then(this._initializeCallback); + Promise.all(promises).then(fireFileSystemsLoaded.bind(this)); + + /** + * @this {WebInspector.IsolatedFileSystemManager} + */ + function fireFileSystemsLoaded() + { + this._initializeCallback(); + delete this._initializeCallback; + this.dispatchEventToListeners(WebInspector.IsolatedFileSystemManager.Events.FileSystemsLoaded); + } + }, + + /** + * @return {boolean} + */ + fileSystemsLoaded: function() + { + return !this._initializeCallback; }, /**
diff --git a/third_party/WebKit/Source/modules/webaudio/AudioBufferSourceNode.cpp b/third_party/WebKit/Source/modules/webaudio/AudioBufferSourceNode.cpp index 11215d43..d50ce02 100644 --- a/third_party/WebKit/Source/modules/webaudio/AudioBufferSourceNode.cpp +++ b/third_party/WebKit/Source/modules/webaudio/AudioBufferSourceNode.cpp
@@ -483,6 +483,12 @@ return; } + // The node is started. Add a reference to keep us alive so that audio + // will eventually get played even if Javascript should drop all references + // to this node. The reference will get dropped when the source has finished + // playing. + context()->notifySourceNodeStartedProcessing(node()); + // This synchronizes with process(). updateSchedulingInfo will read some of the variables being // set here. MutexLocker processLocker(m_processLock); @@ -492,12 +498,6 @@ m_grainOffset = grainOffset; m_grainDuration = grainDuration; - // The node is started. Add a reference to keep us alive so that audio - // will eventually get played even if Javascript should drop all references - // to this node. The reference will get dropped when the source has finished - // playing. - context()->notifySourceNodeStartedProcessing(node()); - // If |when| < currentTime, the source must start now according to the spec. // So just set startTime to currentTime in this case to start the source now. m_startTime = std::max(when, context()->currentTime());
diff --git a/third_party/WebKit/Source/modules/webaudio/AudioScheduledSourceNode.cpp b/third_party/WebKit/Source/modules/webaudio/AudioScheduledSourceNode.cpp index 1e84f43..d2272d8 100644 --- a/third_party/WebKit/Source/modules/webaudio/AudioScheduledSourceNode.cpp +++ b/third_party/WebKit/Source/modules/webaudio/AudioScheduledSourceNode.cpp
@@ -43,7 +43,6 @@ : AudioHandler(nodeType, node, sampleRate) , m_startTime(0) , m_endTime(UnknownTime) - , m_hasEndedListener(false) , m_playbackState(UNSCHEDULED_STATE) { } @@ -153,15 +152,15 @@ return; } - // This synchronizes with process(). updateSchedulingInfo will read some of the variables being - // set here. - MutexLocker processLocker(m_processLock); - // The node is started. Add a reference to keep us alive so that audio will eventually get // played even if Javascript should drop all references to this node. The reference will get // dropped when the source has finished playing. context()->notifySourceNodeStartedProcessing(node()); + // This synchronizes with process(). updateSchedulingInfo will read some of the variables being + // set here. + MutexLocker processLocker(m_processLock); + // If |when| < currentTime, the source must start now according to the spec. // So just set startTime to currentTime in this case to start the source now. m_startTime = std::max(when, context()->currentTime()); @@ -208,11 +207,12 @@ setPlaybackState(FINISHED_STATE); } } + void AudioScheduledSourceHandler::finish() { finishWithoutOnEnded(); - if (m_hasEndedListener && context()->executionContext()) { + if (context()->executionContext()) { context()->executionContext()->postTask(FROM_HERE, createCrossThreadTask(&AudioScheduledSourceHandler::notifyEnded, PassRefPtr<AudioScheduledSourceHandler>(this))); } } @@ -263,7 +263,6 @@ void AudioScheduledSourceNode::setOnended(PassRefPtrWillBeRawPtr<EventListener> listener) { - audioScheduledSourceHandler().setHasEndedListener(); setAttributeEventListener(EventTypeNames::ended, listener); }
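Both AudioBufferSourceNode::start (above) and AudioScheduledSourceHandler::start now call context()->notifySourceNodeStartedProcessing(node()) before acquiring m_processLock instead of after, which keeps the callback into the context outside the handler's process lock; the scheduling fields are still written under the lock. The same patch drops m_hasEndedListener, so finish() posts the onended notification whenever an execution context exists rather than only when a listener had been registered through setOnended. A condensed sketch of the resulting start() ordering (error handling and the final state bookkeeping are elided, as an assumption about the unchanged parts of the function):

    void AudioScheduledSourceHandler::start(double when, ExceptionState& exceptionState)
    {
        if (playbackState() != UNSCHEDULED_STATE) {
            // ... throw an InvalidStateError, as in the original ...
            return;
        }

        // Keep the node alive until playback finishes, even if script drops all
        // references. Done before locking: no call back into the context while
        // m_processLock is held.
        context()->notifySourceNodeStartedProcessing(node());

        // Synchronizes with process(); the audio thread reads these fields.
        MutexLocker processLocker(m_processLock);

        // If |when| < currentTime, the source must start now per the spec.
        m_startTime = std::max(when, context()->currentTime());

        // ... remaining, unchanged bookkeeping from the full function ...
    }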
diff --git a/third_party/WebKit/Source/modules/webaudio/AudioScheduledSourceNode.h b/third_party/WebKit/Source/modules/webaudio/AudioScheduledSourceNode.h index 9e36c1b..ed6a5300 100644 --- a/third_party/WebKit/Source/modules/webaudio/AudioScheduledSourceNode.h +++ b/third_party/WebKit/Source/modules/webaudio/AudioScheduledSourceNode.h
@@ -82,8 +82,6 @@ return playbackState() == FINISHED_STATE; } - void setHasEndedListener() { m_hasEndedListener = true; } - protected: // Get frame information for the current time quantum. // We handle the transition into PLAYING_STATE and FINISHED_STATE here, @@ -115,8 +113,6 @@ // has been reached. double m_endTime; // in seconds - bool m_hasEndedListener; - static const double UnknownTime; private: // This is accessed by both the main thread and audio thread. Use the setter and getter to
diff --git a/third_party/WebKit/Source/platform/DragImage.cpp b/third_party/WebKit/Source/platform/DragImage.cpp index be1b995..e639e03 100644 --- a/third_party/WebKit/Source/platform/DragImage.cpp +++ b/third_party/WebKit/Source/platform/DragImage.cpp
@@ -72,9 +72,24 @@ const float kDragLinkLabelFontSize = 11; const float kDragLinkUrlFontSize = 10; -PassRefPtr<SkImage> adjustedImage(PassRefPtr<SkImage> image, const IntSize& size, - const AffineTransform& transform, float opacity, InterpolationQuality interpolationQuality) +} // anonymous namespace + +PassRefPtr<SkImage> DragImage::resizeAndOrientImage(PassRefPtr<SkImage> image, ImageOrientation orientation, + FloatSize imageScale, float opacity, InterpolationQuality interpolationQuality) { + IntSize size(image->width(), image->height()); + size.scale(imageScale.width(), imageScale.height()); + AffineTransform transform; + if (orientation != DefaultImageOrientation) { + if (orientation.usesWidthAsHeight()) + size = size.transposedSize(); + transform *= orientation.transformFromDefault(size); + } + transform.scaleNonUniform(imageScale.width(), imageScale.height()); + + if (size.isEmpty()) + return nullptr; + if (transform.isIdentity() && opacity == 1) { // Nothing to adjust, just use the original. ASSERT(image->width() == size.width()); @@ -100,15 +115,13 @@ return adoptRef(surface->newImageSnapshot()); } -} // anonymous namespace - -FloatSize DragImage::clampedImageScale(const Image& image, const IntSize& size, +FloatSize DragImage::clampedImageScale(const IntSize& imageSize, const IntSize& size, const IntSize& maxSize) { // Non-uniform scaling for size mapping. FloatSize imageScale( - static_cast<float>(size.width()) / image.width(), - static_cast<float>(size.height()) / image.height()); + static_cast<float>(size.width()) / imageSize.width(), + static_cast<float>(size.height()) / imageSize.height()); // Uniform scaling for clamping. const float clampScaleX = size.width() > maxSize.width() @@ -122,7 +135,7 @@ PassOwnPtr<DragImage> DragImage::create(Image* image, RespectImageOrientationEnum shouldRespectImageOrientation, float deviceScaleFactor, - InterpolationQuality interpolationQuality, float opacity, const FloatSize& imageScale) + InterpolationQuality interpolationQuality, float opacity, FloatSize imageScale) { if (!image) return nullptr; @@ -131,32 +144,13 @@ if (!skImage) return nullptr; - IntSize size = image->size(); - size.scale(imageScale.width(), imageScale.height()); - if (size.isEmpty()) - return nullptr; - - AffineTransform transform; - transform.scaleNonUniform(imageScale.width(), imageScale.height()); - - if (shouldRespectImageOrientation == RespectImageOrientation && image->isBitmapImage()) { - BitmapImage* bitmapImage = toBitmapImage(image); - ImageOrientation orientation = bitmapImage->currentFrameOrientation(); - - if (orientation != DefaultImageOrientation) { - size = bitmapImage->sizeRespectingOrientation(); - if (orientation.usesWidthAsHeight()) - size.scale(imageScale.height(), imageScale.width()); - else - size.scale(imageScale.width(), imageScale.height()); - - transform *= orientation.transformFromDefault(size); - } - } + ImageOrientation orientation; + if (shouldRespectImageOrientation == RespectImageOrientation && image->isBitmapImage()) + orientation = toBitmapImage(image)->currentFrameOrientation(); SkBitmap bm; RefPtr<SkImage> resizedImage = - adjustedImage(skImage.release(), size, transform, opacity, interpolationQuality); + resizeAndOrientImage(skImage.release(), orientation, imageScale, opacity, interpolationQuality); if (!resizedImage || !resizedImage->asLegacyBitmap(&bm, SkImage::kRO_LegacyBitmapMode)) return nullptr;
diff --git a/third_party/WebKit/Source/platform/DragImage.h b/third_party/WebKit/Source/platform/DragImage.h index 6c3acfa7..5b8f552 100644 --- a/third_party/WebKit/Source/platform/DragImage.h +++ b/third_party/WebKit/Source/platform/DragImage.h
@@ -34,6 +34,8 @@ #include "third_party/skia/include/core/SkBitmap.h" #include "wtf/Forward.h" +class SkImage; + namespace blink { class FontDescription; @@ -45,12 +47,12 @@ static PassOwnPtr<DragImage> create(Image*, RespectImageOrientationEnum = DoNotRespectImageOrientation, float deviceScaleFactor = 1, InterpolationQuality = InterpolationHigh, float opacity = 1, - const FloatSize& imageScale = FloatSize(1, 1)); + FloatSize imageScale = FloatSize(1, 1)); static PassOwnPtr<DragImage> create(const KURL&, const String& label, const FontDescription& systemFont, float deviceScaleFactor); ~DragImage(); - static FloatSize clampedImageScale(const Image&, const IntSize&, const IntSize& maxSize); + static FloatSize clampedImageScale(const IntSize&, const IntSize&, const IntSize& maxSize); const SkBitmap& bitmap() { return m_bitmap; } float resolutionScale() const { return m_resolutionScale; } @@ -58,6 +60,8 @@ void scale(float scaleX, float scaleY); + static PassRefPtr<SkImage> resizeAndOrientImage(PassRefPtr<SkImage>, ImageOrientation, FloatSize imageScale = FloatSize(1, 1), float opacity = 1.0, InterpolationQuality = InterpolationNone); + private: DragImage(const SkBitmap&, float resolutionScale, InterpolationQuality);
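Promoting the file-local adjustedImage helper to the public, static DragImage::resizeAndOrientImage lets callers outside DragImage::create apply an image's orientation (and optionally scale and opacity) to an SkImage; within this patch the new caller is GraphicsLayer::setContentsToImage, shown below. A sketch of the exposed call pattern:

    // Orient an SkImage according to a BitmapImage's current frame orientation.
    // Scale, opacity and interpolation quality fall back to the defaults newly
    // declared in DragImage.h.
    RefPtr<SkImage> skImage = image->imageForCurrentFrame();
    if (skImage && image->isBitmapImage()) {
        ImageOrientation orientation = toBitmapImage(image)->currentFrameOrientation();
        skImage = DragImage::resizeAndOrientImage(skImage.release(), orientation);
    }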
diff --git a/third_party/WebKit/Source/platform/RuntimeEnabledFeatures.in b/third_party/WebKit/Source/platform/RuntimeEnabledFeatures.in index 16d813b..ef13b196 100644 --- a/third_party/WebKit/Source/platform/RuntimeEnabledFeatures.in +++ b/third_party/WebKit/Source/platform/RuntimeEnabledFeatures.in
@@ -156,7 +156,7 @@ SharedArrayBuffer SharedWorker status=stable SlimmingPaintV2 -SlimmingPaintOffsetCaching implied_by=SlimmingPaintV2 +SlimmingPaintOffsetCaching implied_by=SlimmingPaintV2, implied_by=SlimmingPaintSynchronizedPainting SlimmingPaintStrictCullRectClipping SlimmingPaintSubsequenceCaching implied_by=SlimmingPaintV2 SlimmingPaintSynchronizedPainting implied_by=SlimmingPaintV2
diff --git a/third_party/WebKit/Source/platform/graphics/GraphicsContext.cpp b/third_party/WebKit/Source/platform/graphics/GraphicsContext.cpp index 5704e7d..42bdc7b8 100644 --- a/third_party/WebKit/Source/platform/graphics/GraphicsContext.cpp +++ b/third_party/WebKit/Source/platform/graphics/GraphicsContext.cpp
@@ -344,7 +344,7 @@ SkMatrix pictureTransform; pictureTransform.setRectToRect(sourceBounds, skBounds, SkMatrix::kFill_ScaleToFit); m_canvas->concat(pictureTransform); - RefPtr<SkPictureImageFilter> pictureFilter = adoptRef(SkPictureImageFilter::CreateForLocalSpace(picture, sourceBounds, static_cast<SkFilterQuality>(imageInterpolationQuality()))); + RefPtr<SkImageFilter> pictureFilter = adoptRef(SkPictureImageFilter::CreateForLocalSpace(picture, sourceBounds, static_cast<SkFilterQuality>(imageInterpolationQuality()))); picturePaint.setImageFilter(pictureFilter.get()); m_canvas->saveLayer(&sourceBounds, &picturePaint); m_canvas->restore();
diff --git a/third_party/WebKit/Source/platform/graphics/GraphicsLayer.cpp b/third_party/WebKit/Source/platform/graphics/GraphicsLayer.cpp index 13e59ebd..21f225a 100644 --- a/third_party/WebKit/Source/platform/graphics/GraphicsLayer.cpp +++ b/third_party/WebKit/Source/platform/graphics/GraphicsLayer.cpp
@@ -28,9 +28,11 @@ #include "SkImageFilter.h" #include "SkMatrix44.h" +#include "platform/DragImage.h" #include "platform/TraceEvent.h" #include "platform/geometry/FloatRect.h" #include "platform/geometry/LayoutRect.h" +#include "platform/graphics/BitmapImage.h" #include "platform/graphics/FirstPaintInvalidationTracking.h" #include "platform/graphics/GraphicsContext.h" #include "platform/graphics/GraphicsLayerFactory.h" @@ -1044,7 +1046,7 @@ updateContentsRect(); } -void GraphicsLayer::setContentsToImage(Image* image) +void GraphicsLayer::setContentsToImage(Image* image, RespectImageOrientationEnum respectImageOrientation) { RefPtr<SkImage> skImage = image ? image->imageForCurrentFrame() : nullptr; if (image && skImage) { @@ -1052,6 +1054,10 @@ m_imageLayer = adoptPtr(Platform::current()->compositorSupport()->createImageLayer()); registerContentsLayer(m_imageLayer->layer()); } + if (respectImageOrientation == RespectImageOrientation && image->isBitmapImage()) { + ImageOrientation imageOrientation = toBitmapImage(image)->currentFrameOrientation(); + skImage = DragImage::resizeAndOrientImage(skImage.release(), imageOrientation); + } m_imageLayer->setImage(skImage.get()); m_imageLayer->layer()->setOpaque(image->currentFrameKnownToBeOpaque()); updateContentsRect();
diff --git a/third_party/WebKit/Source/platform/graphics/GraphicsLayer.h b/third_party/WebKit/Source/platform/graphics/GraphicsLayer.h index 7e1c8a2..3aaac2d 100644 --- a/third_party/WebKit/Source/platform/graphics/GraphicsLayer.h +++ b/third_party/WebKit/Source/platform/graphics/GraphicsLayer.h
@@ -36,6 +36,7 @@ #include "platform/graphics/ContentLayerDelegate.h" #include "platform/graphics/GraphicsLayerClient.h" #include "platform/graphics/GraphicsLayerDebugInfo.h" +#include "platform/graphics/ImageOrientation.h" #include "platform/graphics/PaintInvalidationReason.h" #include "platform/graphics/filters/FilterOperations.h" #include "platform/graphics/paint/DisplayItemClient.h" @@ -207,7 +208,7 @@ void removeAnimation(int animationId); // Layer contents - void setContentsToImage(Image*); + void setContentsToImage(Image*, RespectImageOrientationEnum = DoNotRespectImageOrientation); void setContentsToPlatformLayer(WebLayer* layer) { setContentsTo(layer); } bool hasContentsLayer() const { return m_contentsLayer; }
diff --git a/third_party/WebKit/Source/platform/graphics/paint/ClipPathRecorder.cpp b/third_party/WebKit/Source/platform/graphics/paint/ClipPathRecorder.cpp index dd02475..5511d28 100644 --- a/third_party/WebKit/Source/platform/graphics/paint/ClipPathRecorder.cpp +++ b/third_party/WebKit/Source/platform/graphics/paint/ClipPathRecorder.cpp
@@ -24,12 +24,7 @@ ClipPathRecorder::~ClipPathRecorder() { ASSERT(m_context.displayItemList()); - if (!m_context.displayItemList()->displayItemConstructionIsDisabled()) { - if (m_context.displayItemList()->lastDisplayItemIsNoopBegin()) - m_context.displayItemList()->removeLastDisplayItem(); - else - m_context.displayItemList()->createAndAppend<EndClipPathDisplayItem>(m_client); - } + m_context.displayItemList()->endItem<EndClipPathDisplayItem>(m_client); } } // namespace blink
diff --git a/third_party/WebKit/Source/platform/graphics/paint/ClipRecorder.cpp b/third_party/WebKit/Source/platform/graphics/paint/ClipRecorder.cpp index c4f1262..156f0c2 100644 --- a/third_party/WebKit/Source/platform/graphics/paint/ClipRecorder.cpp +++ b/third_party/WebKit/Source/platform/graphics/paint/ClipRecorder.cpp
@@ -25,12 +25,7 @@ ClipRecorder::~ClipRecorder() { ASSERT(m_context.displayItemList()); - if (!m_context.displayItemList()->displayItemConstructionIsDisabled()) { - if (m_context.displayItemList()->lastDisplayItemIsNoopBegin()) - m_context.displayItemList()->removeLastDisplayItem(); - else - m_context.displayItemList()->createAndAppend<EndClipDisplayItem>(m_client, DisplayItem::clipTypeToEndClipType(m_type)); - } + m_context.displayItemList()->endItem<EndClipDisplayItem>(m_client, DisplayItem::clipTypeToEndClipType(m_type)); } } // namespace blink
diff --git a/third_party/WebKit/Source/platform/graphics/paint/DisplayItem.cpp b/third_party/WebKit/Source/platform/graphics/paint/DisplayItem.cpp index fa2c265..fe060a7a 100644 --- a/third_party/WebKit/Source/platform/graphics/paint/DisplayItem.cpp +++ b/third_party/WebKit/Source/platform/graphics/paint/DisplayItem.cpp
@@ -104,8 +104,10 @@ DEBUG_STRING_CASE(ScrollbarVertical); DEBUG_STRING_CASE(SelectionGap); DEBUG_STRING_CASE(SelectionTint); - DEBUG_STRING_CASE(TableCellBackgroundFromContainers); - DEBUG_STRING_CASE(TableCellBackgroundFromSelfPaintingRow); + DEBUG_STRING_CASE(TableCellBackgroundFromColumnGroup); + DEBUG_STRING_CASE(TableCellBackgroundFromColumn); + DEBUG_STRING_CASE(TableCellBackgroundFromSection); + DEBUG_STRING_CASE(TableCellBackgroundFromRow); DEBUG_STRING_CASE(VideoBitmap); DEBUG_STRING_CASE(WebPlugin); DEBUG_STRING_CASE(WebFont);
diff --git a/third_party/WebKit/Source/platform/graphics/paint/DisplayItem.h b/third_party/WebKit/Source/platform/graphics/paint/DisplayItem.h index 7f4ab1c..8084dd7 100644 --- a/third_party/WebKit/Source/platform/graphics/paint/DisplayItem.h +++ b/third_party/WebKit/Source/platform/graphics/paint/DisplayItem.h
@@ -90,8 +90,10 @@ ScrollbarVertical, // For ScrollbarThemeMacNonOverlayAPI only. SelectionGap, SelectionTint, - TableCellBackgroundFromContainers, - TableCellBackgroundFromSelfPaintingRow, + TableCellBackgroundFromColumnGroup, + TableCellBackgroundFromColumn, + TableCellBackgroundFromSection, + TableCellBackgroundFromRow, // Table collapsed borders can be painted together (e.g., left & top) but there are at most 4 phases of collapsed // border painting for a single cell. To disambiguate these phases of collapsed border painting, a mask is used. // TableCollapsedBorderBase can be larger than TableCollapsedBorderUnalignedBase to ensure the base lower bits are 0's.
diff --git a/third_party/WebKit/Source/platform/graphics/paint/DisplayItemList.cpp b/third_party/WebKit/Source/platform/graphics/paint/DisplayItemList.cpp index e685909..7e8a0df 100644 --- a/third_party/WebKit/Source/platform/graphics/paint/DisplayItemList.cpp +++ b/third_party/WebKit/Source/platform/graphics/paint/DisplayItemList.cpp
@@ -119,7 +119,8 @@ void DisplayItemList::invalidate(const DisplayItemClientWrapper& client, PaintInvalidationReason paintInvalidationReason, const IntRect& previousPaintInvalidationRect, const IntRect& newPaintInvalidationRect) { - invalidateUntracked(client.displayItemClient()); + invalidateClient(client); + if (RuntimeEnabledFeatures::slimmingPaintSynchronizedPaintingEnabled()) { Invalidation invalidation = { previousPaintInvalidationRect, paintInvalidationReason }; if (!previousPaintInvalidationRect.isEmpty()) @@ -129,15 +130,19 @@ m_invalidations.append(invalidation); } } +} +void DisplayItemList::invalidateClient(const DisplayItemClientWrapper& client) +{ + invalidateUntracked(client.displayItemClient()); if (RuntimeEnabledFeatures::slimmingPaintV2Enabled() && m_trackedPaintInvalidationObjects) m_trackedPaintInvalidationObjects->append(client.debugName()); } void DisplayItemList::invalidateUntracked(DisplayItemClient client) { - // Can only be called during layout/paintInvalidation, not during painting. - ASSERT(m_newDisplayItems.isEmpty()); + // This can be called during painting, but we can't invalidate already painted clients. + ASSERT(!m_newDisplayItemIndicesByClient.contains(client)); updateValidlyCachedClientsIfNeeded(); m_validlyCachedClients.remove(client); } @@ -165,20 +170,11 @@ void DisplayItemList::invalidatePaintOffset(const DisplayItemClientWrapper& client) { ASSERT(RuntimeEnabledFeatures::slimmingPaintOffsetCachingEnabled()); - - updateValidlyCachedClientsIfNeeded(); - m_validlyCachedClients.remove(client.displayItemClient()); - - if (RuntimeEnabledFeatures::slimmingPaintV2Enabled() && m_trackedPaintInvalidationObjects) - m_trackedPaintInvalidationObjects->append(client.debugName()); + invalidateClient(client); #if ENABLE(ASSERT) + ASSERT(!paintOffsetWasInvalidated(client.displayItemClient())); m_clientsWithPaintOffsetInvalidations.add(client.displayItemClient()); - - // Ensure no phases slipped in using the old paint offset which would indicate - // different phases used different paint offsets, which should not happen. - for (const auto& item : m_newDisplayItems) - ASSERT(!item.isCached() || item.client() != client.displayItemClient()); #endif } @@ -287,6 +283,7 @@ for (const auto& invalidation : m_invalidations) graphicsLayer->setNeedsDisplayInRect(invalidation.rect, invalidation.invalidationReason); m_invalidations.clear(); + m_clientsCheckedPaintInvalidation.clear(); } // These data structures are used during painting only. @@ -296,6 +293,7 @@ ASSERT(!skippingCache()); #if ENABLE(ASSERT) m_newDisplayItemIndicesByClient.clear(); + m_clientsWithPaintOffsetInvalidations.clear(); #endif if (m_currentDisplayItems.isEmpty()) { @@ -392,10 +390,6 @@ m_validlyCachedClientsDirty = true; m_currentDisplayItems.swap(updatedList); m_numCachedItems = 0; - -#if ENABLE(ASSERT) - m_clientsWithPaintOffsetInvalidations.clear(); -#endif } size_t DisplayItemList::approximateUnsharedMemoryUsage() const
diff --git a/third_party/WebKit/Source/platform/graphics/paint/DisplayItemList.h b/third_party/WebKit/Source/platform/graphics/paint/DisplayItemList.h index b473c93..d1c1a69 100644 --- a/third_party/WebKit/Source/platform/graphics/paint/DisplayItemList.h +++ b/third_party/WebKit/Source/platform/graphics/paint/DisplayItemList.h
@@ -95,6 +95,22 @@ return displayItem; } + // Creates and appends an ending display item to pair with a preceding + // beginning item iff the display item actually draws content. For no-op + // items, rather than creating an ending item, the begin item will + // instead be removed, thereby maintaining brevity of the list. If display + // item construction is disabled, no list mutations will be performed. + template <typename DisplayItemClass, typename... Args> + void endItem(Args&&... args) + { + if (displayItemConstructionIsDisabled()) + return; + if (lastDisplayItemIsNoopBegin()) + removeLastDisplayItem(); + else + createAndAppend<DisplayItemClass>(WTF::forward<Args>(args)...); + } + // Scopes must be used to avoid duplicated display item ids when we paint some object // multiple times and generate multiple display items with the same type. // We don't cache display items added in scopes. @@ -163,6 +179,17 @@ return m_trackedPaintInvalidationObjects ? *m_trackedPaintInvalidationObjects : Vector<String>(); } + bool clientHasCheckedPaintInvalidation(DisplayItemClient client) const + { + ASSERT(RuntimeEnabledFeatures::slimmingPaintSynchronizedPaintingEnabled()); + return m_clientsCheckedPaintInvalidation.contains(client); + } + void setClientHasCheckedPaintInvalidation(DisplayItemClient client) + { + ASSERT(RuntimeEnabledFeatures::slimmingPaintSynchronizedPaintingEnabled()); + m_clientsCheckedPaintInvalidation.add(client); + } + protected: DisplayItemList() : m_currentDisplayItems(0) @@ -179,6 +206,8 @@ void updateValidlyCachedClientsIfNeeded() const; + void invalidateClient(const DisplayItemClientWrapper&); + #ifndef NDEBUG WTF::String displayItemsAsDebugString(const DisplayItems&) const; #endif @@ -216,11 +245,16 @@ // Contains all clients having valid cached paintings if updated. // It's lazily updated in updateValidlyCachedClientsIfNeeded(). - // FIXME: In the future we can replace this with client-side repaint flags + // TODO(wangxianzhu): In the future we can replace this with client-side repaint flags // to avoid the cost of building and querying the hash table. mutable HashSet<DisplayItemClient> m_validlyCachedClients; mutable bool m_validlyCachedClientsDirty; + // Used during painting. Contains clients that have checked paint invalidation and + // are known to be valid. + // TODO(wangxianzhu): Use client side flag to avoid const of hash table. + HashSet<DisplayItemClient> m_clientsCheckedPaintInvalidation; + #if ENABLE(ASSERT) // Set of clients which had paint offset changes since the last commit. This is used for // ensuring paint offsets are only updated once and are the same in all phases.
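The new DisplayItemList::endItem<T>() gathers up the end-of-scope logic that ClipRecorder, ClipPathRecorder, TransformRecorder and Transform3DRecorder previously duplicated: do nothing when display item construction is disabled, pop the paired begin item if it turned out to be a no-op, and otherwise append the matching end item. Each recorder destructor in the hunks earlier in this patch now reduces to a single call; ClipRecorder, for example:

    ClipRecorder::~ClipRecorder()
    {
        ASSERT(m_context.displayItemList());
        m_context.displayItemList()->endItem<EndClipDisplayItem>(m_client, DisplayItem::clipTypeToEndClipType(m_type));
    }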
diff --git a/third_party/WebKit/Source/platform/testing/URLTestHelpers.cpp b/third_party/WebKit/Source/platform/testing/URLTestHelpers.cpp index d56dc1c..357918fa 100644 --- a/third_party/WebKit/Source/platform/testing/URLTestHelpers.cpp +++ b/third_party/WebKit/Source/platform/testing/URLTestHelpers.cpp
@@ -33,6 +33,7 @@ #include "public/platform/Platform.h" #include "public/platform/WebURL.h" +#include "public/platform/WebURLError.h" #include "public/platform/WebURLResponse.h" #include "public/platform/WebUnitTestSupport.h" @@ -60,6 +61,18 @@ registerMockedURLLoadWithCustomResponse(fullURL, fileName, relativeBaseDirectory, response); } +void registerMockedErrorURLLoad(const WebURL& fullURL) +{ + WebURLResponse response; + response.initialize(); + response.setMIMEType("image/png"); + response.setHTTPStatusCode(404); + + WebURLError error; + error.reason = 404; + Platform::current()->unitTestSupport()->registerMockedErrorURL(fullURL, response, error); +} + void registerMockedURLLoadWithCustomResponse(const WebURL& fullURL, const WebString& fileName, const WebString& relativeBaseDirectory, WebURLResponse response) { // Physical file path for the mock = <webkitRootDir> + relativeBaseDirectory + fileName.
diff --git a/third_party/WebKit/Source/platform/testing/URLTestHelpers.h b/third_party/WebKit/Source/platform/testing/URLTestHelpers.h index 48abdee..d676b10 100644 --- a/third_party/WebKit/Source/platform/testing/URLTestHelpers.h +++ b/third_party/WebKit/Source/platform/testing/URLTestHelpers.h
@@ -56,6 +56,7 @@ void registerMockedURLLoad(const WebURL& fullURL, const WebString& fileName, const WebString& mimeType = WebString::fromUTF8("text/html")); void registerMockedURLLoad(const WebURL& fullURL, const WebString& fileName, const WebString& relativeBaseDirectory, const WebString& mimeType); void registerMockedURLLoadWithCustomResponse(const WebURL& fullURL, const WebString& fileName, const WebString& relativeBaseDirectory, WebURLResponse); +void registerMockedErrorURLLoad(const WebURL& fullURL); } // namespace URLTestHelpers } // namespace blink
diff --git a/third_party/WebKit/Source/web/tests/WebFrameTest.cpp b/third_party/WebKit/Source/web/tests/WebFrameTest.cpp index 58b15bf..6ccabeb 100644 --- a/third_party/WebKit/Source/web/tests/WebFrameTest.cpp +++ b/third_party/WebKit/Source/web/tests/WebFrameTest.cpp
@@ -61,6 +61,7 @@ #include "core/html/HTMLDocument.h" #include "core/html/HTMLFormElement.h" #include "core/html/HTMLMediaElement.h" +#include "core/html/ImageDocument.h" #include "core/input/EventHandler.h" #include "core/layout/HitTestResult.h" #include "core/layout/LayoutFullScreen.h" @@ -207,6 +208,11 @@ URLTestHelpers::registerMockedURLLoadWithCustomResponse(toKURL(fullString.c_str()), WebString::fromUTF8(fileName.c_str()), WebString::fromUTF8(""), response); } + void registerMockedHttpURLLoadWithMimeType(const std::string& fileName, const std::string& mimeType) + { + URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8(fileName.c_str()), WebString::fromUTF8(mimeType)); + } + void applyViewportStyleOverride(FrameTestHelpers::WebViewHelper* webViewHelper) { RefPtrWillBeRawPtr<StyleSheetContents> styleSheet = StyleSheetContents::create(CSSParserContext(UASheetMode, 0)); @@ -8222,4 +8228,32 @@ webViewImpl->mainFrameImpl()->collectGarbage(); } +TEST_F(WebFrameTest, ImageDocumentLoadFinishTime) +{ + // Loading an image resource directly generates an ImageDocument with + // the document loader feeding image data into the resource of a generated + // img tag. We expect the load finish time to be the same for the document + // and the image resource. + + registerMockedHttpURLLoadWithMimeType("white-1x1.png", "image/png"); + FrameTestHelpers::WebViewHelper webViewHelper; + webViewHelper.initializeAndLoad(m_baseURL + "white-1x1.png"); + WebView* webView = webViewHelper.webView(); + Document* document = toWebLocalFrameImpl(webView->mainFrame())->frame()->document(); + + EXPECT_TRUE(document); + EXPECT_TRUE(document->isImageDocument()); + + ImageDocument* imgDocument = toImageDocument(document); + ImageResource* resource = imgDocument->cachedImage(); + + EXPECT_TRUE(resource); + EXPECT_NE(0, resource->loadFinishTime()); + + DocumentLoader* loader = document->loader(); + + EXPECT_TRUE(loader); + EXPECT_EQ(loader->timing().responseEnd(), resource->loadFinishTime()); +} + } // namespace blink
diff --git a/third_party/WebKit/Source/web/tests/WebViewTest.cpp b/third_party/WebKit/Source/web/tests/WebViewTest.cpp index fb38eef7..6e636a1 100644 --- a/third_party/WebKit/Source/web/tests/WebViewTest.cpp +++ b/third_party/WebKit/Source/web/tests/WebViewTest.cpp
@@ -445,6 +445,42 @@ EXPECT_EQ(imageUrl, hitTestAbsoluteUrl(webView, 75, 25)); } +TEST_F(WebViewTest, BrokenImage) +{ + URLTestHelpers::registerMockedErrorURLLoad(KURL(toKURL(m_baseURL), "non_existent.png")); + std::string url = m_baseURL + "image-broken.html"; + URLTestHelpers::registerMockedURLLoad(toKURL(url), "image-broken.html"); + + WebView* webView = m_webViewHelper.initialize(); + webView->settings()->setLoadsImagesAutomatically(true); + loadFrame(webView->mainFrame(), url); + webView->resize(WebSize(400, 400)); + + std::string imageUrl = "http://www.test.com/non_existent.png"; + + EXPECT_EQ("image", hitTestElementId(webView, 25, 25)); + EXPECT_TRUE(hitTestUrlElement(webView, 25, 25).isNull()); + EXPECT_EQ(imageUrl, hitTestAbsoluteUrl(webView, 25, 25)); +} + +TEST_F(WebViewTest, BrokenInputImage) +{ + URLTestHelpers::registerMockedErrorURLLoad(KURL(toKURL(m_baseURL), "non_existent.png")); + std::string url = m_baseURL + "input-image-broken.html"; + URLTestHelpers::registerMockedURLLoad(toKURL(url), "input-image-broken.html"); + + WebView* webView = m_webViewHelper.initialize(); + webView->settings()->setLoadsImagesAutomatically(true); + loadFrame(webView->mainFrame(), url); + webView->resize(WebSize(400, 400)); + + std::string imageUrl = "http://www.test.com/non_existent.png"; + + EXPECT_EQ("image", hitTestElementId(webView, 25, 25)); + EXPECT_TRUE(hitTestUrlElement(webView, 25, 25).isNull()); + EXPECT_EQ(imageUrl, hitTestAbsoluteUrl(webView, 25, 25)); +} + TEST_F(WebViewTest, SetBaseBackgroundColor) { const WebColor kWhite = 0xFFFFFFFF;
diff --git a/third_party/WebKit/Source/web/tests/data/image-broken.html b/third_party/WebKit/Source/web/tests/data/image-broken.html new file mode 100644 index 0000000..e6df3c3 --- /dev/null +++ b/third_party/WebKit/Source/web/tests/data/image-broken.html
@@ -0,0 +1,2 @@ +<!DOCTYPE HTML> +<img id="image" width="100" height="100" src="non_existent.png" alt="test">
diff --git a/third_party/WebKit/Source/web/tests/data/input-image-broken.html b/third_party/WebKit/Source/web/tests/data/input-image-broken.html new file mode 100644 index 0000000..4c373c4 --- /dev/null +++ b/third_party/WebKit/Source/web/tests/data/input-image-broken.html
@@ -0,0 +1,2 @@ +<!DOCTYPE HTML> +<input type=image id="image" width="100" height="100" src="non_existent.png" alt="test">
diff --git a/third_party/freetype-android/README.chromium b/third_party/freetype-android/README.chromium index 72d405a..5f07342 100644 --- a/third_party/freetype-android/README.chromium +++ b/third_party/freetype-android/README.chromium
@@ -1,7 +1,7 @@ Name: FreeType URL: http://www.freetype.org/ -Version: VER-2-6 -Revision: e186230678ee8e4ea4ac4797ece8125761e3225a +Version: VER-2-6-1 +Revision: 8cabd919ca63f0e6c12e8405e8542a45d910fa62 License: Custom license "inspired by the BSD, Artistic, and IJG (Independent JPEG Group) licenses" License File: src/docs/FTL.TXT
diff --git a/third_party/libevent/ChangeLog b/third_party/libevent/ChangeLog index 50eb6b3..893b0873 100644 --- a/third_party/libevent/ChangeLog +++ b/third_party/libevent/ChangeLog
@@ -1,3 +1,55 @@ +Changes in 1.4.15-stable (5 January 2015) + + o Avoid integer overflow bugs in evbuffer_add() and related functions. See CVE-2014-6272 advisory for more information. (d49bc0e88b81a5812116074dc007f1db0ca1eecd) + + o Pass flags to fcntl(F_SETFL) as int, not long (b3d0382) + o Backport and tweak the LICENSE file for 1.4 (8a5ebd3) + o set close-on-exec bit for filedescriptors created by dns subsystem (9985231 Ralf Schmitt) + o Replace unused case of FD_CLOSEONEXEC with a proper null statement. (44f04a2) + o Fix kqueue correctness test on x84_64 (1c25b07) + o Avoid deadlock when activating signals. (e0e6958) + o Backport doc fix for evhttp_bind_socket. (95b71d0 Marco) + o Fix an issue with forking and signal socketpairs in select/poll backends (f0ff765) + o Fix compilation on Visual Studio 2010 (53c47c2 VDm) + o Defensive programming to prevent (hopefully impossible) stack-stomping (2d8cf0b) + o Check for POLLERR, POLLHUP and POLLNVAL for Solaris event ports (353b4ac Trond Norbye) + o Fix a bug that could allow dns requests with duplicate tx ids (e50ba5b) + o Avoid truncating huge values for content-length (1d6e30e) + o Take generated files out of git; add correct m4 magic for libtool to auto* files (7cf794b) + o Prefer autoregen -ivf to manual autogen.sh (823d9be) + + +Changes in 1.4.14b-stable + o Set the VERSION_INFO correctly for 1.4.14 + + +Changes in 1.4.14-stable + o Add a .gitignore file for the 1.4 branch. (d014edb) + o Backport evbuffer_readln(). (b04cc60 Nicholas Marriott) + o Make the evbuffer_readln backport follow the current API (c545485) + o Valgrind fix: Clear struct kevent before checking for OSX bug. (5713d5d William Ahern) + o Fix a crash when reading badly formatted resolve.conf (5b10d00 Yasuoka Masahiko) + o Fix memory-leak of signal handler array with kqueue. [backport] (01f3775) + o Update sample/signal-test.c to use newer APIs and not leak. (891765c Evan Jones) + o Correct all versions in 1.4 branch (ac0d213) + o Make evutil_make_socket_nonblocking() leave any other flags alone. (81c26ba Jardel Weyrich) + o Adjusted fcntl() retval comparison on evutil_make_socket_nonblocking(). (5f2e250 Jardel Weyrich) + o Correct a debug message in evhttp_parse_request_line (35df59e) + o Merge branch 'readln-backport' into patches-1.4 (8771d5b) + o Do not send an HTTP error when we've already closed or responded. (4fd2dd9 Pavel Plesov) + o Re-add event_siglcb; some old code _was_ still using it. :( (bd03d06) + o Make Libevent 1.4 build on win32 with Unicode enabled. (bce58d6 Brodie Thiesfield) + o Distribute nmake makefile for 1.4 (20d706d) + o do not fail while sending on http connections the client closed. (5c8b446) + o make evhttp_send() safe against terminated connections, too (01ea0c5) + o Fix a free(NULL) in min_heap.h (2458934) + o Fix memory leak when setting up priorities; reported by Alexander Drozdov (cb1a722) + o Clean up properly when adding a signal handler fails. (ae6ece0 Gilad Benjamini) + o Do not abort HTTP requests missing a reason string. (29d7b32 Pierre Phaneuf) + o Fix compile warning in http.c (906d573) + o Define _REENTRANT as needed on Solaris, elsewhere (6cbea13) + + Changes in 1.4.13-stable: o If the kernel tells us that there are a negative number of bytes to read from a socket, do not believe it. Fixes bug 2841177; found by Alexander Pronchenkov. o Do not allocate the maximum event queue and fd array for the epoll backend at startup. 
Instead, start out accepting 32 events at a time, and double the queue's size when it seems that the OS is generating events faster than we're requesting them. Saves up to 512K per epoll-based event_base. Resolves bug 2839240.
diff --git a/third_party/libevent/Doxyfile b/third_party/libevent/Doxyfile new file mode 100644 index 0000000..77f6de8 --- /dev/null +++ b/third_party/libevent/Doxyfile
@@ -0,0 +1,230 @@ +# Doxyfile 1.5.1 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = libevent + +# Place all output under 'doxygen/' + +OUTPUT_DIRECTORY = doxygen/ + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like the Qt-style comments (thus requiring an +# explicit @brief command for a brief description. + +JAVADOC_AUTOBRIEF = YES + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = event.h evdns.h evhttp.h evrpc.h + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = YES + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. 
+ +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = YES + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. 
+ +MACRO_EXPANSION = NO + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = TAILQ_ENTRY RB_ENTRY _EVENT_DEFINED_TQENTRY + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES
diff --git a/third_party/libevent/LICENSE b/third_party/libevent/LICENSE index af977a4..cabd9fc 100644 --- a/third_party/libevent/LICENSE +++ b/third_party/libevent/LICENSE
@@ -1,5 +1,9 @@ -Copyright 2000-2007 Niels Provos <provos@citi.umich.edu> -Copyright 2007-2009 Niels Provos and Nick Mathewson +Libevent is available for use under the following license, commonly known +as the 3-clause (or "modified") BSD license: + +============================== +Copyright (c) 2000-2007 Niels Provos <provos@citi.umich.edu> +Copyright (c) 2007-2010 Niels Provos and Nick Mathewson Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions @@ -22,3 +26,28 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +============================== + +Portions of Libevent are based on works by others, also made available by +them under the three-clause BSD license above. The copyright notices are +available in the corresponding source files; the license is as above. Here's +a list: + +log.c: + Copyright (c) 2000 Dug Song <dugsong@monkey.org> + Copyright (c) 1993 The Regents of the University of California. + +strlcpy.c: + Copyright (c) 1998 Todd C. Miller <Todd.Miller@courtesan.com> + +win32.c: + Copyright (c) 2003 Michael A. Davis <mike@datanerds.net> + +evport.c: + Copyright (c) 2007 Sun Microsystems + +min_heap.h: + Copyright (c) 2006 Maxim Yegorushkin <maxim.yegorushkin@gmail.com> + +tree.h: + Copyright 2002 Niels Provos <provos@citi.umich.edu>
diff --git a/third_party/libevent/Makefile.am b/third_party/libevent/Makefile.am index dc78ef1..c1ed62a 100644 --- a/third_party/libevent/Makefile.am +++ b/third_party/libevent/Makefile.am
@@ -1,5 +1,7 @@ AUTOMAKE_OPTIONS = foreign no-dependencies +ACLOCAL_AMFLAGS = -I m4 + # This is the point release for libevent. It shouldn't include any # a/b/c/d/e notations. RELEASE = 1.4 @@ -24,12 +26,36 @@ # compatibility with old binaries. Increment Current. Set Age to 0, # since we're backward compatible with no previous APIs. Set Revision # to 0 too. +VERSION_INFO = 4:1:2 +### # History: -# Libevent 1.4.1 was 2:0:0 -# Libevent 1.4.2 should be 3:0:0 -# Libevent 1.4.5 is 3:0:1 (we forgot to increment in the past) -VERSION_INFO = 3:3:1 +# We started using Libtool around version 1.0d. For all versions from +# 1.0d through 1.3e, we set RELEASE to the version name, and +# VERSION_INFO to something haphazard. The didn't matter, since +# setting RELEASE meant that no version of Libevent was treated as +# binary-compatible with any other version. +# +# As of 1.4.0-beta, we set RELEASE to "1.4", so that releases in the +# 1.4.x series could be potentially binary-compatible with one another, +# but not with any other series. (They aren't.) We didn't necessarily +# set VERSION_INFO correctly, or update it as often as we should have. +# The VERSION_INFO values were: +# 1.4.0-beta .. 1.4.4-stable : 2:0:0 [See note 1] +# 1.4.5-stable : 3:0:1 (compatible ABI change) +# 1.4.6-stable : 3:1:1 (no ABI change) +# 1.4.7-stable : 3:1:1 [see note 1] +# 1.4.8-stable : 3:2:1 (no ABI change) +# 1.4.9-stable : 3:2:1 [see note 1] +# 1.4.10-stable : 3:3:1 (no ABI change) +# 1.4.11-stable .. 1.4.13-stable : 3:3:1 [see note 1] +# 1.4.14a-stable: : 3:3:2 [see note 2] +# 1.4.14b-stable: : 4:0:2 (compatible ABI change) +# 1.4.15-stable: : 4:1:2 (no ABI change) +# +# [1]: Using the same VERSION_INFO value was wrong; we should have been +# updating the Revision field. +# [2]: We set the VERSION_INFO completely wrong on 1.4.14b-stable bin_SCRIPTS = event_rpcgen.py @@ -53,7 +79,9 @@ WIN32-Prj/event_test/test.txt WIN32-Prj/libevent.dsp \ WIN32-Prj/libevent.dsw WIN32-Prj/signal_test/signal_test.dsp \ WIN32-Prj/time_test/time_test.dsp WIN32-Prj/regress/regress.vcproj \ - WIN32-Prj/libevent.sln WIN32-Prj/libevent.vcproj + WIN32-Prj/libevent.sln WIN32-Prj/libevent.vcproj \ + Makefile.nmake test/Makefile.nmake \ + LICENSE lib_LTLIBRARIES = libevent.la libevent_core.la libevent_extra.la
diff --git a/third_party/libevent/Makefile.in b/third_party/libevent/Makefile.in deleted file mode 100644 index 4d96c74..0000000 --- a/third_party/libevent/Makefile.in +++ /dev/null
@@ -1,976 +0,0 @@ -# Makefile.in generated by automake 1.10.1 from Makefile.am. -# @configure_input@ - -# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. -# This Makefile.in is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - -@SET_MAKE@ - - - -VPATH = @srcdir@ -pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ -pkgincludedir = $(includedir)/@PACKAGE@ -am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd -install_sh_DATA = $(install_sh) -c -m 644 -install_sh_PROGRAM = $(install_sh) -c -install_sh_SCRIPT = $(install_sh) -c -INSTALL_HEADER = $(INSTALL_DATA) -transform = $(program_transform_name) -NORMAL_INSTALL = : -PRE_INSTALL = : -POST_INSTALL = : -NORMAL_UNINSTALL = : -PRE_UNINSTALL = : -POST_UNINSTALL = : -build_triplet = @build@ -host_triplet = @host@ -subdir = . -DIST_COMMON = README $(am__configure_deps) $(include_HEADERS) \ - $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ - $(srcdir)/config.h.in $(top_srcdir)/configure ChangeLog \ - config.guess config.sub devpoll.c epoll.c epoll_sub.c evport.c \ - install-sh kqueue.c ltmain.sh missing mkinstalldirs poll.c \ - select.c signal.c -ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/configure.in -am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) -am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ - configure.lineno config.status.lineno -mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs -CONFIG_HEADER = config.h -CONFIG_CLEAN_FILES = -am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; -am__vpath_adj = case $$p in \ - $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ - *) f=$$p;; \ - esac; -am__strip_dir = `echo $$p | sed -e 's|^.*/||'`; -am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(bindir)" \ - "$(DESTDIR)$(man3dir)" "$(DESTDIR)$(includedir)" \ - "$(DESTDIR)$(includedir)" -libLTLIBRARIES_INSTALL = $(INSTALL) -LTLIBRARIES = $(lib_LTLIBRARIES) -am__DEPENDENCIES_1 = -libevent_la_DEPENDENCIES = @LTLIBOBJS@ $(am__DEPENDENCIES_1) -am__libevent_la_SOURCES_DIST = event.c buffer.c evbuffer.c log.c \ - evutil.c WIN32-Code/win32.c event_tagging.c http.c evhttp.h \ - http-internal.h evdns.c evdns.h evrpc.c evrpc.h \ - evrpc-internal.h strlcpy.c strlcpy-internal.h -@BUILD_WIN32_TRUE@am__objects_1 = win32.lo -am__objects_2 = event.lo buffer.lo evbuffer.lo log.lo evutil.lo \ - $(am__objects_1) -am__objects_3 = event_tagging.lo http.lo evdns.lo evrpc.lo strlcpy.lo -am_libevent_la_OBJECTS = $(am__objects_2) $(am__objects_3) -libevent_la_OBJECTS = $(am_libevent_la_OBJECTS) -libevent_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \ - $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ - $(libevent_la_LDFLAGS) $(LDFLAGS) -o $@ -libevent_core_la_DEPENDENCIES = @LTLIBOBJS@ $(am__DEPENDENCIES_1) -am__libevent_core_la_SOURCES_DIST = event.c buffer.c evbuffer.c log.c \ - evutil.c WIN32-Code/win32.c -am_libevent_core_la_OBJECTS = $(am__objects_2) -libevent_core_la_OBJECTS = $(am_libevent_core_la_OBJECTS) -libevent_core_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \ - 
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ - $(libevent_core_la_LDFLAGS) $(LDFLAGS) -o $@ -libevent_extra_la_DEPENDENCIES = @LTLIBOBJS@ $(am__DEPENDENCIES_1) -am_libevent_extra_la_OBJECTS = $(am__objects_3) -libevent_extra_la_OBJECTS = $(am_libevent_extra_la_OBJECTS) -libevent_extra_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \ - $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ - $(libevent_extra_la_LDFLAGS) $(LDFLAGS) -o $@ -binSCRIPT_INSTALL = $(INSTALL_SCRIPT) -SCRIPTS = $(bin_SCRIPTS) -DEFAULT_INCLUDES = -I.@am__isrc@ -depcomp = -am__depfiles_maybe = -COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ - $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ - --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ - $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -CCLD = $(CC) -LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ - --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ - $(LDFLAGS) -o $@ -SOURCES = $(libevent_la_SOURCES) $(libevent_core_la_SOURCES) \ - $(libevent_extra_la_SOURCES) -DIST_SOURCES = $(am__libevent_la_SOURCES_DIST) \ - $(am__libevent_core_la_SOURCES_DIST) \ - $(libevent_extra_la_SOURCES) -RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ - html-recursive info-recursive install-data-recursive \ - install-dvi-recursive install-exec-recursive \ - install-html-recursive install-info-recursive \ - install-pdf-recursive install-ps-recursive install-recursive \ - installcheck-recursive installdirs-recursive pdf-recursive \ - ps-recursive uninstall-recursive -man3dir = $(mandir)/man3 -NROFF = nroff -MANS = $(man_MANS) -includeHEADERS_INSTALL = $(INSTALL_HEADER) -nodist_includeHEADERS_INSTALL = $(INSTALL_HEADER) -HEADERS = $(include_HEADERS) $(nodist_include_HEADERS) -RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ - distclean-recursive maintainer-clean-recursive -ETAGS = etags -CTAGS = ctags -DIST_SUBDIRS = . sample test -DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) -distdir = $(PACKAGE)-$(VERSION) -top_distdir = $(distdir) -am__remove_distdir = \ - { test ! -d $(distdir) \ - || { find $(distdir) -type d ! -perm -200 -exec chmod u+w {} ';' \ - && rm -fr $(distdir); }; } -DIST_ARCHIVES = $(distdir).tar.gz -GZIP_ENV = --best -distuninstallcheck_listfiles = find . -type f -print -distcleancheck_listfiles = find . 
-type f -print -ACLOCAL = @ACLOCAL@ -AMTAR = @AMTAR@ -AR = @AR@ -AUTOCONF = @AUTOCONF@ -AUTOHEADER = @AUTOHEADER@ -AUTOMAKE = @AUTOMAKE@ -AWK = @AWK@ -CC = @CC@ -CCDEPMODE = @CCDEPMODE@ -CFLAGS = @CFLAGS@ -CPP = @CPP@ -CPPFLAGS = @CPPFLAGS@ -CXX = @CXX@ -CXXCPP = @CXXCPP@ -CXXDEPMODE = @CXXDEPMODE@ -CXXFLAGS = @CXXFLAGS@ -CYGPATH_W = @CYGPATH_W@ -DEFS = @DEFS@ -DEPDIR = @DEPDIR@ -DSYMUTIL = @DSYMUTIL@ -ECHO = @ECHO@ -ECHO_C = @ECHO_C@ -ECHO_N = @ECHO_N@ -ECHO_T = @ECHO_T@ -EGREP = @EGREP@ -EXEEXT = @EXEEXT@ -F77 = @F77@ -FFLAGS = @FFLAGS@ -GREP = @GREP@ -INSTALL = @INSTALL@ -INSTALL_DATA = @INSTALL_DATA@ -INSTALL_PROGRAM = @INSTALL_PROGRAM@ -INSTALL_SCRIPT = @INSTALL_SCRIPT@ -INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -LDFLAGS = @LDFLAGS@ -LIBOBJS = @LIBOBJS@ -LIBS = @LIBS@ -LIBTOOL = @LIBTOOL@ -LIBTOOL_DEPS = @LIBTOOL_DEPS@ -LN_S = @LN_S@ -LTLIBOBJS = @LTLIBOBJS@ -MAKEINFO = @MAKEINFO@ -MKDIR_P = @MKDIR_P@ -NMEDIT = @NMEDIT@ -OBJEXT = @OBJEXT@ -PACKAGE = @PACKAGE@ -PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ -PACKAGE_NAME = @PACKAGE_NAME@ -PACKAGE_STRING = @PACKAGE_STRING@ -PACKAGE_TARNAME = @PACKAGE_TARNAME@ -PACKAGE_VERSION = @PACKAGE_VERSION@ -PATH_SEPARATOR = @PATH_SEPARATOR@ -RANLIB = @RANLIB@ -SED = @SED@ -SET_MAKE = @SET_MAKE@ -SHELL = @SHELL@ -STRIP = @STRIP@ -VERSION = @VERSION@ -abs_builddir = @abs_builddir@ -abs_srcdir = @abs_srcdir@ -abs_top_builddir = @abs_top_builddir@ -abs_top_srcdir = @abs_top_srcdir@ -ac_ct_CC = @ac_ct_CC@ -ac_ct_CXX = @ac_ct_CXX@ -ac_ct_F77 = @ac_ct_F77@ -am__include = @am__include@ -am__leading_dot = @am__leading_dot@ -am__quote = @am__quote@ -am__tar = @am__tar@ -am__untar = @am__untar@ -bindir = @bindir@ -build = @build@ -build_alias = @build_alias@ -build_cpu = @build_cpu@ -build_os = @build_os@ -build_vendor = @build_vendor@ -builddir = @builddir@ -datadir = @datadir@ -datarootdir = @datarootdir@ -docdir = @docdir@ -dvidir = @dvidir@ -exec_prefix = @exec_prefix@ -host = @host@ -host_alias = @host_alias@ -host_cpu = @host_cpu@ -host_os = @host_os@ -host_vendor = @host_vendor@ -htmldir = @htmldir@ -includedir = @includedir@ -infodir = @infodir@ -install_sh = @install_sh@ -libdir = @libdir@ -libexecdir = @libexecdir@ -localedir = @localedir@ -localstatedir = @localstatedir@ -mandir = @mandir@ -mkdir_p = @mkdir_p@ -oldincludedir = @oldincludedir@ -pdfdir = @pdfdir@ -prefix = @prefix@ -program_transform_name = @program_transform_name@ -psdir = @psdir@ -sbindir = @sbindir@ -sharedstatedir = @sharedstatedir@ -srcdir = @srcdir@ -sysconfdir = @sysconfdir@ -target_alias = @target_alias@ -top_build_prefix = @top_build_prefix@ -top_builddir = @top_builddir@ -top_srcdir = @top_srcdir@ -AUTOMAKE_OPTIONS = foreign no-dependencies - -# This is the point release for libevent. It shouldn't include any -# a/b/c/d/e notations. -RELEASE = 1.4 - -# This is the version info for the libevent binary API. It has three -# numbers: -# Current -- the number of the binary API that we're implementing -# Revision -- which iteration of the implementation of the binary -# API are we supplying? -# Age -- How many previous binary API versions do we also -# support? -# -# If we release a new version that does not change the binary API, -# increment Revision. -# -# If we release a new version that changes the binary API, but does -# not break programs compiled against the old binary API, increment -# Current and Age. Set Revision to 0, since this is the first -# implementation of the new API. 
-# -# Otherwise, we're changing the binary API and breaking bakward -# compatibility with old binaries. Increment Current. Set Age to 0, -# since we're backward compatible with no previous APIs. Set Revision -# to 0 too. - -# History: -# Libevent 1.4.1 was 2:0:0 -# Libevent 1.4.2 should be 3:0:0 -# Libevent 1.4.5 is 3:0:1 (we forgot to increment in the past) -VERSION_INFO = 3:3:1 -bin_SCRIPTS = event_rpcgen.py -EXTRA_DIST = autogen.sh event.h event-internal.h log.h evsignal.h evdns.3 \ - evrpc.h evrpc-internal.h min_heap.h \ - event.3 \ - Doxyfile \ - kqueue.c epoll_sub.c epoll.c select.c poll.c signal.c \ - evport.c devpoll.c event_rpcgen.py \ - sample/Makefile.am sample/Makefile.in sample/event-test.c \ - sample/signal-test.c sample/time-test.c \ - test/Makefile.am test/Makefile.in test/bench.c test/regress.c \ - test/test-eof.c test/test-weof.c test/test-time.c \ - test/test-init.c test/test.sh \ - compat/sys/queue.h compat/sys/_libevent_time.h \ - WIN32-Code/config.h \ - WIN32-Code/event-config.h \ - WIN32-Code/win32.c \ - WIN32-Code/tree.h \ - WIN32-Prj/event_test/event_test.dsp \ - WIN32-Prj/event_test/test.txt WIN32-Prj/libevent.dsp \ - WIN32-Prj/libevent.dsw WIN32-Prj/signal_test/signal_test.dsp \ - WIN32-Prj/time_test/time_test.dsp WIN32-Prj/regress/regress.vcproj \ - WIN32-Prj/libevent.sln WIN32-Prj/libevent.vcproj - -lib_LTLIBRARIES = libevent.la libevent_core.la libevent_extra.la -@BUILD_WIN32_FALSE@SUBDIRS = . sample test -@BUILD_WIN32_TRUE@SUBDIRS = . sample -@BUILD_WIN32_FALSE@SYS_LIBS = -@BUILD_WIN32_TRUE@SYS_LIBS = -lws2_32 -@BUILD_WIN32_FALSE@SYS_SRC = -@BUILD_WIN32_TRUE@SYS_SRC = WIN32-Code/win32.c -@BUILD_WIN32_FALSE@SYS_INCLUDES = -@BUILD_WIN32_TRUE@SYS_INCLUDES = -IWIN32-Code -BUILT_SOURCES = event-config.h -CORE_SRC = event.c buffer.c evbuffer.c log.c evutil.c $(SYS_SRC) -EXTRA_SRC = event_tagging.c http.c evhttp.h http-internal.h evdns.c \ - evdns.h evrpc.c evrpc.h evrpc-internal.h \ - strlcpy.c strlcpy-internal.h strlcpy-internal.h - -libevent_la_SOURCES = $(CORE_SRC) $(EXTRA_SRC) -libevent_la_LIBADD = @LTLIBOBJS@ $(SYS_LIBS) -libevent_la_LDFLAGS = -release $(RELEASE) -version-info $(VERSION_INFO) -libevent_core_la_SOURCES = $(CORE_SRC) -libevent_core_la_LIBADD = @LTLIBOBJS@ $(SYS_LIBS) -libevent_core_la_LDFLAGS = -release $(RELEASE) -version-info $(VERSION_INFO) -libevent_extra_la_SOURCES = $(EXTRA_SRC) -libevent_extra_la_LIBADD = @LTLIBOBJS@ $(SYS_LIBS) -libevent_extra_la_LDFLAGS = -release $(RELEASE) -version-info $(VERSION_INFO) -include_HEADERS = event.h evhttp.h evdns.h evrpc.h evutil.h -nodist_include_HEADERS = event-config.h -INCLUDES = -I$(srcdir)/compat $(SYS_INCLUDES) -man_MANS = event.3 evdns.3 -DISTCLEANFILES = *~ event-config.h -all: $(BUILT_SOURCES) config.h - $(MAKE) $(AM_MAKEFLAGS) all-recursive - -.SUFFIXES: -.SUFFIXES: .c .lo .o .obj -am--refresh: - @: -$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) - @for dep in $?; do \ - case '$(am__configure_deps)' in \ - *$$dep*) \ - echo ' cd $(srcdir) && $(AUTOMAKE) --foreign '; \ - cd $(srcdir) && $(AUTOMAKE) --foreign \ - && exit 0; \ - exit 1;; \ - esac; \ - done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign Makefile -.PRECIOUS: Makefile -Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' 
in \ - *config.status*) \ - echo ' $(SHELL) ./config.status'; \ - $(SHELL) ./config.status;; \ - *) \ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ - esac; - -$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - $(SHELL) ./config.status --recheck - -$(top_srcdir)/configure: $(am__configure_deps) - cd $(srcdir) && $(AUTOCONF) -$(ACLOCAL_M4): $(am__aclocal_m4_deps) - cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) - -config.h: stamp-h1 - @if test ! -f $@; then \ - rm -f stamp-h1; \ - $(MAKE) $(AM_MAKEFLAGS) stamp-h1; \ - else :; fi - -stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status - @rm -f stamp-h1 - cd $(top_builddir) && $(SHELL) ./config.status config.h -$(srcdir)/config.h.in: $(am__configure_deps) - cd $(top_srcdir) && $(AUTOHEADER) - rm -f stamp-h1 - touch $@ - -distclean-hdr: - -rm -f config.h stamp-h1 -install-libLTLIBRARIES: $(lib_LTLIBRARIES) - @$(NORMAL_INSTALL) - test -z "$(libdir)" || $(MKDIR_P) "$(DESTDIR)$(libdir)" - @list='$(lib_LTLIBRARIES)'; for p in $$list; do \ - if test -f $$p; then \ - f=$(am__strip_dir) \ - echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(libLTLIBRARIES_INSTALL) $(INSTALL_STRIP_FLAG) '$$p' '$(DESTDIR)$(libdir)/$$f'"; \ - $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(libLTLIBRARIES_INSTALL) $(INSTALL_STRIP_FLAG) "$$p" "$(DESTDIR)$(libdir)/$$f"; \ - else :; fi; \ - done - -uninstall-libLTLIBRARIES: - @$(NORMAL_UNINSTALL) - @list='$(lib_LTLIBRARIES)'; for p in $$list; do \ - p=$(am__strip_dir) \ - echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$p'"; \ - $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$p"; \ - done - -clean-libLTLIBRARIES: - -test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES) - @list='$(lib_LTLIBRARIES)'; for p in $$list; do \ - dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \ - test "$$dir" != "$$p" || dir=.; \ - echo "rm -f \"$${dir}/so_locations\""; \ - rm -f "$${dir}/so_locations"; \ - done -libevent.la: $(libevent_la_OBJECTS) $(libevent_la_DEPENDENCIES) - $(libevent_la_LINK) -rpath $(libdir) $(libevent_la_OBJECTS) $(libevent_la_LIBADD) $(LIBS) -libevent_core.la: $(libevent_core_la_OBJECTS) $(libevent_core_la_DEPENDENCIES) - $(libevent_core_la_LINK) -rpath $(libdir) $(libevent_core_la_OBJECTS) $(libevent_core_la_LIBADD) $(LIBS) -libevent_extra.la: $(libevent_extra_la_OBJECTS) $(libevent_extra_la_DEPENDENCIES) - $(libevent_extra_la_LINK) -rpath $(libdir) $(libevent_extra_la_OBJECTS) $(libevent_extra_la_LIBADD) $(LIBS) -install-binSCRIPTS: $(bin_SCRIPTS) - @$(NORMAL_INSTALL) - test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)" - @list='$(bin_SCRIPTS)'; for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - if test -f $$d$$p; then \ - f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \ - echo " $(binSCRIPT_INSTALL) '$$d$$p' '$(DESTDIR)$(bindir)/$$f'"; \ - $(binSCRIPT_INSTALL) "$$d$$p" "$(DESTDIR)$(bindir)/$$f"; \ - else :; fi; \ - done - -uninstall-binSCRIPTS: - @$(NORMAL_UNINSTALL) - @list='$(bin_SCRIPTS)'; for p in $$list; do \ - f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \ - echo " rm -f '$(DESTDIR)$(bindir)/$$f'"; \ - rm -f "$(DESTDIR)$(bindir)/$$f"; \ - done - -mostlyclean-compile: - -rm -f *.$(OBJEXT) - -distclean-compile: - -rm -f *.tab.c - -.c.o: - $(COMPILE) -c $< - -.c.obj: - $(COMPILE) -c `$(CYGPATH_W) '$<'` - 
-.c.lo: - $(LTCOMPILE) -c -o $@ $< - -win32.lo: WIN32-Code/win32.c - $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o win32.lo `test -f 'WIN32-Code/win32.c' || echo '$(srcdir)/'`WIN32-Code/win32.c - -mostlyclean-libtool: - -rm -f *.lo - -clean-libtool: - -rm -rf .libs _libs - -distclean-libtool: - -rm -f libtool -install-man3: $(man3_MANS) $(man_MANS) - @$(NORMAL_INSTALL) - test -z "$(man3dir)" || $(MKDIR_P) "$(DESTDIR)$(man3dir)" - @list='$(man3_MANS) $(dist_man3_MANS) $(nodist_man3_MANS)'; \ - l2='$(man_MANS) $(dist_man_MANS) $(nodist_man_MANS)'; \ - for i in $$l2; do \ - case "$$i" in \ - *.3*) list="$$list $$i" ;; \ - esac; \ - done; \ - for i in $$list; do \ - if test -f $(srcdir)/$$i; then file=$(srcdir)/$$i; \ - else file=$$i; fi; \ - ext=`echo $$i | sed -e 's/^.*\\.//'`; \ - case "$$ext" in \ - 3*) ;; \ - *) ext='3' ;; \ - esac; \ - inst=`echo $$i | sed -e 's/\\.[0-9a-z]*$$//'`; \ - inst=`echo $$inst | sed -e 's/^.*\///'`; \ - inst=`echo $$inst | sed '$(transform)'`.$$ext; \ - echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man3dir)/$$inst'"; \ - $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man3dir)/$$inst"; \ - done -uninstall-man3: - @$(NORMAL_UNINSTALL) - @list='$(man3_MANS) $(dist_man3_MANS) $(nodist_man3_MANS)'; \ - l2='$(man_MANS) $(dist_man_MANS) $(nodist_man_MANS)'; \ - for i in $$l2; do \ - case "$$i" in \ - *.3*) list="$$list $$i" ;; \ - esac; \ - done; \ - for i in $$list; do \ - ext=`echo $$i | sed -e 's/^.*\\.//'`; \ - case "$$ext" in \ - 3*) ;; \ - *) ext='3' ;; \ - esac; \ - inst=`echo $$i | sed -e 's/\\.[0-9a-z]*$$//'`; \ - inst=`echo $$inst | sed -e 's/^.*\///'`; \ - inst=`echo $$inst | sed '$(transform)'`.$$ext; \ - echo " rm -f '$(DESTDIR)$(man3dir)/$$inst'"; \ - rm -f "$(DESTDIR)$(man3dir)/$$inst"; \ - done -install-includeHEADERS: $(include_HEADERS) - @$(NORMAL_INSTALL) - test -z "$(includedir)" || $(MKDIR_P) "$(DESTDIR)$(includedir)" - @list='$(include_HEADERS)'; for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - f=$(am__strip_dir) \ - echo " $(includeHEADERS_INSTALL) '$$d$$p' '$(DESTDIR)$(includedir)/$$f'"; \ - $(includeHEADERS_INSTALL) "$$d$$p" "$(DESTDIR)$(includedir)/$$f"; \ - done - -uninstall-includeHEADERS: - @$(NORMAL_UNINSTALL) - @list='$(include_HEADERS)'; for p in $$list; do \ - f=$(am__strip_dir) \ - echo " rm -f '$(DESTDIR)$(includedir)/$$f'"; \ - rm -f "$(DESTDIR)$(includedir)/$$f"; \ - done -install-nodist_includeHEADERS: $(nodist_include_HEADERS) - @$(NORMAL_INSTALL) - test -z "$(includedir)" || $(MKDIR_P) "$(DESTDIR)$(includedir)" - @list='$(nodist_include_HEADERS)'; for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - f=$(am__strip_dir) \ - echo " $(nodist_includeHEADERS_INSTALL) '$$d$$p' '$(DESTDIR)$(includedir)/$$f'"; \ - $(nodist_includeHEADERS_INSTALL) "$$d$$p" "$(DESTDIR)$(includedir)/$$f"; \ - done - -uninstall-nodist_includeHEADERS: - @$(NORMAL_UNINSTALL) - @list='$(nodist_include_HEADERS)'; for p in $$list; do \ - f=$(am__strip_dir) \ - echo " rm -f '$(DESTDIR)$(includedir)/$$f'"; \ - rm -f "$(DESTDIR)$(includedir)/$$f"; \ - done - -# This directory's subdirectories are mostly independent; you can cd -# into them and run `make' without going through this Makefile. 
-# To change the values of `make' variables: instead of editing Makefiles, -# (1) if the variable is set in `config.status', edit `config.status' -# (which will cause the Makefiles to be regenerated when you run `make'); -# (2) otherwise, pass the desired values on the `make' command line. -$(RECURSIVE_TARGETS): - @failcom='exit 1'; \ - for f in x $$MAKEFLAGS; do \ - case $$f in \ - *=* | --[!k]*);; \ - *k*) failcom='fail=yes';; \ - esac; \ - done; \ - dot_seen=no; \ - target=`echo $@ | sed s/-recursive//`; \ - list='$(SUBDIRS)'; for subdir in $$list; do \ - echo "Making $$target in $$subdir"; \ - if test "$$subdir" = "."; then \ - dot_seen=yes; \ - local_target="$$target-am"; \ - else \ - local_target="$$target"; \ - fi; \ - (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ - || eval $$failcom; \ - done; \ - if test "$$dot_seen" = "no"; then \ - $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ - fi; test -z "$$fail" - -$(RECURSIVE_CLEAN_TARGETS): - @failcom='exit 1'; \ - for f in x $$MAKEFLAGS; do \ - case $$f in \ - *=* | --[!k]*);; \ - *k*) failcom='fail=yes';; \ - esac; \ - done; \ - dot_seen=no; \ - case "$@" in \ - distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ - *) list='$(SUBDIRS)' ;; \ - esac; \ - rev=''; for subdir in $$list; do \ - if test "$$subdir" = "."; then :; else \ - rev="$$subdir $$rev"; \ - fi; \ - done; \ - rev="$$rev ."; \ - target=`echo $@ | sed s/-recursive//`; \ - for subdir in $$rev; do \ - echo "Making $$target in $$subdir"; \ - if test "$$subdir" = "."; then \ - local_target="$$target-am"; \ - else \ - local_target="$$target"; \ - fi; \ - (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ - || eval $$failcom; \ - done && test -z "$$fail" -tags-recursive: - list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ - done -ctags-recursive: - list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ - done - -ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonemtpy = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - mkid -fID $$unique -tags: TAGS - -TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - tags=; \ - here=`pwd`; \ - if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ - include_option=--etags-include; \ - empty_fix=.; \ - else \ - include_option=--include; \ - empty_fix=; \ - fi; \ - list='$(SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ - test ! 
-f $$subdir/TAGS || \ - tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \ - fi; \ - done; \ - list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ - test -n "$$unique" || unique=$$empty_fix; \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$tags $$unique; \ - fi -ctags: CTAGS -CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - tags=; \ - list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - test -z "$(CTAGS_ARGS)$$tags$$unique" \ - || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$tags $$unique - -GTAGS: - here=`$(am__cd) $(top_builddir) && pwd` \ - && cd $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) $$here - -distclean-tags: - -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags - -distdir: $(DISTFILES) - $(am__remove_distdir) - test -d $(distdir) || mkdir $(distdir) - @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - list='$(DISTFILES)'; \ - dist_files=`for file in $$list; do echo $$file; done | \ - sed -e "s|^$$srcdirstrip/||;t" \ - -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ - case $$dist_files in \ - */*) $(MKDIR_P) `echo "$$dist_files" | \ - sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ - sort -u` ;; \ - esac; \ - for file in $$dist_files; do \ - if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ - if test -d $$d/$$file; then \ - dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ - if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ - fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ - else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ - || exit 1; \ - fi; \ - done - list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ - test -d "$(distdir)/$$subdir" \ - || $(MKDIR_P) "$(distdir)/$$subdir" \ - || exit 1; \ - distdir=`$(am__cd) $(distdir) && pwd`; \ - top_distdir=`$(am__cd) $(top_distdir) && pwd`; \ - (cd $$subdir && \ - $(MAKE) $(AM_MAKEFLAGS) \ - top_distdir="$$top_distdir" \ - distdir="$$distdir/$$subdir" \ - am__remove_distdir=: \ - am__skip_length_check=: \ - distdir) \ - || exit 1; \ - fi; \ - done - -find $(distdir) -type d ! -perm -777 -exec chmod a+rwx {} \; -o \ - ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ - ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ - ! -type d ! 
-perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ - || chmod -R a+r $(distdir) -dist-gzip: distdir - tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz - $(am__remove_distdir) - -dist-bzip2: distdir - tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 - $(am__remove_distdir) - -dist-lzma: distdir - tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma - $(am__remove_distdir) - -dist-tarZ: distdir - tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z - $(am__remove_distdir) - -dist-shar: distdir - shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz - $(am__remove_distdir) - -dist-zip: distdir - -rm -f $(distdir).zip - zip -rq $(distdir).zip $(distdir) - $(am__remove_distdir) - -dist dist-all: distdir - tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz - $(am__remove_distdir) - -# This target untars the dist file and tries a VPATH configuration. Then -# it guarantees that the distribution is self-contained by making another -# tarfile. -distcheck: dist - case '$(DIST_ARCHIVES)' in \ - *.tar.gz*) \ - GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(am__untar) ;;\ - *.tar.bz2*) \ - bunzip2 -c $(distdir).tar.bz2 | $(am__untar) ;;\ - *.tar.lzma*) \ - unlzma -c $(distdir).tar.lzma | $(am__untar) ;;\ - *.tar.Z*) \ - uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ - *.shar.gz*) \ - GZIP=$(GZIP_ENV) gunzip -c $(distdir).shar.gz | unshar ;;\ - *.zip*) \ - unzip $(distdir).zip ;;\ - esac - chmod -R a-w $(distdir); chmod a+w $(distdir) - mkdir $(distdir)/_build - mkdir $(distdir)/_inst - chmod a-w $(distdir) - dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ - && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ - && cd $(distdir)/_build \ - && ../configure --srcdir=.. --prefix="$$dc_install_base" \ - $(DISTCHECK_CONFIGURE_FLAGS) \ - && $(MAKE) $(AM_MAKEFLAGS) \ - && $(MAKE) $(AM_MAKEFLAGS) dvi \ - && $(MAKE) $(AM_MAKEFLAGS) check \ - && $(MAKE) $(AM_MAKEFLAGS) install \ - && $(MAKE) $(AM_MAKEFLAGS) installcheck \ - && $(MAKE) $(AM_MAKEFLAGS) uninstall \ - && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ - distuninstallcheck \ - && chmod -R a-w "$$dc_install_base" \ - && ({ \ - (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ - distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ - } || { rm -rf "$$dc_destdir"; exit 1; }) \ - && rm -rf "$$dc_destdir" \ - && $(MAKE) $(AM_MAKEFLAGS) dist \ - && rm -rf $(DIST_ARCHIVES) \ - && $(MAKE) $(AM_MAKEFLAGS) distcleancheck - $(am__remove_distdir) - @(echo "$(distdir) archives ready for distribution: "; \ - list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ - sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' -distuninstallcheck: - @cd $(distuninstallcheck_dir) \ - && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ - || { echo "ERROR: files left after uninstall:" ; \ - if test -n "$(DESTDIR)"; then \ - echo " (check DESTDIR support)"; \ - fi ; \ - $(distuninstallcheck_listfiles) ; \ - exit 1; } >&2 -distcleancheck: distclean - @if test '$(srcdir)' = . 
; then \ - echo "ERROR: distcleancheck can only run from a VPATH build" ; \ - exit 1 ; \ - fi - @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ - || { echo "ERROR: files left in build directory after distclean:" ; \ - $(distcleancheck_listfiles) ; \ - exit 1; } >&2 -check-am: all-am -check: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) check-recursive -all-am: Makefile $(LTLIBRARIES) $(SCRIPTS) $(MANS) $(HEADERS) config.h -installdirs: installdirs-recursive -installdirs-am: - for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man3dir)" "$(DESTDIR)$(includedir)" "$(DESTDIR)$(includedir)"; do \ - test -z "$$dir" || $(MKDIR_P) "$$dir"; \ - done -install: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) install-recursive -install-exec: install-exec-recursive -install-data: install-data-recursive -uninstall: uninstall-recursive - -install-am: all-am - @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am - -installcheck: installcheck-recursive -install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install -mostlyclean-generic: - -clean-generic: - -distclean-generic: - -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) - -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES) - -maintainer-clean-generic: - @echo "This command is intended for maintainers to use" - @echo "it deletes files that may require special tools to rebuild." - -test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES) -clean: clean-recursive - -clean-am: clean-generic clean-libLTLIBRARIES clean-libtool \ - mostlyclean-am - -distclean: distclean-recursive - -rm -f $(am__CONFIG_DISTCLEAN_FILES) - -rm -f Makefile -distclean-am: clean-am distclean-compile distclean-generic \ - distclean-hdr distclean-libtool distclean-tags - -dvi: dvi-recursive - -dvi-am: - -html: html-recursive - -info: info-recursive - -info-am: - -install-data-am: install-includeHEADERS install-man \ - install-nodist_includeHEADERS - -install-dvi: install-dvi-recursive - -install-exec-am: install-binSCRIPTS install-libLTLIBRARIES - -install-html: install-html-recursive - -install-info: install-info-recursive - -install-man: install-man3 - -install-pdf: install-pdf-recursive - -install-ps: install-ps-recursive - -installcheck-am: - -maintainer-clean: maintainer-clean-recursive - -rm -f $(am__CONFIG_DISTCLEAN_FILES) - -rm -rf $(top_srcdir)/autom4te.cache - -rm -f Makefile -maintainer-clean-am: distclean-am maintainer-clean-generic - -mostlyclean: mostlyclean-recursive - -mostlyclean-am: mostlyclean-compile mostlyclean-generic \ - mostlyclean-libtool - -pdf: pdf-recursive - -pdf-am: - -ps: ps-recursive - -ps-am: - -uninstall-am: uninstall-binSCRIPTS uninstall-includeHEADERS \ - uninstall-libLTLIBRARIES uninstall-man \ - uninstall-nodist_includeHEADERS - -uninstall-man: uninstall-man3 - -.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) install-am \ - install-strip - -.PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ - all all-am am--refresh check check-am clean clean-generic \ - clean-libLTLIBRARIES clean-libtool ctags ctags-recursive dist \ - dist-all dist-bzip2 dist-gzip dist-lzma dist-shar dist-tarZ \ - dist-zip distcheck distclean distclean-compile \ - distclean-generic distclean-hdr distclean-libtool \ - distclean-tags distcleancheck distdir distuninstallcheck dvi \ - dvi-am html html-am info info-am install install-am \ - 
install-binSCRIPTS install-data install-data-am install-dvi \ - install-dvi-am install-exec install-exec-am install-html \ - install-html-am install-includeHEADERS install-info \ - install-info-am install-libLTLIBRARIES install-man \ - install-man3 install-nodist_includeHEADERS install-pdf \ - install-pdf-am install-ps install-ps-am install-strip \ - installcheck installcheck-am installdirs installdirs-am \ - maintainer-clean maintainer-clean-generic mostlyclean \ - mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ - pdf pdf-am ps ps-am tags tags-recursive uninstall uninstall-am \ - uninstall-binSCRIPTS uninstall-includeHEADERS \ - uninstall-libLTLIBRARIES uninstall-man uninstall-man3 \ - uninstall-nodist_includeHEADERS - - -event-config.h: config.h - echo '/* event-config.h' > $@ - echo ' * Generated by autoconf; post-processed by libevent.' >> $@ - echo ' * Do not edit this file.' >> $@ - echo ' * Do not rely on macros in this file existing in later versions.'>> $@ - echo ' */' >> $@ - echo '#ifndef _EVENT_CONFIG_H_' >> $@ - echo '#define _EVENT_CONFIG_H_' >> $@ - - sed -e 's/#define /#define _EVENT_/' \ - -e 's/#undef /#undef _EVENT_/' \ - -e 's/#ifndef /#ifndef _EVENT_/' < config.h >> $@ - echo "#endif" >> $@ - -verify: libevent.la - cd test && make verify - -doxygen: FORCE - doxygen $(srcdir)/Doxyfile -FORCE: -# Tell versions [3.59,3.63) of GNU make to not export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. -.NOEXPORT:
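For reference, the event-config.h rule in the Makefile.in deleted above does nothing more than namespace the autoconf output: it wraps config.h in an _EVENT_CONFIG_H_ guard and re-emits every #define/#undef/#ifndef with an _EVENT_ prefix so installed headers do not leak bare HAVE_* macros. A minimal before/after sketch, using illustrative macro names rather than ones taken from this tree:

    /* config.h (autoconf output) -- illustrative macros */
    #define HAVE_CLOCK_GETTIME 1
    /* #undef HAVE_STRLCPY */

    /* event-config.h after the echo/sed post-processing in the deleted rule */
    #ifndef _EVENT_CONFIG_H_
    #define _EVENT_CONFIG_H_
    #define _EVENT_HAVE_CLOCK_GETTIME 1
    /* #undef _EVENT_HAVE_STRLCPY */
    #endif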
diff --git a/third_party/libevent/Makefile.nmake b/third_party/libevent/Makefile.nmake new file mode 100644 index 0000000..f8d5722 --- /dev/null +++ b/third_party/libevent/Makefile.nmake
@@ -0,0 +1,48 @@ +# WATCH OUT! This makefile is a work in progress. It is probably missing +# tons of important things. DO NOT RELY ON IT TO BUILD A GOOD LIBEVENT. + +# Needed for correctness +CFLAGS=/Iinclude /Icompat /IWIN32-Code /DWIN32 /DHAVE_CONFIG_H /I. + +# For optimization and warnings +CFLAGS=$(CFLAGS) /Ox /W3 /wd4996 /nologo + +# XXXX have a debug mode + +LIBFLAGS=/nologo + + +CORE_OBJS=event.obj buffer.obj evbuffer.obj \ + log.obj evutil.obj \ + strlcpy.obj signal.obj win32.obj +EXTRA_OBJS=event_tagging.obj http.obj evdns.obj evrpc.obj + +ALL_OBJS=$(CORE_OBJS) $(WIN_OBJS) $(EXTRA_OBJS) +STATIC_LIBS=libevent_core.lib libevent_extras.lib libevent.lib + + +all: static_libs tests + +static_libs: $(STATIC_LIBS) + +win32.obj: WIN32-Code\win32.c + $(CC) $(CFLAGS) /c WIN32-Code\win32.c + +libevent_core.lib: $(CORE_OBJS) + lib $(LIBFLAGS) $(CORE_OBJS) /out:libevent_core.lib + +libevent_extras.lib: $(EXTRA_OBJS) + lib $(LIBFLAGS) $(EXTRA_OBJS) /out:libevent_extras.lib + +libevent.lib: $(CORE_OBJ) $(EXTRA_OBJS) + lib $(LIBFLAGS) $(CORE_OBJS) $(EXTRA_OBJS) /out:libevent.lib + +clean: + del $(ALL_OBJS) + del $(STATIC_LIBS) + cd test + $(MAKE) /F Makefile.nmake clean + +tests: + cd test + $(MAKE) /F Makefile.nmake
diff --git a/third_party/libevent/README.chromium b/third_party/libevent/README.chromium index 939a3539..fa4bc4a0 100644 --- a/third_party/libevent/README.chromium +++ b/third_party/libevent/README.chromium
@@ -1,6 +1,6 @@ Name: libevent URL: http://www.monkey.org/~provos/libevent/ -Version: 1.4.13 +Version: 1.4.15 License: BSD Security Critical: yes @@ -12,17 +12,23 @@ and Mac and copy config.h and event-config.h to linux/, freebsd/, solaris/, and mac/ respectively. 2) Add libevent.gyp. -3) chromium.patch is applied to allow libevent to be used without - being installed and to fix a race condition. +3) chromium.patch is applied to make the following changes: + - Allow libevent to be used without being installed by changing <...> + #includes to "...". + - Fix a race condition in event_del. + - Optimistically assume CLOCK_MONOTONIC is available and fallback if it + fails, rather than explicitly testing for it. + - Remove an unneeded variable that causes a -Werror build failure. + - Revert the patch from http://sourceforge.net/p/levent/bugs/223/ that + introduces use-after-free memory corruption when an event callback frees + the struct event memory. 4) The directories WIN32-Code and WIN32-Prj are not included. -5) Apply r87338. -6) The configs for android were copied from Linux's which were very close to +5) The configs for android were copied from Linux's which were very close to android one with the exception of HAVE_FD_MASK and HAVE_STRLCPY. -7) Add files to support building with the PNaCl toolchain. Added +6) Add files to support building with the PNaCl toolchain. Added libevent_nacl_nonsfi.gyp for build rule. nacl_nonsfi/config.h and nacl_nonsfi/event-config.h are derived from linux/ counterparts. nacl_nonsfi/random.c is also added to provide the random() function, which is missing in the newlib-based PNaCl toolchain. -8) Apply https://github.com/libevent/libevent/commit/ea6b1df -9) Stub out signal.c for nacl_helper_nonsfi. socketpair() will be prohibited +7) Stub out signal.c for nacl_helper_nonsfi. socketpair() will be prohibited by sandbox in nacl_helper_nonsfi.
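The CLOCK_MONOTONIC item in the chromium.patch list above amounts to trying the monotonic clock at runtime and falling back to gettimeofday() if the call fails, rather than relying only on a configure-time probe. A minimal C sketch of that pattern, assuming a HAVE_CLOCK_GETTIME config macro; this is not the patched libevent code itself:

    #include <sys/time.h>
    #include <time.h>

    /* Optimistically use CLOCK_MONOTONIC; fall back to wall-clock time on failure. */
    static int sketch_gettime(struct timeval *tv)
    {
    #if defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_MONOTONIC)
        struct timespec ts;
        if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
            tv->tv_sec = ts.tv_sec;
            tv->tv_usec = ts.tv_nsec / 1000;
            return 0;
        }
        /* Kernel rejected CLOCK_MONOTONIC; fall through to gettimeofday(). */
    #endif
        return gettimeofday(tv, NULL);
    }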
diff --git a/third_party/libevent/aclocal.m4 b/third_party/libevent/aclocal.m4 deleted file mode 100644 index 4af9376..0000000 --- a/third_party/libevent/aclocal.m4 +++ /dev/null
@@ -1,7498 +0,0 @@ -# generated automatically by aclocal 1.10.1 -*- Autoconf -*- - -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, -# 2005, 2006, 2007, 2008 Free Software Foundation, Inc. -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - -m4_ifndef([AC_AUTOCONF_VERSION], - [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl -m4_if(AC_AUTOCONF_VERSION, [2.63],, -[m4_warning([this file was generated for autoconf 2.63. -You have another version of autoconf. It may work, but is not guaranteed to. -If you have problems, you may need to regenerate the build system entirely. -To do so, use the procedure documented by the package, typically `autoreconf'.])]) - -# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- - -# serial 52 AC_PROG_LIBTOOL - - -# AC_PROVIDE_IFELSE(MACRO-NAME, IF-PROVIDED, IF-NOT-PROVIDED) -# ----------------------------------------------------------- -# If this macro is not defined by Autoconf, define it here. -m4_ifdef([AC_PROVIDE_IFELSE], - [], - [m4_define([AC_PROVIDE_IFELSE], - [m4_ifdef([AC_PROVIDE_$1], - [$2], [$3])])]) - - -# AC_PROG_LIBTOOL -# --------------- -AC_DEFUN([AC_PROG_LIBTOOL], -[AC_REQUIRE([_AC_PROG_LIBTOOL])dnl -dnl If AC_PROG_CXX has already been expanded, run AC_LIBTOOL_CXX -dnl immediately, otherwise, hook it in at the end of AC_PROG_CXX. - AC_PROVIDE_IFELSE([AC_PROG_CXX], - [AC_LIBTOOL_CXX], - [define([AC_PROG_CXX], defn([AC_PROG_CXX])[AC_LIBTOOL_CXX - ])]) -dnl And a similar setup for Fortran 77 support - AC_PROVIDE_IFELSE([AC_PROG_F77], - [AC_LIBTOOL_F77], - [define([AC_PROG_F77], defn([AC_PROG_F77])[AC_LIBTOOL_F77 -])]) - -dnl Quote A][M_PROG_GCJ so that aclocal doesn't bring it in needlessly. -dnl If either AC_PROG_GCJ or A][M_PROG_GCJ have already been expanded, run -dnl AC_LIBTOOL_GCJ immediately, otherwise, hook it in at the end of both. - AC_PROVIDE_IFELSE([AC_PROG_GCJ], - [AC_LIBTOOL_GCJ], - [AC_PROVIDE_IFELSE([A][M_PROG_GCJ], - [AC_LIBTOOL_GCJ], - [AC_PROVIDE_IFELSE([LT_AC_PROG_GCJ], - [AC_LIBTOOL_GCJ], - [ifdef([AC_PROG_GCJ], - [define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[AC_LIBTOOL_GCJ])]) - ifdef([A][M_PROG_GCJ], - [define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[AC_LIBTOOL_GCJ])]) - ifdef([LT_AC_PROG_GCJ], - [define([LT_AC_PROG_GCJ], - defn([LT_AC_PROG_GCJ])[AC_LIBTOOL_GCJ])])])]) -])])# AC_PROG_LIBTOOL - - -# _AC_PROG_LIBTOOL -# ---------------- -AC_DEFUN([_AC_PROG_LIBTOOL], -[AC_REQUIRE([AC_LIBTOOL_SETUP])dnl -AC_BEFORE([$0],[AC_LIBTOOL_CXX])dnl -AC_BEFORE([$0],[AC_LIBTOOL_F77])dnl -AC_BEFORE([$0],[AC_LIBTOOL_GCJ])dnl - -# This can be used to rebuild libtool when needed -LIBTOOL_DEPS="$ac_aux_dir/ltmain.sh" - -# Always use our own libtool. 
-LIBTOOL='$(SHELL) $(top_builddir)/libtool' -AC_SUBST(LIBTOOL)dnl - -# Prevent multiple expansion -define([AC_PROG_LIBTOOL], []) -])# _AC_PROG_LIBTOOL - - -# AC_LIBTOOL_SETUP -# ---------------- -AC_DEFUN([AC_LIBTOOL_SETUP], -[AC_PREREQ(2.50)dnl -AC_REQUIRE([AC_ENABLE_SHARED])dnl -AC_REQUIRE([AC_ENABLE_STATIC])dnl -AC_REQUIRE([AC_ENABLE_FAST_INSTALL])dnl -AC_REQUIRE([AC_CANONICAL_HOST])dnl -AC_REQUIRE([AC_CANONICAL_BUILD])dnl -AC_REQUIRE([AC_PROG_CC])dnl -AC_REQUIRE([AC_PROG_LD])dnl -AC_REQUIRE([AC_PROG_LD_RELOAD_FLAG])dnl -AC_REQUIRE([AC_PROG_NM])dnl - -AC_REQUIRE([AC_PROG_LN_S])dnl -AC_REQUIRE([AC_DEPLIBS_CHECK_METHOD])dnl -# Autoconf 2.13's AC_OBJEXT and AC_EXEEXT macros only works for C compilers! -AC_REQUIRE([AC_OBJEXT])dnl -AC_REQUIRE([AC_EXEEXT])dnl -dnl -AC_LIBTOOL_SYS_MAX_CMD_LEN -AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE -AC_LIBTOOL_OBJDIR - -AC_REQUIRE([_LT_AC_SYS_COMPILER])dnl -_LT_AC_PROG_ECHO_BACKSLASH - -case $host_os in -aix3*) - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. - if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi - ;; -esac - -# Sed substitution that helps us do robust quoting. It backslashifies -# metacharacters that are still active within double-quoted strings. -Xsed='sed -e 1s/^X//' -[sed_quote_subst='s/\([\\"\\`$\\\\]\)/\\\1/g'] - -# Same as above, but do not quote variable references. -[double_quote_subst='s/\([\\"\\`\\\\]\)/\\\1/g'] - -# Sed substitution to delay expansion of an escaped shell variable in a -# double_quote_subst'ed string. -delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' - -# Sed substitution to avoid accidental globbing in evaled expressions -no_glob_subst='s/\*/\\\*/g' - -# Constants: -rm="rm -f" - -# Global variables: -default_ofile=libtool -can_build_shared=yes - -# All known linkers require a `.a' archive for static linking (except MSVC, -# which needs '.lib'). -libext=a -ltmain="$ac_aux_dir/ltmain.sh" -ofile="$default_ofile" -with_gnu_ld="$lt_cv_prog_gnu_ld" - -AC_CHECK_TOOL(AR, ar, false) -AC_CHECK_TOOL(RANLIB, ranlib, :) -AC_CHECK_TOOL(STRIP, strip, :) - -old_CC="$CC" -old_CFLAGS="$CFLAGS" - -# Set sane defaults for various variables -test -z "$AR" && AR=ar -test -z "$AR_FLAGS" && AR_FLAGS=cru -test -z "$AS" && AS=as -test -z "$CC" && CC=cc -test -z "$LTCC" && LTCC=$CC -test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS -test -z "$DLLTOOL" && DLLTOOL=dlltool -test -z "$LD" && LD=ld -test -z "$LN_S" && LN_S="ln -s" -test -z "$MAGIC_CMD" && MAGIC_CMD=file -test -z "$NM" && NM=nm -test -z "$SED" && SED=sed -test -z "$OBJDUMP" && OBJDUMP=objdump -test -z "$RANLIB" && RANLIB=: -test -z "$STRIP" && STRIP=: -test -z "$ac_objext" && ac_objext=o - -# Determine commands to create old-style static archives. 
-old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' -old_postinstall_cmds='chmod 644 $oldlib' -old_postuninstall_cmds= - -if test -n "$RANLIB"; then - case $host_os in - openbsd*) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib" - ;; - *) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib" - ;; - esac - old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib" -fi - -_LT_CC_BASENAME([$compiler]) - -# Only perform the check for file, if the check method requires it -case $deplibs_check_method in -file_magic*) - if test "$file_magic_cmd" = '$MAGIC_CMD'; then - AC_PATH_MAGIC - fi - ;; -esac - -_LT_REQUIRED_DARWIN_CHECKS - -AC_PROVIDE_IFELSE([AC_LIBTOOL_DLOPEN], enable_dlopen=yes, enable_dlopen=no) -AC_PROVIDE_IFELSE([AC_LIBTOOL_WIN32_DLL], -enable_win32_dll=yes, enable_win32_dll=no) - -AC_ARG_ENABLE([libtool-lock], - [AC_HELP_STRING([--disable-libtool-lock], - [avoid locking (might break parallel builds)])]) -test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes - -AC_ARG_WITH([pic], - [AC_HELP_STRING([--with-pic], - [try to use only PIC/non-PIC objects @<:@default=use both@:>@])], - [pic_mode="$withval"], - [pic_mode=default]) -test -z "$pic_mode" && pic_mode=default - -# Use C for the default configuration in the libtool script -tagname= -AC_LIBTOOL_LANG_C_CONFIG -_LT_AC_TAGCONFIG -])# AC_LIBTOOL_SETUP - - -# _LT_AC_SYS_COMPILER -# ------------------- -AC_DEFUN([_LT_AC_SYS_COMPILER], -[AC_REQUIRE([AC_PROG_CC])dnl - -# If no C compiler was specified, use CC. -LTCC=${LTCC-"$CC"} - -# If no C compiler flags were specified, use CFLAGS. -LTCFLAGS=${LTCFLAGS-"$CFLAGS"} - -# Allow CC to be a program name with arguments. -compiler=$CC -])# _LT_AC_SYS_COMPILER - - -# _LT_CC_BASENAME(CC) -# ------------------- -# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. -AC_DEFUN([_LT_CC_BASENAME], -[for cc_temp in $1""; do - case $cc_temp in - compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; - distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; - \-*) ;; - *) break;; - esac -done -cc_basename=`$echo "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"` -]) - - -# _LT_COMPILER_BOILERPLATE -# ------------------------ -# Check for compiler boilerplate output or warnings with -# the simple compiler test code. -AC_DEFUN([_LT_COMPILER_BOILERPLATE], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -ac_outfile=conftest.$ac_objext -echo "$lt_simple_compile_test_code" >conftest.$ac_ext -eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_compiler_boilerplate=`cat conftest.err` -$rm conftest* -])# _LT_COMPILER_BOILERPLATE - - -# _LT_LINKER_BOILERPLATE -# ---------------------- -# Check for linker boilerplate output or warnings with -# the simple link test code. -AC_DEFUN([_LT_LINKER_BOILERPLATE], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -ac_outfile=conftest.$ac_objext -echo "$lt_simple_link_test_code" >conftest.$ac_ext -eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_linker_boilerplate=`cat conftest.err` -$rm -r conftest* -])# _LT_LINKER_BOILERPLATE - -# _LT_REQUIRED_DARWIN_CHECKS -# -------------------------- -# Check for some things on darwin -AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS],[ - case $host_os in - rhapsody* | darwin*) - AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:]) - AC_CHECK_TOOL([NMEDIT], [nmedit], [:]) - - AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod], - [lt_cv_apple_cc_single_mod=no - if test -z "${LT_MULTI_MODULE}"; then - # By default we will add the -single_module flag. 
You can override - # by either setting the environment variable LT_MULTI_MODULE - # non-empty at configure time, or by adding -multi_module to the - # link flags. - echo "int foo(void){return 1;}" > conftest.c - $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ - -dynamiclib ${wl}-single_module conftest.c - if test -f libconftest.dylib; then - lt_cv_apple_cc_single_mod=yes - rm -rf libconftest.dylib* - fi - rm conftest.c - fi]) - AC_CACHE_CHECK([for -exported_symbols_list linker flag], - [lt_cv_ld_exported_symbols_list], - [lt_cv_ld_exported_symbols_list=no - save_LDFLAGS=$LDFLAGS - echo "_main" > conftest.sym - LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" - AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], - [lt_cv_ld_exported_symbols_list=yes], - [lt_cv_ld_exported_symbols_list=no]) - LDFLAGS="$save_LDFLAGS" - ]) - case $host_os in - rhapsody* | darwin1.[[0123]]) - _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; - darwin1.*) - _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - darwin*) - # if running on 10.5 or later, the deployment target defaults - # to the OS version, if on x86, and 10.4, the deployment - # target defaults to 10.4. Don't you love it? - case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in - 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*) - _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - 10.[[012]]*) - _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - 10.*) - _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - esac - ;; - esac - if test "$lt_cv_apple_cc_single_mod" = "yes"; then - _lt_dar_single_mod='$single_module' - fi - if test "$lt_cv_ld_exported_symbols_list" = "yes"; then - _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' - else - _lt_dar_export_syms="~$NMEDIT -s \$output_objdir/\${libname}-symbols.expsym \${lib}" - fi - if test "$DSYMUTIL" != ":"; then - _lt_dsymutil="~$DSYMUTIL \$lib || :" - else - _lt_dsymutil= - fi - ;; - esac -]) - -# _LT_AC_SYS_LIBPATH_AIX -# ---------------------- -# Links a minimal program and checks the executable -# for the system default hardcoded library path. In most cases, -# this is /usr/lib:/lib, but when the MPI compilers are used -# the location of the communication and MPI libs are included too. -# If we don't find anything, use the default library path according -# to the aix ld manual. -AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -AC_LINK_IFELSE(AC_LANG_PROGRAM,[ -lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\(.*\)$/\1/ - p - } - }' -aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -# Check for a 64-bit object if we didn't find anything. -if test -z "$aix_libpath"; then - aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -fi],[]) -if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -])# _LT_AC_SYS_LIBPATH_AIX - - -# _LT_AC_SHELL_INIT(ARG) -# ---------------------- -AC_DEFUN([_LT_AC_SHELL_INIT], -[ifdef([AC_DIVERSION_NOTICE], - [AC_DIVERT_PUSH(AC_DIVERSION_NOTICE)], - [AC_DIVERT_PUSH(NOTICE)]) -$1 -AC_DIVERT_POP -])# _LT_AC_SHELL_INIT - - -# _LT_AC_PROG_ECHO_BACKSLASH -# -------------------------- -# Add some code to the start of the generated configure script which -# will find an echo command which doesn't interpret backslashes. -AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH], -[_LT_AC_SHELL_INIT([ -# Check that we are running under the correct shell. 
-SHELL=${CONFIG_SHELL-/bin/sh} - -case X$ECHO in -X*--fallback-echo) - # Remove one level of quotation (which was required for Make). - ECHO=`echo "$ECHO" | sed 's,\\\\\[$]\\[$]0,'[$]0','` - ;; -esac - -echo=${ECHO-echo} -if test "X[$]1" = X--no-reexec; then - # Discard the --no-reexec flag, and continue. - shift -elif test "X[$]1" = X--fallback-echo; then - # Avoid inline document here, it may be left over - : -elif test "X`($echo '\t') 2>/dev/null`" = 'X\t' ; then - # Yippee, $echo works! - : -else - # Restart under the correct shell. - exec $SHELL "[$]0" --no-reexec ${1+"[$]@"} -fi - -if test "X[$]1" = X--fallback-echo; then - # used as fallback echo - shift - cat <<EOF -[$]* -EOF - exit 0 -fi - -# The HP-UX ksh and POSIX shell print the target directory to stdout -# if CDPATH is set. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -if test -z "$ECHO"; then -if test "X${echo_test_string+set}" != Xset; then -# find a string as large as possible, as long as the shell can cope with it - for cmd in 'sed 50q "[$]0"' 'sed 20q "[$]0"' 'sed 10q "[$]0"' 'sed 2q "[$]0"' 'echo test'; do - # expected sizes: less than 2Kb, 1Kb, 512 bytes, 16 bytes, ... - if (echo_test_string=`eval $cmd`) 2>/dev/null && - echo_test_string=`eval $cmd` && - (test "X$echo_test_string" = "X$echo_test_string") 2>/dev/null - then - break - fi - done -fi - -if test "X`($echo '\t') 2>/dev/null`" = 'X\t' && - echo_testing_string=`($echo "$echo_test_string") 2>/dev/null` && - test "X$echo_testing_string" = "X$echo_test_string"; then - : -else - # The Solaris, AIX, and Digital Unix default echo programs unquote - # backslashes. This makes it impossible to quote backslashes using - # echo "$something" | sed 's/\\/\\\\/g' - # - # So, first we look for a working echo in the user's PATH. - - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for dir in $PATH /usr/ucb; do - IFS="$lt_save_ifs" - if (test -f $dir/echo || test -f $dir/echo$ac_exeext) && - test "X`($dir/echo '\t') 2>/dev/null`" = 'X\t' && - echo_testing_string=`($dir/echo "$echo_test_string") 2>/dev/null` && - test "X$echo_testing_string" = "X$echo_test_string"; then - echo="$dir/echo" - break - fi - done - IFS="$lt_save_ifs" - - if test "X$echo" = Xecho; then - # We didn't find a better echo, so look for alternatives. - if test "X`(print -r '\t') 2>/dev/null`" = 'X\t' && - echo_testing_string=`(print -r "$echo_test_string") 2>/dev/null` && - test "X$echo_testing_string" = "X$echo_test_string"; then - # This shell has a builtin print -r that does the trick. - echo='print -r' - elif (test -f /bin/ksh || test -f /bin/ksh$ac_exeext) && - test "X$CONFIG_SHELL" != X/bin/ksh; then - # If we have ksh, try running configure again with it. - ORIGINAL_CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} - export ORIGINAL_CONFIG_SHELL - CONFIG_SHELL=/bin/ksh - export CONFIG_SHELL - exec $CONFIG_SHELL "[$]0" --no-reexec ${1+"[$]@"} - else - # Try using printf. 
- echo='printf %s\n' - if test "X`($echo '\t') 2>/dev/null`" = 'X\t' && - echo_testing_string=`($echo "$echo_test_string") 2>/dev/null` && - test "X$echo_testing_string" = "X$echo_test_string"; then - # Cool, printf works - : - elif echo_testing_string=`($ORIGINAL_CONFIG_SHELL "[$]0" --fallback-echo '\t') 2>/dev/null` && - test "X$echo_testing_string" = 'X\t' && - echo_testing_string=`($ORIGINAL_CONFIG_SHELL "[$]0" --fallback-echo "$echo_test_string") 2>/dev/null` && - test "X$echo_testing_string" = "X$echo_test_string"; then - CONFIG_SHELL=$ORIGINAL_CONFIG_SHELL - export CONFIG_SHELL - SHELL="$CONFIG_SHELL" - export SHELL - echo="$CONFIG_SHELL [$]0 --fallback-echo" - elif echo_testing_string=`($CONFIG_SHELL "[$]0" --fallback-echo '\t') 2>/dev/null` && - test "X$echo_testing_string" = 'X\t' && - echo_testing_string=`($CONFIG_SHELL "[$]0" --fallback-echo "$echo_test_string") 2>/dev/null` && - test "X$echo_testing_string" = "X$echo_test_string"; then - echo="$CONFIG_SHELL [$]0 --fallback-echo" - else - # maybe with a smaller string... - prev=: - - for cmd in 'echo test' 'sed 2q "[$]0"' 'sed 10q "[$]0"' 'sed 20q "[$]0"' 'sed 50q "[$]0"'; do - if (test "X$echo_test_string" = "X`eval $cmd`") 2>/dev/null - then - break - fi - prev="$cmd" - done - - if test "$prev" != 'sed 50q "[$]0"'; then - echo_test_string=`eval $prev` - export echo_test_string - exec ${ORIGINAL_CONFIG_SHELL-${CONFIG_SHELL-/bin/sh}} "[$]0" ${1+"[$]@"} - else - # Oops. We lost completely, so just stick with echo. - echo=echo - fi - fi - fi - fi -fi -fi - -# Copy echo and quote the copy suitably for passing to libtool from -# the Makefile, instead of quoting the original, which is used later. -ECHO=$echo -if test "X$ECHO" = "X$CONFIG_SHELL [$]0 --fallback-echo"; then - ECHO="$CONFIG_SHELL \\\$\[$]0 --fallback-echo" -fi - -AC_SUBST(ECHO) -])])# _LT_AC_PROG_ECHO_BACKSLASH - - -# _LT_AC_LOCK -# ----------- -AC_DEFUN([_LT_AC_LOCK], -[AC_ARG_ENABLE([libtool-lock], - [AC_HELP_STRING([--disable-libtool-lock], - [avoid locking (might break parallel builds)])]) -test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes - -# Some flags need to be propagated to the compiler or linker for good -# libtool support. -case $host in -ia64-*-hpux*) - # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.$ac_objext` in - *ELF-32*) - HPUX_IA64_MODE="32" - ;; - *ELF-64*) - HPUX_IA64_MODE="64" - ;; - esac - fi - rm -rf conftest* - ;; -*-*-irix6*) - # Find out which ABI we are using. - echo '[#]line __oline__ "configure"' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - if test "$lt_cv_prog_gnu_ld" = yes; then - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -melf32bsmip" - ;; - *N32*) - LD="${LD-ld} -melf32bmipn32" - ;; - *64-bit*) - LD="${LD-ld} -melf64bmip" - ;; - esac - else - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -32" - ;; - *N32*) - LD="${LD-ld} -n32" - ;; - *64-bit*) - LD="${LD-ld} -64" - ;; - esac - fi - fi - rm -rf conftest* - ;; - -x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ -s390*-*linux*|sparc*-*linux*) - # Find out which ABI we are using. 
- echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.o` in - *32-bit*) - case $host in - x86_64-*kfreebsd*-gnu) - LD="${LD-ld} -m elf_i386_fbsd" - ;; - x86_64-*linux*) - LD="${LD-ld} -m elf_i386" - ;; - ppc64-*linux*|powerpc64-*linux*) - LD="${LD-ld} -m elf32ppclinux" - ;; - s390x-*linux*) - LD="${LD-ld} -m elf_s390" - ;; - sparc64-*linux*) - LD="${LD-ld} -m elf32_sparc" - ;; - esac - ;; - *64-bit*) - case $host in - x86_64-*kfreebsd*-gnu) - LD="${LD-ld} -m elf_x86_64_fbsd" - ;; - x86_64-*linux*) - LD="${LD-ld} -m elf_x86_64" - ;; - ppc*-*linux*|powerpc*-*linux*) - LD="${LD-ld} -m elf64ppc" - ;; - s390*-*linux*) - LD="${LD-ld} -m elf64_s390" - ;; - sparc*-*linux*) - LD="${LD-ld} -m elf64_sparc" - ;; - esac - ;; - esac - fi - rm -rf conftest* - ;; - -*-*-sco3.2v5*) - # On SCO OpenServer 5, we need -belf to get full-featured binaries. - SAVE_CFLAGS="$CFLAGS" - CFLAGS="$CFLAGS -belf" - AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf, - [AC_LANG_PUSH(C) - AC_TRY_LINK([],[],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no]) - AC_LANG_POP]) - if test x"$lt_cv_cc_needs_belf" != x"yes"; then - # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf - CFLAGS="$SAVE_CFLAGS" - fi - ;; -sparc*-*solaris*) - # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if AC_TRY_EVAL(ac_compile); then - case `/usr/bin/file conftest.o` in - *64-bit*) - case $lt_cv_prog_gnu_ld in - yes*) LD="${LD-ld} -m elf64_sparc" ;; - *) - if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then - LD="${LD-ld} -64" - fi - ;; - esac - ;; - esac - fi - rm -rf conftest* - ;; - -AC_PROVIDE_IFELSE([AC_LIBTOOL_WIN32_DLL], -[*-*-cygwin* | *-*-mingw* | *-*-pw32*) - AC_CHECK_TOOL(DLLTOOL, dlltool, false) - AC_CHECK_TOOL(AS, as, false) - AC_CHECK_TOOL(OBJDUMP, objdump, false) - ;; - ]) -esac - -need_locks="$enable_libtool_lock" - -])# _LT_AC_LOCK - - -# AC_LIBTOOL_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, -# [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE]) -# ---------------------------------------------------------------- -# Check whether the given compiler option works -AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], -[AC_REQUIRE([LT_AC_PROG_SED]) -AC_CACHE_CHECK([$1], [$2], - [$2=no - ifelse([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4]) - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - lt_compiler_flag="$3" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - # The option is referenced via a variable to avoid confusing sed. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:__oline__: $lt_compile\"" >&AS_MESSAGE_LOG_FD) - (eval "$lt_compile" 2>conftest.err) - ac_status=$? - cat conftest.err >&AS_MESSAGE_LOG_FD - echo "$as_me:__oline__: \$? = $ac_status" >&AS_MESSAGE_LOG_FD - if (exit $ac_status) && test -s "$ac_outfile"; then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings other than the usual output. - $echo "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then - $2=yes - fi - fi - $rm conftest* -]) - -if test x"[$]$2" = xyes; then - ifelse([$5], , :, [$5]) -else - ifelse([$6], , :, [$6]) -fi -])# AC_LIBTOOL_COMPILER_OPTION - - -# AC_LIBTOOL_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, -# [ACTION-SUCCESS], [ACTION-FAILURE]) -# ------------------------------------------------------------ -# Check whether the given compiler option works -AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -AC_CACHE_CHECK([$1], [$2], - [$2=no - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $3" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then - # The linker can only warn and ignore the option if not recognized - # So say no if there are warnings - if test -s conftest.err; then - # Append any errors to the config.log. - cat conftest.err 1>&AS_MESSAGE_LOG_FD - $echo "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if diff conftest.exp conftest.er2 >/dev/null; then - $2=yes - fi - else - $2=yes - fi - fi - $rm -r conftest* - LDFLAGS="$save_LDFLAGS" -]) - -if test x"[$]$2" = xyes; then - ifelse([$4], , :, [$4]) -else - ifelse([$5], , :, [$5]) -fi -])# AC_LIBTOOL_LINKER_OPTION - - -# AC_LIBTOOL_SYS_MAX_CMD_LEN -# -------------------------- -AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], -[# find the maximum length of command line arguments -AC_MSG_CHECKING([the maximum length of command line arguments]) -AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl - i=0 - teststring="ABCD" - - case $build_os in - msdosdjgpp*) - # On DJGPP, this test can blow up pretty badly due to problems in libc - # (any single argument exceeding 2000 bytes causes a buffer overrun - # during glob expansion). Even if it were fixed, the result of this - # check would be larger than it should be. - lt_cv_sys_max_cmd_len=12288; # 12K is about right - ;; - - gnu*) - # Under GNU Hurd, this test is not required because there is - # no limit to the length of command line arguments. - # Libtool will interpret -1 as no limit whatsoever - lt_cv_sys_max_cmd_len=-1; - ;; - - cygwin* | mingw*) - # On Win9x/ME, this test blows up -- it succeeds, but takes - # about 5 minutes as the teststring grows exponentially. - # Worse, since 9x/ME are not pre-emptively multitasking, - # you end up with a "frozen" computer, even though with patience - # the test eventually succeeds (with a max line length of 256k). - # Instead, let's just punt: use the minimum linelength reported by - # all of the supported platforms: 8192 (on NT/2K/XP). - lt_cv_sys_max_cmd_len=8192; - ;; - - amigaos*) - # On AmigaOS with pdksh, this test takes hours, literally. - # So we just punt and use a minimum line length of 8192. - lt_cv_sys_max_cmd_len=8192; - ;; - - netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) - # This has been around since 386BSD, at least. Likely further. - if test -x /sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` - elif test -x /usr/sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` - else - lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs - fi - # And add a safety zone - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - ;; - - interix*) - # We know the value 262144 and hardcode it with a safety zone (like BSD) - lt_cv_sys_max_cmd_len=196608 - ;; - - osf*) - # Dr. 
Hans Ekkehard Plesser reports seeing a kernel panic running configure - # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not - # nice to cause kernel panics so lets avoid the loop below. - # First set a reasonable default. - lt_cv_sys_max_cmd_len=16384 - # - if test -x /sbin/sysconfig; then - case `/sbin/sysconfig -q proc exec_disable_arg_limit` in - *1*) lt_cv_sys_max_cmd_len=-1 ;; - esac - fi - ;; - sco3.2v5*) - lt_cv_sys_max_cmd_len=102400 - ;; - sysv5* | sco5v6* | sysv4.2uw2*) - kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` - if test -n "$kargmax"; then - lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'` - else - lt_cv_sys_max_cmd_len=32768 - fi - ;; - *) - lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` - if test -n "$lt_cv_sys_max_cmd_len"; then - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - else - SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} - while (test "X"`$SHELL [$]0 --fallback-echo "X$teststring" 2>/dev/null` \ - = "XX$teststring") >/dev/null 2>&1 && - new_result=`expr "X$teststring" : ".*" 2>&1` && - lt_cv_sys_max_cmd_len=$new_result && - test $i != 17 # 1/2 MB should be enough - do - i=`expr $i + 1` - teststring=$teststring$teststring - done - teststring= - # Add a significant safety factor because C++ compilers can tack on massive - # amounts of additional arguments before passing them to the linker. - # It appears as though 1/2 is a usable value. - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` - fi - ;; - esac -]) -if test -n $lt_cv_sys_max_cmd_len ; then - AC_MSG_RESULT($lt_cv_sys_max_cmd_len) -else - AC_MSG_RESULT(none) -fi -])# AC_LIBTOOL_SYS_MAX_CMD_LEN - - -# _LT_AC_CHECK_DLFCN -# ------------------ -AC_DEFUN([_LT_AC_CHECK_DLFCN], -[AC_CHECK_HEADERS(dlfcn.h)dnl -])# _LT_AC_CHECK_DLFCN - - -# _LT_AC_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE, -# ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING) -# --------------------------------------------------------------------- -AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF], -[AC_REQUIRE([_LT_AC_CHECK_DLFCN])dnl -if test "$cross_compiling" = yes; then : - [$4] -else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 - lt_status=$lt_dlunknown - cat > conftest.$ac_ext <<EOF -[#line __oline__ "configure" -#include "confdefs.h" - -#if HAVE_DLFCN_H -#include <dlfcn.h> -#endif - -#include <stdio.h> - -#ifdef RTLD_GLOBAL -# define LT_DLGLOBAL RTLD_GLOBAL -#else -# ifdef DL_GLOBAL -# define LT_DLGLOBAL DL_GLOBAL -# else -# define LT_DLGLOBAL 0 -# endif -#endif - -/* We may have to define LT_DLLAZY_OR_NOW in the command line if we - find out it does not work in some platform. 
*/ -#ifndef LT_DLLAZY_OR_NOW -# ifdef RTLD_LAZY -# define LT_DLLAZY_OR_NOW RTLD_LAZY -# else -# ifdef DL_LAZY -# define LT_DLLAZY_OR_NOW DL_LAZY -# else -# ifdef RTLD_NOW -# define LT_DLLAZY_OR_NOW RTLD_NOW -# else -# ifdef DL_NOW -# define LT_DLLAZY_OR_NOW DL_NOW -# else -# define LT_DLLAZY_OR_NOW 0 -# endif -# endif -# endif -# endif -#endif - -#ifdef __cplusplus -extern "C" void exit (int); -#endif - -void fnord() { int i=42;} -int main () -{ - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); - int status = $lt_dlunknown; - - if (self) - { - if (dlsym (self,"fnord")) status = $lt_dlno_uscore; - else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; - /* dlclose (self); */ - } - else - puts (dlerror ()); - - exit (status); -}] -EOF - if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null - lt_status=$? - case x$lt_status in - x$lt_dlno_uscore) $1 ;; - x$lt_dlneed_uscore) $2 ;; - x$lt_dlunknown|x*) $3 ;; - esac - else : - # compilation failed - $3 - fi -fi -rm -fr conftest* -])# _LT_AC_TRY_DLOPEN_SELF - - -# AC_LIBTOOL_DLOPEN_SELF -# ---------------------- -AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], -[AC_REQUIRE([_LT_AC_CHECK_DLFCN])dnl -if test "x$enable_dlopen" != xyes; then - enable_dlopen=unknown - enable_dlopen_self=unknown - enable_dlopen_self_static=unknown -else - lt_cv_dlopen=no - lt_cv_dlopen_libs= - - case $host_os in - beos*) - lt_cv_dlopen="load_add_on" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ;; - - mingw* | pw32*) - lt_cv_dlopen="LoadLibrary" - lt_cv_dlopen_libs= - ;; - - cygwin*) - lt_cv_dlopen="dlopen" - lt_cv_dlopen_libs= - ;; - - darwin*) - # if libdl is installed we need to link against it - AC_CHECK_LIB([dl], [dlopen], - [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[ - lt_cv_dlopen="dyld" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ]) - ;; - - *) - AC_CHECK_FUNC([shl_load], - [lt_cv_dlopen="shl_load"], - [AC_CHECK_LIB([dld], [shl_load], - [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"], - [AC_CHECK_FUNC([dlopen], - [lt_cv_dlopen="dlopen"], - [AC_CHECK_LIB([dl], [dlopen], - [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"], - [AC_CHECK_LIB([svld], [dlopen], - [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"], - [AC_CHECK_LIB([dld], [dld_link], - [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"]) - ]) - ]) - ]) - ]) - ]) - ;; - esac - - if test "x$lt_cv_dlopen" != xno; then - enable_dlopen=yes - else - enable_dlopen=no - fi - - case $lt_cv_dlopen in - dlopen) - save_CPPFLAGS="$CPPFLAGS" - test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" - - save_LDFLAGS="$LDFLAGS" - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" - - save_LIBS="$LIBS" - LIBS="$lt_cv_dlopen_libs $LIBS" - - AC_CACHE_CHECK([whether a program can dlopen itself], - lt_cv_dlopen_self, [dnl - _LT_AC_TRY_DLOPEN_SELF( - lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes, - lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross) - ]) - - if test "x$lt_cv_dlopen_self" = xyes; then - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" - AC_CACHE_CHECK([whether a statically linked program can dlopen itself], - lt_cv_dlopen_self_static, [dnl - _LT_AC_TRY_DLOPEN_SELF( - lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes, - lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross) - ]) - fi - - CPPFLAGS="$save_CPPFLAGS" - LDFLAGS="$save_LDFLAGS" - LIBS="$save_LIBS" - ;; - esac - - case $lt_cv_dlopen_self in - yes|no) 
enable_dlopen_self=$lt_cv_dlopen_self ;; - *) enable_dlopen_self=unknown ;; - esac - - case $lt_cv_dlopen_self_static in - yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; - *) enable_dlopen_self_static=unknown ;; - esac -fi -])# AC_LIBTOOL_DLOPEN_SELF - - -# AC_LIBTOOL_PROG_CC_C_O([TAGNAME]) -# --------------------------------- -# Check to see if options -c and -o are simultaneously supported by compiler -AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -AC_REQUIRE([_LT_AC_SYS_COMPILER])dnl -AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext], - [_LT_AC_TAGVAR(lt_cv_prog_compiler_c_o, $1)], - [_LT_AC_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no - $rm -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:__oline__: $lt_compile\"" >&AS_MESSAGE_LOG_FD) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&AS_MESSAGE_LOG_FD - echo "$as_me:__oline__: \$? = $ac_status" >&AS_MESSAGE_LOG_FD - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $echo "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - _LT_AC_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes - fi - fi - chmod u+w . 2>&AS_MESSAGE_LOG_FD - $rm conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $rm out/ii_files/* && rmdir out/ii_files - $rm out/* && rmdir out - cd .. - rmdir conftest - $rm conftest* -]) -])# AC_LIBTOOL_PROG_CC_C_O - - -# AC_LIBTOOL_SYS_HARD_LINK_LOCKS([TAGNAME]) -# ----------------------------------------- -# Check to see if we can do hard links to lock some files if needed -AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], -[AC_REQUIRE([_LT_AC_LOCK])dnl - -hard_links="nottested" -if test "$_LT_AC_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - AC_MSG_CHECKING([if we can lock with hard links]) - hard_links=yes - $rm conftest* - ln conftest.a conftest.b 2>/dev/null && hard_links=no - touch conftest.a - ln conftest.a conftest.b 2>&5 || hard_links=no - ln conftest.a conftest.b 2>/dev/null && hard_links=no - AC_MSG_RESULT([$hard_links]) - if test "$hard_links" = no; then - AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe]) - need_locks=warn - fi -else - need_locks=no -fi -])# AC_LIBTOOL_SYS_HARD_LINK_LOCKS - - -# AC_LIBTOOL_OBJDIR -# ----------------- -AC_DEFUN([AC_LIBTOOL_OBJDIR], -[AC_CACHE_CHECK([for objdir], [lt_cv_objdir], -[rm -f .libs 2>/dev/null -mkdir .libs 2>/dev/null -if test -d .libs; then - lt_cv_objdir=.libs -else - # MS-DOS does not allow filenames that begin with a dot. 
- lt_cv_objdir=_libs -fi -rmdir .libs 2>/dev/null]) -objdir=$lt_cv_objdir -])# AC_LIBTOOL_OBJDIR - - -# AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH([TAGNAME]) -# ---------------------------------------------- -# Check hardcoding attributes. -AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], -[AC_MSG_CHECKING([how to hardcode library paths into programs]) -_LT_AC_TAGVAR(hardcode_action, $1)= -if test -n "$_LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)" || \ - test -n "$_LT_AC_TAGVAR(runpath_var, $1)" || \ - test "X$_LT_AC_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then - - # We can hardcode non-existant directories. - if test "$_LT_AC_TAGVAR(hardcode_direct, $1)" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one - ## test "$_LT_AC_TAGVAR(hardcode_shlibpath_var, $1)" != no && - test "$_LT_AC_TAGVAR(hardcode_minus_L, $1)" != no; then - # Linking always hardcodes the temporary library directory. - _LT_AC_TAGVAR(hardcode_action, $1)=relink - else - # We can link without hardcoding, and we can hardcode nonexisting dirs. - _LT_AC_TAGVAR(hardcode_action, $1)=immediate - fi -else - # We cannot hardcode anything, or else we can only hardcode existing - # directories. - _LT_AC_TAGVAR(hardcode_action, $1)=unsupported -fi -AC_MSG_RESULT([$_LT_AC_TAGVAR(hardcode_action, $1)]) - -if test "$_LT_AC_TAGVAR(hardcode_action, $1)" = relink; then - # Fast installation is not supported - enable_fast_install=no -elif test "$shlibpath_overrides_runpath" = yes || - test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless -fi -])# AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH - - -# AC_LIBTOOL_SYS_LIB_STRIP -# ------------------------ -AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP], -[striplib= -old_striplib= -AC_MSG_CHECKING([whether stripping libraries is possible]) -if test -n "$STRIP" && $STRIP -V 2>&1 | grep "GNU strip" >/dev/null; then - test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" - test -z "$striplib" && striplib="$STRIP --strip-unneeded" - AC_MSG_RESULT([yes]) -else -# FIXME - insert some real tests, host_os isn't really good enough - case $host_os in - darwin*) - if test -n "$STRIP" ; then - striplib="$STRIP -x" - old_striplib="$STRIP -S" - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) -fi - ;; - *) - AC_MSG_RESULT([no]) - ;; - esac -fi -])# AC_LIBTOOL_SYS_LIB_STRIP - - -# AC_LIBTOOL_SYS_DYNAMIC_LINKER -# ----------------------------- -# PORTME Fill in your ld.so characteristics -AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -AC_MSG_CHECKING([dynamic linker characteristics]) -library_names_spec= -libname_spec='lib$name' -soname_spec= -shrext_cmds=".so" -postinstall_cmds= -postuninstall_cmds= -finish_cmds= -finish_eval= -shlibpath_var= -shlibpath_overrides_runpath=unknown -version_type=none -dynamic_linker="$host_os ld.so" -sys_lib_dlsearch_path_spec="/lib /usr/lib" -m4_if($1,[],[ -if test "$GCC" = yes; then - case $host_os in - darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; - *) lt_awk_arg="/^libraries:/" ;; - esac - lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e "s,=/,/,g"` - if echo "$lt_search_path_spec" | grep ';' >/dev/null ; then - # if the path contains ";" then we assume it to be the separator - # otherwise default to the standard path separator (i.e. 
":") - it is - # assumed that no part of a normal pathname contains ";" but that should - # okay in the real world where ";" in dirpaths is itself problematic. - lt_search_path_spec=`echo "$lt_search_path_spec" | $SED -e 's/;/ /g'` - else - lt_search_path_spec=`echo "$lt_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - fi - # Ok, now we have the path, separated by spaces, we can step through it - # and add multilib dir if necessary. - lt_tmp_lt_search_path_spec= - lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` - for lt_sys_path in $lt_search_path_spec; do - if test -d "$lt_sys_path/$lt_multi_os_dir"; then - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" - else - test -d "$lt_sys_path" && \ - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" - fi - done - lt_search_path_spec=`echo $lt_tmp_lt_search_path_spec | awk ' -BEGIN {RS=" "; FS="/|\n";} { - lt_foo=""; - lt_count=0; - for (lt_i = NF; lt_i > 0; lt_i--) { - if ($lt_i != "" && $lt_i != ".") { - if ($lt_i == "..") { - lt_count++; - } else { - if (lt_count == 0) { - lt_foo="/" $lt_i lt_foo; - } else { - lt_count--; - } - } - } - } - if (lt_foo != "") { lt_freq[[lt_foo]]++; } - if (lt_freq[[lt_foo]] == 1) { print lt_foo; } -}'` - sys_lib_search_path_spec=`echo $lt_search_path_spec` -else - sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -fi]) -need_lib_prefix=unknown -hardcode_into_libs=no - -# when you set need_version to no, make sure it does not cause -set_version -# flags to be left without arguments -need_version=unknown - -case $host_os in -aix3*) - version_type=linux - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. - soname_spec='${libname}${release}${shared_ext}$major' - ;; - -aix[[4-9]]*) - version_type=linux - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes - if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 - library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with - # the line `#! .'. This would cause the generated library to - # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. - case $host_os in - aix4 | aix4.[[01]] | aix4.[[01]].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' - echo '#endif'; } | ${CC} -E - | grep yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac - # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. - if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib<name>.so - # instead of lib<name>.a to let people know that these are not - # typical AIX shared libraries. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. 
- library_names_spec='${libname}${release}.a $libname.a' - soname_spec='${libname}${release}${shared_ext}$major' - fi - shlibpath_var=LIBPATH - fi - ;; - -amigaos*) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. - finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$echo "X$lib" | $Xsed -e '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $rm /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - -beos*) - library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; - -bsdi[[45]]*) - version_type=linux - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" - sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" - # the default ld.so.conf also contains /usr/contrib/lib and - # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow - # libtool to hard-code these into programs - ;; - -cygwin* | mingw* | pw32*) - version_type=windows - shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - - case $GCC,$host_os in - yes,cygwin* | yes,mingw* | yes,pw32*) - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i;echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ - chmod a+x \$dldir/$dlname' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $rm \$dlpath' - shlibpath_overrides_runpath=yes - - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' - soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" - ;; - mingw*) - # MinGW DLLs use traditional 'lib' prefix - soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - sys_lib_search_path_spec=`$CC -print-search-dirs | grep "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` - if echo "$sys_lib_search_path_spec" | [grep ';[c-zC-Z]:/' >/dev/null]; then - # It is most probably a Windows format PATH printed by - # mingw gcc, but we are running on Cygwin. Gcc prints its search - # path with ; separators, and with drive letters. We can handle the - # drive letters (cygwin fileutils understands them), so leave them, - # especially as we might pass files found there to a mingw objdump, - # which wouldn't understand a cygwinified path. Ahh. 
- sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` - else - sys_lib_search_path_spec=`echo "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - fi - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - ;; - esac - ;; - - *) - library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' - ;; - esac - dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . and the directory the executable is in - shlibpath_var=PATH - ;; - -darwin* | rhapsody*) - dynamic_linker="$host_os dyld" - version_type=darwin - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${versuffix}$shared_ext ${libname}${release}${major}$shared_ext ${libname}$shared_ext' - soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' - m4_if([$1], [],[ - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"]) - sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' - ;; - -dgux*) - version_type=linux - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -freebsd1*) - dynamic_linker=no - ;; - -freebsd* | dragonfly*) - # DragonFly does not have aout. When/if they implement a new - # versioning mechanism, adjust this. - if test -x /usr/bin/objformat; then - objformat=`/usr/bin/objformat` - else - case $host_os in - freebsd[[123]]*) objformat=aout ;; - *) objformat=elf ;; - esac - fi - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac - shlibpath_var=LD_LIBRARY_PATH - case $host_os in - freebsd2*) - shlibpath_overrides_runpath=yes - ;; - freebsd3.[[01]]* | freebsdelf3.[[01]]*) - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \ - freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1) - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - *) # from 4.6 on, and DragonFly - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - esac - ;; - -gnu*) - version_type=linux - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - hardcode_into_libs=yes - ;; - -hpux9* | hpux10* | hpux11*) - # Give a soname corresponding to the major version so that dld.sl refuses to - # link against other versions. - version_type=sunos - need_lib_prefix=no - need_version=no - case $host_cpu in - ia64*) - shrext_cmds='.so' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" - fi - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - *) - shrext_cmds='.sl' - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555. - postinstall_cmds='chmod 555 $lib' - ;; - -interix[[3-9]]*) - version_type=linux - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -irix5* | irix6* | nonstopux*) - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) - if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux - else - version_type=irix - fi ;; - esac - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= - ;; - *) - case $LD in # libtool.m4 will add one of these switches to LD - *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") - libsuff= shlibsuff= libmagic=32-bit;; - *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") - libsuff=32 shlibsuff=N32 libmagic=N32;; - *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") - libsuff=64 shlibsuff=64 libmagic=64-bit;; - *) libsuff= shlibsuff= libmagic=never-match;; - esac - ;; - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no - sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" - sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -# No shared lib support for Linux oldld, aout, or coff. -linux*oldld* | linux*aout* | linux*coff*) - dynamic_linker=no - ;; - -# This must be Linux ELF. 
-linux* | k*bsd*-gnu)
-  version_type=linux
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  # This implies no fast_install, which is unacceptable.
-  # Some rework will be needed to allow for fast_install
-  # before this can be enabled.
-  hardcode_into_libs=yes
-
-  # Append ld.so.conf contents to the search path
-  if test -f /etc/ld.so.conf; then
-    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '`
-    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
-  fi
-
-  # We used to test for /lib/ld.so.1 and disable shared libraries on
-  # powerpc, because MkLinux only supported shared libraries with the
-  # GNU dynamic linker. Since this was broken with cross compilers,
-  # most powerpc-linux boxes support dynamic linking these days and
-  # people can always --disable-shared, the test was removed, and we
-  # assume the GNU/Linux dynamic linker is in use.
-  dynamic_linker='GNU/Linux ld.so'
-  ;;
-
-netbsd*)
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-    dynamic_linker='NetBSD (a.out) ld.so'
-  else
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    dynamic_linker='NetBSD ld.elf_so'
-  fi
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  ;;
-
-newsos6)
-  version_type=linux
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  ;;
-
-nto-qnx*)
-  version_type=linux
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  ;;
-
-openbsd*)
-  version_type=sunos
-  sys_lib_dlsearch_path_spec="/usr/lib"
-  need_lib_prefix=no
-  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
- case $host_os in - openbsd3.3 | openbsd3.3.*) need_version=yes ;; - *) need_version=no ;; - esac - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH - if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - case $host_os in - openbsd2.[[89]] | openbsd2.[[89]].*) - shlibpath_overrides_runpath=no - ;; - *) - shlibpath_overrides_runpath=yes - ;; - esac - else - shlibpath_overrides_runpath=yes - fi - ;; - -os2*) - libname_spec='$name' - shrext_cmds=".dll" - need_lib_prefix=no - library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' - shlibpath_var=LIBPATH - ;; - -osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" - sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - -rdos*) - dynamic_linker=no - ;; - -solaris*) - version_type=linux - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - # ldd complains unless libraries are executable - postinstall_cmds='chmod +x $lib' - ;; - -sunos4*) - version_type=sunos - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes - ;; - -sysv4 | sysv4.3*) - version_type=linux - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) - shlibpath_overrides_runpath=no - need_lib_prefix=no - export_dynamic_flag_spec='${wl}-Blargedynsym' - runpath_var=LD_RUN_PATH - ;; - siemens) - need_lib_prefix=no - ;; - motorola) - need_lib_prefix=no - need_version=no - shlibpath_overrides_runpath=no - sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' - ;; - esac - ;; - -sysv4*MP*) - if test -d /usr/nec ;then - version_type=linux - library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' - soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - -sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) - version_type=freebsd-elf - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - hardcode_into_libs=yes - if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - shlibpath_overrides_runpath=no - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' - 
shlibpath_overrides_runpath=yes - case $host_os in - sco3.2v5*) - sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" - ;; - esac - fi - sys_lib_dlsearch_path_spec='/usr/lib' - ;; - -uts4*) - version_type=linux - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -*) - dynamic_linker=no - ;; -esac -AC_MSG_RESULT([$dynamic_linker]) -test "$dynamic_linker" = no && can_build_shared=no - -AC_CACHE_VAL([lt_cv_sys_lib_search_path_spec], -[lt_cv_sys_lib_search_path_spec="$sys_lib_search_path_spec"]) -sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" -AC_CACHE_VAL([lt_cv_sys_lib_dlsearch_path_spec], -[lt_cv_sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec"]) -sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" - -variables_saved_for_relink="PATH $shlibpath_var $runpath_var" -if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" -fi -])# AC_LIBTOOL_SYS_DYNAMIC_LINKER - - -# _LT_AC_TAGCONFIG -# ---------------- -AC_DEFUN([_LT_AC_TAGCONFIG], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -AC_ARG_WITH([tags], - [AC_HELP_STRING([--with-tags@<:@=TAGS@:>@], - [include additional configurations @<:@automatic@:>@])], - [tagnames="$withval"]) - -if test -f "$ltmain" && test -n "$tagnames"; then - if test ! -f "${ofile}"; then - AC_MSG_WARN([output file `$ofile' does not exist]) - fi - - if test -z "$LTCC"; then - eval "`$SHELL ${ofile} --config | grep '^LTCC='`" - if test -z "$LTCC"; then - AC_MSG_WARN([output file `$ofile' does not look like a libtool script]) - else - AC_MSG_WARN([using `LTCC=$LTCC', extracted from `$ofile']) - fi - fi - if test -z "$LTCFLAGS"; then - eval "`$SHELL ${ofile} --config | grep '^LTCFLAGS='`" - fi - - # Extract list of available tagged configurations in $ofile. - # Note that this assumes the entire list is on one line. - available_tags=`grep "^available_tags=" "${ofile}" | $SED -e 's/available_tags=\(.*$\)/\1/' -e 's/\"//g'` - - lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for tagname in $tagnames; do - IFS="$lt_save_ifs" - # Check whether tagname contains only valid characters - case `$echo "X$tagname" | $Xsed -e 's:[[-_ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890,/]]::g'` in - "") ;; - *) AC_MSG_ERROR([invalid tag name: $tagname]) - ;; - esac - - if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$" < "${ofile}" > /dev/null - then - AC_MSG_ERROR([tag name \"$tagname\" already exists]) - fi - - # Update the list of available tags. - if test -n "$tagname"; then - echo appending configuration tag \"$tagname\" to $ofile - - case $tagname in - CXX) - if test -n "$CXX" && ( test "X$CXX" != "Xno" && - ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || - (test "X$CXX" != "Xg++"))) ; then - AC_LIBTOOL_LANG_CXX_CONFIG - else - tagname="" - fi - ;; - - F77) - if test -n "$F77" && test "X$F77" != "Xno"; then - AC_LIBTOOL_LANG_F77_CONFIG - else - tagname="" - fi - ;; - - GCJ) - if test -n "$GCJ" && test "X$GCJ" != "Xno"; then - AC_LIBTOOL_LANG_GCJ_CONFIG - else - tagname="" - fi - ;; - - RC) - AC_LIBTOOL_LANG_RC_CONFIG - ;; - - *) - AC_MSG_ERROR([Unsupported tag name: $tagname]) - ;; - esac - - # Append the new tag name to the list of available tags. 
- if test -n "$tagname" ; then - available_tags="$available_tags $tagname" - fi - fi - done - IFS="$lt_save_ifs" - - # Now substitute the updated list of available tags. - if eval "sed -e 's/^available_tags=.*\$/available_tags=\"$available_tags\"/' \"$ofile\" > \"${ofile}T\""; then - mv "${ofile}T" "$ofile" - chmod +x "$ofile" - else - rm -f "${ofile}T" - AC_MSG_ERROR([unable to update list of available tagged configurations.]) - fi -fi -])# _LT_AC_TAGCONFIG - - -# AC_LIBTOOL_DLOPEN -# ----------------- -# enable checks for dlopen support -AC_DEFUN([AC_LIBTOOL_DLOPEN], - [AC_BEFORE([$0],[AC_LIBTOOL_SETUP]) -])# AC_LIBTOOL_DLOPEN - - -# AC_LIBTOOL_WIN32_DLL -# -------------------- -# declare package support for building win32 DLLs -AC_DEFUN([AC_LIBTOOL_WIN32_DLL], -[AC_BEFORE([$0], [AC_LIBTOOL_SETUP]) -])# AC_LIBTOOL_WIN32_DLL - - -# AC_ENABLE_SHARED([DEFAULT]) -# --------------------------- -# implement the --enable-shared flag -# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. -AC_DEFUN([AC_ENABLE_SHARED], -[define([AC_ENABLE_SHARED_DEFAULT], ifelse($1, no, no, yes))dnl -AC_ARG_ENABLE([shared], - [AC_HELP_STRING([--enable-shared@<:@=PKGS@:>@], - [build shared libraries @<:@default=]AC_ENABLE_SHARED_DEFAULT[@:>@])], - [p=${PACKAGE-default} - case $enableval in - yes) enable_shared=yes ;; - no) enable_shared=no ;; - *) - enable_shared=no - # Look at the argument we got. We use all the common list separators. - lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_shared=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac], - [enable_shared=]AC_ENABLE_SHARED_DEFAULT) -])# AC_ENABLE_SHARED - - -# AC_DISABLE_SHARED -# ----------------- -# set the default shared flag to --disable-shared -AC_DEFUN([AC_DISABLE_SHARED], -[AC_BEFORE([$0],[AC_LIBTOOL_SETUP])dnl -AC_ENABLE_SHARED(no) -])# AC_DISABLE_SHARED - - -# AC_ENABLE_STATIC([DEFAULT]) -# --------------------------- -# implement the --enable-static flag -# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. -AC_DEFUN([AC_ENABLE_STATIC], -[define([AC_ENABLE_STATIC_DEFAULT], ifelse($1, no, no, yes))dnl -AC_ARG_ENABLE([static], - [AC_HELP_STRING([--enable-static@<:@=PKGS@:>@], - [build static libraries @<:@default=]AC_ENABLE_STATIC_DEFAULT[@:>@])], - [p=${PACKAGE-default} - case $enableval in - yes) enable_static=yes ;; - no) enable_static=no ;; - *) - enable_static=no - # Look at the argument we got. We use all the common list separators. - lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_static=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac], - [enable_static=]AC_ENABLE_STATIC_DEFAULT) -])# AC_ENABLE_STATIC - - -# AC_DISABLE_STATIC -# ----------------- -# set the default static flag to --disable-static -AC_DEFUN([AC_DISABLE_STATIC], -[AC_BEFORE([$0],[AC_LIBTOOL_SETUP])dnl -AC_ENABLE_STATIC(no) -])# AC_DISABLE_STATIC - - -# AC_ENABLE_FAST_INSTALL([DEFAULT]) -# --------------------------------- -# implement the --enable-fast-install flag -# DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. 
-AC_DEFUN([AC_ENABLE_FAST_INSTALL], -[define([AC_ENABLE_FAST_INSTALL_DEFAULT], ifelse($1, no, no, yes))dnl -AC_ARG_ENABLE([fast-install], - [AC_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@], - [optimize for fast installation @<:@default=]AC_ENABLE_FAST_INSTALL_DEFAULT[@:>@])], - [p=${PACKAGE-default} - case $enableval in - yes) enable_fast_install=yes ;; - no) enable_fast_install=no ;; - *) - enable_fast_install=no - # Look at the argument we got. We use all the common list separators. - lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_fast_install=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac], - [enable_fast_install=]AC_ENABLE_FAST_INSTALL_DEFAULT) -])# AC_ENABLE_FAST_INSTALL - - -# AC_DISABLE_FAST_INSTALL -# ----------------------- -# set the default to --disable-fast-install -AC_DEFUN([AC_DISABLE_FAST_INSTALL], -[AC_BEFORE([$0],[AC_LIBTOOL_SETUP])dnl -AC_ENABLE_FAST_INSTALL(no) -])# AC_DISABLE_FAST_INSTALL - - -# AC_LIBTOOL_PICMODE([MODE]) -# -------------------------- -# implement the --with-pic flag -# MODE is either `yes' or `no'. If omitted, it defaults to `both'. -AC_DEFUN([AC_LIBTOOL_PICMODE], -[AC_BEFORE([$0],[AC_LIBTOOL_SETUP])dnl -pic_mode=ifelse($#,1,$1,default) -])# AC_LIBTOOL_PICMODE - - -# AC_PROG_EGREP -# ------------- -# This is predefined starting with Autoconf 2.54, so this conditional -# definition can be removed once we require Autoconf 2.54 or later. -m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP], -[AC_CACHE_CHECK([for egrep], [ac_cv_prog_egrep], - [if echo a | (grep -E '(a|b)') >/dev/null 2>&1 - then ac_cv_prog_egrep='grep -E' - else ac_cv_prog_egrep='egrep' - fi]) - EGREP=$ac_cv_prog_egrep - AC_SUBST([EGREP]) -])]) - - -# AC_PATH_TOOL_PREFIX -# ------------------- -# find a file program which can recognize shared library -AC_DEFUN([AC_PATH_TOOL_PREFIX], -[AC_REQUIRE([AC_PROG_EGREP])dnl -AC_MSG_CHECKING([for $1]) -AC_CACHE_VAL(lt_cv_path_MAGIC_CMD, -[case $MAGIC_CMD in -[[\\/*] | ?:[\\/]*]) - lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; -*) - lt_save_MAGIC_CMD="$MAGIC_CMD" - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR -dnl $ac_dummy forces splitting on constant user-supplied paths. -dnl POSIX.2 word splitting is done only on the output of word expansions, -dnl not every word. This closes a longstanding sh security hole. - ac_dummy="ifelse([$2], , $PATH, [$2])" - for ac_dir in $ac_dummy; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f $ac_dir/$1; then - lt_cv_path_MAGIC_CMD="$ac_dir/$1" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` - MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : - else - cat <<EOF 1>&2 - -*** Warning: the command libtool uses to detect shared libraries, -*** $file_magic_cmd, produces output that libtool cannot recognize. -*** The result is that libtool may fail to recognize shared libraries -*** as such. This will affect the creation of libtool libraries that -*** depend on shared libraries, but programs linked with such libtool -*** libraries will work regardless of this problem. 
Nevertheless, you -*** may want to report the problem to your system manager and/or to -*** bug-libtool@gnu.org - -EOF - fi ;; - esac - fi - break - fi - done - IFS="$lt_save_ifs" - MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; -esac]) -MAGIC_CMD="$lt_cv_path_MAGIC_CMD" -if test -n "$MAGIC_CMD"; then - AC_MSG_RESULT($MAGIC_CMD) -else - AC_MSG_RESULT(no) -fi -])# AC_PATH_TOOL_PREFIX - - -# AC_PATH_MAGIC -# ------------- -# find a file program which can recognize a shared library -AC_DEFUN([AC_PATH_MAGIC], -[AC_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH) -if test -z "$lt_cv_path_MAGIC_CMD"; then - if test -n "$ac_tool_prefix"; then - AC_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH) - else - MAGIC_CMD=: - fi -fi -])# AC_PATH_MAGIC - - -# AC_PROG_LD -# ---------- -# find the pathname to the GNU or non-GNU linker -AC_DEFUN([AC_PROG_LD], -[AC_ARG_WITH([gnu-ld], - [AC_HELP_STRING([--with-gnu-ld], - [assume the C compiler uses GNU ld @<:@default=no@:>@])], - [test "$withval" = no || with_gnu_ld=yes], - [with_gnu_ld=no]) -AC_REQUIRE([LT_AC_PROG_SED])dnl -AC_REQUIRE([AC_PROG_CC])dnl -AC_REQUIRE([AC_CANONICAL_HOST])dnl -AC_REQUIRE([AC_CANONICAL_BUILD])dnl -ac_prog=ld -if test "$GCC" = yes; then - # Check if gcc -print-prog-name=ld gives a path. - AC_MSG_CHECKING([for ld used by $CC]) - case $host in - *-*-mingw*) - # gcc leaves a trailing carriage return which upsets mingw - ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; - *) - ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; - esac - case $ac_prog in - # Accept absolute paths. - [[\\/]]* | ?:[[\\/]]*) - re_direlt='/[[^/]][[^/]]*/\.\./' - # Canonicalize the pathname of ld - ac_prog=`echo $ac_prog| $SED 's%\\\\%/%g'` - while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do - ac_prog=`echo $ac_prog| $SED "s%$re_direlt%/%"` - done - test -z "$LD" && LD="$ac_prog" - ;; - "") - # If it fails, then pretend we aren't using GCC. - ac_prog=ld - ;; - *) - # If it is relative, then search for the first ld in PATH. - with_gnu_ld=unknown - ;; - esac -elif test "$with_gnu_ld" = yes; then - AC_MSG_CHECKING([for GNU ld]) -else - AC_MSG_CHECKING([for non-GNU ld]) -fi -AC_CACHE_VAL(lt_cv_path_LD, -[if test -z "$LD"; then - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then - lt_cv_path_LD="$ac_dir/$ac_prog" - # Check to see if the program is GNU ld. I'd rather use --version, - # but apparently some variants of GNU ld only accept -v. - # Break only if it was the GNU/non-GNU ld that we prefer. - case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in - *GNU* | *'with BFD'*) - test "$with_gnu_ld" != no && break - ;; - *) - test "$with_gnu_ld" != yes && break - ;; - esac - fi - done - IFS="$lt_save_ifs" -else - lt_cv_path_LD="$LD" # Let the user override the test with a path. -fi]) -LD="$lt_cv_path_LD" -if test -n "$LD"; then - AC_MSG_RESULT($LD) -else - AC_MSG_RESULT(no) -fi -test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH]) -AC_PROG_LD_GNU -])# AC_PROG_LD - - -# AC_PROG_LD_GNU -# -------------- -AC_DEFUN([AC_PROG_LD_GNU], -[AC_REQUIRE([AC_PROG_EGREP])dnl -AC_CACHE_CHECK([if the linker ($LD) is GNU ld], lt_cv_prog_gnu_ld, -[# I'd rather use --version here, but apparently some GNU lds only accept -v. 
-case `$LD -v 2>&1 </dev/null` in
-*GNU* | *'with BFD'*)
-  lt_cv_prog_gnu_ld=yes
-  ;;
-*)
-  lt_cv_prog_gnu_ld=no
-  ;;
-esac])
-with_gnu_ld=$lt_cv_prog_gnu_ld
-])# AC_PROG_LD_GNU
-
-
-# AC_PROG_LD_RELOAD_FLAG
-# ----------------------
-# find reload flag for linker
-# -- PORTME Some linkers may need a different reload flag.
-AC_DEFUN([AC_PROG_LD_RELOAD_FLAG],
-[AC_CACHE_CHECK([for $LD option to reload object files],
-  lt_cv_ld_reload_flag,
-  [lt_cv_ld_reload_flag='-r'])
-reload_flag=$lt_cv_ld_reload_flag
-case $reload_flag in
-"" | " "*) ;;
-*) reload_flag=" $reload_flag" ;;
-esac
-reload_cmds='$LD$reload_flag -o $output$reload_objs'
-case $host_os in
-  darwin*)
-    if test "$GCC" = yes; then
-      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-    else
-      reload_cmds='$LD$reload_flag -o $output$reload_objs'
-    fi
-    ;;
-esac
-])# AC_PROG_LD_RELOAD_FLAG
-
-
-# AC_DEPLIBS_CHECK_METHOD
-# -----------------------
-# how to check for library dependencies
-# -- PORTME fill in with the dynamic library characteristics
-AC_DEFUN([AC_DEPLIBS_CHECK_METHOD],
-[AC_CACHE_CHECK([how to recognize dependent libraries],
-lt_cv_deplibs_check_method,
-[lt_cv_file_magic_cmd='$MAGIC_CMD'
-lt_cv_file_magic_test_file=
-lt_cv_deplibs_check_method='unknown'
-# Need to set the preceding variable on all platforms that support
-# interlibrary dependencies.
-# 'none' -- dependencies not supported.
-# `unknown' -- same as none, but documents that we really don't know.
-# 'pass_all' -- all dependencies passed with no checks.
-# 'test_compile' -- check by making test program.
-# 'file_magic [[regex]]' -- check by looking for files in library path
-# which responds to the $file_magic_cmd with a given extended regex.
-# If you have `file' or equivalent on your system and you're not sure
-# whether `pass_all' will *always* work, you probably want this one.
-
-case $host_os in
-aix[[4-9]]*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-beos*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-bsdi[[45]]*)
-  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
-  lt_cv_file_magic_cmd='/usr/bin/file -L'
-  lt_cv_file_magic_test_file=/shlib/libc.so
-  ;;
-
-cygwin*)
-  # func_win32_libid is a shell function defined in ltmain.sh
-  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
-  lt_cv_file_magic_cmd='func_win32_libid'
-  ;;
-
-mingw* | pw32*)
-  # Base MSYS/MinGW do not provide the 'file' command needed by
-  # func_win32_libid shell function, so use a weaker test based on 'objdump',
-  # unless we find 'file', for example because we are cross-compiling.
-  if ( file / ) >/dev/null 2>&1; then
-    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
-    lt_cv_file_magic_cmd='func_win32_libid'
-  else
-    lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?'
-    lt_cv_file_magic_cmd='$OBJDUMP -f'
-  fi
-  ;;
-
-darwin* | rhapsody*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-freebsd* | dragonfly*)
-  if echo __ELF__ | $CC -E - | grep __ELF__ > /dev/null; then
-    case $host_cpu in
-    i*86 )
-      # Not sure whether the presence of OpenBSD here was a mistake.
-      # Let's accept both of them until this is cleared up.
- lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library' - lt_cv_file_magic_cmd=/usr/bin/file - lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` - ;; - esac - else - lt_cv_deplibs_check_method=pass_all - fi - ;; - -gnu*) - lt_cv_deplibs_check_method=pass_all - ;; - -hpux10.20* | hpux11*) - lt_cv_file_magic_cmd=/usr/bin/file - case $host_cpu in - ia64*) - lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64' - lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so - ;; - hppa*64*) - [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - PA-RISC [0-9].[0-9]'] - lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl - ;; - *) - lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]].[[0-9]]) shared library' - lt_cv_file_magic_test_file=/usr/lib/libc.sl - ;; - esac - ;; - -interix[[3-9]]*) - # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$' - ;; - -irix5* | irix6* | nonstopux*) - case $LD in - *-32|*"-32 ") libmagic=32-bit;; - *-n32|*"-n32 ") libmagic=N32;; - *-64|*"-64 ") libmagic=64-bit;; - *) libmagic=never-match;; - esac - lt_cv_deplibs_check_method=pass_all - ;; - -# This must be Linux ELF. -linux* | k*bsd*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - -netbsd*) - if echo __ELF__ | $CC -E - | grep __ELF__ > /dev/null; then - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$' - fi - ;; - -newos6*) - lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)' - lt_cv_file_magic_cmd=/usr/bin/file - lt_cv_file_magic_test_file=/usr/lib/libnls.so - ;; - -nto-qnx*) - lt_cv_deplibs_check_method=unknown - ;; - -openbsd*) - if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' - fi - ;; - -osf3* | osf4* | osf5*) - lt_cv_deplibs_check_method=pass_all - ;; - -rdos*) - lt_cv_deplibs_check_method=pass_all - ;; - -solaris*) - lt_cv_deplibs_check_method=pass_all - ;; - -sysv4 | sysv4.3*) - case $host_vendor in - motorola) - lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]' - lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` - ;; - ncr) - lt_cv_deplibs_check_method=pass_all - ;; - sequent) - lt_cv_file_magic_cmd='/bin/file' - lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )' - ;; - sni) - lt_cv_file_magic_cmd='/bin/file' - lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib" - lt_cv_file_magic_test_file=/lib/libc.so - ;; - siemens) - lt_cv_deplibs_check_method=pass_all - ;; - pc) - lt_cv_deplibs_check_method=pass_all - ;; - esac - ;; - -sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) - lt_cv_deplibs_check_method=pass_all - ;; -esac -]) -file_magic_cmd=$lt_cv_file_magic_cmd -deplibs_check_method=$lt_cv_deplibs_check_method -test -z "$deplibs_check_method" && deplibs_check_method=unknown -])# AC_DEPLIBS_CHECK_METHOD - - -# AC_PROG_NM -# ---------- -# 
find the pathname to a BSD-compatible name lister -AC_DEFUN([AC_PROG_NM], -[AC_CACHE_CHECK([for BSD-compatible nm], lt_cv_path_NM, -[if test -n "$NM"; then - # Let the user override the test. - lt_cv_path_NM="$NM" -else - lt_nm_to_check="${ac_tool_prefix}nm" - if test -n "$ac_tool_prefix" && test "$build" = "$host"; then - lt_nm_to_check="$lt_nm_to_check nm" - fi - for lt_tmp_nm in $lt_nm_to_check; do - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - tmp_nm="$ac_dir/$lt_tmp_nm" - if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then - # Check to see if the nm accepts a BSD-compat flag. - # Adding the `sed 1q' prevents false positives on HP-UX, which says: - # nm: unknown option "B" ignored - # Tru64's nm complains that /dev/null is an invalid object file - case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in - */dev/null* | *'Invalid file or object type'*) - lt_cv_path_NM="$tmp_nm -B" - break - ;; - *) - case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in - */dev/null*) - lt_cv_path_NM="$tmp_nm -p" - break - ;; - *) - lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but - continue # so that we can try to find one that supports BSD flags - ;; - esac - ;; - esac - fi - done - IFS="$lt_save_ifs" - done - test -z "$lt_cv_path_NM" && lt_cv_path_NM=nm -fi]) -NM="$lt_cv_path_NM" -])# AC_PROG_NM - - -# AC_CHECK_LIBM -# ------------- -# check for math library -AC_DEFUN([AC_CHECK_LIBM], -[AC_REQUIRE([AC_CANONICAL_HOST])dnl -LIBM= -case $host in -*-*-beos* | *-*-cygwin* | *-*-pw32* | *-*-darwin*) - # These system don't have libm, or don't need it - ;; -*-ncr-sysv4.3*) - AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw") - AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm") - ;; -*) - AC_CHECK_LIB(m, cos, LIBM="-lm") - ;; -esac -])# AC_CHECK_LIBM - - -# AC_LIBLTDL_CONVENIENCE([DIRECTORY]) -# ----------------------------------- -# sets LIBLTDL to the link flags for the libltdl convenience library and -# LTDLINCL to the include flags for the libltdl header and adds -# --enable-ltdl-convenience to the configure arguments. Note that -# AC_CONFIG_SUBDIRS is not called here. If DIRECTORY is not provided, -# it is assumed to be `libltdl'. LIBLTDL will be prefixed with -# '${top_builddir}/' and LTDLINCL will be prefixed with '${top_srcdir}/' -# (note the single quotes!). If your package is not flat and you're not -# using automake, define top_builddir and top_srcdir appropriately in -# the Makefiles. -AC_DEFUN([AC_LIBLTDL_CONVENIENCE], -[AC_BEFORE([$0],[AC_LIBTOOL_SETUP])dnl - case $enable_ltdl_convenience in - no) AC_MSG_ERROR([this package needs a convenience libltdl]) ;; - "") enable_ltdl_convenience=yes - ac_configure_args="$ac_configure_args --enable-ltdl-convenience" ;; - esac - LIBLTDL='${top_builddir}/'ifelse($#,1,[$1],['libltdl'])/libltdlc.la - LTDLINCL='-I${top_srcdir}/'ifelse($#,1,[$1],['libltdl']) - # For backwards non-gettext consistent compatibility... - INCLTDL="$LTDLINCL" -])# AC_LIBLTDL_CONVENIENCE - - -# AC_LIBLTDL_INSTALLABLE([DIRECTORY]) -# ----------------------------------- -# sets LIBLTDL to the link flags for the libltdl installable library and -# LTDLINCL to the include flags for the libltdl header and adds -# --enable-ltdl-install to the configure arguments. Note that -# AC_CONFIG_SUBDIRS is not called here. If DIRECTORY is not provided, -# and an installed libltdl is not found, it is assumed to be `libltdl'. 
-# LIBLTDL will be prefixed with '${top_builddir}/'# and LTDLINCL with -# '${top_srcdir}/' (note the single quotes!). If your package is not -# flat and you're not using automake, define top_builddir and top_srcdir -# appropriately in the Makefiles. -# In the future, this macro may have to be called after AC_PROG_LIBTOOL. -AC_DEFUN([AC_LIBLTDL_INSTALLABLE], -[AC_BEFORE([$0],[AC_LIBTOOL_SETUP])dnl - AC_CHECK_LIB(ltdl, lt_dlinit, - [test x"$enable_ltdl_install" != xyes && enable_ltdl_install=no], - [if test x"$enable_ltdl_install" = xno; then - AC_MSG_WARN([libltdl not installed, but installation disabled]) - else - enable_ltdl_install=yes - fi - ]) - if test x"$enable_ltdl_install" = x"yes"; then - ac_configure_args="$ac_configure_args --enable-ltdl-install" - LIBLTDL='${top_builddir}/'ifelse($#,1,[$1],['libltdl'])/libltdl.la - LTDLINCL='-I${top_srcdir}/'ifelse($#,1,[$1],['libltdl']) - else - ac_configure_args="$ac_configure_args --enable-ltdl-install=no" - LIBLTDL="-lltdl" - LTDLINCL= - fi - # For backwards non-gettext consistent compatibility... - INCLTDL="$LTDLINCL" -])# AC_LIBLTDL_INSTALLABLE - - -# AC_LIBTOOL_CXX -# -------------- -# enable support for C++ libraries -AC_DEFUN([AC_LIBTOOL_CXX], -[AC_REQUIRE([_LT_AC_LANG_CXX]) -])# AC_LIBTOOL_CXX - - -# _LT_AC_LANG_CXX -# --------------- -AC_DEFUN([_LT_AC_LANG_CXX], -[AC_REQUIRE([AC_PROG_CXX]) -AC_REQUIRE([_LT_AC_PROG_CXXCPP]) -_LT_AC_SHELL_INIT([tagnames=${tagnames+${tagnames},}CXX]) -])# _LT_AC_LANG_CXX - -# _LT_AC_PROG_CXXCPP -# ------------------ -AC_DEFUN([_LT_AC_PROG_CXXCPP], -[ -AC_REQUIRE([AC_PROG_CXX]) -if test -n "$CXX" && ( test "X$CXX" != "Xno" && - ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || - (test "X$CXX" != "Xg++"))) ; then - AC_PROG_CXXCPP -fi -])# _LT_AC_PROG_CXXCPP - -# AC_LIBTOOL_F77 -# -------------- -# enable support for Fortran 77 libraries -AC_DEFUN([AC_LIBTOOL_F77], -[AC_REQUIRE([_LT_AC_LANG_F77]) -])# AC_LIBTOOL_F77 - - -# _LT_AC_LANG_F77 -# --------------- -AC_DEFUN([_LT_AC_LANG_F77], -[AC_REQUIRE([AC_PROG_F77]) -_LT_AC_SHELL_INIT([tagnames=${tagnames+${tagnames},}F77]) -])# _LT_AC_LANG_F77 - - -# AC_LIBTOOL_GCJ -# -------------- -# enable support for GCJ libraries -AC_DEFUN([AC_LIBTOOL_GCJ], -[AC_REQUIRE([_LT_AC_LANG_GCJ]) -])# AC_LIBTOOL_GCJ - - -# _LT_AC_LANG_GCJ -# --------------- -AC_DEFUN([_LT_AC_LANG_GCJ], -[AC_PROVIDE_IFELSE([AC_PROG_GCJ],[], - [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],[], - [AC_PROVIDE_IFELSE([LT_AC_PROG_GCJ],[], - [ifdef([AC_PROG_GCJ],[AC_REQUIRE([AC_PROG_GCJ])], - [ifdef([A][M_PROG_GCJ],[AC_REQUIRE([A][M_PROG_GCJ])], - [AC_REQUIRE([A][C_PROG_GCJ_OR_A][M_PROG_GCJ])])])])])]) -_LT_AC_SHELL_INIT([tagnames=${tagnames+${tagnames},}GCJ]) -])# _LT_AC_LANG_GCJ - - -# AC_LIBTOOL_RC -# ------------- -# enable support for Windows resource files -AC_DEFUN([AC_LIBTOOL_RC], -[AC_REQUIRE([LT_AC_PROG_RC]) -_LT_AC_SHELL_INIT([tagnames=${tagnames+${tagnames},}RC]) -])# AC_LIBTOOL_RC - - -# AC_LIBTOOL_LANG_C_CONFIG -# ------------------------ -# Ensure that the configuration vars for the C compiler are -# suitably defined. Those variables are subsequently used by -# AC_LIBTOOL_CONFIG to write the compiler configuration to `libtool'. -AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG], [_LT_AC_LANG_C_CONFIG]) -AC_DEFUN([_LT_AC_LANG_C_CONFIG], -[lt_save_CC="$CC" -AC_LANG_PUSH(C) - -# Source file extension for C test sources. -ac_ext=c - -# Object file extension for compiled C test sources. 
-objext=o -_LT_AC_TAGVAR(objext, $1)=$objext - -# Code to be used in simple compile tests -lt_simple_compile_test_code="int some_variable = 0;" - -# Code to be used in simple link tests -lt_simple_link_test_code='int main(){return(0);}' - -_LT_AC_SYS_COMPILER - -# save warnings/boilerplate of simple test code -_LT_COMPILER_BOILERPLATE -_LT_LINKER_BOILERPLATE - -AC_LIBTOOL_PROG_COMPILER_NO_RTTI($1) -AC_LIBTOOL_PROG_COMPILER_PIC($1) -AC_LIBTOOL_PROG_CC_C_O($1) -AC_LIBTOOL_SYS_HARD_LINK_LOCKS($1) -AC_LIBTOOL_PROG_LD_SHLIBS($1) -AC_LIBTOOL_SYS_DYNAMIC_LINKER($1) -AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH($1) -AC_LIBTOOL_SYS_LIB_STRIP -AC_LIBTOOL_DLOPEN_SELF - -# Report which library types will actually be built -AC_MSG_CHECKING([if libtool supports shared libraries]) -AC_MSG_RESULT([$can_build_shared]) - -AC_MSG_CHECKING([whether to build shared libraries]) -test "$can_build_shared" = "no" && enable_shared=no - -# On AIX, shared libraries and static libraries use the same namespace, and -# are all built from PIC. -case $host_os in -aix3*) - test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - -aix[[4-9]]*) - if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then - test "$enable_shared" = yes && enable_static=no - fi - ;; -esac -AC_MSG_RESULT([$enable_shared]) - -AC_MSG_CHECKING([whether to build static libraries]) -# Make sure either enable_shared or enable_static is yes. -test "$enable_shared" = yes || enable_static=yes -AC_MSG_RESULT([$enable_static]) - -AC_LIBTOOL_CONFIG($1) - -AC_LANG_POP -CC="$lt_save_CC" -])# AC_LIBTOOL_LANG_C_CONFIG - - -# AC_LIBTOOL_LANG_CXX_CONFIG -# -------------------------- -# Ensure that the configuration vars for the C compiler are -# suitably defined. Those variables are subsequently used by -# AC_LIBTOOL_CONFIG to write the compiler configuration to `libtool'. -AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG], [_LT_AC_LANG_CXX_CONFIG(CXX)]) -AC_DEFUN([_LT_AC_LANG_CXX_CONFIG], -[AC_LANG_PUSH(C++) -AC_REQUIRE([AC_PROG_CXX]) -AC_REQUIRE([_LT_AC_PROG_CXXCPP]) - -_LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no -_LT_AC_TAGVAR(allow_undefined_flag, $1)= -_LT_AC_TAGVAR(always_export_symbols, $1)=no -_LT_AC_TAGVAR(archive_expsym_cmds, $1)= -_LT_AC_TAGVAR(export_dynamic_flag_spec, $1)= -_LT_AC_TAGVAR(hardcode_direct, $1)=no -_LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)= -_LT_AC_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= -_LT_AC_TAGVAR(hardcode_libdir_separator, $1)= -_LT_AC_TAGVAR(hardcode_minus_L, $1)=no -_LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=unsupported -_LT_AC_TAGVAR(hardcode_automatic, $1)=no -_LT_AC_TAGVAR(module_cmds, $1)= -_LT_AC_TAGVAR(module_expsym_cmds, $1)= -_LT_AC_TAGVAR(link_all_deplibs, $1)=unknown -_LT_AC_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds -_LT_AC_TAGVAR(no_undefined_flag, $1)= -_LT_AC_TAGVAR(whole_archive_flag_spec, $1)= -_LT_AC_TAGVAR(enable_shared_with_static_runtimes, $1)=no - -# Dependencies to place before and after the object being linked: -_LT_AC_TAGVAR(predep_objects, $1)= -_LT_AC_TAGVAR(postdep_objects, $1)= -_LT_AC_TAGVAR(predeps, $1)= -_LT_AC_TAGVAR(postdeps, $1)= -_LT_AC_TAGVAR(compiler_lib_search_path, $1)= -_LT_AC_TAGVAR(compiler_lib_search_dirs, $1)= - -# Source file extension for C++ test sources. -ac_ext=cpp - -# Object file extension for compiled C++ test sources. 
-objext=o -_LT_AC_TAGVAR(objext, $1)=$objext - -# Code to be used in simple compile tests -lt_simple_compile_test_code="int some_variable = 0;" - -# Code to be used in simple link tests -lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }' - -# ltmain only uses $CC for tagged configurations so make sure $CC is set. -_LT_AC_SYS_COMPILER - -# save warnings/boilerplate of simple test code -_LT_COMPILER_BOILERPLATE -_LT_LINKER_BOILERPLATE - -# Allow CC to be a program name with arguments. -lt_save_CC=$CC -lt_save_LD=$LD -lt_save_GCC=$GCC -GCC=$GXX -lt_save_with_gnu_ld=$with_gnu_ld -lt_save_path_LD=$lt_cv_path_LD -if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then - lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx -else - $as_unset lt_cv_prog_gnu_ld -fi -if test -n "${lt_cv_path_LDCXX+set}"; then - lt_cv_path_LD=$lt_cv_path_LDCXX -else - $as_unset lt_cv_path_LD -fi -test -z "${LDCXX+set}" || LD=$LDCXX -CC=${CXX-"c++"} -compiler=$CC -_LT_AC_TAGVAR(compiler, $1)=$CC -_LT_CC_BASENAME([$compiler]) - -# We don't want -fno-exception wen compiling C++ code, so set the -# no_builtin_flag separately -if test "$GXX" = yes; then - _LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' -else - _LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= -fi - -if test "$GXX" = yes; then - # Set up default GNU C++ configuration - - AC_PROG_LD - - # Check if GNU C++ uses GNU ld as the underlying linker, since the - # archiving commands below assume that GNU ld is being used. - if test "$with_gnu_ld" = yes; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - - # If archive_cmds runs LD, not CC, wlarc should be empty - # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to - # investigate it a little bit more. (MM) - wlarc='${wl}' - - # ancient GNU ld didn't support --whole-archive et. al. - if eval "`$CC -print-prog-name=ld` --help 2>&1" | \ - grep 'no-whole-archive' > /dev/null; then - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)= - fi - else - with_gnu_ld=no - wlarc= - - # A generic and very simple default shared library creation - # command for GNU C++ for the case where it uses the native - # linker, instead of GNU ld. If possible, this setting should - # overridden to take advantage of the native linker features on - # the platform it is being used on. - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' - fi - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. 
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "\-L"' - -else - GXX=no - with_gnu_ld=no - wlarc= -fi - -# PORTME: fill in a description of your system's C++ link characteristics -AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) -_LT_AC_TAGVAR(ld_shlibs, $1)=yes -case $host_os in - aix3*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - aix[[4-9]]*) - if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' - no_entry_flag="" - else - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we - # need to do runtime linking. - case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) - for ld_flag in $LDFLAGS; do - case $ld_flag in - *-brtl*) - aix_use_runtimelinking=yes - break - ;; - esac - done - ;; - esac - - exp_sym_flag='-bexport' - no_entry_flag='-bnoentry' - fi - - # When large executables or shared objects are built, AIX ld can - # have problems creating the table of contents. If linking a library - # or program results in "error TOC overflow" add -mminimal-toc to - # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not - # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. - - _LT_AC_TAGVAR(archive_cmds, $1)='' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - - if test "$GXX" = yes; then - case $host_os in aix4.[[012]]|aix4.[[012]].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ - collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && \ - strings "$collect2name" | grep resolve_lib_name >/dev/null - then - # We have reworked collect2 - : - else - # We have old collect2 - _LT_AC_TAGVAR(hardcode_direct, $1)=unsupported - # It fails to find uninstalled libraries when the uninstalled - # path is not listed in the libpath. Setting hardcode_minus_L - # to unsupported forces relinking - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)= - fi - ;; - esac - shared_flag='-shared' - if test "$aix_use_runtimelinking" = yes; then - shared_flag="$shared_flag "'${wl}-G' - fi - else - # not using gcc - if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else - if test "$aix_use_runtimelinking" = yes; then - shared_flag='${wl}-G' - else - shared_flag='${wl}-bM:SRE' - fi - fi - fi - - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - _LT_AC_TAGVAR(always_export_symbols, $1)=yes - if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - _LT_AC_TAGVAR(allow_undefined_flag, $1)='-berok' - # Determine the default libpath from the value encoded in an empty executable. 
- _LT_AC_SYS_LIBPATH_AIX - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="\$CC"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else - if test "$host_cpu" = ia64; then - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' - _LT_AC_TAGVAR(allow_undefined_flag, $1)="-z nodefs" - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an empty executable. - _LT_AC_SYS_LIBPATH_AIX - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. - _LT_AC_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' - # Exported symbols can be pulled into shared objects from archives - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='$convenience' - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=yes - # This is similar to how AIX traditionally builds its shared libraries. - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; - - beos*) - if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then - _LT_AC_TAGVAR(allow_undefined_flag, $1)=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - - chorus*) - case $cc_basename in - *) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - esac - ;; - - cygwin* | mingw* | pw32*) - # _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, - # as there is no search path for DLLs. - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_AC_TAGVAR(always_export_symbols, $1)=no - _LT_AC_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - - if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - # If the export-symbols file already is a .def file (1st line - # is EXPORTS), use it as is; otherwise, prepend... 
- _LT_AC_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - cp $export_symbols $output_objdir/$soname.def; - else - echo EXPORTS > $output_objdir/$soname.def; - cat $export_symbols >> $output_objdir/$soname.def; - fi~ - $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - darwin* | rhapsody*) - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_AC_TAGVAR(hardcode_direct, $1)=no - _LT_AC_TAGVAR(hardcode_automatic, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=unsupported - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='' - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - _LT_AC_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined" - if test "$GXX" = yes ; then - output_verbose_link_cmd='echo' - _LT_AC_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" - _LT_AC_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" - _LT_AC_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - if test "$lt_cv_apple_cc_single_mod" != "yes"; then - _LT_AC_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}" - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}" - fi - else - case $cc_basename in - xlc*) - output_verbose_link_cmd='echo' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj ${wl}-single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-install_name ${wl}`echo $rpath/$soname` $xlcverstring' - _LT_AC_TAGVAR(module_cmds, $1)='$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags' - # Don't fix this by using the ld -exported_symbols_list flag, it doesn't exist in older darwin lds - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -qmkshrobj ${wl}-single_module $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-install_name ${wl}$rpath/$soname $xlcverstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' - _LT_AC_TAGVAR(module_expsym_cmds, $1)='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC $allow_undefined_flag -o $lib -bundle 
$libobjs $deplibs$compiler_flags~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' - ;; - *) - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - esac - fi - ;; - - dgux*) - case $cc_basename in - ec++*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - ghcx*) - # Green Hills C++ Compiler - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - *) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - esac - ;; - freebsd[[12]]*) - # C++ shared libraries reported to be fairly broken before switch to ELF - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - freebsd-elf*) - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - ;; - freebsd* | dragonfly*) - # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF - # conventions - _LT_AC_TAGVAR(ld_shlibs, $1)=yes - ;; - gnu*) - ;; - hpux9*) - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, - # but as the default - # location of the library. - - case $cc_basename in - CC*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - aCC*) - _LT_AC_TAGVAR(archive_cmds, $1)='$rm $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. - # - # There doesn't appear to be a way to prevent this compiler from - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. - output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | grep "[[-]]L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' - ;; - *) - if test "$GXX" = yes; then - _LT_AC_TAGVAR(archive_cmds, $1)='$rm $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - esac - ;; - hpux10*|hpux11*) - if test $with_gnu_ld = no; then - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - case $host_cpu in - hppa*64*|ia64*) ;; - *) - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - ;; - esac - fi - case $host_cpu in - hppa*64*|ia64*) - _LT_AC_TAGVAR(hardcode_direct, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - *) - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, - # but as the default - # location of the library. 
- ;; - esac - - case $cc_basename in - CC*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - aCC*) - case $host_cpu in - hppa*64*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - ia64*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - *) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - esac - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. - # - # There doesn't appear to be a way to prevent this compiler from - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. - output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | grep "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' - ;; - *) - if test "$GXX" = yes; then - if test $with_gnu_ld = no; then - case $host_cpu in - hppa*64*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - ia64*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - *) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - esac - fi - else - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - esac - ;; - interix[[3-9]]*) - _LT_AC_TAGVAR(hardcode_direct, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
- _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - irix5* | irix6*) - case $cc_basename in - CC*) - # SGI C++ - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' - - # Archives containing C++ object files must be created using - # "CC -ar", where "CC" is the IRIX C++ compiler. This is - # necessary to make sure instantiated templates are included - # in the archive. - _LT_AC_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs' - ;; - *) - if test "$GXX" = yes; then - if test "$with_gnu_ld" = no; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` -o $lib' - fi - fi - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - ;; - esac - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - ;; - linux* | k*bsd*-gnu) - case $cc_basename in - KCC*) - # Kuck and Associates, Inc. (KAI) C++ Compiler - - # KCC will only create a shared library if the output file - # ends with ".so" (or ".sl" for HP-UX), so rename the library - # to its proper name (with version) after linking. - _LT_AC_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. - # - # There doesn't appear to be a way to prevent this compiler from - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. 
- output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | grep "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath,$libdir' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - - # Archives containing C++ object files must be created using - # "CC -Bstatic", where "CC" is the KAI C++ compiler. - _LT_AC_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' - ;; - icpc*) - # Intel C++ - with_gnu_ld=yes - # version 8.0 and above of icpc choke on multiply defined symbols - # if we add $predep_objects and $postdep_objects, however 7.1 and - # earlier do not add the objects themselves. - case `$CC -V 2>&1` in - *"Version 7."*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - ;; - *) # Version 8.0 or newer - tmp_idyn= - case $host_cpu in - ia64*) tmp_idyn=' -i_dynamic';; - esac - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - ;; - esac - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - ;; - pgCC* | pgcpp*) - # Portland Group C++ compiler - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $echo \"$new_convenience\"` ${wl}--no-whole-archive' - ;; - cxx*) - # Compaq C++ - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols' - - runpath_var=LD_RUN_PATH - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. 
- # - # There doesn't appear to be a way to prevent this compiler from - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. - output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "ld"`; templist=`echo $templist | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' - ;; - *) - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) - # Sun C++ 5.9 - _LT_AC_TAGVAR(no_undefined_flag, $1)=' -zdefs' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $echo \"$new_convenience\"` ${wl}--no-whole-archive' - - # Not sure whether something based on - # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 - # would be better. - output_verbose_link_cmd='echo' - - # Archives containing C++ object files must be created using - # "CC -xar", where "CC" is the Sun C++ compiler. This is - # necessary to make sure instantiated templates are included - # in the archive. - _LT_AC_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' - ;; - esac - ;; - esac - ;; - lynxos*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - m88k*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - mvs*) - case $cc_basename in - cxx*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - *) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - esac - ;; - netbsd*) - if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' - wlarc= - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - fi - # Workaround some broken pre-1.5 toolchains - output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' - ;; - openbsd2*) - # C++ shared libraries are fairly broken - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - openbsd*) - if test -f /usr/libexec/ld.so; then - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - _LT_AC_TAGVAR(whole_archive_flag_spec, 
$1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - fi - output_verbose_link_cmd='echo' - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - osf3*) - case $cc_basename in - KCC*) - # Kuck and Associates, Inc. (KAI) C++ Compiler - - # KCC will only create a shared library if the output file - # ends with ".so" (or ".sl" for HP-UX), so rename the library - # to its proper name (with version) after linking. - _LT_AC_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - # Archives containing C++ object files must be created using - # "CC -Bstatic", where "CC" is the KAI C++ compiler. - _LT_AC_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' - - ;; - RCC*) - # Rational C++ 2.4.1 - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - cxx*) - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && echo ${wl}-set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. - # - # There doesn't appear to be a way to prevent this compiler from - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. - output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "ld" | grep -v "ld:"`; templist=`echo $templist | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' - ;; - *) - if test "$GXX" = yes && test "$with_gnu_ld" = no; then - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. - output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "\-L"' - - else - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - esac - ;; - osf4* | osf5*) - case $cc_basename in - KCC*) - # Kuck and Associates, Inc. (KAI) C++ Compiler - - # KCC will only create a shared library if the output file - # ends with ".so" (or ".sl" for HP-UX), so rename the library - # to its proper name (with version) after linking. 
- _LT_AC_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - # Archives containing C++ object files must be created using - # the KAI C++ compiler. - _LT_AC_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' - ;; - RCC*) - # Rational C++ 2.4.1 - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - cxx*) - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ - echo "-hidden">> $lib.exp~ - $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname -Wl,-input -Wl,$lib.exp `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib~ - $rm $lib.exp' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. - # - # There doesn't appear to be a way to prevent this compiler from - # explicitly linking system object files so we need to strip them - # from the output so that they don't get included in the library - # dependencies. - output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "ld" | grep -v "ld:"`; templist=`echo $templist | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; echo $list' - ;; - *) - if test "$GXX" = yes && test "$with_gnu_ld" = no; then - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. 
- output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep "\-L"' - - else - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - esac - ;; - psos*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - sunos4*) - case $cc_basename in - CC*) - # Sun C++ 4.x - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - lcc*) - # Lucid - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - *) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - esac - ;; - solaris*) - case $cc_basename in - CC*) - # Sun C++ 4.2, 5.x and Centerline C++ - _LT_AC_TAGVAR(archive_cmds_need_lc,$1)=yes - _LT_AC_TAGVAR(no_undefined_flag, $1)=' -zdefs' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ - $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$rm $lib.exp' - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - case $host_os in - solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) - # The compiler driver will combine and reorder linker options, - # but understands `-z linker_flag'. - # Supported since Solaris 2.6 (maybe 2.5.1?) - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' - ;; - esac - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - - output_verbose_link_cmd='echo' - - # Archives containing C++ object files must be created using - # "CC -xar", where "CC" is the Sun C++ compiler. This is - # necessary to make sure instantiated templates are included - # in the archive. - _LT_AC_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' - ;; - gcx*) - # Green Hills C++ Compiler - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - - # The C++ compiler must be used to create the archive. - _LT_AC_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs' - ;; - *) - # GNU C++ compiler with Solaris linker - if test "$GXX" = yes && test "$with_gnu_ld" = no; then - _LT_AC_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' - if $CC --version | grep -v '^2\.7' > /dev/null; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ - $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$rm $lib.exp' - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. - output_verbose_link_cmd="$CC -shared $CFLAGS -v conftest.$objext 2>&1 | grep \"\-L\"" - else - # g++ 2.7 appears to require `-G' NOT `-shared' on this - # platform. 
- _LT_AC_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ - $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$rm $lib.exp' - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when - # linking a shared library. - output_verbose_link_cmd="$CC -G $CFLAGS -v conftest.$objext 2>&1 | grep \"\-L\"" - fi - - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir' - case $host_os in - solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - ;; - esac - fi - ;; - esac - ;; - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) - _LT_AC_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - runpath_var='LD_RUN_PATH' - - case $cc_basename in - CC*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - ;; - sysv5* | sco3.2v5* | sco5v6*) - # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. - # For security reasons, it is highly recommended that you always - # use absolute paths for naming shared libraries, and exclude the - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - # So that behaviour is only enabled if SCOABSPATH is set to a - # non-empty value in the environment. Most likely only useful for - # creating official distributions of packages. - # This is a hack until libtool officially supports absolute path - # names for shared libraries. 
- _LT_AC_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' - _LT_AC_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - - case $cc_basename in - CC*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - ;; - tandem*) - case $cc_basename in - NCC*) - # NonStop-UX NCC 3.20 - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - *) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - esac - ;; - vxworks*) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - *) - # FIXME: insert proper C++ library support - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; -esac -AC_MSG_RESULT([$_LT_AC_TAGVAR(ld_shlibs, $1)]) -test "$_LT_AC_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no - -_LT_AC_TAGVAR(GCC, $1)="$GXX" -_LT_AC_TAGVAR(LD, $1)="$LD" - -AC_LIBTOOL_POSTDEP_PREDEP($1) -AC_LIBTOOL_PROG_COMPILER_PIC($1) -AC_LIBTOOL_PROG_CC_C_O($1) -AC_LIBTOOL_SYS_HARD_LINK_LOCKS($1) -AC_LIBTOOL_PROG_LD_SHLIBS($1) -AC_LIBTOOL_SYS_DYNAMIC_LINKER($1) -AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH($1) - -AC_LIBTOOL_CONFIG($1) - -AC_LANG_POP -CC=$lt_save_CC -LDCXX=$LD -LD=$lt_save_LD -GCC=$lt_save_GCC -with_gnu_ldcxx=$with_gnu_ld -with_gnu_ld=$lt_save_with_gnu_ld -lt_cv_path_LDCXX=$lt_cv_path_LD -lt_cv_path_LD=$lt_save_path_LD -lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld -lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld -])# AC_LIBTOOL_LANG_CXX_CONFIG - -# AC_LIBTOOL_POSTDEP_PREDEP([TAGNAME]) -# ------------------------------------ -# Figure out "hidden" library dependencies from verbose -# compiler output when linking a shared library. -# Parse the compiler output and extract the necessary -# objects, libraries and library flags. -AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -dnl we can't use the lt_simple_compile_test_code here, -dnl because it contains code intended for an executable, -dnl not a library. It's possible we should let each -dnl tag define a new lt_????_link_test_code variable, -dnl but it's only used here... -ifelse([$1],[],[cat > conftest.$ac_ext <<EOF -int a; -void foo (void) { a = 0; } -EOF -],[$1],[CXX],[cat > conftest.$ac_ext <<EOF -class Foo -{ -public: - Foo (void) { a = 0; } -private: - int a; -}; -EOF -],[$1],[F77],[cat > conftest.$ac_ext <<EOF - subroutine foo - implicit none - integer*4 a - a=0 - return - end -EOF -],[$1],[GCJ],[cat > conftest.$ac_ext <<EOF -public class foo { - private int a; - public void bar (void) { - a = 0; - } -}; -EOF -]) -dnl Parse the compiler output and extract the necessary -dnl objects, libraries and library flags. 
-if AC_TRY_EVAL(ac_compile); then - # Parse the compiler output and extract the necessary - # objects, libraries and library flags. - - # Sentinel used to keep track of whether or not we are before - # the conftest object file. - pre_test_object_deps_done=no - - # The `*' in the case matches for architectures that use `case' in - # $output_verbose_cmd can trigger glob expansion during the loop - # eval without this substitution. - output_verbose_link_cmd=`$echo "X$output_verbose_link_cmd" | $Xsed -e "$no_glob_subst"` - - for p in `eval $output_verbose_link_cmd`; do - case $p in - - -L* | -R* | -l*) - # Some compilers place space between "-{L,R}" and the path. - # Remove the space. - if test $p = "-L" \ - || test $p = "-R"; then - prev=$p - continue - else - prev= - fi - - if test "$pre_test_object_deps_done" = no; then - case $p in - -L* | -R*) - # Internal compiler library paths should come after those - # provided the user. The postdeps already come after the - # user supplied libs so there is no need to process them. - if test -z "$_LT_AC_TAGVAR(compiler_lib_search_path, $1)"; then - _LT_AC_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}" - else - _LT_AC_TAGVAR(compiler_lib_search_path, $1)="${_LT_AC_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}" - fi - ;; - # The "-l" case would never come before the object being - # linked, so don't bother handling this case. - esac - else - if test -z "$_LT_AC_TAGVAR(postdeps, $1)"; then - _LT_AC_TAGVAR(postdeps, $1)="${prev}${p}" - else - _LT_AC_TAGVAR(postdeps, $1)="${_LT_AC_TAGVAR(postdeps, $1)} ${prev}${p}" - fi - fi - ;; - - *.$objext) - # This assumes that the test object file only shows up - # once in the compiler output. - if test "$p" = "conftest.$objext"; then - pre_test_object_deps_done=yes - continue - fi - - if test "$pre_test_object_deps_done" = no; then - if test -z "$_LT_AC_TAGVAR(predep_objects, $1)"; then - _LT_AC_TAGVAR(predep_objects, $1)="$p" - else - _LT_AC_TAGVAR(predep_objects, $1)="$_LT_AC_TAGVAR(predep_objects, $1) $p" - fi - else - if test -z "$_LT_AC_TAGVAR(postdep_objects, $1)"; then - _LT_AC_TAGVAR(postdep_objects, $1)="$p" - else - _LT_AC_TAGVAR(postdep_objects, $1)="$_LT_AC_TAGVAR(postdep_objects, $1) $p" - fi - fi - ;; - - *) ;; # Ignore the rest. - - esac - done - - # Clean up. - rm -f a.out a.exe -else - echo "libtool.m4: error: problem compiling $1 test program" -fi - -$rm -f confest.$objext - -_LT_AC_TAGVAR(compiler_lib_search_dirs, $1)= -if test -n "$_LT_AC_TAGVAR(compiler_lib_search_path, $1)"; then - _LT_AC_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_AC_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` -fi - -# PORTME: override above test on systems where it is broken -ifelse([$1],[CXX], -[case $host_os in -interix[[3-9]]*) - # Interix 3.5 installs completely hosed .la files for C++, so rather than - # hack all around it, let's just trust "g++" to DTRT. - _LT_AC_TAGVAR(predep_objects,$1)= - _LT_AC_TAGVAR(postdep_objects,$1)= - _LT_AC_TAGVAR(postdeps,$1)= - ;; - -linux*) - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) - # Sun C++ 5.9 - # - # The more standards-conforming stlport4 library is - # incompatible with the Cstd library. Avoid specifying - # it if it's in CXXFLAGS. Ignore libCrun as - # -library=stlport4 depends on it. 
- case " $CXX $CXXFLAGS " in - *" -library=stlport4 "*) - solaris_use_stlport4=yes - ;; - esac - if test "$solaris_use_stlport4" != yes; then - _LT_AC_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' - fi - ;; - esac - ;; - -solaris*) - case $cc_basename in - CC*) - # The more standards-conforming stlport4 library is - # incompatible with the Cstd library. Avoid specifying - # it if it's in CXXFLAGS. Ignore libCrun as - # -library=stlport4 depends on it. - case " $CXX $CXXFLAGS " in - *" -library=stlport4 "*) - solaris_use_stlport4=yes - ;; - esac - - # Adding this requires a known-good setup of shared libraries for - # Sun compiler versions before 5.6, else PIC objects from an old - # archive will be linked into the output, leading to subtle bugs. - if test "$solaris_use_stlport4" != yes; then - _LT_AC_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' - fi - ;; - esac - ;; -esac -]) -case " $_LT_AC_TAGVAR(postdeps, $1) " in -*" -lc "*) _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no ;; -esac -])# AC_LIBTOOL_POSTDEP_PREDEP - -# AC_LIBTOOL_LANG_F77_CONFIG -# -------------------------- -# Ensure that the configuration vars for the C compiler are -# suitably defined. Those variables are subsequently used by -# AC_LIBTOOL_CONFIG to write the compiler configuration to `libtool'. -AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG], [_LT_AC_LANG_F77_CONFIG(F77)]) -AC_DEFUN([_LT_AC_LANG_F77_CONFIG], -[AC_REQUIRE([AC_PROG_F77]) -AC_LANG_PUSH(Fortran 77) - -_LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no -_LT_AC_TAGVAR(allow_undefined_flag, $1)= -_LT_AC_TAGVAR(always_export_symbols, $1)=no -_LT_AC_TAGVAR(archive_expsym_cmds, $1)= -_LT_AC_TAGVAR(export_dynamic_flag_spec, $1)= -_LT_AC_TAGVAR(hardcode_direct, $1)=no -_LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)= -_LT_AC_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= -_LT_AC_TAGVAR(hardcode_libdir_separator, $1)= -_LT_AC_TAGVAR(hardcode_minus_L, $1)=no -_LT_AC_TAGVAR(hardcode_automatic, $1)=no -_LT_AC_TAGVAR(module_cmds, $1)= -_LT_AC_TAGVAR(module_expsym_cmds, $1)= -_LT_AC_TAGVAR(link_all_deplibs, $1)=unknown -_LT_AC_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds -_LT_AC_TAGVAR(no_undefined_flag, $1)= -_LT_AC_TAGVAR(whole_archive_flag_spec, $1)= -_LT_AC_TAGVAR(enable_shared_with_static_runtimes, $1)=no - -# Source file extension for f77 test sources. -ac_ext=f - -# Object file extension for compiled f77 test sources. -objext=o -_LT_AC_TAGVAR(objext, $1)=$objext - -# Code to be used in simple compile tests -lt_simple_compile_test_code="\ - subroutine t - return - end -" - -# Code to be used in simple link tests -lt_simple_link_test_code="\ - program t - end -" - -# ltmain only uses $CC for tagged configurations so make sure $CC is set. -_LT_AC_SYS_COMPILER - -# save warnings/boilerplate of simple test code -_LT_COMPILER_BOILERPLATE -_LT_LINKER_BOILERPLATE - -# Allow CC to be a program name with arguments. -lt_save_CC="$CC" -CC=${F77-"f77"} -compiler=$CC -_LT_AC_TAGVAR(compiler, $1)=$CC -_LT_CC_BASENAME([$compiler]) - -AC_MSG_CHECKING([if libtool supports shared libraries]) -AC_MSG_RESULT([$can_build_shared]) - -AC_MSG_CHECKING([whether to build shared libraries]) -test "$can_build_shared" = "no" && enable_shared=no - -# On AIX, shared libraries and static libraries use the same namespace, and -# are all built from PIC. 
-case $host_os in -aix3*) - test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; -aix[[4-9]]*) - if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then - test "$enable_shared" = yes && enable_static=no - fi - ;; -esac -AC_MSG_RESULT([$enable_shared]) - -AC_MSG_CHECKING([whether to build static libraries]) -# Make sure either enable_shared or enable_static is yes. -test "$enable_shared" = yes || enable_static=yes -AC_MSG_RESULT([$enable_static]) - -_LT_AC_TAGVAR(GCC, $1)="$G77" -_LT_AC_TAGVAR(LD, $1)="$LD" - -AC_LIBTOOL_PROG_COMPILER_PIC($1) -AC_LIBTOOL_PROG_CC_C_O($1) -AC_LIBTOOL_SYS_HARD_LINK_LOCKS($1) -AC_LIBTOOL_PROG_LD_SHLIBS($1) -AC_LIBTOOL_SYS_DYNAMIC_LINKER($1) -AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH($1) - -AC_LIBTOOL_CONFIG($1) - -AC_LANG_POP -CC="$lt_save_CC" -])# AC_LIBTOOL_LANG_F77_CONFIG - - -# AC_LIBTOOL_LANG_GCJ_CONFIG -# -------------------------- -# Ensure that the configuration vars for the C compiler are -# suitably defined. Those variables are subsequently used by -# AC_LIBTOOL_CONFIG to write the compiler configuration to `libtool'. -AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG], [_LT_AC_LANG_GCJ_CONFIG(GCJ)]) -AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG], -[AC_LANG_SAVE - -# Source file extension for Java test sources. -ac_ext=java - -# Object file extension for compiled Java test sources. -objext=o -_LT_AC_TAGVAR(objext, $1)=$objext - -# Code to be used in simple compile tests -lt_simple_compile_test_code="class foo {}" - -# Code to be used in simple link tests -lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }' - -# ltmain only uses $CC for tagged configurations so make sure $CC is set. -_LT_AC_SYS_COMPILER - -# save warnings/boilerplate of simple test code -_LT_COMPILER_BOILERPLATE -_LT_LINKER_BOILERPLATE - -# Allow CC to be a program name with arguments. -lt_save_CC="$CC" -CC=${GCJ-"gcj"} -compiler=$CC -_LT_AC_TAGVAR(compiler, $1)=$CC -_LT_CC_BASENAME([$compiler]) - -# GCJ did not exist at the time GCC didn't implicitly link libc in. -_LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - -_LT_AC_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds - -AC_LIBTOOL_PROG_COMPILER_NO_RTTI($1) -AC_LIBTOOL_PROG_COMPILER_PIC($1) -AC_LIBTOOL_PROG_CC_C_O($1) -AC_LIBTOOL_SYS_HARD_LINK_LOCKS($1) -AC_LIBTOOL_PROG_LD_SHLIBS($1) -AC_LIBTOOL_SYS_DYNAMIC_LINKER($1) -AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH($1) - -AC_LIBTOOL_CONFIG($1) - -AC_LANG_RESTORE -CC="$lt_save_CC" -])# AC_LIBTOOL_LANG_GCJ_CONFIG - - -# AC_LIBTOOL_LANG_RC_CONFIG -# ------------------------- -# Ensure that the configuration vars for the Windows resource compiler are -# suitably defined. Those variables are subsequently used by -# AC_LIBTOOL_CONFIG to write the compiler configuration to `libtool'. -AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG], [_LT_AC_LANG_RC_CONFIG(RC)]) -AC_DEFUN([_LT_AC_LANG_RC_CONFIG], -[AC_LANG_SAVE - -# Source file extension for RC test sources. -ac_ext=rc - -# Object file extension for compiled RC test sources. -objext=o -_LT_AC_TAGVAR(objext, $1)=$objext - -# Code to be used in simple compile tests -lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }' - -# Code to be used in simple link tests -lt_simple_link_test_code="$lt_simple_compile_test_code" - -# ltmain only uses $CC for tagged configurations so make sure $CC is set. 
-_LT_AC_SYS_COMPILER - -# save warnings/boilerplate of simple test code -_LT_COMPILER_BOILERPLATE -_LT_LINKER_BOILERPLATE - -# Allow CC to be a program name with arguments. -lt_save_CC="$CC" -CC=${RC-"windres"} -compiler=$CC -_LT_AC_TAGVAR(compiler, $1)=$CC -_LT_CC_BASENAME([$compiler]) -_LT_AC_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes - -AC_LIBTOOL_CONFIG($1) - -AC_LANG_RESTORE -CC="$lt_save_CC" -])# AC_LIBTOOL_LANG_RC_CONFIG - - -# AC_LIBTOOL_CONFIG([TAGNAME]) -# ---------------------------- -# If TAGNAME is not passed, then create an initial libtool script -# with a default configuration from the untagged config vars. Otherwise -# add code to config.status for appending the configuration named by -# TAGNAME from the matching tagged config vars. -AC_DEFUN([AC_LIBTOOL_CONFIG], -[# The else clause should only fire when bootstrapping the -# libtool distribution, otherwise you forgot to ship ltmain.sh -# with your package, and you will get complaints that there are -# no rules to generate ltmain.sh. -if test -f "$ltmain"; then - # See if we are running on zsh, and set the options which allow our commands through - # without removal of \ escapes. - if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - # Now quote all the things that may contain metacharacters while being - # careful not to overquote the AC_SUBSTed values. We take copies of the - # variables and quote the copies for generation of the libtool script. - for var in echo old_CC old_CFLAGS AR AR_FLAGS EGREP RANLIB LN_S LTCC LTCFLAGS NM \ - SED SHELL STRIP \ - libname_spec library_names_spec soname_spec extract_expsyms_cmds \ - old_striplib striplib file_magic_cmd finish_cmds finish_eval \ - deplibs_check_method reload_flag reload_cmds need_locks \ - lt_cv_sys_global_symbol_pipe lt_cv_sys_global_symbol_to_cdecl \ - lt_cv_sys_global_symbol_to_c_name_address \ - sys_lib_search_path_spec sys_lib_dlsearch_path_spec \ - old_postinstall_cmds old_postuninstall_cmds \ - _LT_AC_TAGVAR(compiler, $1) \ - _LT_AC_TAGVAR(CC, $1) \ - _LT_AC_TAGVAR(LD, $1) \ - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1) \ - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1) \ - _LT_AC_TAGVAR(lt_prog_compiler_static, $1) \ - _LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) \ - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1) \ - _LT_AC_TAGVAR(thread_safe_flag_spec, $1) \ - _LT_AC_TAGVAR(whole_archive_flag_spec, $1) \ - _LT_AC_TAGVAR(enable_shared_with_static_runtimes, $1) \ - _LT_AC_TAGVAR(old_archive_cmds, $1) \ - _LT_AC_TAGVAR(old_archive_from_new_cmds, $1) \ - _LT_AC_TAGVAR(predep_objects, $1) \ - _LT_AC_TAGVAR(postdep_objects, $1) \ - _LT_AC_TAGVAR(predeps, $1) \ - _LT_AC_TAGVAR(postdeps, $1) \ - _LT_AC_TAGVAR(compiler_lib_search_path, $1) \ - _LT_AC_TAGVAR(compiler_lib_search_dirs, $1) \ - _LT_AC_TAGVAR(archive_cmds, $1) \ - _LT_AC_TAGVAR(archive_expsym_cmds, $1) \ - _LT_AC_TAGVAR(postinstall_cmds, $1) \ - _LT_AC_TAGVAR(postuninstall_cmds, $1) \ - _LT_AC_TAGVAR(old_archive_from_expsyms_cmds, $1) \ - _LT_AC_TAGVAR(allow_undefined_flag, $1) \ - _LT_AC_TAGVAR(no_undefined_flag, $1) \ - _LT_AC_TAGVAR(export_symbols_cmds, $1) \ - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1) \ - _LT_AC_TAGVAR(hardcode_libdir_flag_spec_ld, $1) \ - _LT_AC_TAGVAR(hardcode_libdir_separator, $1) \ - _LT_AC_TAGVAR(hardcode_automatic, $1) \ - _LT_AC_TAGVAR(module_cmds, $1) \ - _LT_AC_TAGVAR(module_expsym_cmds, $1) \ - _LT_AC_TAGVAR(lt_cv_prog_compiler_c_o, $1) \ - _LT_AC_TAGVAR(fix_srcfile_path, $1) \ - _LT_AC_TAGVAR(exclude_expsyms, $1) \ - _LT_AC_TAGVAR(include_expsyms, $1); do - - case $var 
in - _LT_AC_TAGVAR(old_archive_cmds, $1) | \ - _LT_AC_TAGVAR(old_archive_from_new_cmds, $1) | \ - _LT_AC_TAGVAR(archive_cmds, $1) | \ - _LT_AC_TAGVAR(archive_expsym_cmds, $1) | \ - _LT_AC_TAGVAR(module_cmds, $1) | \ - _LT_AC_TAGVAR(module_expsym_cmds, $1) | \ - _LT_AC_TAGVAR(old_archive_from_expsyms_cmds, $1) | \ - _LT_AC_TAGVAR(export_symbols_cmds, $1) | \ - extract_expsyms_cmds | reload_cmds | finish_cmds | \ - postinstall_cmds | postuninstall_cmds | \ - old_postinstall_cmds | old_postuninstall_cmds | \ - sys_lib_search_path_spec | sys_lib_dlsearch_path_spec) - # Double-quote double-evaled strings. - eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$double_quote_subst\" -e \"\$sed_quote_subst\" -e \"\$delay_variable_subst\"\`\\\"" - ;; - *) - eval "lt_$var=\\\"\`\$echo \"X\$$var\" | \$Xsed -e \"\$sed_quote_subst\"\`\\\"" - ;; - esac - done - - case $lt_echo in - *'\[$]0 --fallback-echo"') - lt_echo=`$echo "X$lt_echo" | $Xsed -e 's/\\\\\\\[$]0 --fallback-echo"[$]/[$]0 --fallback-echo"/'` - ;; - esac - -ifelse([$1], [], - [cfgfile="${ofile}T" - trap "$rm \"$cfgfile\"; exit 1" 1 2 15 - $rm -f "$cfgfile" - AC_MSG_NOTICE([creating $ofile])], - [cfgfile="$ofile"]) - - cat <<__EOF__ >> "$cfgfile" -ifelse([$1], [], -[#! $SHELL - -# `$echo "$cfgfile" | sed 's%^.*/%%'` - Provide generalized library-building support services. -# Generated automatically by $PROGRAM (GNU $PACKAGE $VERSION$TIMESTAMP) -# NOTE: Changes made to this file will be lost: look at ltmain.sh. -# -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 -# Free Software Foundation, Inc. -# -# This file is part of GNU Libtool: -# Originally by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996 -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - -# A sed program that does not truncate output. -SED=$lt_SED - -# Sed that helps us avoid accidentally triggering echo(1) options like -n. -Xsed="$SED -e 1s/^X//" - -# The HP-UX ksh and POSIX shell print the target directory to stdout -# if CDPATH is set. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -# The names of the tagged configurations supported by this script. -available_tags= - -# ### BEGIN LIBTOOL CONFIG], -[# ### BEGIN LIBTOOL TAG CONFIG: $tagname]) - -# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: - -# Shell to use when invoking shell scripts. -SHELL=$lt_SHELL - -# Whether or not to build shared libraries. -build_libtool_libs=$enable_shared - -# Whether or not to build static libraries. 
-build_old_libs=$enable_static - -# Whether or not to add -lc for building shared libraries. -build_libtool_need_lc=$_LT_AC_TAGVAR(archive_cmds_need_lc, $1) - -# Whether or not to disallow shared libs when runtime libs are static -allow_libtool_libs_with_static_runtimes=$_LT_AC_TAGVAR(enable_shared_with_static_runtimes, $1) - -# Whether or not to optimize for fast installation. -fast_install=$enable_fast_install - -# The host system. -host_alias=$host_alias -host=$host -host_os=$host_os - -# The build system. -build_alias=$build_alias -build=$build -build_os=$build_os - -# An echo program that does not interpret backslashes. -echo=$lt_echo - -# The archiver. -AR=$lt_AR -AR_FLAGS=$lt_AR_FLAGS - -# A C compiler. -LTCC=$lt_LTCC - -# LTCC compiler flags. -LTCFLAGS=$lt_LTCFLAGS - -# A language-specific compiler. -CC=$lt_[]_LT_AC_TAGVAR(compiler, $1) - -# Is the compiler the GNU C compiler? -with_gcc=$_LT_AC_TAGVAR(GCC, $1) - -# An ERE matcher. -EGREP=$lt_EGREP - -# The linker used to build libraries. -LD=$lt_[]_LT_AC_TAGVAR(LD, $1) - -# Whether we need hard or soft links. -LN_S=$lt_LN_S - -# A BSD-compatible nm program. -NM=$lt_NM - -# A symbol stripping program -STRIP=$lt_STRIP - -# Used to examine libraries when file_magic_cmd begins "file" -MAGIC_CMD=$MAGIC_CMD - -# Used on cygwin: DLL creation program. -DLLTOOL="$DLLTOOL" - -# Used on cygwin: object dumper. -OBJDUMP="$OBJDUMP" - -# Used on cygwin: assembler. -AS="$AS" - -# The name of the directory that contains temporary libtool files. -objdir=$objdir - -# How to create reloadable object files. -reload_flag=$lt_reload_flag -reload_cmds=$lt_reload_cmds - -# How to pass a linker flag through the compiler. -wl=$lt_[]_LT_AC_TAGVAR(lt_prog_compiler_wl, $1) - -# Object file suffix (normally "o"). -objext="$ac_objext" - -# Old archive suffix (normally "a"). -libext="$libext" - -# Shared library suffix (normally ".so"). -shrext_cmds='$shrext_cmds' - -# Executable file suffix (normally ""). -exeext="$exeext" - -# Additional compiler flags for building library objects. -pic_flag=$lt_[]_LT_AC_TAGVAR(lt_prog_compiler_pic, $1) -pic_mode=$pic_mode - -# What is the maximum length of a command? -max_cmd_len=$lt_cv_sys_max_cmd_len - -# Does compiler simultaneously support -c and -o options? -compiler_c_o=$lt_[]_LT_AC_TAGVAR(lt_cv_prog_compiler_c_o, $1) - -# Must we lock files when doing compilation? -need_locks=$lt_need_locks - -# Do we need the lib prefix for modules? -need_lib_prefix=$need_lib_prefix - -# Do we need a version for libraries? -need_version=$need_version - -# Whether dlopen is supported. -dlopen_support=$enable_dlopen - -# Whether dlopen of programs is supported. -dlopen_self=$enable_dlopen_self - -# Whether dlopen of statically linked programs is supported. -dlopen_self_static=$enable_dlopen_self_static - -# Compiler flag to prevent dynamic linking. -link_static_flag=$lt_[]_LT_AC_TAGVAR(lt_prog_compiler_static, $1) - -# Compiler flag to turn off builtin functions. -no_builtin_flag=$lt_[]_LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) - -# Compiler flag to allow reflexive dlopens. -export_dynamic_flag_spec=$lt_[]_LT_AC_TAGVAR(export_dynamic_flag_spec, $1) - -# Compiler flag to generate shared objects directly from archives. -whole_archive_flag_spec=$lt_[]_LT_AC_TAGVAR(whole_archive_flag_spec, $1) - -# Compiler flag to generate thread-safe objects. -thread_safe_flag_spec=$lt_[]_LT_AC_TAGVAR(thread_safe_flag_spec, $1) - -# Library versioning type. -version_type=$version_type - -# Format of library name prefix. 
-libname_spec=$lt_libname_spec - -# List of archive names. First name is the real one, the rest are links. -# The last name is the one that the linker finds with -lNAME. -library_names_spec=$lt_library_names_spec - -# The coded name of the library, if different from the real name. -soname_spec=$lt_soname_spec - -# Commands used to build and install an old-style archive. -RANLIB=$lt_RANLIB -old_archive_cmds=$lt_[]_LT_AC_TAGVAR(old_archive_cmds, $1) -old_postinstall_cmds=$lt_old_postinstall_cmds -old_postuninstall_cmds=$lt_old_postuninstall_cmds - -# Create an old-style archive from a shared archive. -old_archive_from_new_cmds=$lt_[]_LT_AC_TAGVAR(old_archive_from_new_cmds, $1) - -# Create a temporary old-style archive to link instead of a shared archive. -old_archive_from_expsyms_cmds=$lt_[]_LT_AC_TAGVAR(old_archive_from_expsyms_cmds, $1) - -# Commands used to build and install a shared archive. -archive_cmds=$lt_[]_LT_AC_TAGVAR(archive_cmds, $1) -archive_expsym_cmds=$lt_[]_LT_AC_TAGVAR(archive_expsym_cmds, $1) -postinstall_cmds=$lt_postinstall_cmds -postuninstall_cmds=$lt_postuninstall_cmds - -# Commands used to build a loadable module (assumed same as above if empty) -module_cmds=$lt_[]_LT_AC_TAGVAR(module_cmds, $1) -module_expsym_cmds=$lt_[]_LT_AC_TAGVAR(module_expsym_cmds, $1) - -# Commands to strip libraries. -old_striplib=$lt_old_striplib -striplib=$lt_striplib - -# Dependencies to place before the objects being linked to create a -# shared library. -predep_objects=$lt_[]_LT_AC_TAGVAR(predep_objects, $1) - -# Dependencies to place after the objects being linked to create a -# shared library. -postdep_objects=$lt_[]_LT_AC_TAGVAR(postdep_objects, $1) - -# Dependencies to place before the objects being linked to create a -# shared library. -predeps=$lt_[]_LT_AC_TAGVAR(predeps, $1) - -# Dependencies to place after the objects being linked to create a -# shared library. -postdeps=$lt_[]_LT_AC_TAGVAR(postdeps, $1) - -# The directories searched by this compiler when creating a shared -# library -compiler_lib_search_dirs=$lt_[]_LT_AC_TAGVAR(compiler_lib_search_dirs, $1) - -# The library search path used internally by the compiler when linking -# a shared library. -compiler_lib_search_path=$lt_[]_LT_AC_TAGVAR(compiler_lib_search_path, $1) - -# Method to check whether dependent libraries are shared objects. -deplibs_check_method=$lt_deplibs_check_method - -# Command to use when deplibs_check_method == file_magic. -file_magic_cmd=$lt_file_magic_cmd - -# Flag that allows shared libraries with undefined symbols to be built. -allow_undefined_flag=$lt_[]_LT_AC_TAGVAR(allow_undefined_flag, $1) - -# Flag that forces no undefined symbols. -no_undefined_flag=$lt_[]_LT_AC_TAGVAR(no_undefined_flag, $1) - -# Commands used to finish a libtool library installation in a directory. -finish_cmds=$lt_finish_cmds - -# Same as above, but a single script fragment to be evaled but not shown. -finish_eval=$lt_finish_eval - -# Take the output of nm and produce a listing of raw symbols and C names. -global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe - -# Transform the output of nm in a proper C declaration -global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl - -# Transform the output of nm in a C name address pair -global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - -# This is the shared library runtime path variable. -runpath_var=$runpath_var - -# This is the shared library path variable. 
-shlibpath_var=$shlibpath_var - -# Is shlibpath searched before the hard-coded library search path? -shlibpath_overrides_runpath=$shlibpath_overrides_runpath - -# How to hardcode a shared library path into an executable. -hardcode_action=$_LT_AC_TAGVAR(hardcode_action, $1) - -# Whether we should hardcode library paths into libraries. -hardcode_into_libs=$hardcode_into_libs - -# Flag to hardcode \$libdir into a binary during linking. -# This must work even if \$libdir does not exist. -hardcode_libdir_flag_spec=$lt_[]_LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1) - -# If ld is used when linking, flag to hardcode \$libdir into -# a binary during linking. This must work even if \$libdir does -# not exist. -hardcode_libdir_flag_spec_ld=$lt_[]_LT_AC_TAGVAR(hardcode_libdir_flag_spec_ld, $1) - -# Whether we need a single -rpath flag with a separated argument. -hardcode_libdir_separator=$lt_[]_LT_AC_TAGVAR(hardcode_libdir_separator, $1) - -# Set to yes if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the -# resulting binary. -hardcode_direct=$_LT_AC_TAGVAR(hardcode_direct, $1) - -# Set to yes if using the -LDIR flag during linking hardcodes DIR into the -# resulting binary. -hardcode_minus_L=$_LT_AC_TAGVAR(hardcode_minus_L, $1) - -# Set to yes if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into -# the resulting binary. -hardcode_shlibpath_var=$_LT_AC_TAGVAR(hardcode_shlibpath_var, $1) - -# Set to yes if building a shared library automatically hardcodes DIR into the library -# and all subsequent libraries and executables linked against it. -hardcode_automatic=$_LT_AC_TAGVAR(hardcode_automatic, $1) - -# Variables whose values should be saved in libtool wrapper scripts and -# restored at relink time. -variables_saved_for_relink="$variables_saved_for_relink" - -# Whether libtool must link a program against all its dependency libraries. -link_all_deplibs=$_LT_AC_TAGVAR(link_all_deplibs, $1) - -# Compile-time system search path for libraries -sys_lib_search_path_spec=$lt_sys_lib_search_path_spec - -# Run-time system search path for libraries -sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec - -# Fix the shell variable \$srcfile for the compiler. -fix_srcfile_path=$lt_fix_srcfile_path - -# Set to yes if exported symbols are required. -always_export_symbols=$_LT_AC_TAGVAR(always_export_symbols, $1) - -# The commands to list exported symbols. -export_symbols_cmds=$lt_[]_LT_AC_TAGVAR(export_symbols_cmds, $1) - -# The commands to extract the exported symbol list from a shared archive. -extract_expsyms_cmds=$lt_extract_expsyms_cmds - -# Symbols that should not be listed in the preloaded symbols. -exclude_expsyms=$lt_[]_LT_AC_TAGVAR(exclude_expsyms, $1) - -# Symbols that must always be exported. -include_expsyms=$lt_[]_LT_AC_TAGVAR(include_expsyms, $1) - -ifelse([$1],[], -[# ### END LIBTOOL CONFIG], -[# ### END LIBTOOL TAG CONFIG: $tagname]) - -__EOF__ - -ifelse([$1],[], [ - case $host_os in - aix3*) - cat <<\EOF >> "$cfgfile" - -# AIX sometimes has problems with the GCC collect2 program. For some -# reason, if we set the COLLECT_NAMES environment variable, the problems -# vanish in a puff of smoke. -if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES -fi -EOF - ;; - esac - - # We use sed instead of cat because bash on DJGPP gets confused if - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? 
- sed '$q' "$ltmain" >> "$cfgfile" || (rm -f "$cfgfile"; exit 1) - - mv -f "$cfgfile" "$ofile" || \ - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" -]) -else - # If there is no Makefile yet, we rely on a make rule to execute - # `config.status --recheck' to rerun these tests and create the - # libtool script then. - ltmain_in=`echo $ltmain | sed -e 's/\.sh$/.in/'` - if test -f "$ltmain_in"; then - test -f Makefile && make "$ltmain" - fi -fi -])# AC_LIBTOOL_CONFIG - - -# AC_LIBTOOL_PROG_COMPILER_NO_RTTI([TAGNAME]) -# ------------------------------------------- -AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], -[AC_REQUIRE([_LT_AC_SYS_COMPILER])dnl - -_LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= - -if test "$GCC" = yes; then - _LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' - - AC_LIBTOOL_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions], - lt_cv_prog_compiler_rtti_exceptions, - [-fno-rtti -fno-exceptions], [], - [_LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"]) -fi -])# AC_LIBTOOL_PROG_COMPILER_NO_RTTI - - -# AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE -# --------------------------------- -AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], -[AC_REQUIRE([AC_CANONICAL_HOST]) -AC_REQUIRE([LT_AC_PROG_SED]) -AC_REQUIRE([AC_PROG_NM]) -AC_REQUIRE([AC_OBJEXT]) -# Check for command to grab the raw symbol name followed by C symbol from nm. -AC_MSG_CHECKING([command to parse $NM output from $compiler object]) -AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe], -[ -# These are sane defaults that work on at least a few old systems. -# [They come from Ultrix. What could be older than Ultrix?!! ;)] - -# Character class describing NM global symbol codes. -symcode='[[BCDEGRST]]' - -# Regexp to match symbols that can be accessed directly from C. -sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)' - -# Transform an extracted symbol line into a proper C declaration -lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^. .* \(.*\)$/extern int \1;/p'" - -# Transform an extracted symbol line into symbol name and symbol address -lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (lt_ptr) 0},/p' -e 's/^$symcode \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (lt_ptr) \&\2},/p'" - -# Define system-specific variables. 
-case $host_os in -aix*) - symcode='[[BCDT]]' - ;; -cygwin* | mingw* | pw32*) - symcode='[[ABCDGISTW]]' - ;; -hpux*) # Its linker distinguishes data from code symbols - if test "$host_cpu" = ia64; then - symcode='[[ABCDEGRST]]' - fi - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (lt_ptr) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (lt_ptr) \&\2},/p'" - ;; -linux* | k*bsd*-gnu) - if test "$host_cpu" = ia64; then - symcode='[[ABCDGIRSTW]]' - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (lt_ptr) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (lt_ptr) \&\2},/p'" - fi - ;; -irix* | nonstopux*) - symcode='[[BCDEGRST]]' - ;; -osf*) - symcode='[[BCDEGQRST]]' - ;; -solaris*) - symcode='[[BDRT]]' - ;; -sco3.2v5*) - symcode='[[DT]]' - ;; -sysv4.2uw2*) - symcode='[[DT]]' - ;; -sysv5* | sco5v6* | unixware* | OpenUNIX*) - symcode='[[ABDT]]' - ;; -sysv4) - symcode='[[DFNSTU]]' - ;; -esac - -# Handle CRLF in mingw tool chain -opt_cr= -case $build_os in -mingw*) - opt_cr=`echo 'x\{0,1\}' | tr x '\015'` # option cr in regexp - ;; -esac - -# If we're using GNU nm, then use its standard symbol codes. -case `$NM -V 2>&1` in -*GNU* | *'with BFD'*) - symcode='[[ABCDGIRSTW]]' ;; -esac - -# Try without a prefix undercore, then with it. -for ac_symprfx in "" "_"; do - - # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. - symxfrm="\\1 $ac_symprfx\\2 \\2" - - # Write the raw and C identifiers. - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - - # Check to see that the pipe works correctly. - pipe_works=no - - rm -f conftest* - cat > conftest.$ac_ext <<EOF -#ifdef __cplusplus -extern "C" { -#endif -char nm_test_var; -void nm_test_func(){} -#ifdef __cplusplus -} -#endif -int main(){nm_test_var='a';nm_test_func();return(0);} -EOF - - if AC_TRY_EVAL(ac_compile); then - # Now try to grab the symbols. - nlist=conftest.nm - if AC_TRY_EVAL(NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist) && test -s "$nlist"; then - # Try sorting and uniquifying the output. - if sort "$nlist" | uniq > "$nlist"T; then - mv -f "$nlist"T "$nlist" - else - rm -f "$nlist"T - fi - - # Make sure that we snagged all the symbols we need. - if grep ' nm_test_var$' "$nlist" >/dev/null; then - if grep ' nm_test_func$' "$nlist" >/dev/null; then - cat <<EOF > conftest.$ac_ext -#ifdef __cplusplus -extern "C" { -#endif - -EOF - # Now generate the symbol file. - eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | grep -v main >> conftest.$ac_ext' - - cat <<EOF >> conftest.$ac_ext -#if defined (__STDC__) && __STDC__ -# define lt_ptr_t void * -#else -# define lt_ptr_t char * -# define const -#endif - -/* The mapping between symbol names and symbols. */ -const struct { - const char *name; - lt_ptr_t address; -} -lt_preloaded_symbols[[]] = -{ -EOF - $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (lt_ptr_t) \&\2},/" < "$nlist" | grep -v main >> conftest.$ac_ext - cat <<\EOF >> conftest.$ac_ext - {0, (lt_ptr_t) 0} -}; - -#ifdef __cplusplus -} -#endif -EOF - # Now try linking the two files. 
- mv conftest.$ac_objext conftstm.$ac_objext - lt_save_LIBS="$LIBS" - lt_save_CFLAGS="$CFLAGS" - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$_LT_AC_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" - if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then - pipe_works=yes - fi - LIBS="$lt_save_LIBS" - CFLAGS="$lt_save_CFLAGS" - else - echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD - fi - else - echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD - fi - else - echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD - fi - else - echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD - cat conftest.$ac_ext >&5 - fi - rm -rf conftest* conftst* - - # Do not use the global_symbol_pipe unless it works. - if test "$pipe_works" = yes; then - break - else - lt_cv_sys_global_symbol_pipe= - fi -done -]) -if test -z "$lt_cv_sys_global_symbol_pipe"; then - lt_cv_sys_global_symbol_to_cdecl= -fi -if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then - AC_MSG_RESULT(failed) -else - AC_MSG_RESULT(ok) -fi -]) # AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE - - -# AC_LIBTOOL_PROG_COMPILER_PIC([TAGNAME]) -# --------------------------------------- -AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC], -[_LT_AC_TAGVAR(lt_prog_compiler_wl, $1)= -_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)= -_LT_AC_TAGVAR(lt_prog_compiler_static, $1)= - -AC_MSG_CHECKING([for $compiler option to produce PIC]) - ifelse([$1],[CXX],[ - # C++ specific cases for pic, static, wl, etc. - if test "$GXX" = yes; then - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-static' - - case $host_os in - aix*) - # All AIX code is PIC. - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi - ;; - amigaos*) - # FIXME: we need at least 68020 code to build shared libraries, but - # adding the `-m68020' flag to GCC prevents building anything better, - # like `-m68040'. - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' - ;; - beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) - # PIC is the default for these OSes. - ;; - mingw* | cygwin* | os2* | pw32*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - m4_if([$1], [GCJ], [], - [_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) - ;; - darwin* | rhapsody*) - # PIC is the default on this platform - # Common symbols not allowed in MH_DYLIB files - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' - ;; - *djgpp*) - # DJGPP does not support shared libraries at all - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)= - ;; - interix[[3-9]]*) - # Interix 3.x gcc -fpic/-fPIC options generate broken code. - # Instead, we relocate shared libraries at runtime. - ;; - sysv4*MP*) - if test -d /usr/nec; then - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic - fi - ;; - hpux*) - # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but - # not for PA HP-UX. - case $host_cpu in - hppa*64*|ia64*) - ;; - *) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - esac - ;; - *) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - esac - else - case $host_os in - aix[[4-9]]*) - # All AIX code is PIC. 
- if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - else - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' - fi - ;; - chorus*) - case $cc_basename in - cxch68*) - # Green Hills C++ Compiler - # _LT_AC_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" - ;; - esac - ;; - darwin*) - # PIC is the default on this platform - # Common symbols not allowed in MH_DYLIB files - case $cc_basename in - xlc*) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-qnocommon' - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - ;; - esac - ;; - dgux*) - case $cc_basename in - ec++*) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - ;; - ghcx*) - # Green Hills C++ Compiler - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-pic' - ;; - *) - ;; - esac - ;; - freebsd* | dragonfly*) - # FreeBSD uses GNU C++ - ;; - hpux9* | hpux10* | hpux11*) - case $cc_basename in - CC*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' - if test "$host_cpu" != ia64; then - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='+Z' - fi - ;; - aCC*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' - case $host_cpu in - hppa*64*|ia64*) - # +Z the default - ;; - *) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='+Z' - ;; - esac - ;; - *) - ;; - esac - ;; - interix*) - # This is c89, which is MS Visual C++ (no shared libs) - # Anyone wants to do a port? - ;; - irix5* | irix6* | nonstopux*) - case $cc_basename in - CC*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - # CC pic flag -KPIC is the default. - ;; - *) - ;; - esac - ;; - linux* | k*bsd*-gnu) - case $cc_basename in - KCC*) - # KAI C++ Compiler - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - icpc* | ecpc*) - # Intel C++ - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-static' - ;; - pgCC* | pgcpp*) - # Portland Group C++ compiler. - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - cxx*) - # Compaq C++ - # Make sure the PIC flag is empty. It appears that all Alpha - # Linux and Compaq Tru64 Unix objects are PIC. - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)= - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - ;; - *) - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) - # Sun C++ 5.9 - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' - ;; - esac - ;; - esac - ;; - lynxos*) - ;; - m88k*) - ;; - mvs*) - case $cc_basename in - cxx*) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall' - ;; - *) - ;; - esac - ;; - netbsd*) - ;; - osf3* | osf4* | osf5*) - case $cc_basename in - KCC*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' - ;; - RCC*) - # Rational C++ 2.4.1 - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-pic' - ;; - cxx*) - # Digital/Compaq C++ - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - # Make sure the PIC flag is empty. It appears that all Alpha - # Linux and Compaq Tru64 Unix objects are PIC. 
- _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)= - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - ;; - *) - ;; - esac - ;; - psos*) - ;; - solaris*) - case $cc_basename in - CC*) - # Sun C++ 4.2, 5.x and Centerline C++ - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' - ;; - gcx*) - # Green Hills C++ Compiler - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' - ;; - *) - ;; - esac - ;; - sunos4*) - case $cc_basename in - CC*) - # Sun C++ 4.x - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-pic' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - lcc*) - # Lucid - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-pic' - ;; - *) - ;; - esac - ;; - tandem*) - case $cc_basename in - NCC*) - # NonStop-UX NCC 3.20 - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - ;; - *) - ;; - esac - ;; - sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) - case $cc_basename in - CC*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - esac - ;; - vxworks*) - ;; - *) - _LT_AC_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no - ;; - esac - fi -], -[ - if test "$GCC" = yes; then - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-static' - - case $host_os in - aix*) - # All AIX code is PIC. - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi - ;; - - amigaos*) - # FIXME: we need at least 68020 code to build shared libraries, but - # adding the `-m68020' flag to GCC prevents building anything better, - # like `-m68040'. - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' - ;; - - beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) - # PIC is the default for these OSes. - ;; - - mingw* | cygwin* | pw32* | os2*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - m4_if([$1], [GCJ], [], - [_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) - ;; - - darwin* | rhapsody*) - # PIC is the default on this platform - # Common symbols not allowed in MH_DYLIB files - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' - ;; - - interix[[3-9]]*) - # Interix 3.x gcc -fpic/-fPIC options generate broken code. - # Instead, we relocate shared libraries at runtime. - ;; - - msdosdjgpp*) - # Just because we use GCC doesn't mean we suddenly get shared libraries - # on systems that don't support them. - _LT_AC_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no - enable_shared=no - ;; - - sysv4*MP*) - if test -d /usr/nec; then - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic - fi - ;; - - hpux*) - # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but - # not for PA HP-UX. - case $host_cpu in - hppa*64*|ia64*) - # +Z the default - ;; - *) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - esac - ;; - - *) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' - ;; - esac - else - # PORTME Check for flag to pass linker flags through the system compiler. 
- case $host_os in - aix*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - else - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' - fi - ;; - darwin*) - # PIC is the default on this platform - # Common symbols not allowed in MH_DYLIB files - case $cc_basename in - xlc*) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-qnocommon' - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - ;; - esac - ;; - - mingw* | cygwin* | pw32* | os2*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - m4_if([$1], [GCJ], [], - [_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) - ;; - - hpux9* | hpux10* | hpux11*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but - # not for PA HP-UX. - case $host_cpu in - hppa*64*|ia64*) - # +Z the default - ;; - *) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='+Z' - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - # PIC (with -KPIC) is the default. - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - ;; - - newsos6) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - - linux* | k*bsd*-gnu) - case $cc_basename in - icc* | ecc*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-static' - ;; - pgcc* | pgf77* | pgf90* | pgf95*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - ccc*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - # All Alpha code is PIC. - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - ;; - *) - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) - # Sun C 5.9 - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - ;; - *Sun\ F*) - # Sun Fortran 8.3 passes all unrecognized flags to the linker - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='' - ;; - esac - ;; - esac - ;; - - osf3* | osf4* | osf5*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - # All OSF/1 code is PIC. 
- _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - ;; - - rdos*) - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' - ;; - - solaris*) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - case $cc_basename in - f77* | f90* | f95*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';; - *) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';; - esac - ;; - - sunos4*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - - sysv4 | sysv4.2uw2* | sysv4.3*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - - sysv4*MP*) - if test -d /usr/nec ;then - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - fi - ;; - - sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - - unicos*) - _LT_AC_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' - _LT_AC_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no - ;; - - uts4*) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)='-pic' - _LT_AC_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - ;; - - *) - _LT_AC_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no - ;; - esac - fi -]) -AC_MSG_RESULT([$_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)]) - -# -# Check to make sure the PIC flag actually works. -# -if test -n "$_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)"; then - AC_LIBTOOL_COMPILER_OPTION([if $compiler PIC flag $_LT_AC_TAGVAR(lt_prog_compiler_pic, $1) works], - _LT_AC_TAGVAR(lt_cv_prog_compiler_pic_works, $1), - [$_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)ifelse([$1],[],[ -DPIC],[ifelse([$1],[CXX],[ -DPIC],[])])], [], - [case $_LT_AC_TAGVAR(lt_prog_compiler_pic, $1) in - "" | " "*) ;; - *) _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)" ;; - esac], - [_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)= - _LT_AC_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no]) -fi -case $host_os in - # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)= - ;; - *) - _LT_AC_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_AC_TAGVAR(lt_prog_compiler_pic, $1)ifelse([$1],[],[ -DPIC],[ifelse([$1],[CXX],[ -DPIC],[])])" - ;; -esac - -# -# Check to make sure the static flag actually works. -# -wl=$_LT_AC_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_AC_TAGVAR(lt_prog_compiler_static, $1)\" -AC_LIBTOOL_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works], - _LT_AC_TAGVAR(lt_cv_prog_compiler_static_works, $1), - $lt_tmp_static_flag, - [], - [_LT_AC_TAGVAR(lt_prog_compiler_static, $1)=]) -]) - - -# AC_LIBTOOL_PROG_LD_SHLIBS([TAGNAME]) -# ------------------------------------ -# See if the linker supports building shared libraries. -AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS], -[AC_REQUIRE([LT_AC_PROG_SED])dnl -AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) -ifelse([$1],[CXX],[ - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - case $host_os in - aix[[4-9]]*) - # If we're using GNU nm, then we don't want the "-C" option. 
- # -C means demangle to AIX nm, but means don't demangle with GNU nm - if $NM -V 2>&1 | grep 'GNU' > /dev/null; then - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\[$]2 == "T") || (\[$]2 == "D") || (\[$]2 == "B")) && ([substr](\[$]3,1,1) != ".")) { print \[$]3 } }'\'' | sort -u > $export_symbols' - else - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\[$]2 == "T") || (\[$]2 == "D") || (\[$]2 == "B")) && ([substr](\[$]3,1,1) != ".")) { print \[$]3 } }'\'' | sort -u > $export_symbols' - fi - ;; - pw32*) - _LT_AC_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" - ;; - cygwin* | mingw*) - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' - ;; - *) - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - ;; - esac - _LT_AC_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] -],[ - runpath_var= - _LT_AC_TAGVAR(allow_undefined_flag, $1)= - _LT_AC_TAGVAR(enable_shared_with_static_runtimes, $1)=no - _LT_AC_TAGVAR(archive_cmds, $1)= - _LT_AC_TAGVAR(archive_expsym_cmds, $1)= - _LT_AC_TAGVAR(old_archive_From_new_cmds, $1)= - _LT_AC_TAGVAR(old_archive_from_expsyms_cmds, $1)= - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)= - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)= - _LT_AC_TAGVAR(thread_safe_flag_spec, $1)= - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)= - _LT_AC_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)= - _LT_AC_TAGVAR(hardcode_direct, $1)=no - _LT_AC_TAGVAR(hardcode_minus_L, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=unsupported - _LT_AC_TAGVAR(link_all_deplibs, $1)=unknown - _LT_AC_TAGVAR(hardcode_automatic, $1)=no - _LT_AC_TAGVAR(module_cmds, $1)= - _LT_AC_TAGVAR(module_expsym_cmds, $1)= - _LT_AC_TAGVAR(always_export_symbols, $1)=no - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - # include_expsyms should be a list of space-separated symbols to be *always* - # included in the symbol list - _LT_AC_TAGVAR(include_expsyms, $1)= - # exclude_expsyms can be an extended regexp of symbols to exclude - # it will be wrapped by ` (' and `)$', so one must not match beginning or - # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', - # as well as any symbol that contains `d'. - _LT_AC_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if - # the symbol is explicitly referenced. Since portable code cannot - # rely on this symbol name, it's probably fine to never include it in - # preloaded symbol tables. - # Exclude shared library initialization/finalization symbols. -dnl Note also adjust exclude_expsyms for C++ above. - extract_expsyms_cmds= - # Just being paranoid about ensuring that cc_basename is set. - _LT_CC_BASENAME([$compiler]) - case $host_os in - cygwin* | mingw* | pw32*) - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. 
- if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; - interix*) - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; - openbsd*) - with_gnu_ld=no - ;; - esac - - _LT_AC_TAGVAR(ld_shlibs, $1)=yes - if test "$with_gnu_ld" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty - wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | grep 'no-whole-archive' > /dev/null; then - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)= - fi - supports_anon_versioning=no - case `$LD -v 2>/dev/null` in - *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... - *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... - *\ 2.11.*) ;; # other 2.11 versions - *) supports_anon_versioning=yes ;; - esac - - # See if GNU ld supports shared libraries. - case $host_os in - aix[[3-9]]*) - # On AIX/PPC, the GNU linker is very broken - if test "$host_cpu" != ia64; then - _LT_AC_TAGVAR(ld_shlibs, $1)=no - cat <<EOF 1>&2 - -*** Warning: the GNU linker, at least up to release 2.9.1, is reported -*** to be unable to reliably create shared libraries on AIX. -*** Therefore, libtool is disabling shared libraries support. If you -*** really care for shared libraries, you may want to modify your PATH -*** so that a non-GNU linker is found, and then restart. - -EOF - fi - ;; - - amigaos*) - _LT_AC_TAGVAR(archive_cmds, $1)='$rm $output_objdir/a2ixlibrary.data~$echo "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$echo "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$echo "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$echo "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - - # Samuel A. Falvo II <kc5tja@dolphin.openprojects.net> reports - # that the semantics of dynamic libraries on AmigaOS, at least up - # to version 4, is to share data among multiple programs linked - # with the same dynamic library. Since this doesn't match the - # behavior of shared libraries on other platforms, we can't use - # them. - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - - beos*) - if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then - _LT_AC_TAGVAR(allow_undefined_flag, $1)=unsupported - # Joseph Beckenbach <jrb3@best.com> says some releases of gcc - # support --undefined. This deserves some investigation. FIXME - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - - cygwin* | mingw* | pw32*) - # _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, - # as there is no search path for DLLs. 
- _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_AC_TAGVAR(always_export_symbols, $1)=no - _LT_AC_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' - - if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - # If the export-symbols file already is a .def file (1st line - # is EXPORTS), use it as is; otherwise, prepend... - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - cp $export_symbols $output_objdir/$soname.def; - else - echo EXPORTS > $output_objdir/$soname.def; - cat $export_symbols >> $output_objdir/$soname.def; - fi~ - $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - - interix[[3-9]]*) - _LT_AC_TAGVAR(hardcode_direct, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
- _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | k*bsd*-gnu) - if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then - tmp_addflag= - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $echo \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95*) # Portland Group f77 and f90 compilers - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $echo \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; - efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 - tmp_addflag=' -i_dynamic -nofor_main' ;; - ifc* | ifort*) # Intel Fortran compiler - tmp_addflag=' -nofor_main' ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $echo \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - *) - tmp_sharedflag='-shared' ;; - esac - _LT_AC_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - - if test $supports_anon_versioning = yes; then - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - $echo "local: *; };" >> $output_objdir/$libname.ver~ - $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - - solaris*) - if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then - _LT_AC_TAGVAR(ld_shlibs, $1)=no - cat <<EOF 1>&2 - -*** Warning: The releases 2.8.* of the GNU linker cannot reliably -*** create shared libraries on Solaris systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.9.1 or newer. 
Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. - -EOF - elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) - case `$LD -v 2>&1` in - *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*) - _LT_AC_TAGVAR(ld_shlibs, $1)=no - cat <<_LT_EOF 1>&2 - -*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not -*** reliably create shared libraries on SCO systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.16.91.0.3 or newer. Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. - -_LT_EOF - ;; - *) - if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='`test -z "$SCOABSPATH" && echo ${wl}-rpath,$libdir`' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname,\${SCOABSPATH:+${install_libdir}/}$soname,-retain-symbols-file,$export_symbols -o $lib' - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - esac - ;; - - sunos4*) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' - wlarc= - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - *) - if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - esac - - if test "$_LT_AC_TAGVAR(ld_shlibs, $1)" = no; then - runpath_var= - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)= - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)= - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)= - fi - else - # PORTME fill in a description of your system's linker (not GNU ld) - case $host_os in - aix3*) - _LT_AC_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_AC_TAGVAR(always_export_symbols, $1)=yes - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - _LT_AC_TAGVAR(hardcode_direct, $1)=unsupported - fi - ;; - - aix[[4-9]]*) - if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. 
- aix_use_runtimelinking=no - exp_sym_flag='-Bexport' - no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. - # -C means demangle to AIX nm, but means don't demangle with GNU nm - if $NM -V 2>&1 | grep 'GNU' > /dev/null; then - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\[$]2 == "T") || (\[$]2 == "D") || (\[$]2 == "B")) && ([substr](\[$]3,1,1) != ".")) { print \[$]3 } }'\'' | sort -u > $export_symbols' - else - _LT_AC_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\[$]2 == "T") || (\[$]2 == "D") || (\[$]2 == "B")) && ([substr](\[$]3,1,1) != ".")) { print \[$]3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we - # need to do runtime linking. - case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) - for ld_flag in $LDFLAGS; do - if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done - ;; - esac - - exp_sym_flag='-bexport' - no_entry_flag='-bnoentry' - fi - - # When large executables or shared objects are built, AIX ld can - # have problems creating the table of contents. If linking a library - # or program results in "error TOC overflow" add -mminimal-toc to - # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not - # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. - - _LT_AC_TAGVAR(archive_cmds, $1)='' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - - if test "$GCC" = yes; then - case $host_os in aix4.[[012]]|aix4.[[012]].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ - collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && \ - strings "$collect2name" | grep resolve_lib_name >/dev/null - then - # We have reworked collect2 - : - else - # We have old collect2 - _LT_AC_TAGVAR(hardcode_direct, $1)=unsupported - # It fails to find uninstalled libraries when the uninstalled - # path is not listed in the libpath. Setting hardcode_minus_L - # to unsupported forces relinking - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)= - fi - ;; - esac - shared_flag='-shared' - if test "$aix_use_runtimelinking" = yes; then - shared_flag="$shared_flag "'${wl}-G' - fi - else - # not using gcc - if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else - if test "$aix_use_runtimelinking" = yes; then - shared_flag='${wl}-G' - else - shared_flag='${wl}-bM:SRE' - fi - fi - fi - - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - _LT_AC_TAGVAR(always_export_symbols, $1)=yes - if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - _LT_AC_TAGVAR(allow_undefined_flag, $1)='-berok' - # Determine the default libpath from the value encoded in an empty executable. 
- _LT_AC_SYS_LIBPATH_AIX - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="\$CC"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else - if test "$host_cpu" = ia64; then - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' - _LT_AC_TAGVAR(allow_undefined_flag, $1)="-z nodefs" - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an empty executable. - _LT_AC_SYS_LIBPATH_AIX - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. - _LT_AC_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' - # Exported symbols can be pulled into shared objects from archives - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='$convenience' - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=yes - # This is similar to how AIX traditionally builds its shared libraries. - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; - - amigaos*) - _LT_AC_TAGVAR(archive_cmds, $1)='$rm $output_objdir/a2ixlibrary.data~$echo "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$echo "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$echo "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$echo "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - # see comment about different semantics on the GNU ld section - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - - bsdi[[45]]*) - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic - ;; - - cygwin* | mingw* | pw32*) - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' - _LT_AC_TAGVAR(allow_undefined_flag, $1)=unsupported - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `echo "$deplibs" | $SED -e '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. - _LT_AC_TAGVAR(old_archive_From_new_cmds, $1)='true' - # FIXME: Should let the user specify the lib program. 
- _LT_AC_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' - _LT_AC_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`' - _LT_AC_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - ;; - - darwin* | rhapsody*) - _LT_AC_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined" - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_AC_TAGVAR(hardcode_direct, $1)=no - _LT_AC_TAGVAR(hardcode_automatic, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=unsupported - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='' - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - if test "$GCC" = yes ; then - output_verbose_link_cmd='echo' - _LT_AC_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" - _LT_AC_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" - _LT_AC_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" - _LT_AC_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - else - case $cc_basename in - xlc*) - output_verbose_link_cmd='echo' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-install_name ${wl}`echo $rpath/$soname` $xlcverstring' - _LT_AC_TAGVAR(module_cmds, $1)='$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags' - # Don't fix this by using the ld -exported_symbols_list flag, it doesn't exist in older darwin lds - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC -qmkshrobj $allow_undefined_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-install_name ${wl}$rpath/$soname $xlcverstring~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' - _LT_AC_TAGVAR(module_expsym_cmds, $1)='sed -e "s,#.*,," -e "s,^[ ]*,," -e "s,^\(..*\),_&," < $export_symbols > $output_objdir/${libname}-symbols.expsym~$CC $allow_undefined_flag -o $lib -bundle $libobjs $deplibs$compiler_flags~nmedit -s $output_objdir/${libname}-symbols.expsym ${lib}' - ;; - *) - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - esac - fi - ;; - - dgux*) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - freebsd1*) - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - - # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor - # support. Future versions do this automatically, but an explicit c++rt0.o - # does not break anything, and helps significantly (at the cost of a little - # extra space). - freebsd2.2*) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - # Unfortunately, older versions of FreeBSD 2 do not have this feature. 
- freebsd2*) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. - freebsd* | dragonfly*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - hpux9*) - if test "$GCC" = yes; then - _LT_AC_TAGVAR(archive_cmds, $1)='$rm $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - _LT_AC_TAGVAR(archive_cmds, $1)='$rm $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - ;; - - hpux10*) - if test "$GCC" = yes -a "$with_gnu_ld" = no; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi - if test "$with_gnu_ld" = no; then - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - fi - ;; - - hpux11*) - if test "$GCC" = yes -a "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - fi - if test "$with_gnu_ld" = no; then - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - - case $host_cpu in - hppa*64*|ia64*) - _LT_AC_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='+b $libdir' - _LT_AC_TAGVAR(hardcode_direct, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - *) - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - ;; - esac - fi - ;; - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir' - fi - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | grep __ELF__ >/dev/null; then - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out - else - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF - fi - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - newsos6) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - openbsd*) - if test -f /usr/libexec/ld.so; then - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC 
-shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' - else - case $host_os in - openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - ;; - *) - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' - ;; - esac - fi - else - _LT_AC_TAGVAR(ld_shlibs, $1)=no - fi - ;; - - os2*) - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - _LT_AC_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_AC_TAGVAR(archive_cmds, $1)='$echo "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$echo "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$echo DATA >> $output_objdir/$libname.def~$echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~$echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' - _LT_AC_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) - if test "$GCC" = yes; then - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -shared${allow_undefined_flag} $libobjs $deplibs $linker_flags -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' - fi - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && echo ${wl}-set_version ${wl}$verstring` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - else - _LT_AC_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -shared${allow_undefined_flag} $libobjs $deplibs $linker_flags -msym -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; echo "-hidden">> $lib.exp~ - $LD -shared${allow_undefined_flag} -input $lib.exp $linker_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && echo -set_version $verstring` -update_registry ${output_objdir}/so_locations -o $lib~$rm $lib.exp' - - # Both c and cxx compiler support -rpath directly - 
_LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' - fi - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=: - ;; - - solaris*) - _LT_AC_TAGVAR(no_undefined_flag, $1)=' -z text' - if test "$GCC" = yes; then - wlarc='${wl}' - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ - $CC -shared ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$rm $lib.exp' - else - wlarc='' - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~$echo "local: *; };" >> $lib.exp~ - $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$rm $lib.exp' - fi - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - case $host_os in - solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; - *) - # The compiler driver will combine and reorder linker options, - # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) - if test "$GCC" = yes; then - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - _LT_AC_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' - fi - ;; - esac - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - ;; - - sunos4*) - if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes - _LT_AC_TAGVAR(hardcode_minus_L, $1)=yes - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - sysv4) - case $host_vendor in - sni) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_direct, $1)=yes # is this really true??? - ;; - siemens) - ## LD is ld it makes a PLAMLIB - ## CC just makes a GrossModule. 
- _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs' - _LT_AC_TAGVAR(hardcode_direct, $1)=no - ;; - motorola) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie - ;; - esac - runpath_var='LD_RUN_PATH' - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - sysv4.3*) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - runpath_var=LD_RUN_PATH - hardcode_runpath_var=yes - _LT_AC_TAGVAR(ld_shlibs, $1)=yes - fi - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) - _LT_AC_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) - # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
- _LT_AC_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' - _LT_AC_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`' - _LT_AC_TAGVAR(hardcode_libdir_separator, $1)=':' - _LT_AC_TAGVAR(link_all_deplibs, $1)=yes - _LT_AC_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_AC_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_AC_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,\${SCOABSPATH:+${install_libdir}/}$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - uts4*) - _LT_AC_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - _LT_AC_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_AC_TAGVAR(hardcode_shlibpath_var, $1)=no - ;; - - *) - _LT_AC_TAGVAR(ld_shlibs, $1)=no - ;; - esac - fi -]) -AC_MSG_RESULT([$_LT_AC_TAGVAR(ld_shlibs, $1)]) -test "$_LT_AC_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no - -# -# Do we need to explicitly link libc? -# -case "x$_LT_AC_TAGVAR(archive_cmds_need_lc, $1)" in -x|xyes) - # Assume -lc should be added - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=yes - - if test "$enable_shared" = yes && test "$GCC" = yes; then - case $_LT_AC_TAGVAR(archive_cmds, $1) in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. - ;; - '$CC '*) - # Test whether the compiler implicitly links with -lc since on some - # systems, -lgcc has to come before -lc. If gcc already passes -lc - # to ld, don't add -lc before -lgcc. - AC_MSG_CHECKING([whether -lc should be explicitly linked in]) - $rm conftest* - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - if AC_TRY_EVAL(ac_compile) 2>conftest.err; then - soname=conftest - lib=conftest - libobjs=conftest.$ac_objext - deplibs= - wl=$_LT_AC_TAGVAR(lt_prog_compiler_wl, $1) - pic_flag=$_LT_AC_TAGVAR(lt_prog_compiler_pic, $1) - compiler_flags=-v - linker_flags=-v - verstring= - output_objdir=. - libname=conftest - lt_save_allow_undefined_flag=$_LT_AC_TAGVAR(allow_undefined_flag, $1) - _LT_AC_TAGVAR(allow_undefined_flag, $1)= - if AC_TRY_EVAL(_LT_AC_TAGVAR(archive_cmds, $1) 2\>\&1 \| grep \" -lc \" \>/dev/null 2\>\&1) - then - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=no - else - _LT_AC_TAGVAR(archive_cmds_need_lc, $1)=yes - fi - _LT_AC_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag - else - cat conftest.err 1>&5 - fi - $rm conftest* - AC_MSG_RESULT([$_LT_AC_TAGVAR(archive_cmds_need_lc, $1)]) - ;; - esac - fi - ;; -esac -])# AC_LIBTOOL_PROG_LD_SHLIBS - - -# _LT_AC_FILE_LTDLL_C -# ------------------- -# Be careful that the start marker always follows a newline. 
-AC_DEFUN([_LT_AC_FILE_LTDLL_C], [ -# /* ltdll.c starts here */ -# #define WIN32_LEAN_AND_MEAN -# #include <windows.h> -# #undef WIN32_LEAN_AND_MEAN -# #include <stdio.h> -# -# #ifndef __CYGWIN__ -# # ifdef __CYGWIN32__ -# # define __CYGWIN__ __CYGWIN32__ -# # endif -# #endif -# -# #ifdef __cplusplus -# extern "C" { -# #endif -# BOOL APIENTRY DllMain (HINSTANCE hInst, DWORD reason, LPVOID reserved); -# #ifdef __cplusplus -# } -# #endif -# -# #ifdef __CYGWIN__ -# #include <cygwin/cygwin_dll.h> -# DECLARE_CYGWIN_DLL( DllMain ); -# #endif -# HINSTANCE __hDllInstance_base; -# -# BOOL APIENTRY -# DllMain (HINSTANCE hInst, DWORD reason, LPVOID reserved) -# { -# __hDllInstance_base = hInst; -# return TRUE; -# } -# /* ltdll.c ends here */ -])# _LT_AC_FILE_LTDLL_C - - -# _LT_AC_TAGVAR(VARNAME, [TAGNAME]) -# --------------------------------- -AC_DEFUN([_LT_AC_TAGVAR], [ifelse([$2], [], [$1], [$1_$2])]) - - -# old names -AC_DEFUN([AM_PROG_LIBTOOL], [AC_PROG_LIBTOOL]) -AC_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)]) -AC_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)]) -AC_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)]) -AC_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)]) -AC_DEFUN([AM_PROG_LD], [AC_PROG_LD]) -AC_DEFUN([AM_PROG_NM], [AC_PROG_NM]) - -# This is just to silence aclocal about the macro not being used -ifelse([AC_DISABLE_FAST_INSTALL]) - -AC_DEFUN([LT_AC_PROG_GCJ], -[AC_CHECK_TOOL(GCJ, gcj, no) - test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2" - AC_SUBST(GCJFLAGS) -]) - -AC_DEFUN([LT_AC_PROG_RC], -[AC_CHECK_TOOL(RC, windres, no) -]) - - -# Cheap backport of AS_EXECUTABLE_P and required macros -# from Autoconf 2.59; we should not use $as_executable_p directly. - -# _AS_TEST_PREPARE -# ---------------- -m4_ifndef([_AS_TEST_PREPARE], -[m4_defun([_AS_TEST_PREPARE], -[if test -x / >/dev/null 2>&1; then - as_executable_p='test -x' -else - as_executable_p='test -f' -fi -])])# _AS_TEST_PREPARE - -# AS_EXECUTABLE_P -# --------------- -# Check whether a file is executable. -m4_ifndef([AS_EXECUTABLE_P], -[m4_defun([AS_EXECUTABLE_P], -[AS_REQUIRE([_AS_TEST_PREPARE])dnl -$as_executable_p $1[]dnl -])])# AS_EXECUTABLE_P - -# NOTE: This macro has been submitted for inclusion into # -# GNU Autoconf as AC_PROG_SED. When it is available in # -# a released version of Autoconf we should remove this # -# macro and use it instead. # -# LT_AC_PROG_SED -# -------------- -# Check for a fully-functional sed program, that truncates -# as few characters as possible. Prefer GNU sed if found. -AC_DEFUN([LT_AC_PROG_SED], -[AC_MSG_CHECKING([for a sed that does not truncate output]) -AC_CACHE_VAL(lt_cv_path_SED, -[# Loop through the user's path and test for sed and gsed. -# Then use that list of sed's as ones to test for truncation. -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for lt_ac_prog in sed gsed; do - for ac_exec_ext in '' $ac_executable_extensions; do - if AS_EXECUTABLE_P(["$as_dir/$lt_ac_prog$ac_exec_ext"]); then - lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext" - fi - done - done -done -IFS=$as_save_IFS -lt_ac_max=0 -lt_ac_count=0 -# Add /usr/xpg4/bin/sed as it is typically found on Solaris -# along with /bin/sed that truncates output. -for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do - test ! -f $lt_ac_sed && continue - cat /dev/null > conftest.in - lt_ac_count=0 - echo $ECHO_N "0123456789$ECHO_C" >conftest.in - # Check for GNU sed and select it if it is found. 
- if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then - lt_cv_path_SED=$lt_ac_sed - break - fi - while true; do - cat conftest.in conftest.in >conftest.tmp - mv conftest.tmp conftest.in - cp conftest.in conftest.nl - echo >>conftest.nl - $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break - cmp -s conftest.out conftest.nl || break - # 10000 chars as input seems more than enough - test $lt_ac_count -gt 10 && break - lt_ac_count=`expr $lt_ac_count + 1` - if test $lt_ac_count -gt $lt_ac_max; then - lt_ac_max=$lt_ac_count - lt_cv_path_SED=$lt_ac_sed - fi - done -done -]) -SED=$lt_cv_path_SED -AC_SUBST([SED]) -AC_MSG_RESULT([$SED]) -]) - -# Copyright (C) 2002, 2003, 2005, 2006, 2007 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_AUTOMAKE_VERSION(VERSION) -# ---------------------------- -# Automake X.Y traces this macro to ensure aclocal.m4 has been -# generated from the m4 files accompanying Automake X.Y. -# (This private macro should not be called outside this file.) -AC_DEFUN([AM_AUTOMAKE_VERSION], -[am__api_version='1.10' -dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to -dnl require some minimum version. Point them to the right macro. -m4_if([$1], [1.10.1], [], - [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl -]) - -# _AM_AUTOCONF_VERSION(VERSION) -# ----------------------------- -# aclocal traces this macro to find the Autoconf version. -# This is a private macro too. Using m4_define simplifies -# the logic in aclocal, which can simply ignore this definition. -m4_define([_AM_AUTOCONF_VERSION], []) - -# AM_SET_CURRENT_AUTOMAKE_VERSION -# ------------------------------- -# Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. -# This function is AC_REQUIREd by AC_INIT_AUTOMAKE. -AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], -[AM_AUTOMAKE_VERSION([1.10.1])dnl -m4_ifndef([AC_AUTOCONF_VERSION], - [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl -_AM_AUTOCONF_VERSION(AC_AUTOCONF_VERSION)]) - -# AM_AUX_DIR_EXPAND -*- Autoconf -*- - -# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets -# $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to -# `$srcdir', `$srcdir/..', or `$srcdir/../..'. -# -# Of course, Automake must honor this variable whenever it calls a -# tool from the auxiliary directory. The problem is that $srcdir (and -# therefore $ac_aux_dir as well) can be either absolute or relative, -# depending on how configure is run. This is pretty annoying, since -# it makes $ac_aux_dir quite unusable in subdirectories: in the top -# source directory, any form will work fine, but in subdirectories a -# relative path needs to be adjusted first. -# -# $ac_aux_dir/missing -# fails when called from a subdirectory if $ac_aux_dir is relative -# $top_srcdir/$ac_aux_dir/missing -# fails if $ac_aux_dir is absolute, -# fails when called from a subdirectory in a VPATH build with -# a relative $ac_aux_dir -# -# The reason of the latter failure is that $top_srcdir and $ac_aux_dir -# are both prefixed by $srcdir. 
In an in-source build this is usually -# harmless because $srcdir is `.', but things will broke when you -# start a VPATH build or use an absolute $srcdir. -# -# So we could use something similar to $top_srcdir/$ac_aux_dir/missing, -# iff we strip the leading $srcdir from $ac_aux_dir. That would be: -# am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` -# and then we would define $MISSING as -# MISSING="\${SHELL} $am_aux_dir/missing" -# This will work as long as MISSING is not called from configure, because -# unfortunately $(top_srcdir) has no meaning in configure. -# However there are other variables, like CC, which are often used in -# configure, and could therefore not use this "fixed" $ac_aux_dir. -# -# Another solution, used here, is to always expand $ac_aux_dir to an -# absolute PATH. The drawback is that using absolute paths prevent a -# configured tree to be moved without reconfiguration. - -AC_DEFUN([AM_AUX_DIR_EXPAND], -[dnl Rely on autoconf to set up CDPATH properly. -AC_PREREQ([2.50])dnl -# expand $ac_aux_dir to an absolute path -am_aux_dir=`cd $ac_aux_dir && pwd` -]) - -# AM_CONDITIONAL -*- Autoconf -*- - -# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006 -# Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 8 - -# AM_CONDITIONAL(NAME, SHELL-CONDITION) -# ------------------------------------- -# Define a conditional. -AC_DEFUN([AM_CONDITIONAL], -[AC_PREREQ(2.52)dnl - ifelse([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], - [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl -AC_SUBST([$1_TRUE])dnl -AC_SUBST([$1_FALSE])dnl -_AM_SUBST_NOTMAKE([$1_TRUE])dnl -_AM_SUBST_NOTMAKE([$1_FALSE])dnl -if $2; then - $1_TRUE= - $1_FALSE='#' -else - $1_TRUE='#' - $1_FALSE= -fi -AC_CONFIG_COMMANDS_PRE( -[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then - AC_MSG_ERROR([[conditional "$1" was never defined. -Usually this means the macro was only invoked conditionally.]]) -fi])]) - -# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 -# Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 9 - -# There are a few dirty hacks below to avoid letting `AC_PROG_CC' be -# written in clear, in which case automake, when reading aclocal.m4, -# will think it sees a *use*, and therefore will trigger all it's -# C support machinery. Also note that it means that autoscan, seeing -# CC etc. in the Makefile, will ask for an AC_PROG_CC use... - - -# _AM_DEPENDENCIES(NAME) -# ---------------------- -# See how the compiler implements dependency checking. -# NAME is "CC", "CXX", "GCJ", or "OBJC". -# We try a few techniques and use that to set a single cache variable. -# -# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was -# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular -# dependency, and given that the user is not expected to run this macro, -# just rely on AC_PROG_CC. 
-AC_DEFUN([_AM_DEPENDENCIES], -[AC_REQUIRE([AM_SET_DEPDIR])dnl -AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl -AC_REQUIRE([AM_MAKE_INCLUDE])dnl -AC_REQUIRE([AM_DEP_TRACK])dnl - -ifelse([$1], CC, [depcc="$CC" am_compiler_list=], - [$1], CXX, [depcc="$CXX" am_compiler_list=], - [$1], OBJC, [depcc="$OBJC" am_compiler_list='gcc3 gcc'], - [$1], UPC, [depcc="$UPC" am_compiler_list=], - [$1], GCJ, [depcc="$GCJ" am_compiler_list='gcc3 gcc'], - [depcc="$$1" am_compiler_list=]) - -AC_CACHE_CHECK([dependency style of $depcc], - [am_cv_$1_dependencies_compiler_type], -[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then - # We make a subdir and do the tests there. Otherwise we can end up - # making bogus files that we don't know about and never remove. For - # instance it was reported that on HP-UX the gcc test will end up - # making a dummy file named `D' -- because `-MD' means `put the output - # in D'. - mkdir conftest.dir - # Copy depcomp to subdir because otherwise we won't find it if we're - # using a relative directory. - cp "$am_depcomp" conftest.dir - cd conftest.dir - # We will build objects and dependencies in a subdirectory because - # it helps to detect inapplicable dependency modes. For instance - # both Tru64's cc and ICC support -MD to output dependencies as a - # side effect of compilation, but ICC will put the dependencies in - # the current directory while Tru64 will put them in the object - # directory. - mkdir sub - - am_cv_$1_dependencies_compiler_type=none - if test "$am_compiler_list" = ""; then - am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` - fi - for depmode in $am_compiler_list; do - # Setup a source with many dependencies, because some compilers - # like to wrap large dependency lists on column 80 (with \), and - # we should not choose a depcomp mode which is confused by this. - # - # We need to recreate these files for each test, as the compiler may - # overwrite some of them when testing with obscure command lines. - # This happens at least with the AIX C compiler. - : > sub/conftest.c - for i in 1 2 3 4 5 6; do - echo '#include "conftst'$i'.h"' >> sub/conftest.c - # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with - # Solaris 8's {/usr,}/bin/sh. - touch sub/conftst$i.h - done - echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf - - case $depmode in - nosideeffect) - # after this tag, mechanisms are not by side-effect, so they'll - # only be used when explicitly requested - if test "x$enable_dependency_tracking" = xyes; then - continue - else - break - fi - ;; - none) break ;; - esac - # We check with `-c' and `-o' for the sake of the "dashmstdout" - # mode. It turns out that the SunPro C++ compiler does not properly - # handle `-M -o', and we need to detect this. - if depmode=$depmode \ - source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \ - depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ - $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \ - >/dev/null 2>conftest.err && - grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 && - ${MAKE-make} -s -f confmf > /dev/null 2>&1; then - # icc doesn't choke on unknown options, it will just issue warnings - # or remarks (even with -Werror). So we grep stderr for any message - # that says an option was ignored or not supported. 
- # When given -MP, icc 7.0 and 7.1 complain thusly: - # icc: Command line warning: ignoring option '-M'; no argument required - # The diagnosis changed in icc 8.0: - # icc: Command line remark: option '-MP' not supported - if (grep 'ignoring option' conftest.err || - grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else - am_cv_$1_dependencies_compiler_type=$depmode - break - fi - fi - done - - cd .. - rm -rf conftest.dir -else - am_cv_$1_dependencies_compiler_type=none -fi -]) -AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type]) -AM_CONDITIONAL([am__fastdep$1], [ - test "x$enable_dependency_tracking" != xno \ - && test "$am_cv_$1_dependencies_compiler_type" = gcc3]) -]) - - -# AM_SET_DEPDIR -# ------------- -# Choose a directory name for dependency files. -# This macro is AC_REQUIREd in _AM_DEPENDENCIES -AC_DEFUN([AM_SET_DEPDIR], -[AC_REQUIRE([AM_SET_LEADING_DOT])dnl -AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl -]) - - -# AM_DEP_TRACK -# ------------ -AC_DEFUN([AM_DEP_TRACK], -[AC_ARG_ENABLE(dependency-tracking, -[ --disable-dependency-tracking speeds up one-time build - --enable-dependency-tracking do not reject slow dependency extractors]) -if test "x$enable_dependency_tracking" != xno; then - am_depcomp="$ac_aux_dir/depcomp" - AMDEPBACKSLASH='\' -fi -AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) -AC_SUBST([AMDEPBACKSLASH])dnl -_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl -]) - -# Generate code to set up dependency tracking. -*- Autoconf -*- - -# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005 -# Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -#serial 3 - -# _AM_OUTPUT_DEPENDENCY_COMMANDS -# ------------------------------ -AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], -[for mf in $CONFIG_FILES; do - # Strip MF so we end up with the name of the file. - mf=`echo "$mf" | sed -e 's/:.*$//'` - # Check whether this is an Automake generated Makefile or not. - # We used to match only the files named `Makefile.in', but - # some people rename them; so instead we look at the file content. - # Grep'ing the first line is not enough: some people post-process - # each Makefile.in and add a new line on top of each file to say so. - # Grep'ing the whole file is not good either: AIX grep has a line - # limit of 2048, but all sed's we know have understand at least 4000. - if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then - dirpart=`AS_DIRNAME("$mf")` - else - continue - fi - # Extract the definition of DEPDIR, am__include, and am__quote - # from the Makefile without running `make'. - DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` - test -z "$DEPDIR" && continue - am__include=`sed -n 's/^am__include = //p' < "$mf"` - test -z "am__include" && continue - am__quote=`sed -n 's/^am__quote = //p' < "$mf"` - # When using ansi2knr, U may be empty or an underscore; expand it - U=`sed -n 's/^U = //p' < "$mf"` - # Find all dependency output files, they are included files with - # $(DEPDIR) in their names. We invoke sed twice because it is the - # simplest approach to changing $(DEPDIR) to its actual value in the - # expansion. - for file in `sed -n " - s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ - sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do - # Make sure the directory exists. 
- test -f "$dirpart/$file" && continue - fdir=`AS_DIRNAME(["$file"])` - AS_MKDIR_P([$dirpart/$fdir]) - # echo "creating $dirpart/$file" - echo '# dummy' > "$dirpart/$file" - done -done -])# _AM_OUTPUT_DEPENDENCY_COMMANDS - - -# AM_OUTPUT_DEPENDENCY_COMMANDS -# ----------------------------- -# This macro should only be invoked once -- use via AC_REQUIRE. -# -# This code is only required when automatic dependency tracking -# is enabled. FIXME. This creates each `.P' file that we will -# need in order to bootstrap the dependency handling code. -AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], -[AC_CONFIG_COMMANDS([depfiles], - [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS], - [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"]) -]) - -# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005 -# Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 8 - -# AM_CONFIG_HEADER is obsolete. It has been replaced by AC_CONFIG_HEADERS. -AU_DEFUN([AM_CONFIG_HEADER], [AC_CONFIG_HEADERS($@)]) - -# Do all the work for Automake. -*- Autoconf -*- - -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, -# 2005, 2006, 2008 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 13 - -# This macro actually does too much. Some checks are only needed if -# your package does certain things. But this isn't really a big deal. - -# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) -# AM_INIT_AUTOMAKE([OPTIONS]) -# ----------------------------------------------- -# The call with PACKAGE and VERSION arguments is the old style -# call (pre autoconf-2.50), which is being phased out. PACKAGE -# and VERSION should now be passed to AC_INIT and removed from -# the call to AM_INIT_AUTOMAKE. -# We support both call styles for the transition. After -# the next Automake release, Autoconf can make the AC_INIT -# arguments mandatory, and then we can depend on a new Autoconf -# release and drop the old call support. -AC_DEFUN([AM_INIT_AUTOMAKE], -[AC_PREREQ([2.60])dnl -dnl Autoconf wants to disallow AM_ names. We explicitly allow -dnl the ones we care about. -m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl -AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl -AC_REQUIRE([AC_PROG_INSTALL])dnl -if test "`cd $srcdir && pwd`" != "`pwd`"; then - # Use -I$(srcdir) only when $(srcdir) != ., so that make's output - # is not polluted with repeated "-I." - AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl - # test to see if srcdir already configured - if test -f $srcdir/config.status; then - AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) - fi -fi - -# test whether we have cygpath -if test -z "$CYGPATH_W"; then - if (cygpath --version) >/dev/null 2>/dev/null; then - CYGPATH_W='cygpath -w' - else - CYGPATH_W=echo - fi -fi -AC_SUBST([CYGPATH_W]) - -# Define the identity of the package. -dnl Distinguish between old-style and new-style calls. -m4_ifval([$2], -[m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl - AC_SUBST([PACKAGE], [$1])dnl - AC_SUBST([VERSION], [$2])], -[_AM_SET_OPTIONS([$1])dnl -dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. 
-m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,, - [m4_fatal([AC_INIT should be called with package and version arguments])])dnl - AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl - AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl - -_AM_IF_OPTION([no-define],, -[AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package]) - AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl - -# Some tools Automake needs. -AC_REQUIRE([AM_SANITY_CHECK])dnl -AC_REQUIRE([AC_ARG_PROGRAM])dnl -AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version}) -AM_MISSING_PROG(AUTOCONF, autoconf) -AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version}) -AM_MISSING_PROG(AUTOHEADER, autoheader) -AM_MISSING_PROG(MAKEINFO, makeinfo) -AM_PROG_INSTALL_SH -AM_PROG_INSTALL_STRIP -AC_REQUIRE([AM_PROG_MKDIR_P])dnl -# We need awk for the "check" target. The system "awk" is bad on -# some platforms. -AC_REQUIRE([AC_PROG_AWK])dnl -AC_REQUIRE([AC_PROG_MAKE_SET])dnl -AC_REQUIRE([AM_SET_LEADING_DOT])dnl -_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], - [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], - [_AM_PROG_TAR([v7])])]) -_AM_IF_OPTION([no-dependencies],, -[AC_PROVIDE_IFELSE([AC_PROG_CC], - [_AM_DEPENDENCIES(CC)], - [define([AC_PROG_CC], - defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl -AC_PROVIDE_IFELSE([AC_PROG_CXX], - [_AM_DEPENDENCIES(CXX)], - [define([AC_PROG_CXX], - defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl -AC_PROVIDE_IFELSE([AC_PROG_OBJC], - [_AM_DEPENDENCIES(OBJC)], - [define([AC_PROG_OBJC], - defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl -]) -]) - - -# When config.status generates a header, we must update the stamp-h file. -# This file resides in the same directory as the config header -# that is generated. The stamp files are numbered to have different names. - -# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the -# loop where config.status creates the headers, so we can generate -# our stamp files there. -AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], -[# Compute $1's index in $config_headers. -_am_arg=$1 -_am_stamp_count=1 -for _am_header in $config_headers :; do - case $_am_header in - $_am_arg | $_am_arg:* ) - break ;; - * ) - _am_stamp_count=`expr $_am_stamp_count + 1` ;; - esac -done -echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) - -# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_PROG_INSTALL_SH -# ------------------ -# Define $install_sh. -AC_DEFUN([AM_PROG_INSTALL_SH], -[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -install_sh=${install_sh-"\$(SHELL) $am_aux_dir/install-sh"} -AC_SUBST(install_sh)]) - -# Copyright (C) 2003, 2005 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 2 - -# Check whether the underlying file-system supports filenames -# with a leading dot. For instance MS-DOS doesn't. -AC_DEFUN([AM_SET_LEADING_DOT], -[rm -rf .tst 2>/dev/null -mkdir .tst 2>/dev/null -if test -d .tst; then - am__leading_dot=. -else - am__leading_dot=_ -fi -rmdir .tst 2>/dev/null -AC_SUBST([am__leading_dot])]) - -# Check to see how 'make' treats includes. 
-*- Autoconf -*- - -# Copyright (C) 2001, 2002, 2003, 2005 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 3 - -# AM_MAKE_INCLUDE() -# ----------------- -# Check to see how make treats includes. -AC_DEFUN([AM_MAKE_INCLUDE], -[am_make=${MAKE-make} -cat > confinc << 'END' -am__doit: - @echo done -.PHONY: am__doit -END -# If we don't find an include directive, just comment out the code. -AC_MSG_CHECKING([for style of include used by $am_make]) -am__include="#" -am__quote= -_am_result=none -# First try GNU make style include. -echo "include confinc" > confmf -# We grep out `Entering directory' and `Leaving directory' -# messages which can occur if `w' ends up in MAKEFLAGS. -# In particular we don't look at `^make:' because GNU make might -# be invoked under some other name (usually "gmake"), in which -# case it prints its new name instead of `make'. -if test "`$am_make -s -f confmf 2> /dev/null | grep -v 'ing directory'`" = "done"; then - am__include=include - am__quote= - _am_result=GNU -fi -# Now try BSD make style include. -if test "$am__include" = "#"; then - echo '.include "confinc"' > confmf - if test "`$am_make -s -f confmf 2> /dev/null`" = "done"; then - am__include=.include - am__quote="\"" - _am_result=BSD - fi -fi -AC_SUBST([am__include]) -AC_SUBST([am__quote]) -AC_MSG_RESULT([$_am_result]) -rm -f confinc confmf -]) - -# Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- - -# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005 -# Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 5 - -# AM_MISSING_PROG(NAME, PROGRAM) -# ------------------------------ -AC_DEFUN([AM_MISSING_PROG], -[AC_REQUIRE([AM_MISSING_HAS_RUN]) -$1=${$1-"${am_missing_run}$2"} -AC_SUBST($1)]) - - -# AM_MISSING_HAS_RUN -# ------------------ -# Define MISSING if not defined so far and test if it supports --run. -# If it does, set am_missing_run to use it, otherwise, to nothing. -AC_DEFUN([AM_MISSING_HAS_RUN], -[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -AC_REQUIRE_AUX_FILE([missing])dnl -test x"${MISSING+set}" = xset || MISSING="\${SHELL} $am_aux_dir/missing" -# Use eval to expand $SHELL -if eval "$MISSING --run true"; then - am_missing_run="$MISSING --run " -else - am_missing_run= - AC_MSG_WARN([`missing' script is too old or missing]) -fi -]) - -# Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_PROG_MKDIR_P -# --------------- -# Check for `mkdir -p'. -AC_DEFUN([AM_PROG_MKDIR_P], -[AC_PREREQ([2.60])dnl -AC_REQUIRE([AC_PROG_MKDIR_P])dnl -dnl Automake 1.8 to 1.9.6 used to define mkdir_p. We now use MKDIR_P, -dnl while keeping a definition of mkdir_p for backward compatibility. -dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile. -dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of -dnl Makefile.ins that do not define MKDIR_P, so we do our own -dnl adjustment using top_builddir (which is defined more often than -dnl MKDIR_P). 
-AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl -case $mkdir_p in - [[\\/$]]* | ?:[[\\/]]*) ;; - */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; -esac -]) - -# Helper functions for option handling. -*- Autoconf -*- - -# Copyright (C) 2001, 2002, 2003, 2005 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 3 - -# _AM_MANGLE_OPTION(NAME) -# ----------------------- -AC_DEFUN([_AM_MANGLE_OPTION], -[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) - -# _AM_SET_OPTION(NAME) -# ------------------------------ -# Set option NAME. Presently that only means defining a flag for this option. -AC_DEFUN([_AM_SET_OPTION], -[m4_define(_AM_MANGLE_OPTION([$1]), 1)]) - -# _AM_SET_OPTIONS(OPTIONS) -# ---------------------------------- -# OPTIONS is a space-separated list of Automake options. -AC_DEFUN([_AM_SET_OPTIONS], -[AC_FOREACH([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) - -# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) -# ------------------------------------------- -# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. -AC_DEFUN([_AM_IF_OPTION], -[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) - -# Check to make sure that the build environment is sane. -*- Autoconf -*- - -# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005 -# Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 4 - -# AM_SANITY_CHECK -# --------------- -AC_DEFUN([AM_SANITY_CHECK], -[AC_MSG_CHECKING([whether build environment is sane]) -# Just in case -sleep 1 -echo timestamp > conftest.file -# Do `set' in a subshell so we don't clobber the current shell's -# arguments. Must try -L first in case configure is actually a -# symlink; some systems play weird games with the mod time of symlinks -# (eg FreeBSD returns the mod time of the symlink's containing -# directory). -if ( - set X `ls -Lt $srcdir/configure conftest.file 2> /dev/null` - if test "$[*]" = "X"; then - # -L didn't work. - set X `ls -t $srcdir/configure conftest.file` - fi - rm -f conftest.file - if test "$[*]" != "X $srcdir/configure conftest.file" \ - && test "$[*]" != "X conftest.file $srcdir/configure"; then - - # If neither matched, then we have a broken ls. This can happen - # if, for instance, CONFIG_SHELL is bash and it inherits a - # broken ls alias from the environment. This has actually - # happened. Such a system could not be considered "sane". - AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken -alias in your environment]) - fi - - test "$[2]" = conftest.file - ) -then - # Ok. - : -else - AC_MSG_ERROR([newly created file is older than distributed files! -Check your system clock]) -fi -AC_MSG_RESULT(yes)]) - -# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# AM_PROG_INSTALL_STRIP -# --------------------- -# One issue with vendor `install' (even GNU) is that you can't -# specify the program used to strip binaries. This is especially -# annoying in cross-compiling environments, where the build's strip -# is unlikely to handle the host's binaries. 
-# Fortunately install-sh will honor a STRIPPROG variable, so we -# always use install-sh in `make install-strip', and initialize -# STRIPPROG with the value of the STRIP variable (set by the user). -AC_DEFUN([AM_PROG_INSTALL_STRIP], -[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl -# Installed binaries are usually stripped using `strip' when the user -# run `make install-strip'. However `strip' might not be the right -# tool to use in cross-compilation environments, therefore Automake -# will honor the `STRIP' environment variable to overrule this program. -dnl Don't test for $cross_compiling = yes, because it might be `maybe'. -if test "$cross_compiling" != no; then - AC_CHECK_TOOL([STRIP], [strip], :) -fi -INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" -AC_SUBST([INSTALL_STRIP_PROGRAM])]) - -# Copyright (C) 2006 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# _AM_SUBST_NOTMAKE(VARIABLE) -# --------------------------- -# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. -# This macro is traced by Automake. -AC_DEFUN([_AM_SUBST_NOTMAKE]) - -# Check how to create a tarball. -*- Autoconf -*- - -# Copyright (C) 2004, 2005 Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# serial 2 - -# _AM_PROG_TAR(FORMAT) -# -------------------- -# Check how to create a tarball in format FORMAT. -# FORMAT should be one of `v7', `ustar', or `pax'. -# -# Substitute a variable $(am__tar) that is a command -# writing to stdout a FORMAT-tarball containing the directory -# $tardir. -# tardir=directory && $(am__tar) > result.tar -# -# Substitute a variable $(am__untar) that extract such -# a tarball read from stdin. -# $(am__untar) < result.tar -AC_DEFUN([_AM_PROG_TAR], -[# Always define AMTAR for backward compatibility. -AM_MISSING_PROG([AMTAR], [tar]) -m4_if([$1], [v7], - [am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'], - [m4_case([$1], [ustar],, [pax],, - [m4_fatal([Unknown tar format])]) -AC_MSG_CHECKING([how to create a $1 tar archive]) -# Loop over all known methods to create a tar archive until one works. -_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' -_am_tools=${am_cv_prog_tar_$1-$_am_tools} -# Do not fold the above two line into one, because Tru64 sh and -# Solaris sh will not grok spaces in the rhs of `-'. -for _am_tool in $_am_tools -do - case $_am_tool in - gnutar) - for _am_tar in tar gnutar gtar; - do - AM_RUN_LOG([$_am_tar --version]) && break - done - am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' - am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' - am__untar="$_am_tar -xf -" - ;; - plaintar) - # Must skip GNU tar: if it does not support --format= it doesn't create - # ustar tarball either. 
- (tar --version) >/dev/null 2>&1 && continue - am__tar='tar chf - "$$tardir"' - am__tar_='tar chf - "$tardir"' - am__untar='tar xf -' - ;; - pax) - am__tar='pax -L -x $1 -w "$$tardir"' - am__tar_='pax -L -x $1 -w "$tardir"' - am__untar='pax -r' - ;; - cpio) - am__tar='find "$$tardir" -print | cpio -o -H $1 -L' - am__tar_='find "$tardir" -print | cpio -o -H $1 -L' - am__untar='cpio -i -H $1 -d' - ;; - none) - am__tar=false - am__tar_=false - am__untar=false - ;; - esac - - # If the value was cached, stop now. We just wanted to have am__tar - # and am__untar set. - test -n "${am_cv_prog_tar_$1}" && break - - # tar/untar a dummy directory, and stop if the command works - rm -rf conftest.dir - mkdir conftest.dir - echo GrepMe > conftest.dir/file - AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) - rm -rf conftest.dir - if test -s conftest.tar; then - AM_RUN_LOG([$am__untar <conftest.tar]) - grep GrepMe conftest.dir/file >/dev/null 2>&1 && break - fi -done -rm -rf conftest.dir - -AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) -AC_MSG_RESULT([$am_cv_prog_tar_$1])]) -AC_SUBST([am__tar]) -AC_SUBST([am__untar]) -]) # _AM_PROG_TAR -
diff --git a/third_party/libevent/android/event-config.h b/third_party/libevent/android/event-config.h
index 7745519..6563cb78 100644
--- a/third_party/libevent/android/event-config.h
+++ b/third_party/libevent/android/event-config.h
@@ -209,6 +209,13 @@
 /* Define if kqueue works correctly with pipes */
 /* #undef _EVENT_HAVE_WORKING_KQUEUE */
 
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+ */
+#define _EVENT_LT_OBJDIR ".libs/"
+
+/* Numeric representation of the version */
+#define _EVENT_NUMERIC_VERSION 0x01040f00
+
 /* Name of package */
 #define _EVENT_PACKAGE "libevent"
 
@@ -224,6 +231,9 @@
 /* Define to the one symbol short name of this package. */
 #define _EVENT_PACKAGE_TARNAME ""
 
+/* Define to the home page for this package. */
+#define _EVENT_PACKAGE_URL ""
+
 /* Define to the version of this package. */
 #define _EVENT_PACKAGE_VERSION ""
 
@@ -246,7 +256,7 @@
 #define _EVENT_TIME_WITH_SYS_TIME 1
 
 /* Version number of package */
-#define _EVENT_VERSION "1.4.13-stable"
+#define _EVENT_VERSION "1.4.15"
 
 /* Define to appropriate substitue if compiler doesnt have __func__ */
 /* #undef _EVENT___func__ */
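A side note for readers of the roll: the new _EVENT_NUMERIC_VERSION packs the release number one byte per component, so 0x01040f00 is the numeric form of the "1.4.15" string updated at the end of the hunk. A minimal stand-alone sketch (not part of the patch) that unpacks it:

/* Editorial sketch, not part of the patch: unpack the new
 * _EVENT_NUMERIC_VERSION value, one byte per component. */
#include <stdio.h>

#define _EVENT_NUMERIC_VERSION 0x01040f00

int main(void)
{
	unsigned v = _EVENT_NUMERIC_VERSION;

	printf("libevent %u.%u.%u\n",
	    (v >> 24) & 0xff,   /* major: 0x01 -> 1  */
	    (v >> 16) & 0xff,   /* minor: 0x04 -> 4  */
	    (v >> 8)  & 0xff);  /* patch: 0x0f -> 15 */
	return 0;
}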
diff --git a/third_party/libevent/autogen.sh b/third_party/libevent/autogen.sh
old mode 100644
new mode 100755
index 6d4275a..099cb305
--- a/third_party/libevent/autogen.sh
+++ b/third_party/libevent/autogen.sh
@@ -1,4 +1,8 @@
 #!/bin/sh
+if [ -x "`which autoreconf 2>/dev/null`" ] ; then
+	exec autoreconf -ivf
+fi
+
 LIBTOOLIZE=libtoolize
 SYSNAME=`uname`
 if [ "x$SYSNAME" = "xDarwin" ] ; then
diff --git a/third_party/libevent/buffer.c b/third_party/libevent/buffer.c
index dfaca5d..ebf35c9 100644
--- a/third_party/libevent/buffer.c
+++ b/third_party/libevent/buffer.c
@@ -64,6 +64,7 @@ #include "event.h" #include "config.h" #include "evutil.h" +#include "./log.h" struct evbuffer * evbuffer_new(void) @@ -143,7 +144,8 @@ va_list aq; /* make sure that at least some space is available */ - evbuffer_expand(buf, 64); + if (evbuffer_expand(buf, 64) < 0) + return (-1); for (;;) { size_t used = buf->misalign + buf->off; buffer = (char *)buf->buffer + buf->off; @@ -248,6 +250,92 @@ return (line); } + +char * +evbuffer_readln(struct evbuffer *buffer, size_t *n_read_out, + enum evbuffer_eol_style eol_style) +{ + u_char *data = EVBUFFER_DATA(buffer); + u_char *start_of_eol, *end_of_eol; + size_t len = EVBUFFER_LENGTH(buffer); + char *line; + unsigned int i, n_to_copy, n_to_drain; + + if (n_read_out) + *n_read_out = 0; + + /* depending on eol_style, set start_of_eol to the first character + * in the newline, and end_of_eol to one after the last character. */ + switch (eol_style) { + case EVBUFFER_EOL_ANY: + for (i = 0; i < len; i++) { + if (data[i] == '\r' || data[i] == '\n') + break; + } + if (i == len) + return (NULL); + start_of_eol = data+i; + ++i; + for ( ; i < len; i++) { + if (data[i] != '\r' && data[i] != '\n') + break; + } + end_of_eol = data+i; + break; + case EVBUFFER_EOL_CRLF: + end_of_eol = memchr(data, '\n', len); + if (!end_of_eol) + return (NULL); + if (end_of_eol > data && *(end_of_eol-1) == '\r') + start_of_eol = end_of_eol - 1; + else + start_of_eol = end_of_eol; + end_of_eol++; /*point to one after the LF. */ + break; + case EVBUFFER_EOL_CRLF_STRICT: { + u_char *cp = data; + while ((cp = memchr(cp, '\r', len-(cp-data)))) { + if (cp < data+len-1 && *(cp+1) == '\n') + break; + if (++cp >= data+len) { + cp = NULL; + break; + } + } + if (!cp) + return (NULL); + start_of_eol = cp; + end_of_eol = cp+2; + break; + } + case EVBUFFER_EOL_LF: + start_of_eol = memchr(data, '\n', len); + if (!start_of_eol) + return (NULL); + end_of_eol = start_of_eol + 1; + break; + default: + return (NULL); + } + + n_to_copy = start_of_eol - data; + n_to_drain = end_of_eol - data; + + if ((line = malloc(n_to_copy+1)) == NULL) { + event_warn("%s: out of memory\n", __func__); + return (NULL); + } + + memcpy(line, data, n_to_copy); + line[n_to_copy] = '\0'; + + evbuffer_drain(buffer, n_to_drain); + if (n_read_out) + *n_read_out = (size_t)n_to_copy; + + return (line); +} + /* Adds data to an event buffer */ static void @@ -258,31 +346,47 @@ buf->misalign = 0; } +#ifndef SIZE_MAX +#define SIZE_MAX ((size_t)-1) +#endif + /* Expands the available space in the event buffer to at least datlen */ int evbuffer_expand(struct evbuffer *buf, size_t datlen) { - size_t need = buf->misalign + buf->off + datlen; + size_t used = buf->misalign + buf->off; + + assert(buf->totallen >= used); /* If we can fit all the data, then we don't have to do anything */ - if (buf->totallen >= need) + if (buf->totallen - used >= datlen) return (0); + /* If we would need to overflow to fit this much data, we can't + * do anything. */ + if (datlen > SIZE_MAX - buf->off) + return (-1); /* * If the misalignment fulfills our data needs, we just force an * alignment to happen. Afterwards, we have enough space. 
*/ - if (buf->misalign >= datlen) { + if (buf->totallen - buf->off >= datlen) { evbuffer_align(buf); } else { void *newbuf; size_t length = buf->totallen; + size_t need = buf->off + datlen; if (length < 256) length = 256; - while (length < need) - length <<= 1; + if (need < SIZE_MAX / 2) { + while (length < need) { + length <<= 1; + } + } else { + length = need; + } if (buf->orig_buffer != buf->buffer) evbuffer_align(buf); @@ -299,10 +403,10 @@ int evbuffer_add(struct evbuffer *buf, const void *data, size_t datlen) { - size_t need = buf->misalign + buf->off + datlen; + size_t used = buf->misalign + buf->off; size_t oldoff = buf->off; - if (buf->totallen < need) { + if (buf->totallen - used < datlen) { if (evbuffer_expand(buf, datlen) == -1) return (-1); }
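Two things happen in the buffer.c hunk above: the expansion paths now test `buf->totallen - used >= datlen`, so the size arithmetic can no longer wrap past SIZE_MAX, and a new evbuffer_readln() scans the buffered bytes for the requested end-of-line style and hands back one malloc'ed line at a time (the caller frees it). Below is a minimal usage sketch, assuming the EVBUFFER_EOL_* enum and the evbuffer_readln() prototype are exported from this tree's event.h as part of the same update:

/* Editorial usage sketch for the evbuffer_readln() added above; build
 * against this libevent. EVBUFFER_EOL_CRLF is assumed to be declared
 * in event.h alongside the new prototype. */
#include <stdio.h>
#include <stdlib.h>
#include "event.h"

int main(void)
{
	struct evbuffer *buf = evbuffer_new();
	size_t n;
	char *line;

	if (buf == NULL)
		return 1;

	/* Two complete CRLF-terminated lines plus an unterminated tail. */
	evbuffer_add(buf, "first\r\nsecond\r\npartial", 22);

	/* Each call returns one malloc'ed line without its terminator;
	 * "partial" stays buffered because no EOL has arrived for it yet. */
	while ((line = evbuffer_readln(buf, &n, EVBUFFER_EOL_CRLF)) != NULL) {
		printf("%zu bytes: %s\n", n, line);
		free(line);
	}

	evbuffer_free(buf);
	return 0;
}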
diff --git a/third_party/libevent/chromium.patch b/third_party/libevent/chromium.patch
index 08e0122..5771a17 100644
--- a/third_party/libevent/chromium.patch
+++ b/third_party/libevent/chromium.patch
@@ -1,5 +1,17 @@ +diff --git a/third_party/libevent/buffer.c b/third_party/libevent/buffer.c +index 64324bb..ebf35c9 100644 +--- a/third_party/libevent/buffer.c ++++ b/third_party/libevent/buffer.c +@@ -356,7 +356,6 @@ int + evbuffer_expand(struct evbuffer *buf, size_t datlen) + { + size_t used = buf->misalign + buf->off; +- size_t need; + + assert(buf->totallen >= used); + diff --git a/third_party/libevent/evdns.c b/third_party/libevent/evdns.c -index f07ecc9..da6ea19 100644 +index fa23163..f1c70d0 100644 --- a/third_party/libevent/evdns.c +++ b/third_party/libevent/evdns.c @@ -134,7 +134,7 @@ @@ -8,7 +20,7 @@ #endif -#include <event.h> +#include "event.h" - + #define u64 ev_uint64_t #define u32 ev_uint32_t diff --git a/third_party/libevent/evdns.h b/third_party/libevent/evdns.h @@ -17,74 +29,15 @@ +++ b/third_party/libevent/evdns.h @@ -165,7 +165,7 @@ extern "C" { #endif - + /* For integer types. */ -#include <evutil.h> +#include "evutil.h" - + /** Error codes 0-5 are as described in RFC 1035. */ #define DNS_ERR_NONE 0 -diff --git a/third_party/libevent/event-config.h b/third_party/libevent/event-config.h -new file mode 100644 -index 0000000..78a4727 ---- /dev/null -+++ b/third_party/libevent/event-config.h -@@ -0,0 +1,16 @@ -+// Copyright (c) 2009 The Chromium Authors. All rights reserved. -+// Use of this source code is governed by a BSD-style license that can be -+// found in the LICENSE file. -+ -+// This file is Chromium-specific, and brings in the appropriate -+// event-config.h depending on your platform. -+ -+#if defined(__APPLE__) -+#include "mac/event-config.h" -+#elif defined(__linux__) -+#include "linux/event-config.h" -+#elif defined(__FreeBSD__) -+#include "freebsd/event-config.h" -+#else -+#error generate event-config.h for your platform -+#endif -diff --git a/third_party/libevent/event.h b/third_party/libevent/event.h -index cfa0fc3..72e9b8b 100644 ---- a/third_party/libevent/event.h -+++ b/third_party/libevent/event.h -@@ -159,7 +159,7 @@ - extern "C" { - #endif - --#include <event-config.h> -+#include "event-config.h" - #ifdef _EVENT_HAVE_SYS_TYPES_H - #include <sys/types.h> - #endif -@@ -172,7 +172,7 @@ extern "C" { - #include <stdarg.h> - - /* For int types. 
*/ --#include <evutil.h> -+#include "evutil.h" - - #ifdef WIN32 - #define WIN32_LEAN_AND_MEAN -diff --git a/third_party/libevent/evutil.h b/third_party/libevent/evutil.h -index dcb0013..8b664b9 100644 ---- a/third_party/libevent/evutil.h -+++ b/third_party/libevent/evutil.h -@@ -38,7 +38,7 @@ - extern "C" { - #endif - --#include <event-config.h> -+#include "event-config.h" - #ifdef _EVENT_HAVE_SYS_TIME_H - #include <sys/time.h> - #endif -diff --git a/third_party/libevent/README.chromium b/third_party/libevent/README.chromium -index 9969566..7e5f8ba 100644 diff --git a/third_party/libevent/event.c b/third_party/libevent/event.c -index 1253352..8b6cae5 100644 +index da6cd42..36b1c51 100644 --- a/third_party/libevent/event.c +++ b/third_party/libevent/event.c @@ -107,7 +107,7 @@ static const struct eventop *eventops[] = { @@ -94,9 +47,9 @@ -static int use_monotonic; +static int use_monotonic = 1; - /* Prototypes */ - static void event_queue_insert(struct event_base *, struct event *, int); -@@ -120,17 +120,6 @@ static int timeout_next(struct event_base *, struct timeval **); + /* Handle signals - This is a deprecated interface */ + int (*event_sigcb)(void); /* Signal callback when gotsig is set */ +@@ -124,17 +124,6 @@ static int timeout_next(struct event_base *, struct timeval **); static void timeout_process(struct event_base *); static void timeout_correct(struct event_base *, struct timeval *); @@ -114,7 +67,7 @@ static int gettime(struct event_base *base, struct timeval *tp) { -@@ -140,18 +129,18 @@ gettime(struct event_base *base, struct timeval *tp) +@@ -144,18 +133,18 @@ gettime(struct event_base *base, struct timeval *tp) } #if defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_MONOTONIC) @@ -138,11 +91,100 @@ return (evutil_gettimeofday(tp, NULL)); } -@@ -175,7 +164,6 @@ event_base_new(void) - if ((base = calloc(1, sizeof(struct event_base))) == NULL) - event_err(1, "%s: calloc", __func__); +@@ -182,7 +171,6 @@ event_base_new(void) + event_sigcb = NULL; + event_gotsig = 0; - detect_monotonic(); gettime(base, &base->event_tv); min_heap_ctor(&base->timeheap); +@@ -398,12 +386,9 @@ event_process_active(struct event_base *base) + ncalls--; + ev->ev_ncalls = ncalls; + (*ev->ev_callback)((int)ev->ev_fd, ev->ev_res, ev->ev_arg); +- if (event_gotsig || base->event_break) { +- ev->ev_pncalls = NULL; ++ if (event_gotsig || base->event_break) + return; +- } + } +- ev->ev_pncalls = NULL; + } + } + +@@ -808,8 +793,6 @@ int + event_del(struct event *ev) + { + struct event_base *base; +- const struct eventop *evsel; +- void *evbase; + + event_debug(("event_del: %p, callback %p", + ev, ev->ev_callback)); +@@ -819,8 +802,6 @@ event_del(struct event *ev) + return (-1); + + base = ev->ev_base; +- evsel = base->evsel; +- evbase = base->evbase; + + assert(!(ev->ev_flags & ~EVLIST_ALL)); + +@@ -838,7 +819,7 @@ event_del(struct event *ev) + + if (ev->ev_flags & EVLIST_INSERTED) { + event_queue_remove(base, ev, EVLIST_INSERTED); +- return (evsel->del(evbase, ev)); ++ return (base->evsel->del(base->evbase, ev)); + } + + return (0); +diff --git a/third_party/libevent/event.h b/third_party/libevent/event.h +index d1f5d9e..f0887b9 100644 +--- a/third_party/libevent/event.h ++++ b/third_party/libevent/event.h +@@ -159,7 +159,7 @@ + extern "C" { + #endif + +-#include <event-config.h> ++#include "event-config.h" + #ifdef _EVENT_HAVE_SYS_TYPES_H + #include <sys/types.h> + #endif +@@ -172,7 +172,7 @@ extern "C" { + #include <stdarg.h> + + /* For int types. 
*/ +-#include <evutil.h> ++#include "evutil.h" + + #ifdef WIN32 + #define WIN32_LEAN_AND_MEAN +diff --git a/third_party/libevent/evhttp.h b/third_party/libevent/evhttp.h +index cba8be1..48c1d91 100644 +--- a/third_party/libevent/evhttp.h ++++ b/third_party/libevent/evhttp.h +@@ -27,7 +27,7 @@ + #ifndef _EVHTTP_H_ + #define _EVHTTP_H_ + +-#include <event.h> ++#include "event.h" + + #ifdef __cplusplus + extern "C" { +diff --git a/third_party/libevent/evutil.h b/third_party/libevent/evutil.h +index dcb0013..8b664b9 100644 +--- a/third_party/libevent/evutil.h ++++ b/third_party/libevent/evutil.h +@@ -38,7 +38,7 @@ + extern "C" { + #endif + +-#include <event-config.h> ++#include "event-config.h" + #ifdef _EVENT_HAVE_SYS_TIME_H + #include <sys/time.h> + #endif
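The event.c portion of the regenerated chromium.patch drops the old detect_monotonic() probe: use_monotonic is hard-coded to 1, and gettime() falls back to evutil_gettimeofday() only when clock_gettime(CLOCK_MONOTONIC) fails at runtime. A rough stand-alone sketch of that fallback shape, using a hypothetical helper name (get_time) rather than the patched libevent internals:

/* Editorial sketch of the clock fallback the patched gettime() uses:
 * prefer CLOCK_MONOTONIC, drop back to gettimeofday() only if the
 * call fails at runtime. get_time() is a hypothetical stand-in, not
 * the libevent function; older glibc may need -lrt for clock_gettime. */
#include <stdio.h>
#include <time.h>
#include <sys/time.h>

static int get_time(struct timeval *tp)
{
#if defined(CLOCK_MONOTONIC)
	struct timespec ts;

	if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) {
		tp->tv_sec = ts.tv_sec;
		tp->tv_usec = ts.tv_nsec / 1000;
		return 0;
	}
#endif
	return gettimeofday(tp, NULL);
}

int main(void)
{
	struct timeval tv;

	if (get_time(&tv) == 0)
		printf("%ld.%06ld\n", (long)tv.tv_sec, (long)tv.tv_usec);
	return 0;
}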
diff --git a/third_party/libevent/compat/sys/_time.h b/third_party/libevent/compat/sys/_time.h
deleted file mode 100644
index 8cabb0d..0000000
--- a/third_party/libevent/compat/sys/_time.h
+++ /dev/null
@@ -1,163 +0,0 @@ -/* $OpenBSD: time.h,v 1.11 2000/10/10 13:36:48 itojun Exp $ */ -/* $NetBSD: time.h,v 1.18 1996/04/23 10:29:33 mycroft Exp $ */ - -/* - * Copyright (c) 1982, 1986, 1993 - * The Regents of the University of California. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the University nor the names of its contributors - * may be used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - * - * @(#)time.h 8.2 (Berkeley) 7/10/94 - */ - -#ifndef _SYS_TIME_H_ -#define _SYS_TIME_H_ - -#include <sys/types.h> - -/* - * Structure returned by gettimeofday(2) system call, - * and used in other calls. - */ -struct timeval { - long tv_sec; /* seconds */ - long tv_usec; /* and microseconds */ -}; - -/* - * Structure defined by POSIX.1b to be like a timeval. - */ -struct timespec { - time_t tv_sec; /* seconds */ - long tv_nsec; /* and nanoseconds */ -}; - -#define TIMEVAL_TO_TIMESPEC(tv, ts) { \ - (ts)->tv_sec = (tv)->tv_sec; \ - (ts)->tv_nsec = (tv)->tv_usec * 1000; \ -} -#define TIMESPEC_TO_TIMEVAL(tv, ts) { \ - (tv)->tv_sec = (ts)->tv_sec; \ - (tv)->tv_usec = (ts)->tv_nsec / 1000; \ -} - -struct timezone { - int tz_minuteswest; /* minutes west of Greenwich */ - int tz_dsttime; /* type of dst correction */ -}; -#define DST_NONE 0 /* not on dst */ -#define DST_USA 1 /* USA style dst */ -#define DST_AUST 2 /* Australian style dst */ -#define DST_WET 3 /* Western European dst */ -#define DST_MET 4 /* Middle European dst */ -#define DST_EET 5 /* Eastern European dst */ -#define DST_CAN 6 /* Canada */ - -/* Operations on timevals. */ -#define timerclear(tvp) (tvp)->tv_sec = (tvp)->tv_usec = 0 -#define timerisset(tvp) ((tvp)->tv_sec || (tvp)->tv_usec) -#define timercmp(tvp, uvp, cmp) \ - (((tvp)->tv_sec == (uvp)->tv_sec) ? 
\ - ((tvp)->tv_usec cmp (uvp)->tv_usec) : \ - ((tvp)->tv_sec cmp (uvp)->tv_sec)) -#define timeradd(tvp, uvp, vvp) \ - do { \ - (vvp)->tv_sec = (tvp)->tv_sec + (uvp)->tv_sec; \ - (vvp)->tv_usec = (tvp)->tv_usec + (uvp)->tv_usec; \ - if ((vvp)->tv_usec >= 1000000) { \ - (vvp)->tv_sec++; \ - (vvp)->tv_usec -= 1000000; \ - } \ - } while (0) -#define timersub(tvp, uvp, vvp) \ - do { \ - (vvp)->tv_sec = (tvp)->tv_sec - (uvp)->tv_sec; \ - (vvp)->tv_usec = (tvp)->tv_usec - (uvp)->tv_usec; \ - if ((vvp)->tv_usec < 0) { \ - (vvp)->tv_sec--; \ - (vvp)->tv_usec += 1000000; \ - } \ - } while (0) - -/* Operations on timespecs. */ -#define timespecclear(tsp) (tsp)->tv_sec = (tsp)->tv_nsec = 0 -#define timespecisset(tsp) ((tsp)->tv_sec || (tsp)->tv_nsec) -#define timespeccmp(tsp, usp, cmp) \ - (((tsp)->tv_sec == (usp)->tv_sec) ? \ - ((tsp)->tv_nsec cmp (usp)->tv_nsec) : \ - ((tsp)->tv_sec cmp (usp)->tv_sec)) -#define timespecadd(tsp, usp, vsp) \ - do { \ - (vsp)->tv_sec = (tsp)->tv_sec + (usp)->tv_sec; \ - (vsp)->tv_nsec = (tsp)->tv_nsec + (usp)->tv_nsec; \ - if ((vsp)->tv_nsec >= 1000000000L) { \ - (vsp)->tv_sec++; \ - (vsp)->tv_nsec -= 1000000000L; \ - } \ - } while (0) -#define timespecsub(tsp, usp, vsp) \ - do { \ - (vsp)->tv_sec = (tsp)->tv_sec - (usp)->tv_sec; \ - (vsp)->tv_nsec = (tsp)->tv_nsec - (usp)->tv_nsec; \ - if ((vsp)->tv_nsec < 0) { \ - (vsp)->tv_sec--; \ - (vsp)->tv_nsec += 1000000000L; \ - } \ - } while (0) - -/* - * Names of the interval timers, and structure - * defining a timer setting. - */ -#define ITIMER_REAL 0 -#define ITIMER_VIRTUAL 1 -#define ITIMER_PROF 2 - -struct itimerval { - struct timeval it_interval; /* timer interval */ - struct timeval it_value; /* current value */ -}; - -/* - * Getkerninfo clock information structure - */ -struct clockinfo { - int hz; /* clock frequency */ - int tick; /* micro-seconds per hz tick */ - int tickadj; /* clock skew rate for adjtime() */ - int stathz; /* statistics clock frequency */ - int profhz; /* profiling clock frequency */ -}; - -#define CLOCK_REALTIME 0 -#define CLOCK_VIRTUAL 1 -#define CLOCK_PROF 2 - -#define TIMER_RELTIME 0x0 /* relative timer */ -#define TIMER_ABSTIME 0x1 /* absolute timer */ - -/* --- stuff got cut here - niels --- */ - -#endif /* !_SYS_TIME_H_ */
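The deleted compat/sys/_time.h mostly mirrored the timeval/timespec arithmetic macros (timeradd, timersub, timespecadd, ...) that the platforms this tree targets presumably already get from <sys/time.h>, which is why it can go away. The only subtle part is the carry/borrow on the sub-second field; a small sketch of that borrow step, written as a plain function with a hypothetical name (tv_sub):

/* Editorial sketch of the borrow step behind the deleted timersub()
 * macro; tv_sub() is a hypothetical name, not a libevent API. */
#include <stdio.h>
#include <sys/time.h>

static struct timeval tv_sub(struct timeval a, struct timeval b)
{
	struct timeval out;

	out.tv_sec  = a.tv_sec  - b.tv_sec;
	out.tv_usec = a.tv_usec - b.tv_usec;
	if (out.tv_usec < 0) {          /* borrow one full second */
		out.tv_sec--;
		out.tv_usec += 1000000;
	}
	return out;
}

int main(void)
{
	struct timeval start = { .tv_sec = 10, .tv_usec = 900000 }; /* 10.9 s */
	struct timeval end   = { .tv_sec = 12, .tv_usec = 100000 }; /* 12.1 s */
	struct timeval diff  = tv_sub(end, start);                  /* -> 1.2 s */

	printf("%ld.%06ld\n", (long)diff.tv_sec, (long)diff.tv_usec);
	return 0;
}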
diff --git a/third_party/libevent/config.guess b/third_party/libevent/config.guess
deleted file mode 100644
index f32079a..0000000
--- a/third_party/libevent/config.guess
+++ /dev/null
@@ -1,1526 +0,0 @@ -#! /bin/sh -# Attempt to guess a canonical system name. -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, -# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 -# Free Software Foundation, Inc. - -timestamp='2008-01-23' - -# This file is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA -# 02110-1301, USA. -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - - -# Originally written by Per Bothner <per@bothner.com>. -# Please send patches to <config-patches@gnu.org>. Submit a context -# diff and a properly formatted ChangeLog entry. -# -# This script attempts to guess a canonical system name similar to -# config.sub. If it succeeds, it prints the system name on stdout, and -# exits with 0. Otherwise, it exits with 1. -# -# The plan is that this can be called by configure scripts if you -# don't specify an explicit build system type. - -me=`echo "$0" | sed -e 's,.*/,,'` - -usage="\ -Usage: $0 [OPTION] - -Output the configuration name of the system \`$me' is run on. - -Operation modes: - -h, --help print this help, then exit - -t, --time-stamp print date of last modification, then exit - -v, --version print version number, then exit - -Report bugs and patches to <config-patches@gnu.org>." - -version="\ -GNU config.guess ($timestamp) - -Originally written by Per Bothner. -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, -2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. - -This is free software; see the source for copying conditions. There is NO -warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - -help=" -Try \`$me --help' for more information." - -# Parse command line -while test $# -gt 0 ; do - case $1 in - --time-stamp | --time* | -t ) - echo "$timestamp" ; exit ;; - --version | -v ) - echo "$version" ; exit ;; - --help | --h* | -h ) - echo "$usage"; exit ;; - -- ) # Stop option processing - shift; break ;; - - ) # Use stdin as input. - break ;; - -* ) - echo "$me: invalid option $1$help" >&2 - exit 1 ;; - * ) - break ;; - esac -done - -if test $# != 0; then - echo "$me: too many arguments$help" >&2 - exit 1 -fi - -trap 'exit 1' 1 2 15 - -# CC_FOR_BUILD -- compiler used by this script. Note that the use of a -# compiler to aid in system detection is discouraged as it requires -# temporary files to be created and, as you can see below, it is a -# headache to deal with in a portable fashion. - -# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still -# use `HOST_CC' if defined, but it is deprecated. - -# Portable tmp directory creation inspired by the Autoconf team. 
- -set_cc_for_build=' -trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; -trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; -: ${TMPDIR=/tmp} ; - { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || - { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || - { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || - { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; -dummy=$tmp/dummy ; -tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; -case $CC_FOR_BUILD,$HOST_CC,$CC in - ,,) echo "int x;" > $dummy.c ; - for c in cc gcc c89 c99 ; do - if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then - CC_FOR_BUILD="$c"; break ; - fi ; - done ; - if test x"$CC_FOR_BUILD" = x ; then - CC_FOR_BUILD=no_compiler_found ; - fi - ;; - ,,*) CC_FOR_BUILD=$CC ;; - ,*,*) CC_FOR_BUILD=$HOST_CC ;; -esac ; set_cc_for_build= ;' - -# This is needed to find uname on a Pyramid OSx when run in the BSD universe. -# (ghazi@noc.rutgers.edu 1994-08-24) -if (test -f /.attbin/uname) >/dev/null 2>&1 ; then - PATH=$PATH:/.attbin ; export PATH -fi - -UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown -UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown -UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown -UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown - -# Note: order is significant - the case branches are not exclusive. - -case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in - *:NetBSD:*:*) - # NetBSD (nbsd) targets should (where applicable) match one or - # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*, - # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently - # switched to ELF, *-*-netbsd* would select the old - # object file format. This provides both forward - # compatibility and a consistent mechanism for selecting the - # object file format. - # - # Note: NetBSD doesn't particularly care about the vendor - # portion of the name. We always set it to "unknown". - sysctl="sysctl -n hw.machine_arch" - UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \ - /usr/sbin/$sysctl 2>/dev/null || echo unknown)` - case "${UNAME_MACHINE_ARCH}" in - armeb) machine=armeb-unknown ;; - arm*) machine=arm-unknown ;; - sh3el) machine=shl-unknown ;; - sh3eb) machine=sh-unknown ;; - sh5el) machine=sh5le-unknown ;; - *) machine=${UNAME_MACHINE_ARCH}-unknown ;; - esac - # The Operating System including object format, if it has switched - # to ELF recently, or will in the future. - case "${UNAME_MACHINE_ARCH}" in - arm*|i386|m68k|ns32k|sh3*|sparc|vax) - eval $set_cc_for_build - if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep __ELF__ >/dev/null - then - # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). - # Return netbsd for either. FIX? - os=netbsd - else - os=netbsdelf - fi - ;; - *) - os=netbsd - ;; - esac - # The OS release - # Debian GNU/NetBSD machines have a different userland, and - # thus, need a distinct triplet. However, they do not need - # kernel version information, so it can be replaced with a - # suitable tag, in the style of linux-gnu. 
- case "${UNAME_VERSION}" in - Debian*) - release='-gnu' - ;; - *) - release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'` - ;; - esac - # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: - # contains redundant information, the shorter form: - # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. - echo "${machine}-${os}${release}" - exit ;; - *:OpenBSD:*:*) - UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` - echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} - exit ;; - *:ekkoBSD:*:*) - echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} - exit ;; - *:SolidBSD:*:*) - echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} - exit ;; - macppc:MirBSD:*:*) - echo powerpc-unknown-mirbsd${UNAME_RELEASE} - exit ;; - *:MirBSD:*:*) - echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} - exit ;; - alpha:OSF1:*:*) - case $UNAME_RELEASE in - *4.0) - UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` - ;; - *5.*) - UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` - ;; - esac - # According to Compaq, /usr/sbin/psrinfo has been available on - # OSF/1 and Tru64 systems produced since 1995. I hope that - # covers most systems running today. This code pipes the CPU - # types through head -n 1, so we only detect the type of CPU 0. - ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` - case "$ALPHA_CPU_TYPE" in - "EV4 (21064)") - UNAME_MACHINE="alpha" ;; - "EV4.5 (21064)") - UNAME_MACHINE="alpha" ;; - "LCA4 (21066/21068)") - UNAME_MACHINE="alpha" ;; - "EV5 (21164)") - UNAME_MACHINE="alphaev5" ;; - "EV5.6 (21164A)") - UNAME_MACHINE="alphaev56" ;; - "EV5.6 (21164PC)") - UNAME_MACHINE="alphapca56" ;; - "EV5.7 (21164PC)") - UNAME_MACHINE="alphapca57" ;; - "EV6 (21264)") - UNAME_MACHINE="alphaev6" ;; - "EV6.7 (21264A)") - UNAME_MACHINE="alphaev67" ;; - "EV6.8CB (21264C)") - UNAME_MACHINE="alphaev68" ;; - "EV6.8AL (21264B)") - UNAME_MACHINE="alphaev68" ;; - "EV6.8CX (21264D)") - UNAME_MACHINE="alphaev68" ;; - "EV6.9A (21264/EV69A)") - UNAME_MACHINE="alphaev69" ;; - "EV7 (21364)") - UNAME_MACHINE="alphaev7" ;; - "EV7.9 (21364A)") - UNAME_MACHINE="alphaev79" ;; - esac - # A Pn.n version is a patched version. - # A Vn.n version is a released version. - # A Tn.n version is a released field test version. - # A Xn.n version is an unreleased experimental baselevel. - # 1.2 uses "1.2" for uname -r. - echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` - exit ;; - Alpha\ *:Windows_NT*:*) - # How do we know it's Interix rather than the generic POSIX subsystem? - # Should we change UNAME_MACHINE based on the output of uname instead - # of the specific Alpha model? - echo alpha-pc-interix - exit ;; - 21064:Windows_NT:50:3) - echo alpha-dec-winnt3.5 - exit ;; - Amiga*:UNIX_System_V:4.0:*) - echo m68k-unknown-sysv4 - exit ;; - *:[Aa]miga[Oo][Ss]:*:*) - echo ${UNAME_MACHINE}-unknown-amigaos - exit ;; - *:[Mm]orph[Oo][Ss]:*:*) - echo ${UNAME_MACHINE}-unknown-morphos - exit ;; - *:OS/390:*:*) - echo i370-ibm-openedition - exit ;; - *:z/VM:*:*) - echo s390-ibm-zvmoe - exit ;; - *:OS400:*:*) - echo powerpc-ibm-os400 - exit ;; - arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) - echo arm-acorn-riscix${UNAME_RELEASE} - exit ;; - arm:riscos:*:*|arm:RISCOS:*:*) - echo arm-unknown-riscos - exit ;; - SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) - echo hppa1.1-hitachi-hiuxmpp - exit ;; - Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) - # akee@wpdis03.wpafb.af.mil (Earle F. 
Ake) contributed MIS and NILE. - if test "`(/bin/universe) 2>/dev/null`" = att ; then - echo pyramid-pyramid-sysv3 - else - echo pyramid-pyramid-bsd - fi - exit ;; - NILE*:*:*:dcosx) - echo pyramid-pyramid-svr4 - exit ;; - DRS?6000:unix:4.0:6*) - echo sparc-icl-nx6 - exit ;; - DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) - case `/usr/bin/uname -p` in - sparc) echo sparc-icl-nx7; exit ;; - esac ;; - sun4H:SunOS:5.*:*) - echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) - echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) - echo i386-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - sun4*:SunOS:6*:*) - # According to config.sub, this is the proper way to canonicalize - # SunOS6. Hard to guess exactly what SunOS6 will be like, but - # it's likely to be more like Solaris than SunOS4. - echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - sun4*:SunOS:*:*) - case "`/usr/bin/arch -k`" in - Series*|S4*) - UNAME_RELEASE=`uname -v` - ;; - esac - # Japanese Language versions have a version number like `4.1.3-JL'. - echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` - exit ;; - sun3*:SunOS:*:*) - echo m68k-sun-sunos${UNAME_RELEASE} - exit ;; - sun*:*:4.2BSD:*) - UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` - test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3 - case "`/bin/arch`" in - sun3) - echo m68k-sun-sunos${UNAME_RELEASE} - ;; - sun4) - echo sparc-sun-sunos${UNAME_RELEASE} - ;; - esac - exit ;; - aushp:SunOS:*:*) - echo sparc-auspex-sunos${UNAME_RELEASE} - exit ;; - # The situation for MiNT is a little confusing. The machine name - # can be virtually everything (everything which is not - # "atarist" or "atariste" at least should have a processor - # > m68000). The system name ranges from "MiNT" over "FreeMiNT" - # to the lowercase version "mint" (or "freemint"). Finally - # the system name "TOS" denotes a system which is actually not - # MiNT. But MiNT is downward compatible to TOS, so this should - # be no problem. 
- atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} - exit ;; - atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} - exit ;; - *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} - exit ;; - milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) - echo m68k-milan-mint${UNAME_RELEASE} - exit ;; - hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) - echo m68k-hades-mint${UNAME_RELEASE} - exit ;; - *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) - echo m68k-unknown-mint${UNAME_RELEASE} - exit ;; - m68k:machten:*:*) - echo m68k-apple-machten${UNAME_RELEASE} - exit ;; - powerpc:machten:*:*) - echo powerpc-apple-machten${UNAME_RELEASE} - exit ;; - RISC*:Mach:*:*) - echo mips-dec-mach_bsd4.3 - exit ;; - RISC*:ULTRIX:*:*) - echo mips-dec-ultrix${UNAME_RELEASE} - exit ;; - VAX*:ULTRIX*:*:*) - echo vax-dec-ultrix${UNAME_RELEASE} - exit ;; - 2020:CLIX:*:* | 2430:CLIX:*:*) - echo clipper-intergraph-clix${UNAME_RELEASE} - exit ;; - mips:*:*:UMIPS | mips:*:*:RISCos) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c -#ifdef __cplusplus -#include <stdio.h> /* for printf() prototype */ - int main (int argc, char *argv[]) { -#else - int main (argc, argv) int argc; char *argv[]; { -#endif - #if defined (host_mips) && defined (MIPSEB) - #if defined (SYSTYPE_SYSV) - printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); - #endif - #if defined (SYSTYPE_SVR4) - printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); - #endif - #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) - printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); - #endif - #endif - exit (-1); - } -EOF - $CC_FOR_BUILD -o $dummy $dummy.c && - dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && - SYSTEM_NAME=`$dummy $dummyarg` && - { echo "$SYSTEM_NAME"; exit; } - echo mips-mips-riscos${UNAME_RELEASE} - exit ;; - Motorola:PowerMAX_OS:*:*) - echo powerpc-motorola-powermax - exit ;; - Motorola:*:4.3:PL8-*) - echo powerpc-harris-powermax - exit ;; - Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) - echo powerpc-harris-powermax - exit ;; - Night_Hawk:Power_UNIX:*:*) - echo powerpc-harris-powerunix - exit ;; - m88k:CX/UX:7*:*) - echo m88k-harris-cxux7 - exit ;; - m88k:*:4*:R4*) - echo m88k-motorola-sysv4 - exit ;; - m88k:*:3*:R3*) - echo m88k-motorola-sysv3 - exit ;; - AViiON:dgux:*:*) - # DG/UX returns AViiON for all architectures - UNAME_PROCESSOR=`/usr/bin/uname -p` - if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] - then - if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ - [ ${TARGET_BINARY_INTERFACE}x = x ] - then - echo m88k-dg-dgux${UNAME_RELEASE} - else - echo m88k-dg-dguxbcs${UNAME_RELEASE} - fi - else - echo i586-dg-dgux${UNAME_RELEASE} - fi - exit ;; - M88*:DolphinOS:*:*) # DolphinOS (SVR3) - echo m88k-dolphin-sysv3 - exit ;; - M88*:*:R3*:*) - # Delta 88k system running SVR3 - echo m88k-motorola-sysv3 - exit ;; - XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) - echo m88k-tektronix-sysv3 - exit ;; - Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) - echo m68k-tektronix-bsd - exit ;; - *:IRIX*:*:*) - echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` - exit ;; - ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
- echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id - exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' - i*86:AIX:*:*) - echo i386-ibm-aix - exit ;; - ia64:AIX:*:*) - if [ -x /usr/bin/oslevel ] ; then - IBM_REV=`/usr/bin/oslevel` - else - IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} - fi - echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} - exit ;; - *:AIX:2:3) - if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #include <sys/systemcfg.h> - - main() - { - if (!__power_pc()) - exit(1); - puts("powerpc-ibm-aix3.2.5"); - exit(0); - } -EOF - if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` - then - echo "$SYSTEM_NAME" - else - echo rs6000-ibm-aix3.2.5 - fi - elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then - echo rs6000-ibm-aix3.2.4 - else - echo rs6000-ibm-aix3.2 - fi - exit ;; - *:AIX:*:[456]) - IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` - if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then - IBM_ARCH=rs6000 - else - IBM_ARCH=powerpc - fi - if [ -x /usr/bin/oslevel ] ; then - IBM_REV=`/usr/bin/oslevel` - else - IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} - fi - echo ${IBM_ARCH}-ibm-aix${IBM_REV} - exit ;; - *:AIX:*:*) - echo rs6000-ibm-aix - exit ;; - ibmrt:4.4BSD:*|romp-ibm:BSD:*) - echo romp-ibm-bsd4.4 - exit ;; - ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and - echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to - exit ;; # report: romp-ibm BSD 4.3 - *:BOSX:*:*) - echo rs6000-bull-bosx - exit ;; - DPX/2?00:B.O.S.:*:*) - echo m68k-bull-sysv3 - exit ;; - 9000/[34]??:4.3bsd:1.*:*) - echo m68k-hp-bsd - exit ;; - hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) - echo m68k-hp-bsd4.4 - exit ;; - 9000/[34678]??:HP-UX:*:*) - HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` - case "${UNAME_MACHINE}" in - 9000/31? ) HP_ARCH=m68000 ;; - 9000/[34]?? ) HP_ARCH=m68k ;; - 9000/[678][0-9][0-9]) - if [ -x /usr/bin/getconf ]; then - sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` - sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` - case "${sc_cpu_version}" in - 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 - 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 - 532) # CPU_PA_RISC2_0 - case "${sc_kernel_bits}" in - 32) HP_ARCH="hppa2.0n" ;; - 64) HP_ARCH="hppa2.0w" ;; - '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 - esac ;; - esac - fi - if [ "${HP_ARCH}" = "" ]; then - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - - #define _HPUX_SOURCE - #include <stdlib.h> - #include <unistd.h> - - int main () - { - #if defined(_SC_KERNEL_BITS) - long bits = sysconf(_SC_KERNEL_BITS); - #endif - long cpu = sysconf (_SC_CPU_VERSION); - - switch (cpu) - { - case CPU_PA_RISC1_0: puts ("hppa1.0"); break; - case CPU_PA_RISC1_1: puts ("hppa1.1"); break; - case CPU_PA_RISC2_0: - #if defined(_SC_KERNEL_BITS) - switch (bits) - { - case 64: puts ("hppa2.0w"); break; - case 32: puts ("hppa2.0n"); break; - default: puts ("hppa2.0"); break; - } break; - #else /* !defined(_SC_KERNEL_BITS) */ - puts ("hppa2.0"); break; - #endif - default: puts ("hppa1.0"); break; - } - exit (0); - } -EOF - (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` - test -z "$HP_ARCH" && HP_ARCH=hppa - fi ;; - esac - if [ ${HP_ARCH} = "hppa2.0w" ] - then - eval $set_cc_for_build - - # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating - # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler - # generating 64-bit code. 
GNU and HP use different nomenclature: - # - # $ CC_FOR_BUILD=cc ./config.guess - # => hppa2.0w-hp-hpux11.23 - # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess - # => hppa64-hp-hpux11.23 - - if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | - grep __LP64__ >/dev/null - then - HP_ARCH="hppa2.0w" - else - HP_ARCH="hppa64" - fi - fi - echo ${HP_ARCH}-hp-hpux${HPUX_REV} - exit ;; - ia64:HP-UX:*:*) - HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` - echo ia64-hp-hpux${HPUX_REV} - exit ;; - 3050*:HI-UX:*:*) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #include <unistd.h> - int - main () - { - long cpu = sysconf (_SC_CPU_VERSION); - /* The order matters, because CPU_IS_HP_MC68K erroneously returns - true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct - results, however. */ - if (CPU_IS_PA_RISC (cpu)) - { - switch (cpu) - { - case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; - case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; - case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; - default: puts ("hppa-hitachi-hiuxwe2"); break; - } - } - else if (CPU_IS_HP_MC68K (cpu)) - puts ("m68k-hitachi-hiuxwe2"); - else puts ("unknown-hitachi-hiuxwe2"); - exit (0); - } -EOF - $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && - { echo "$SYSTEM_NAME"; exit; } - echo unknown-hitachi-hiuxwe2 - exit ;; - 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) - echo hppa1.1-hp-bsd - exit ;; - 9000/8??:4.3bsd:*:*) - echo hppa1.0-hp-bsd - exit ;; - *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) - echo hppa1.0-hp-mpeix - exit ;; - hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) - echo hppa1.1-hp-osf - exit ;; - hp8??:OSF1:*:*) - echo hppa1.0-hp-osf - exit ;; - i*86:OSF1:*:*) - if [ -x /usr/sbin/sysversion ] ; then - echo ${UNAME_MACHINE}-unknown-osf1mk - else - echo ${UNAME_MACHINE}-unknown-osf1 - fi - exit ;; - parisc*:Lites*:*:*) - echo hppa1.1-hp-lites - exit ;; - C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) - echo c1-convex-bsd - exit ;; - C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) - if getsysinfo -f scalar_acc - then echo c32-convex-bsd - else echo c2-convex-bsd - fi - exit ;; - C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) - echo c34-convex-bsd - exit ;; - C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) - echo c38-convex-bsd - exit ;; - C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) - echo c4-convex-bsd - exit ;; - CRAY*Y-MP:*:*:*) - echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*[A-Z]90:*:*:*) - echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ - | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ - -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ - -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*TS:*:*:*) - echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*T3E:*:*:*) - echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*SV1:*:*:*) - echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - *:UNICOS/mp:*:*) - echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) - FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` - FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` - FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` - echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" - exit ;; - 5000:UNIX_System_V:4.*:*) - FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed 
-e 's/\///'` - FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` - echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" - exit ;; - i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) - echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} - exit ;; - sparc*:BSD/OS:*:*) - echo sparc-unknown-bsdi${UNAME_RELEASE} - exit ;; - *:BSD/OS:*:*) - echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} - exit ;; - *:FreeBSD:*:*) - case ${UNAME_MACHINE} in - pc98) - echo i386-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; - amd64) - echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; - *) - echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; - esac - exit ;; - i*:CYGWIN*:*) - echo ${UNAME_MACHINE}-pc-cygwin - exit ;; - *:MINGW*:*) - echo ${UNAME_MACHINE}-pc-mingw32 - exit ;; - i*:windows32*:*) - # uname -m includes "-pc" on this system. - echo ${UNAME_MACHINE}-mingw32 - exit ;; - i*:PW*:*) - echo ${UNAME_MACHINE}-pc-pw32 - exit ;; - *:Interix*:[3456]*) - case ${UNAME_MACHINE} in - x86) - echo i586-pc-interix${UNAME_RELEASE} - exit ;; - EM64T | authenticamd) - echo x86_64-unknown-interix${UNAME_RELEASE} - exit ;; - IA64) - echo ia64-unknown-interix${UNAME_RELEASE} - exit ;; - esac ;; - [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) - echo i${UNAME_MACHINE}-pc-mks - exit ;; - i*:Windows_NT*:* | Pentium*:Windows_NT*:*) - # How do we know it's Interix rather than the generic POSIX subsystem? - # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we - # UNAME_MACHINE based on the output of uname instead of i386? - echo i586-pc-interix - exit ;; - i*:UWIN*:*) - echo ${UNAME_MACHINE}-pc-uwin - exit ;; - amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) - echo x86_64-unknown-cygwin - exit ;; - p*:CYGWIN*:*) - echo powerpcle-unknown-cygwin - exit ;; - prep*:SunOS:5.*:*) - echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - *:GNU:*:*) - # the GNU system - echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` - exit ;; - *:GNU/*:*:*) - # other systems with GNU libc and userland - echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu - exit ;; - i*86:Minix:*:*) - echo ${UNAME_MACHINE}-pc-minix - exit ;; - arm*:Linux:*:*) - eval $set_cc_for_build - if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep -q __ARM_EABI__ - then - echo ${UNAME_MACHINE}-unknown-linux-gnu - else - echo ${UNAME_MACHINE}-unknown-linux-gnueabi - fi - exit ;; - avr32*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu - exit ;; - cris:Linux:*:*) - echo cris-axis-linux-gnu - exit ;; - crisv32:Linux:*:*) - echo crisv32-axis-linux-gnu - exit ;; - frv:Linux:*:*) - echo frv-unknown-linux-gnu - exit ;; - ia64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu - exit ;; - m32r*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu - exit ;; - m68*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu - exit ;; - mips:Linux:*:*) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #undef CPU - #undef mips - #undef mipsel - #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) - CPU=mipsel - #else - #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) - CPU=mips - #else - CPU= - #endif - #endif -EOF - eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed 
-n ' - /^CPU/{ - s: ::g - p - }'`" - test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } - ;; - mips64:Linux:*:*) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #undef CPU - #undef mips64 - #undef mips64el - #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) - CPU=mips64el - #else - #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) - CPU=mips64 - #else - CPU= - #endif - #endif -EOF - eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n ' - /^CPU/{ - s: ::g - p - }'`" - test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } - ;; - or32:Linux:*:*) - echo or32-unknown-linux-gnu - exit ;; - ppc:Linux:*:*) - echo powerpc-unknown-linux-gnu - exit ;; - ppc64:Linux:*:*) - echo powerpc64-unknown-linux-gnu - exit ;; - alpha:Linux:*:*) - case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in - EV5) UNAME_MACHINE=alphaev5 ;; - EV56) UNAME_MACHINE=alphaev56 ;; - PCA56) UNAME_MACHINE=alphapca56 ;; - PCA57) UNAME_MACHINE=alphapca56 ;; - EV6) UNAME_MACHINE=alphaev6 ;; - EV67) UNAME_MACHINE=alphaev67 ;; - EV68*) UNAME_MACHINE=alphaev68 ;; - esac - objdump --private-headers /bin/sh | grep ld.so.1 >/dev/null - if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi - echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} - exit ;; - parisc:Linux:*:* | hppa:Linux:*:*) - # Look for CPU level - case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in - PA7*) echo hppa1.1-unknown-linux-gnu ;; - PA8*) echo hppa2.0-unknown-linux-gnu ;; - *) echo hppa-unknown-linux-gnu ;; - esac - exit ;; - parisc64:Linux:*:* | hppa64:Linux:*:*) - echo hppa64-unknown-linux-gnu - exit ;; - s390:Linux:*:* | s390x:Linux:*:*) - echo ${UNAME_MACHINE}-ibm-linux - exit ;; - sh64*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu - exit ;; - sh*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu - exit ;; - sparc:Linux:*:* | sparc64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu - exit ;; - vax:Linux:*:*) - echo ${UNAME_MACHINE}-dec-linux-gnu - exit ;; - x86_64:Linux:*:*) - echo x86_64-unknown-linux-gnu - exit ;; - xtensa*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu - exit ;; - i*86:Linux:*:*) - # The BFD linker knows what the default object file format is, so - # first see if it will tell us. cd to the root directory to prevent - # problems with other programs or directories called `ld' in the path. - # Set LC_ALL=C to ensure ld outputs messages in English. - ld_supported_targets=`cd /; LC_ALL=C ld --help 2>&1 \ - | sed -ne '/supported targets:/!d - s/[ ][ ]*/ /g - s/.*supported targets: *// - s/ .*// - p'` - case "$ld_supported_targets" in - elf32-i386) - TENTATIVE="${UNAME_MACHINE}-pc-linux-gnu" - ;; - a.out-i386-linux) - echo "${UNAME_MACHINE}-pc-linux-gnuaout" - exit ;; - coff-i386) - echo "${UNAME_MACHINE}-pc-linux-gnucoff" - exit ;; - "") - # Either a pre-BFD a.out linker (linux-gnuoldld) or - # one that does not give us useful --help. 
- echo "${UNAME_MACHINE}-pc-linux-gnuoldld" - exit ;; - esac - # Determine whether the default compiler is a.out or elf - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #include <features.h> - #ifdef __ELF__ - # ifdef __GLIBC__ - # if __GLIBC__ >= 2 - LIBC=gnu - # else - LIBC=gnulibc1 - # endif - # else - LIBC=gnulibc1 - # endif - #else - #if defined(__INTEL_COMPILER) || defined(__PGI) || defined(__SUNPRO_C) || defined(__SUNPRO_CC) - LIBC=gnu - #else - LIBC=gnuaout - #endif - #endif - #ifdef __dietlibc__ - LIBC=dietlibc - #endif -EOF - eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n ' - /^LIBC/{ - s: ::g - p - }'`" - test x"${LIBC}" != x && { - echo "${UNAME_MACHINE}-pc-linux-${LIBC}" - exit - } - test x"${TENTATIVE}" != x && { echo "${TENTATIVE}"; exit; } - ;; - i*86:DYNIX/ptx:4*:*) - # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. - # earlier versions are messed up and put the nodename in both - # sysname and nodename. - echo i386-sequent-sysv4 - exit ;; - i*86:UNIX_SV:4.2MP:2.*) - # Unixware is an offshoot of SVR4, but it has its own version - # number series starting with 2... - # I am not positive that other SVR4 systems won't match this, - # I just have to hope. -- rms. - # Use sysv4.2uw... so that sysv4* matches it. - echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} - exit ;; - i*86:OS/2:*:*) - # If we were able to find `uname', then EMX Unix compatibility - # is probably installed. - echo ${UNAME_MACHINE}-pc-os2-emx - exit ;; - i*86:XTS-300:*:STOP) - echo ${UNAME_MACHINE}-unknown-stop - exit ;; - i*86:atheos:*:*) - echo ${UNAME_MACHINE}-unknown-atheos - exit ;; - i*86:syllable:*:*) - echo ${UNAME_MACHINE}-pc-syllable - exit ;; - i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.0*:*) - echo i386-unknown-lynxos${UNAME_RELEASE} - exit ;; - i*86:*DOS:*:*) - echo ${UNAME_MACHINE}-pc-msdosdjgpp - exit ;; - i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) - UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` - if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then - echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} - else - echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} - fi - exit ;; - i*86:*:5:[678]*) - # UnixWare 7.x, OpenUNIX and OpenServer 6. - case `/bin/uname -X | grep "^Machine"` in - *486*) UNAME_MACHINE=i486 ;; - *Pentium) UNAME_MACHINE=i586 ;; - *Pent*|*Celeron) UNAME_MACHINE=i686 ;; - esac - echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} - exit ;; - i*86:*:3.2:*) - if test -f /usr/options/cb.name; then - UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name` - echo ${UNAME_MACHINE}-pc-isc$UNAME_REL - elif /bin/uname -X 2>/dev/null >/dev/null ; then - UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` - (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 - (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ - && UNAME_MACHINE=i586 - (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ - && UNAME_MACHINE=i686 - (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ - && UNAME_MACHINE=i686 - echo ${UNAME_MACHINE}-pc-sco$UNAME_REL - else - echo ${UNAME_MACHINE}-pc-sysv32 - fi - exit ;; - pc:*:*:*) - # Left here for compatibility: - # uname -m prints for DJGPP always 'pc', but it prints nothing about - # the processor, so we play safe by assuming i386. 
- echo i386-pc-msdosdjgpp - exit ;; - Intel:Mach:3*:*) - echo i386-pc-mach3 - exit ;; - paragon:*:*:*) - echo i860-intel-osf1 - exit ;; - i860:*:4.*:*) # i860-SVR4 - if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then - echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 - else # Add other i860-SVR4 vendors below as they are discovered. - echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 - fi - exit ;; - mini*:CTIX:SYS*5:*) - # "miniframe" - echo m68010-convergent-sysv - exit ;; - mc68k:UNIX:SYSTEM5:3.51m) - echo m68k-convergent-sysv - exit ;; - M680?0:D-NIX:5.3:*) - echo m68k-diab-dnix - exit ;; - M68*:*:R3V[5678]*:*) - test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; - 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) - OS_REL='' - test -r /etc/.relid \ - && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4.3${OS_REL}; exit; } - /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ - && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; - 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4; exit; } ;; - m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) - echo m68k-unknown-lynxos${UNAME_RELEASE} - exit ;; - mc68030:UNIX_System_V:4.*:*) - echo m68k-atari-sysv4 - exit ;; - TSUNAMI:LynxOS:2.*:*) - echo sparc-unknown-lynxos${UNAME_RELEASE} - exit ;; - rs6000:LynxOS:2.*:*) - echo rs6000-unknown-lynxos${UNAME_RELEASE} - exit ;; - PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.0*:*) - echo powerpc-unknown-lynxos${UNAME_RELEASE} - exit ;; - SM[BE]S:UNIX_SV:*:*) - echo mips-dde-sysv${UNAME_RELEASE} - exit ;; - RM*:ReliantUNIX-*:*:*) - echo mips-sni-sysv4 - exit ;; - RM*:SINIX-*:*:*) - echo mips-sni-sysv4 - exit ;; - *:SINIX-*:*:*) - if uname -p 2>/dev/null >/dev/null ; then - UNAME_MACHINE=`(uname -p) 2>/dev/null` - echo ${UNAME_MACHINE}-sni-sysv4 - else - echo ns32k-sni-sysv - fi - exit ;; - PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort - # says <Richard.M.Bartel@ccMail.Census.GOV> - echo i586-unisys-sysv4 - exit ;; - *:UNIX_System_V:4*:FTX*) - # From Gerald Hewes <hewes@openmarket.com>. - # How about differentiating between stratus architectures? -djm - echo hppa1.1-stratus-sysv4 - exit ;; - *:*:*:FTX*) - # From seanf@swdc.stratus.com. - echo i860-stratus-sysv4 - exit ;; - i*86:VOS:*:*) - # From Paul.Green@stratus.com. - echo ${UNAME_MACHINE}-stratus-vos - exit ;; - *:VOS:*:*) - # From Paul.Green@stratus.com. - echo hppa1.1-stratus-vos - exit ;; - mc68*:A/UX:*:*) - echo m68k-apple-aux${UNAME_RELEASE} - exit ;; - news*:NEWS-OS:6*:*) - echo mips-sony-newsos6 - exit ;; - R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) - if [ -d /usr/nec ]; then - echo mips-nec-sysv${UNAME_RELEASE} - else - echo mips-unknown-sysv${UNAME_RELEASE} - fi - exit ;; - BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. - echo powerpc-be-beos - exit ;; - BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. - echo powerpc-apple-beos - exit ;; - BePC:BeOS:*:*) # BeOS running on Intel PC compatible. 
- echo i586-pc-beos - exit ;; - SX-4:SUPER-UX:*:*) - echo sx4-nec-superux${UNAME_RELEASE} - exit ;; - SX-5:SUPER-UX:*:*) - echo sx5-nec-superux${UNAME_RELEASE} - exit ;; - SX-6:SUPER-UX:*:*) - echo sx6-nec-superux${UNAME_RELEASE} - exit ;; - SX-7:SUPER-UX:*:*) - echo sx7-nec-superux${UNAME_RELEASE} - exit ;; - SX-8:SUPER-UX:*:*) - echo sx8-nec-superux${UNAME_RELEASE} - exit ;; - SX-8R:SUPER-UX:*:*) - echo sx8r-nec-superux${UNAME_RELEASE} - exit ;; - Power*:Rhapsody:*:*) - echo powerpc-apple-rhapsody${UNAME_RELEASE} - exit ;; - *:Rhapsody:*:*) - echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} - exit ;; - *:Darwin:*:*) - UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown - case $UNAME_PROCESSOR in - unknown) UNAME_PROCESSOR=powerpc ;; - esac - echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} - exit ;; - *:procnto*:*:* | *:QNX:[0123456789]*:*) - UNAME_PROCESSOR=`uname -p` - if test "$UNAME_PROCESSOR" = "x86"; then - UNAME_PROCESSOR=i386 - UNAME_MACHINE=pc - fi - echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} - exit ;; - *:QNX:*:4*) - echo i386-pc-qnx - exit ;; - NSE-?:NONSTOP_KERNEL:*:*) - echo nse-tandem-nsk${UNAME_RELEASE} - exit ;; - NSR-?:NONSTOP_KERNEL:*:*) - echo nsr-tandem-nsk${UNAME_RELEASE} - exit ;; - *:NonStop-UX:*:*) - echo mips-compaq-nonstopux - exit ;; - BS2000:POSIX*:*:*) - echo bs2000-siemens-sysv - exit ;; - DS/*:UNIX_System_V:*:*) - echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} - exit ;; - *:Plan9:*:*) - # "uname -m" is not consistent, so use $cputype instead. 386 - # is converted to i386 for consistency with other x86 - # operating systems. - if test "$cputype" = "386"; then - UNAME_MACHINE=i386 - else - UNAME_MACHINE="$cputype" - fi - echo ${UNAME_MACHINE}-unknown-plan9 - exit ;; - *:TOPS-10:*:*) - echo pdp10-unknown-tops10 - exit ;; - *:TENEX:*:*) - echo pdp10-unknown-tenex - exit ;; - KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) - echo pdp10-dec-tops20 - exit ;; - XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) - echo pdp10-xkl-tops20 - exit ;; - *:TOPS-20:*:*) - echo pdp10-unknown-tops20 - exit ;; - *:ITS:*:*) - echo pdp10-unknown-its - exit ;; - SEI:*:*:SEIUX) - echo mips-sei-seiux${UNAME_RELEASE} - exit ;; - *:DragonFly:*:*) - echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` - exit ;; - *:*VMS:*:*) - UNAME_MACHINE=`(uname -p) 2>/dev/null` - case "${UNAME_MACHINE}" in - A*) echo alpha-dec-vms ; exit ;; - I*) echo ia64-dec-vms ; exit ;; - V*) echo vax-dec-vms ; exit ;; - esac ;; - *:XENIX:*:SysV) - echo i386-pc-xenix - exit ;; - i*86:skyos:*:*) - echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//' - exit ;; - i*86:rdos:*:*) - echo ${UNAME_MACHINE}-pc-rdos - exit ;; -esac - -#echo '(No uname command or uname output not recognized.)' 1>&2 -#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2 - -eval $set_cc_for_build -cat >$dummy.c <<EOF -#ifdef _SEQUENT_ -# include <sys/types.h> -# include <sys/utsname.h> -#endif -main () -{ -#if defined (sony) -#if defined (MIPSEB) - /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, - I don't know.... 
*/ - printf ("mips-sony-bsd\n"); exit (0); -#else -#include <sys/param.h> - printf ("m68k-sony-newsos%s\n", -#ifdef NEWSOS4 - "4" -#else - "" -#endif - ); exit (0); -#endif -#endif - -#if defined (__arm) && defined (__acorn) && defined (__unix) - printf ("arm-acorn-riscix\n"); exit (0); -#endif - -#if defined (hp300) && !defined (hpux) - printf ("m68k-hp-bsd\n"); exit (0); -#endif - -#if defined (NeXT) -#if !defined (__ARCHITECTURE__) -#define __ARCHITECTURE__ "m68k" -#endif - int version; - version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`; - if (version < 4) - printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); - else - printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); - exit (0); -#endif - -#if defined (MULTIMAX) || defined (n16) -#if defined (UMAXV) - printf ("ns32k-encore-sysv\n"); exit (0); -#else -#if defined (CMU) - printf ("ns32k-encore-mach\n"); exit (0); -#else - printf ("ns32k-encore-bsd\n"); exit (0); -#endif -#endif -#endif - -#if defined (__386BSD__) - printf ("i386-pc-bsd\n"); exit (0); -#endif - -#if defined (sequent) -#if defined (i386) - printf ("i386-sequent-dynix\n"); exit (0); -#endif -#if defined (ns32000) - printf ("ns32k-sequent-dynix\n"); exit (0); -#endif -#endif - -#if defined (_SEQUENT_) - struct utsname un; - - uname(&un); - - if (strncmp(un.version, "V2", 2) == 0) { - printf ("i386-sequent-ptx2\n"); exit (0); - } - if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */ - printf ("i386-sequent-ptx1\n"); exit (0); - } - printf ("i386-sequent-ptx\n"); exit (0); - -#endif - -#if defined (vax) -# if !defined (ultrix) -# include <sys/param.h> -# if defined (BSD) -# if BSD == 43 - printf ("vax-dec-bsd4.3\n"); exit (0); -# else -# if BSD == 199006 - printf ("vax-dec-bsd4.3reno\n"); exit (0); -# else - printf ("vax-dec-bsd\n"); exit (0); -# endif -# endif -# else - printf ("vax-dec-bsd\n"); exit (0); -# endif -# else - printf ("vax-dec-ultrix\n"); exit (0); -# endif -#endif - -#if defined (alliant) && defined (i860) - printf ("i860-alliant-bsd\n"); exit (0); -#endif - - exit (1); -} -EOF - -$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` && - { echo "$SYSTEM_NAME"; exit; } - -# Apollos put the system type in the environment. - -test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; } - -# Convex versions that predate uname can use getsysinfo(1) - -if [ -x /usr/convex/getsysinfo ] -then - case `getsysinfo -f cpu_type` in - c1*) - echo c1-convex-bsd - exit ;; - c2*) - if getsysinfo -f scalar_acc - then echo c32-convex-bsd - else echo c2-convex-bsd - fi - exit ;; - c34*) - echo c34-convex-bsd - exit ;; - c38*) - echo c38-convex-bsd - exit ;; - c4*) - echo c4-convex-bsd - exit ;; - esac -fi - -cat >&2 <<EOF -$0: unable to guess system type - -This script, last modified $timestamp, has failed to recognize -the operating system you are using. It is advised that you -download the most up to date version of the config scripts from - - http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD -and - http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD - -If the version you run ($0) is already up to date, please -send the following data and any information you think might be -pertinent to <config-patches@gnu.org> in order to provide the needed -information to handle your system. 
- -config.guess timestamp = $timestamp - -uname -m = `(uname -m) 2>/dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` - -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null` - -hostinfo = `(hostinfo) 2>/dev/null` -/bin/universe = `(/bin/universe) 2>/dev/null` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` -/bin/arch = `(/bin/arch) 2>/dev/null` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` - -UNAME_MACHINE = ${UNAME_MACHINE} -UNAME_RELEASE = ${UNAME_RELEASE} -UNAME_SYSTEM = ${UNAME_SYSTEM} -UNAME_VERSION = ${UNAME_VERSION} -EOF - -exit 1 - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "timestamp='" -# time-stamp-format: "%:y-%02m-%02d" -# time-stamp-end: "'" -# End:
diff --git a/third_party/libevent/config.h.in b/third_party/libevent/config.h.in
deleted file mode 100644
index 149942c..0000000
--- a/third_party/libevent/config.h.in
+++ /dev/null
@@ -1,265 +0,0 @@ -/* config.h.in. Generated from configure.in by autoheader. */ - -/* Define if clock_gettime is available in libc */ -#undef DNS_USE_CPU_CLOCK_FOR_ID - -/* Define is no secure id variant is available */ -#undef DNS_USE_GETTIMEOFDAY_FOR_ID - -/* Define to 1 if you have the `clock_gettime' function. */ -#undef HAVE_CLOCK_GETTIME - -/* Define if /dev/poll is available */ -#undef HAVE_DEVPOLL - -/* Define to 1 if you have the <dlfcn.h> header file. */ -#undef HAVE_DLFCN_H - -/* Define if your system supports the epoll system calls */ -#undef HAVE_EPOLL - -/* Define to 1 if you have the `epoll_ctl' function. */ -#undef HAVE_EPOLL_CTL - -/* Define if your system supports event ports */ -#undef HAVE_EVENT_PORTS - -/* Define to 1 if you have the `fcntl' function. */ -#undef HAVE_FCNTL - -/* Define to 1 if you have the <fcntl.h> header file. */ -#undef HAVE_FCNTL_H - -/* Define to 1 if the system has the type `fd_mask'. */ -#undef HAVE_FD_MASK - -/* Define to 1 if you have the `getaddrinfo' function. */ -#undef HAVE_GETADDRINFO - -/* Define to 1 if you have the `getegid' function. */ -#undef HAVE_GETEGID - -/* Define to 1 if you have the `geteuid' function. */ -#undef HAVE_GETEUID - -/* Define to 1 if you have the `getnameinfo' function. */ -#undef HAVE_GETNAMEINFO - -/* Define to 1 if you have the `gettimeofday' function. */ -#undef HAVE_GETTIMEOFDAY - -/* Define to 1 if you have the `inet_ntop' function. */ -#undef HAVE_INET_NTOP - -/* Define to 1 if you have the <inttypes.h> header file. */ -#undef HAVE_INTTYPES_H - -/* Define to 1 if you have the `issetugid' function. */ -#undef HAVE_ISSETUGID - -/* Define to 1 if you have the `kqueue' function. */ -#undef HAVE_KQUEUE - -/* Define to 1 if you have the `nsl' library (-lnsl). */ -#undef HAVE_LIBNSL - -/* Define to 1 if you have the `resolv' library (-lresolv). */ -#undef HAVE_LIBRESOLV - -/* Define to 1 if you have the `rt' library (-lrt). */ -#undef HAVE_LIBRT - -/* Define to 1 if you have the `socket' library (-lsocket). */ -#undef HAVE_LIBSOCKET - -/* Define to 1 if you have the <memory.h> header file. */ -#undef HAVE_MEMORY_H - -/* Define to 1 if you have the <netinet/in6.h> header file. */ -#undef HAVE_NETINET_IN6_H - -/* Define to 1 if you have the `poll' function. */ -#undef HAVE_POLL - -/* Define to 1 if you have the <poll.h> header file. */ -#undef HAVE_POLL_H - -/* Define to 1 if you have the `port_create' function. */ -#undef HAVE_PORT_CREATE - -/* Define to 1 if you have the <port.h> header file. */ -#undef HAVE_PORT_H - -/* Define to 1 if you have the `select' function. */ -#undef HAVE_SELECT - -/* Define if F_SETFD is defined in <fcntl.h> */ -#undef HAVE_SETFD - -/* Define to 1 if you have the `sigaction' function. */ -#undef HAVE_SIGACTION - -/* Define to 1 if you have the `signal' function. */ -#undef HAVE_SIGNAL - -/* Define to 1 if you have the <signal.h> header file. */ -#undef HAVE_SIGNAL_H - -/* Define to 1 if you have the <stdarg.h> header file. */ -#undef HAVE_STDARG_H - -/* Define to 1 if you have the <stdint.h> header file. */ -#undef HAVE_STDINT_H - -/* Define to 1 if you have the <stdlib.h> header file. */ -#undef HAVE_STDLIB_H - -/* Define to 1 if you have the <strings.h> header file. */ -#undef HAVE_STRINGS_H - -/* Define to 1 if you have the <string.h> header file. */ -#undef HAVE_STRING_H - -/* Define to 1 if you have the `strlcpy' function. */ -#undef HAVE_STRLCPY - -/* Define to 1 if you have the `strsep' function. */ -#undef HAVE_STRSEP - -/* Define to 1 if you have the `strtok_r' function. 
*/ -#undef HAVE_STRTOK_R - -/* Define to 1 if you have the `strtoll' function. */ -#undef HAVE_STRTOLL - -/* Define to 1 if the system has the type `struct in6_addr'. */ -#undef HAVE_STRUCT_IN6_ADDR - -/* Define to 1 if you have the <sys/devpoll.h> header file. */ -#undef HAVE_SYS_DEVPOLL_H - -/* Define to 1 if you have the <sys/epoll.h> header file. */ -#undef HAVE_SYS_EPOLL_H - -/* Define to 1 if you have the <sys/event.h> header file. */ -#undef HAVE_SYS_EVENT_H - -/* Define to 1 if you have the <sys/ioctl.h> header file. */ -#undef HAVE_SYS_IOCTL_H - -/* Define to 1 if you have the <sys/param.h> header file. */ -#undef HAVE_SYS_PARAM_H - -/* Define to 1 if you have the <sys/queue.h> header file. */ -#undef HAVE_SYS_QUEUE_H - -/* Define to 1 if you have the <sys/select.h> header file. */ -#undef HAVE_SYS_SELECT_H - -/* Define to 1 if you have the <sys/socket.h> header file. */ -#undef HAVE_SYS_SOCKET_H - -/* Define to 1 if you have the <sys/stat.h> header file. */ -#undef HAVE_SYS_STAT_H - -/* Define to 1 if you have the <sys/time.h> header file. */ -#undef HAVE_SYS_TIME_H - -/* Define to 1 if you have the <sys/types.h> header file. */ -#undef HAVE_SYS_TYPES_H - -/* Define if TAILQ_FOREACH is defined in <sys/queue.h> */ -#undef HAVE_TAILQFOREACH - -/* Define if timeradd is defined in <sys/time.h> */ -#undef HAVE_TIMERADD - -/* Define if timerclear is defined in <sys/time.h> */ -#undef HAVE_TIMERCLEAR - -/* Define if timercmp is defined in <sys/time.h> */ -#undef HAVE_TIMERCMP - -/* Define if timerisset is defined in <sys/time.h> */ -#undef HAVE_TIMERISSET - -/* Define to 1 if the system has the type `uint16_t'. */ -#undef HAVE_UINT16_T - -/* Define to 1 if the system has the type `uint32_t'. */ -#undef HAVE_UINT32_T - -/* Define to 1 if the system has the type `uint64_t'. */ -#undef HAVE_UINT64_T - -/* Define to 1 if the system has the type `uint8_t'. */ -#undef HAVE_UINT8_T - -/* Define to 1 if you have the <unistd.h> header file. */ -#undef HAVE_UNISTD_H - -/* Define to 1 if you have the `vasprintf' function. */ -#undef HAVE_VASPRINTF - -/* Define if kqueue works correctly with pipes */ -#undef HAVE_WORKING_KQUEUE - -/* Name of package */ -#undef PACKAGE - -/* Define to the address where bug reports for this package should be sent. */ -#undef PACKAGE_BUGREPORT - -/* Define to the full name of this package. */ -#undef PACKAGE_NAME - -/* Define to the full name and version of this package. */ -#undef PACKAGE_STRING - -/* Define to the one symbol short name of this package. */ -#undef PACKAGE_TARNAME - -/* Define to the version of this package. */ -#undef PACKAGE_VERSION - -/* The size of `int', as computed by sizeof. */ -#undef SIZEOF_INT - -/* The size of `long', as computed by sizeof. */ -#undef SIZEOF_LONG - -/* The size of `long long', as computed by sizeof. */ -#undef SIZEOF_LONG_LONG - -/* The size of `short', as computed by sizeof. */ -#undef SIZEOF_SHORT - -/* Define to 1 if you have the ANSI C header files. */ -#undef STDC_HEADERS - -/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. */ -#undef TIME_WITH_SYS_TIME - -/* Version number of package */ -#undef VERSION - -/* Define to appropriate substitue if compiler doesnt have __func__ */ -#undef __func__ - -/* Define to empty if `const' does not conform to ANSI C. */ -#undef const - -/* Define to `__inline__' or `__inline' if that's what the C compiler - calls it, or to nothing if 'inline' is not supported under any name. 
*/ -#ifndef __cplusplus -#undef inline -#endif - -/* Define to `int' if <sys/types.h> does not define. */ -#undef pid_t - -/* Define to `unsigned int' if <sys/types.h> does not define. */ -#undef size_t - -/* Define to unsigned int if you dont have it */ -#undef socklen_t
diff --git a/third_party/libevent/config.sub b/third_party/libevent/config.sub
deleted file mode 100644
index 6759825a..0000000
--- a/third_party/libevent/config.sub
+++ /dev/null
@@ -1,1658 +0,0 @@ -#! /bin/sh -# Configuration validation subroutine script. -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, -# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 -# Free Software Foundation, Inc. - -timestamp='2008-01-16' - -# This file is (in principle) common to ALL GNU software. -# The presence of a machine in this file suggests that SOME GNU software -# can handle that machine. It does not imply ALL GNU software can. -# -# This file is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA -# 02110-1301, USA. -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - - -# Please send patches to <config-patches@gnu.org>. Submit a context -# diff and a properly formatted ChangeLog entry. -# -# Configuration subroutine to validate and canonicalize a configuration type. -# Supply the specified configuration type as an argument. -# If it is invalid, we print an error message on stderr and exit with code 1. -# Otherwise, we print the canonical config type on stdout and succeed. - -# This file is supposed to be the same for all GNU packages -# and recognize all the CPU types, system types and aliases -# that are meaningful with *any* GNU software. -# Each package is responsible for reporting which valid configurations -# it does not support. The user should be able to distinguish -# a failure to support a valid configuration from a meaningless -# configuration. - -# The goal of this file is to map all the various variations of a given -# machine specification into a single specification in the form: -# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM -# or in some cases, the newer four-part form: -# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM -# It is wrong to echo any other type of specification. - -me=`echo "$0" | sed -e 's,.*/,,'` - -usage="\ -Usage: $0 [OPTION] CPU-MFR-OPSYS - $0 [OPTION] ALIAS - -Canonicalize a configuration name. - -Operation modes: - -h, --help print this help, then exit - -t, --time-stamp print date of last modification, then exit - -v, --version print version number, then exit - -Report bugs and patches to <config-patches@gnu.org>." - -version="\ -GNU config.sub ($timestamp) - -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, -2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. - -This is free software; see the source for copying conditions. There is NO -warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - -help=" -Try \`$me --help' for more information." 
- -# Parse command line -while test $# -gt 0 ; do - case $1 in - --time-stamp | --time* | -t ) - echo "$timestamp" ; exit ;; - --version | -v ) - echo "$version" ; exit ;; - --help | --h* | -h ) - echo "$usage"; exit ;; - -- ) # Stop option processing - shift; break ;; - - ) # Use stdin as input. - break ;; - -* ) - echo "$me: invalid option $1$help" - exit 1 ;; - - *local*) - # First pass through any local machine types. - echo $1 - exit ;; - - * ) - break ;; - esac -done - -case $# in - 0) echo "$me: missing argument$help" >&2 - exit 1;; - 1) ;; - *) echo "$me: too many arguments$help" >&2 - exit 1;; -esac - -# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). -# Here we must recognize all the valid KERNEL-OS combinations. -maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` -case $maybe_os in - nto-qnx* | linux-gnu* | linux-dietlibc | linux-newlib* | linux-uclibc* | \ - uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | \ - storm-chaos* | os2-emx* | rtmk-nova*) - os=-$maybe_os - basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` - ;; - *) - basic_machine=`echo $1 | sed 's/-[^-]*$//'` - if [ $basic_machine != $1 ] - then os=`echo $1 | sed 's/.*-/-/'` - else os=; fi - ;; -esac - -### Let's recognize common machines as not being operating systems so -### that things like config.sub decstation-3100 work. We also -### recognize some manufacturers as not being operating systems, so we -### can provide default operating systems below. -case $os in - -sun*os*) - # Prevent following clause from handling this invalid input. - ;; - -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ - -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ - -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ - -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ - -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ - -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ - -apple | -axis | -knuth | -cray) - os= - basic_machine=$1 - ;; - -sim | -cisco | -oki | -wec | -winbond) - os= - basic_machine=$1 - ;; - -scout) - ;; - -wrs) - os=-vxworks - basic_machine=$1 - ;; - -chorusos*) - os=-chorusos - basic_machine=$1 - ;; - -chorusrdb) - os=-chorusrdb - basic_machine=$1 - ;; - -hiux*) - os=-hiuxwe2 - ;; - -sco6) - os=-sco5v6 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco5) - os=-sco3.2v5 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco4) - os=-sco3.2v4 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco3.2.[4-9]*) - os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco3.2v[4-9]*) - # Don't forget version if it is 3.2v4 or newer. - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco5v6*) - # Don't forget version if it is 3.2v4 or newer. 
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco*) - os=-sco3.2v2 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -udk*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -isc) - os=-isc2.2 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -clix*) - basic_machine=clipper-intergraph - ;; - -isc*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -lynx*) - os=-lynxos - ;; - -ptx*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` - ;; - -windowsnt*) - os=`echo $os | sed -e 's/windowsnt/winnt/'` - ;; - -psos*) - os=-psos - ;; - -mint | -mint[0-9]*) - basic_machine=m68k-atari - os=-mint - ;; -esac - -# Decode aliases for certain CPU-COMPANY combinations. -case $basic_machine in - # Recognize the basic CPU types without company name. - # Some are omitted here because they have special meanings below. - 1750a | 580 \ - | a29k \ - | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ - | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ - | am33_2.0 \ - | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \ - | bfin \ - | c4x | clipper \ - | d10v | d30v | dlx | dsp16xx \ - | fido | fr30 | frv \ - | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ - | i370 | i860 | i960 | ia64 \ - | ip2k | iq2000 \ - | m32c | m32r | m32rle | m68000 | m68k | m88k \ - | maxq | mb | microblaze | mcore | mep \ - | mips | mipsbe | mipseb | mipsel | mipsle \ - | mips16 \ - | mips64 | mips64el \ - | mips64vr | mips64vrel \ - | mips64orion | mips64orionel \ - | mips64vr4100 | mips64vr4100el \ - | mips64vr4300 | mips64vr4300el \ - | mips64vr5000 | mips64vr5000el \ - | mips64vr5900 | mips64vr5900el \ - | mipsisa32 | mipsisa32el \ - | mipsisa32r2 | mipsisa32r2el \ - | mipsisa64 | mipsisa64el \ - | mipsisa64r2 | mipsisa64r2el \ - | mipsisa64sb1 | mipsisa64sb1el \ - | mipsisa64sr71k | mipsisa64sr71kel \ - | mipstx39 | mipstx39el \ - | mn10200 | mn10300 \ - | mt \ - | msp430 \ - | nios | nios2 \ - | ns16k | ns32k \ - | or32 \ - | pdp10 | pdp11 | pj | pjl \ - | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \ - | pyramid \ - | score \ - | sh | sh[1234] | sh[24]a | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ - | sh64 | sh64le \ - | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ - | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ - | spu | strongarm \ - | tahoe | thumb | tic4x | tic80 | tron \ - | v850 | v850e \ - | we32k \ - | x86 | xc16x | xscale | xscalee[bl] | xstormy16 | xtensa \ - | z8k) - basic_machine=$basic_machine-unknown - ;; - m6811 | m68hc11 | m6812 | m68hc12) - # Motorola 68HC11/12. - basic_machine=$basic_machine-unknown - os=-none - ;; - m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) - ;; - ms1) - basic_machine=mt-unknown - ;; - - # We use `pc' rather than `unknown' - # because (1) that's what they normally are, and - # (2) the word "unknown" tends to confuse beginning users. - i*86 | x86_64) - basic_machine=$basic_machine-pc - ;; - # Object if more than one company name word. - *-*-*) - echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 - exit 1 - ;; - # Recognize the basic CPU types with company name. 
- 580-* \ - | a29k-* \ - | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ - | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ - | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ - | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ - | avr-* | avr32-* \ - | bfin-* | bs2000-* \ - | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \ - | clipper-* | craynv-* | cydra-* \ - | d10v-* | d30v-* | dlx-* \ - | elxsi-* \ - | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ - | h8300-* | h8500-* \ - | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ - | i*86-* | i860-* | i960-* | ia64-* \ - | ip2k-* | iq2000-* \ - | m32c-* | m32r-* | m32rle-* \ - | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ - | m88110-* | m88k-* | maxq-* | mcore-* \ - | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ - | mips16-* \ - | mips64-* | mips64el-* \ - | mips64vr-* | mips64vrel-* \ - | mips64orion-* | mips64orionel-* \ - | mips64vr4100-* | mips64vr4100el-* \ - | mips64vr4300-* | mips64vr4300el-* \ - | mips64vr5000-* | mips64vr5000el-* \ - | mips64vr5900-* | mips64vr5900el-* \ - | mipsisa32-* | mipsisa32el-* \ - | mipsisa32r2-* | mipsisa32r2el-* \ - | mipsisa64-* | mipsisa64el-* \ - | mipsisa64r2-* | mipsisa64r2el-* \ - | mipsisa64sb1-* | mipsisa64sb1el-* \ - | mipsisa64sr71k-* | mipsisa64sr71kel-* \ - | mipstx39-* | mipstx39el-* \ - | mmix-* \ - | mt-* \ - | msp430-* \ - | nios-* | nios2-* \ - | none-* | np1-* | ns16k-* | ns32k-* \ - | orion-* \ - | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ - | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \ - | pyramid-* \ - | romp-* | rs6000-* \ - | sh-* | sh[1234]-* | sh[24]a-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ - | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ - | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ - | sparclite-* \ - | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | strongarm-* | sv1-* | sx?-* \ - | tahoe-* | thumb-* \ - | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ - | tron-* \ - | v850-* | v850e-* | vax-* \ - | we32k-* \ - | x86-* | x86_64-* | xc16x-* | xps100-* | xscale-* | xscalee[bl]-* \ - | xstormy16-* | xtensa*-* \ - | ymp-* \ - | z8k-*) - ;; - # Recognize the basic CPU types without company name, with glob match. - xtensa*) - basic_machine=$basic_machine-unknown - ;; - # Recognize the various machine names and aliases which stand - # for a CPU type and a company and sometimes even an OS. 
- 386bsd) - basic_machine=i386-unknown - os=-bsd - ;; - 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) - basic_machine=m68000-att - ;; - 3b*) - basic_machine=we32k-att - ;; - a29khif) - basic_machine=a29k-amd - os=-udi - ;; - abacus) - basic_machine=abacus-unknown - ;; - adobe68k) - basic_machine=m68010-adobe - os=-scout - ;; - alliant | fx80) - basic_machine=fx80-alliant - ;; - altos | altos3068) - basic_machine=m68k-altos - ;; - am29k) - basic_machine=a29k-none - os=-bsd - ;; - amd64) - basic_machine=x86_64-pc - ;; - amd64-*) - basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - amdahl) - basic_machine=580-amdahl - os=-sysv - ;; - amiga | amiga-*) - basic_machine=m68k-unknown - ;; - amigaos | amigados) - basic_machine=m68k-unknown - os=-amigaos - ;; - amigaunix | amix) - basic_machine=m68k-unknown - os=-sysv4 - ;; - apollo68) - basic_machine=m68k-apollo - os=-sysv - ;; - apollo68bsd) - basic_machine=m68k-apollo - os=-bsd - ;; - aux) - basic_machine=m68k-apple - os=-aux - ;; - balance) - basic_machine=ns32k-sequent - os=-dynix - ;; - blackfin) - basic_machine=bfin-unknown - os=-linux - ;; - blackfin-*) - basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux - ;; - c90) - basic_machine=c90-cray - os=-unicos - ;; - convex-c1) - basic_machine=c1-convex - os=-bsd - ;; - convex-c2) - basic_machine=c2-convex - os=-bsd - ;; - convex-c32) - basic_machine=c32-convex - os=-bsd - ;; - convex-c34) - basic_machine=c34-convex - os=-bsd - ;; - convex-c38) - basic_machine=c38-convex - os=-bsd - ;; - cray | j90) - basic_machine=j90-cray - os=-unicos - ;; - craynv) - basic_machine=craynv-cray - os=-unicosmp - ;; - cr16) - basic_machine=cr16-unknown - os=-elf - ;; - crds | unos) - basic_machine=m68k-crds - ;; - crisv32 | crisv32-* | etraxfs*) - basic_machine=crisv32-axis - ;; - cris | cris-* | etrax*) - basic_machine=cris-axis - ;; - crx) - basic_machine=crx-unknown - os=-elf - ;; - da30 | da30-*) - basic_machine=m68k-da30 - ;; - decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) - basic_machine=mips-dec - ;; - decsystem10* | dec10*) - basic_machine=pdp10-dec - os=-tops10 - ;; - decsystem20* | dec20*) - basic_machine=pdp10-dec - os=-tops20 - ;; - delta | 3300 | motorola-3300 | motorola-delta \ - | 3300-motorola | delta-motorola) - basic_machine=m68k-motorola - ;; - delta88) - basic_machine=m88k-motorola - os=-sysv3 - ;; - djgpp) - basic_machine=i586-pc - os=-msdosdjgpp - ;; - dpx20 | dpx20-*) - basic_machine=rs6000-bull - os=-bosx - ;; - dpx2* | dpx2*-bull) - basic_machine=m68k-bull - os=-sysv3 - ;; - ebmon29k) - basic_machine=a29k-amd - os=-ebmon - ;; - elxsi) - basic_machine=elxsi-elxsi - os=-bsd - ;; - encore | umax | mmax) - basic_machine=ns32k-encore - ;; - es1800 | OSE68k | ose68k | ose | OSE) - basic_machine=m68k-ericsson - os=-ose - ;; - fx2800) - basic_machine=i860-alliant - ;; - genix) - basic_machine=ns32k-ns - ;; - gmicro) - basic_machine=tron-gmicro - os=-sysv - ;; - go32) - basic_machine=i386-pc - os=-go32 - ;; - h3050r* | hiux*) - basic_machine=hppa1.1-hitachi - os=-hiuxwe2 - ;; - h8300hms) - basic_machine=h8300-hitachi - os=-hms - ;; - h8300xray) - basic_machine=h8300-hitachi - os=-xray - ;; - h8500hms) - basic_machine=h8500-hitachi - os=-hms - ;; - harris) - basic_machine=m88k-harris - os=-sysv3 - ;; - hp300-*) - basic_machine=m68k-hp - ;; - hp300bsd) - basic_machine=m68k-hp - os=-bsd - ;; - hp300hpux) - basic_machine=m68k-hp - os=-hpux - ;; - hp3k9[0-9][0-9] | hp9[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - 
hp9k2[0-9][0-9] | hp9k31[0-9]) - basic_machine=m68000-hp - ;; - hp9k3[2-9][0-9]) - basic_machine=m68k-hp - ;; - hp9k6[0-9][0-9] | hp6[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - hp9k7[0-79][0-9] | hp7[0-79][0-9]) - basic_machine=hppa1.1-hp - ;; - hp9k78[0-9] | hp78[0-9]) - # FIXME: really hppa2.0-hp - basic_machine=hppa1.1-hp - ;; - hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) - # FIXME: really hppa2.0-hp - basic_machine=hppa1.1-hp - ;; - hp9k8[0-9][13679] | hp8[0-9][13679]) - basic_machine=hppa1.1-hp - ;; - hp9k8[0-9][0-9] | hp8[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - hppa-next) - os=-nextstep3 - ;; - hppaosf) - basic_machine=hppa1.1-hp - os=-osf - ;; - hppro) - basic_machine=hppa1.1-hp - os=-proelf - ;; - i370-ibm* | ibm*) - basic_machine=i370-ibm - ;; -# I'm not sure what "Sysv32" means. Should this be sysv3.2? - i*86v32) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv32 - ;; - i*86v4*) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv4 - ;; - i*86v) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv - ;; - i*86sol2) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-solaris2 - ;; - i386mach) - basic_machine=i386-mach - os=-mach - ;; - i386-vsta | vsta) - basic_machine=i386-unknown - os=-vsta - ;; - iris | iris4d) - basic_machine=mips-sgi - case $os in - -irix*) - ;; - *) - os=-irix4 - ;; - esac - ;; - isi68 | isi) - basic_machine=m68k-isi - os=-sysv - ;; - m68knommu) - basic_machine=m68k-unknown - os=-linux - ;; - m68knommu-*) - basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux - ;; - m88k-omron*) - basic_machine=m88k-omron - ;; - magnum | m3230) - basic_machine=mips-mips - os=-sysv - ;; - merlin) - basic_machine=ns32k-utek - os=-sysv - ;; - mingw32) - basic_machine=i386-pc - os=-mingw32 - ;; - mingw32ce) - basic_machine=arm-unknown - os=-mingw32ce - ;; - miniframe) - basic_machine=m68000-convergent - ;; - *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) - basic_machine=m68k-atari - os=-mint - ;; - mips3*-*) - basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` - ;; - mips3*) - basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown - ;; - monitor) - basic_machine=m68k-rom68k - os=-coff - ;; - morphos) - basic_machine=powerpc-unknown - os=-morphos - ;; - msdos) - basic_machine=i386-pc - os=-msdos - ;; - ms1-*) - basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` - ;; - mvs) - basic_machine=i370-ibm - os=-mvs - ;; - ncr3000) - basic_machine=i486-ncr - os=-sysv4 - ;; - netbsd386) - basic_machine=i386-unknown - os=-netbsd - ;; - netwinder) - basic_machine=armv4l-rebel - os=-linux - ;; - news | news700 | news800 | news900) - basic_machine=m68k-sony - os=-newsos - ;; - news1000) - basic_machine=m68030-sony - os=-newsos - ;; - news-3600 | risc-news) - basic_machine=mips-sony - os=-newsos - ;; - necv70) - basic_machine=v70-nec - os=-sysv - ;; - next | m*-next ) - basic_machine=m68k-next - case $os in - -nextstep* ) - ;; - -ns2*) - os=-nextstep2 - ;; - *) - os=-nextstep3 - ;; - esac - ;; - nh3000) - basic_machine=m68k-harris - os=-cxux - ;; - nh[45]000) - basic_machine=m88k-harris - os=-cxux - ;; - nindy960) - basic_machine=i960-intel - os=-nindy - ;; - mon960) - basic_machine=i960-intel - os=-mon960 - ;; - nonstopux) - basic_machine=mips-compaq - os=-nonstopux - ;; - np1) - basic_machine=np1-gould - ;; - nsr-tandem) - basic_machine=nsr-tandem - ;; - op50n-* | op60c-*) - basic_machine=hppa1.1-oki - os=-proelf - ;; - openrisc | openrisc-*) - 
basic_machine=or32-unknown - ;; - os400) - basic_machine=powerpc-ibm - os=-os400 - ;; - OSE68000 | ose68000) - basic_machine=m68000-ericsson - os=-ose - ;; - os68k) - basic_machine=m68k-none - os=-os68k - ;; - pa-hitachi) - basic_machine=hppa1.1-hitachi - os=-hiuxwe2 - ;; - paragon) - basic_machine=i860-intel - os=-osf - ;; - parisc) - basic_machine=hppa-unknown - os=-linux - ;; - parisc-*) - basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux - ;; - pbd) - basic_machine=sparc-tti - ;; - pbb) - basic_machine=m68k-tti - ;; - pc532 | pc532-*) - basic_machine=ns32k-pc532 - ;; - pc98) - basic_machine=i386-pc - ;; - pc98-*) - basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pentium | p5 | k5 | k6 | nexgen | viac3) - basic_machine=i586-pc - ;; - pentiumpro | p6 | 6x86 | athlon | athlon_*) - basic_machine=i686-pc - ;; - pentiumii | pentium2 | pentiumiii | pentium3) - basic_machine=i686-pc - ;; - pentium4) - basic_machine=i786-pc - ;; - pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) - basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pentiumpro-* | p6-* | 6x86-* | athlon-*) - basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) - basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pentium4-*) - basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pn) - basic_machine=pn-gould - ;; - power) basic_machine=power-ibm - ;; - ppc) basic_machine=powerpc-unknown - ;; - ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - ppcle | powerpclittle | ppc-le | powerpc-little) - basic_machine=powerpcle-unknown - ;; - ppcle-* | powerpclittle-*) - basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - ppc64) basic_machine=powerpc64-unknown - ;; - ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - ppc64le | powerpc64little | ppc64-le | powerpc64-little) - basic_machine=powerpc64le-unknown - ;; - ppc64le-* | powerpc64little-*) - basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - ps2) - basic_machine=i386-ibm - ;; - pw32) - basic_machine=i586-unknown - os=-pw32 - ;; - rdos) - basic_machine=i386-pc - os=-rdos - ;; - rom68k) - basic_machine=m68k-rom68k - os=-coff - ;; - rm[46]00) - basic_machine=mips-siemens - ;; - rtpc | rtpc-*) - basic_machine=romp-ibm - ;; - s390 | s390-*) - basic_machine=s390-ibm - ;; - s390x | s390x-*) - basic_machine=s390x-ibm - ;; - sa29200) - basic_machine=a29k-amd - os=-udi - ;; - sb1) - basic_machine=mipsisa64sb1-unknown - ;; - sb1el) - basic_machine=mipsisa64sb1el-unknown - ;; - sde) - basic_machine=mipsisa32-sde - os=-elf - ;; - sei) - basic_machine=mips-sei - os=-seiux - ;; - sequent) - basic_machine=i386-sequent - ;; - sh) - basic_machine=sh-hitachi - os=-hms - ;; - sh5el) - basic_machine=sh5le-unknown - ;; - sh64) - basic_machine=sh64-unknown - ;; - sparclite-wrs | simso-wrs) - basic_machine=sparclite-wrs - os=-vxworks - ;; - sps7) - basic_machine=m68k-bull - os=-sysv2 - ;; - spur) - basic_machine=spur-unknown - ;; - st2000) - basic_machine=m68k-tandem - ;; - stratus) - basic_machine=i860-stratus - os=-sysv4 - ;; - sun2) - basic_machine=m68000-sun - ;; - sun2os3) - basic_machine=m68000-sun - os=-sunos3 - ;; - sun2os4) - basic_machine=m68000-sun - os=-sunos4 - ;; - sun3os3) - basic_machine=m68k-sun - os=-sunos3 - ;; - sun3os4) - basic_machine=m68k-sun - os=-sunos4 - ;; - sun4os3) - basic_machine=sparc-sun - os=-sunos3 - ;; - sun4os4) 
- basic_machine=sparc-sun - os=-sunos4 - ;; - sun4sol2) - basic_machine=sparc-sun - os=-solaris2 - ;; - sun3 | sun3-*) - basic_machine=m68k-sun - ;; - sun4) - basic_machine=sparc-sun - ;; - sun386 | sun386i | roadrunner) - basic_machine=i386-sun - ;; - sv1) - basic_machine=sv1-cray - os=-unicos - ;; - symmetry) - basic_machine=i386-sequent - os=-dynix - ;; - t3e) - basic_machine=alphaev5-cray - os=-unicos - ;; - t90) - basic_machine=t90-cray - os=-unicos - ;; - tic54x | c54x*) - basic_machine=tic54x-unknown - os=-coff - ;; - tic55x | c55x*) - basic_machine=tic55x-unknown - os=-coff - ;; - tic6x | c6x*) - basic_machine=tic6x-unknown - os=-coff - ;; - tile*) - basic_machine=tile-unknown - os=-linux-gnu - ;; - tx39) - basic_machine=mipstx39-unknown - ;; - tx39el) - basic_machine=mipstx39el-unknown - ;; - toad1) - basic_machine=pdp10-xkl - os=-tops20 - ;; - tower | tower-32) - basic_machine=m68k-ncr - ;; - tpf) - basic_machine=s390x-ibm - os=-tpf - ;; - udi29k) - basic_machine=a29k-amd - os=-udi - ;; - ultra3) - basic_machine=a29k-nyu - os=-sym1 - ;; - v810 | necv810) - basic_machine=v810-nec - os=-none - ;; - vaxv) - basic_machine=vax-dec - os=-sysv - ;; - vms) - basic_machine=vax-dec - os=-vms - ;; - vpp*|vx|vx-*) - basic_machine=f301-fujitsu - ;; - vxworks960) - basic_machine=i960-wrs - os=-vxworks - ;; - vxworks68) - basic_machine=m68k-wrs - os=-vxworks - ;; - vxworks29k) - basic_machine=a29k-wrs - os=-vxworks - ;; - w65*) - basic_machine=w65-wdc - os=-none - ;; - w89k-*) - basic_machine=hppa1.1-winbond - os=-proelf - ;; - xbox) - basic_machine=i686-pc - os=-mingw32 - ;; - xps | xps100) - basic_machine=xps100-honeywell - ;; - ymp) - basic_machine=ymp-cray - os=-unicos - ;; - z8k-*-coff) - basic_machine=z8k-unknown - os=-sim - ;; - none) - basic_machine=none-none - os=-none - ;; - -# Here we handle the default manufacturer of certain CPU types. It is in -# some cases the only manufacturer, in others, it is the most popular. - w89k) - basic_machine=hppa1.1-winbond - ;; - op50n) - basic_machine=hppa1.1-oki - ;; - op60c) - basic_machine=hppa1.1-oki - ;; - romp) - basic_machine=romp-ibm - ;; - mmix) - basic_machine=mmix-knuth - ;; - rs6000) - basic_machine=rs6000-ibm - ;; - vax) - basic_machine=vax-dec - ;; - pdp10) - # there are many clones, so DEC is not a safe bet - basic_machine=pdp10-unknown - ;; - pdp11) - basic_machine=pdp11-dec - ;; - we32k) - basic_machine=we32k-att - ;; - sh[1234] | sh[24]a | sh[34]eb | sh[1234]le | sh[23]ele) - basic_machine=sh-unknown - ;; - sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) - basic_machine=sparc-sun - ;; - cydra) - basic_machine=cydra-cydrome - ;; - orion) - basic_machine=orion-highlevel - ;; - orion105) - basic_machine=clipper-highlevel - ;; - mac | mpw | mac-mpw) - basic_machine=m68k-apple - ;; - pmac | pmac-mpw) - basic_machine=powerpc-apple - ;; - *-unknown) - # Make sure to match an already-canonicalized machine name. - ;; - *) - echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 - exit 1 - ;; -esac - -# Here we canonicalize certain aliases for manufacturers. -case $basic_machine in - *-digital*) - basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` - ;; - *-commodore*) - basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` - ;; - *) - ;; -esac - -# Decode manufacturer-specific aliases for certain operating systems. - -if [ x"$os" != x"" ] -then -case $os in - # First match some system type aliases - # that might get confused with valid system types. 
- # -solaris* is a basic system type, with this one exception. - -solaris1 | -solaris1.*) - os=`echo $os | sed -e 's|solaris1|sunos4|'` - ;; - -solaris) - os=-solaris2 - ;; - -svr4*) - os=-sysv4 - ;; - -unixware*) - os=-sysv4.2uw - ;; - -gnu/linux*) - os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` - ;; - # First accept the basic system types. - # The portable systems comes first. - # Each alternative MUST END IN A *, to match a version number. - # -sysv* is not here because it comes later, after sysvr4. - -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ - | -*vms* | -sco* | -esix* | -isc* | -aix* | -sunos | -sunos[34]*\ - | -hpux* | -unos* | -osf* | -luna* | -dgux* | -solaris* | -sym* \ - | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ - | -aos* \ - | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ - | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ - | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ - | -openbsd* | -solidbsd* \ - | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ - | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ - | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ - | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ - | -chorusos* | -chorusrdb* \ - | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ - | -mingw32* | -linux-gnu* | -linux-newlib* | -linux-uclibc* \ - | -uxpv* | -beos* | -mpeix* | -udk* \ - | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ - | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ - | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ - | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ - | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ - | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ - | -skyos* | -haiku* | -rdos* | -toppers* | -drops*) - # Remember, each alternative MUST END IN *, to match a version number. - ;; - -qnx*) - case $basic_machine in - x86-* | i*86-*) - ;; - *) - os=-nto$os - ;; - esac - ;; - -nto-qnx*) - ;; - -nto*) - os=`echo $os | sed -e 's|nto|nto-qnx|'` - ;; - -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ - | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ - | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) - ;; - -mac*) - os=`echo $os | sed -e 's|mac|macos|'` - ;; - -linux-dietlibc) - os=-linux-dietlibc - ;; - -linux*) - os=`echo $os | sed -e 's|linux|linux-gnu|'` - ;; - -sunos5*) - os=`echo $os | sed -e 's|sunos5|solaris2|'` - ;; - -sunos6*) - os=`echo $os | sed -e 's|sunos6|solaris3|'` - ;; - -opened*) - os=-openedition - ;; - -os400*) - os=-os400 - ;; - -wince*) - os=-wince - ;; - -osfrose*) - os=-osfrose - ;; - -osf*) - os=-osf - ;; - -utek*) - os=-bsd - ;; - -dynix*) - os=-bsd - ;; - -acis*) - os=-aos - ;; - -atheos*) - os=-atheos - ;; - -syllable*) - os=-syllable - ;; - -386bsd) - os=-bsd - ;; - -ctix* | -uts*) - os=-sysv - ;; - -nova*) - os=-rtmk-nova - ;; - -ns2 ) - os=-nextstep2 - ;; - -nsk*) - os=-nsk - ;; - # Preserve the version number of sinix5. - -sinix5.*) - os=`echo $os | sed -e 's|sinix|sysv|'` - ;; - -sinix*) - os=-sysv4 - ;; - -tpf*) - os=-tpf - ;; - -triton*) - os=-sysv3 - ;; - -oss*) - os=-sysv3 - ;; - -svr4) - os=-sysv4 - ;; - -svr3) - os=-sysv3 - ;; - -sysvr4) - os=-sysv4 - ;; - # This must come after -sysvr4. 
- -sysv*) - ;; - -ose*) - os=-ose - ;; - -es1800*) - os=-ose - ;; - -xenix) - os=-xenix - ;; - -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) - os=-mint - ;; - -aros*) - os=-aros - ;; - -kaos*) - os=-kaos - ;; - -zvmoe) - os=-zvmoe - ;; - -none) - ;; - *) - # Get rid of the `-' at the beginning of $os. - os=`echo $os | sed 's/[^-]*-//'` - echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 - exit 1 - ;; -esac -else - -# Here we handle the default operating systems that come with various machines. -# The value should be what the vendor currently ships out the door with their -# machine or put another way, the most popular os provided with the machine. - -# Note that if you're going to try to match "-MANUFACTURER" here (say, -# "-sun"), then you have to tell the case statement up towards the top -# that MANUFACTURER isn't an operating system. Otherwise, code above -# will signal an error saying that MANUFACTURER isn't an operating -# system, and we'll never get to this point. - -case $basic_machine in - score-*) - os=-elf - ;; - spu-*) - os=-elf - ;; - *-acorn) - os=-riscix1.2 - ;; - arm*-rebel) - os=-linux - ;; - arm*-semi) - os=-aout - ;; - c4x-* | tic4x-*) - os=-coff - ;; - # This must come before the *-dec entry. - pdp10-*) - os=-tops20 - ;; - pdp11-*) - os=-none - ;; - *-dec | vax-*) - os=-ultrix4.2 - ;; - m68*-apollo) - os=-domain - ;; - i386-sun) - os=-sunos4.0.2 - ;; - m68000-sun) - os=-sunos3 - # This also exists in the configure program, but was not the - # default. - # os=-sunos4 - ;; - m68*-cisco) - os=-aout - ;; - mep-*) - os=-elf - ;; - mips*-cisco) - os=-elf - ;; - mips*-*) - os=-elf - ;; - or32-*) - os=-coff - ;; - *-tti) # must be before sparc entry or we get the wrong os. - os=-sysv3 - ;; - sparc-* | *-sun) - os=-sunos4.1.1 - ;; - *-be) - os=-beos - ;; - *-haiku) - os=-haiku - ;; - *-ibm) - os=-aix - ;; - *-knuth) - os=-mmixware - ;; - *-wec) - os=-proelf - ;; - *-winbond) - os=-proelf - ;; - *-oki) - os=-proelf - ;; - *-hp) - os=-hpux - ;; - *-hitachi) - os=-hiux - ;; - i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) - os=-sysv - ;; - *-cbm) - os=-amigaos - ;; - *-dg) - os=-dgux - ;; - *-dolphin) - os=-sysv3 - ;; - m68k-ccur) - os=-rtu - ;; - m88k-omron*) - os=-luna - ;; - *-next ) - os=-nextstep - ;; - *-sequent) - os=-ptx - ;; - *-crds) - os=-unos - ;; - *-ns) - os=-genix - ;; - i370-*) - os=-mvs - ;; - *-next) - os=-nextstep3 - ;; - *-gould) - os=-sysv - ;; - *-highlevel) - os=-bsd - ;; - *-encore) - os=-bsd - ;; - *-sgi) - os=-irix - ;; - *-siemens) - os=-sysv4 - ;; - *-masscomp) - os=-rtu - ;; - f30[01]-fujitsu | f700-fujitsu) - os=-uxpv - ;; - *-rom68k) - os=-coff - ;; - *-*bug) - os=-coff - ;; - *-apple) - os=-macos - ;; - *-atari*) - os=-mint - ;; - *) - os=-none - ;; -esac -fi - -# Here we handle the case where we know the os, and the CPU type, but not the -# manufacturer. We pick the logical manufacturer. 
-vendor=unknown -case $basic_machine in - *-unknown) - case $os in - -riscix*) - vendor=acorn - ;; - -sunos*) - vendor=sun - ;; - -aix*) - vendor=ibm - ;; - -beos*) - vendor=be - ;; - -hpux*) - vendor=hp - ;; - -mpeix*) - vendor=hp - ;; - -hiux*) - vendor=hitachi - ;; - -unos*) - vendor=crds - ;; - -dgux*) - vendor=dg - ;; - -luna*) - vendor=omron - ;; - -genix*) - vendor=ns - ;; - -mvs* | -opened*) - vendor=ibm - ;; - -os400*) - vendor=ibm - ;; - -ptx*) - vendor=sequent - ;; - -tpf*) - vendor=ibm - ;; - -vxsim* | -vxworks* | -windiss*) - vendor=wrs - ;; - -aux*) - vendor=apple - ;; - -hms*) - vendor=hitachi - ;; - -mpw* | -macos*) - vendor=apple - ;; - -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) - vendor=atari - ;; - -vos*) - vendor=stratus - ;; - esac - basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` - ;; -esac - -echo $basic_machine$os -exit - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "timestamp='" -# time-stamp-format: "%:y-%02m-%02d" -# time-stamp-end: "'" -# End:
diff --git a/third_party/libevent/configure b/third_party/libevent/configure deleted file mode 100755 index c98d1ff7..0000000 --- a/third_party/libevent/configure +++ /dev/null Binary files differ
diff --git a/third_party/libevent/configure.in b/third_party/libevent/configure.in index bf21399..468d7744 100644 --- a/third_party/libevent/configure.in +++ b/third_party/libevent/configure.in
@@ -2,10 +2,16 @@ dnl Dug Song <dugsong@monkey.org> AC_INIT(event.c) -AM_INIT_AUTOMAKE(libevent,1.4.13-stable) +AM_INIT_AUTOMAKE(libevent,1.4.15) AM_CONFIG_HEADER(config.h) dnl AM_MAINTAINER_MODE +AC_CONFIG_MACRO_DIR([m4]) + +AC_CANONICAL_HOST + +AC_DEFINE(NUMERIC_VERSION, 0x01040f00, [Numeric representation of the version]) + dnl Initialize prefix. if test "$prefix" = "NONE"; then prefix="/usr/local" @@ -24,6 +30,25 @@ CFLAGS="$CFLAGS -fno-strict-aliasing" fi +dnl Libevent 1.4 isn't multithreaded, but some of its functions are +dnl documented to be reentrant. If you don't define the right macros +dnl on some platforms, you get non-reentrant versions of the libc +dnl functinos (like an errno that's shared by all threads). +AC_MSG_CHECKING([whether we need extra flags to make libc reentrant]) +case $host in + *solaris* | *-osf* | *-hpux* ) + AC_MSG_RESULT([-D_REENTRANT]) + CFLAGS="$CFLAGS -D_REENTRANT" + ;; + *-aix* | *-freebsd* | *-darwin* ) + AC_MSG_RESULT([-D_THREAD_SAFE]) + CFLAGS="$CFLAGS -D_THREAD_SAFE" + ;; + *) + AC_MSG_RESULT(no) + ;; +esac + AC_ARG_ENABLE(gcc-warnings, AS_HELP_STRING(--enable-gcc-warnings, enable verbose warnings with GCC)) @@ -223,13 +248,14 @@ if ((kq = kqueue()) == -1) exit(1); + memset(&ev, 0, sizeof(ev)); ev.ident = fd[[1]]; ev.filter = EVFILT_WRITE; ev.flags = EV_ADD | EV_ENABLE; n = kevent(kq, &ev, 1, NULL, 0, NULL); if (n == -1) exit(1); - + read(fd[[0]], buf, sizeof(buf)); ts.tv_sec = 0;
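The configure.in hunk above does three things: bumps the version to 1.4.15 and defines NUMERIC_VERSION, adds per-host CFLAGS (-D_REENTRANT or -D_THREAD_SAFE) so libc exposes its reentrant variants (e.g. a per-thread errno), and zeroes the struct kevent used by the Mac OS X kqueue sanity check before filling it in. A minimal standalone sketch of that last point follows; it is not the configure probe itself, and the descriptor handling is illustrative only.

#include <sys/types.h>
#include <sys/event.h>
#include <sys/time.h>
#include <string.h>
#include <unistd.h>

/* Probe whether kqueue accepts an EVFILT_WRITE filter on 'wfd'.
 * The memset() mirrors the line added above: fields the probe never
 * assigns (fflags, data, udata) must not contain garbage. */
static int kqueue_write_filter_ok(int wfd)
{
    struct kevent ev;
    int kq = kqueue();
    int n;

    if (kq == -1)
        return 0;
    memset(&ev, 0, sizeof(ev));
    ev.ident  = wfd;
    ev.filter = EVFILT_WRITE;
    ev.flags  = EV_ADD | EV_ENABLE;
    n = kevent(kq, &ev, 1, NULL, 0, NULL);
    close(kq);
    return n != -1;
}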
diff --git a/third_party/libevent/evdns.c b/third_party/libevent/evdns.c index 6fa971c8..4486fb86 100644 --- a/third_party/libevent/evdns.c +++ b/third_party/libevent/evdns.c
@@ -55,9 +55,7 @@ #endif /* #define _POSIX_C_SOURCE 200507 */ -#if !defined(_GNU_SOURCE) #define _GNU_SOURCE -#endif #ifdef DNS_USE_CPU_CLOCK_FOR_ID #ifdef DNS_USE_OPENSSL_FOR_ID @@ -160,6 +158,15 @@ #define CLASS_INET EVDNS_CLASS_INET +#ifdef HAVE_SETFD +#define FD_CLOSEONEXEC(x) do { \ + if (fcntl(x, F_SETFD, 1) == -1) \ + event_warn("fcntl(%d, F_SETFD)", x); \ + } while (0) +#else +#define FD_CLOSEONEXEC(x) (void)0 +#endif + struct request { u8 *request; /* the dns packet data */ unsigned int request_len; @@ -1101,20 +1108,12 @@ static u16 transaction_id_pick(void) { for (;;) { - const struct request *req = req_head, *started_at; u16 trans_id = trans_id_function(); if (trans_id == 0xffff) continue; - /* now check to see if that id is already inflight */ - req = started_at = req_head; - if (req) { - do { - if (req->trans_id == trans_id) break; - req = req->next; - } while (req != started_at); - } - /* we didn't find it, so this is a good id */ - if (req == started_at) return trans_id; + + if (request_find_from_trans_id(trans_id) == NULL) + return trans_id; } } @@ -2134,7 +2133,8 @@ ns->socket = socket(PF_INET, SOCK_DGRAM, 0); if (ns->socket < 0) { err = 1; goto out1; } - evutil_make_socket_nonblocking(ns->socket); + FD_CLOSEONEXEC(ns->socket); + evutil_make_socket_nonblocking(ns->socket); ns->address = address; ns->port = htons(port); @@ -2706,7 +2706,7 @@ const char *const nameserver = NEXT_TOKEN; struct in_addr ina; - if (inet_aton(nameserver, &ina)) { + if (nameserver && inet_aton(nameserver, &ina)) { /* address is valid */ evdns_nameserver_add(ina.s_addr); } @@ -2848,7 +2848,7 @@ IP_ADDR_STRING *ns; GetNetworkParams_fn_t fn; - if (!(handle = LoadLibrary("iphlpapi.dll"))) { + if (!(handle = LoadLibraryA("iphlpapi.dll"))) { log(EVDNS_LOG_WARN, "Could not open iphlpapi.dll"); status = -1; goto done; @@ -2918,13 +2918,13 @@ DWORD bufsz = 0, type = 0; int status = 0; - if (RegQueryValueEx(key, subkey, 0, &type, NULL, &bufsz) + if (RegQueryValueExA(key, subkey, 0, &type, NULL, &bufsz) != ERROR_MORE_DATA) return -1; if (!(buf = malloc(bufsz))) return -1; - if (RegQueryValueEx(key, subkey, 0, &type, (LPBYTE)buf, &bufsz) + if (RegQueryValueExA(key, subkey, 0, &type, (LPBYTE)buf, &bufsz) == ERROR_SUCCESS && bufsz > 1) { status = evdns_nameserver_ip_add_line(buf); } @@ -2954,12 +2954,12 @@ if (((int)GetVersion()) > 0) { /* NT */ HKEY nt_key = 0, interfaces_key = 0; - if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, WIN_NS_NT_KEY, 0, + if (RegOpenKeyExA(HKEY_LOCAL_MACHINE, WIN_NS_NT_KEY, 0, KEY_READ, &nt_key) != ERROR_SUCCESS) { log(EVDNS_LOG_DEBUG,"Couldn't open nt key, %d",(int)GetLastError()); return -1; } - r = RegOpenKeyEx(nt_key, "Interfaces", 0, + r = RegOpenKeyExA(nt_key, "Interfaces", 0, KEY_QUERY_VALUE|KEY_ENUMERATE_SUB_KEYS, &interfaces_key); if (r != ERROR_SUCCESS) { @@ -2974,7 +2974,7 @@ RegCloseKey(nt_key); } else { HKEY win_key = 0; - if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, WIN_NS_9X_KEY, 0, + if (RegOpenKeyExA(HKEY_LOCAL_MACHINE, WIN_NS_9X_KEY, 0, KEY_READ, &win_key) != ERROR_SUCCESS) { log(EVDNS_LOG_DEBUG, "Couldn't open registry key, %d", (int)GetLastError()); return -1;
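The evdns.c changes add an FD_CLOSEONEXEC macro (applied to the nameserver socket so it is not inherited across exec), replace the hand-rolled in-flight scan in transaction_id_pick() with request_find_from_trans_id(), and switch the Win32 registry and LoadLibrary calls to their explicit ANSI variants. A standalone sketch of the close-on-exec pattern, assuming an ordinary POSIX descriptor; the macro above additionally routes the warning through libevent's internal event_warn().

#include <fcntl.h>
#include <stdio.h>

/* Mark a descriptor close-on-exec; the patch passes the literal 1,
 * which is the value of FD_CLOEXEC. */
static void set_close_on_exec(int fd)
{
    if (fcntl(fd, F_SETFD, FD_CLOEXEC) == -1)
        perror("fcntl(F_SETFD)");
}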
diff --git a/third_party/libevent/event.c b/third_party/libevent/event.c index 8b6cae5..36b1c51 100644 --- a/third_party/libevent/event.c +++ b/third_party/libevent/event.c
@@ -109,6 +109,10 @@ extern struct event_base *evsignal_base; static int use_monotonic = 1; +/* Handle signals - This is a deprecated interface */ +int (*event_sigcb)(void); /* Signal callback when gotsig is set */ +volatile sig_atomic_t event_gotsig; /* Set in signal handler */ + /* Prototypes */ static void event_queue_insert(struct event_base *, struct event *, int); static void event_queue_remove(struct event_base *, struct event *, int); @@ -164,6 +168,9 @@ if ((base = calloc(1, sizeof(struct event_base))) == NULL) event_err(1, "%s: calloc", __func__); + event_sigcb = NULL; + event_gotsig = 0; + gettime(base, &base->event_tv); min_heap_ctor(&base->timeheap); @@ -260,9 +267,14 @@ int res = 0; struct event *ev; +#if 0 + /* Right now, reinit always takes effect, since even if the + backend doesn't require it, the signal socketpair code does. + */ /* check if this event mechanism requires reinit */ if (!evsel->need_reinit) return (0); +#endif /* prevent internal delete */ if (base->sig.ev_signal_added) { @@ -275,7 +287,7 @@ EVLIST_ACTIVE); base->sig.ev_signal_added = 0; } - + if (base->evsel->dealloc != NULL) base->evsel->dealloc(base, base->evbase); evbase = base->evbase = evsel->init(base); @@ -305,7 +317,10 @@ if (base->event_count_active) return (-1); - if (base->nactivequeues && npriorities != base->nactivequeues) { + if (npriorities == base->nactivequeues) + return (0); + + if (base->nactivequeues) { for (i = 0; i < base->nactivequeues; ++i) { free(base->activequeues[i]); } @@ -371,7 +386,7 @@ ncalls--; ev->ev_ncalls = ncalls; (*ev->ev_callback)((int)ev->ev_fd, ev->ev_res, ev->ev_arg); - if (base->event_break) + if (event_gotsig || base->event_break) return; } } @@ -476,6 +491,18 @@ break; } + /* You cannot use this interface for multi-threaded apps */ + while (event_gotsig) { + event_gotsig = 0; + if (event_sigcb) { + res = (*event_sigcb)(); + if (res == -1) { + errno = EINTR; + return (-1); + } + } + } + timeout_correct(base, &tv); tv_p = &tv;
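event.c restores the deprecated event_sigcb/event_gotsig pair so older applications keep working: a signal handler sets event_gotsig, and event_base_loop() invokes event_sigcb on its next pass (a return of -1 aborts the loop with EINTR). As the added comment says, this interface is not usable from multithreaded programs. A hedged usage sketch against the libevent 1.4 API; SIGUSR1 is an arbitrary choice, and the extern declarations are repeated here for clarity (event.h in 1.4 also declares them).

#include <signal.h>
#include <event.h>

extern int (*event_sigcb)(void);
extern volatile sig_atomic_t event_gotsig;

static void on_sigusr1(int sig)
{
    (void)sig;
    event_gotsig = 1;              /* picked up by the event loop */
}

static int sig_callback(void)
{
    /* deferred signal work goes here; return -1 to abort the loop */
    return 0;
}

int main(void)
{
    event_init();
    event_sigcb = sig_callback;
    signal(SIGUSR1, on_sigusr1);
    /* a real program would register events before dispatching */
    return event_dispatch();
}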
diff --git a/third_party/libevent/event.h b/third_party/libevent/event.h index 72e9b8b..f0887b96 100644 --- a/third_party/libevent/event.h +++ b/third_party/libevent/event.h
@@ -1030,6 +1030,38 @@ char *evbuffer_readline(struct evbuffer *); +/** Used to tell evbuffer_readln what kind of line-ending to look for. + */ +enum evbuffer_eol_style { + /** Any sequence of CR and LF characters is acceptable as an EOL. */ + EVBUFFER_EOL_ANY, + /** An EOL is an LF, optionally preceded by a CR. This style is + * most useful for implementing text-based internet protocols. */ + EVBUFFER_EOL_CRLF, + /** An EOL is a CR followed by an LF. */ + EVBUFFER_EOL_CRLF_STRICT, + /** An EOL is a LF. */ + EVBUFFER_EOL_LF +}; + +/** + * Read a single line from an event buffer. + * + * Reads a line terminated by an EOL as determined by the evbuffer_eol_style + * argument. Returns a newly allocated nul-terminated string; the caller must + * free the returned value. The EOL is not included in the returned string. + * + * @param buffer the evbuffer to read from + * @param n_read_out if non-NULL, points to a size_t that is set to the + * number of characters in the returned string. This is useful for + * strings that can contain NUL characters. + * @param eol_style the style of line-ending to use. + * @return pointer to a single line, or NULL if an error occurred + */ +char *evbuffer_readln(struct evbuffer *buffer, size_t *n_read_out, + enum evbuffer_eol_style eol_style); + + /** Move data from one evbuffer into another evbuffer.
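The header gains evbuffer_readln() plus the evbuffer_eol_style enum documented above. A short usage sketch: the returned string is heap-allocated and must be freed by the caller, and n_read_out (if non-NULL) receives the line length, which matters for lines containing embedded NUL bytes.

#include <stdio.h>
#include <stdlib.h>
#include <event.h>

/* Drain an evbuffer line by line, accepting LF or CRLF endings. */
static void print_lines(struct evbuffer *buf)
{
    size_t len;
    char *line;

    while ((line = evbuffer_readln(buf, &len, EVBUFFER_EOL_CRLF)) != NULL) {
        printf("%zu bytes: %s\n", len, line);
        free(line);                 /* caller owns the returned string */
    }
}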
diff --git a/third_party/libevent/evhttp.h b/third_party/libevent/evhttp.h index 30dee8bb..48c1d918 100644 --- a/third_party/libevent/evhttp.h +++ b/third_party/libevent/evhttp.h
@@ -81,7 +81,7 @@ * @param http a pointer to an evhttp object * @param address a string containing the IP address to listen(2) on * @param port the port number to listen on - * @return a newly allocated evhttp struct + * @return 0 on success, -1 on failure * @see evhttp_free() */ int evhttp_bind_socket(struct evhttp *http, const char *address, u_short port); @@ -221,7 +221,8 @@ struct evbuffer *input_buffer; /* read data */ ev_int64_t ntoread; - int chunked; + int chunked:1, /* a chunked request */ + userdone:1; /* the user has sent all data */ struct evbuffer *output_buffer; /* outgoing post or data */ @@ -252,6 +253,9 @@ /** Frees the request object and removes associated events. */ void evhttp_request_free(struct evhttp_request *req); +/** Returns the connection object associated with the request or NULL */ +struct evhttp_connection *evhttp_request_get_connection(struct evhttp_request *req); + /** * A connection object that can be used to for making HTTP requests. The * connection object tries to establish the connection when it is given an
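evhttp.h corrects the evhttp_bind_socket() documentation (it returns 0 or -1, not a pointer), packs chunked and the new userdone flag into one-bit fields, and adds evhttp_request_get_connection(). A hedged server sketch against the 1.4 API; the address and port are arbitrary, and passing NULL to evhttp_new() is assumed to attach events to the default base initialized by event_init().

#include <stdio.h>
#include <stdlib.h>
#include <event.h>
#include <evhttp.h>

static void gencb(struct evhttp_request *req, void *arg)
{
    (void)arg;
    /* New accessor: NULL means the request has been detached from its
     * connection (e.g. the peer timed out or went away). */
    if (evhttp_request_get_connection(req) == NULL)
        return;
    evhttp_send_reply(req, HTTP_OK, "OK", NULL);
}

int main(void)
{
    struct evhttp *http;

    event_init();
    http = evhttp_new(NULL);
    if (http == NULL || evhttp_bind_socket(http, "127.0.0.1", 8080) == -1) {
        fprintf(stderr, "could not bind\n");   /* -1 signals failure */
        return EXIT_FAILURE;
    }
    evhttp_set_gencb(http, gencb, NULL);
    return event_dispatch();
}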
diff --git a/third_party/libevent/evport.c b/third_party/libevent/evport.c index a2ee1bc..1f5ebc41 100644 --- a/third_party/libevent/evport.c +++ b/third_party/libevent/evport.c
@@ -367,6 +367,12 @@ if (pevt->portev_events & POLLOUT) res |= EV_WRITE; + /* + * Check for the error situations or a hangup situation + */ + if (pevt->portev_events & (POLLERR|POLLHUP|POLLNVAL)) + res |= EV_READ|EV_WRITE; + assert(epdp->ed_nevents > fd); fdi = &(epdp->ed_fds[fd]);
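The Solaris event-ports backend now reports POLLERR, POLLHUP, and POLLNVAL as both readable and writable, so user callbacks still fire and can observe the failure from a subsequent read() or write(). A standalone sketch of that mapping; EV_READ and EV_WRITE are written out with their values from event.h rather than included.

#include <poll.h>

#define EV_READ  0x02   /* as defined in event.h */
#define EV_WRITE 0x04

/* Translate event-port/poll(2) revents into libevent result flags. */
static short port_revents_to_ev(int revents)
{
    short res = 0;

    if (revents & POLLIN)
        res |= EV_READ;
    if (revents & POLLOUT)
        res |= EV_WRITE;
    /* Error or hangup: flag both directions so the callback runs. */
    if (revents & (POLLERR | POLLHUP | POLLNVAL))
        res |= EV_READ | EV_WRITE;
    return res;
}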
diff --git a/third_party/libevent/evutil.c b/third_party/libevent/evutil.c index 564377d7..cc6d0f4 100644 --- a/third_party/libevent/evutil.c +++ b/third_party/libevent/evutil.c
@@ -168,10 +168,17 @@ ioctlsocket(fd, FIONBIO, (unsigned long*) &nonblocking); } #else - if (fcntl(fd, F_SETFL, O_NONBLOCK) == -1) { - event_warn("fcntl(O_NONBLOCK)"); - return -1; -} + { + int flags; + if ((flags = fcntl(fd, F_GETFL, NULL)) < 0) { + event_warn("fcntl(%d, F_GETFL)", fd); + return -1; + } + if (fcntl(fd, F_SETFL, flags | O_NONBLOCK) == -1) { + event_warn("fcntl(%d, F_SETFL)", fd); + return -1; + } + } #endif return 0; }
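evutil_make_socket_nonblocking() previously did fcntl(fd, F_SETFL, O_NONBLOCK), which silently clears any other file-status flags; the replacement reads the current flags and ORs in O_NONBLOCK. The same pattern in isolation:

#include <fcntl.h>
#include <stdio.h>

/* Add O_NONBLOCK without clobbering flags such as O_APPEND. */
static int make_nonblocking(int fd)
{
    int flags = fcntl(fd, F_GETFL, 0);

    if (flags < 0) {
        perror("fcntl(F_GETFL)");
        return -1;
    }
    if (fcntl(fd, F_SETFL, flags | O_NONBLOCK) == -1) {
        perror("fcntl(F_SETFL)");
        return -1;
    }
    return 0;
}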
diff --git a/third_party/libevent/freebsd/event-config.h b/third_party/libevent/freebsd/event-config.h index 662abc87..be1eae4 100644 --- a/third_party/libevent/freebsd/event-config.h +++ b/third_party/libevent/freebsd/event-config.h
@@ -212,6 +212,13 @@ /* Define if kqueue works correctly with pipes */ #define _EVENT_HAVE_WORKING_KQUEUE 1 +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#define _EVENT_LT_OBJDIR ".libs/" + +/* Numeric representation of the version */ +#define _EVENT_NUMERIC_VERSION 0x01040f00 + /* Name of package */ #define _EVENT_PACKAGE "libevent" @@ -227,6 +234,9 @@ /* Define to the one symbol short name of this package. */ #define _EVENT_PACKAGE_TARNAME "" +/* Define to the home page for this package. */ +#define _EVENT_PACKAGE_URL "" + /* Define to the version of this package. */ #define _EVENT_PACKAGE_VERSION "" @@ -249,7 +259,7 @@ #define _EVENT_TIME_WITH_SYS_TIME 1 /* Version number of package */ -#define _EVENT_VERSION "1.4.13-stable" +#define _EVENT_VERSION "1.4.15" /* Define to appropriate substitue if compiler doesnt have __func__ */ /* #undef _EVENT___func__ */
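The generated config headers pick up the 1.4.15 version string and the new _EVENT_NUMERIC_VERSION, which apparently packs major, minor, and patch into one byte each so version checks can be done numerically instead of by parsing the string. A small sketch decoding 0x01040f00 under that assumption:

#include <stdio.h>

#define _EVENT_NUMERIC_VERSION 0x01040f00

int main(void)
{
    unsigned v = _EVENT_NUMERIC_VERSION;

    /* 0x01 . 0x04 . 0x0f -> 1.4.15; the low byte is unused here. */
    printf("libevent %u.%u.%u\n",
           (v >> 24) & 0xff, (v >> 16) & 0xff, (v >> 8) & 0xff);
    return 0;
}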
diff --git a/third_party/libevent/http.c b/third_party/libevent/http.c index b04ad54..4abce23 100644 --- a/third_party/libevent/http.c +++ b/third_party/libevent/http.c
@@ -99,8 +99,13 @@ #define NI_MAXSERV 32 #define NI_MAXHOST 1025 +#ifndef NI_NUMERICHOST #define NI_NUMERICHOST 1 +#endif + +#ifndef NI_NUMERICSERV #define NI_NUMERICSERV 2 +#endif static int fake_getnameinfo(const struct sockaddr *sa, size_t salen, char *host, @@ -142,6 +147,8 @@ #endif #ifndef HAVE_GETADDRINFO +/* Apparently msvc2010 does have an addrinfo definition visible here */ +#if !defined(WIN32) || !defined(_MSC_VER) || (_MSC_VER < 1600) struct addrinfo { int ai_family; int ai_socktype; @@ -150,6 +157,7 @@ struct sockaddr *ai_addr; struct addrinfo *ai_next; }; +#endif static int fake_getaddrinfo(const char *hostname, struct addrinfo *ai) { @@ -390,7 +398,7 @@ /* Add the content length on a post request if missing */ if (req->type == EVHTTP_REQ_POST && evhttp_find_header(req->output_headers, "Content-Length") == NULL){ - char size[12]; + char size[22]; evutil_snprintf(size, sizeof(size), "%ld", (long)EVBUFFER_LENGTH(req->output_buffer)); evhttp_add_header(req->output_headers, "Content-Length", size); @@ -447,7 +455,7 @@ { if (evhttp_find_header(headers, "Transfer-Encoding") == NULL && evhttp_find_header(headers, "Content-Length") == NULL) { - char len[12]; + char len[22]; evutil_snprintf(len, sizeof(len), "%ld", content_length); evhttp_add_header(headers, "Content-Length", len); } @@ -606,8 +614,18 @@ * these are cases in which we probably should just * close the connection and not send a reply. this * case may happen when a browser keeps a persistent - * connection open and we timeout on the read. + * connection open and we timeout on the read. when + * the request is still being used for sending, we + * need to disassociated it from the connection here. */ + if (!req->userdone) { + /* remove it so that it will not be freed */ + TAILQ_REMOVE(&req->evcon->requests, req, next); + /* indicate that this request no longer has a + * connection object + */ + req->evcon = NULL; + } return (-1); case EVCON_HTTP_INVALID_HEADER: default: /* xxx: probably should just error on default */ @@ -654,11 +672,13 @@ cb = req->cb; cb_arg = req->cb_arg; + /* do not fail all requests; the next request is going to get + * send over a new connection. when a user cancels a request, + * all other pending requests should be processed as normal + */ TAILQ_REMOVE(&evcon->requests, req, next); evhttp_request_free(req); - /* xxx: maybe we should fail all requests??? */ - /* reset the connection */ evhttp_connection_reset(evcon); @@ -751,7 +771,7 @@ */ evhttp_connection_start_detectclose(evcon); } - } else { + } else if (evcon->state != EVCON_DISCONNECTED) { /* * incoming connection - we need to leave the request on the * connection so that we can reply to it. @@ -933,6 +953,7 @@ return; } else if (n == 0) { /* Connection closed */ + evcon->state = EVCON_DISCONNECTED; evhttp_connection_done(evcon); return; } @@ -990,7 +1011,11 @@ (*evcon->closecb)(evcon, evcon->closecb_arg); } - /* remove all requests that might be queued on this connection */ + /* remove all requests that might be queued on this + * connection. for server connections, this should be empty. + * because it gets dequeued either in evhttp_connection_done or + * evhttp_connection_fail. 
+ */ while ((req = TAILQ_FIRST(&evcon->requests)) != NULL) { TAILQ_REMOVE(&evcon->requests, req, next); evhttp_request_free(req); @@ -1213,15 +1238,14 @@ { char *protocol; char *number; - char *readable; + const char *readable = ""; protocol = strsep(&line, " "); if (line == NULL) return (-1); number = strsep(&line, " "); - if (line == NULL) - return (-1); - readable = line; + if (line != NULL) + readable = line; if (strcmp(protocol, "HTTP/1.0") == 0) { req->major = 1; @@ -1294,7 +1318,7 @@ } if ((req->uri = strdup(uri)) == NULL) { - event_debug(("%s: evhttp_decode_uri", __func__)); + event_debug(("%s: strdup", __func__)); return (-1); } @@ -1918,8 +1942,16 @@ { struct evhttp_connection *evcon = req->evcon; + if (evcon == NULL) { + evhttp_request_free(req); + return; + } + assert(TAILQ_FIRST(&evcon->requests) == req); + /* we expect no more calls form the user on this request */ + req->userdone = 1; + /* xxx: not sure if we really should expose the data buffer this way */ if (databuf != NULL) evbuffer_add_buffer(req->output_buffer, databuf); @@ -1957,15 +1989,20 @@ void evhttp_send_reply_chunk(struct evhttp_request *req, struct evbuffer *databuf) { + struct evhttp_connection *evcon = req->evcon; + + if (evcon == NULL) + return; + if (req->chunked) { - evbuffer_add_printf(req->evcon->output_buffer, "%x\r\n", + evbuffer_add_printf(evcon->output_buffer, "%x\r\n", (unsigned)EVBUFFER_LENGTH(databuf)); } - evbuffer_add_buffer(req->evcon->output_buffer, databuf); + evbuffer_add_buffer(evcon->output_buffer, databuf); if (req->chunked) { - evbuffer_add(req->evcon->output_buffer, "\r\n", 2); + evbuffer_add(evcon->output_buffer, "\r\n", 2); } - evhttp_write_buffer(req->evcon, NULL, NULL); + evhttp_write_buffer(evcon, NULL, NULL); } void @@ -1973,6 +2010,14 @@ { struct evhttp_connection *evcon = req->evcon; + if (evcon == NULL) { + evhttp_request_free(req); + return; + } + + /* we expect no more calls form the user on this request */ + req->userdone = 1; + if (req->chunked) { evbuffer_add(req->evcon->output_buffer, "0\r\n\r\n", 5); evhttp_write_buffer(req->evcon, evhttp_send_done, NULL); @@ -2190,8 +2235,15 @@ struct evhttp *http = arg; struct evhttp_cb *cb = NULL; + event_debug(("%s: req->uri=%s", __func__, req->uri)); if (req->uri == NULL) { - evhttp_send_error(req, HTTP_BADREQUEST, "Bad Request"); + event_debug(("%s: bad request", __func__)); + if (req->evcon->state == EVCON_DISCONNECTED) { + evhttp_connection_fail(req->evcon, EVCON_HTTP_EOF); + } else { + event_debug(("%s: sending error", __func__)); + evhttp_send_error(req, HTTP_BADREQUEST, "Bad Request"); + } return; } @@ -2505,6 +2557,13 @@ free(req); } +struct evhttp_connection * +evhttp_request_get_connection(struct evhttp_request *req) +{ + return req->evcon; +} + + void evhttp_request_set_chunked_cb(struct evhttp_request *req, void (*cb)(struct evhttp_request *, void *))
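The http.c changes widen the Content-Length scratch buffers from 12 to 22 bytes (enough for a 64-bit long with sign and NUL), add the userdone flag, and make the chunked-reply helpers tolerate req->evcon == NULL, which can now happen after a read timeout detaches a request from its connection. A hedged sketch of a chunked responder using the guarded calls; the handler and payload are illustrative.

#include <event.h>
#include <evhttp.h>

/* Send one chunk and finish; with this patch, if the peer has gone
 * away the send_reply_chunk/_end calls bail out (or free the request)
 * instead of dereferencing a NULL req->evcon. */
static void chunked_cb(struct evhttp_request *req, void *arg)
{
    struct evbuffer *chunk = evbuffer_new();

    (void)arg;
    evhttp_send_reply_start(req, HTTP_OK, "OK");
    evbuffer_add_printf(chunk, "hello\n");
    evhttp_send_reply_chunk(req, chunk);
    evhttp_send_reply_end(req);
    evbuffer_free(chunk);
}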
diff --git a/third_party/libevent/install-sh b/third_party/libevent/install-sh deleted file mode 100644 index 89fc9b0..0000000 --- a/third_party/libevent/install-sh +++ /dev/null
@@ -1,238 +0,0 @@ -#! /bin/sh -# -# install - install a program, script, or datafile -# This comes from X11R5. -# -# Calling this script install-sh is preferred over install.sh, to prevent -# `make' implicit rules from creating a file called install from it -# when there is no Makefile. -# -# This script is compatible with the BSD install script, but was written -# from scratch. -# - - -# set DOITPROG to echo to test this script - -# Don't use :- since 4.3BSD and earlier shells don't like it. -doit="${DOITPROG-}" - - -# put in absolute paths if you don't have them in your path; or use env. vars. - -mvprog="${MVPROG-mv}" -cpprog="${CPPROG-cp}" -chmodprog="${CHMODPROG-chmod}" -chownprog="${CHOWNPROG-chown}" -chgrpprog="${CHGRPPROG-chgrp}" -stripprog="${STRIPPROG-strip}" -rmprog="${RMPROG-rm}" -mkdirprog="${MKDIRPROG-mkdir}" - -tranformbasename="" -transform_arg="" -instcmd="$mvprog" -chmodcmd="$chmodprog 0755" -chowncmd="" -chgrpcmd="" -stripcmd="" -rmcmd="$rmprog -f" -mvcmd="$mvprog" -src="" -dst="" -dir_arg="" - -while [ x"$1" != x ]; do - case $1 in - -c) instcmd="$cpprog" - shift - continue;; - - -d) dir_arg=true - shift - continue;; - - -m) chmodcmd="$chmodprog $2" - shift - shift - continue;; - - -o) chowncmd="$chownprog $2" - shift - shift - continue;; - - -g) chgrpcmd="$chgrpprog $2" - shift - shift - continue;; - - -s) stripcmd="$stripprog" - shift - continue;; - - -t=*) transformarg=`echo $1 | sed 's/-t=//'` - shift - continue;; - - -b=*) transformbasename=`echo $1 | sed 's/-b=//'` - shift - continue;; - - *) if [ x"$src" = x ] - then - src=$1 - else - # this colon is to work around a 386BSD /bin/sh bug - : - dst=$1 - fi - shift - continue;; - esac -done - -if [ x"$src" = x ] -then - echo "install: no input file specified" - exit 1 -else - true -fi - -if [ x"$dir_arg" != x ]; then - dst=$src - src="" - - if [ -d $dst ]; then - instcmd=: - else - instcmd=mkdir - fi -else - -# Waiting for this to be detected by the "$instcmd $src $dsttmp" command -# might cause directories to be created, which would be especially bad -# if $src (and thus $dsttmp) contains '*'. - - if [ -f $src -o -d $src ] - then - true - else - echo "install: $src does not exist" - exit 1 - fi - - if [ x"$dst" = x ] - then - echo "install: no destination specified" - exit 1 - else - true - fi - -# If destination is a directory, append the input filename; if your system -# does not like double slashes in filenames, you may need to add some logic - - if [ -d $dst ] - then - dst="$dst"/`basename $src` - else - true - fi -fi - -## this sed command emulates the dirname command -dstdir=`echo $dst | sed -e 's,[^/]*$,,;s,/$,,;s,^$,.,'` - -# Make sure that the destination directory exists. -# this part is taken from Noah Friedman's mkinstalldirs script - -# Skip lots of stat calls in the usual case. -if [ ! -d "$dstdir" ]; then -defaultIFS=' -' -IFS="${IFS-${defaultIFS}}" - -oIFS="${IFS}" -# Some sh's can't handle IFS=/ for some reason. -IFS='%' -set - `echo ${dstdir} | sed -e 's@/@%@g' -e 's@^%@/@'` -IFS="${oIFS}" - -pathcomp='' - -while [ $# -ne 0 ] ; do - pathcomp="${pathcomp}${1}" - shift - - if [ ! 
-d "${pathcomp}" ] ; - then - $mkdirprog "${pathcomp}" - else - true - fi - - pathcomp="${pathcomp}/" -done -fi - -if [ x"$dir_arg" != x ] -then - $doit $instcmd $dst && - - if [ x"$chowncmd" != x ]; then $doit $chowncmd $dst; else true ; fi && - if [ x"$chgrpcmd" != x ]; then $doit $chgrpcmd $dst; else true ; fi && - if [ x"$stripcmd" != x ]; then $doit $stripcmd $dst; else true ; fi && - if [ x"$chmodcmd" != x ]; then $doit $chmodcmd $dst; else true ; fi -else - -# If we're going to rename the final executable, determine the name now. - - if [ x"$transformarg" = x ] - then - dstfile=`basename $dst` - else - dstfile=`basename $dst $transformbasename | - sed $transformarg`$transformbasename - fi - -# don't allow the sed command to completely eliminate the filename - - if [ x"$dstfile" = x ] - then - dstfile=`basename $dst` - else - true - fi - -# Make a temp file name in the proper directory. - - dsttmp=$dstdir/#inst.$$# - -# Move or copy the file name to the temp name - - $doit $instcmd $src $dsttmp && - - trap "rm -f ${dsttmp}" 0 && - -# and set any options; do chmod last to preserve setuid bits - -# If any of these fail, we abort the whole thing. If we want to -# ignore errors from any of these, just make sure not to ignore -# errors from the above "$doit $instcmd $src $dsttmp" command. - - if [ x"$chowncmd" != x ]; then $doit $chowncmd $dsttmp; else true;fi && - if [ x"$chgrpcmd" != x ]; then $doit $chgrpcmd $dsttmp; else true;fi && - if [ x"$stripcmd" != x ]; then $doit $stripcmd $dsttmp; else true;fi && - if [ x"$chmodcmd" != x ]; then $doit $chmodcmd $dsttmp; else true;fi && - -# Now rename the file to the real destination. - - $doit $rmcmd -f $dstdir/$dstfile && - $doit $mvcmd $dsttmp $dstdir/$dstfile - -fi && - - -exit 0
diff --git a/third_party/libevent/kqueue.c b/third_party/libevent/kqueue.c index 556b73c..ee740ee 100644 --- a/third_party/libevent/kqueue.c +++ b/third_party/libevent/kqueue.c
@@ -63,6 +63,7 @@ #include "event.h" #include "event-internal.h" #include "log.h" +#include "evsignal.h" #define EVLIST_X_KQINKERNEL 0x1000 @@ -140,6 +141,7 @@ } /* Check for Mac OS X kqueue bug. */ + memset(&kqueueop->changes[0], 0, sizeof kqueueop->changes[0]); kqueueop->changes[0].ident = -1; kqueueop->changes[0].filter = EVFILT_READ; kqueueop->changes[0].flags = EV_ADD; @@ -439,12 +441,15 @@ { struct kqop *kqop = arg; + evsignal_dealloc(base); + if (kqop->changes) free(kqop->changes); if (kqop->events) free(kqop->events); if (kqop->kq >= 0 && kqop->pid == getpid()) close(kqop->kq); + memset(kqop, 0, sizeof(struct kqop)); free(kqop); }
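kqueue.c now zeroes the probe kevent (the same fix as the configure check), tears down signal state via evsignal_dealloc() when the backend is deallocated, and memsets the kqop struct before freeing it, presumably so a dangling pointer to the backend state hits zeroed fields rather than stale data. The last idiom in isolation, with a hypothetical state struct standing in for struct kqop:

#include <stdlib.h>
#include <string.h>

struct kq_state {
    int   kq;
    void *changes;
    void *events;
};

/* Free backend state; poisoning the struct first makes any later
 * use-after-free deterministic instead of silently reusing old data. */
static void kq_state_free(struct kq_state *st)
{
    free(st->changes);
    free(st->events);
    memset(st, 0, sizeof(*st));
    free(st);
}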
diff --git a/third_party/libevent/linux/event-config.h b/third_party/libevent/linux/event-config.h index c8a6431..2203253 100644 --- a/third_party/libevent/linux/event-config.h +++ b/third_party/libevent/linux/event-config.h
@@ -212,6 +212,13 @@ /* Define if kqueue works correctly with pipes */ /* #undef _EVENT_HAVE_WORKING_KQUEUE */ +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#define _EVENT_LT_OBJDIR ".libs/" + +/* Numeric representation of the version */ +#define _EVENT_NUMERIC_VERSION 0x01040f00 + /* Name of package */ #define _EVENT_PACKAGE "libevent" @@ -227,6 +234,9 @@ /* Define to the one symbol short name of this package. */ #define _EVENT_PACKAGE_TARNAME "" +/* Define to the home page for this package. */ +#define _EVENT_PACKAGE_URL "" + /* Define to the version of this package. */ #define _EVENT_PACKAGE_VERSION "" @@ -249,7 +259,7 @@ #define _EVENT_TIME_WITH_SYS_TIME 1 /* Version number of package */ -#define _EVENT_VERSION "1.4.13-stable" +#define _EVENT_VERSION "1.4.15" /* Define to appropriate substitue if compiler doesnt have __func__ */ /* #undef _EVENT___func__ */
diff --git a/third_party/libevent/ltmain.sh b/third_party/libevent/ltmain.sh deleted file mode 100644 index 27d498a..0000000 --- a/third_party/libevent/ltmain.sh +++ /dev/null
@@ -1,6956 +0,0 @@ -# ltmain.sh - Provide generalized library-building support services. -# NOTE: Changing this file will not affect anything until you rerun configure. -# -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, -# 2007, 2008 Free Software Foundation, Inc. -# Originally by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996 -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - -basename="s,^.*/,,g" - -# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh -# is ksh but when the shell is invoked as "sh" and the current value of -# the _XPG environment variable is not equal to 1 (one), the special -# positional parameter $0, within a function call, is the name of the -# function. -progpath="$0" - -# The name of this program: -progname=`echo "$progpath" | $SED $basename` -modename="$progname" - -# Global variables: -EXIT_SUCCESS=0 -EXIT_FAILURE=1 - -PROGRAM=ltmain.sh -PACKAGE=libtool -VERSION=1.5.26 -TIMESTAMP=" (1.1220.2.492 2008/01/30 06:40:56)" - -# Be Bourne compatible (taken from Autoconf:_AS_BOURNE_COMPATIBLE). -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then - emulate sh - NULLCMD=: - # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac -fi -BIN_SH=xpg4; export BIN_SH # for Tru64 -DUALCASE=1; export DUALCASE # for MKS sh - -# Check that we have a working $echo. -if test "X$1" = X--no-reexec; then - # Discard the --no-reexec flag, and continue. - shift -elif test "X$1" = X--fallback-echo; then - # Avoid inline document here, it may be left over - : -elif test "X`($echo '\t') 2>/dev/null`" = 'X\t'; then - # Yippee, $echo works! - : -else - # Restart under the correct shell, and then maybe $echo will work. - exec $SHELL "$progpath" --no-reexec ${1+"$@"} -fi - -if test "X$1" = X--fallback-echo; then - # used as fallback echo - shift - cat <<EOF -$* -EOF - exit $EXIT_SUCCESS -fi - -default_mode= -help="Try \`$progname --help' for more information." -magic="%%%MAGIC variable%%%" -mkdir="mkdir" -mv="mv -f" -rm="rm -f" - -# Sed substitution that helps us do robust quoting. It backslashifies -# metacharacters that are still active within double-quoted strings. 
-Xsed="${SED}"' -e 1s/^X//' -sed_quote_subst='s/\([\\`\\"$\\\\]\)/\\\1/g' -# test EBCDIC or ASCII -case `echo X|tr X '\101'` in - A) # ASCII based system - # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr - SP2NL='tr \040 \012' - NL2SP='tr \015\012 \040\040' - ;; - *) # EBCDIC based system - SP2NL='tr \100 \n' - NL2SP='tr \r\n \100\100' - ;; -esac - -# NLS nuisances. -# Only set LANG and LC_ALL to C if already set. -# These must not be set unconditionally because not all systems understand -# e.g. LANG=C (notably SCO). -# We save the old values to restore during execute mode. -lt_env= -for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES -do - eval "if test \"\${$lt_var+set}\" = set; then - save_$lt_var=\$$lt_var - lt_env=\"$lt_var=\$$lt_var \$lt_env\" - $lt_var=C - export $lt_var - fi" -done - -if test -n "$lt_env"; then - lt_env="env $lt_env" -fi - -# Make sure IFS has a sensible default -lt_nl=' -' -IFS=" $lt_nl" - -if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then - $echo "$modename: not configured to build any kind of library" 1>&2 - $echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2 - exit $EXIT_FAILURE -fi - -# Global variables. -mode=$default_mode -nonopt= -prev= -prevopt= -run= -show="$echo" -show_help= -execute_dlfiles= -duplicate_deps=no -preserve_args= -lo2o="s/\\.lo\$/.${objext}/" -o2lo="s/\\.${objext}\$/.lo/" -extracted_archives= -extracted_serial=0 - -##################################### -# Shell function definitions: -# This seems to be the best place for them - -# func_mktempdir [string] -# Make a temporary directory that won't clash with other running -# libtool processes, and avoids race conditions if possible. If -# given, STRING is the basename for that directory. -func_mktempdir () -{ - my_template="${TMPDIR-/tmp}/${1-$progname}" - - if test "$run" = ":"; then - # Return a directory name, but don't create it in dry-run mode - my_tmpdir="${my_template}-$$" - else - - # If mktemp works, use that first and foremost - my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null` - - if test ! -d "$my_tmpdir"; then - # Failing that, at least try and use $RANDOM to avoid a race - my_tmpdir="${my_template}-${RANDOM-0}$$" - - save_mktempdir_umask=`umask` - umask 0077 - $mkdir "$my_tmpdir" - umask $save_mktempdir_umask - fi - - # If we're not in dry-run mode, bomb out on failure - test -d "$my_tmpdir" || { - $echo "cannot create temporary directory \`$my_tmpdir'" 1>&2 - exit $EXIT_FAILURE - } - fi - - $echo "X$my_tmpdir" | $Xsed -} - - -# func_win32_libid arg -# return the library type of file 'arg' -# -# Need a lot of goo to handle *both* DLLs and import libs -# Has to be a shell function in order to 'eat' the argument -# that is supplied when $file_magic_command is called. -func_win32_libid () -{ - win32_libid_type="unknown" - win32_fileres=`file -L $1 2>/dev/null` - case $win32_fileres in - *ar\ archive\ import\ library*) # definitely import - win32_libid_type="x86 archive import" - ;; - *ar\ archive*) # could be an import, or static - if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | \ - $EGREP -e 'file format pe-i386(.*architecture: i386)?' 
>/dev/null ; then - win32_nmres=`eval $NM -f posix -A $1 | \ - $SED -n -e '1,100{ - / I /{ - s,.*,import, - p - q - } - }'` - case $win32_nmres in - import*) win32_libid_type="x86 archive import";; - *) win32_libid_type="x86 archive static";; - esac - fi - ;; - *DLL*) - win32_libid_type="x86 DLL" - ;; - *executable*) # but shell scripts are "executable" too... - case $win32_fileres in - *MS\ Windows\ PE\ Intel*) - win32_libid_type="x86 DLL" - ;; - esac - ;; - esac - $echo $win32_libid_type -} - - -# func_infer_tag arg -# Infer tagged configuration to use if any are available and -# if one wasn't chosen via the "--tag" command line option. -# Only attempt this if the compiler in the base compile -# command doesn't match the default compiler. -# arg is usually of the form 'gcc ...' -func_infer_tag () -{ - if test -n "$available_tags" && test -z "$tagname"; then - CC_quoted= - for arg in $CC; do - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - CC_quoted="$CC_quoted $arg" - done - case $@ in - # Blanks in the command may have been stripped by the calling shell, - # but not from the CC environment variable when configure was run. - " $CC "* | "$CC "* | " `$echo $CC` "* | "`$echo $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$echo $CC_quoted` "* | "`$echo $CC_quoted` "*) ;; - # Blanks at the start of $base_compile will cause this to fail - # if we don't check for them as well. - *) - for z in $available_tags; do - if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then - # Evaluate the configuration. - eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" - CC_quoted= - for arg in $CC; do - # Double-quote args containing other shell metacharacters. - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - CC_quoted="$CC_quoted $arg" - done - case "$@ " in - " $CC "* | "$CC "* | " `$echo $CC` "* | "`$echo $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$echo $CC_quoted` "* | "`$echo $CC_quoted` "*) - # The compiler in the base compile command matches - # the one in the tagged configuration. - # Assume this is the tagged configuration we want. - tagname=$z - break - ;; - esac - fi - done - # If $tagname still isn't set, then no tagged configuration - # was found and let the user know that the "--tag" command - # line option must be used. - if test -z "$tagname"; then - $echo "$modename: unable to infer tagged configuration" - $echo "$modename: specify a tag with \`--tag'" 1>&2 - exit $EXIT_FAILURE -# else -# $echo "$modename: using $tagname tagged configuration" - fi - ;; - esac - fi -} - - -# func_extract_an_archive dir oldlib -func_extract_an_archive () -{ - f_ex_an_ar_dir="$1"; shift - f_ex_an_ar_oldlib="$1" - - $show "(cd $f_ex_an_ar_dir && $AR x $f_ex_an_ar_oldlib)" - $run eval "(cd \$f_ex_an_ar_dir && $AR x \$f_ex_an_ar_oldlib)" || exit $? - if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then - : - else - $echo "$modename: ERROR: object name conflicts: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib" 1>&2 - exit $EXIT_FAILURE - fi -} - -# func_extract_archives gentop oldlib ... -func_extract_archives () -{ - my_gentop="$1"; shift - my_oldlibs=${1+"$@"} - my_oldobjs="" - my_xlib="" - my_xabs="" - my_xdir="" - my_status="" - - $show "${rm}r $my_gentop" - $run ${rm}r "$my_gentop" - $show "$mkdir $my_gentop" - $run $mkdir "$my_gentop" - my_status=$? - if test "$my_status" -ne 0 && test ! 
-d "$my_gentop"; then - exit $my_status - fi - - for my_xlib in $my_oldlibs; do - # Extract the objects. - case $my_xlib in - [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;; - *) my_xabs=`pwd`"/$my_xlib" ;; - esac - my_xlib=`$echo "X$my_xlib" | $Xsed -e 's%^.*/%%'` - my_xlib_u=$my_xlib - while :; do - case " $extracted_archives " in - *" $my_xlib_u "*) - extracted_serial=`expr $extracted_serial + 1` - my_xlib_u=lt$extracted_serial-$my_xlib ;; - *) break ;; - esac - done - extracted_archives="$extracted_archives $my_xlib_u" - my_xdir="$my_gentop/$my_xlib_u" - - $show "${rm}r $my_xdir" - $run ${rm}r "$my_xdir" - $show "$mkdir $my_xdir" - $run $mkdir "$my_xdir" - exit_status=$? - if test "$exit_status" -ne 0 && test ! -d "$my_xdir"; then - exit $exit_status - fi - case $host in - *-darwin*) - $show "Extracting $my_xabs" - # Do not bother doing anything if just a dry run - if test -z "$run"; then - darwin_orig_dir=`pwd` - cd $my_xdir || exit $? - darwin_archive=$my_xabs - darwin_curdir=`pwd` - darwin_base_archive=`$echo "X$darwin_archive" | $Xsed -e 's%^.*/%%'` - darwin_arches=`lipo -info "$darwin_archive" 2>/dev/null | $EGREP Architectures 2>/dev/null` - if test -n "$darwin_arches"; then - darwin_arches=`echo "$darwin_arches" | $SED -e 's/.*are://'` - darwin_arch= - $show "$darwin_base_archive has multiple architectures $darwin_arches" - for darwin_arch in $darwin_arches ; do - mkdir -p "unfat-$$/${darwin_base_archive}-${darwin_arch}" - lipo -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}" - cd "unfat-$$/${darwin_base_archive}-${darwin_arch}" - func_extract_an_archive "`pwd`" "${darwin_base_archive}" - cd "$darwin_curdir" - $rm "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" - done # $darwin_arches - ## Okay now we have a bunch of thin objects, gotta fatten them up :) - darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print| xargs basename | sort -u | $NL2SP` - darwin_file= - darwin_files= - for darwin_file in $darwin_filelist; do - darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP` - lipo -create -output "$darwin_file" $darwin_files - done # $darwin_filelist - ${rm}r unfat-$$ - cd "$darwin_orig_dir" - else - cd "$darwin_orig_dir" - func_extract_an_archive "$my_xdir" "$my_xabs" - fi # $darwin_arches - fi # $run - ;; - *) - func_extract_an_archive "$my_xdir" "$my_xabs" - ;; - esac - my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` - done - func_extract_archives_result="$my_oldobjs" -} -# End of Shell function definitions -##################################### - -# Darwin sucks -eval std_shrext=\"$shrext_cmds\" - -disable_libs=no - -# Parse our command line options once, thoroughly. -while test "$#" -gt 0 -do - arg="$1" - shift - - case $arg in - -*=*) optarg=`$echo "X$arg" | $Xsed -e 's/[-_a-zA-Z0-9]*=//'` ;; - *) optarg= ;; - esac - - # If the previous option needs an argument, assign it. - if test -n "$prev"; then - case $prev in - execute_dlfiles) - execute_dlfiles="$execute_dlfiles $arg" - ;; - tag) - tagname="$arg" - preserve_args="${preserve_args}=$arg" - - # Check whether tagname contains only valid characters - case $tagname in - *[!-_A-Za-z0-9,/]*) - $echo "$progname: invalid tag name: $tagname" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - case $tagname in - CC) - # Don't test for the "default" C tag, as we know, it's there, but - # not specially marked. 
- ;; - *) - if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$" < "$progpath" > /dev/null; then - taglist="$taglist $tagname" - # Evaluate the configuration. - eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$tagname'$/,/^# ### END LIBTOOL TAG CONFIG: '$tagname'$/p' < $progpath`" - else - $echo "$progname: ignoring unknown tag $tagname" 1>&2 - fi - ;; - esac - ;; - *) - eval "$prev=\$arg" - ;; - esac - - prev= - prevopt= - continue - fi - - # Have we seen a non-optional argument yet? - case $arg in - --help) - show_help=yes - ;; - - --version) - echo "\ -$PROGRAM (GNU $PACKAGE) $VERSION$TIMESTAMP - -Copyright (C) 2008 Free Software Foundation, Inc. -This is free software; see the source for copying conditions. There is NO -warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - exit $? - ;; - - --config) - ${SED} -e '1,/^# ### BEGIN LIBTOOL CONFIG/d' -e '/^# ### END LIBTOOL CONFIG/,$d' $progpath - # Now print the configurations for the tags. - for tagname in $taglist; do - ${SED} -n -e "/^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$/,/^# ### END LIBTOOL TAG CONFIG: $tagname$/p" < "$progpath" - done - exit $? - ;; - - --debug) - $echo "$progname: enabling shell trace mode" - set -x - preserve_args="$preserve_args $arg" - ;; - - --dry-run | -n) - run=: - ;; - - --features) - $echo "host: $host" - if test "$build_libtool_libs" = yes; then - $echo "enable shared libraries" - else - $echo "disable shared libraries" - fi - if test "$build_old_libs" = yes; then - $echo "enable static libraries" - else - $echo "disable static libraries" - fi - exit $? - ;; - - --finish) mode="finish" ;; - - --mode) prevopt="--mode" prev=mode ;; - --mode=*) mode="$optarg" ;; - - --preserve-dup-deps) duplicate_deps="yes" ;; - - --quiet | --silent) - show=: - preserve_args="$preserve_args $arg" - ;; - - --tag) - prevopt="--tag" - prev=tag - preserve_args="$preserve_args --tag" - ;; - --tag=*) - set tag "$optarg" ${1+"$@"} - shift - prev=tag - preserve_args="$preserve_args --tag" - ;; - - -dlopen) - prevopt="-dlopen" - prev=execute_dlfiles - ;; - - -*) - $echo "$modename: unrecognized option \`$arg'" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - ;; - - *) - nonopt="$arg" - break - ;; - esac -done - -if test -n "$prevopt"; then - $echo "$modename: option \`$prevopt' requires an argument" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE -fi - -case $disable_libs in -no) - ;; -shared) - build_libtool_libs=no - build_old_libs=yes - ;; -static) - build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac` - ;; -esac - -# If this variable is set in any of the actions, the command in it -# will be execed at the end. This prevents here-documents from being -# left over by shells. -exec_cmd= - -if test -z "$show_help"; then - - # Infer the operation mode. - if test -z "$mode"; then - $echo "*** Warning: inferring the mode of operation is deprecated." 1>&2 - $echo "*** Future versions of Libtool will require --mode=MODE be specified." 1>&2 - case $nonopt in - *cc | cc* | *++ | gcc* | *-gcc* | g++* | xlc*) - mode=link - for arg - do - case $arg in - -c) - mode=compile - break - ;; - esac - done - ;; - *db | *dbx | *strace | *truss) - mode=execute - ;; - *install*|cp|mv) - mode=install - ;; - *rm) - mode=uninstall - ;; - *) - # If we have no mode, but dlfiles were specified, then do execute mode. - test -n "$execute_dlfiles" && mode=execute - - # Just use the default operation mode. 
- if test -z "$mode"; then - if test -n "$nonopt"; then - $echo "$modename: warning: cannot infer operation mode from \`$nonopt'" 1>&2 - else - $echo "$modename: warning: cannot infer operation mode without MODE-ARGS" 1>&2 - fi - fi - ;; - esac - fi - - # Only execute mode is allowed to have -dlopen flags. - if test -n "$execute_dlfiles" && test "$mode" != execute; then - $echo "$modename: unrecognized option \`-dlopen'" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - # Change the help message to a mode-specific one. - generic_help="$help" - help="Try \`$modename --help --mode=$mode' for more information." - - # These modes are in order of execution frequency so that they run quickly. - case $mode in - # libtool compile mode - compile) - modename="$modename: compile" - # Get the compilation command and the source file. - base_compile= - srcfile="$nonopt" # always keep a non-empty value in "srcfile" - suppress_opt=yes - suppress_output= - arg_mode=normal - libobj= - later= - - for arg - do - case $arg_mode in - arg ) - # do not "continue". Instead, add this to base_compile - lastarg="$arg" - arg_mode=normal - ;; - - target ) - libobj="$arg" - arg_mode=normal - continue - ;; - - normal ) - # Accept any command-line options. - case $arg in - -o) - if test -n "$libobj" ; then - $echo "$modename: you cannot specify \`-o' more than once" 1>&2 - exit $EXIT_FAILURE - fi - arg_mode=target - continue - ;; - - -static | -prefer-pic | -prefer-non-pic) - later="$later $arg" - continue - ;; - - -no-suppress) - suppress_opt=no - continue - ;; - - -Xcompiler) - arg_mode=arg # the next one goes into the "base_compile" arg list - continue # The current "srcfile" will either be retained or - ;; # replaced later. I would guess that would be a bug. - - -Wc,*) - args=`$echo "X$arg" | $Xsed -e "s/^-Wc,//"` - lastarg= - save_ifs="$IFS"; IFS=',' - for arg in $args; do - IFS="$save_ifs" - - # Double-quote args containing other shell metacharacters. - # Many Bourne shells cannot handle close brackets correctly - # in scan sets, so we specify it separately. - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - lastarg="$lastarg $arg" - done - IFS="$save_ifs" - lastarg=`$echo "X$lastarg" | $Xsed -e "s/^ //"` - - # Add the arguments to base_compile. - base_compile="$base_compile $lastarg" - continue - ;; - - * ) - # Accept the current argument as the source file. - # The previous "srcfile" becomes the current argument. - # - lastarg="$srcfile" - srcfile="$arg" - ;; - esac # case $arg - ;; - esac # case $arg_mode - - # Aesthetically quote the previous argument. - lastarg=`$echo "X$lastarg" | $Xsed -e "$sed_quote_subst"` - - case $lastarg in - # Double-quote args containing other shell metacharacters. - # Many Bourne shells cannot handle close brackets correctly - # in scan sets, and some SunOS ksh mistreat backslash-escaping - # in scan sets (worked around with variable expansion), - # and furthermore cannot handle '|' '&' '(' ')' in scan sets - # at all, so we specify them separately. - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - lastarg="\"$lastarg\"" - ;; - esac - - base_compile="$base_compile $lastarg" - done # for arg - - case $arg_mode in - arg) - $echo "$modename: you must specify an argument for -Xcompile" - exit $EXIT_FAILURE - ;; - target) - $echo "$modename: you must specify a target with \`-o'" 1>&2 - exit $EXIT_FAILURE - ;; - *) - # Get the name of the library object. 
- [ -z "$libobj" ] && libobj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%'` - ;; - esac - - # Recognize several different file suffixes. - # If the user specifies -o file.o, it is replaced with file.lo - xform='[cCFSifmso]' - case $libobj in - *.ada) xform=ada ;; - *.adb) xform=adb ;; - *.ads) xform=ads ;; - *.asm) xform=asm ;; - *.c++) xform=c++ ;; - *.cc) xform=cc ;; - *.ii) xform=ii ;; - *.class) xform=class ;; - *.cpp) xform=cpp ;; - *.cxx) xform=cxx ;; - *.[fF][09]?) xform=[fF][09]. ;; - *.for) xform=for ;; - *.java) xform=java ;; - *.obj) xform=obj ;; - *.sx) xform=sx ;; - esac - - libobj=`$echo "X$libobj" | $Xsed -e "s/\.$xform$/.lo/"` - - case $libobj in - *.lo) obj=`$echo "X$libobj" | $Xsed -e "$lo2o"` ;; - *) - $echo "$modename: cannot determine name of library object from \`$libobj'" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - func_infer_tag $base_compile - - for arg in $later; do - case $arg in - -static) - build_old_libs=yes - continue - ;; - - -prefer-pic) - pic_mode=yes - continue - ;; - - -prefer-non-pic) - pic_mode=no - continue - ;; - esac - done - - qlibobj=`$echo "X$libobj" | $Xsed -e "$sed_quote_subst"` - case $qlibobj in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - qlibobj="\"$qlibobj\"" ;; - esac - test "X$libobj" != "X$qlibobj" \ - && $echo "X$libobj" | grep '[]~#^*{};<>?"'"'"' &()|`$[]' \ - && $echo "$modename: libobj name \`$libobj' may not contain shell special characters." - objname=`$echo "X$obj" | $Xsed -e 's%^.*/%%'` - xdir=`$echo "X$obj" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$obj"; then - xdir= - else - xdir=$xdir/ - fi - lobj=${xdir}$objdir/$objname - - if test -z "$base_compile"; then - $echo "$modename: you must specify a compilation command" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - # Delete any leftover library objects. - if test "$build_old_libs" = yes; then - removelist="$obj $lobj $libobj ${libobj}T" - else - removelist="$lobj $libobj ${libobj}T" - fi - - $run $rm $removelist - trap "$run $rm $removelist; exit $EXIT_FAILURE" 1 2 15 - - # On Cygwin there's no "real" PIC flag so we must build both object types - case $host_os in - cygwin* | mingw* | pw32* | os2*) - pic_mode=default - ;; - esac - if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then - # non-PIC code in shared libraries is not supported - pic_mode=default - fi - - # Calculate the filename of the output object if compiler does - # not support -o with -c - if test "$compiler_c_o" = no; then - output_obj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%' -e 's%\.[^.]*$%%'`.${objext} - lockfile="$output_obj.lock" - removelist="$removelist $output_obj $lockfile" - trap "$run $rm $removelist; exit $EXIT_FAILURE" 1 2 15 - else - output_obj= - need_locks=no - lockfile= - fi - - # Lock this critical section if it is needed - # We use this script file to make the link, it avoids creating a new file - if test "$need_locks" = yes; then - until $run ln "$progpath" "$lockfile" 2>/dev/null; do - $show "Waiting for $lockfile to be removed" - sleep 2 - done - elif test "$need_locks" = warn; then - if test -f "$lockfile"; then - $echo "\ -*** ERROR, $lockfile exists and contains: -`cat $lockfile 2>/dev/null` - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." 
- - $run $rm $removelist - exit $EXIT_FAILURE - fi - $echo "$srcfile" > "$lockfile" - fi - - if test -n "$fix_srcfile_path"; then - eval srcfile=\"$fix_srcfile_path\" - fi - qsrcfile=`$echo "X$srcfile" | $Xsed -e "$sed_quote_subst"` - case $qsrcfile in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - qsrcfile="\"$qsrcfile\"" ;; - esac - - $run $rm "$libobj" "${libobj}T" - - # Create a libtool object file (analogous to a ".la" file), - # but don't create it if we're doing a dry run. - test -z "$run" && cat > ${libobj}T <<EOF -# $libobj - a libtool object file -# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP -# -# Please DO NOT delete this file! -# It is necessary for linking the library. - -# Name of the PIC object. -EOF - - # Only build a PIC object if we are building libtool libraries. - if test "$build_libtool_libs" = yes; then - # Without this assignment, base_compile gets emptied. - fbsd_hideous_sh_bug=$base_compile - - if test "$pic_mode" != no; then - command="$base_compile $qsrcfile $pic_flag" - else - # Don't build PIC code - command="$base_compile $qsrcfile" - fi - - if test ! -d "${xdir}$objdir"; then - $show "$mkdir ${xdir}$objdir" - $run $mkdir ${xdir}$objdir - exit_status=$? - if test "$exit_status" -ne 0 && test ! -d "${xdir}$objdir"; then - exit $exit_status - fi - fi - - if test -z "$output_obj"; then - # Place PIC objects in $objdir - command="$command -o $lobj" - fi - - $run $rm "$lobj" "$output_obj" - - $show "$command" - if $run eval $lt_env "$command"; then : - else - test -n "$output_obj" && $run $rm $removelist - exit $EXIT_FAILURE - fi - - if test "$need_locks" = warn && - test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then - $echo "\ -*** ERROR, $lockfile contains: -`cat $lockfile 2>/dev/null` - -but it should contain: -$srcfile - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." - - $run $rm $removelist - exit $EXIT_FAILURE - fi - - # Just move the object if needed, then go on to compile the next one - if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then - $show "$mv $output_obj $lobj" - if $run $mv $output_obj $lobj; then : - else - error=$? - $run $rm $removelist - exit $error - fi - fi - - # Append the name of the PIC object to the libtool object file. - test -z "$run" && cat >> ${libobj}T <<EOF -pic_object='$objdir/$objname' - -EOF - - # Allow error messages only from the first compilation. - if test "$suppress_opt" = yes; then - suppress_output=' >/dev/null 2>&1' - fi - else - # No PIC object so indicate it doesn't exist in the libtool - # object file. - test -z "$run" && cat >> ${libobj}T <<EOF -pic_object=none - -EOF - fi - - # Only build a position-dependent object if we build old libraries. - if test "$build_old_libs" = yes; then - if test "$pic_mode" != yes; then - # Don't build PIC code - command="$base_compile $qsrcfile" - else - command="$base_compile $qsrcfile $pic_flag" - fi - if test "$compiler_c_o" = yes; then - command="$command -o $obj" - fi - - # Suppress compiler output if we already did a PIC compilation. 
- command="$command$suppress_output" - $run $rm "$obj" "$output_obj" - $show "$command" - if $run eval $lt_env "$command"; then : - else - $run $rm $removelist - exit $EXIT_FAILURE - fi - - if test "$need_locks" = warn && - test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then - $echo "\ -*** ERROR, $lockfile contains: -`cat $lockfile 2>/dev/null` - -but it should contain: -$srcfile - -This indicates that another process is trying to use the same -temporary object file, and libtool could not work around it because -your compiler does not support \`-c' and \`-o' together. If you -repeat this compilation, it may succeed, by chance, but you had better -avoid parallel builds (make -j) in this platform, or get a better -compiler." - - $run $rm $removelist - exit $EXIT_FAILURE - fi - - # Just move the object if needed - if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then - $show "$mv $output_obj $obj" - if $run $mv $output_obj $obj; then : - else - error=$? - $run $rm $removelist - exit $error - fi - fi - - # Append the name of the non-PIC object the libtool object file. - # Only append if the libtool object file exists. - test -z "$run" && cat >> ${libobj}T <<EOF -# Name of the non-PIC object. -non_pic_object='$objname' - -EOF - else - # Append the name of the non-PIC object the libtool object file. - # Only append if the libtool object file exists. - test -z "$run" && cat >> ${libobj}T <<EOF -# Name of the non-PIC object. -non_pic_object=none - -EOF - fi - - $run $mv "${libobj}T" "${libobj}" - - # Unlock the critical section if it was locked - if test "$need_locks" != no; then - $run $rm "$lockfile" - fi - - exit $EXIT_SUCCESS - ;; - - # libtool link mode - link | relink) - modename="$modename: link" - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) - # It is impossible to link a dll without this setting, and - # we shouldn't force the makefile maintainer to figure out - # which system we are compiling for in order to pass an extra - # flag for every libtool invocation. - # allow_undefined=no - - # FIXME: Unfortunately, there are problems with the above when trying - # to make a dll which has undefined symbols, in which case not - # even a static library is built. For now, we need to specify - # -no-undefined on the libtool link line when we can be certain - # that all symbols are satisfied, otherwise we get a static library. - allow_undefined=yes - ;; - *) - allow_undefined=yes - ;; - esac - libtool_args="$nonopt" - base_compile="$nonopt $@" - compile_command="$nonopt" - finalize_command="$nonopt" - - compile_rpath= - finalize_rpath= - compile_shlibpath= - finalize_shlibpath= - convenience= - old_convenience= - deplibs= - old_deplibs= - compiler_flags= - linker_flags= - dllsearchpath= - lib_search_path=`pwd` - inst_prefix_dir= - - avoid_version=no - dlfiles= - dlprefiles= - dlself=no - export_dynamic=no - export_symbols= - export_symbols_regex= - generated= - libobjs= - ltlibs= - module=no - no_install=no - objs= - non_pic_objects= - notinst_path= # paths that contain not-installed libtool libraries - precious_files_regex= - prefer_static_libs=no - preload=no - prev= - prevarg= - release= - rpath= - xrpath= - perm_rpath= - temp_rpath= - thread_safe=no - vinfo= - vinfo_number=no - single_module="${wl}-single_module" - - func_infer_tag $base_compile - - # We need to know -static, to get the right output filenames. 
- for arg - do - case $arg in - -all-static | -static | -static-libtool-libs) - case $arg in - -all-static) - if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then - $echo "$modename: warning: complete static linking is impossible in this configuration" 1>&2 - fi - if test -n "$link_static_flag"; then - dlopen_self=$dlopen_self_static - fi - prefer_static_libs=yes - ;; - -static) - if test -z "$pic_flag" && test -n "$link_static_flag"; then - dlopen_self=$dlopen_self_static - fi - prefer_static_libs=built - ;; - -static-libtool-libs) - if test -z "$pic_flag" && test -n "$link_static_flag"; then - dlopen_self=$dlopen_self_static - fi - prefer_static_libs=yes - ;; - esac - build_libtool_libs=no - build_old_libs=yes - break - ;; - esac - done - - # See if our shared archives depend on static archives. - test -n "$old_archive_from_new_cmds" && build_old_libs=yes - - # Go through the arguments, transforming them on the way. - while test "$#" -gt 0; do - arg="$1" - shift - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - qarg=\"`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`\" ### testsuite: skip nested quoting test - ;; - *) qarg=$arg ;; - esac - libtool_args="$libtool_args $qarg" - - # If the previous option needs an argument, assign it. - if test -n "$prev"; then - case $prev in - output) - compile_command="$compile_command @OUTPUT@" - finalize_command="$finalize_command @OUTPUT@" - ;; - esac - - case $prev in - dlfiles|dlprefiles) - if test "$preload" = no; then - # Add the symbol object into the linking commands. - compile_command="$compile_command @SYMFILE@" - finalize_command="$finalize_command @SYMFILE@" - preload=yes - fi - case $arg in - *.la | *.lo) ;; # We handle these cases below. - force) - if test "$dlself" = no; then - dlself=needless - export_dynamic=yes - fi - prev= - continue - ;; - self) - if test "$prev" = dlprefiles; then - dlself=yes - elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then - dlself=yes - else - dlself=needless - export_dynamic=yes - fi - prev= - continue - ;; - *) - if test "$prev" = dlfiles; then - dlfiles="$dlfiles $arg" - else - dlprefiles="$dlprefiles $arg" - fi - prev= - continue - ;; - esac - ;; - expsyms) - export_symbols="$arg" - if test ! -f "$arg"; then - $echo "$modename: symbol file \`$arg' does not exist" - exit $EXIT_FAILURE - fi - prev= - continue - ;; - expsyms_regex) - export_symbols_regex="$arg" - prev= - continue - ;; - inst_prefix) - inst_prefix_dir="$arg" - prev= - continue - ;; - precious_regex) - precious_files_regex="$arg" - prev= - continue - ;; - release) - release="-$arg" - prev= - continue - ;; - objectlist) - if test -f "$arg"; then - save_arg=$arg - moreargs= - for fil in `cat $save_arg` - do -# moreargs="$moreargs $fil" - arg=$fil - # A libtool-controlled object. - - # Check to see that this really is a libtool object. - if (${SED} -e '2q' $arg | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - pic_object= - non_pic_object= - - # Read the .lo file - # If there is no directory component, then add one. - case $arg in - */* | *\\*) . $arg ;; - *) . ./$arg ;; - esac - - if test -z "$pic_object" || \ - test -z "$non_pic_object" || - test "$pic_object" = none && \ - test "$non_pic_object" = none; then - $echo "$modename: cannot find name of object for \`$arg'" 1>&2 - exit $EXIT_FAILURE - fi - - # Extract subdirectory from the argument. 
- xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$arg"; then - xdir= - else - xdir="$xdir/" - fi - - if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. - pic_object="$xdir$pic_object" - - if test "$prev" = dlfiles; then - if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - dlfiles="$dlfiles $pic_object" - prev= - continue - else - # If libtool objects are unsupported, then we need to preload. - prev=dlprefiles - fi - fi - - # CHECK ME: I think I busted this. -Ossama - if test "$prev" = dlprefiles; then - # Preload the old-style object. - dlprefiles="$dlprefiles $pic_object" - prev= - fi - - # A PIC object. - libobjs="$libobjs $pic_object" - arg="$pic_object" - fi - - # Non-PIC object. - if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. - non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - non_pic_objects="$non_pic_objects $non_pic_object" - if test -z "$pic_object" || test "$pic_object" = none ; then - arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. - non_pic_object="$pic_object" - non_pic_objects="$non_pic_objects $non_pic_object" - fi - else - # Only an error if not doing a dry-run. - if test -z "$run"; then - $echo "$modename: \`$arg' is not a valid libtool object" 1>&2 - exit $EXIT_FAILURE - else - # Dry-run case. - - # Extract subdirectory from the argument. - xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$arg"; then - xdir= - else - xdir="$xdir/" - fi - - pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"` - non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"` - libobjs="$libobjs $pic_object" - non_pic_objects="$non_pic_objects $non_pic_object" - fi - fi - done - else - $echo "$modename: link input file \`$save_arg' does not exist" - exit $EXIT_FAILURE - fi - arg=$save_arg - prev= - continue - ;; - rpath | xrpath) - # We need an absolute path. 
- case $arg in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - $echo "$modename: only absolute run-paths are allowed" 1>&2 - exit $EXIT_FAILURE - ;; - esac - if test "$prev" = rpath; then - case "$rpath " in - *" $arg "*) ;; - *) rpath="$rpath $arg" ;; - esac - else - case "$xrpath " in - *" $arg "*) ;; - *) xrpath="$xrpath $arg" ;; - esac - fi - prev= - continue - ;; - xcompiler) - compiler_flags="$compiler_flags $qarg" - prev= - compile_command="$compile_command $qarg" - finalize_command="$finalize_command $qarg" - continue - ;; - xlinker) - linker_flags="$linker_flags $qarg" - compiler_flags="$compiler_flags $wl$qarg" - prev= - compile_command="$compile_command $wl$qarg" - finalize_command="$finalize_command $wl$qarg" - continue - ;; - xcclinker) - linker_flags="$linker_flags $qarg" - compiler_flags="$compiler_flags $qarg" - prev= - compile_command="$compile_command $qarg" - finalize_command="$finalize_command $qarg" - continue - ;; - shrext) - shrext_cmds="$arg" - prev= - continue - ;; - darwin_framework|darwin_framework_skip) - test "$prev" = "darwin_framework" && compiler_flags="$compiler_flags $arg" - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - prev= - continue - ;; - *) - eval "$prev=\"\$arg\"" - prev= - continue - ;; - esac - fi # test -n "$prev" - - prevarg="$arg" - - case $arg in - -all-static) - if test -n "$link_static_flag"; then - compile_command="$compile_command $link_static_flag" - finalize_command="$finalize_command $link_static_flag" - fi - continue - ;; - - -allow-undefined) - # FIXME: remove this flag sometime in the future. - $echo "$modename: \`-allow-undefined' is deprecated because it is the default" 1>&2 - continue - ;; - - -avoid-version) - avoid_version=yes - continue - ;; - - -dlopen) - prev=dlfiles - continue - ;; - - -dlpreopen) - prev=dlprefiles - continue - ;; - - -export-dynamic) - export_dynamic=yes - continue - ;; - - -export-symbols | -export-symbols-regex) - if test -n "$export_symbols" || test -n "$export_symbols_regex"; then - $echo "$modename: more than one -exported-symbols argument is not allowed" - exit $EXIT_FAILURE - fi - if test "X$arg" = "X-export-symbols"; then - prev=expsyms - else - prev=expsyms_regex - fi - continue - ;; - - -framework|-arch|-isysroot) - case " $CC " in - *" ${arg} ${1} "* | *" ${arg} ${1} "*) - prev=darwin_framework_skip ;; - *) compiler_flags="$compiler_flags $arg" - prev=darwin_framework ;; - esac - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - continue - ;; - - -inst-prefix-dir) - prev=inst_prefix - continue - ;; - - # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:* - # so, if we see these flags be careful not to treat them like -L - -L[A-Z][A-Z]*:*) - case $with_gcc/$host in - no/*-*-irix* | /*-*-irix*) - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - ;; - esac - continue - ;; - - -L*) - dir=`$echo "X$arg" | $Xsed -e 's/^-L//'` - # We need an absolute path. 
- case $dir in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - absdir=`cd "$dir" && pwd` - if test -z "$absdir"; then - $echo "$modename: cannot determine absolute directory name of \`$dir'" 1>&2 - absdir="$dir" - notinst_path="$notinst_path $dir" - fi - dir="$absdir" - ;; - esac - case "$deplibs " in - *" -L$dir "*) ;; - *) - deplibs="$deplibs -L$dir" - lib_search_path="$lib_search_path $dir" - ;; - esac - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) - testbindir=`$echo "X$dir" | $Xsed -e 's*/lib$*/bin*'` - case :$dllsearchpath: in - *":$dir:"*) ;; - *) dllsearchpath="$dllsearchpath:$dir";; - esac - case :$dllsearchpath: in - *":$testbindir:"*) ;; - *) dllsearchpath="$dllsearchpath:$testbindir";; - esac - ;; - esac - continue - ;; - - -l*) - if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos*) - # These systems don't actually have a C or math library (as such) - continue - ;; - *-*-os2*) - # These systems don't actually have a C library (as such) - test "X$arg" = "X-lc" && continue - ;; - *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc due to us having libc/libc_r. - test "X$arg" = "X-lc" && continue - ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C and math libraries are in the System framework - deplibs="$deplibs -framework System" - continue - ;; - *-*-sco3.2v5* | *-*-sco5v6*) - # Causes problems with __ctype - test "X$arg" = "X-lc" && continue - ;; - *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) - # Compiler inserts libc in the correct place for threads to work - test "X$arg" = "X-lc" && continue - ;; - esac - elif test "X$arg" = "X-lc_r"; then - case $host in - *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc_r directly, use -pthread flag. - continue - ;; - esac - fi - deplibs="$deplibs $arg" - continue - ;; - - # Tru64 UNIX uses -model [arg] to determine the layout of C++ - # classes, name mangling, and exception handling. - -model) - compile_command="$compile_command $arg" - compiler_flags="$compiler_flags $arg" - finalize_command="$finalize_command $arg" - prev=xcompiler - continue - ;; - - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) - compiler_flags="$compiler_flags $arg" - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - continue - ;; - - -multi_module) - single_module="${wl}-multi_module" - continue - ;; - - -module) - module=yes - continue - ;; - - # -64, -mips[0-9] enable 64-bit mode on the SGI compiler - # -r[0-9][0-9]* specifies the processor on the SGI compiler - # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler - # +DA*, +DD* enable 64-bit mode on the HP compiler - # -q* pass through compiler args for the IBM compiler - # -m* pass through architecture-specific compiler args for GCC - # -m*, -t[45]*, -txscale* pass through architecture-specific - # compiler args for GCC - # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC - # -F/path gives path to uninstalled frameworks, gcc on darwin - # @file GCC response files - -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ - -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*) - - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. 
- arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - compiler_flags="$compiler_flags $arg" - continue - ;; - - -shrext) - prev=shrext - continue - ;; - - -no-fast-install) - fast_install=no - continue - ;; - - -no-install) - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin*) - # The PATH hackery in wrapper scripts is required on Windows - # and Darwin in order for the loader to find any dlls it needs. - $echo "$modename: warning: \`-no-install' is ignored for $host" 1>&2 - $echo "$modename: warning: assuming \`-no-fast-install' instead" 1>&2 - fast_install=no - ;; - *) no_install=yes ;; - esac - continue - ;; - - -no-undefined) - allow_undefined=no - continue - ;; - - -objectlist) - prev=objectlist - continue - ;; - - -o) prev=output ;; - - -precious-files-regex) - prev=precious_regex - continue - ;; - - -release) - prev=release - continue - ;; - - -rpath) - prev=rpath - continue - ;; - - -R) - prev=xrpath - continue - ;; - - -R*) - dir=`$echo "X$arg" | $Xsed -e 's/^-R//'` - # We need an absolute path. - case $dir in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - $echo "$modename: only absolute run-paths are allowed" 1>&2 - exit $EXIT_FAILURE - ;; - esac - case "$xrpath " in - *" $dir "*) ;; - *) xrpath="$xrpath $dir" ;; - esac - continue - ;; - - -static | -static-libtool-libs) - # The effects of -static are defined in a previous loop. - # We used to do the same as -all-static on platforms that - # didn't have a PIC flag, but the assumption that the effects - # would be equivalent was wrong. It would break on at least - # Digital Unix and AIX. - continue - ;; - - -thread-safe) - thread_safe=yes - continue - ;; - - -version-info) - prev=vinfo - continue - ;; - -version-number) - prev=vinfo - vinfo_number=yes - continue - ;; - - -Wc,*) - args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wc,//'` - arg= - save_ifs="$IFS"; IFS=',' - for flag in $args; do - IFS="$save_ifs" - case $flag in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - flag="\"$flag\"" - ;; - esac - arg="$arg $wl$flag" - compiler_flags="$compiler_flags $flag" - done - IFS="$save_ifs" - arg=`$echo "X$arg" | $Xsed -e "s/^ //"` - ;; - - -Wl,*) - args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wl,//'` - arg= - save_ifs="$IFS"; IFS=',' - for flag in $args; do - IFS="$save_ifs" - case $flag in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - flag="\"$flag\"" - ;; - esac - arg="$arg $wl$flag" - compiler_flags="$compiler_flags $wl$flag" - linker_flags="$linker_flags $flag" - done - IFS="$save_ifs" - arg=`$echo "X$arg" | $Xsed -e "s/^ //"` - ;; - - -Xcompiler) - prev=xcompiler - continue - ;; - - -Xlinker) - prev=xlinker - continue - ;; - - -XCClinker) - prev=xcclinker - continue - ;; - - # Some other compiler flag. - -* | +*) - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. - arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - ;; - - *.$objext) - # A standard object. - objs="$objs $arg" - ;; - - *.lo) - # A libtool-controlled object. - - # Check to see that this really is a libtool object. 
- if (${SED} -e '2q' $arg | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - pic_object= - non_pic_object= - - # Read the .lo file - # If there is no directory component, then add one. - case $arg in - */* | *\\*) . $arg ;; - *) . ./$arg ;; - esac - - if test -z "$pic_object" || \ - test -z "$non_pic_object" || - test "$pic_object" = none && \ - test "$non_pic_object" = none; then - $echo "$modename: cannot find name of object for \`$arg'" 1>&2 - exit $EXIT_FAILURE - fi - - # Extract subdirectory from the argument. - xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$arg"; then - xdir= - else - xdir="$xdir/" - fi - - if test "$pic_object" != none; then - # Prepend the subdirectory the object is found in. - pic_object="$xdir$pic_object" - - if test "$prev" = dlfiles; then - if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then - dlfiles="$dlfiles $pic_object" - prev= - continue - else - # If libtool objects are unsupported, then we need to preload. - prev=dlprefiles - fi - fi - - # CHECK ME: I think I busted this. -Ossama - if test "$prev" = dlprefiles; then - # Preload the old-style object. - dlprefiles="$dlprefiles $pic_object" - prev= - fi - - # A PIC object. - libobjs="$libobjs $pic_object" - arg="$pic_object" - fi - - # Non-PIC object. - if test "$non_pic_object" != none; then - # Prepend the subdirectory the object is found in. - non_pic_object="$xdir$non_pic_object" - - # A standard non-PIC object - non_pic_objects="$non_pic_objects $non_pic_object" - if test -z "$pic_object" || test "$pic_object" = none ; then - arg="$non_pic_object" - fi - else - # If the PIC object exists, use it instead. - # $xdir was prepended to $pic_object above. - non_pic_object="$pic_object" - non_pic_objects="$non_pic_objects $non_pic_object" - fi - else - # Only an error if not doing a dry-run. - if test -z "$run"; then - $echo "$modename: \`$arg' is not a valid libtool object" 1>&2 - exit $EXIT_FAILURE - else - # Dry-run case. - - # Extract subdirectory from the argument. - xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'` - if test "X$xdir" = "X$arg"; then - xdir= - else - xdir="$xdir/" - fi - - pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"` - non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"` - libobjs="$libobjs $pic_object" - non_pic_objects="$non_pic_objects $non_pic_object" - fi - fi - ;; - - *.$libext) - # An archive. - deplibs="$deplibs $arg" - old_deplibs="$old_deplibs $arg" - continue - ;; - - *.la) - # A libtool-controlled library. - - if test "$prev" = dlfiles; then - # This library was specified with -dlopen. - dlfiles="$dlfiles $arg" - prev= - elif test "$prev" = dlprefiles; then - # The library was specified with -dlpreopen. - dlprefiles="$dlprefiles $arg" - prev= - else - deplibs="$deplibs $arg" - fi - continue - ;; - - # Some other compiler argument. - *) - # Unknown arguments in both finalize_command and compile_command need - # to be aesthetically quoted because they are evaled later. - arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - ;; - esac # arg - - # Now actually substitute the argument into the commands. 
- if test -n "$arg"; then - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - fi - done # argument parsing loop - - if test -n "$prev"; then - $echo "$modename: the \`$prevarg' option requires an argument" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then - eval arg=\"$export_dynamic_flag_spec\" - compile_command="$compile_command $arg" - finalize_command="$finalize_command $arg" - fi - - oldlibs= - # calculate the name of the file, without its directory - outputname=`$echo "X$output" | $Xsed -e 's%^.*/%%'` - libobjs_save="$libobjs" - - if test -n "$shlibpath_var"; then - # get the directories listed in $shlibpath_var - eval shlib_search_path=\`\$echo \"X\${$shlibpath_var}\" \| \$Xsed -e \'s/:/ /g\'\` - else - shlib_search_path= - fi - eval sys_lib_search_path=\"$sys_lib_search_path_spec\" - eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" - - output_objdir=`$echo "X$output" | $Xsed -e 's%/[^/]*$%%'` - if test "X$output_objdir" = "X$output"; then - output_objdir="$objdir" - else - output_objdir="$output_objdir/$objdir" - fi - # Create the object directory. - if test ! -d "$output_objdir"; then - $show "$mkdir $output_objdir" - $run $mkdir $output_objdir - exit_status=$? - if test "$exit_status" -ne 0 && test ! -d "$output_objdir"; then - exit $exit_status - fi - fi - - # Determine the type of output - case $output in - "") - $echo "$modename: you must specify an output file" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - ;; - *.$libext) linkmode=oldlib ;; - *.lo | *.$objext) linkmode=obj ;; - *.la) linkmode=lib ;; - *) linkmode=prog ;; # Anything else should be a program. - esac - - case $host in - *cygwin* | *mingw* | *pw32*) - # don't eliminate duplications in $postdeps and $predeps - duplicate_compiler_generated_deps=yes - ;; - *) - duplicate_compiler_generated_deps=$duplicate_deps - ;; - esac - specialdeplibs= - - libs= - # Find all interdependent deplibs by searching for libraries - # that are linked more than once (e.g. -la -lb -la) - for deplib in $deplibs; do - if test "X$duplicate_deps" = "Xyes" ; then - case "$libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - libs="$libs $deplib" - done - - if test "$linkmode" = lib; then - libs="$predeps $libs $compiler_lib_search_path $postdeps" - - # Compute libraries that are listed more than once in $predeps - # $postdeps and mark them as special (i.e., whose duplicates are - # not to be eliminated). 
- pre_post_deps= - if test "X$duplicate_compiler_generated_deps" = "Xyes" ; then - for pre_post_dep in $predeps $postdeps; do - case "$pre_post_deps " in - *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;; - esac - pre_post_deps="$pre_post_deps $pre_post_dep" - done - fi - pre_post_deps= - fi - - deplibs= - newdependency_libs= - newlib_search_path= - need_relink=no # whether we're linking any uninstalled libtool libraries - notinst_deplibs= # not-installed libtool libraries - case $linkmode in - lib) - passes="conv link" - for file in $dlfiles $dlprefiles; do - case $file in - *.la) ;; - *) - $echo "$modename: libraries can \`-dlopen' only libtool libraries: $file" 1>&2 - exit $EXIT_FAILURE - ;; - esac - done - ;; - prog) - compile_deplibs= - finalize_deplibs= - alldeplibs=no - newdlfiles= - newdlprefiles= - passes="conv scan dlopen dlpreopen link" - ;; - *) passes="conv" - ;; - esac - for pass in $passes; do - if test "$linkmode,$pass" = "lib,link" || - test "$linkmode,$pass" = "prog,scan"; then - libs="$deplibs" - deplibs= - fi - if test "$linkmode" = prog; then - case $pass in - dlopen) libs="$dlfiles" ;; - dlpreopen) libs="$dlprefiles" ;; - link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; - esac - fi - if test "$pass" = dlopen; then - # Collect dlpreopened libraries - save_deplibs="$deplibs" - deplibs= - fi - for deplib in $libs; do - lib= - found=no - case $deplib in - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - compiler_flags="$compiler_flags $deplib" - fi - continue - ;; - -l*) - if test "$linkmode" != lib && test "$linkmode" != prog; then - $echo "$modename: warning: \`-l' is ignored for archives/objects" 1>&2 - continue - fi - name=`$echo "X$deplib" | $Xsed -e 's/^-l//'` - if test "$linkmode" = lib; then - searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path" - else - searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path" - fi - for searchdir in $searchdirs; do - for search_ext in .la $std_shrext .so .a; do - # Search the libtool library - lib="$searchdir/lib${name}${search_ext}" - if test -f "$lib"; then - if test "$search_ext" = ".la"; then - found=yes - else - found=no - fi - break 2 - fi - done - done - if test "$found" != yes; then - # deplib doesn't seem to be a libtool library - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" - test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" - fi - continue - else # deplib is a libtool library - # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib, - # We need to do some special things here, and not later. - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $deplib "*) - if (${SED} -e '2q' $lib | - grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - library_names= - old_library= - case $lib in - */* | *\\*) . $lib ;; - *) . ./$lib ;; - esac - for l in $old_library $library_names; do - ll="$l" - done - if test "X$ll" = "X$old_library" ; then # only static version available - found=no - ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'` - test "X$ladir" = "X$lib" && ladir="." 
- lib=$ladir/$old_library - if test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - deplibs="$deplib $deplibs" - test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" - fi - continue - fi - fi - ;; - *) ;; - esac - fi - fi - ;; # -l - -L*) - case $linkmode in - lib) - deplibs="$deplib $deplibs" - test "$pass" = conv && continue - newdependency_libs="$deplib $newdependency_libs" - newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'` - ;; - prog) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi - if test "$pass" = scan; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - fi - newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'` - ;; - *) - $echo "$modename: warning: \`-L' is ignored for archives/objects" 1>&2 - ;; - esac # linkmode - continue - ;; # -L - -R*) - if test "$pass" = link; then - dir=`$echo "X$deplib" | $Xsed -e 's/^-R//'` - # Make sure the xrpath contains only unique directories. - case "$xrpath " in - *" $dir "*) ;; - *) xrpath="$xrpath $dir" ;; - esac - fi - deplibs="$deplib $deplibs" - continue - ;; - *.la) lib="$deplib" ;; - *.$libext) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - continue - fi - case $linkmode in - lib) - valid_a_lib=no - case $deplibs_check_method in - match_pattern*) - set dummy $deplibs_check_method - match_pattern_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"` - if eval $echo \"$deplib\" 2>/dev/null \ - | $SED 10q \ - | $EGREP "$match_pattern_regex" > /dev/null; then - valid_a_lib=yes - fi - ;; - pass_all) - valid_a_lib=yes - ;; - esac - if test "$valid_a_lib" != yes; then - $echo - $echo "*** Warning: Trying to link with static lib archive $deplib." - $echo "*** I have the capability to make that library automatically link in when" - $echo "*** you link to this library. But I can only do this if you have a" - $echo "*** shared version of the library, which you do not appear to have" - $echo "*** because the file extensions .$libext of this argument makes me believe" - $echo "*** that it is just a static archive that I should not used here." - else - $echo - $echo "*** Warning: Linking the shared library $output against the" - $echo "*** static library $deplib is not portable!" - deplibs="$deplib $deplibs" - fi - continue - ;; - prog) - if test "$pass" != link; then - deplibs="$deplib $deplibs" - else - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - fi - continue - ;; - esac # linkmode - ;; # *.$libext - *.lo | *.$objext) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" - elif test "$linkmode" = prog; then - if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then - # If there is no dlopen support or we're linking statically, - # we need to preload. - newdlprefiles="$newdlprefiles $deplib" - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else - newdlfiles="$newdlfiles $deplib" - fi - fi - continue - ;; - %DEPLIBS%) - alldeplibs=yes - continue - ;; - esac # case $deplib - if test "$found" = yes || test -f "$lib"; then : - else - $echo "$modename: cannot find the library \`$lib' or unhandled argument \`$deplib'" 1>&2 - exit $EXIT_FAILURE - fi - - # Check to see that this really is a libtool archive. 
- if (${SED} -e '2q' $lib | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then : - else - $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 - exit $EXIT_FAILURE - fi - - ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'` - test "X$ladir" = "X$lib" && ladir="." - - dlname= - dlopen= - dlpreopen= - libdir= - library_names= - old_library= - # If the library was installed with an old release of libtool, - # it will not redefine variables installed, or shouldnotlink - installed=yes - shouldnotlink=no - avoidtemprpath= - - - # Read the .la file - case $lib in - */* | *\\*) . $lib ;; - *) . ./$lib ;; - esac - - if test "$linkmode,$pass" = "lib,link" || - test "$linkmode,$pass" = "prog,scan" || - { test "$linkmode" != prog && test "$linkmode" != lib; }; then - test -n "$dlopen" && dlfiles="$dlfiles $dlopen" - test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen" - fi - - if test "$pass" = conv; then - # Only check for convenience libraries - deplibs="$lib $deplibs" - if test -z "$libdir"; then - if test -z "$old_library"; then - $echo "$modename: cannot find name of link library for \`$lib'" 1>&2 - exit $EXIT_FAILURE - fi - # It is a libtool convenience library, so add in its objects. - convenience="$convenience $ladir/$objdir/$old_library" - old_convenience="$old_convenience $ladir/$objdir/$old_library" - tmp_libs= - for deplib in $dependency_libs; do - deplibs="$deplib $deplibs" - if test "X$duplicate_deps" = "Xyes" ; then - case "$tmp_libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - tmp_libs="$tmp_libs $deplib" - done - elif test "$linkmode" != prog && test "$linkmode" != lib; then - $echo "$modename: \`$lib' is not a convenience library" 1>&2 - exit $EXIT_FAILURE - fi - continue - fi # $pass = conv - - - # Get the name of the library we link against. - linklib= - for l in $old_library $library_names; do - linklib="$l" - done - if test -z "$linklib"; then - $echo "$modename: cannot find name of link library for \`$lib'" 1>&2 - exit $EXIT_FAILURE - fi - - # This library was specified with -dlopen. - if test "$pass" = dlopen; then - if test -z "$libdir"; then - $echo "$modename: cannot -dlopen a convenience library: \`$lib'" 1>&2 - exit $EXIT_FAILURE - fi - if test -z "$dlname" || - test "$dlopen_support" != yes || - test "$build_libtool_libs" = no; then - # If there is no dlname, no dlopen support or we're linking - # statically, we need to preload. We also need to preload any - # dependent libraries so libltdl's deplib preloader doesn't - # bomb out in the load deplibs phase. - dlprefiles="$dlprefiles $lib $dependency_libs" - else - newdlfiles="$newdlfiles $lib" - fi - continue - fi # $pass = dlopen - - # We need an absolute path. - case $ladir in - [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; - *) - abs_ladir=`cd "$ladir" && pwd` - if test -z "$abs_ladir"; then - $echo "$modename: warning: cannot determine absolute directory name of \`$ladir'" 1>&2 - $echo "$modename: passing it literally to the linker, although it might fail" 1>&2 - abs_ladir="$ladir" - fi - ;; - esac - laname=`$echo "X$lib" | $Xsed -e 's%^.*/%%'` - - # Find the relevant object directory and library name. - if test "X$installed" = Xyes; then - if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then - $echo "$modename: warning: library \`$lib' was moved." 1>&2 - dir="$ladir" - absdir="$abs_ladir" - libdir="$abs_ladir" - else - dir="$libdir" - absdir="$libdir" - fi - test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes - else - if test ! 
-f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then - dir="$ladir" - absdir="$abs_ladir" - # Remove this search path later - notinst_path="$notinst_path $abs_ladir" - else - dir="$ladir/$objdir" - absdir="$abs_ladir/$objdir" - # Remove this search path later - notinst_path="$notinst_path $abs_ladir" - fi - fi # $installed = yes - name=`$echo "X$laname" | $Xsed -e 's/\.la$//' -e 's/^lib//'` - - # This library was specified with -dlpreopen. - if test "$pass" = dlpreopen; then - if test -z "$libdir"; then - $echo "$modename: cannot -dlpreopen a convenience library: \`$lib'" 1>&2 - exit $EXIT_FAILURE - fi - # Prefer using a static library (so that no silly _DYNAMIC symbols - # are required to link). - if test -n "$old_library"; then - newdlprefiles="$newdlprefiles $dir/$old_library" - # Otherwise, use the dlname, so that lt_dlopen finds it. - elif test -n "$dlname"; then - newdlprefiles="$newdlprefiles $dir/$dlname" - else - newdlprefiles="$newdlprefiles $dir/$linklib" - fi - fi # $pass = dlpreopen - - if test -z "$libdir"; then - # Link the convenience library - if test "$linkmode" = lib; then - deplibs="$dir/$old_library $deplibs" - elif test "$linkmode,$pass" = "prog,link"; then - compile_deplibs="$dir/$old_library $compile_deplibs" - finalize_deplibs="$dir/$old_library $finalize_deplibs" - else - deplibs="$lib $deplibs" # used for prog,scan pass - fi - continue - fi - - - if test "$linkmode" = prog && test "$pass" != link; then - newlib_search_path="$newlib_search_path $ladir" - deplibs="$lib $deplibs" - - linkalldeplibs=no - if test "$link_all_deplibs" != no || test -z "$library_names" || - test "$build_libtool_libs" = no; then - linkalldeplibs=yes - fi - - tmp_libs= - for deplib in $dependency_libs; do - case $deplib in - -L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test - esac - # Need to link against all dependency_libs? - if test "$linkalldeplibs" = yes; then - deplibs="$deplib $deplibs" - else - # Need to hardcode shared library paths - # or/and link against static libraries - newdependency_libs="$deplib $newdependency_libs" - fi - if test "X$duplicate_deps" = "Xyes" ; then - case "$tmp_libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - tmp_libs="$tmp_libs $deplib" - done # for deplib - continue - fi # $linkmode = prog... - - if test "$linkmode,$pass" = "prog,link"; then - if test -n "$library_names" && - { { test "$prefer_static_libs" = no || - test "$prefer_static_libs,$installed" = "built,yes"; } || - test -z "$old_library"; }; then - # We need to hardcode the library path - if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then - # Make sure the rpath contains only unique directories. - case "$temp_rpath " in - *" $dir "*) ;; - *" $absdir "*) ;; - *) temp_rpath="$temp_rpath $absdir" ;; - esac - fi - - # Hardcode the library path. - # Skip directories that are in the system default run-time - # search path. - case " $sys_lib_dlsearch_path " in - *" $absdir "*) ;; - *) - case "$compile_rpath " in - *" $absdir "*) ;; - *) compile_rpath="$compile_rpath $absdir" - esac - ;; - esac - case " $sys_lib_dlsearch_path " in - *" $libdir "*) ;; - *) - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" - esac - ;; - esac - fi # $linkmode,$pass = prog,link... 
- - if test "$alldeplibs" = yes && - { test "$deplibs_check_method" = pass_all || - { test "$build_libtool_libs" = yes && - test -n "$library_names"; }; }; then - # We only need to search for static libraries - continue - fi - fi - - link_static=no # Whether the deplib will be linked statically - use_static_libs=$prefer_static_libs - if test "$use_static_libs" = built && test "$installed" = yes ; then - use_static_libs=no - fi - if test -n "$library_names" && - { test "$use_static_libs" = no || test -z "$old_library"; }; then - if test "$installed" = no; then - notinst_deplibs="$notinst_deplibs $lib" - need_relink=yes - fi - # This is a shared library - - # Warn about portability, can't link against -module's on - # some systems (darwin) - if test "$shouldnotlink" = yes && test "$pass" = link ; then - $echo - if test "$linkmode" = prog; then - $echo "*** Warning: Linking the executable $output against the loadable module" - else - $echo "*** Warning: Linking the shared library $output against the loadable module" - fi - $echo "*** $linklib is not portable!" - fi - if test "$linkmode" = lib && - test "$hardcode_into_libs" = yes; then - # Hardcode the library path. - # Skip directories that are in the system default run-time - # search path. - case " $sys_lib_dlsearch_path " in - *" $absdir "*) ;; - *) - case "$compile_rpath " in - *" $absdir "*) ;; - *) compile_rpath="$compile_rpath $absdir" - esac - ;; - esac - case " $sys_lib_dlsearch_path " in - *" $libdir "*) ;; - *) - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" - esac - ;; - esac - fi - - if test -n "$old_archive_from_expsyms_cmds"; then - # figure out the soname - set dummy $library_names - realname="$2" - shift; shift - libname=`eval \\$echo \"$libname_spec\"` - # use dlname if we got it. it's perfectly good, no? - if test -n "$dlname"; then - soname="$dlname" - elif test -n "$soname_spec"; then - # bleh windows - case $host in - *cygwin* | mingw*) - major=`expr $current - $age` - versuffix="-$major" - ;; - esac - eval soname=\"$soname_spec\" - else - soname="$realname" - fi - - # Make a new name for the extract_expsyms_cmds to use - soroot="$soname" - soname=`$echo $soroot | ${SED} -e 's/^.*\///'` - newlib="libimp-`$echo $soname | ${SED} 's/^lib//;s/\.dll$//'`.a" - - # If the library has no export list, then create one now - if test -f "$output_objdir/$soname-def"; then : - else - $show "extracting exported symbol list from \`$soname'" - save_ifs="$IFS"; IFS='~' - cmds=$extract_expsyms_cmds - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || exit $? - done - IFS="$save_ifs" - fi - - # Create $newlib - if test -f "$output_objdir/$newlib"; then :; else - $show "generating import library for \`$soname'" - save_ifs="$IFS"; IFS='~' - cmds=$old_archive_from_expsyms_cmds - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || exit $? 
- done - IFS="$save_ifs" - fi - # make sure the library variables are pointing to the new library - dir=$output_objdir - linklib=$newlib - fi # test -n "$old_archive_from_expsyms_cmds" - - if test "$linkmode" = prog || test "$mode" != relink; then - add_shlibpath= - add_dir= - add= - lib_linked=yes - case $hardcode_action in - immediate | unsupported) - if test "$hardcode_direct" = no; then - add="$dir/$linklib" - case $host in - *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;; - *-*-sysv4*uw2*) add_dir="-L$dir" ;; - *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ - *-*-unixware7*) add_dir="-L$dir" ;; - *-*-darwin* ) - # if the lib is a module then we can not link against - # it, someone is ignoring the new warnings I added - if /usr/bin/file -L $add 2> /dev/null | - $EGREP ": [^:]* bundle" >/dev/null ; then - $echo "** Warning, lib $linklib is a module, not a shared library" - if test -z "$old_library" ; then - $echo - $echo "** And there doesn't seem to be a static archive available" - $echo "** The link will probably fail, sorry" - else - add="$dir/$old_library" - fi - fi - esac - elif test "$hardcode_minus_L" = no; then - case $host in - *-*-sunos*) add_shlibpath="$dir" ;; - esac - add_dir="-L$dir" - add="-l$name" - elif test "$hardcode_shlibpath_var" = no; then - add_shlibpath="$dir" - add="-l$name" - else - lib_linked=no - fi - ;; - relink) - if test "$hardcode_direct" = yes; then - add="$dir/$linklib" - elif test "$hardcode_minus_L" = yes; then - add_dir="-L$dir" - # Try looking first in the location we're being installed to. - if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) - add_dir="$add_dir -L$inst_prefix_dir$libdir" - ;; - esac - fi - add="-l$name" - elif test "$hardcode_shlibpath_var" = yes; then - add_shlibpath="$dir" - add="-l$name" - else - lib_linked=no - fi - ;; - *) lib_linked=no ;; - esac - - if test "$lib_linked" != yes; then - $echo "$modename: configuration error: unsupported hardcode properties" - exit $EXIT_FAILURE - fi - - if test -n "$add_shlibpath"; then - case :$compile_shlibpath: in - *":$add_shlibpath:"*) ;; - *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;; - esac - fi - if test "$linkmode" = prog; then - test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" - test -n "$add" && compile_deplibs="$add $compile_deplibs" - else - test -n "$add_dir" && deplibs="$add_dir $deplibs" - test -n "$add" && deplibs="$add $deplibs" - if test "$hardcode_direct" != yes && \ - test "$hardcode_minus_L" != yes && \ - test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; - esac - fi - fi - fi - - if test "$linkmode" = prog || test "$mode" = relink; then - add_shlibpath= - add_dir= - add= - # Finalize command for both is simple: just hardcode it. - if test "$hardcode_direct" = yes; then - add="$libdir/$linklib" - elif test "$hardcode_minus_L" = yes; then - add_dir="-L$libdir" - add="-l$name" - elif test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; - *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; - esac - add="-l$name" - elif test "$hardcode_automatic" = yes; then - if test -n "$inst_prefix_dir" && - test -f "$inst_prefix_dir$libdir/$linklib" ; then - add="$inst_prefix_dir$libdir/$linklib" - else - add="$libdir/$linklib" - fi - else - # We cannot seem to hardcode it, guess we'll fake it. - add_dir="-L$libdir" - # Try looking first in the location we're being installed to. 
- if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) - add_dir="$add_dir -L$inst_prefix_dir$libdir" - ;; - esac - fi - add="-l$name" - fi - - if test "$linkmode" = prog; then - test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" - test -n "$add" && finalize_deplibs="$add $finalize_deplibs" - else - test -n "$add_dir" && deplibs="$add_dir $deplibs" - test -n "$add" && deplibs="$add $deplibs" - fi - fi - elif test "$linkmode" = prog; then - # Here we assume that one of hardcode_direct or hardcode_minus_L - # is not unsupported. This is valid on all known static and - # shared platforms. - if test "$hardcode_direct" != unsupported; then - test -n "$old_library" && linklib="$old_library" - compile_deplibs="$dir/$linklib $compile_deplibs" - finalize_deplibs="$dir/$linklib $finalize_deplibs" - else - compile_deplibs="-l$name -L$dir $compile_deplibs" - finalize_deplibs="-l$name -L$dir $finalize_deplibs" - fi - elif test "$build_libtool_libs" = yes; then - # Not a shared library - if test "$deplibs_check_method" != pass_all; then - # We're trying link a shared library against a static one - # but the system doesn't support it. - - # Just print a warning and add the library to dependency_libs so - # that the program can be linked against the static library. - $echo - $echo "*** Warning: This system can not link to static lib archive $lib." - $echo "*** I have the capability to make that library automatically link in when" - $echo "*** you link to this library. But I can only do this if you have a" - $echo "*** shared version of the library, which you do not appear to have." - if test "$module" = yes; then - $echo "*** But as you try to build a module library, libtool will still create " - $echo "*** a static module, that should work as long as the dlopening application" - $echo "*** is linked with the -dlopen flag to resolve symbols at runtime." - if test -z "$global_symbol_pipe"; then - $echo - $echo "*** However, this would only work if libtool was able to extract symbol" - $echo "*** lists from a program, using \`nm' or equivalent, but libtool could" - $echo "*** not find such a program. So, this module is probably useless." - $echo "*** \`nm' from GNU binutils and a full rebuild may help." - fi - if test "$build_old_libs" = no; then - build_libtool_libs=module - build_old_libs=yes - else - build_libtool_libs=no - fi - fi - else - deplibs="$dir/$old_library $deplibs" - link_static=yes - fi - fi # link shared/static library? - - if test "$linkmode" = lib; then - if test -n "$dependency_libs" && - { test "$hardcode_into_libs" != yes || - test "$build_old_libs" = yes || - test "$link_static" = yes; }; then - # Extract -R from dependency_libs - temp_deplibs= - for libdir in $dependency_libs; do - case $libdir in - -R*) temp_xrpath=`$echo "X$libdir" | $Xsed -e 's/^-R//'` - case " $xrpath " in - *" $temp_xrpath "*) ;; - *) xrpath="$xrpath $temp_xrpath";; - esac;; - *) temp_deplibs="$temp_deplibs $libdir";; - esac - done - dependency_libs="$temp_deplibs" - fi - - newlib_search_path="$newlib_search_path $absdir" - # Link against this library - test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" - # ... 
and its dependency_libs - tmp_libs= - for deplib in $dependency_libs; do - newdependency_libs="$deplib $newdependency_libs" - if test "X$duplicate_deps" = "Xyes" ; then - case "$tmp_libs " in - *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; - esac - fi - tmp_libs="$tmp_libs $deplib" - done - - if test "$link_all_deplibs" != no; then - # Add the search paths of all dependency libraries - for deplib in $dependency_libs; do - case $deplib in - -L*) path="$deplib" ;; - *.la) - dir=`$echo "X$deplib" | $Xsed -e 's%/[^/]*$%%'` - test "X$dir" = "X$deplib" && dir="." - # We need an absolute path. - case $dir in - [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; - *) - absdir=`cd "$dir" && pwd` - if test -z "$absdir"; then - $echo "$modename: warning: cannot determine absolute directory name of \`$dir'" 1>&2 - absdir="$dir" - fi - ;; - esac - if grep "^installed=no" $deplib > /dev/null; then - path="$absdir/$objdir" - else - eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` - if test -z "$libdir"; then - $echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2 - exit $EXIT_FAILURE - fi - if test "$absdir" != "$libdir"; then - $echo "$modename: warning: \`$deplib' seems to be moved" 1>&2 - fi - path="$absdir" - fi - depdepl= - case $host in - *-*-darwin*) - # we do not want to link against static libs, - # but need to link against shared - eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` - eval deplibdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` - if test -n "$deplibrary_names" ; then - for tmp in $deplibrary_names ; do - depdepl=$tmp - done - if test -f "$deplibdir/$depdepl" ; then - depdepl="$deplibdir/$depdepl" - elif test -f "$path/$depdepl" ; then - depdepl="$path/$depdepl" - else - # Can't find it, oh well... - depdepl= - fi - # do not add paths which are already there - case " $newlib_search_path " in - *" $path "*) ;; - *) newlib_search_path="$newlib_search_path $path";; - esac - fi - path="" - ;; - *) - path="-L$path" - ;; - esac - ;; - -l*) - case $host in - *-*-darwin*) - # Again, we only want to link against shared libraries - eval tmp_libs=`$echo "X$deplib" | $Xsed -e "s,^\-l,,"` - for tmp in $newlib_search_path ; do - if test -f "$tmp/lib$tmp_libs.dylib" ; then - eval depdepl="$tmp/lib$tmp_libs.dylib" - break - fi - done - path="" - ;; - *) continue ;; - esac - ;; - *) continue ;; - esac - case " $deplibs " in - *" $path "*) ;; - *) deplibs="$path $deplibs" ;; - esac - case " $deplibs " in - *" $depdepl "*) ;; - *) deplibs="$depdepl $deplibs" ;; - esac - done - fi # link_all_deplibs != no - fi # linkmode = lib - done # for deplib in $libs - dependency_libs="$newdependency_libs" - if test "$pass" = dlpreopen; then - # Link the dlpreopened libraries before other libraries - for deplib in $save_deplibs; do - deplibs="$deplib $deplibs" - done - fi - if test "$pass" != dlopen; then - if test "$pass" != conv; then - # Make sure lib_search_path contains only unique directories. 
- lib_search_path= - for dir in $newlib_search_path; do - case "$lib_search_path " in - *" $dir "*) ;; - *) lib_search_path="$lib_search_path $dir" ;; - esac - done - newlib_search_path= - fi - - if test "$linkmode,$pass" != "prog,link"; then - vars="deplibs" - else - vars="compile_deplibs finalize_deplibs" - fi - for var in $vars dependency_libs; do - # Add libraries to $var in reverse order - eval tmp_libs=\"\$$var\" - new_libs= - for deplib in $tmp_libs; do - # FIXME: Pedantically, this is the right thing to do, so - # that some nasty dependency loop isn't accidentally - # broken: - #new_libs="$deplib $new_libs" - # Pragmatically, this seems to cause very few problems in - # practice: - case $deplib in - -L*) new_libs="$deplib $new_libs" ;; - -R*) ;; - *) - # And here is the reason: when a library appears more - # than once as an explicit dependence of a library, or - # is implicitly linked in more than once by the - # compiler, it is considered special, and multiple - # occurrences thereof are not removed. Compare this - # with having the same library being listed as a - # dependency of multiple other libraries: in this case, - # we know (pedantically, we assume) the library does not - # need to be listed more than once, so we keep only the - # last copy. This is not always right, but it is rare - # enough that we require users that really mean to play - # such unportable linking tricks to link the library - # using -Wl,-lname, so that libtool does not consider it - # for duplicate removal. - case " $specialdeplibs " in - *" $deplib "*) new_libs="$deplib $new_libs" ;; - *) - case " $new_libs " in - *" $deplib "*) ;; - *) new_libs="$deplib $new_libs" ;; - esac - ;; - esac - ;; - esac - done - tmp_libs= - for deplib in $new_libs; do - case $deplib in - -L*) - case " $tmp_libs " in - *" $deplib "*) ;; - *) tmp_libs="$tmp_libs $deplib" ;; - esac - ;; - *) tmp_libs="$tmp_libs $deplib" ;; - esac - done - eval $var=\"$tmp_libs\" - done # for var - fi - # Last step: remove runtime libs from dependency_libs - # (they stay in deplibs) - tmp_libs= - for i in $dependency_libs ; do - case " $predeps $postdeps $compiler_lib_search_path " in - *" $i "*) - i="" - ;; - esac - if test -n "$i" ; then - tmp_libs="$tmp_libs $i" - fi - done - dependency_libs=$tmp_libs - done # for pass - if test "$linkmode" = prog; then - dlfiles="$newdlfiles" - dlprefiles="$newdlprefiles" - fi - - case $linkmode in - oldlib) - case " $deplibs" in - *\ -l* | *\ -L*) - $echo "$modename: warning: \`-l' and \`-L' are ignored for archives" 1>&2 ;; - esac - - if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then - $echo "$modename: warning: \`-dlopen' is ignored for archives" 1>&2 - fi - - if test -n "$rpath"; then - $echo "$modename: warning: \`-rpath' is ignored for archives" 1>&2 - fi - - if test -n "$xrpath"; then - $echo "$modename: warning: \`-R' is ignored for archives" 1>&2 - fi - - if test -n "$vinfo"; then - $echo "$modename: warning: \`-version-info/-version-number' is ignored for archives" 1>&2 - fi - - if test -n "$release"; then - $echo "$modename: warning: \`-release' is ignored for archives" 1>&2 - fi - - if test -n "$export_symbols" || test -n "$export_symbols_regex"; then - $echo "$modename: warning: \`-export-symbols' is ignored for archives" 1>&2 - fi - - # Now set the variables for building old libraries. - build_libtool_libs=no - oldlibs="$output" - objs="$objs$old_deplibs" - ;; - - lib) - # Make sure we only generate libraries of the form `libNAME.la'. 
- case $outputname in - lib*) - name=`$echo "X$outputname" | $Xsed -e 's/\.la$//' -e 's/^lib//'` - eval shared_ext=\"$shrext_cmds\" - eval libname=\"$libname_spec\" - ;; - *) - if test "$module" = no; then - $echo "$modename: libtool library \`$output' must begin with \`lib'" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - if test "$need_lib_prefix" != no; then - # Add the "lib" prefix for modules if required - name=`$echo "X$outputname" | $Xsed -e 's/\.la$//'` - eval shared_ext=\"$shrext_cmds\" - eval libname=\"$libname_spec\" - else - libname=`$echo "X$outputname" | $Xsed -e 's/\.la$//'` - fi - ;; - esac - - if test -n "$objs"; then - if test "$deplibs_check_method" != pass_all; then - $echo "$modename: cannot build libtool library \`$output' from non-libtool objects on this host:$objs" 2>&1 - exit $EXIT_FAILURE - else - $echo - $echo "*** Warning: Linking the shared library $output against the non-libtool" - $echo "*** objects $objs is not portable!" - libobjs="$libobjs $objs" - fi - fi - - if test "$dlself" != no; then - $echo "$modename: warning: \`-dlopen self' is ignored for libtool libraries" 1>&2 - fi - - set dummy $rpath - if test "$#" -gt 2; then - $echo "$modename: warning: ignoring multiple \`-rpath's for a libtool library" 1>&2 - fi - install_libdir="$2" - - oldlibs= - if test -z "$rpath"; then - if test "$build_libtool_libs" = yes; then - # Building a libtool convenience library. - # Some compilers have problems with a `.al' extension so - # convenience libraries should have the same extension an - # archive normally would. - oldlibs="$output_objdir/$libname.$libext $oldlibs" - build_libtool_libs=convenience - build_old_libs=yes - fi - - if test -n "$vinfo"; then - $echo "$modename: warning: \`-version-info/-version-number' is ignored for convenience libraries" 1>&2 - fi - - if test -n "$release"; then - $echo "$modename: warning: \`-release' is ignored for convenience libraries" 1>&2 - fi - else - - # Parse the version information argument. - save_ifs="$IFS"; IFS=':' - set dummy $vinfo 0 0 0 - IFS="$save_ifs" - - if test -n "$8"; then - $echo "$modename: too many parameters to \`-version-info'" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - # convert absolute version numbers to libtool ages - # this retains compatibility with .la files and attempts - # to make the code below a bit more comprehensible - - case $vinfo_number in - yes) - number_major="$2" - number_minor="$3" - number_revision="$4" - # - # There are really only two kinds -- those that - # use the current revision as the major version - # and those that subtract age and use age as - # a minor version. But, then there is irix - # which has an extra 1 added just for fun - # - case $version_type in - darwin|linux|osf|windows|none) - current=`expr $number_major + $number_minor` - age="$number_minor" - revision="$number_revision" - ;; - freebsd-aout|freebsd-elf|sunos) - current="$number_major" - revision="$number_minor" - age="0" - ;; - irix|nonstopux) - current=`expr $number_major + $number_minor` - age="$number_minor" - revision="$number_minor" - lt_irix_increment=no - ;; - esac - ;; - no) - current="$2" - revision="$3" - age="$4" - ;; - esac - - # Check that each of the things are valid numbers. 
- case $current in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) - $echo "$modename: CURRENT \`$current' must be a nonnegative integer" 1>&2 - $echo "$modename: \`$vinfo' is not valid version information" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - case $revision in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) - $echo "$modename: REVISION \`$revision' must be a nonnegative integer" 1>&2 - $echo "$modename: \`$vinfo' is not valid version information" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - case $age in - 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; - *) - $echo "$modename: AGE \`$age' must be a nonnegative integer" 1>&2 - $echo "$modename: \`$vinfo' is not valid version information" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - if test "$age" -gt "$current"; then - $echo "$modename: AGE \`$age' is greater than the current interface number \`$current'" 1>&2 - $echo "$modename: \`$vinfo' is not valid version information" 1>&2 - exit $EXIT_FAILURE - fi - - # Calculate the version variables. - major= - versuffix= - verstring= - case $version_type in - none) ;; - - darwin) - # Like Linux, but with the current version available in - # verstring for coding it into the library header - major=.`expr $current - $age` - versuffix="$major.$age.$revision" - # Darwin ld doesn't like 0 for these options... - minor_current=`expr $current + 1` - xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision" - verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" - ;; - - freebsd-aout) - major=".$current" - versuffix=".$current.$revision"; - ;; - - freebsd-elf) - major=".$current" - versuffix=".$current"; - ;; - - irix | nonstopux) - if test "X$lt_irix_increment" = "Xno"; then - major=`expr $current - $age` - else - major=`expr $current - $age + 1` - fi - case $version_type in - nonstopux) verstring_prefix=nonstopux ;; - *) verstring_prefix=sgi ;; - esac - verstring="$verstring_prefix$major.$revision" - - # Add in all the interfaces that we are compatible with. - loop=$revision - while test "$loop" -ne 0; do - iface=`expr $revision - $loop` - loop=`expr $loop - 1` - verstring="$verstring_prefix$major.$iface:$verstring" - done - - # Before this point, $major must not contain `.'. - major=.$major - versuffix="$major.$revision" - ;; - - linux) - major=.`expr $current - $age` - versuffix="$major.$age.$revision" - ;; - - osf) - major=.`expr $current - $age` - versuffix=".$current.$age.$revision" - verstring="$current.$age.$revision" - - # Add in all the interfaces that we are compatible with. - loop=$age - while test "$loop" -ne 0; do - iface=`expr $current - $loop` - loop=`expr $loop - 1` - verstring="$verstring:${iface}.0" - done - - # Make executables depend on our current version. - verstring="$verstring:${current}.0" - ;; - - sunos) - major=".$current" - versuffix=".$current.$revision" - ;; - - windows) - # Use '-' rather than '.', since we only want one - # extension on DOS 8.3 filesystems. - major=`expr $current - $age` - versuffix="-$major" - ;; - - *) - $echo "$modename: unknown library version type \`$version_type'" 1>&2 - $echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2 - exit $EXIT_FAILURE - ;; - esac - - # Clear the version info if we defaulted, and they specified a release. 
- if test -z "$vinfo" && test -n "$release"; then - major= - case $version_type in - darwin) - # we can't check for "0.0" in archive_cmds due to quoting - # problems, so we reset it completely - verstring= - ;; - *) - verstring="0.0" - ;; - esac - if test "$need_version" = no; then - versuffix= - else - versuffix=".0.0" - fi - fi - - # Remove version info from name if versioning should be avoided - if test "$avoid_version" = yes && test "$need_version" = no; then - major= - versuffix= - verstring="" - fi - - # Check to see if the archive will have undefined symbols. - if test "$allow_undefined" = yes; then - if test "$allow_undefined_flag" = unsupported; then - $echo "$modename: warning: undefined symbols not allowed in $host shared libraries" 1>&2 - build_libtool_libs=no - build_old_libs=yes - fi - else - # Don't allow undefined symbols. - allow_undefined_flag="$no_undefined_flag" - fi - fi - - if test "$mode" != relink; then - # Remove our outputs, but don't remove object files since they - # may have been created when compiling PIC objects. - removelist= - tempremovelist=`$echo "$output_objdir/*"` - for p in $tempremovelist; do - case $p in - *.$objext) - ;; - $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*) - if test "X$precious_files_regex" != "X"; then - if echo $p | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 - then - continue - fi - fi - removelist="$removelist $p" - ;; - *) ;; - esac - done - if test -n "$removelist"; then - $show "${rm}r $removelist" - $run ${rm}r $removelist - fi - fi - - # Now set the variables for building old libraries. - if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then - oldlibs="$oldlibs $output_objdir/$libname.$libext" - - # Transform .lo files to .o files. - oldobjs="$objs "`$echo "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}'$/d' -e "$lo2o" | $NL2SP` - fi - - # Eliminate all temporary directories. - #for path in $notinst_path; do - # lib_search_path=`$echo "$lib_search_path " | ${SED} -e "s% $path % %g"` - # deplibs=`$echo "$deplibs " | ${SED} -e "s% -L$path % %g"` - # dependency_libs=`$echo "$dependency_libs " | ${SED} -e "s% -L$path % %g"` - #done - - if test -n "$xrpath"; then - # If the user specified any rpath flags, then add them. - temp_xrpath= - for libdir in $xrpath; do - temp_xrpath="$temp_xrpath -R$libdir" - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" ;; - esac - done - if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then - dependency_libs="$temp_xrpath $dependency_libs" - fi - fi - - # Make sure dlfiles contains only unique files that won't be dlpreopened - old_dlfiles="$dlfiles" - dlfiles= - for lib in $old_dlfiles; do - case " $dlprefiles $dlfiles " in - *" $lib "*) ;; - *) dlfiles="$dlfiles $lib" ;; - esac - done - - # Make sure dlprefiles contains only unique files - old_dlprefiles="$dlprefiles" - dlprefiles= - for lib in $old_dlprefiles; do - case "$dlprefiles " in - *" $lib "*) ;; - *) dlprefiles="$dlprefiles $lib" ;; - esac - done - - if test "$build_libtool_libs" = yes; then - if test -n "$rpath"; then - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos*) - # these systems don't actually have a c library (as such)! - ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C library is in the System framework - deplibs="$deplibs -framework System" - ;; - *-*-netbsd*) - # Don't link with libc until the a.out ld.so is fixed. 
- ;; - *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) - # Do not include libc due to us having libc/libc_r. - ;; - *-*-sco3.2v5* | *-*-sco5v6*) - # Causes problems with __ctype - ;; - *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) - # Compiler inserts libc in the correct place for threads to work - ;; - *) - # Add libc to deplibs on all other systems if necessary. - if test "$build_libtool_need_lc" = "yes"; then - deplibs="$deplibs -lc" - fi - ;; - esac - fi - - # Transform deplibs into only deplibs that can be linked in shared. - name_save=$name - libname_save=$libname - release_save=$release - versuffix_save=$versuffix - major_save=$major - # I'm not sure if I'm treating the release correctly. I think - # release should show up in the -l (ie -lgmp5) so we don't want to - # add it in twice. Is that correct? - release="" - versuffix="" - major="" - newdeplibs= - droppeddeps=no - case $deplibs_check_method in - pass_all) - # Don't check for shared/static. Everything works. - # This might be a little naive. We might want to check - # whether the library exists or not. But this is on - # osf3 & osf4 and I'm not really sure... Just - # implementing what was already the behavior. - newdeplibs=$deplibs - ;; - test_compile) - # This code stresses the "libraries are programs" paradigm to its - # limits. Maybe even breaks it. We compile a program, linking it - # against the deplibs as a proxy for the library. Then we can check - # whether they linked in statically or dynamically with ldd. - $rm conftest.c - cat > conftest.c <<EOF - int main() { return 0; } -EOF - $rm conftest - if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then - ldd_output=`ldd conftest` - for i in $deplibs; do - name=`expr $i : '-l\(.*\)'` - # If $name is empty we are operating on a -L argument. - if test "$name" != "" && test "$name" != "0"; then - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) - newdeplibs="$newdeplibs $i" - i="" - ;; - esac - fi - if test -n "$i" ; then - libname=`eval \\$echo \"$libname_spec\"` - deplib_matches=`eval \\$echo \"$library_names_spec\"` - set dummy $deplib_matches - deplib_match=$2 - if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then - newdeplibs="$newdeplibs $i" - else - droppeddeps=yes - $echo - $echo "*** Warning: dynamic linker does not accept needed library $i." - $echo "*** I have the capability to make that library automatically link in when" - $echo "*** you link to this library. But I can only do this if you have a" - $echo "*** shared version of the library, which I believe you do not have" - $echo "*** because a test_compile did reveal that the linker did not use it for" - $echo "*** its dynamic dependency list that programs get resolved with at runtime." - fi - fi - else - newdeplibs="$newdeplibs $i" - fi - done - else - # Error occurred in the first compile. Let's try to salvage - # the situation: Compile a separate program for each library. - for i in $deplibs; do - name=`expr $i : '-l\(.*\)'` - # If $name is empty we are operating on a -L argument. 
- if test "$name" != "" && test "$name" != "0"; then - $rm conftest - if $LTCC $LTCFLAGS -o conftest conftest.c $i; then - ldd_output=`ldd conftest` - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) - newdeplibs="$newdeplibs $i" - i="" - ;; - esac - fi - if test -n "$i" ; then - libname=`eval \\$echo \"$libname_spec\"` - deplib_matches=`eval \\$echo \"$library_names_spec\"` - set dummy $deplib_matches - deplib_match=$2 - if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then - newdeplibs="$newdeplibs $i" - else - droppeddeps=yes - $echo - $echo "*** Warning: dynamic linker does not accept needed library $i." - $echo "*** I have the capability to make that library automatically link in when" - $echo "*** you link to this library. But I can only do this if you have a" - $echo "*** shared version of the library, which you do not appear to have" - $echo "*** because a test_compile did reveal that the linker did not use this one" - $echo "*** as a dynamic dependency that programs can get resolved with at runtime." - fi - fi - else - droppeddeps=yes - $echo - $echo "*** Warning! Library $i is needed by this library but I was not able to" - $echo "*** make it link in! You will probably need to install it or some" - $echo "*** library that it depends on before this library will be fully" - $echo "*** functional. Installing it before continuing would be even better." - fi - else - newdeplibs="$newdeplibs $i" - fi - done - fi - ;; - file_magic*) - set dummy $deplibs_check_method - file_magic_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"` - for a_deplib in $deplibs; do - name=`expr $a_deplib : '-l\(.*\)'` - # If $name is empty we are operating on a -L argument. - if test "$name" != "" && test "$name" != "0"; then - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) - newdeplibs="$newdeplibs $a_deplib" - a_deplib="" - ;; - esac - fi - if test -n "$a_deplib" ; then - libname=`eval \\$echo \"$libname_spec\"` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do - potential_libs=`ls $i/$libname[.-]* 2>/dev/null` - for potent_lib in $potential_libs; do - # Follow soft links. - if ls -lLd "$potent_lib" 2>/dev/null \ - | grep " -> " >/dev/null; then - continue - fi - # The statement above tries to avoid entering an - # endless loop below, in case of cyclic links. - # We might still enter an endless loop, since a link - # loop can be closed while we follow links, - # but so what? - potlib="$potent_lib" - while test -h "$potlib" 2>/dev/null; do - potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'` - case $potliblink in - [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";; - *) potlib=`$echo "X$potlib" | $Xsed -e 's,[^/]*$,,'`"$potliblink";; - esac - done - if eval $file_magic_cmd \"\$potlib\" 2>/dev/null \ - | ${SED} 10q \ - | $EGREP "$file_magic_regex" > /dev/null; then - newdeplibs="$newdeplibs $a_deplib" - a_deplib="" - break 2 - fi - done - done - fi - if test -n "$a_deplib" ; then - droppeddeps=yes - $echo - $echo "*** Warning: linker path does not have real file for library $a_deplib." - $echo "*** I have the capability to make that library automatically link in when" - $echo "*** you link to this library. 
But I can only do this if you have a" - $echo "*** shared version of the library, which you do not appear to have" - $echo "*** because I did check the linker path looking for a file starting" - if test -z "$potlib" ; then - $echo "*** with $libname but no candidates were found. (...for file magic test)" - else - $echo "*** with $libname and none of the candidates passed a file format test" - $echo "*** using a file magic. Last file checked: $potlib" - fi - fi - else - # Add a -L argument. - newdeplibs="$newdeplibs $a_deplib" - fi - done # Gone through all deplibs. - ;; - match_pattern*) - set dummy $deplibs_check_method - match_pattern_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"` - for a_deplib in $deplibs; do - name=`expr $a_deplib : '-l\(.*\)'` - # If $name is empty we are operating on a -L argument. - if test -n "$name" && test "$name" != "0"; then - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) - newdeplibs="$newdeplibs $a_deplib" - a_deplib="" - ;; - esac - fi - if test -n "$a_deplib" ; then - libname=`eval \\$echo \"$libname_spec\"` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do - potential_libs=`ls $i/$libname[.-]* 2>/dev/null` - for potent_lib in $potential_libs; do - potlib="$potent_lib" # see symlink-check above in file_magic test - if eval $echo \"$potent_lib\" 2>/dev/null \ - | ${SED} 10q \ - | $EGREP "$match_pattern_regex" > /dev/null; then - newdeplibs="$newdeplibs $a_deplib" - a_deplib="" - break 2 - fi - done - done - fi - if test -n "$a_deplib" ; then - droppeddeps=yes - $echo - $echo "*** Warning: linker path does not have real file for library $a_deplib." - $echo "*** I have the capability to make that library automatically link in when" - $echo "*** you link to this library. But I can only do this if you have a" - $echo "*** shared version of the library, which you do not appear to have" - $echo "*** because I did check the linker path looking for a file starting" - if test -z "$potlib" ; then - $echo "*** with $libname but no candidates were found. (...for regex pattern test)" - else - $echo "*** with $libname and none of the candidates passed a file format test" - $echo "*** using a regex pattern. Last file checked: $potlib" - fi - fi - else - # Add a -L argument. - newdeplibs="$newdeplibs $a_deplib" - fi - done # Gone through all deplibs. - ;; - none | unknown | *) - newdeplibs="" - tmp_deplibs=`$echo "X $deplibs" | $Xsed -e 's/ -lc$//' \ - -e 's/ -[LR][^ ]*//g'` - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - for i in $predeps $postdeps ; do - # can't use Xsed below, because $i might contain '/' - tmp_deplibs=`$echo "X $tmp_deplibs" | ${SED} -e "1s,^X,," -e "s,$i,,"` - done - fi - if $echo "X $tmp_deplibs" | $Xsed -e 's/[ ]//g' \ - | grep . >/dev/null; then - $echo - if test "X$deplibs_check_method" = "Xnone"; then - $echo "*** Warning: inter-library dependencies are not supported in this platform." - else - $echo "*** Warning: inter-library dependencies are not known to be supported." - fi - $echo "*** All declared inter-library dependencies are being dropped." 
- droppeddeps=yes - fi - ;; - esac - versuffix=$versuffix_save - major=$major_save - release=$release_save - libname=$libname_save - name=$name_save - - case $host in - *-*-rhapsody* | *-*-darwin1.[012]) - # On Rhapsody replace the C library is the System framework - newdeplibs=`$echo "X $newdeplibs" | $Xsed -e 's/ -lc / -framework System /'` - ;; - esac - - if test "$droppeddeps" = yes; then - if test "$module" = yes; then - $echo - $echo "*** Warning: libtool could not satisfy all declared inter-library" - $echo "*** dependencies of module $libname. Therefore, libtool will create" - $echo "*** a static module, that should work as long as the dlopening" - $echo "*** application is linked with the -dlopen flag." - if test -z "$global_symbol_pipe"; then - $echo - $echo "*** However, this would only work if libtool was able to extract symbol" - $echo "*** lists from a program, using \`nm' or equivalent, but libtool could" - $echo "*** not find such a program. So, this module is probably useless." - $echo "*** \`nm' from GNU binutils and a full rebuild may help." - fi - if test "$build_old_libs" = no; then - oldlibs="$output_objdir/$libname.$libext" - build_libtool_libs=module - build_old_libs=yes - else - build_libtool_libs=no - fi - else - $echo "*** The inter-library dependencies that have been dropped here will be" - $echo "*** automatically added whenever a program is linked with this library" - $echo "*** or is declared to -dlopen it." - - if test "$allow_undefined" = no; then - $echo - $echo "*** Since this library must not contain undefined symbols," - $echo "*** because either the platform does not support them or" - $echo "*** it was explicitly requested with -no-undefined," - $echo "*** libtool will only create a static version of it." - if test "$build_old_libs" = no; then - oldlibs="$output_objdir/$libname.$libext" - build_libtool_libs=module - build_old_libs=yes - else - build_libtool_libs=no - fi - fi - fi - fi - # Done checking deplibs! - deplibs=$newdeplibs - fi - - - # move library search paths that coincide with paths to not yet - # installed libraries to the beginning of the library search list - new_libs= - for path in $notinst_path; do - case " $new_libs " in - *" -L$path/$objdir "*) ;; - *) - case " $deplibs " in - *" -L$path/$objdir "*) - new_libs="$new_libs -L$path/$objdir" ;; - esac - ;; - esac - done - for deplib in $deplibs; do - case $deplib in - -L*) - case " $new_libs " in - *" $deplib "*) ;; - *) new_libs="$new_libs $deplib" ;; - esac - ;; - *) new_libs="$new_libs $deplib" ;; - esac - done - deplibs="$new_libs" - - - # All the library-specific variables (install_libdir is set above). - library_names= - old_library= - dlname= - - # Test again, we may have decided not to build it any more - if test "$build_libtool_libs" = yes; then - if test "$hardcode_into_libs" = yes; then - # Hardcode the library paths - hardcode_libdirs= - dep_rpath= - rpath="$finalize_rpath" - test "$mode" != relink && rpath="$compile_rpath$rpath" - for libdir in $rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. 
- case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" - ;; - esac - fi - else - eval flag=\"$hardcode_libdir_flag_spec\" - dep_rpath="$dep_rpath $flag" - fi - elif test -n "$runpath_var"; then - case "$perm_rpath " in - *" $libdir "*) ;; - *) perm_rpath="$perm_rpath $libdir" ;; - esac - fi - done - # Substitute the hardcoded libdirs into the rpath. - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" - if test -n "$hardcode_libdir_flag_spec_ld"; then - case $archive_cmds in - *\$LD*) eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\" ;; - *) eval dep_rpath=\"$hardcode_libdir_flag_spec\" ;; - esac - else - eval dep_rpath=\"$hardcode_libdir_flag_spec\" - fi - fi - if test -n "$runpath_var" && test -n "$perm_rpath"; then - # We should set the runpath_var. - rpath= - for dir in $perm_rpath; do - rpath="$rpath$dir:" - done - eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" - fi - test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" - fi - - shlibpath="$finalize_shlibpath" - test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath" - if test -n "$shlibpath"; then - eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" - fi - - # Get the real and link names of the library. - eval shared_ext=\"$shrext_cmds\" - eval library_names=\"$library_names_spec\" - set dummy $library_names - realname="$2" - shift; shift - - if test -n "$soname_spec"; then - eval soname=\"$soname_spec\" - else - soname="$realname" - fi - if test -z "$dlname"; then - dlname=$soname - fi - - lib="$output_objdir/$realname" - linknames= - for link - do - linknames="$linknames $link" - done - - # Use standard objects if they are pic - test -z "$pic_flag" && libobjs=`$echo "X$libobjs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` - - # Prepare the list of exported symbols - if test -z "$export_symbols"; then - if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then - $show "generating symbol list for \`$libname.la'" - export_symbols="$output_objdir/$libname.exp" - $run $rm $export_symbols - cmds=$export_symbols_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - if len=`expr "X$cmd" : ".*"` && - test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then - $show "$cmd" - $run eval "$cmd" || exit $? - skipped_export=false - else - # The command line is too long to execute in one step. - $show "using reloadable object file for export list..." - skipped_export=: - # Break out early, otherwise skipped_export may be - # set to false by a later but shorter cmd. 
- break - fi - done - IFS="$save_ifs" - if test -n "$export_symbols_regex"; then - $show "$EGREP -e \"$export_symbols_regex\" \"$export_symbols\" > \"${export_symbols}T\"" - $run eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' - $show "$mv \"${export_symbols}T\" \"$export_symbols\"" - $run eval '$mv "${export_symbols}T" "$export_symbols"' - fi - fi - fi - - if test -n "$export_symbols" && test -n "$include_expsyms"; then - $run eval '$echo "X$include_expsyms" | $SP2NL >> "$export_symbols"' - fi - - tmp_deplibs= - for test_deplib in $deplibs; do - case " $convenience " in - *" $test_deplib "*) ;; - *) - tmp_deplibs="$tmp_deplibs $test_deplib" - ;; - esac - done - deplibs="$tmp_deplibs" - - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec"; then - save_libobjs=$libobjs - eval libobjs=\"\$libobjs $whole_archive_flag_spec\" - else - gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" - - func_extract_archives $gentop $convenience - libobjs="$libobjs $func_extract_archives_result" - fi - fi - - if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then - eval flag=\"$thread_safe_flag_spec\" - linker_flags="$linker_flags $flag" - fi - - # Make a backup of the uninstalled library when relinking - if test "$mode" = relink; then - $run eval '(cd $output_objdir && $rm ${realname}U && $mv $realname ${realname}U)' || exit $? - fi - - # Do each of the archive commands. - if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then - eval test_cmds=\"$module_expsym_cmds\" - cmds=$module_expsym_cmds - else - eval test_cmds=\"$module_cmds\" - cmds=$module_cmds - fi - else - if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then - eval test_cmds=\"$archive_expsym_cmds\" - cmds=$archive_expsym_cmds - else - eval test_cmds=\"$archive_cmds\" - cmds=$archive_cmds - fi - fi - - if test "X$skipped_export" != "X:" && - len=`expr "X$test_cmds" : ".*" 2>/dev/null` && - test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then - : - else - # The command line is too long to link in one step, link piecewise. - $echo "creating reloadable object files..." - - # Save the value of $output and $libobjs because we want to - # use them later. If we have whole_archive_flag_spec, we - # want to use save_libobjs as it was before - # whole_archive_flag_spec was expanded, because we can't - # assume the linker understands whole_archive_flag_spec. - # This may have to be revisited, in case too many - # convenience libraries get linked in and end up exceeding - # the spec. - if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then - save_libobjs=$libobjs - fi - save_output=$output - output_la=`$echo "X$output" | $Xsed -e "$basename"` - - # Clear the reloadable object creation command queue and - # initialize k to one. - test_cmds= - concat_cmds= - objlist= - delfiles= - last_robj= - k=1 - output=$output_objdir/$output_la-${k}.$objext - # Loop over the list of objects to be linked. - for obj in $save_libobjs - do - eval test_cmds=\"$reload_cmds $objlist $last_robj\" - if test "X$objlist" = X || - { len=`expr "X$test_cmds" : ".*" 2>/dev/null` && - test "$len" -le "$max_cmd_len"; }; then - objlist="$objlist $obj" - else - # The command $test_cmds is almost too long, add a - # command to the queue. - if test "$k" -eq 1 ; then - # The first file doesn't have a previous command to add. 
- eval concat_cmds=\"$reload_cmds $objlist $last_robj\" - else - # All subsequent reloadable object files will link in - # the last one created. - eval concat_cmds=\"\$concat_cmds~$reload_cmds $objlist $last_robj\" - fi - last_robj=$output_objdir/$output_la-${k}.$objext - k=`expr $k + 1` - output=$output_objdir/$output_la-${k}.$objext - objlist=$obj - len=1 - fi - done - # Handle the remaining objects by creating one last - # reloadable object file. All subsequent reloadable object - # files will link in the last one created. - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ - eval concat_cmds=\"\${concat_cmds}$reload_cmds $objlist $last_robj\" - - if ${skipped_export-false}; then - $show "generating symbol list for \`$libname.la'" - export_symbols="$output_objdir/$libname.exp" - $run $rm $export_symbols - libobjs=$output - # Append the command to create the export file. - eval concat_cmds=\"\$concat_cmds~$export_symbols_cmds\" - fi - - # Set up a command to remove the reloadable object files - # after they are used. - i=0 - while test "$i" -lt "$k" - do - i=`expr $i + 1` - delfiles="$delfiles $output_objdir/$output_la-${i}.$objext" - done - - $echo "creating a temporary reloadable object file: $output" - - # Loop through the commands generated above and execute them. - save_ifs="$IFS"; IFS='~' - for cmd in $concat_cmds; do - IFS="$save_ifs" - $show "$cmd" - $run eval "$cmd" || exit $? - done - IFS="$save_ifs" - - libobjs=$output - # Restore the value of output. - output=$save_output - - if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then - eval libobjs=\"\$libobjs $whole_archive_flag_spec\" - fi - # Expand the library linking commands again to reset the - # value of $libobjs for piecewise linking. - - # Do each of the archive commands. - if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then - cmds=$module_expsym_cmds - else - cmds=$module_cmds - fi - else - if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then - cmds=$archive_expsym_cmds - else - cmds=$archive_cmds - fi - fi - - # Append the command to remove the reloadable object files - # to the just-reset $cmds. - eval cmds=\"\$cmds~\$rm $delfiles\" - fi - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || { - lt_exit=$? - - # Restore the uninstalled library and exit - if test "$mode" = relink; then - $run eval '(cd $output_objdir && $rm ${realname}T && $mv ${realname}U $realname)' - fi - - exit $lt_exit - } - done - IFS="$save_ifs" - - # Restore the uninstalled library and exit - if test "$mode" = relink; then - $run eval '(cd $output_objdir && $rm ${realname}T && $mv $realname ${realname}T && $mv "$realname"U $realname)' || exit $? - - if test -n "$convenience"; then - if test -z "$whole_archive_flag_spec"; then - $show "${rm}r $gentop" - $run ${rm}r "$gentop" - fi - fi - - exit $EXIT_SUCCESS - fi - - # Create links to the real library. - for linkname in $linknames; do - if test "$realname" != "$linkname"; then - $show "(cd $output_objdir && $rm $linkname && $LN_S $realname $linkname)" - $run eval '(cd $output_objdir && $rm $linkname && $LN_S $realname $linkname)' || exit $? - fi - done - - # If -module or -export-dynamic was specified, set the dlname. - if test "$module" = yes || test "$export_dynamic" = yes; then - # On all known operating systems, these are identical. 
- dlname="$soname" - fi - fi - ;; - - obj) - case " $deplibs" in - *\ -l* | *\ -L*) - $echo "$modename: warning: \`-l' and \`-L' are ignored for objects" 1>&2 ;; - esac - - if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then - $echo "$modename: warning: \`-dlopen' is ignored for objects" 1>&2 - fi - - if test -n "$rpath"; then - $echo "$modename: warning: \`-rpath' is ignored for objects" 1>&2 - fi - - if test -n "$xrpath"; then - $echo "$modename: warning: \`-R' is ignored for objects" 1>&2 - fi - - if test -n "$vinfo"; then - $echo "$modename: warning: \`-version-info' is ignored for objects" 1>&2 - fi - - if test -n "$release"; then - $echo "$modename: warning: \`-release' is ignored for objects" 1>&2 - fi - - case $output in - *.lo) - if test -n "$objs$old_deplibs"; then - $echo "$modename: cannot build library object \`$output' from non-libtool objects" 1>&2 - exit $EXIT_FAILURE - fi - libobj="$output" - obj=`$echo "X$output" | $Xsed -e "$lo2o"` - ;; - *) - libobj= - obj="$output" - ;; - esac - - # Delete the old objects. - $run $rm $obj $libobj - - # Objects from convenience libraries. This assumes - # single-version convenience libraries. Whenever we create - # different ones for PIC/non-PIC, this we'll have to duplicate - # the extraction. - reload_conv_objs= - gentop= - # reload_cmds runs $LD directly, so let us get rid of - # -Wl from whole_archive_flag_spec and hope we can get by with - # turning comma into space.. - wl= - - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec"; then - eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" - reload_conv_objs=$reload_objs\ `$echo "X$tmp_whole_archive_flags" | $Xsed -e 's|,| |g'` - else - gentop="$output_objdir/${obj}x" - generated="$generated $gentop" - - func_extract_archives $gentop $convenience - reload_conv_objs="$reload_objs $func_extract_archives_result" - fi - fi - - # Create the old-style object. - reload_objs="$objs$old_deplibs "`$echo "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}$'/d' -e '/\.lib$/d' -e "$lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test - - output="$obj" - cmds=$reload_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || exit $? - done - IFS="$save_ifs" - - # Exit if we aren't doing a library object file. - if test -z "$libobj"; then - if test -n "$gentop"; then - $show "${rm}r $gentop" - $run ${rm}r $gentop - fi - - exit $EXIT_SUCCESS - fi - - if test "$build_libtool_libs" != yes; then - if test -n "$gentop"; then - $show "${rm}r $gentop" - $run ${rm}r $gentop - fi - - # Create an invalid libtool object if no PIC, so that we don't - # accidentally link it into a program. - # $show "echo timestamp > $libobj" - # $run eval "echo timestamp > $libobj" || exit $? - exit $EXIT_SUCCESS - fi - - if test -n "$pic_flag" || test "$pic_mode" != default; then - # Only do commands if we really have different PIC objects. - reload_objs="$libobjs $reload_conv_objs" - output="$libobj" - cmds=$reload_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || exit $? 
- done - IFS="$save_ifs" - fi - - if test -n "$gentop"; then - $show "${rm}r $gentop" - $run ${rm}r $gentop - fi - - exit $EXIT_SUCCESS - ;; - - prog) - case $host in - *cygwin*) output=`$echo $output | ${SED} -e 's,.exe$,,;s,$,.exe,'` ;; - esac - if test -n "$vinfo"; then - $echo "$modename: warning: \`-version-info' is ignored for programs" 1>&2 - fi - - if test -n "$release"; then - $echo "$modename: warning: \`-release' is ignored for programs" 1>&2 - fi - - if test "$preload" = yes; then - if test "$dlopen_support" = unknown && test "$dlopen_self" = unknown && - test "$dlopen_self_static" = unknown; then - $echo "$modename: warning: \`AC_LIBTOOL_DLOPEN' not used. Assuming no dlopen support." - fi - fi - - case $host in - *-*-rhapsody* | *-*-darwin1.[012]) - # On Rhapsody replace the C library is the System framework - compile_deplibs=`$echo "X $compile_deplibs" | $Xsed -e 's/ -lc / -framework System /'` - finalize_deplibs=`$echo "X $finalize_deplibs" | $Xsed -e 's/ -lc / -framework System /'` - ;; - esac - - case $host in - *darwin*) - # Don't allow lazy linking, it breaks C++ global constructors - if test "$tagname" = CXX ; then - compile_command="$compile_command ${wl}-bind_at_load" - finalize_command="$finalize_command ${wl}-bind_at_load" - fi - ;; - esac - - - # move library search paths that coincide with paths to not yet - # installed libraries to the beginning of the library search list - new_libs= - for path in $notinst_path; do - case " $new_libs " in - *" -L$path/$objdir "*) ;; - *) - case " $compile_deplibs " in - *" -L$path/$objdir "*) - new_libs="$new_libs -L$path/$objdir" ;; - esac - ;; - esac - done - for deplib in $compile_deplibs; do - case $deplib in - -L*) - case " $new_libs " in - *" $deplib "*) ;; - *) new_libs="$new_libs $deplib" ;; - esac - ;; - *) new_libs="$new_libs $deplib" ;; - esac - done - compile_deplibs="$new_libs" - - - compile_command="$compile_command $compile_deplibs" - finalize_command="$finalize_command $finalize_deplibs" - - if test -n "$rpath$xrpath"; then - # If the user specified any rpath flags, then add them. - for libdir in $rpath $xrpath; do - # This is the magic to use -rpath. - case "$finalize_rpath " in - *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" ;; - esac - done - fi - - # Now hardcode the library paths - rpath= - hardcode_libdirs= - for libdir in $compile_rpath $finalize_rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" - ;; - esac - fi - else - eval flag=\"$hardcode_libdir_flag_spec\" - rpath="$rpath $flag" - fi - elif test -n "$runpath_var"; then - case "$perm_rpath " in - *" $libdir "*) ;; - *) perm_rpath="$perm_rpath $libdir" ;; - esac - fi - case $host in - *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*) - testbindir=`$echo "X$libdir" | $Xsed -e 's*/lib$*/bin*'` - case :$dllsearchpath: in - *":$libdir:"*) ;; - *) dllsearchpath="$dllsearchpath:$libdir";; - esac - case :$dllsearchpath: in - *":$testbindir:"*) ;; - *) dllsearchpath="$dllsearchpath:$testbindir";; - esac - ;; - esac - done - # Substitute the hardcoded libdirs into the rpath. 
- if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" - eval rpath=\" $hardcode_libdir_flag_spec\" - fi - compile_rpath="$rpath" - - rpath= - hardcode_libdirs= - for libdir in $finalize_rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" - else - # Just accumulate the unique libdirs. - case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" - ;; - esac - fi - else - eval flag=\"$hardcode_libdir_flag_spec\" - rpath="$rpath $flag" - fi - elif test -n "$runpath_var"; then - case "$finalize_perm_rpath " in - *" $libdir "*) ;; - *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;; - esac - fi - done - # Substitute the hardcoded libdirs into the rpath. - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" - eval rpath=\" $hardcode_libdir_flag_spec\" - fi - finalize_rpath="$rpath" - - if test -n "$libobjs" && test "$build_old_libs" = yes; then - # Transform all the library objects into standard objects. - compile_command=`$echo "X$compile_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` - finalize_command=`$echo "X$finalize_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` - fi - - dlsyms= - if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then - if test -n "$NM" && test -n "$global_symbol_pipe"; then - dlsyms="${outputname}S.c" - else - $echo "$modename: not configured to extract global symbols from dlpreopened files" 1>&2 - fi - fi - - if test -n "$dlsyms"; then - case $dlsyms in - "") ;; - *.c) - # Discover the nlist of each of the dlfiles. - nlist="$output_objdir/${outputname}.nm" - - $show "$rm $nlist ${nlist}S ${nlist}T" - $run $rm "$nlist" "${nlist}S" "${nlist}T" - - # Parse the name list into a source file. - $show "creating $output_objdir/$dlsyms" - - test -z "$run" && $echo > "$output_objdir/$dlsyms" "\ -/* $dlsyms - symbol resolution table for \`$outputname' dlsym emulation. */ -/* Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP */ - -#ifdef __cplusplus -extern \"C\" { -#endif - -/* Prevent the only kind of declaration conflicts we can make. */ -#define lt_preloaded_symbols some_other_symbol - -/* External symbol declarations for the compiler. */\ -" - - if test "$dlself" = yes; then - $show "generating symbol list for \`$output'" - - test -z "$run" && $echo ': @PROGRAM@ ' > "$nlist" - - # Add our own program objects to the symbol list. 
- progfiles=`$echo "X$objs$old_deplibs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP` - for arg in $progfiles; do - $show "extracting global C symbols from \`$arg'" - $run eval "$NM $arg | $global_symbol_pipe >> '$nlist'" - done - - if test -n "$exclude_expsyms"; then - $run eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T' - $run eval '$mv "$nlist"T "$nlist"' - fi - - if test -n "$export_symbols_regex"; then - $run eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T' - $run eval '$mv "$nlist"T "$nlist"' - fi - - # Prepare the list of exported symbols - if test -z "$export_symbols"; then - export_symbols="$output_objdir/$outputname.exp" - $run $rm $export_symbols - $run eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' - case $host in - *cygwin* | *mingw* ) - $run eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' - $run eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"' - ;; - esac - else - $run eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' - $run eval 'grep -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' - $run eval 'mv "$nlist"T "$nlist"' - case $host in - *cygwin* | *mingw* ) - $run eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' - $run eval 'cat "$nlist" >> "$output_objdir/$outputname.def"' - ;; - esac - fi - fi - - for arg in $dlprefiles; do - $show "extracting global C symbols from \`$arg'" - name=`$echo "$arg" | ${SED} -e 's%^.*/%%'` - $run eval '$echo ": $name " >> "$nlist"' - $run eval "$NM $arg | $global_symbol_pipe >> '$nlist'" - done - - if test -z "$run"; then - # Make sure we have at least an empty file. - test -f "$nlist" || : > "$nlist" - - if test -n "$exclude_expsyms"; then - $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T - $mv "$nlist"T "$nlist" - fi - - # Try sorting and uniquifying the output. - if grep -v "^: " < "$nlist" | - if sort -k 3 </dev/null >/dev/null 2>&1; then - sort -k 3 - else - sort +2 - fi | - uniq > "$nlist"S; then - : - else - grep -v "^: " < "$nlist" > "$nlist"S - fi - - if test -f "$nlist"S; then - eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$dlsyms"' - else - $echo '/* NONE */' >> "$output_objdir/$dlsyms" - fi - - $echo >> "$output_objdir/$dlsyms" "\ - -#undef lt_preloaded_symbols - -#if defined (__STDC__) && __STDC__ -# define lt_ptr void * -#else -# define lt_ptr char * -# define const -#endif - -/* The mapping between symbol names and symbols. */ -" - - case $host in - *cygwin* | *mingw* ) - $echo >> "$output_objdir/$dlsyms" "\ -/* DATA imports from DLLs on WIN32 can't be const, because - runtime relocations are performed -- see ld's documentation - on pseudo-relocs */ -struct { -" - ;; - * ) - $echo >> "$output_objdir/$dlsyms" "\ -const struct { -" - ;; - esac - - - $echo >> "$output_objdir/$dlsyms" "\ - const char *name; - lt_ptr address; -} -lt_preloaded_symbols[] = -{\ -" - - eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$dlsyms" - - $echo >> "$output_objdir/$dlsyms" "\ - {0, (lt_ptr) 0} -}; - -/* This works around a problem in FreeBSD linker */ -#ifdef FREEBSD_WORKAROUND -static const void *lt_preloaded_setup() { - return lt_preloaded_symbols; -} -#endif - -#ifdef __cplusplus -} -#endif\ -" - fi - - pic_flag_for_symtable= - case $host in - # compiling the symbol table file with pic_flag works around - # a FreeBSD bug that causes programs to crash when -lm is - # linked before any other PIC object. 
But we must not use - # pic_flag when linking with -static. The problem exists in - # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1. - *-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*) - case "$compile_command " in - *" -static "*) ;; - *) pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND";; - esac;; - *-*-hpux*) - case "$compile_command " in - *" -static "*) ;; - *) pic_flag_for_symtable=" $pic_flag";; - esac - esac - - # Now compile the dynamic symbol file. - $show "(cd $output_objdir && $LTCC $LTCFLAGS -c$no_builtin_flag$pic_flag_for_symtable \"$dlsyms\")" - $run eval '(cd $output_objdir && $LTCC $LTCFLAGS -c$no_builtin_flag$pic_flag_for_symtable "$dlsyms")' || exit $? - - # Clean up the generated files. - $show "$rm $output_objdir/$dlsyms $nlist ${nlist}S ${nlist}T" - $run $rm "$output_objdir/$dlsyms" "$nlist" "${nlist}S" "${nlist}T" - - # Transform the symbol file into the correct name. - case $host in - *cygwin* | *mingw* ) - if test -f "$output_objdir/${outputname}.def" ; then - compile_command=`$echo "X$compile_command" | $SP2NL | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}.def $output_objdir/${outputname}S.${objext}%" | $NL2SP` - finalize_command=`$echo "X$finalize_command" | $SP2NL | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}.def $output_objdir/${outputname}S.${objext}%" | $NL2SP` - else - compile_command=`$echo "X$compile_command" | $SP2NL | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%" | $NL2SP` - finalize_command=`$echo "X$finalize_command" | $SP2NL | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%" | $NL2SP` - fi - ;; - * ) - compile_command=`$echo "X$compile_command" | $SP2NL | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%" | $NL2SP` - finalize_command=`$echo "X$finalize_command" | $SP2NL | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%" | $NL2SP` - ;; - esac - ;; - *) - $echo "$modename: unknown suffix for \`$dlsyms'" 1>&2 - exit $EXIT_FAILURE - ;; - esac - else - # We keep going just in case the user didn't refer to - # lt_preloaded_symbols. The linker will fail if global_symbol_pipe - # really was required. - - # Nullify the symbol file. - compile_command=`$echo "X$compile_command" | $SP2NL | $Xsed -e "s% @SYMFILE@%%" | $NL2SP` - finalize_command=`$echo "X$finalize_command" | $SP2NL | $Xsed -e "s% @SYMFILE@%%" | $NL2SP` - fi - - if test "$need_relink" = no || test "$build_libtool_libs" != yes; then - # Replace the output file specification. - compile_command=`$echo "X$compile_command" | $SP2NL | $Xsed -e 's%@OUTPUT@%'"$output"'%g' | $NL2SP` - link_command="$compile_command$compile_rpath" - - # We have no uninstalled library dependencies, so finalize right now. - $show "$link_command" - $run eval "$link_command" - exit_status=$? - - # Delete the generated files. - if test -n "$dlsyms"; then - $show "$rm $output_objdir/${outputname}S.${objext}" - $run $rm "$output_objdir/${outputname}S.${objext}" - fi - - exit $exit_status - fi - - if test -n "$shlibpath_var"; then - # We should set the shlibpath_var - rpath= - for dir in $temp_rpath; do - case $dir in - [\\/]* | [A-Za-z]:[\\/]*) - # Absolute path. - rpath="$rpath$dir:" - ;; - *) - # Relative path: add a thisdir entry. 
- rpath="$rpath\$thisdir/$dir:" - ;; - esac - done - temp_rpath="$rpath" - fi - - if test -n "$compile_shlibpath$finalize_shlibpath"; then - compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" - fi - if test -n "$finalize_shlibpath"; then - finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command" - fi - - compile_var= - finalize_var= - if test -n "$runpath_var"; then - if test -n "$perm_rpath"; then - # We should set the runpath_var. - rpath= - for dir in $perm_rpath; do - rpath="$rpath$dir:" - done - compile_var="$runpath_var=\"$rpath\$$runpath_var\" " - fi - if test -n "$finalize_perm_rpath"; then - # We should set the runpath_var. - rpath= - for dir in $finalize_perm_rpath; do - rpath="$rpath$dir:" - done - finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " - fi - fi - - if test "$no_install" = yes; then - # We don't need to create a wrapper script. - link_command="$compile_var$compile_command$compile_rpath" - # Replace the output file specification. - link_command=`$echo "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'` - # Delete the old output file. - $run $rm $output - # Link the executable and exit - $show "$link_command" - $run eval "$link_command" || exit $? - exit $EXIT_SUCCESS - fi - - if test "$hardcode_action" = relink; then - # Fast installation is not supported - link_command="$compile_var$compile_command$compile_rpath" - relink_command="$finalize_var$finalize_command$finalize_rpath" - - $echo "$modename: warning: this platform does not like uninstalled shared libraries" 1>&2 - $echo "$modename: \`$output' will be relinked during installation" 1>&2 - else - if test "$fast_install" != no; then - link_command="$finalize_var$compile_command$finalize_rpath" - if test "$fast_install" = yes; then - relink_command=`$echo "X$compile_var$compile_command$compile_rpath" | $SP2NL | $Xsed -e 's%@OUTPUT@%\$progdir/\$file%g' | $NL2SP` - else - # fast_install is set to needless - relink_command= - fi - else - link_command="$compile_var$compile_command$compile_rpath" - relink_command="$finalize_var$finalize_command$finalize_rpath" - fi - fi - - # Replace the output file specification. - link_command=`$echo "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` - - # Delete the old output files. - $run $rm $output $output_objdir/$outputname $output_objdir/lt-$outputname - - $show "$link_command" - $run eval "$link_command" || exit $? - - # Now create the wrapper script. - $show "creating $output" - - # Quote the relink command for shipping. - if test -n "$relink_command"; then - # Preserve any variables that may affect compiler behavior - for var in $variables_saved_for_relink; do - if eval test -z \"\${$var+set}\"; then - relink_command="{ test -z \"\${$var+set}\" || unset $var || { $var=; export $var; }; }; $relink_command" - elif eval var_value=\$$var; test -z "$var_value"; then - relink_command="$var=; export $var; $relink_command" - else - var_value=`$echo "X$var_value" | $Xsed -e "$sed_quote_subst"` - relink_command="$var=\"$var_value\"; export $var; $relink_command" - fi - done - relink_command="(cd `pwd`; $relink_command)" - relink_command=`$echo "X$relink_command" | $SP2NL | $Xsed -e "$sed_quote_subst" | $NL2SP` - fi - - # Quote $echo for shipping. 
- if test "X$echo" = "X$SHELL $progpath --fallback-echo"; then - case $progpath in - [\\/]* | [A-Za-z]:[\\/]*) qecho="$SHELL $progpath --fallback-echo";; - *) qecho="$SHELL `pwd`/$progpath --fallback-echo";; - esac - qecho=`$echo "X$qecho" | $Xsed -e "$sed_quote_subst"` - else - qecho=`$echo "X$echo" | $Xsed -e "$sed_quote_subst"` - fi - - # Only actually do things if our run command is non-null. - if test -z "$run"; then - # win32 will think the script is a binary if it has - # a .exe suffix, so we strip it off here. - case $output in - *.exe) output=`$echo $output|${SED} 's,.exe$,,'` ;; - esac - # test for cygwin because mv fails w/o .exe extensions - case $host in - *cygwin*) - exeext=.exe - outputname=`$echo $outputname|${SED} 's,.exe$,,'` ;; - *) exeext= ;; - esac - case $host in - *cygwin* | *mingw* ) - output_name=`basename $output` - output_path=`dirname $output` - cwrappersource="$output_path/$objdir/lt-$output_name.c" - cwrapper="$output_path/$output_name.exe" - $rm $cwrappersource $cwrapper - trap "$rm $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15 - - cat > $cwrappersource <<EOF - -/* $cwrappersource - temporary wrapper executable for $objdir/$outputname - Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP - - The $output program cannot be directly executed until all the libtool - libraries that it depends on are installed. - - This wrapper executable should never be moved out of the build directory. - If it is, it will not operate correctly. - - Currently, it simply execs the wrapper *script* "/bin/sh $output", - but could eventually absorb all of the scripts functionality and - exec $objdir/$outputname directly. -*/ -EOF - cat >> $cwrappersource<<"EOF" -#include <stdio.h> -#include <stdlib.h> -#include <unistd.h> -#include <malloc.h> -#include <stdarg.h> -#include <assert.h> -#include <string.h> -#include <ctype.h> -#include <sys/stat.h> - -#if defined(PATH_MAX) -# define LT_PATHMAX PATH_MAX -#elif defined(MAXPATHLEN) -# define LT_PATHMAX MAXPATHLEN -#else -# define LT_PATHMAX 1024 -#endif - -#ifndef DIR_SEPARATOR -# define DIR_SEPARATOR '/' -# define PATH_SEPARATOR ':' -#endif - -#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \ - defined (__OS2__) -# define HAVE_DOS_BASED_FILE_SYSTEM -# ifndef DIR_SEPARATOR_2 -# define DIR_SEPARATOR_2 '\\' -# endif -# ifndef PATH_SEPARATOR_2 -# define PATH_SEPARATOR_2 ';' -# endif -#endif - -#ifndef DIR_SEPARATOR_2 -# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR) -#else /* DIR_SEPARATOR_2 */ -# define IS_DIR_SEPARATOR(ch) \ - (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2)) -#endif /* DIR_SEPARATOR_2 */ - -#ifndef PATH_SEPARATOR_2 -# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR) -#else /* PATH_SEPARATOR_2 */ -# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2) -#endif /* PATH_SEPARATOR_2 */ - -#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type))) -#define XFREE(stale) do { \ - if (stale) { free ((void *) stale); stale = 0; } \ -} while (0) - -/* -DDEBUG is fairly common in CFLAGS. */ -#undef DEBUG -#if defined DEBUGWRAPPER -# define DEBUG(format, ...) fprintf(stderr, format, __VA_ARGS__) -#else -# define DEBUG(format, ...) 
-#endif - -const char *program_name = NULL; - -void * xmalloc (size_t num); -char * xstrdup (const char *string); -const char * base_name (const char *name); -char * find_executable(const char *wrapper); -int check_executable(const char *path); -char * strendzap(char *str, const char *pat); -void lt_fatal (const char *message, ...); - -int -main (int argc, char *argv[]) -{ - char **newargz; - int i; - - program_name = (char *) xstrdup (base_name (argv[0])); - DEBUG("(main) argv[0] : %s\n",argv[0]); - DEBUG("(main) program_name : %s\n",program_name); - newargz = XMALLOC(char *, argc+2); -EOF - - cat >> $cwrappersource <<EOF - newargz[0] = (char *) xstrdup("$SHELL"); -EOF - - cat >> $cwrappersource <<"EOF" - newargz[1] = find_executable(argv[0]); - if (newargz[1] == NULL) - lt_fatal("Couldn't find %s", argv[0]); - DEBUG("(main) found exe at : %s\n",newargz[1]); - /* we know the script has the same name, without the .exe */ - /* so make sure newargz[1] doesn't end in .exe */ - strendzap(newargz[1],".exe"); - for (i = 1; i < argc; i++) - newargz[i+1] = xstrdup(argv[i]); - newargz[argc+1] = NULL; - - for (i=0; i<argc+1; i++) - { - DEBUG("(main) newargz[%d] : %s\n",i,newargz[i]); - ; - } - -EOF - - case $host_os in - mingw*) - cat >> $cwrappersource <<EOF - execv("$SHELL",(char const **)newargz); -EOF - ;; - *) - cat >> $cwrappersource <<EOF - execv("$SHELL",newargz); -EOF - ;; - esac - - cat >> $cwrappersource <<"EOF" - return 127; -} - -void * -xmalloc (size_t num) -{ - void * p = (void *) malloc (num); - if (!p) - lt_fatal ("Memory exhausted"); - - return p; -} - -char * -xstrdup (const char *string) -{ - return string ? strcpy ((char *) xmalloc (strlen (string) + 1), string) : NULL -; -} - -const char * -base_name (const char *name) -{ - const char *base; - -#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - /* Skip over the disk name in MSDOS pathnames. */ - if (isalpha ((unsigned char)name[0]) && name[1] == ':') - name += 2; -#endif - - for (base = name; *name; name++) - if (IS_DIR_SEPARATOR (*name)) - base = name + 1; - return base; -} - -int -check_executable(const char * path) -{ - struct stat st; - - DEBUG("(check_executable) : %s\n", path ? (*path ? path : "EMPTY!") : "NULL!"); - if ((!path) || (!*path)) - return 0; - - if ((stat (path, &st) >= 0) && - ( - /* MinGW & native WIN32 do not support S_IXOTH or S_IXGRP */ -#if defined (S_IXOTH) - ((st.st_mode & S_IXOTH) == S_IXOTH) || -#endif -#if defined (S_IXGRP) - ((st.st_mode & S_IXGRP) == S_IXGRP) || -#endif - ((st.st_mode & S_IXUSR) == S_IXUSR)) - ) - return 1; - else - return 0; -} - -/* Searches for the full path of the wrapper. Returns - newly allocated full path name if found, NULL otherwise */ -char * -find_executable (const char* wrapper) -{ - int has_slash = 0; - const char* p; - const char* p_next; - /* static buffer for getcwd */ - char tmp[LT_PATHMAX + 1]; - int tmp_len; - char* concat_name; - - DEBUG("(find_executable) : %s\n", wrapper ? (*wrapper ? wrapper : "EMPTY!") : "NULL!"); - - if ((wrapper == NULL) || (*wrapper == '\0')) - return NULL; - - /* Absolute path? 
*/ -#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - if (isalpha ((unsigned char)wrapper[0]) && wrapper[1] == ':') - { - concat_name = xstrdup (wrapper); - if (check_executable(concat_name)) - return concat_name; - XFREE(concat_name); - } - else - { -#endif - if (IS_DIR_SEPARATOR (wrapper[0])) - { - concat_name = xstrdup (wrapper); - if (check_executable(concat_name)) - return concat_name; - XFREE(concat_name); - } -#if defined (HAVE_DOS_BASED_FILE_SYSTEM) - } -#endif - - for (p = wrapper; *p; p++) - if (*p == '/') - { - has_slash = 1; - break; - } - if (!has_slash) - { - /* no slashes; search PATH */ - const char* path = getenv ("PATH"); - if (path != NULL) - { - for (p = path; *p; p = p_next) - { - const char* q; - size_t p_len; - for (q = p; *q; q++) - if (IS_PATH_SEPARATOR(*q)) - break; - p_len = q - p; - p_next = (*q == '\0' ? q : q + 1); - if (p_len == 0) - { - /* empty path: current directory */ - if (getcwd (tmp, LT_PATHMAX) == NULL) - lt_fatal ("getcwd failed"); - tmp_len = strlen(tmp); - concat_name = XMALLOC(char, tmp_len + 1 + strlen(wrapper) + 1); - memcpy (concat_name, tmp, tmp_len); - concat_name[tmp_len] = '/'; - strcpy (concat_name + tmp_len + 1, wrapper); - } - else - { - concat_name = XMALLOC(char, p_len + 1 + strlen(wrapper) + 1); - memcpy (concat_name, p, p_len); - concat_name[p_len] = '/'; - strcpy (concat_name + p_len + 1, wrapper); - } - if (check_executable(concat_name)) - return concat_name; - XFREE(concat_name); - } - } - /* not found in PATH; assume curdir */ - } - /* Relative path | not found in path: prepend cwd */ - if (getcwd (tmp, LT_PATHMAX) == NULL) - lt_fatal ("getcwd failed"); - tmp_len = strlen(tmp); - concat_name = XMALLOC(char, tmp_len + 1 + strlen(wrapper) + 1); - memcpy (concat_name, tmp, tmp_len); - concat_name[tmp_len] = '/'; - strcpy (concat_name + tmp_len + 1, wrapper); - - if (check_executable(concat_name)) - return concat_name; - XFREE(concat_name); - return NULL; -} - -char * -strendzap(char *str, const char *pat) -{ - size_t len, patlen; - - assert(str != NULL); - assert(pat != NULL); - - len = strlen(str); - patlen = strlen(pat); - - if (patlen <= len) - { - str += len - patlen; - if (strcmp(str, pat) == 0) - *str = '\0'; - } - return str; -} - -static void -lt_error_core (int exit_status, const char * mode, - const char * message, va_list ap) -{ - fprintf (stderr, "%s: %s: ", program_name, mode); - vfprintf (stderr, message, ap); - fprintf (stderr, ".\n"); - - if (exit_status >= 0) - exit (exit_status); -} - -void -lt_fatal (const char *message, ...) -{ - va_list ap; - va_start (ap, message); - lt_error_core (EXIT_FAILURE, "FATAL", message, ap); - va_end (ap); -} -EOF - # we should really use a build-platform specific compiler - # here, but OTOH, the wrappers (shell script and this C one) - # are only useful if you want to execute the "real" binary. - # Since the "real" binary is built for $host, then this - # wrapper might as well be built for $host, too. - $run $LTCC $LTCFLAGS -s -o $cwrapper $cwrappersource - ;; - esac - $rm $output - trap "$rm $output; exit $EXIT_FAILURE" 1 2 15 - - $echo > $output "\ -#! $SHELL - -# $output - temporary wrapper script for $objdir/$outputname -# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP -# -# The $output program cannot be directly executed until all the libtool -# libraries that it depends on are installed. -# -# This wrapper script should never be moved out of the build directory. -# If it is, it will not operate correctly. - -# Sed substitution that helps us do robust quoting. 
It backslashifies -# metacharacters that are still active within double-quoted strings. -Xsed='${SED} -e 1s/^X//' -sed_quote_subst='$sed_quote_subst' - -# Be Bourne compatible (taken from Autoconf:_AS_BOURNE_COMPATIBLE). -if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then - emulate sh - NULLCMD=: - # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which - # is contrary to our usage. Disable this feature. - alias -g '\${1+\"\$@\"}'='\"\$@\"' - setopt NO_GLOB_SUBST -else - case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac -fi -BIN_SH=xpg4; export BIN_SH # for Tru64 -DUALCASE=1; export DUALCASE # for MKS sh - -# The HP-UX ksh and POSIX shell print the target directory to stdout -# if CDPATH is set. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -relink_command=\"$relink_command\" - -# This environment variable determines our operation mode. -if test \"\$libtool_install_magic\" = \"$magic\"; then - # install mode needs the following variable: - notinst_deplibs='$notinst_deplibs' -else - # When we are sourced in execute mode, \$file and \$echo are already set. - if test \"\$libtool_execute_magic\" != \"$magic\"; then - echo=\"$qecho\" - file=\"\$0\" - # Make sure echo works. - if test \"X\$1\" = X--no-reexec; then - # Discard the --no-reexec flag, and continue. - shift - elif test \"X\`(\$echo '\t') 2>/dev/null\`\" = 'X\t'; then - # Yippee, \$echo works! - : - else - # Restart under the correct shell, and then maybe \$echo will work. - exec $SHELL \"\$0\" --no-reexec \${1+\"\$@\"} - fi - fi\ -" - $echo >> $output "\ - - # Find the directory that this script lives in. - thisdir=\`\$echo \"X\$file\" | \$Xsed -e 's%/[^/]*$%%'\` - test \"x\$thisdir\" = \"x\$file\" && thisdir=. - - # Follow symbolic links until we get to the real thisdir. - file=\`ls -ld \"\$file\" | ${SED} -n 's/.*-> //p'\` - while test -n \"\$file\"; do - destdir=\`\$echo \"X\$file\" | \$Xsed -e 's%/[^/]*\$%%'\` - - # If there was a directory component, then change thisdir. - if test \"x\$destdir\" != \"x\$file\"; then - case \"\$destdir\" in - [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;; - *) thisdir=\"\$thisdir/\$destdir\" ;; - esac - fi - - file=\`\$echo \"X\$file\" | \$Xsed -e 's%^.*/%%'\` - file=\`ls -ld \"\$thisdir/\$file\" | ${SED} -n 's/.*-> //p'\` - done - - # Try to get the absolute directory name. - absdir=\`cd \"\$thisdir\" && pwd\` - test -n \"\$absdir\" && thisdir=\"\$absdir\" -" - - if test "$fast_install" = yes; then - $echo >> $output "\ - program=lt-'$outputname'$exeext - progdir=\"\$thisdir/$objdir\" - - if test ! -f \"\$progdir/\$program\" || \\ - { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\ - test \"X\$file\" != \"X\$progdir/\$program\"; }; then - - file=\"\$\$-\$program\" - - if test ! 
-d \"\$progdir\"; then - $mkdir \"\$progdir\" - else - $rm \"\$progdir/\$file\" - fi" - - $echo >> $output "\ - - # relink executable if necessary - if test -n \"\$relink_command\"; then - if relink_command_output=\`eval \$relink_command 2>&1\`; then : - else - $echo \"\$relink_command_output\" >&2 - $rm \"\$progdir/\$file\" - exit $EXIT_FAILURE - fi - fi - - $mv \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null || - { $rm \"\$progdir/\$program\"; - $mv \"\$progdir/\$file\" \"\$progdir/\$program\"; } - $rm \"\$progdir/\$file\" - fi" - else - $echo >> $output "\ - program='$outputname' - progdir=\"\$thisdir/$objdir\" -" - fi - - $echo >> $output "\ - - if test -f \"\$progdir/\$program\"; then" - - # Export our shlibpath_var if we have one. - if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then - $echo >> $output "\ - # Add our own library path to $shlibpath_var - $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" - - # Some systems cannot cope with colon-terminated $shlibpath_var - # The second colon is a workaround for a bug in BeOS R4 sed - $shlibpath_var=\`\$echo \"X\$$shlibpath_var\" | \$Xsed -e 's/::*\$//'\` - - export $shlibpath_var -" - fi - - # fixup the dll searchpath if we need to. - if test -n "$dllsearchpath"; then - $echo >> $output "\ - # Add the dll search path components to the executable PATH - PATH=$dllsearchpath:\$PATH -" - fi - - $echo >> $output "\ - if test \"\$libtool_execute_magic\" != \"$magic\"; then - # Run the actual program with our arguments. -" - case $host in - # Backslashes separate directories on plain windows - *-*-mingw | *-*-os2*) - $echo >> $output "\ - exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} -" - ;; - - *) - $echo >> $output "\ - exec \"\$progdir/\$program\" \${1+\"\$@\"} -" - ;; - esac - $echo >> $output "\ - \$echo \"\$0: cannot exec \$program \$*\" - exit $EXIT_FAILURE - fi - else - # The program doesn't exist. - \$echo \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2 - \$echo \"This script is just a wrapper for \$program.\" 1>&2 - $echo \"See the $PACKAGE documentation for more information.\" 1>&2 - exit $EXIT_FAILURE - fi -fi\ -" - chmod +x $output - fi - exit $EXIT_SUCCESS - ;; - esac - - # See if we need to build an old-fashioned archive. - for oldlib in $oldlibs; do - - if test "$build_libtool_libs" = convenience; then - oldobjs="$libobjs_save" - addlibs="$convenience" - build_libtool_libs=no - else - if test "$build_libtool_libs" = module; then - oldobjs="$libobjs_save" - build_libtool_libs=no - else - oldobjs="$old_deplibs $non_pic_objects" - fi - addlibs="$old_convenience" - fi - - if test -n "$addlibs"; then - gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" - - func_extract_archives $gentop $addlibs - oldobjs="$oldobjs $func_extract_archives_result" - fi - - # Do each command in the archive commands. - if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then - cmds=$old_archive_from_new_cmds - else - # POSIX demands no paths to be encoded in archives. We have - # to avoid creating archives with duplicate basenames if we - # might have to extract them afterwards, e.g., when creating a - # static archive out of a convenience library, or when linking - # the entirety of a libtool archive into another (currently - # not supported by libtool). 
- if (for obj in $oldobjs - do - $echo "X$obj" | $Xsed -e 's%^.*/%%' - done | sort | sort -uc >/dev/null 2>&1); then - : - else - $echo "copying selected object files to avoid basename conflicts..." - - if test -z "$gentop"; then - gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" - - $show "${rm}r $gentop" - $run ${rm}r "$gentop" - $show "$mkdir $gentop" - $run $mkdir "$gentop" - exit_status=$? - if test "$exit_status" -ne 0 && test ! -d "$gentop"; then - exit $exit_status - fi - fi - - save_oldobjs=$oldobjs - oldobjs= - counter=1 - for obj in $save_oldobjs - do - objbase=`$echo "X$obj" | $Xsed -e 's%^.*/%%'` - case " $oldobjs " in - " ") oldobjs=$obj ;; - *[\ /]"$objbase "*) - while :; do - # Make sure we don't pick an alternate name that also - # overlaps. - newobj=lt$counter-$objbase - counter=`expr $counter + 1` - case " $oldobjs " in - *[\ /]"$newobj "*) ;; - *) if test ! -f "$gentop/$newobj"; then break; fi ;; - esac - done - $show "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" - $run ln "$obj" "$gentop/$newobj" || - $run cp "$obj" "$gentop/$newobj" - oldobjs="$oldobjs $gentop/$newobj" - ;; - *) oldobjs="$oldobjs $obj" ;; - esac - done - fi - - eval cmds=\"$old_archive_cmds\" - - if len=`expr "X$cmds" : ".*"` && - test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then - cmds=$old_archive_cmds - else - # the command line is too long to link in one step, link in parts - $echo "using piecewise archive linking..." - save_RANLIB=$RANLIB - RANLIB=: - objlist= - concat_cmds= - save_oldobjs=$oldobjs - - # Is there a better way of finding the last object in the list? - for obj in $save_oldobjs - do - last_oldobj=$obj - done - for obj in $save_oldobjs - do - oldobjs="$objlist $obj" - objlist="$objlist $obj" - eval test_cmds=\"$old_archive_cmds\" - if len=`expr "X$test_cmds" : ".*" 2>/dev/null` && - test "$len" -le "$max_cmd_len"; then - : - else - # the above command should be used before it gets too long - oldobjs=$objlist - if test "$obj" = "$last_oldobj" ; then - RANLIB=$save_RANLIB - fi - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ - eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" - objlist= - fi - done - RANLIB=$save_RANLIB - oldobjs=$objlist - if test "X$oldobjs" = "X" ; then - eval cmds=\"\$concat_cmds\" - else - eval cmds=\"\$concat_cmds~\$old_archive_cmds\" - fi - fi - fi - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - eval cmd=\"$cmd\" - IFS="$save_ifs" - $show "$cmd" - $run eval "$cmd" || exit $? - done - IFS="$save_ifs" - done - - if test -n "$generated"; then - $show "${rm}r$generated" - $run ${rm}r$generated - fi - - # Now create the libtool archive. - case $output in - *.la) - old_library= - test "$build_old_libs" = yes && old_library="$libname.$libext" - $show "creating $output" - - # Preserve any variables that may affect compiler behavior - for var in $variables_saved_for_relink; do - if eval test -z \"\${$var+set}\"; then - relink_command="{ test -z \"\${$var+set}\" || unset $var || { $var=; export $var; }; }; $relink_command" - elif eval var_value=\$$var; test -z "$var_value"; then - relink_command="$var=; export $var; $relink_command" - else - var_value=`$echo "X$var_value" | $Xsed -e "$sed_quote_subst"` - relink_command="$var=\"$var_value\"; export $var; $relink_command" - fi - done - # Quote the link command for shipping. 
- relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" - relink_command=`$echo "X$relink_command" | $SP2NL | $Xsed -e "$sed_quote_subst" | $NL2SP` - if test "$hardcode_automatic" = yes ; then - relink_command= - fi - - - # Only create the output if not a dry run. - if test -z "$run"; then - for installed in no yes; do - if test "$installed" = yes; then - if test -z "$install_libdir"; then - break - fi - output="$output_objdir/$outputname"i - # Replace all uninstalled libtool libraries with the installed ones - newdependency_libs= - for deplib in $dependency_libs; do - case $deplib in - *.la) - name=`$echo "X$deplib" | $Xsed -e 's%^.*/%%'` - eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` - if test -z "$libdir"; then - $echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2 - exit $EXIT_FAILURE - fi - newdependency_libs="$newdependency_libs $libdir/$name" - ;; - *) newdependency_libs="$newdependency_libs $deplib" ;; - esac - done - dependency_libs="$newdependency_libs" - newdlfiles= - for lib in $dlfiles; do - name=`$echo "X$lib" | $Xsed -e 's%^.*/%%'` - eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` - if test -z "$libdir"; then - $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 - exit $EXIT_FAILURE - fi - newdlfiles="$newdlfiles $libdir/$name" - done - dlfiles="$newdlfiles" - newdlprefiles= - for lib in $dlprefiles; do - name=`$echo "X$lib" | $Xsed -e 's%^.*/%%'` - eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` - if test -z "$libdir"; then - $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 - exit $EXIT_FAILURE - fi - newdlprefiles="$newdlprefiles $libdir/$name" - done - dlprefiles="$newdlprefiles" - else - newdlfiles= - for lib in $dlfiles; do - case $lib in - [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac - newdlfiles="$newdlfiles $abs" - done - dlfiles="$newdlfiles" - newdlprefiles= - for lib in $dlprefiles; do - case $lib in - [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac - newdlprefiles="$newdlprefiles $abs" - done - dlprefiles="$newdlprefiles" - fi - $rm $output - # place dlname in correct position for cygwin - tdlname=$dlname - case $host,$output,$installed,$module,$dlname in - *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;; - esac - $echo > $output "\ -# $outputname - a libtool library file -# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP -# -# Please DO NOT delete this file! -# It is necessary for linking the library. - -# The name that we can dlopen(3). -dlname='$tdlname' - -# Names of this library. -library_names='$library_names' - -# The name of the static archive. -old_library='$old_library' - -# Libraries that this one depends upon. -dependency_libs='$dependency_libs' - -# Version information for $libname. -current=$current -age=$age -revision=$revision - -# Is this an already installed library? -installed=$installed - -# Should we warn about portability when linking against -modules? -shouldnotlink=$module - -# Files to dlopen/dlpreopen -dlopen='$dlfiles' -dlpreopen='$dlprefiles' - -# Directory that this library needs to be installed in: -libdir='$install_libdir'" - if test "$installed" = no && test "$need_relink" = yes; then - $echo >> $output "\ -relink_command=\"$relink_command\"" - fi - done - fi - - # Do a symbolic link so that the libtool archive can be found in - # LD_LIBRARY_PATH before the program is installed. 
- $show "(cd $output_objdir && $rm $outputname && $LN_S ../$outputname $outputname)" - $run eval '(cd $output_objdir && $rm $outputname && $LN_S ../$outputname $outputname)' || exit $? - ;; - esac - exit $EXIT_SUCCESS - ;; - - # libtool install mode - install) - modename="$modename: install" - - # There may be an optional sh(1) argument at the beginning of - # install_prog (especially on Windows NT). - if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh || - # Allow the use of GNU shtool's install command. - $echo "X$nonopt" | grep shtool > /dev/null; then - # Aesthetically quote it. - arg=`$echo "X$nonopt" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - install_prog="$arg " - arg="$1" - shift - else - install_prog= - arg=$nonopt - fi - - # The real first argument should be the name of the installation program. - # Aesthetically quote it. - arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - install_prog="$install_prog$arg" - - # We need to accept at least all the BSD install flags. - dest= - files= - opts= - prev= - install_type= - isdir=no - stripme= - for arg - do - if test -n "$dest"; then - files="$files $dest" - dest=$arg - continue - fi - - case $arg in - -d) isdir=yes ;; - -f) - case " $install_prog " in - *[\\\ /]cp\ *) ;; - *) prev=$arg ;; - esac - ;; - -g | -m | -o) prev=$arg ;; - -s) - stripme=" -s" - continue - ;; - -*) - ;; - *) - # If the previous option needed an argument, then skip it. - if test -n "$prev"; then - prev= - else - dest=$arg - continue - fi - ;; - esac - - # Aesthetically quote the argument. - arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"` - case $arg in - *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") - arg="\"$arg\"" - ;; - esac - install_prog="$install_prog $arg" - done - - if test -z "$install_prog"; then - $echo "$modename: you must specify an install program" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - if test -n "$prev"; then - $echo "$modename: the \`$prev' option requires an argument" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - if test -z "$files"; then - if test -z "$dest"; then - $echo "$modename: no file or destination specified" 1>&2 - else - $echo "$modename: you must specify a destination" 1>&2 - fi - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - # Strip any trailing slash from the destination. - dest=`$echo "X$dest" | $Xsed -e 's%/$%%'` - - # Check to see that the destination is a directory. - test -d "$dest" && isdir=yes - if test "$isdir" = yes; then - destdir="$dest" - destname= - else - destdir=`$echo "X$dest" | $Xsed -e 's%/[^/]*$%%'` - test "X$destdir" = "X$dest" && destdir=. - destname=`$echo "X$dest" | $Xsed -e 's%^.*/%%'` - - # Not a directory, so check to see that there is only one file specified. - set dummy $files - if test "$#" -gt 2; then - $echo "$modename: \`$dest' is not a directory" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - fi - case $destdir in - [\\/]* | [A-Za-z]:[\\/]*) ;; - *) - for file in $files; do - case $file in - *.lo) ;; - *) - $echo "$modename: \`$destdir' must be an absolute directory name" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - ;; - esac - done - ;; - esac - - # This variable tells wrapper scripts just to set variables rather - # than running their programs. 
- libtool_install_magic="$magic" - - staticlibs= - future_libdirs= - current_libdirs= - for file in $files; do - - # Do each installation. - case $file in - *.$libext) - # Do the static libraries later. - staticlibs="$staticlibs $file" - ;; - - *.la) - # Check to see that this really is a libtool archive. - if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then : - else - $echo "$modename: \`$file' is not a valid libtool archive" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - library_names= - old_library= - relink_command= - # If there is no directory component, then add one. - case $file in - */* | *\\*) . $file ;; - *) . ./$file ;; - esac - - # Add the libdir to current_libdirs if it is the destination. - if test "X$destdir" = "X$libdir"; then - case "$current_libdirs " in - *" $libdir "*) ;; - *) current_libdirs="$current_libdirs $libdir" ;; - esac - else - # Note the libdir as a future libdir. - case "$future_libdirs " in - *" $libdir "*) ;; - *) future_libdirs="$future_libdirs $libdir" ;; - esac - fi - - dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`/ - test "X$dir" = "X$file/" && dir= - dir="$dir$objdir" - - if test -n "$relink_command"; then - # Determine the prefix the user has applied to our future dir. - inst_prefix_dir=`$echo "$destdir" | $SED "s%$libdir\$%%"` - - # Don't allow the user to place us outside of our expected - # location b/c this prevents finding dependent libraries that - # are installed to the same prefix. - # At present, this check doesn't affect windows .dll's that - # are installed into $libdir/../bin (currently, that works fine) - # but it's something to keep an eye on. - if test "$inst_prefix_dir" = "$destdir"; then - $echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2 - exit $EXIT_FAILURE - fi - - if test -n "$inst_prefix_dir"; then - # Stick the inst_prefix_dir data into the link command. - relink_command=`$echo "$relink_command" | $SP2NL | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%" | $NL2SP` - else - relink_command=`$echo "$relink_command" | $SP2NL | $SED "s%@inst_prefix_dir@%%" | $NL2SP` - fi - - $echo "$modename: warning: relinking \`$file'" 1>&2 - $show "$relink_command" - if $run eval "$relink_command"; then : - else - $echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2 - exit $EXIT_FAILURE - fi - fi - - # See the names of the shared library. - set dummy $library_names - if test -n "$2"; then - realname="$2" - shift - shift - - srcname="$realname" - test -n "$relink_command" && srcname="$realname"T - - # Install the shared library and build the symlinks. - $show "$install_prog $dir/$srcname $destdir/$realname" - $run eval "$install_prog $dir/$srcname $destdir/$realname" || exit $? - if test -n "$stripme" && test -n "$striplib"; then - $show "$striplib $destdir/$realname" - $run eval "$striplib $destdir/$realname" || exit $? - fi - - if test "$#" -gt 0; then - # Delete the old symlinks, and create new ones. - # Try `ln -sf' first, because the `ln' binary might depend on - # the symlink we replace! Solaris /bin/ln does not understand -f, - # so we also need to try rm && ln -s. 
- for linkname - do - if test "$linkname" != "$realname"; then - $show "(cd $destdir && { $LN_S -f $realname $linkname || { $rm $linkname && $LN_S $realname $linkname; }; })" - $run eval "(cd $destdir && { $LN_S -f $realname $linkname || { $rm $linkname && $LN_S $realname $linkname; }; })" - fi - done - fi - - # Do each command in the postinstall commands. - lib="$destdir/$realname" - cmds=$postinstall_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || { - lt_exit=$? - - # Restore the uninstalled library and exit - if test "$mode" = relink; then - $run eval '(cd $output_objdir && $rm ${realname}T && $mv ${realname}U $realname)' - fi - - exit $lt_exit - } - done - IFS="$save_ifs" - fi - - # Install the pseudo-library for information purposes. - name=`$echo "X$file" | $Xsed -e 's%^.*/%%'` - instname="$dir/$name"i - $show "$install_prog $instname $destdir/$name" - $run eval "$install_prog $instname $destdir/$name" || exit $? - - # Maybe install the static library, too. - test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library" - ;; - - *.lo) - # Install (i.e. copy) a libtool object. - - # Figure out destination file name, if it wasn't already specified. - if test -n "$destname"; then - destfile="$destdir/$destname" - else - destfile=`$echo "X$file" | $Xsed -e 's%^.*/%%'` - destfile="$destdir/$destfile" - fi - - # Deduce the name of the destination old-style object file. - case $destfile in - *.lo) - staticdest=`$echo "X$destfile" | $Xsed -e "$lo2o"` - ;; - *.$objext) - staticdest="$destfile" - destfile= - ;; - *) - $echo "$modename: cannot copy a libtool object to \`$destfile'" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - # Install the libtool object if requested. - if test -n "$destfile"; then - $show "$install_prog $file $destfile" - $run eval "$install_prog $file $destfile" || exit $? - fi - - # Install the old object if enabled. - if test "$build_old_libs" = yes; then - # Deduce the name of the old-style object file. - staticobj=`$echo "X$file" | $Xsed -e "$lo2o"` - - $show "$install_prog $staticobj $staticdest" - $run eval "$install_prog \$staticobj \$staticdest" || exit $? - fi - exit $EXIT_SUCCESS - ;; - - *) - # Figure out destination file name, if it wasn't already specified. - if test -n "$destname"; then - destfile="$destdir/$destname" - else - destfile=`$echo "X$file" | $Xsed -e 's%^.*/%%'` - destfile="$destdir/$destfile" - fi - - # If the file is missing, and there is a .exe on the end, strip it - # because it is most likely a libtool script we actually want to - # install - stripped_ext="" - case $file in - *.exe) - if test ! -f "$file"; then - file=`$echo $file|${SED} 's,.exe$,,'` - stripped_ext=".exe" - fi - ;; - esac - - # Do a test to see if this is really a libtool program. - case $host in - *cygwin*|*mingw*) - wrapper=`$echo $file | ${SED} -e 's,.exe$,,'` - ;; - *) - wrapper=$file - ;; - esac - if (${SED} -e '4q' $wrapper | grep "^# Generated by .*$PACKAGE")>/dev/null 2>&1; then - notinst_deplibs= - relink_command= - - # Note that it is not necessary on cygwin/mingw to append a dot to - # foo even if both foo and FILE.exe exist: automatic-append-.exe - # behavior happens only for exec(3), not for open(2)! Also, sourcing - # `FILE.' does not work on cygwin managed mounts. - # - # If there is no directory component, then add one. - case $wrapper in - */* | *\\*) . ${wrapper} ;; - *) . ./${wrapper} ;; - esac - - # Check the variables that should have been set. 
- if test -z "$notinst_deplibs"; then - $echo "$modename: invalid libtool wrapper script \`$wrapper'" 1>&2 - exit $EXIT_FAILURE - fi - - finalize=yes - for lib in $notinst_deplibs; do - # Check to see that each library is installed. - libdir= - if test -f "$lib"; then - # If there is no directory component, then add one. - case $lib in - */* | *\\*) . $lib ;; - *) . ./$lib ;; - esac - fi - libfile="$libdir/"`$echo "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test - if test -n "$libdir" && test ! -f "$libfile"; then - $echo "$modename: warning: \`$lib' has not been installed in \`$libdir'" 1>&2 - finalize=no - fi - done - - relink_command= - # Note that it is not necessary on cygwin/mingw to append a dot to - # foo even if both foo and FILE.exe exist: automatic-append-.exe - # behavior happens only for exec(3), not for open(2)! Also, sourcing - # `FILE.' does not work on cygwin managed mounts. - # - # If there is no directory component, then add one. - case $wrapper in - */* | *\\*) . ${wrapper} ;; - *) . ./${wrapper} ;; - esac - - outputname= - if test "$fast_install" = no && test -n "$relink_command"; then - if test "$finalize" = yes && test -z "$run"; then - tmpdir=`func_mktempdir` - file=`$echo "X$file$stripped_ext" | $Xsed -e 's%^.*/%%'` - outputname="$tmpdir/$file" - # Replace the output file specification. - relink_command=`$echo "X$relink_command" | $SP2NL | $Xsed -e 's%@OUTPUT@%'"$outputname"'%g' | $NL2SP` - - $show "$relink_command" - if $run eval "$relink_command"; then : - else - $echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2 - ${rm}r "$tmpdir" - continue - fi - file="$outputname" - else - $echo "$modename: warning: cannot relink \`$file'" 1>&2 - fi - else - # Install the binary that we compiled earlier. - file=`$echo "X$file$stripped_ext" | $Xsed -e "s%\([^/]*\)$%$objdir/\1%"` - fi - fi - - # remove .exe since cygwin /usr/bin/install will append another - # one anyway - case $install_prog,$host in - */usr/bin/install*,*cygwin*) - case $file:$destfile in - *.exe:*.exe) - # this is ok - ;; - *.exe:*) - destfile=$destfile.exe - ;; - *:*.exe) - destfile=`$echo $destfile | ${SED} -e 's,.exe$,,'` - ;; - esac - ;; - esac - $show "$install_prog$stripme $file $destfile" - $run eval "$install_prog\$stripme \$file \$destfile" || exit $? - test -n "$outputname" && ${rm}r "$tmpdir" - ;; - esac - done - - for file in $staticlibs; do - name=`$echo "X$file" | $Xsed -e 's%^.*/%%'` - - # Set up the ranlib parameters. - oldlib="$destdir/$name" - - $show "$install_prog $file $oldlib" - $run eval "$install_prog \$file \$oldlib" || exit $? - - if test -n "$stripme" && test -n "$old_striplib"; then - $show "$old_striplib $oldlib" - $run eval "$old_striplib $oldlib" || exit $? - fi - - # Do each command in the postinstall commands. - cmds=$old_postinstall_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || exit $? - done - IFS="$save_ifs" - done - - if test -n "$future_libdirs"; then - $echo "$modename: warning: remember to run \`$progname --finish$future_libdirs'" 1>&2 - fi - - if test -n "$current_libdirs"; then - # Maybe just do a dry run. 
- test -n "$run" && current_libdirs=" -n$current_libdirs" - exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' - else - exit $EXIT_SUCCESS - fi - ;; - - # libtool finish mode - finish) - modename="$modename: finish" - libdirs="$nonopt" - admincmds= - - if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then - for dir - do - libdirs="$libdirs $dir" - done - - for libdir in $libdirs; do - if test -n "$finish_cmds"; then - # Do each command in the finish commands. - cmds=$finish_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" || admincmds="$admincmds - $cmd" - done - IFS="$save_ifs" - fi - if test -n "$finish_eval"; then - # Do the single finish_eval. - eval cmds=\"$finish_eval\" - $run eval "$cmds" || admincmds="$admincmds - $cmds" - fi - done - fi - - # Exit here if they wanted silent mode. - test "$show" = : && exit $EXIT_SUCCESS - - $echo "X----------------------------------------------------------------------" | $Xsed - $echo "Libraries have been installed in:" - for libdir in $libdirs; do - $echo " $libdir" - done - $echo - $echo "If you ever happen to want to link against installed libraries" - $echo "in a given directory, LIBDIR, you must either use libtool, and" - $echo "specify the full pathname of the library, or use the \`-LLIBDIR'" - $echo "flag during linking and do at least one of the following:" - if test -n "$shlibpath_var"; then - $echo " - add LIBDIR to the \`$shlibpath_var' environment variable" - $echo " during execution" - fi - if test -n "$runpath_var"; then - $echo " - add LIBDIR to the \`$runpath_var' environment variable" - $echo " during linking" - fi - if test -n "$hardcode_libdir_flag_spec"; then - libdir=LIBDIR - eval flag=\"$hardcode_libdir_flag_spec\" - - $echo " - use the \`$flag' linker flag" - fi - if test -n "$admincmds"; then - $echo " - have your system administrator run these commands:$admincmds" - fi - if test -f /etc/ld.so.conf; then - $echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" - fi - $echo - $echo "See any operating system documentation about shared libraries for" - $echo "more information, such as the ld(1) and ld.so(8) manual pages." - $echo "X----------------------------------------------------------------------" | $Xsed - exit $EXIT_SUCCESS - ;; - - # libtool execute mode - execute) - modename="$modename: execute" - - # The first argument is the command name. - cmd="$nonopt" - if test -z "$cmd"; then - $echo "$modename: you must specify a COMMAND" 1>&2 - $echo "$help" - exit $EXIT_FAILURE - fi - - # Handle -dlopen flags immediately. - for file in $execute_dlfiles; do - if test ! -f "$file"; then - $echo "$modename: \`$file' is not a file" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - dir= - case $file in - *.la) - # Check to see that this really is a libtool archive. - if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then : - else - $echo "$modename: \`$lib' is not a valid libtool archive" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - # Read the libtool library. - dlname= - library_names= - - # If there is no directory component, then add one. - case $file in - */* | *\\*) . $file ;; - *) . ./$file ;; - esac - - # Skip this library if it cannot be dlopened. - if test -z "$dlname"; then - # Warn if it was a shared library. 
- test -n "$library_names" && $echo "$modename: warning: \`$file' was not linked with \`-export-dynamic'" - continue - fi - - dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'` - test "X$dir" = "X$file" && dir=. - - if test -f "$dir/$objdir/$dlname"; then - dir="$dir/$objdir" - else - if test ! -f "$dir/$dlname"; then - $echo "$modename: cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" 1>&2 - exit $EXIT_FAILURE - fi - fi - ;; - - *.lo) - # Just add the directory containing the .lo file. - dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'` - test "X$dir" = "X$file" && dir=. - ;; - - *) - $echo "$modename: warning \`-dlopen' is ignored for non-libtool libraries and objects" 1>&2 - continue - ;; - esac - - # Get the absolute pathname. - absdir=`cd "$dir" && pwd` - test -n "$absdir" && dir="$absdir" - - # Now add the directory to shlibpath_var. - if eval "test -z \"\$$shlibpath_var\""; then - eval "$shlibpath_var=\"\$dir\"" - else - eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\"" - fi - done - - # This variable tells wrapper scripts just to set shlibpath_var - # rather than running their programs. - libtool_execute_magic="$magic" - - # Check if any of the arguments is a wrapper script. - args= - for file - do - case $file in - -*) ;; - *) - # Do a test to see if this is really a libtool program. - if (${SED} -e '4q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - # If there is no directory component, then add one. - case $file in - */* | *\\*) . $file ;; - *) . ./$file ;; - esac - - # Transform arg to wrapped name. - file="$progdir/$program" - fi - ;; - esac - # Quote arguments (to preserve shell metacharacters). - file=`$echo "X$file" | $Xsed -e "$sed_quote_subst"` - args="$args \"$file\"" - done - - if test -z "$run"; then - if test -n "$shlibpath_var"; then - # Export the shlibpath_var. - eval "export $shlibpath_var" - fi - - # Restore saved environment variables - for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES - do - eval "if test \"\${save_$lt_var+set}\" = set; then - $lt_var=\$save_$lt_var; export $lt_var - fi" - done - - # Now prepare to actually exec the command. - exec_cmd="\$cmd$args" - else - # Display what would be done. - if test -n "$shlibpath_var"; then - eval "\$echo \"\$shlibpath_var=\$$shlibpath_var\"" - $echo "export $shlibpath_var" - fi - $echo "$cmd$args" - exit $EXIT_SUCCESS - fi - ;; - - # libtool clean and uninstall mode - clean | uninstall) - modename="$modename: $mode" - rm="$nonopt" - files= - rmforce= - exit_status=0 - - # This variable tells wrapper scripts just to set variables rather - # than running their programs. - libtool_install_magic="$magic" - - for arg - do - case $arg in - -f) rm="$rm $arg"; rmforce=yes ;; - -*) rm="$rm $arg" ;; - *) files="$files $arg" ;; - esac - done - - if test -z "$rm"; then - $echo "$modename: you must specify an RM program" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - fi - - rmdirs= - - origobjdir="$objdir" - for file in $files; do - dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'` - if test "X$dir" = "X$file"; then - dir=. - objdir="$origobjdir" - else - objdir="$dir/$origobjdir" - fi - name=`$echo "X$file" | $Xsed -e 's%^.*/%%'` - test "$mode" = uninstall && objdir="$dir" - - # Remember objdir for removal later, being careful to avoid duplicates - if test "$mode" = clean; then - case " $rmdirs " in - *" $objdir "*) ;; - *) rmdirs="$rmdirs $objdir" ;; - esac - fi - - # Don't error if the file doesn't exist and rm -f was used. 
- if (test -L "$file") >/dev/null 2>&1 \ - || (test -h "$file") >/dev/null 2>&1 \ - || test -f "$file"; then - : - elif test -d "$file"; then - exit_status=1 - continue - elif test "$rmforce" = yes; then - continue - fi - - rmfiles="$file" - - case $name in - *.la) - # Possibly a libtool archive, so verify it. - if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - . $dir/$name - - # Delete the libtool libraries and symlinks. - for n in $library_names; do - rmfiles="$rmfiles $objdir/$n" - done - test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library" - - case "$mode" in - clean) - case " $library_names " in - # " " in the beginning catches empty $dlname - *" $dlname "*) ;; - *) rmfiles="$rmfiles $objdir/$dlname" ;; - esac - test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i" - ;; - uninstall) - if test -n "$library_names"; then - # Do each command in the postuninstall commands. - cmds=$postuninstall_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" - if test "$?" -ne 0 && test "$rmforce" != yes; then - exit_status=1 - fi - done - IFS="$save_ifs" - fi - - if test -n "$old_library"; then - # Do each command in the old_postuninstall commands. - cmds=$old_postuninstall_cmds - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" - eval cmd=\"$cmd\" - $show "$cmd" - $run eval "$cmd" - if test "$?" -ne 0 && test "$rmforce" != yes; then - exit_status=1 - fi - done - IFS="$save_ifs" - fi - # FIXME: should reinstall the best remaining shared library. - ;; - esac - fi - ;; - - *.lo) - # Possibly a libtool object, so verify it. - if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - - # Read the .lo file - . $dir/$name - - # Add PIC object to the list of files to remove. - if test -n "$pic_object" \ - && test "$pic_object" != none; then - rmfiles="$rmfiles $dir/$pic_object" - fi - - # Add non-PIC object to the list of files to remove. - if test -n "$non_pic_object" \ - && test "$non_pic_object" != none; then - rmfiles="$rmfiles $dir/$non_pic_object" - fi - fi - ;; - - *) - if test "$mode" = clean ; then - noexename=$name - case $file in - *.exe) - file=`$echo $file|${SED} 's,.exe$,,'` - noexename=`$echo $name|${SED} 's,.exe$,,'` - # $file with .exe has already been added to rmfiles, - # add $file without .exe - rmfiles="$rmfiles $file" - ;; - esac - # Do a test to see if this is a libtool program. - if (${SED} -e '4q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then - relink_command= - . 
$dir/$noexename - - # note $name still contains .exe if it was in $file originally - # as does the version of $file that was added into $rmfiles - rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}" - if test "$fast_install" = yes && test -n "$relink_command"; then - rmfiles="$rmfiles $objdir/lt-$name" - fi - if test "X$noexename" != "X$name" ; then - rmfiles="$rmfiles $objdir/lt-${noexename}.c" - fi - fi - fi - ;; - esac - $show "$rm $rmfiles" - $run $rm $rmfiles || exit_status=1 - done - objdir="$origobjdir" - - # Try to remove the ${objdir}s in the directories where we deleted files - for dir in $rmdirs; do - if test -d "$dir"; then - $show "rmdir $dir" - $run rmdir $dir >/dev/null 2>&1 - fi - done - - exit $exit_status - ;; - - "") - $echo "$modename: you must specify a MODE" 1>&2 - $echo "$generic_help" 1>&2 - exit $EXIT_FAILURE - ;; - esac - - if test -z "$exec_cmd"; then - $echo "$modename: invalid operation mode \`$mode'" 1>&2 - $echo "$generic_help" 1>&2 - exit $EXIT_FAILURE - fi -fi # test -z "$show_help" - -if test -n "$exec_cmd"; then - eval exec $exec_cmd - exit $EXIT_FAILURE -fi - -# We need to display help for each of the modes. -case $mode in -"") $echo \ -"Usage: $modename [OPTION]... [MODE-ARG]... - -Provide generalized library-building support services. - - --config show all configuration variables - --debug enable verbose shell tracing --n, --dry-run display commands without modifying any files - --features display basic configuration information and exit - --finish same as \`--mode=finish' - --help display this help message and exit - --mode=MODE use operation mode MODE [default=inferred from MODE-ARGS] - --quiet same as \`--silent' - --silent don't print informational messages - --tag=TAG use configuration variables from tag TAG - --version print version information - -MODE must be one of the following: - - clean remove files from the build directory - compile compile a source file into a libtool object - execute automatically set library path, then run a program - finish complete the installation of libtool libraries - install install libraries or executables - link create a library or an executable - uninstall remove libraries from an installed directory - -MODE-ARGS vary depending on the MODE. Try \`$modename --help --mode=MODE' for -a more detailed description of MODE. - -Report bugs to <bug-libtool@gnu.org>." - exit $EXIT_SUCCESS - ;; - -clean) - $echo \ -"Usage: $modename [OPTION]... --mode=clean RM [RM-OPTION]... FILE... - -Remove files from the build directory. - -RM is the name of the program to use to delete files associated with each FILE -(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed -to RM. - -If FILE is a libtool library, object or program, all the files associated -with it are deleted. Otherwise, only FILE itself is deleted using RM." - ;; - -compile) - $echo \ -"Usage: $modename [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE - -Compile a source file into a libtool library object. - -This mode accepts the following additional options: - - -o OUTPUT-FILE set the output file name to OUTPUT-FILE - -prefer-pic try to building PIC objects only - -prefer-non-pic try to building non-PIC objects only - -static always build a \`.o' file suitable for static linking - -COMPILE-COMMAND is a command to be used in creating a \`standard' object file -from the given SOURCEFILE. 
- -The output file name is determined by removing the directory component from -SOURCEFILE, then substituting the C source code suffix \`.c' with the -library object suffix, \`.lo'." - ;; - -execute) - $echo \ -"Usage: $modename [OPTION]... --mode=execute COMMAND [ARGS]... - -Automatically set library path, then run a program. - -This mode accepts the following additional options: - - -dlopen FILE add the directory containing FILE to the library path - -This mode sets the library path environment variable according to \`-dlopen' -flags. - -If any of the ARGS are libtool executable wrappers, then they are translated -into their corresponding uninstalled binary, and any of their required library -directories are added to the library path. - -Then, COMMAND is executed, with ARGS as arguments." - ;; - -finish) - $echo \ -"Usage: $modename [OPTION]... --mode=finish [LIBDIR]... - -Complete the installation of libtool libraries. - -Each LIBDIR is a directory that contains libtool libraries. - -The commands that this mode executes may require superuser privileges. Use -the \`--dry-run' option if you just want to see what would be executed." - ;; - -install) - $echo \ -"Usage: $modename [OPTION]... --mode=install INSTALL-COMMAND... - -Install executables or libraries. - -INSTALL-COMMAND is the installation command. The first component should be -either the \`install' or \`cp' program. - -The rest of the components are interpreted as arguments to that command (only -BSD-compatible install options are recognized)." - ;; - -link) - $echo \ -"Usage: $modename [OPTION]... --mode=link LINK-COMMAND... - -Link object files or libraries together to form another library, or to -create an executable program. - -LINK-COMMAND is a command using the C compiler that you would use to create -a program from several object files. - -The following components of LINK-COMMAND are treated specially: - - -all-static do not do any dynamic linking at all - -avoid-version do not add a version suffix if possible - -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime - -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols - -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) - -export-symbols SYMFILE - try to export only the symbols listed in SYMFILE - -export-symbols-regex REGEX - try to export only the symbols matching REGEX - -LLIBDIR search LIBDIR for required installed libraries - -lNAME OUTPUT-FILE requires the installed library libNAME - -module build a library that can dlopened - -no-fast-install disable the fast-install mode - -no-install link a not-installable executable - -no-undefined declare that a library does not refer to external symbols - -o OUTPUT-FILE create OUTPUT-FILE from the specified objects - -objectlist FILE Use a list of object files found in FILE to specify objects - -precious-files-regex REGEX - don't remove output files matching REGEX - -release RELEASE specify package release information - -rpath LIBDIR the created library will eventually be installed in LIBDIR - -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries - -static do not do any dynamic linking of uninstalled libtool libraries - -static-libtool-libs - do not do any dynamic linking of libtool libraries - -version-info CURRENT[:REVISION[:AGE]] - specify library version info [each variable defaults to 0] - -All other options (arguments beginning with \`-') are ignored. - -Every other argument is treated as a filename. 
Files ending in \`.la' are -treated as uninstalled libtool libraries, other files are standard or library -object files. - -If the OUTPUT-FILE ends in \`.la', then a libtool library is created, -only library objects (\`.lo' files) may be specified, and \`-rpath' is -required, except when creating a convenience library. - -If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created -using \`ar' and \`ranlib', or on Windows using \`lib'. - -If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file -is created, otherwise an executable program is created." - ;; - -uninstall) - $echo \ -"Usage: $modename [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE... - -Remove libraries from an installation directory. - -RM is the name of the program to use to delete files associated with each FILE -(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed -to RM. - -If FILE is a libtool library, all the files associated with it are deleted. -Otherwise, only FILE itself is deleted using RM." - ;; - -*) - $echo "$modename: invalid operation mode \`$mode'" 1>&2 - $echo "$help" 1>&2 - exit $EXIT_FAILURE - ;; -esac - -$echo -$echo "Try \`$modename --help' for more information about other modes." - -exit $? - -# The TAGs below are defined such that we never get into a situation -# in which we disable both kinds of libraries. Given conflicting -# choices, we go for a static library, that is the most portable, -# since we can't tell whether shared libraries were disabled because -# the user asked for that or because the platform doesn't support -# them. This is particularly important on AIX, because we don't -# support having both static and shared libraries enabled at the same -# time on that platform, so we default to a shared-only configuration. -# If a disable-shared tag is given, we'll fallback to a static-only -# configuration. But we'll never go from static-only to shared-only. - -# ### BEGIN LIBTOOL TAG CONFIG: disable-shared -disable_libs=shared -# ### END LIBTOOL TAG CONFIG: disable-shared - -# ### BEGIN LIBTOOL TAG CONFIG: disable-static -disable_libs=static -# ### END LIBTOOL TAG CONFIG: disable-static - -# Local Variables: -# mode:shell-script -# sh-indentation:2 -# End:
diff --git a/third_party/libevent/m4/.dummy b/third_party/libevent/m4/.dummy new file mode 100644 index 0000000..a0a72d60 --- /dev/null +++ b/third_party/libevent/m4/.dummy
@@ -0,0 +1 @@ +(This dummy file exists so that git will create the m4 directory)
diff --git a/third_party/libevent/mac/event-config.h b/third_party/libevent/mac/event-config.h index 4af575a5..92e212d0 100644 --- a/third_party/libevent/mac/event-config.h +++ b/third_party/libevent/mac/event-config.h
@@ -212,6 +212,13 @@ /* Define if kqueue works correctly with pipes */ #define _EVENT_HAVE_WORKING_KQUEUE 1 +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#define _EVENT_LT_OBJDIR ".libs/" + +/* Numeric representation of the version */ +#define _EVENT_NUMERIC_VERSION 0x01040f00 + /* Name of package */ #define _EVENT_PACKAGE "libevent" @@ -227,6 +234,9 @@ /* Define to the one symbol short name of this package. */ #define _EVENT_PACKAGE_TARNAME "" +/* Define to the home page for this package. */ +#define _EVENT_PACKAGE_URL "" + /* Define to the version of this package. */ #define _EVENT_PACKAGE_VERSION "" @@ -249,7 +259,7 @@ #define _EVENT_TIME_WITH_SYS_TIME 1 /* Version number of package */ -#define _EVENT_VERSION "1.4.13-stable" +#define _EVENT_VERSION "1.4.15" /* Define to appropriate substitue if compiler doesnt have __func__ */ /* #undef _EVENT___func__ */
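(Aside, not part of the patch: the new _EVENT_NUMERIC_VERSION value 0x01040f00 added in the hunk above corresponds to the textual version "1.4.15" set further down. The byte layout — major, minor, patch, then a zero byte — is inferred from those two values; a minimal sketch that decodes it:)

    /* Sketch only: decode 0x01040f00 into "1.4.15".
       The major/minor/patch byte layout is an inference from the
       values shown in the hunk, not something the patch states. */
    #include <stdio.h>

    int main(void)
    {
        unsigned v = 0x01040f00;            /* value of _EVENT_NUMERIC_VERSION */
        unsigned major = (v >> 24) & 0xff;  /* 0x01 -> 1  */
        unsigned minor = (v >> 16) & 0xff;  /* 0x04 -> 4  */
        unsigned patch = (v >>  8) & 0xff;  /* 0x0f -> 15 */
        printf("%u.%u.%u\n", major, minor, patch);  /* prints 1.4.15 */
        return 0;
    }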
diff --git a/third_party/libevent/min_heap.h b/third_party/libevent/min_heap.h index 4fc83c0..14d8e37 100644 --- a/third_party/libevent/min_heap.h +++ b/third_party/libevent/min_heap.h
@@ -56,7 +56,7 @@ } void min_heap_ctor(min_heap_t* s) { s->p = 0; s->n = 0; s->a = 0; } -void min_heap_dtor(min_heap_t* s) { free(s->p); } +void min_heap_dtor(min_heap_t* s) { if(s->p) free(s->p); } void min_heap_elem_init(struct event* e) { e->min_heap_idx = -1; } int min_heap_empty(min_heap_t* s) { return 0u == s->n; } unsigned min_heap_size(min_heap_t* s) { return s->n; }
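(Aside, not part of the patch: the min_heap_dtor change above adds a null check before free(). ISO C already guarantees that free(NULL) is a no-op, so the guard is defensive rather than required; a minimal illustration under that assumption:)

    /* Sketch only: both calls below are well-defined in ISO C. */
    #include <stdlib.h>

    int main(void)
    {
        int *p = NULL;
        free(p);        /* free(NULL) does nothing per the C standard */
        if (p)          /* the patched dtor adds this extra guard     */
            free(p);
        return 0;
    }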
diff --git a/third_party/libevent/missing b/third_party/libevent/missing deleted file mode 100644 index e7ef83a..0000000 --- a/third_party/libevent/missing +++ /dev/null
@@ -1,360 +0,0 @@ -#! /bin/sh -# Common stub for a few missing GNU programs while installing. - -scriptversion=2003-09-02.23 - -# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003 -# Free Software Foundation, Inc. -# Originally by Fran,cois Pinard <pinard@iro.umontreal.ca>, 1996. - -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2, or (at your option) -# any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA -# 02111-1307, USA. - -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - -if test $# -eq 0; then - echo 1>&2 "Try \`$0 --help' for more information" - exit 1 -fi - -run=: - -# In the cases where this matters, `missing' is being run in the -# srcdir already. -if test -f configure.ac; then - configure_ac=configure.ac -else - configure_ac=configure.in -fi - -msg="missing on your system" - -case "$1" in ---run) - # Try to run requested program, and just exit if it succeeds. - run= - shift - "$@" && exit 0 - # Exit code 63 means version mismatch. This often happens - # when the user try to use an ancient version of a tool on - # a file that requires a minimum version. In this case we - # we should proceed has if the program had been absent, or - # if --run hadn't been passed. - if test $? = 63; then - run=: - msg="probably too old" - fi - ;; -esac - -# If it does not exist, or fails to run (possibly an outdated version), -# try to emulate it. -case "$1" in - - -h|--h|--he|--hel|--help) - echo "\ -$0 [OPTION]... PROGRAM [ARGUMENT]... - -Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an -error status if there is no known handling for PROGRAM. - -Options: - -h, --help display this help and exit - -v, --version output version information and exit - --run try to run the given command, and emulate it if it fails - -Supported PROGRAM values: - aclocal touch file \`aclocal.m4' - autoconf touch file \`configure' - autoheader touch file \`config.h.in' - automake touch all \`Makefile.in' files - bison create \`y.tab.[ch]', if possible, from existing .[ch] - flex create \`lex.yy.c', if possible, from existing .c - help2man touch the output file - lex create \`lex.yy.c', if possible, from existing .c - makeinfo touch the output file - tar try tar, gnutar, gtar, then tar without non-portable flags - yacc create \`y.tab.[ch]', if possible, from existing .[ch] - -Send bug reports to <bug-automake@gnu.org>." - ;; - - -v|--v|--ve|--ver|--vers|--versi|--versio|--version) - echo "missing $scriptversion (GNU Automake)" - ;; - - -*) - echo 1>&2 "$0: Unknown \`$1' option" - echo 1>&2 "Try \`$0 --help' for more information" - exit 1 - ;; - - aclocal*) - if test -z "$run" && ($1 --version) > /dev/null 2>&1; then - # We have it, but it failed. - exit 1 - fi - - echo 1>&2 "\ -WARNING: \`$1' is $msg. 
You should only need it if - you modified \`acinclude.m4' or \`${configure_ac}'. You might want - to install the \`Automake' and \`Perl' packages. Grab them from - any GNU archive site." - touch aclocal.m4 - ;; - - autoconf) - if test -z "$run" && ($1 --version) > /dev/null 2>&1; then - # We have it, but it failed. - exit 1 - fi - - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified \`${configure_ac}'. You might want to install the - \`Autoconf' and \`GNU m4' packages. Grab them from any GNU - archive site." - touch configure - ;; - - autoheader) - if test -z "$run" && ($1 --version) > /dev/null 2>&1; then - # We have it, but it failed. - exit 1 - fi - - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified \`acconfig.h' or \`${configure_ac}'. You might want - to install the \`Autoconf' and \`GNU m4' packages. Grab them - from any GNU archive site." - files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}` - test -z "$files" && files="config.h" - touch_files= - for f in $files; do - case "$f" in - *:*) touch_files="$touch_files "`echo "$f" | - sed -e 's/^[^:]*://' -e 's/:.*//'`;; - *) touch_files="$touch_files $f.in";; - esac - done - touch $touch_files - ;; - - automake*) - if test -z "$run" && ($1 --version) > /dev/null 2>&1; then - # We have it, but it failed. - exit 1 - fi - - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'. - You might want to install the \`Automake' and \`Perl' packages. - Grab them from any GNU archive site." - find . -type f -name Makefile.am -print | - sed 's/\.am$/.in/' | - while read f; do touch "$f"; done - ;; - - autom4te) - if test -z "$run" && ($1 --version) > /dev/null 2>&1; then - # We have it, but it failed. - exit 1 - fi - - echo 1>&2 "\ -WARNING: \`$1' is needed, but is $msg. - You might have modified some files without having the - proper tools for further handling them. - You can get \`$1' as part of \`Autoconf' from any GNU - archive site." - - file=`echo "$*" | sed -n 's/.*--output[ =]*\([^ ]*\).*/\1/p'` - test -z "$file" && file=`echo "$*" | sed -n 's/.*-o[ ]*\([^ ]*\).*/\1/p'` - if test -f "$file"; then - touch $file - else - test -z "$file" || exec >$file - echo "#! /bin/sh" - echo "# Created by GNU Automake missing as a replacement of" - echo "# $ $@" - echo "exit 0" - chmod +x $file - exit 1 - fi - ;; - - bison|yacc) - echo 1>&2 "\ -WARNING: \`$1' $msg. You should only need it if - you modified a \`.y' file. You may need the \`Bison' package - in order for those modifications to take effect. You can get - \`Bison' from any GNU archive site." - rm -f y.tab.c y.tab.h - if [ $# -ne 1 ]; then - eval LASTARG="\${$#}" - case "$LASTARG" in - *.y) - SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'` - if [ -f "$SRCFILE" ]; then - cp "$SRCFILE" y.tab.c - fi - SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'` - if [ -f "$SRCFILE" ]; then - cp "$SRCFILE" y.tab.h - fi - ;; - esac - fi - if [ ! -f y.tab.h ]; then - echo >y.tab.h - fi - if [ ! -f y.tab.c ]; then - echo 'main() { return 0; }' >y.tab.c - fi - ;; - - lex|flex) - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified a \`.l' file. You may need the \`Flex' package - in order for those modifications to take effect. You can get - \`Flex' from any GNU archive site." 
- rm -f lex.yy.c - if [ $# -ne 1 ]; then - eval LASTARG="\${$#}" - case "$LASTARG" in - *.l) - SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'` - if [ -f "$SRCFILE" ]; then - cp "$SRCFILE" lex.yy.c - fi - ;; - esac - fi - if [ ! -f lex.yy.c ]; then - echo 'main() { return 0; }' >lex.yy.c - fi - ;; - - help2man) - if test -z "$run" && ($1 --version) > /dev/null 2>&1; then - # We have it, but it failed. - exit 1 - fi - - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified a dependency of a manual page. You may need the - \`Help2man' package in order for those modifications to take - effect. You can get \`Help2man' from any GNU archive site." - - file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'` - if test -z "$file"; then - file=`echo "$*" | sed -n 's/.*--output=\([^ ]*\).*/\1/p'` - fi - if [ -f "$file" ]; then - touch $file - else - test -z "$file" || exec >$file - echo ".ab help2man is required to generate this page" - exit 1 - fi - ;; - - makeinfo) - if test -z "$run" && (makeinfo --version) > /dev/null 2>&1; then - # We have makeinfo, but it failed. - exit 1 - fi - - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified a \`.texi' or \`.texinfo' file, or any other file - indirectly affecting the aspect of the manual. The spurious - call might also be the consequence of using a buggy \`make' (AIX, - DU, IRIX). You might want to install the \`Texinfo' package or - the \`GNU make' package. Grab either from any GNU archive site." - file=`echo "$*" | sed -n 's/.*-o \([^ ]*\).*/\1/p'` - if test -z "$file"; then - file=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'` - file=`sed -n '/^@setfilename/ { s/.* \([^ ]*\) *$/\1/; p; q; }' $file` - fi - touch $file - ;; - - tar) - shift - if test -n "$run"; then - echo 1>&2 "ERROR: \`tar' requires --run" - exit 1 - fi - - # We have already tried tar in the generic part. - # Look for gnutar/gtar before invocation to avoid ugly error - # messages. - if (gnutar --version > /dev/null 2>&1); then - gnutar "$@" && exit 0 - fi - if (gtar --version > /dev/null 2>&1); then - gtar "$@" && exit 0 - fi - firstarg="$1" - if shift; then - case "$firstarg" in - *o*) - firstarg=`echo "$firstarg" | sed s/o//` - tar "$firstarg" "$@" && exit 0 - ;; - esac - case "$firstarg" in - *h*) - firstarg=`echo "$firstarg" | sed s/h//` - tar "$firstarg" "$@" && exit 0 - ;; - esac - fi - - echo 1>&2 "\ -WARNING: I can't seem to be able to run \`tar' with the given arguments. - You may want to install GNU tar or Free paxutils, or check the - command line arguments." - exit 1 - ;; - - *) - echo 1>&2 "\ -WARNING: \`$1' is needed, and is $msg. - You might have modified some files without having the - proper tools for further handling them. Check the \`README' file, - it often tells you about the needed prerequisites for installing - this package. You may also peek at any GNU archive site, in case - some other package would contain this missing \`$1' program." - exit 1 - ;; -esac - -exit 0 - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-end: "$" -# End:
diff --git a/third_party/libevent/mkinstalldirs b/third_party/libevent/mkinstalldirs deleted file mode 100644 index 56d6671..0000000 --- a/third_party/libevent/mkinstalldirs +++ /dev/null
@@ -1,40 +0,0 @@ -#! /bin/sh -# mkinstalldirs --- make directory hierarchy -# Author: Noah Friedman <friedman@prep.ai.mit.edu> -# Created: 1993-05-16 -# Public domain - -# $Id: mkinstalldirs 11 2002-04-09 17:52:23Z nprovos $ - -errstatus=0 - -for file -do - set fnord `echo ":$file" | sed -ne 's/^:\//#/;s/^://;s/\// /g;s/^#/\//;p'` - shift - - pathcomp= - for d - do - pathcomp="$pathcomp$d" - case "$pathcomp" in - -* ) pathcomp=./$pathcomp ;; - esac - - if test ! -d "$pathcomp"; then - echo "mkdir $pathcomp" - - mkdir "$pathcomp" || lasterr=$? - - if test ! -d "$pathcomp"; then - errstatus=$lasterr - fi - fi - - pathcomp="$pathcomp/" - done -done - -exit $errstatus - -# mkinstalldirs ends here
diff --git a/third_party/libevent/nacl_nonsfi/event-config.h b/third_party/libevent/nacl_nonsfi/event-config.h index 636ee80..fe28043 100644 --- a/third_party/libevent/nacl_nonsfi/event-config.h +++ b/third_party/libevent/nacl_nonsfi/event-config.h
@@ -211,6 +211,13 @@ /* Define if kqueue works correctly with pipes */ /* #undef _EVENT_HAVE_WORKING_KQUEUE */ +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#define _EVENT_LT_OBJDIR ".libs/" + +/* Numeric representation of the version */ +#define _EVENT_NUMERIC_VERSION 0x01040f00 + /* Name of package */ #define _EVENT_PACKAGE "libevent_nacl" @@ -226,6 +233,9 @@ /* Define to the one symbol short name of this package. */ #define _EVENT_PACKAGE_TARNAME "" +/* Define to the home page for this package. */ +#define _EVENT_PACKAGE_URL "" + /* Define to the version of this package. */ #define _EVENT_PACKAGE_VERSION "" @@ -248,7 +258,7 @@ #define _EVENT_TIME_WITH_SYS_TIME 1 /* Version number of package */ -#define _EVENT_VERSION "1.4.13-stable" +#define _EVENT_VERSION "1.4.15" /* Define to appropriate substitue if compiler doesnt have __func__ */ /* #undef _EVENT___func__ */
diff --git a/third_party/libevent/sample/Makefile.in b/third_party/libevent/sample/Makefile.in deleted file mode 100644 index 793752a..0000000 --- a/third_party/libevent/sample/Makefile.in +++ /dev/null
@@ -1,442 +0,0 @@ -# Makefile.in generated by automake 1.10.1 from Makefile.am. -# @configure_input@ - -# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. -# This Makefile.in is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - -@SET_MAKE@ - -VPATH = @srcdir@ -pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ -pkgincludedir = $(includedir)/@PACKAGE@ -am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd -install_sh_DATA = $(install_sh) -c -m 644 -install_sh_PROGRAM = $(install_sh) -c -install_sh_SCRIPT = $(install_sh) -c -INSTALL_HEADER = $(INSTALL_DATA) -transform = $(program_transform_name) -NORMAL_INSTALL = : -PRE_INSTALL = : -POST_INSTALL = : -NORMAL_UNINSTALL = : -PRE_UNINSTALL = : -POST_UNINSTALL = : -build_triplet = @build@ -host_triplet = @host@ -noinst_PROGRAMS = event-test$(EXEEXT) time-test$(EXEEXT) \ - signal-test$(EXEEXT) -subdir = sample -DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in -ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/configure.in -am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) -mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs -CONFIG_HEADER = $(top_builddir)/config.h -CONFIG_CLEAN_FILES = -PROGRAMS = $(noinst_PROGRAMS) -event_test_SOURCES = event-test.c -event_test_OBJECTS = event-test.$(OBJEXT) -event_test_LDADD = $(LDADD) -event_test_DEPENDENCIES = ../libevent.la -signal_test_SOURCES = signal-test.c -signal_test_OBJECTS = signal-test.$(OBJEXT) -signal_test_LDADD = $(LDADD) -signal_test_DEPENDENCIES = ../libevent.la -time_test_SOURCES = time-test.c -time_test_OBJECTS = time-test.$(OBJEXT) -time_test_LDADD = $(LDADD) -time_test_DEPENDENCIES = ../libevent.la -DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) -depcomp = -am__depfiles_maybe = -COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ - $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ - --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ - $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -CCLD = $(CC) -LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ - --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ - $(LDFLAGS) -o $@ -SOURCES = event-test.c signal-test.c time-test.c -DIST_SOURCES = event-test.c signal-test.c time-test.c -ETAGS = etags -CTAGS = ctags -DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) -ACLOCAL = @ACLOCAL@ -AMTAR = @AMTAR@ -AR = @AR@ -AUTOCONF = @AUTOCONF@ -AUTOHEADER = @AUTOHEADER@ -AUTOMAKE = @AUTOMAKE@ -AWK = @AWK@ -CC = @CC@ -CCDEPMODE = @CCDEPMODE@ -CFLAGS = @CFLAGS@ -CPP = @CPP@ -CPPFLAGS = @CPPFLAGS@ -CXX = @CXX@ -CXXCPP = @CXXCPP@ -CXXDEPMODE = @CXXDEPMODE@ -CXXFLAGS = @CXXFLAGS@ -CYGPATH_W = @CYGPATH_W@ -DEFS = @DEFS@ -DEPDIR = @DEPDIR@ -DSYMUTIL = @DSYMUTIL@ -ECHO = @ECHO@ -ECHO_C = @ECHO_C@ -ECHO_N = @ECHO_N@ -ECHO_T = @ECHO_T@ -EGREP = @EGREP@ -EXEEXT = @EXEEXT@ -F77 = @F77@ -FFLAGS = @FFLAGS@ -GREP = @GREP@ -INSTALL = @INSTALL@ -INSTALL_DATA = @INSTALL_DATA@ -INSTALL_PROGRAM = @INSTALL_PROGRAM@ 
-INSTALL_SCRIPT = @INSTALL_SCRIPT@ -INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -LDFLAGS = @LDFLAGS@ -LIBOBJS = @LIBOBJS@ -LIBS = @LIBS@ -LIBTOOL = @LIBTOOL@ -LIBTOOL_DEPS = @LIBTOOL_DEPS@ -LN_S = @LN_S@ -LTLIBOBJS = @LTLIBOBJS@ -MAKEINFO = @MAKEINFO@ -MKDIR_P = @MKDIR_P@ -NMEDIT = @NMEDIT@ -OBJEXT = @OBJEXT@ -PACKAGE = @PACKAGE@ -PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ -PACKAGE_NAME = @PACKAGE_NAME@ -PACKAGE_STRING = @PACKAGE_STRING@ -PACKAGE_TARNAME = @PACKAGE_TARNAME@ -PACKAGE_VERSION = @PACKAGE_VERSION@ -PATH_SEPARATOR = @PATH_SEPARATOR@ -RANLIB = @RANLIB@ -SED = @SED@ -SET_MAKE = @SET_MAKE@ -SHELL = @SHELL@ -STRIP = @STRIP@ -VERSION = @VERSION@ -abs_builddir = @abs_builddir@ -abs_srcdir = @abs_srcdir@ -abs_top_builddir = @abs_top_builddir@ -abs_top_srcdir = @abs_top_srcdir@ -ac_ct_CC = @ac_ct_CC@ -ac_ct_CXX = @ac_ct_CXX@ -ac_ct_F77 = @ac_ct_F77@ -am__include = @am__include@ -am__leading_dot = @am__leading_dot@ -am__quote = @am__quote@ -am__tar = @am__tar@ -am__untar = @am__untar@ -bindir = @bindir@ -build = @build@ -build_alias = @build_alias@ -build_cpu = @build_cpu@ -build_os = @build_os@ -build_vendor = @build_vendor@ -builddir = @builddir@ -datadir = @datadir@ -datarootdir = @datarootdir@ -docdir = @docdir@ -dvidir = @dvidir@ -exec_prefix = @exec_prefix@ -host = @host@ -host_alias = @host_alias@ -host_cpu = @host_cpu@ -host_os = @host_os@ -host_vendor = @host_vendor@ -htmldir = @htmldir@ -includedir = @includedir@ -infodir = @infodir@ -install_sh = @install_sh@ -libdir = @libdir@ -libexecdir = @libexecdir@ -localedir = @localedir@ -localstatedir = @localstatedir@ -mandir = @mandir@ -mkdir_p = @mkdir_p@ -oldincludedir = @oldincludedir@ -pdfdir = @pdfdir@ -prefix = @prefix@ -program_transform_name = @program_transform_name@ -psdir = @psdir@ -sbindir = @sbindir@ -sharedstatedir = @sharedstatedir@ -srcdir = @srcdir@ -sysconfdir = @sysconfdir@ -target_alias = @target_alias@ -top_build_prefix = @top_build_prefix@ -top_builddir = @top_builddir@ -top_srcdir = @top_srcdir@ -AUTOMAKE_OPTIONS = foreign no-dependencies -LDADD = ../libevent.la -AM_CFLAGS = -I$(top_srcdir) -I$(top_srcdir)/compat -event_test_sources = event-test.c -time_test_sources = time-test.c -signal_test_sources = signal-test.c -DISTCLEANFILES = *~ -all: all-am - -.SUFFIXES: -.SUFFIXES: .c .lo .o .obj -$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) - @for dep in $?; do \ - case '$(am__configure_deps)' in \ - *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ - exit 1;; \ - esac; \ - done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign sample/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign sample/Makefile -.PRECIOUS: Makefile -Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' 
in \ - *config.status*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ - *) \ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ - esac; - -$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh - -$(top_srcdir)/configure: $(am__configure_deps) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh -$(ACLOCAL_M4): $(am__aclocal_m4_deps) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh - -clean-noinstPROGRAMS: - @list='$(noinst_PROGRAMS)'; for p in $$list; do \ - f=`echo $$p|sed 's/$(EXEEXT)$$//'`; \ - echo " rm -f $$p $$f"; \ - rm -f $$p $$f ; \ - done -event-test$(EXEEXT): $(event_test_OBJECTS) $(event_test_DEPENDENCIES) - @rm -f event-test$(EXEEXT) - $(LINK) $(event_test_OBJECTS) $(event_test_LDADD) $(LIBS) -signal-test$(EXEEXT): $(signal_test_OBJECTS) $(signal_test_DEPENDENCIES) - @rm -f signal-test$(EXEEXT) - $(LINK) $(signal_test_OBJECTS) $(signal_test_LDADD) $(LIBS) -time-test$(EXEEXT): $(time_test_OBJECTS) $(time_test_DEPENDENCIES) - @rm -f time-test$(EXEEXT) - $(LINK) $(time_test_OBJECTS) $(time_test_LDADD) $(LIBS) - -mostlyclean-compile: - -rm -f *.$(OBJEXT) - -distclean-compile: - -rm -f *.tab.c - -.c.o: - $(COMPILE) -c $< - -.c.obj: - $(COMPILE) -c `$(CYGPATH_W) '$<'` - -.c.lo: - $(LTCOMPILE) -c -o $@ $< - -mostlyclean-libtool: - -rm -f *.lo - -clean-libtool: - -rm -rf .libs _libs - -ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonemtpy = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - mkid -fID $$unique -tags: TAGS - -TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - tags=; \ - here=`pwd`; \ - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ - test -n "$$unique" || unique=$$empty_fix; \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$tags $$unique; \ - fi -ctags: CTAGS -CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - tags=; \ - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - test -z "$(CTAGS_ARGS)$$tags$$unique" \ - || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$tags $$unique - -GTAGS: - here=`$(am__cd) $(top_builddir) && pwd` \ - && cd $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) $$here - -distclean-tags: - -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags - -distdir: $(DISTFILES) - @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - list='$(DISTFILES)'; \ - dist_files=`for file in $$list; do echo $$file; done | \ - sed -e "s|^$$srcdirstrip/||;t" \ - -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ - case $$dist_files in \ - */*) $(MKDIR_P) `echo 
"$$dist_files" | \ - sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ - sort -u` ;; \ - esac; \ - for file in $$dist_files; do \ - if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ - if test -d $$d/$$file; then \ - dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ - if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ - fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ - else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ - || exit 1; \ - fi; \ - done -check-am: all-am -check: check-am -all-am: Makefile $(PROGRAMS) -installdirs: -install: install-am -install-exec: install-exec-am -install-data: install-data-am -uninstall: uninstall-am - -install-am: all-am - @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am - -installcheck: installcheck-am -install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install -mostlyclean-generic: - -clean-generic: - -distclean-generic: - -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) - -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES) - -maintainer-clean-generic: - @echo "This command is intended for maintainers to use" - @echo "it deletes files that may require special tools to rebuild." -clean: clean-am - -clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \ - mostlyclean-am - -distclean: distclean-am - -rm -f Makefile -distclean-am: clean-am distclean-compile distclean-generic \ - distclean-tags - -dvi: dvi-am - -dvi-am: - -html: html-am - -info: info-am - -info-am: - -install-data-am: - -install-dvi: install-dvi-am - -install-exec-am: - -install-html: install-html-am - -install-info: install-info-am - -install-man: - -install-pdf: install-pdf-am - -install-ps: install-ps-am - -installcheck-am: - -maintainer-clean: maintainer-clean-am - -rm -f Makefile -maintainer-clean-am: distclean-am maintainer-clean-generic - -mostlyclean: mostlyclean-am - -mostlyclean-am: mostlyclean-compile mostlyclean-generic \ - mostlyclean-libtool - -pdf: pdf-am - -pdf-am: - -ps: ps-am - -ps-am: - -uninstall-am: - -.MAKE: install-am install-strip - -.PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \ - clean-libtool clean-noinstPROGRAMS ctags distclean \ - distclean-compile distclean-generic distclean-libtool \ - distclean-tags distdir dvi dvi-am html html-am info info-am \ - install install-am install-data install-data-am install-dvi \ - install-dvi-am install-exec install-exec-am install-html \ - install-html-am install-info install-info-am install-man \ - install-pdf install-pdf-am install-ps install-ps-am \ - install-strip installcheck installcheck-am installdirs \ - maintainer-clean maintainer-clean-generic mostlyclean \ - mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ - pdf pdf-am ps ps-am tags uninstall uninstall-am - - -verify: -# Tell versions [3.59,3.63) of GNU make to not export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. -.NOEXPORT:
diff --git a/third_party/libevent/sample/event-test.c b/third_party/libevent/sample/event-test.c index 2c6cb93..0a439ce 100644 --- a/third_party/libevent/sample/event-test.c +++ b/third_party/libevent/sample/event-test.c
@@ -73,7 +73,7 @@ #ifdef WIN32 HANDLE socket; // Open a file. - socket = CreateFile("test.txt", // open File + socket = CreateFileA("test.txt", // open File GENERIC_READ, // open for reading 0, // do not share NULL, // no security
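Context for the one-line change above: in <windows.h>, CreateFile is a macro that expands to CreateFileW when UNICODE is defined, and the narrow string literal "test.txt" would then fail to compile. Calling the ANSI entry point CreateFileA directly keeps the sample buildable regardless of the UNICODE setting. A self-contained sketch (not part of the patch, Windows-only):

#ifdef WIN32
#include <windows.h>

static HANDLE open_sample_file(void)
{
	return CreateFileA("test.txt",            /* narrow-string path  */
	                   GENERIC_READ,          /* open for reading    */
	                   0,                     /* do not share        */
	                   NULL,                  /* default security    */
	                   OPEN_EXISTING,         /* must already exist  */
	                   FILE_ATTRIBUTE_NORMAL, /* ordinary file       */
	                   NULL);                 /* no template handle  */
}
#endif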
diff --git a/third_party/libevent/sample/signal-test.c b/third_party/libevent/sample/signal-test.c index 9a131cb..5a5a303 100644 --- a/third_party/libevent/sample/signal-test.c +++ b/third_party/libevent/sample/signal-test.c
@@ -38,7 +38,7 @@ if (called >= 2) event_del(signal); - + called++; } @@ -46,17 +46,19 @@ main (int argc, char **argv) { struct event signal_int; - + /* Initalize the event library */ - event_init(); + struct event_base* base = event_base_new(); /* Initalize one event */ event_set(&signal_int, SIGINT, EV_SIGNAL|EV_PERSIST, signal_cb, &signal_int); + event_base_set(base, &signal_int); event_add(&signal_int, NULL); - event_dispatch(); + event_base_dispatch(base); + event_base_free(base); return (0); }
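The signal-test sample above now allocates its own event_base with event_base_new() instead of relying on the process-global base installed by event_init(), binds the signal event to that base, and frees the base on exit. A minimal standalone sketch of the same pattern using the libevent 1.4 API (error checks omitted for brevity):

#include <signal.h>
#include <event.h>

static void on_sigint(int fd, short what, void *arg)
{
	struct event *ev = arg;
	(void)fd; (void)what;
	event_del(ev);			/* stop listening after the first SIGINT */
}

int main(void)
{
	struct event_base *base = event_base_new();	/* own base, no hidden global */
	struct event sig_ev;

	event_set(&sig_ev, SIGINT, EV_SIGNAL | EV_PERSIST, on_sigint, &sig_ev);
	event_base_set(base, &sig_ev);			/* bind the event to this base */
	event_add(&sig_ev, NULL);

	event_base_dispatch(base);			/* runs until no events remain */
	event_base_free(base);
	return 0;
}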
diff --git a/third_party/libevent/signal.c b/third_party/libevent/signal.c index 74fa23f..b8d51ab 100644 --- a/third_party/libevent/signal.c +++ b/third_party/libevent/signal.c
@@ -67,6 +67,13 @@ static void evsignal_handler(int sig); +#ifdef WIN32 +#define error_is_eagain(err) \ + ((err) == EAGAIN || (err) == WSAEWOULDBLOCK) +#else +#define error_is_eagain(err) ((err) == EAGAIN) +#endif + /* Callback for when the signal handler write a byte to our signaling socket */ static void evsignal_cb(int fd, short what, void *arg) @@ -79,8 +86,11 @@ #endif n = recv(fd, signals, sizeof(signals), 0); - if (n == -1) - event_err(1, "%s: read", __func__); + if (n == -1) { + int err = EVUTIL_SOCKET_ERROR(); + if (! error_is_eagain(err)) + event_err(1, "%s: read", __func__); + } } #ifdef HAVE_SETFD @@ -125,6 +135,7 @@ TAILQ_INIT(&base->sig.evsigevents[i]); evutil_make_socket_nonblocking(base->sig.ev_signal_pair[0]); + evutil_make_socket_nonblocking(base->sig.ev_signal_pair[1]); event_set(&base->sig.ev_signal, base->sig.ev_signal_pair[1], EV_READ | EV_PERSIST, evsignal_cb, &base->sig.ev_signal); @@ -186,12 +197,14 @@ if (sigaction(evsignal, &sa, sig->sh_old[evsignal]) == -1) { event_warn("sigaction"); free(sig->sh_old[evsignal]); + sig->sh_old[evsignal] = NULL; return (-1); } #else if ((sh = signal(evsignal, handler)) == SIG_ERR) { event_warn("signal"); free(sig->sh_old[evsignal]); + sig->sh_old[evsignal] = NULL; return (-1); } *sig->sh_old[evsignal] = sh; @@ -346,12 +359,19 @@ _evsignal_restore_handler(base, i); } - EVUTIL_CLOSESOCKET(base->sig.ev_signal_pair[0]); - base->sig.ev_signal_pair[0] = -1; - EVUTIL_CLOSESOCKET(base->sig.ev_signal_pair[1]); - base->sig.ev_signal_pair[1] = -1; + if (base->sig.ev_signal_pair[0] != -1) { + EVUTIL_CLOSESOCKET(base->sig.ev_signal_pair[0]); + base->sig.ev_signal_pair[0] = -1; + } + if (base->sig.ev_signal_pair[1] != -1) { + EVUTIL_CLOSESOCKET(base->sig.ev_signal_pair[1]); + base->sig.ev_signal_pair[1] = -1; + } base->sig.sh_old_max = 0; - /* per index frees are handled in evsignal_del() */ - free(base->sig.sh_old); + /* per index frees are handled in evsig_del() */ + if (base->sig.sh_old) { + free(base->sig.sh_old); + base->sig.sh_old = NULL; + } }
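Summary of the signal.c hunks above: both ends of the internal signal socketpair are now made non-blocking, so the recv() in evsignal_cb can legitimately fail with EAGAIN (WSAEWOULDBLOCK on Windows) and is no longer treated as fatal; sh_old entries are reset to NULL after a failed sigaction()/signal() call; and teardown only closes or frees resources that were actually allocated. A rough POSIX sketch of the tolerant-read idea (the function name is illustrative, not libevent's):

#include <errno.h>
#include <sys/types.h>
#include <sys/socket.h>

static int drain_signal_socket(int fd)
{
	char buf[64];
	ssize_t n = recv(fd, buf, sizeof(buf), 0);

	if (n == -1 && errno != EAGAIN && errno != EWOULDBLOCK)
		return -1;	/* genuine error */
	return 0;		/* data read, or nothing pending: both fine */
}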
diff --git a/third_party/libevent/solaris/event-config.h b/third_party/libevent/solaris/event-config.h index c5fe160..afabe2f4 100644 --- a/third_party/libevent/solaris/event-config.h +++ b/third_party/libevent/solaris/event-config.h
@@ -212,6 +212,13 @@ /* Define if kqueue works correctly with pipes */ /* #undef _EVENT_HAVE_WORKING_KQUEUE */ +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +#define _EVENT_LT_OBJDIR ".libs/" + +/* Numeric representation of the version */ +#define _EVENT_NUMERIC_VERSION 0x01040f00 + /* Name of package */ #define _EVENT_PACKAGE "libevent" @@ -227,6 +234,9 @@ /* Define to the one symbol short name of this package. */ #define _EVENT_PACKAGE_TARNAME "" +/* Define to the home page for this package. */ +#define _EVENT_PACKAGE_URL "" + /* Define to the version of this package. */ #define _EVENT_PACKAGE_VERSION "" @@ -249,7 +259,7 @@ #define _EVENT_TIME_WITH_SYS_TIME 1 /* Version number of package */ -#define _EVENT_VERSION "1.4.13-stable" +#define _EVENT_VERSION "1.4.15" /* Define to appropriate substitue if compiler doesnt have __func__ */ /* #undef _EVENT___func__ */
diff --git a/third_party/libevent/stamp-h.in b/third_party/libevent/stamp-h.in new file mode 100644 index 0000000..9788f70 --- /dev/null +++ b/third_party/libevent/stamp-h.in
@@ -0,0 +1 @@ +timestamp
diff --git a/third_party/libevent/test/Makefile.in b/third_party/libevent/test/Makefile.in deleted file mode 100644 index c2d5b31..0000000 --- a/third_party/libevent/test/Makefile.in +++ /dev/null
@@ -1,487 +0,0 @@ -# Makefile.in generated by automake 1.10.1 from Makefile.am. -# @configure_input@ - -# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. -# This Makefile.in is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY, to the extent permitted by law; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A -# PARTICULAR PURPOSE. - -@SET_MAKE@ - -VPATH = @srcdir@ -pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ -pkgincludedir = $(includedir)/@PACKAGE@ -am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd -install_sh_DATA = $(install_sh) -c -m 644 -install_sh_PROGRAM = $(install_sh) -c -install_sh_SCRIPT = $(install_sh) -c -INSTALL_HEADER = $(INSTALL_DATA) -transform = $(program_transform_name) -NORMAL_INSTALL = : -PRE_INSTALL = : -POST_INSTALL = : -NORMAL_UNINSTALL = : -PRE_UNINSTALL = : -POST_UNINSTALL = : -build_triplet = @build@ -host_triplet = @host@ -noinst_PROGRAMS = test-init$(EXEEXT) test-eof$(EXEEXT) \ - test-weof$(EXEEXT) test-time$(EXEEXT) regress$(EXEEXT) \ - bench$(EXEEXT) -subdir = test -DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in -ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/configure.in -am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) -mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs -CONFIG_HEADER = $(top_builddir)/config.h -CONFIG_CLEAN_FILES = -PROGRAMS = $(noinst_PROGRAMS) -am_bench_OBJECTS = bench.$(OBJEXT) -bench_OBJECTS = $(am_bench_OBJECTS) -bench_DEPENDENCIES = ../libevent.la -am_regress_OBJECTS = regress.$(OBJEXT) regress_http.$(OBJEXT) \ - regress_dns.$(OBJEXT) regress_rpc.$(OBJEXT) \ - regress.gen.$(OBJEXT) -regress_OBJECTS = $(am_regress_OBJECTS) -regress_DEPENDENCIES = ../libevent.la -am_test_eof_OBJECTS = test-eof.$(OBJEXT) -test_eof_OBJECTS = $(am_test_eof_OBJECTS) -test_eof_DEPENDENCIES = ../libevent_core.la -am_test_init_OBJECTS = test-init.$(OBJEXT) -test_init_OBJECTS = $(am_test_init_OBJECTS) -test_init_DEPENDENCIES = ../libevent_core.la -am_test_time_OBJECTS = test-time.$(OBJEXT) -test_time_OBJECTS = $(am_test_time_OBJECTS) -test_time_DEPENDENCIES = ../libevent_core.la -am_test_weof_OBJECTS = test-weof.$(OBJEXT) -test_weof_OBJECTS = $(am_test_weof_OBJECTS) -test_weof_DEPENDENCIES = ../libevent_core.la -DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) -depcomp = -am__depfiles_maybe = -COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ - $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ - --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ - $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -CCLD = $(CC) -LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ - --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ - $(LDFLAGS) -o $@ -SOURCES = $(bench_SOURCES) $(regress_SOURCES) $(test_eof_SOURCES) \ - $(test_init_SOURCES) $(test_time_SOURCES) $(test_weof_SOURCES) -DIST_SOURCES = $(bench_SOURCES) $(regress_SOURCES) $(test_eof_SOURCES) \ - $(test_init_SOURCES) $(test_time_SOURCES) $(test_weof_SOURCES) -ETAGS = etags -CTAGS = ctags -DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) -ACLOCAL = 
@ACLOCAL@ -AMTAR = @AMTAR@ -AR = @AR@ -AUTOCONF = @AUTOCONF@ -AUTOHEADER = @AUTOHEADER@ -AUTOMAKE = @AUTOMAKE@ -AWK = @AWK@ -CC = @CC@ -CCDEPMODE = @CCDEPMODE@ -CFLAGS = @CFLAGS@ -CPP = @CPP@ -CPPFLAGS = @CPPFLAGS@ -CXX = @CXX@ -CXXCPP = @CXXCPP@ -CXXDEPMODE = @CXXDEPMODE@ -CXXFLAGS = @CXXFLAGS@ -CYGPATH_W = @CYGPATH_W@ -DEFS = @DEFS@ -DEPDIR = @DEPDIR@ -DSYMUTIL = @DSYMUTIL@ -ECHO = @ECHO@ -ECHO_C = @ECHO_C@ -ECHO_N = @ECHO_N@ -ECHO_T = @ECHO_T@ -EGREP = @EGREP@ -EXEEXT = @EXEEXT@ -F77 = @F77@ -FFLAGS = @FFLAGS@ -GREP = @GREP@ -INSTALL = @INSTALL@ -INSTALL_DATA = @INSTALL_DATA@ -INSTALL_PROGRAM = @INSTALL_PROGRAM@ -INSTALL_SCRIPT = @INSTALL_SCRIPT@ -INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -LDFLAGS = @LDFLAGS@ -LIBOBJS = @LIBOBJS@ -LIBS = @LIBS@ -LIBTOOL = @LIBTOOL@ -LIBTOOL_DEPS = @LIBTOOL_DEPS@ -LN_S = @LN_S@ -LTLIBOBJS = @LTLIBOBJS@ -MAKEINFO = @MAKEINFO@ -MKDIR_P = @MKDIR_P@ -NMEDIT = @NMEDIT@ -OBJEXT = @OBJEXT@ -PACKAGE = @PACKAGE@ -PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ -PACKAGE_NAME = @PACKAGE_NAME@ -PACKAGE_STRING = @PACKAGE_STRING@ -PACKAGE_TARNAME = @PACKAGE_TARNAME@ -PACKAGE_VERSION = @PACKAGE_VERSION@ -PATH_SEPARATOR = @PATH_SEPARATOR@ -RANLIB = @RANLIB@ -SED = @SED@ -SET_MAKE = @SET_MAKE@ -SHELL = @SHELL@ -STRIP = @STRIP@ -VERSION = @VERSION@ -abs_builddir = @abs_builddir@ -abs_srcdir = @abs_srcdir@ -abs_top_builddir = @abs_top_builddir@ -abs_top_srcdir = @abs_top_srcdir@ -ac_ct_CC = @ac_ct_CC@ -ac_ct_CXX = @ac_ct_CXX@ -ac_ct_F77 = @ac_ct_F77@ -am__include = @am__include@ -am__leading_dot = @am__leading_dot@ -am__quote = @am__quote@ -am__tar = @am__tar@ -am__untar = @am__untar@ -bindir = @bindir@ -build = @build@ -build_alias = @build_alias@ -build_cpu = @build_cpu@ -build_os = @build_os@ -build_vendor = @build_vendor@ -builddir = @builddir@ -datadir = @datadir@ -datarootdir = @datarootdir@ -docdir = @docdir@ -dvidir = @dvidir@ -exec_prefix = @exec_prefix@ -host = @host@ -host_alias = @host_alias@ -host_cpu = @host_cpu@ -host_os = @host_os@ -host_vendor = @host_vendor@ -htmldir = @htmldir@ -includedir = @includedir@ -infodir = @infodir@ -install_sh = @install_sh@ -libdir = @libdir@ -libexecdir = @libexecdir@ -localedir = @localedir@ -localstatedir = @localstatedir@ -mandir = @mandir@ -mkdir_p = @mkdir_p@ -oldincludedir = @oldincludedir@ -pdfdir = @pdfdir@ -prefix = @prefix@ -program_transform_name = @program_transform_name@ -psdir = @psdir@ -sbindir = @sbindir@ -sharedstatedir = @sharedstatedir@ -srcdir = @srcdir@ -sysconfdir = @sysconfdir@ -target_alias = @target_alias@ -top_build_prefix = @top_build_prefix@ -top_builddir = @top_builddir@ -top_srcdir = @top_srcdir@ -AUTOMAKE_OPTIONS = foreign no-dependencies -AM_CFLAGS = -I$(top_srcdir) -I$(top_srcdir)/compat -EXTRA_DIST = regress.rpc regress.gen.h regress.gen.c -BUILT_SOURCES = regress.gen.c regress.gen.h -test_init_SOURCES = test-init.c -test_init_LDADD = ../libevent_core.la -test_eof_SOURCES = test-eof.c -test_eof_LDADD = ../libevent_core.la -test_weof_SOURCES = test-weof.c -test_weof_LDADD = ../libevent_core.la -test_time_SOURCES = test-time.c -test_time_LDADD = ../libevent_core.la -regress_SOURCES = regress.c regress.h regress_http.c regress_dns.c \ - regress_rpc.c \ - regress.gen.c regress.gen.h - -regress_LDADD = ../libevent.la -bench_SOURCES = bench.c -bench_LDADD = ../libevent.la -DISTCLEANFILES = *~ -all: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) all-am - -.SUFFIXES: -.SUFFIXES: .c .lo .o .obj -$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) - @for dep in $?; do \ - case 
'$(am__configure_deps)' in \ - *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ - exit 1;; \ - esac; \ - done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign test/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign test/Makefile -.PRECIOUS: Makefile -Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' in \ - *config.status*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ - *) \ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ - esac; - -$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh - -$(top_srcdir)/configure: $(am__configure_deps) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh -$(ACLOCAL_M4): $(am__aclocal_m4_deps) - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh - -clean-noinstPROGRAMS: - @list='$(noinst_PROGRAMS)'; for p in $$list; do \ - f=`echo $$p|sed 's/$(EXEEXT)$$//'`; \ - echo " rm -f $$p $$f"; \ - rm -f $$p $$f ; \ - done -bench$(EXEEXT): $(bench_OBJECTS) $(bench_DEPENDENCIES) - @rm -f bench$(EXEEXT) - $(LINK) $(bench_OBJECTS) $(bench_LDADD) $(LIBS) -regress$(EXEEXT): $(regress_OBJECTS) $(regress_DEPENDENCIES) - @rm -f regress$(EXEEXT) - $(LINK) $(regress_OBJECTS) $(regress_LDADD) $(LIBS) -test-eof$(EXEEXT): $(test_eof_OBJECTS) $(test_eof_DEPENDENCIES) - @rm -f test-eof$(EXEEXT) - $(LINK) $(test_eof_OBJECTS) $(test_eof_LDADD) $(LIBS) -test-init$(EXEEXT): $(test_init_OBJECTS) $(test_init_DEPENDENCIES) - @rm -f test-init$(EXEEXT) - $(LINK) $(test_init_OBJECTS) $(test_init_LDADD) $(LIBS) -test-time$(EXEEXT): $(test_time_OBJECTS) $(test_time_DEPENDENCIES) - @rm -f test-time$(EXEEXT) - $(LINK) $(test_time_OBJECTS) $(test_time_LDADD) $(LIBS) -test-weof$(EXEEXT): $(test_weof_OBJECTS) $(test_weof_DEPENDENCIES) - @rm -f test-weof$(EXEEXT) - $(LINK) $(test_weof_OBJECTS) $(test_weof_LDADD) $(LIBS) - -mostlyclean-compile: - -rm -f *.$(OBJEXT) - -distclean-compile: - -rm -f *.tab.c - -.c.o: - $(COMPILE) -c $< - -.c.obj: - $(COMPILE) -c `$(CYGPATH_W) '$<'` - -.c.lo: - $(LTCOMPILE) -c -o $@ $< - -mostlyclean-libtool: - -rm -f *.lo - -clean-libtool: - -rm -rf .libs _libs - -ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonemtpy = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - mkid -fID $$unique -tags: TAGS - -TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - tags=; \ - here=`pwd`; \ - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ - test -n "$$unique" || unique=$$empty_fix; \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$tags $$unique; \ - fi -ctags: CTAGS -CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - tags=; \ - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ 
files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - test -z "$(CTAGS_ARGS)$$tags$$unique" \ - || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$tags $$unique - -GTAGS: - here=`$(am__cd) $(top_builddir) && pwd` \ - && cd $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) $$here - -distclean-tags: - -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags - -distdir: $(DISTFILES) - @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ - list='$(DISTFILES)'; \ - dist_files=`for file in $$list; do echo $$file; done | \ - sed -e "s|^$$srcdirstrip/||;t" \ - -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ - case $$dist_files in \ - */*) $(MKDIR_P) `echo "$$dist_files" | \ - sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ - sort -u` ;; \ - esac; \ - for file in $$dist_files; do \ - if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ - if test -d $$d/$$file; then \ - dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ - if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ - fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ - else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ - || exit 1; \ - fi; \ - done -check-am: all-am -check: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) check-am -all-am: Makefile $(PROGRAMS) -installdirs: -install: $(BUILT_SOURCES) - $(MAKE) $(AM_MAKEFLAGS) install-am -install-exec: install-exec-am -install-data: install-data-am -uninstall: uninstall-am - -install-am: all-am - @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am - -installcheck: installcheck-am -install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install -mostlyclean-generic: - -clean-generic: - -distclean-generic: - -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) - -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES) - -maintainer-clean-generic: - @echo "This command is intended for maintainers to use" - @echo "it deletes files that may require special tools to rebuild." 
- -test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES) -clean: clean-am - -clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \ - mostlyclean-am - -distclean: distclean-am - -rm -f Makefile -distclean-am: clean-am distclean-compile distclean-generic \ - distclean-tags - -dvi: dvi-am - -dvi-am: - -html: html-am - -info: info-am - -info-am: - -install-data-am: - -install-dvi: install-dvi-am - -install-exec-am: - -install-html: install-html-am - -install-info: install-info-am - -install-man: - -install-pdf: install-pdf-am - -install-ps: install-ps-am - -installcheck-am: - -maintainer-clean: maintainer-clean-am - -rm -f Makefile -maintainer-clean-am: distclean-am maintainer-clean-generic - -mostlyclean: mostlyclean-am - -mostlyclean-am: mostlyclean-compile mostlyclean-generic \ - mostlyclean-libtool - -pdf: pdf-am - -pdf-am: - -ps: ps-am - -ps-am: - -uninstall-am: - -.MAKE: install-am install-strip - -.PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \ - clean-libtool clean-noinstPROGRAMS ctags distclean \ - distclean-compile distclean-generic distclean-libtool \ - distclean-tags distdir dvi dvi-am html html-am info info-am \ - install install-am install-data install-data-am install-dvi \ - install-dvi-am install-exec install-exec-am install-html \ - install-html-am install-info install-info-am install-man \ - install-pdf install-pdf-am install-ps install-ps-am \ - install-strip installcheck installcheck-am installdirs \ - maintainer-clean maintainer-clean-generic mostlyclean \ - mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ - pdf pdf-am ps ps-am tags uninstall uninstall-am - - -regress.gen.c regress.gen.h: regress.rpc $(top_srcdir)/event_rpcgen.py - $(top_srcdir)/event_rpcgen.py $(srcdir)/regress.rpc || echo "No Python installed" - -test: test-init test-eof test-weof test-time regress - -verify: test - @$(srcdir)/test.sh - -bench test-init test-eof test-weof test-time: ../libevent.la -# Tell versions [3.59,3.63) of GNU make to not export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. -.NOEXPORT:
diff --git a/third_party/libevent/test/Makefile.nmake b/third_party/libevent/test/Makefile.nmake new file mode 100644 index 0000000..320abe7 --- /dev/null +++ b/third_party/libevent/test/Makefile.nmake
@@ -0,0 +1,47 @@ + +CFLAGS=/I.. /I../include /I../WIN32-Code /I../compat /DWIN32 /DHAVE_CONFIG_H + +CFLAGS=$(CFLAGS) /Ox /W3 /wd4996 /nologo + +REGRESS_OBJS=regress.obj regress_http.obj regress_dns.obj \ + regress_rpc.obj regress.gen.obj \ + +OTHER_OBJS=test-init.obj test-eof.obj test-weof.obj test-time.obj \ + bench.obj bench_cascade.obj bench_http.obj bench_httpclient.obj + +PROGRAMS=regress.exe \ + test-init.exe test-eof.exe test-weof.exe test-time.exe + +# Disabled for now: +# bench.exe bench_cascade.exe bench_http.exe bench_httpclient.exe + + +LIBS=..\libevent.lib ws2_32.lib advapi32.lib + +all: $(PROGRAMS) + +regress.exe: $(REGRESS_OBJS) + $(CC) $(CFLAGS) $(LIBS) $(REGRESS_OBJS) + +test-init.exe: test-init.obj + $(CC) $(CFLAGS) $(LIBS) test-init.obj +test-eof.exe: test-eof.obj + $(CC) $(CFLAGS) $(LIBS) test-eof.obj +test-weof.exe: test-weof.obj + $(CC) $(CFLAGS) $(LIBS) test-weof.obj +test-time.exe: test-time.obj + $(CC) $(CFLAGS) $(LIBS) test-time.obj + +bench.exe: bench.obj + $(CC) $(CFLAGS) $(LIBS) bench.obj +bench_cascade.exe: bench_cascade.obj + $(CC) $(CFLAGS) $(LIBS) bench_cascade.obj +bench_http.exe: bench_http.obj + $(CC) $(CFLAGS) $(LIBS) bench_http.obj +bench_httpclient.exe: bench_httpclient.obj + $(CC) $(CFLAGS) $(LIBS) bench_httpclient.obj + +clean: + -del $(REGRESS_OBJS) + -del $(OTHER_OBJS) + -del regress.exe
diff --git a/third_party/libevent/test/regress.c b/third_party/libevent/test/regress.c index 0b7517d..cce7d7d 100644 --- a/third_party/libevent/test/regress.c +++ b/third_party/libevent/test/regress.c
@@ -1020,6 +1020,205 @@ } static void +test_evbuffer_readln(void) +{ + struct evbuffer *evb = evbuffer_new(); + struct evbuffer *evb_tmp = evbuffer_new(); + const char *s; + char *cp = NULL; + size_t sz; + +#define tt_line_eq(content) \ + if (!cp || sz != strlen(content) || strcmp(cp, content)) { \ + fprintf(stdout, "FAILED\n"); \ + exit(1); \ + } +#define tt_assert(expression) \ + if (!(expression)) { \ + fprintf(stdout, "FAILED\n"); \ + exit(1); \ + } \ + + /* Test EOL_ANY. */ + fprintf(stdout, "Testing evbuffer_readln EOL_ANY: "); + + s = "complex silly newline\r\n\n\r\n\n\rmore\0\n"; + evbuffer_add(evb, s, strlen(s)+2); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_ANY); + tt_line_eq("complex silly newline"); + free(cp); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_ANY); + if (!cp || sz != 5 || memcmp(cp, "more\0\0", 6)) { + fprintf(stdout, "FAILED\n"); + exit(1); + } + if (evb->totallen == 0) { + fprintf(stdout, "FAILED\n"); + exit(1); + } + s = "\nno newline"; + evbuffer_add(evb, s, strlen(s)); + free(cp); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_ANY); + tt_line_eq(""); + free(cp); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_ANY); + tt_assert(!cp); + evbuffer_drain(evb, EVBUFFER_LENGTH(evb)); + tt_assert(EVBUFFER_LENGTH(evb) == 0); + + fprintf(stdout, "OK\n"); + + /* Test EOL_CRLF */ + fprintf(stdout, "Testing evbuffer_readln EOL_CRLF: "); + + s = "Line with\rin the middle\nLine with good crlf\r\n\nfinal\n"; + evbuffer_add(evb, s, strlen(s)); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF); + tt_line_eq("Line with\rin the middle"); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF); + tt_line_eq("Line with good crlf"); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF); + tt_line_eq(""); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF); + tt_line_eq("final"); + s = "x"; + evbuffer_add(evb, s, 1); + free(cp); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF); + tt_assert(!cp); + + fprintf(stdout, "OK\n"); + + /* Test CRLF_STRICT */ + fprintf(stdout, "Testing evbuffer_readln CRLF_STRICT: "); + + s = " and a bad crlf\nand a good one\r\n\r\nMore\r"; + evbuffer_add(evb, s, strlen(s)); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_line_eq("x and a bad crlf\nand a good one"); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_line_eq(""); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_assert(!cp); + evbuffer_add(evb, "\n", 1); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_line_eq("More"); + free(cp); + tt_assert(EVBUFFER_LENGTH(evb) == 0); + + s = "An internal CR\r is not an eol\r\nNor is a lack of one"; + evbuffer_add(evb, s, strlen(s)); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_line_eq("An internal CR\r is not an eol"); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_assert(!cp); + + evbuffer_add(evb, "\r\n", 2); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_line_eq("Nor is a lack of one"); + free(cp); + tt_assert(EVBUFFER_LENGTH(evb) == 0); + + fprintf(stdout, "OK\n"); + + /* Test LF */ + fprintf(stdout, "Testing evbuffer_readln LF: "); + + s = "An\rand a nl\n\nText"; + evbuffer_add(evb, s, strlen(s)); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_LF); + tt_line_eq("An\rand a nl"); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_LF); + tt_line_eq(""); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_LF); + tt_assert(!cp); + free(cp); 
+ evbuffer_add(evb, "\n", 1); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_LF); + tt_line_eq("Text"); + free(cp); + + fprintf(stdout, "OK\n"); + + /* Test CRLF_STRICT - across boundaries */ + fprintf(stdout, + "Testing evbuffer_readln CRLF_STRICT across boundaries: "); + + s = " and a bad crlf\nand a good one\r"; + evbuffer_add(evb_tmp, s, strlen(s)); + evbuffer_add_buffer(evb, evb_tmp); + s = "\n\r"; + evbuffer_add(evb_tmp, s, strlen(s)); + evbuffer_add_buffer(evb, evb_tmp); + s = "\nMore\r"; + evbuffer_add(evb_tmp, s, strlen(s)); + evbuffer_add_buffer(evb, evb_tmp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_line_eq(" and a bad crlf\nand a good one"); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_line_eq(""); + free(cp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_assert(!cp); + free(cp); + evbuffer_add(evb, "\n", 1); + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_CRLF_STRICT); + tt_line_eq("More"); + free(cp); cp = NULL; + if (EVBUFFER_LENGTH(evb) != 0) { + fprintf(stdout, "FAILED\n"); + exit(1); + } + + fprintf(stdout, "OK\n"); + + /* Test memory problem */ + fprintf(stdout, "Testing evbuffer_readln memory problem: "); + + s = "one line\ntwo line\nblue line"; + evbuffer_add(evb_tmp, s, strlen(s)); + evbuffer_add_buffer(evb, evb_tmp); + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_LF); + tt_line_eq("one line"); + free(cp); cp = NULL; + + cp = evbuffer_readln(evb, &sz, EVBUFFER_EOL_LF); + tt_line_eq("two line"); + free(cp); cp = NULL; + + fprintf(stdout, "OK\n"); + + test_ok = 1; + evbuffer_free(evb); + evbuffer_free(evb_tmp); + if (cp) free(cp); +} + +static void test_evbuffer_find(void) { u_char* p; @@ -1640,6 +1839,7 @@ test_evbuffer(); test_evbuffer_find(); + test_evbuffer_readln(); test_bufferevent(); test_bufferevent_watermarks();
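The new regression test above exercises evbuffer_readln() with the different end-of-line styles (EOL_ANY, EOL_CRLF, EOL_CRLF_STRICT, EOL_LF), including lines that span internal buffer boundaries. A small usage sketch of that API, assuming the evbuffer_readln() declaration and EVBUFFER_EOL_* constants used by the test are available from event.h in this tree; the returned string is heap-allocated and must be freed, and NULL means no complete line is buffered yet:

#include <stdio.h>
#include <stdlib.h>
#include <event.h>

int main(void)
{
	struct evbuffer *buf = evbuffer_new();
	size_t len;
	char *line;

	evbuffer_add(buf, "first\r\nsecond\npartial", 21);

	while ((line = evbuffer_readln(buf, &len, EVBUFFER_EOL_CRLF)) != NULL) {
		printf("line (%zu bytes): %s\n", len, line);
		free(line);
	}
	/* "partial" stays in the buffer until a terminator arrives. */
	evbuffer_free(buf);
	return 0;
}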
diff --git a/third_party/libevent/test/regress.gen.c b/third_party/libevent/test/regress.gen.c deleted file mode 100644 index 0918fc0e..0000000 --- a/third_party/libevent/test/regress.gen.c +++ /dev/null
@@ -1,878 +0,0 @@ -/* - * Automatically generated from ./regress.rpc - * by event_rpcgen.py/0.1. DO NOT EDIT THIS FILE. - */ - -#include <sys/types.h> -#ifdef _EVENT_HAVE_SYS_TIME_H -#include <sys/time.h> -#endif -#include <stdlib.h> -#include <string.h> -#include <assert.h> -#define EVENT_NO_STRUCT -#include <event.h> - -#ifdef _EVENT___func__ -#define __func__ _EVENT___func__ -#endif - -#include "./regress.gen.h" - -void event_err(int eval, const char *fmt, ...); -void event_warn(const char *fmt, ...); -void event_errx(int eval, const char *fmt, ...); -void event_warnx(const char *fmt, ...); - - -/* - * Implementation of msg - */ - -static struct msg_access_ __msg_base = { - msg_from_name_assign, - msg_from_name_get, - msg_to_name_assign, - msg_to_name_get, - msg_attack_assign, - msg_attack_get, - msg_run_assign, - msg_run_get, - msg_run_add, -}; - -struct msg * -msg_new(void) -{ - struct msg *tmp; - if ((tmp = malloc(sizeof(struct msg))) == NULL) { - event_warn("%s: malloc", __func__); - return (NULL); - } - tmp->base = &__msg_base; - - tmp->from_name_data = NULL; - tmp->from_name_set = 0; - - tmp->to_name_data = NULL; - tmp->to_name_set = 0; - - tmp->attack_data = NULL; - tmp->attack_set = 0; - - tmp->run_data = NULL; - tmp->run_length = 0; - tmp->run_num_allocated = 0; - tmp->run_set = 0; - - return (tmp); -} - - - - -struct run * -msg_run_add(struct msg *msg) -{ - if (++msg->run_length >= msg->run_num_allocated) { - int tobe_allocated = msg->run_num_allocated; - struct run ** new_data = NULL; - tobe_allocated = !tobe_allocated ? 1 : tobe_allocated << 1; - new_data = (struct run **) realloc(msg->run_data, - tobe_allocated * sizeof(struct run *)); - if (new_data == NULL) - goto error; - msg->run_data = new_data; - msg->run_num_allocated = tobe_allocated; - } - msg->run_data[msg->run_length - 1] = run_new(); - if (msg->run_data[msg->run_length - 1] == NULL) - goto error; - msg->run_set = 1; - return (msg->run_data[msg->run_length - 1]); -error: - --msg->run_length; - return (NULL); -} - - -int -msg_from_name_assign(struct msg *msg, - const char * value) -{ - if (msg->from_name_data != NULL) - free(msg->from_name_data); - if ((msg->from_name_data = strdup(value)) == NULL) - return (-1); - msg->from_name_set = 1; - return (0); -} - -int -msg_to_name_assign(struct msg *msg, - const char * value) -{ - if (msg->to_name_data != NULL) - free(msg->to_name_data); - if ((msg->to_name_data = strdup(value)) == NULL) - return (-1); - msg->to_name_set = 1; - return (0); -} - -int -msg_attack_assign(struct msg *msg, - const struct kill* value) -{ - struct evbuffer *tmp = NULL; - if (msg->attack_set) { - kill_clear(msg->attack_data); - msg->attack_set = 0; - } else { - msg->attack_data = kill_new(); - if (msg->attack_data == NULL) { - event_warn("%s: kill_new()", __func__); - goto error; - } - } - if ((tmp = evbuffer_new()) == NULL) { - event_warn("%s: evbuffer_new()", __func__); - goto error; - } - kill_marshal(tmp, value); - if (kill_unmarshal(msg->attack_data, tmp) == -1) { - event_warnx("%s: kill_unmarshal", __func__); - goto error; - } - msg->attack_set = 1; - evbuffer_free(tmp); - return (0); - error: - if (tmp != NULL) - evbuffer_free(tmp); - if (msg->attack_data != NULL) { - kill_free(msg->attack_data); - msg->attack_data = NULL; - } - return (-1); -} - -int -msg_run_assign(struct msg *msg, int off, - const struct run * value) -{ - struct evbuffer *tmp = NULL; - if (!msg->run_set || off < 0 || off >= msg->run_length) - return (-1); - run_clear(msg->run_data[off]); - if ((tmp = evbuffer_new()) 
== NULL) { - event_warn("%s: evbuffer_new()", __func__); - goto error; - } - run_marshal(tmp, value); - if (run_unmarshal(msg->run_data[off], tmp) == -1) { - event_warnx("%s: run_unmarshal", __func__); - goto error; - } - evbuffer_free(tmp); - return (0); -error: - if (tmp != NULL) - evbuffer_free(tmp); - run_clear(msg->run_data[off]); - return (-1); -} - -int -msg_from_name_get(struct msg *msg, char * *value) -{ - if (msg->from_name_set != 1) - return (-1); - *value = msg->from_name_data; - return (0); -} - -int -msg_to_name_get(struct msg *msg, char * *value) -{ - if (msg->to_name_set != 1) - return (-1); - *value = msg->to_name_data; - return (0); -} - -int -msg_attack_get(struct msg *msg, struct kill* *value) -{ - if (msg->attack_set != 1) { - msg->attack_data = kill_new(); - if (msg->attack_data == NULL) - return (-1); - msg->attack_set = 1; - } - *value = msg->attack_data; - return (0); -} - -int -msg_run_get(struct msg *msg, int offset, - struct run * *value) -{ - if (!msg->run_set || offset < 0 || offset >= msg->run_length) - return (-1); - *value = msg->run_data[offset]; - return (0); -} - -void -msg_clear(struct msg *tmp) -{ - if (tmp->from_name_set == 1) { - free (tmp->from_name_data); - tmp->from_name_data = NULL; - tmp->from_name_set = 0; - } - if (tmp->to_name_set == 1) { - free (tmp->to_name_data); - tmp->to_name_data = NULL; - tmp->to_name_set = 0; - } - if (tmp->attack_set == 1) { - kill_free(tmp->attack_data); - tmp->attack_data = NULL; - tmp->attack_set = 0; - } - if (tmp->run_set == 1) { - int i; - for (i = 0; i < tmp->run_length; ++i) { - run_free(tmp->run_data[i]); - } - free(tmp->run_data); - tmp->run_data = NULL; - tmp->run_set = 0; - tmp->run_length = 0; - tmp->run_num_allocated = 0; - } -} - -void -msg_free(struct msg *tmp) -{ - if (tmp->from_name_data != NULL) - free (tmp->from_name_data); - if (tmp->to_name_data != NULL) - free (tmp->to_name_data); - if (tmp->attack_data != NULL) - kill_free(tmp->attack_data); - if (tmp->run_data != NULL) { - int i; - for (i = 0; i < tmp->run_length; ++i) { - run_free(tmp->run_data[i]); - tmp->run_data[i] = NULL; - } - free(tmp->run_data); - tmp->run_data = NULL; - tmp->run_length = 0; - tmp->run_num_allocated = 0; - } - free(tmp); -} - -void -msg_marshal(struct evbuffer *evbuf, const struct msg *tmp){ - evtag_marshal_string(evbuf, MSG_FROM_NAME, tmp->from_name_data); - evtag_marshal_string(evbuf, MSG_TO_NAME, tmp->to_name_data); - if (tmp->attack_set) { - evtag_marshal_kill(evbuf, MSG_ATTACK, tmp->attack_data); - } - { - int i; - for (i = 0; i < tmp->run_length; ++i) { - evtag_marshal_run(evbuf, MSG_RUN, tmp->run_data[i]); - } - } -} - -int -msg_unmarshal(struct msg *tmp, struct evbuffer *evbuf) -{ - ev_uint32_t tag; - while (EVBUFFER_LENGTH(evbuf) > 0) { - if (evtag_peek(evbuf, &tag) == -1) - return (-1); - switch (tag) { - - case MSG_FROM_NAME: - - if (tmp->from_name_set) - return (-1); - if (evtag_unmarshal_string(evbuf, MSG_FROM_NAME, &tmp->from_name_data) == -1) { - event_warnx("%s: failed to unmarshal from_name", __func__); - return (-1); - } - tmp->from_name_set = 1; - break; - - case MSG_TO_NAME: - - if (tmp->to_name_set) - return (-1); - if (evtag_unmarshal_string(evbuf, MSG_TO_NAME, &tmp->to_name_data) == -1) { - event_warnx("%s: failed to unmarshal to_name", __func__); - return (-1); - } - tmp->to_name_set = 1; - break; - - case MSG_ATTACK: - - if (tmp->attack_set) - return (-1); - tmp->attack_data = kill_new(); - if (tmp->attack_data == NULL) - return (-1); - if (evtag_unmarshal_kill(evbuf, MSG_ATTACK, 
tmp->attack_data) == -1) { - event_warnx("%s: failed to unmarshal attack", __func__); - return (-1); - } - tmp->attack_set = 1; - break; - - case MSG_RUN: - - if (msg_run_add(tmp) == NULL) - return (-1); - if (evtag_unmarshal_run(evbuf, MSG_RUN, - tmp->run_data[tmp->run_length - 1]) == -1) { - --tmp->run_length; - event_warnx("%s: failed to unmarshal run", __func__); - return (-1); - } - tmp->run_set = 1; - break; - - default: - return -1; - } - } - - if (msg_complete(tmp) == -1) - return (-1); - return (0); -} - -int -msg_complete(struct msg *msg) -{ - if (!msg->from_name_set) - return (-1); - if (!msg->to_name_set) - return (-1); - if (msg->attack_set && kill_complete(msg->attack_data) == -1) - return (-1); - { - int i; - for (i = 0; i < msg->run_length; ++i) { - if (run_complete(msg->run_data[i]) == -1) - return (-1); - } - } - return (0); -} - -int -evtag_unmarshal_msg(struct evbuffer *evbuf, ev_uint32_t need_tag, struct msg *msg) -{ - ev_uint32_t tag; - int res = -1; - - struct evbuffer *tmp = evbuffer_new(); - - if (evtag_unmarshal(evbuf, &tag, tmp) == -1 || tag != need_tag) - goto error; - - if (msg_unmarshal(msg, tmp) == -1) - goto error; - - res = 0; - - error: - evbuffer_free(tmp); - return (res); -} - -void -evtag_marshal_msg(struct evbuffer *evbuf, ev_uint32_t tag, const struct msg *msg) -{ - struct evbuffer *_buf = evbuffer_new(); - assert(_buf != NULL); - evbuffer_drain(_buf, -1); - msg_marshal(_buf, msg); - evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), EVBUFFER_LENGTH(_buf)); - evbuffer_free(_buf); -} - -/* - * Implementation of kill - */ - -static struct kill_access_ __kill_base = { - kill_weapon_assign, - kill_weapon_get, - kill_action_assign, - kill_action_get, - kill_how_often_assign, - kill_how_often_get, -}; - -struct kill * -kill_new(void) -{ - struct kill *tmp; - if ((tmp = malloc(sizeof(struct kill))) == NULL) { - event_warn("%s: malloc", __func__); - return (NULL); - } - tmp->base = &__kill_base; - - tmp->weapon_data = NULL; - tmp->weapon_set = 0; - - tmp->action_data = NULL; - tmp->action_set = 0; - - tmp->how_often_data = 0; - tmp->how_often_set = 0; - - return (tmp); -} - - - - -int -kill_weapon_assign(struct kill *msg, - const char * value) -{ - if (msg->weapon_data != NULL) - free(msg->weapon_data); - if ((msg->weapon_data = strdup(value)) == NULL) - return (-1); - msg->weapon_set = 1; - return (0); -} - -int -kill_action_assign(struct kill *msg, - const char * value) -{ - if (msg->action_data != NULL) - free(msg->action_data); - if ((msg->action_data = strdup(value)) == NULL) - return (-1); - msg->action_set = 1; - return (0); -} - -int -kill_how_often_assign(struct kill *msg, const ev_uint32_t value) -{ - msg->how_often_set = 1; - msg->how_often_data = value; - return (0); -} - -int -kill_weapon_get(struct kill *msg, char * *value) -{ - if (msg->weapon_set != 1) - return (-1); - *value = msg->weapon_data; - return (0); -} - -int -kill_action_get(struct kill *msg, char * *value) -{ - if (msg->action_set != 1) - return (-1); - *value = msg->action_data; - return (0); -} - -int -kill_how_often_get(struct kill *msg, ev_uint32_t *value) -{ - if (msg->how_often_set != 1) - return (-1); - *value = msg->how_often_data; - return (0); -} - -void -kill_clear(struct kill *tmp) -{ - if (tmp->weapon_set == 1) { - free (tmp->weapon_data); - tmp->weapon_data = NULL; - tmp->weapon_set = 0; - } - if (tmp->action_set == 1) { - free (tmp->action_data); - tmp->action_data = NULL; - tmp->action_set = 0; - } - tmp->how_often_set = 0; -} - -void -kill_free(struct kill *tmp) -{ - 
if (tmp->weapon_data != NULL) - free (tmp->weapon_data); - if (tmp->action_data != NULL) - free (tmp->action_data); - free(tmp); -} - -void -kill_marshal(struct evbuffer *evbuf, const struct kill *tmp){ - evtag_marshal_string(evbuf, KILL_WEAPON, tmp->weapon_data); - evtag_marshal_string(evbuf, KILL_ACTION, tmp->action_data); - if (tmp->how_often_set) { - evtag_marshal_int(evbuf, KILL_HOW_OFTEN, tmp->how_often_data); - } -} - -int -kill_unmarshal(struct kill *tmp, struct evbuffer *evbuf) -{ - ev_uint32_t tag; - while (EVBUFFER_LENGTH(evbuf) > 0) { - if (evtag_peek(evbuf, &tag) == -1) - return (-1); - switch (tag) { - - case KILL_WEAPON: - - if (tmp->weapon_set) - return (-1); - if (evtag_unmarshal_string(evbuf, KILL_WEAPON, &tmp->weapon_data) == -1) { - event_warnx("%s: failed to unmarshal weapon", __func__); - return (-1); - } - tmp->weapon_set = 1; - break; - - case KILL_ACTION: - - if (tmp->action_set) - return (-1); - if (evtag_unmarshal_string(evbuf, KILL_ACTION, &tmp->action_data) == -1) { - event_warnx("%s: failed to unmarshal action", __func__); - return (-1); - } - tmp->action_set = 1; - break; - - case KILL_HOW_OFTEN: - - if (tmp->how_often_set) - return (-1); - if (evtag_unmarshal_int(evbuf, KILL_HOW_OFTEN, &tmp->how_often_data) == -1) { - event_warnx("%s: failed to unmarshal how_often", __func__); - return (-1); - } - tmp->how_often_set = 1; - break; - - default: - return -1; - } - } - - if (kill_complete(tmp) == -1) - return (-1); - return (0); -} - -int -kill_complete(struct kill *msg) -{ - if (!msg->weapon_set) - return (-1); - if (!msg->action_set) - return (-1); - return (0); -} - -int -evtag_unmarshal_kill(struct evbuffer *evbuf, ev_uint32_t need_tag, struct kill *msg) -{ - ev_uint32_t tag; - int res = -1; - - struct evbuffer *tmp = evbuffer_new(); - - if (evtag_unmarshal(evbuf, &tag, tmp) == -1 || tag != need_tag) - goto error; - - if (kill_unmarshal(msg, tmp) == -1) - goto error; - - res = 0; - - error: - evbuffer_free(tmp); - return (res); -} - -void -evtag_marshal_kill(struct evbuffer *evbuf, ev_uint32_t tag, const struct kill *msg) -{ - struct evbuffer *_buf = evbuffer_new(); - assert(_buf != NULL); - evbuffer_drain(_buf, -1); - kill_marshal(_buf, msg); - evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), EVBUFFER_LENGTH(_buf)); - evbuffer_free(_buf); -} - -/* - * Implementation of run - */ - -static struct run_access_ __run_base = { - run_how_assign, - run_how_get, - run_some_bytes_assign, - run_some_bytes_get, - run_fixed_bytes_assign, - run_fixed_bytes_get, -}; - -struct run * -run_new(void) -{ - struct run *tmp; - if ((tmp = malloc(sizeof(struct run))) == NULL) { - event_warn("%s: malloc", __func__); - return (NULL); - } - tmp->base = &__run_base; - - tmp->how_data = NULL; - tmp->how_set = 0; - - tmp->some_bytes_data = NULL; - tmp->some_bytes_length = 0; - tmp->some_bytes_set = 0; - - memset(tmp->fixed_bytes_data, 0, sizeof(tmp->fixed_bytes_data)); - tmp->fixed_bytes_set = 0; - - return (tmp); -} - - - - -int -run_how_assign(struct run *msg, - const char * value) -{ - if (msg->how_data != NULL) - free(msg->how_data); - if ((msg->how_data = strdup(value)) == NULL) - return (-1); - msg->how_set = 1; - return (0); -} - -int -run_some_bytes_assign(struct run *msg, const ev_uint8_t * value, ev_uint32_t len) -{ - if (msg->some_bytes_data != NULL) - free (msg->some_bytes_data); - msg->some_bytes_data = malloc(len); - if (msg->some_bytes_data == NULL) - return (-1); - msg->some_bytes_set = 1; - msg->some_bytes_length = len; - memcpy(msg->some_bytes_data, value, len); - return 
(0); -} - -int -run_fixed_bytes_assign(struct run *msg, const ev_uint8_t *value) -{ - msg->fixed_bytes_set = 1; - memcpy(msg->fixed_bytes_data, value, 24); - return (0); -} - -int -run_how_get(struct run *msg, char * *value) -{ - if (msg->how_set != 1) - return (-1); - *value = msg->how_data; - return (0); -} - -int -run_some_bytes_get(struct run *msg, ev_uint8_t * *value, ev_uint32_t *plen) -{ - if (msg->some_bytes_set != 1) - return (-1); - *value = msg->some_bytes_data; - *plen = msg->some_bytes_length; - return (0); -} - -int -run_fixed_bytes_get(struct run *msg, ev_uint8_t **value) -{ - if (msg->fixed_bytes_set != 1) - return (-1); - *value = msg->fixed_bytes_data; - return (0); -} - -void -run_clear(struct run *tmp) -{ - if (tmp->how_set == 1) { - free (tmp->how_data); - tmp->how_data = NULL; - tmp->how_set = 0; - } - if (tmp->some_bytes_set == 1) { - free (tmp->some_bytes_data); - tmp->some_bytes_data = NULL; - tmp->some_bytes_length = 0; - tmp->some_bytes_set = 0; - } - tmp->fixed_bytes_set = 0; - memset(tmp->fixed_bytes_data, 0, sizeof(tmp->fixed_bytes_data)); -} - -void -run_free(struct run *tmp) -{ - if (tmp->how_data != NULL) - free (tmp->how_data); - if (tmp->some_bytes_data != NULL) - free (tmp->some_bytes_data); - free(tmp); -} - -void -run_marshal(struct evbuffer *evbuf, const struct run *tmp){ - evtag_marshal_string(evbuf, RUN_HOW, tmp->how_data); - if (tmp->some_bytes_set) { - evtag_marshal(evbuf, RUN_SOME_BYTES, tmp->some_bytes_data, tmp->some_bytes_length); - } - evtag_marshal(evbuf, RUN_FIXED_BYTES, tmp->fixed_bytes_data, sizeof(tmp->fixed_bytes_data)); -} - -int -run_unmarshal(struct run *tmp, struct evbuffer *evbuf) -{ - ev_uint32_t tag; - while (EVBUFFER_LENGTH(evbuf) > 0) { - if (evtag_peek(evbuf, &tag) == -1) - return (-1); - switch (tag) { - - case RUN_HOW: - - if (tmp->how_set) - return (-1); - if (evtag_unmarshal_string(evbuf, RUN_HOW, &tmp->how_data) == -1) { - event_warnx("%s: failed to unmarshal how", __func__); - return (-1); - } - tmp->how_set = 1; - break; - - case RUN_SOME_BYTES: - - if (tmp->some_bytes_set) - return (-1); - if (evtag_payload_length(evbuf, &tmp->some_bytes_length) == -1) - return (-1); - if (tmp->some_bytes_length > EVBUFFER_LENGTH(evbuf)) - return (-1); - if ((tmp->some_bytes_data = malloc(tmp->some_bytes_length)) == NULL) - return (-1); - if (evtag_unmarshal_fixed(evbuf, RUN_SOME_BYTES, tmp->some_bytes_data, tmp->some_bytes_length) == -1) { - event_warnx("%s: failed to unmarshal some_bytes", __func__); - return (-1); - } - tmp->some_bytes_set = 1; - break; - - case RUN_FIXED_BYTES: - - if (tmp->fixed_bytes_set) - return (-1); - if (evtag_unmarshal_fixed(evbuf, RUN_FIXED_BYTES, tmp->fixed_bytes_data, sizeof(tmp->fixed_bytes_data)) == -1) { - event_warnx("%s: failed to unmarshal fixed_bytes", __func__); - return (-1); - } - tmp->fixed_bytes_set = 1; - break; - - default: - return -1; - } - } - - if (run_complete(tmp) == -1) - return (-1); - return (0); -} - -int -run_complete(struct run *msg) -{ - if (!msg->how_set) - return (-1); - if (!msg->fixed_bytes_set) - return (-1); - return (0); -} - -int -evtag_unmarshal_run(struct evbuffer *evbuf, ev_uint32_t need_tag, struct run *msg) -{ - ev_uint32_t tag; - int res = -1; - - struct evbuffer *tmp = evbuffer_new(); - - if (evtag_unmarshal(evbuf, &tag, tmp) == -1 || tag != need_tag) - goto error; - - if (run_unmarshal(msg, tmp) == -1) - goto error; - - res = 0; - - error: - evbuffer_free(tmp); - return (res); -} - -void -evtag_marshal_run(struct evbuffer *evbuf, ev_uint32_t tag, const struct 
run *msg) -{ - struct evbuffer *_buf = evbuffer_new(); - assert(_buf != NULL); - evbuffer_drain(_buf, -1); - run_marshal(_buf, msg); - evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), EVBUFFER_LENGTH(_buf)); - evbuffer_free(_buf); -} -
diff --git a/third_party/libevent/test/regress.gen.h b/third_party/libevent/test/regress.gen.h deleted file mode 100644 index b1feacd9..0000000 --- a/third_party/libevent/test/regress.gen.h +++ /dev/null
@@ -1,183 +0,0 @@ -/* - * Automatically generated from ./regress.rpc - */ - -#ifndef ___REGRESS_RPC_ -#define ___REGRESS_RPC_ - -#include <event-config.h> -#ifdef _EVENT_HAVE_STDINT_H -#include <stdint.h> -#endif -#define EVTAG_HAS(msg, member) ((msg)->member##_set == 1) -#ifdef __GNUC__ -#define EVTAG_ASSIGN(msg, member, args...) (*(msg)->base->member##_assign)(msg, ## args) -#define EVTAG_GET(msg, member, args...) (*(msg)->base->member##_get)(msg, ## args) -#else -#define EVTAG_ASSIGN(msg, member, ...) (*(msg)->base->member##_assign)(msg, ## __VA_ARGS__) -#define EVTAG_GET(msg, member, ...) (*(msg)->base->member##_get)(msg, ## __VA_ARGS__) -#endif -#define EVTAG_ADD(msg, member) (*(msg)->base->member##_add)(msg) -#define EVTAG_LEN(msg, member) ((msg)->member##_length) - -struct msg; -struct kill; -struct run; - -/* Tag definition for msg */ -enum msg_ { - MSG_FROM_NAME=1, - MSG_TO_NAME=2, - MSG_ATTACK=3, - MSG_RUN=4, - MSG_MAX_TAGS -}; - -/* Structure declaration for msg */ -struct msg_access_ { - int (*from_name_assign)(struct msg *, const char *); - int (*from_name_get)(struct msg *, char * *); - int (*to_name_assign)(struct msg *, const char *); - int (*to_name_get)(struct msg *, char * *); - int (*attack_assign)(struct msg *, const struct kill*); - int (*attack_get)(struct msg *, struct kill* *); - int (*run_assign)(struct msg *, int, const struct run *); - int (*run_get)(struct msg *, int, struct run * *); - struct run * (*run_add)(struct msg *); -}; - -struct msg { - struct msg_access_ *base; - - char *from_name_data; - char *to_name_data; - struct kill* attack_data; - struct run **run_data; - int run_length; - int run_num_allocated; - - ev_uint8_t from_name_set; - ev_uint8_t to_name_set; - ev_uint8_t attack_set; - ev_uint8_t run_set; -}; - -struct msg *msg_new(void); -void msg_free(struct msg *); -void msg_clear(struct msg *); -void msg_marshal(struct evbuffer *, const struct msg *); -int msg_unmarshal(struct msg *, struct evbuffer *); -int msg_complete(struct msg *); -void evtag_marshal_msg(struct evbuffer *, ev_uint32_t, - const struct msg *); -int evtag_unmarshal_msg(struct evbuffer *, ev_uint32_t, - struct msg *); -int msg_from_name_assign(struct msg *, const char *); -int msg_from_name_get(struct msg *, char * *); -int msg_to_name_assign(struct msg *, const char *); -int msg_to_name_get(struct msg *, char * *); -int msg_attack_assign(struct msg *, const struct kill*); -int msg_attack_get(struct msg *, struct kill* *); -int msg_run_assign(struct msg *, int, const struct run *); -int msg_run_get(struct msg *, int, struct run * *); -struct run * msg_run_add(struct msg *); -/* --- msg done --- */ - -/* Tag definition for kill */ -enum kill_ { - KILL_WEAPON=65825, - KILL_ACTION=2, - KILL_HOW_OFTEN=3, - KILL_MAX_TAGS -}; - -/* Structure declaration for kill */ -struct kill_access_ { - int (*weapon_assign)(struct kill *, const char *); - int (*weapon_get)(struct kill *, char * *); - int (*action_assign)(struct kill *, const char *); - int (*action_get)(struct kill *, char * *); - int (*how_often_assign)(struct kill *, const ev_uint32_t); - int (*how_often_get)(struct kill *, ev_uint32_t *); -}; - -struct kill { - struct kill_access_ *base; - - char *weapon_data; - char *action_data; - ev_uint32_t how_often_data; - - ev_uint8_t weapon_set; - ev_uint8_t action_set; - ev_uint8_t how_often_set; -}; - -struct kill *kill_new(void); -void kill_free(struct kill *); -void kill_clear(struct kill *); -void kill_marshal(struct evbuffer *, const struct kill *); -int kill_unmarshal(struct kill *, 
struct evbuffer *); -int kill_complete(struct kill *); -void evtag_marshal_kill(struct evbuffer *, ev_uint32_t, - const struct kill *); -int evtag_unmarshal_kill(struct evbuffer *, ev_uint32_t, - struct kill *); -int kill_weapon_assign(struct kill *, const char *); -int kill_weapon_get(struct kill *, char * *); -int kill_action_assign(struct kill *, const char *); -int kill_action_get(struct kill *, char * *); -int kill_how_often_assign(struct kill *, const ev_uint32_t); -int kill_how_often_get(struct kill *, ev_uint32_t *); -/* --- kill done --- */ - -/* Tag definition for run */ -enum run_ { - RUN_HOW=1, - RUN_SOME_BYTES=2, - RUN_FIXED_BYTES=3, - RUN_MAX_TAGS -}; - -/* Structure declaration for run */ -struct run_access_ { - int (*how_assign)(struct run *, const char *); - int (*how_get)(struct run *, char * *); - int (*some_bytes_assign)(struct run *, const ev_uint8_t *, ev_uint32_t); - int (*some_bytes_get)(struct run *, ev_uint8_t * *, ev_uint32_t *); - int (*fixed_bytes_assign)(struct run *, const ev_uint8_t *); - int (*fixed_bytes_get)(struct run *, ev_uint8_t **); -}; - -struct run { - struct run_access_ *base; - - char *how_data; - ev_uint8_t *some_bytes_data; - ev_uint32_t some_bytes_length; - ev_uint8_t fixed_bytes_data[24]; - - ev_uint8_t how_set; - ev_uint8_t some_bytes_set; - ev_uint8_t fixed_bytes_set; -}; - -struct run *run_new(void); -void run_free(struct run *); -void run_clear(struct run *); -void run_marshal(struct evbuffer *, const struct run *); -int run_unmarshal(struct run *, struct evbuffer *); -int run_complete(struct run *); -void evtag_marshal_run(struct evbuffer *, ev_uint32_t, - const struct run *); -int evtag_unmarshal_run(struct evbuffer *, ev_uint32_t, - struct run *); -int run_how_assign(struct run *, const char *); -int run_how_get(struct run *, char * *); -int run_some_bytes_assign(struct run *, const ev_uint8_t *, ev_uint32_t); -int run_some_bytes_get(struct run *, ev_uint8_t * *, ev_uint32_t *); -int run_fixed_bytes_assign(struct run *, const ev_uint8_t *); -int run_fixed_bytes_get(struct run *, ev_uint8_t **); -/* --- run done --- */ - -#endif /* ___REGRESS_RPC_ */
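The header being deleted above is what test code includes to drive the generated msg/kill/run structures through the EVTAG_* macros. A minimal sketch of that usage, assuming the accessors and tag values declared above (error handling trimmed; this mirrors, but is not copied from, libevent's regress.c):

    #include <sys/types.h>
    #include <event.h>
    #include "regress.gen.h"

    /* Build a msg, marshal it into an evbuffer, and parse it back. */
    static int
    msg_roundtrip(void)
    {
        struct msg *out = msg_new();
        struct msg *in = msg_new();
        struct run *run;
        struct evbuffer *buf = evbuffer_new();
        ev_uint8_t fixed[24] = { 0 };
        int res = -1;

        /* Scalar and string fields go through the generated _assign hooks. */
        EVTAG_ASSIGN(out, from_name, "niels");
        EVTAG_ASSIGN(out, to_name, "phoenix");

        /* Array fields grow one element at a time via the _add hook. */
        if ((run = EVTAG_ADD(out, run)) == NULL)
            goto done;
        EVTAG_ASSIGN(run, how, "very fast");
        EVTAG_ASSIGN(run, fixed_bytes, fixed);

        msg_marshal(buf, out);
        if (msg_unmarshal(in, buf) != -1)
            res = 0;

    done:
        evbuffer_free(buf);
        msg_free(out);
        msg_free(in);
        return (res);
    }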
diff --git a/third_party/libevent/test/regress_http.c b/third_party/libevent/test/regress_http.c index 1e2a1eb..943b29d 100644 --- a/third_party/libevent/test/regress_http.c +++ b/third_party/libevent/test/regress_http.c
@@ -71,6 +71,7 @@ void http_post_cb(struct evhttp_request *req, void *arg); void http_dispatcher_cb(struct evhttp_request *req, void *arg); static void http_large_delay_cb(struct evhttp_request *req, void *arg); +static void http_badreq_cb(struct evhttp_request *req, void *arg); static struct evhttp * http_setup(short *pport, struct event_base *base) @@ -96,6 +97,7 @@ evhttp_set_cb(myhttp, "/chunked", http_chunked_cb, NULL); evhttp_set_cb(myhttp, "/postit", http_post_cb, NULL); evhttp_set_cb(myhttp, "/largedelay", http_large_delay_cb, NULL); + evhttp_set_cb(myhttp, "/badrequest", http_badreq_cb, NULL); evhttp_set_cb(myhttp, "/", http_dispatcher_cb, NULL); *pport = port; @@ -377,6 +379,155 @@ fprintf(stdout, "OK\n"); } +static void +http_badreq_cb(struct evhttp_request *req, void *arg) +{ + struct evbuffer *buf = evbuffer_new(); + + evhttp_add_header(req->output_headers, "Content-Type", "text/xml; charset=UTF-8"); + evbuffer_add_printf(buf, "Hello, %s!", "127.0.0.1"); + + evhttp_send_reply(req, HTTP_OK, "OK", buf); + evbuffer_free(buf); +} + +static void +http_badreq_errorcb(struct bufferevent *bev, short what, void *arg) +{ + event_debug(("%s: called (what=%04x, arg=%p)", __func__, what, arg)); + /* ignore */ +} + +static void +http_badreq_readcb(struct bufferevent *bev, void *arg) +{ + const char *what = "Hello, 127.0.0.1"; + const char *bad_request = "400 Bad Request"; + + event_debug(("%s: %s\n", __func__, EVBUFFER_DATA(bev->input))); + + if (evbuffer_find(bev->input, + (const unsigned char *) bad_request, strlen(bad_request)) != NULL) { + event_debug(("%s: bad request detected", __func__)); + test_ok = -10; + bufferevent_disable(bev, EV_READ); + event_loopexit(NULL); + return; + } + + if (evbuffer_find(bev->input, + (const unsigned char*) what, strlen(what)) != NULL) { + struct evhttp_request *req = evhttp_request_new(NULL, NULL); + enum message_read_status done; + + req->kind = EVHTTP_RESPONSE; + done = evhttp_parse_firstline(req, bev->input); + if (done != ALL_DATA_READ) + goto out; + + done = evhttp_parse_headers(req, bev->input); + if (done != ALL_DATA_READ) + goto out; + + if (done == 1 && + evhttp_find_header(req->input_headers, + "Content-Type") != NULL) + test_ok++; + + out: + evhttp_request_free(req); + evbuffer_drain(bev->input, EVBUFFER_LENGTH(bev->input)); + } + + shutdown(bev->ev_read.ev_fd, SHUT_WR); +} + +static void +http_badreq_successcb(int fd, short what, void *arg) +{ + event_debug(("%s: called (what=%04x, arg=%p)", __func__, what, arg)); + event_loopexit(NULL); +} + +static void +http_bad_request(void) +{ + struct timeval tv; + struct bufferevent *bev; + int fd; + const char *http_request; + short port = -1; + + test_ok = 0; + fprintf(stdout, "Testing \"Bad Request\" on connection close: "); + + http = http_setup(&port, NULL); + + /* bind to a second socket */ + if (evhttp_bind_socket(http, "127.0.0.1", port + 1) == -1) { + fprintf(stdout, "FAILED (bind)\n"); + exit(1); + } + + /* NULL request test */ + fd = http_connect("127.0.0.1", port); + + /* Stupid thing to send a request */ + bev = bufferevent_new(fd, http_badreq_readcb, http_writecb, + http_badreq_errorcb, NULL); + bufferevent_enable(bev, EV_READ); + + /* real NULL request */ + http_request = ""; + + shutdown(fd, SHUT_WR); + timerclear(&tv); + tv.tv_usec = 10000; + event_once(-1, EV_TIMEOUT, http_badreq_successcb, bev, &tv); + + event_dispatch(); + + bufferevent_free(bev); + EVUTIL_CLOSESOCKET(fd); + + if (test_ok != 0) { + fprintf(stdout, "FAILED\n"); + exit(1); + } + + /* Second answer (BAD REQUEST) on 
connection close */ + + /* connect to the second port */ + fd = http_connect("127.0.0.1", port + 1); + + /* Stupid thing to send a request */ + bev = bufferevent_new(fd, http_badreq_readcb, http_writecb, + http_badreq_errorcb, NULL); + bufferevent_enable(bev, EV_READ); + + /* first half of the http request */ + http_request = + "GET /badrequest HTTP/1.0\r\n" \ + "Connection: Keep-Alive\r\n" \ + "\r\n"; + + bufferevent_write(bev, http_request, strlen(http_request)); + + timerclear(&tv); + tv.tv_usec = 10000; + event_once(-1, EV_TIMEOUT, http_badreq_successcb, bev, &tv); + + event_dispatch(); + + evhttp_free(http); + + if (test_ok != 2) { + fprintf(stdout, "FAILED\n"); + exit(1); + } + + fprintf(stdout, "OK\n"); +} static struct evhttp_connection *delayed_client; static void @@ -1453,6 +1604,121 @@ fprintf(stdout, "OK\n"); } +/* + * Testing client reset of server chunked connections + */ + +struct terminate_state { + struct evhttp_request *req; + struct bufferevent *bev; + int fd; +} terminate_state; + +static void +terminate_chunked_trickle_cb(int fd, short events, void *arg) +{ + struct terminate_state *state = arg; + struct evbuffer *evb = evbuffer_new(); + struct timeval tv; + + if (evhttp_request_get_connection(state->req) == NULL) { + test_ok = 1; + evhttp_request_free(state->req); + event_loopexit(NULL); + return; + } + + evbuffer_add_printf(evb, "%p", evb); + evhttp_send_reply_chunk(state->req, evb); + evbuffer_free(evb); + + tv.tv_sec = 0; + tv.tv_usec = 3000; + event_once(-1, EV_TIMEOUT, terminate_chunked_trickle_cb, arg, &tv); +} + +static void +terminate_chunked_cb(struct evhttp_request *req, void *arg) +{ + struct terminate_state *state = arg; + struct timeval tv; + + state->req = req; + + evhttp_send_reply_start(req, HTTP_OK, "OK"); + + tv.tv_sec = 0; + tv.tv_usec = 3000; + event_once(-1, EV_TIMEOUT, terminate_chunked_trickle_cb, arg, &tv); +} + +static void +terminate_chunked_client(int fd, short event, void *arg) +{ + struct terminate_state *state = arg; + bufferevent_free(state->bev); + EVUTIL_CLOSESOCKET(state->fd); +} + +static void +terminate_readcb(struct bufferevent *bev, void *arg) +{ + /* just drop the data */ + evbuffer_drain(bev->output, -1); +} + + +static void +http_terminate_chunked_test(void) +{ + struct bufferevent *bev = NULL; + struct timeval tv; + const char *http_request; + short port = -1; + int fd = -1; + + test_ok = 0; + fprintf(stdout, "Testing Terminated Chunked Connection: "); + + http = http_setup(&port, NULL); + evhttp_del_cb(http, "/test"); + evhttp_set_cb(http, "/test", terminate_chunked_cb, &terminate_state); + + fd = http_connect("127.0.0.1", port); + + /* Stupid thing to send a request */ + bev = bufferevent_new(fd, terminate_readcb, http_writecb, + http_errorcb, NULL); + + terminate_state.fd = fd; + terminate_state.bev = bev; + + /* first half of the http request */ + http_request = + "GET /test HTTP/1.1\r\n" + "Host: some\r\n\r\n"; + + bufferevent_write(bev, http_request, strlen(http_request)); + evutil_timerclear(&tv); + tv.tv_usec = 10000; + event_once(-1, EV_TIMEOUT, terminate_chunked_client, &terminate_state, + &tv); + + event_dispatch(); + + if (test_ok != 1) { + fprintf(stdout, "FAILED\n"); + exit(1); + } + + fprintf(stdout, "OK\n"); + + if (fd >= 0) + EVUTIL_CLOSESOCKET(fd); + if (http) + evhttp_free(http); +} + void http_suite(void) { @@ -1462,8 +1728,9 @@ http_basic_test(); http_connection_test(0 /* not-persistent */); http_connection_test(1 /* persistent */); - http_close_detection(0 /* with delay */); + http_close_detection(0 /* 
without delay */); http_close_detection(1 /* with delay */); + http_bad_request(); http_post_test(); http_failure_test(); http_highport_test(); @@ -1473,4 +1740,5 @@ http_negative_content_length_test(); http_chunked_test(); + http_terminate_chunked_test(); }
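The new http_terminate_chunked_test above starts a chunked reply with evhttp_send_reply_start()/evhttp_send_reply_chunk() and then has the client drop the connection mid-stream. For contrast, a minimal sketch of the normal chunked path, assuming libevent 1.4's evhttp API and a handler registered with evhttp_set_cb() like the ones above:

    static void
    chunked_ok_cb(struct evhttp_request *req, void *arg)
    {
        struct evbuffer *chunk = evbuffer_new();

        /* Headers go out first; the body follows as individual chunks. */
        evhttp_send_reply_start(req, HTTP_OK, "OK");

        evbuffer_add_printf(chunk, "first chunk\n");
        evhttp_send_reply_chunk(req, chunk);   /* drains the buffer, so it can be reused */

        evbuffer_add_printf(chunk, "second chunk\n");
        evhttp_send_reply_chunk(req, chunk);

        /* Ends the chunked transfer; the terminate test never reaches this
         * point because the client tears the connection down instead. */
        evhttp_send_reply_end(req);
        evbuffer_free(chunk);
    }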
diff --git a/third_party/libevent/test/test.sh b/third_party/libevent/test/test.sh old mode 100644 new mode 100755
diff --git a/third_party/libevent/whatsnew-14.txt b/third_party/libevent/whatsnew-14.txt new file mode 100644 index 0000000..769dda78 --- /dev/null +++ b/third_party/libevent/whatsnew-14.txt
@@ -0,0 +1,167 @@ +What's New In Libevent 1.4: + +0. About this document + + This document describes the key differences between Libevent 1.3 and + Libevent 1.4, from a user's point of view. It was most recently + updated based on features from libevent 1.4.2-rc. + +1. Packaging Issues. + +1.1. The great library division. + + The libevent source now builds two libraries: libevent_core and + libevent_extra. The libevent_core library includes event loops, + timers, buffer code, and various small compatibility functions. The + libevent_extra library includes code for HTTP, DNS, RPC, and so on. + Thus, if you're writing software that only uses libevent's event + loop, you should link against only the libevent_core library, + whereas if you're writing software that uses libevent's protocol + support as well, you need to link libevent_extra as well. + + For backward compatibility, libevent also builds a library called + "libevent" that includes everything. + +1.2. The event-config.h header + + Libevent configure script now builds two headers from its configure + script: config.h (which it uses internally) and event-config.h + (which it installs as a header file). All of the macros in + event-config.h are modified so that they're safe to include in other + projects. This allows libevent's header files (like event.h and + evutil.h) information about platform configuration. + + What does this mean for you? As of 1.4.x, it should never be + necessary to include extra files or define extra types before you + include event.h (or any other libevent header); event.h can now look + at the information in event-config.h and figure out what it needs to + include. + +1.3. Documentation + + Libevent now includes better doxygen documentation. It's not + perfect or complete, though; if you find a mistake, please let us + know. + +1.4. Libtool usage + + We now use libtool's library versioning support correctly. If we + don't mess this up, it means that binaries linked against old + version of libevent should continue working when we make changes to + libevent that don't break backward compatibility. + +1.5. Portability + + Libevent now builds with MSVC again. We've only tested it with MSVC + 2005, and the project files might not be right. Please let us know + if you run into any issues. + + Libevent now builds on platforms where /bin/sh is not bash. + + Libevent's regression test no longer requires Python to be + installed. + +2. New and Improved APIs: + + (This list includes functions that are new, functions whose behavior + has changed, and functions that were included in previous releases + but which never actually worked before.) + +2.1. Utility functions are defined in evutil.h + + Libevent now exposes a small set of functions for cross-platform + network programming in evutil.h, on the theory that they've been + useful enough to us that other people may likely want to use them + too. These are mainly workarounds for Windows issues for now: they + include evutil_socketpair (to fake socketpair on platforms that + don't have it) and evutil_make_socket_nonblocking (to make a socket + nonblocking in a cross-platform way. See the header for more + information. + +2.2. In the libevent core. + + The event_base_free() function now works. Previously, it would + crash with an assertion failure if there were events pending on a + base. Now, it simply deletes all the pending events and frees the + base. Be careful -- this might leak fds and memory associated with + the old events. 
To avoid leaks, you should still remove all the + events and free their resources before you delete the base. + + Libevent should now work properly with fork(). Just call + event_reinit() on your event base after the fork call, and it should + work okay. Please let us know about any bugs you find. + + There's a new event_base_new() function that acts just like + event_init(), but does not replace the default base. If you are + using multiple event bases in your code, you should just use + event_base_new() instead of event_init(), to avoid accidental bugs. + + There's new event_loopbreak() function to make a current event loop + stop exiting and return. Unlike event_loopexit, it stops subsequent + pending events from getting executed. This behavior is useful for + scripting languages to implement exceptions from inside callbacks. + + There's a new event_base_get_method() function, for use in place of + event_get_method() in multi-base applications. + +2.3. New in HTTP. + + There's an evhttp_connection_set_local_address() function you can + use to set the local address of an HTTP connection. + + HTTP/1.1 chunking now correctly ends chunks with '\r\n'. + +2.4. New in DNS + + Instead of picking your method for generating DNS transaction IDs at + startup, you can use evdns_set_transaction_id() to provide a + transaction ID function at runtime. + + The "class" field in evdns_server_request is now renamed to + dns_question_class, so that it won't break compilation under C++. + This uses some preprocessor hacks so that C code using the old name + won't break. Eventually, the old name will be deprecated entirely; + please don't use it. + +2.5. New in RPC + + There are now hooks on RPC input and output; can be used to + implement RPC independent processing such as compression or + authentication. + + RPC tags can now be up to 32 bits. This is wire-compatible, but + changes some of the types in the APIs. Please let us know if this + is problematic for you. + +3. Big bugfixes + + We've done a lot, with help from users on different platforms, to + make the different backends behave more similarly with respect to + signals and timeouts. The kqueue and solaris backends were the big + offenders previously, but they should be better now. Windows should + be better too, though it's likely that problems remain there. + + The libevent headers (though not the source files!) should now build + cleanly on C++. + + (For more bugfixes, see the ChangeLog file. These are only the + biggies.) + +4. Big performance improvements + + Libevent now uses a min-heap rather than a red-black tree to track + timeouts. This means that finding the next timeout to fire is now + O(1) instead of (lg n). + + The win32 select-based backend now uses a red-black tree to map + SOCKET handles to event structures. This changes the performance + characteristics of the event loop on win32 from O(n^2) to O(n lg n). + Not perfect, but better. + +5. Removed code and features + + The rtsig backend is now removed. It hasn't even compiled for a + while, and nobody seemed to miss it very much. All the platforms + that have rtsig seem to have a better option instead these days. + Please let us know if rtsig was crucial for you. +
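Section 2.2 above describes event_base_new() and event_loopbreak() in prose only. A minimal sketch of how they fit together (an illustration, not part of the imported file; it assumes the 1.4 headers and the per-base event_base_loopbreak() variant declared in event.h):

    #include <event.h>

    static void
    on_timer(int fd, short events, void *arg)
    {
        struct event_base *base = arg;
        /* Unlike event_loopexit(), loopbreak returns from the dispatch
         * loop immediately, without running other pending callbacks. */
        event_base_loopbreak(base);
    }

    int
    main(void)
    {
        struct event_base *base = event_base_new();  /* leaves the default base alone */
        struct event ev;
        struct timeval tv = { 1, 0 };

        evtimer_set(&ev, on_timer, base);
        event_base_set(base, &ev);    /* attach the event to this base, not the default one */
        evtimer_add(&ev, &tv);

        event_base_dispatch(base);    /* returns once on_timer() breaks the loop */
        event_base_free(base);        /* no longer asserts on a non-empty base in 1.4 */
        return 0;
    }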
diff --git a/third_party/mojo/mojo_edk_tests.gyp b/third_party/mojo/mojo_edk_tests.gyp index 34f590847..a970700 100644 --- a/third_party/mojo/mojo_edk_tests.gyp +++ b/third_party/mojo/mojo_edk_tests.gyp
@@ -90,7 +90,6 @@ 'dependencies': [ '../../testing/gtest.gyp:gtest', 'mojo_edk.gyp:mojo_run_all_unittests', - 'mojo_public.gyp:mojo_cpp_bindings', 'mojo_public.gyp:mojo_environment_standalone', 'mojo_public.gyp:mojo_public_test_utils', 'mojo_public.gyp:mojo_utility', @@ -125,7 +124,6 @@ 'dependencies': [ '../../testing/gtest.gyp:gtest', 'mojo_edk.gyp:mojo_run_all_unittests', - 'mojo_public.gyp:mojo_cpp_bindings', 'mojo_public.gyp:mojo_public_test_utils', 'mojo_public.gyp:mojo_utility', ],
diff --git a/third_party/mojo/mojo_public.gyp b/third_party/mojo/mojo_public.gyp index eaf0cb2..833776d 100644 --- a/third_party/mojo/mojo_public.gyp +++ b/third_party/mojo/mojo_public.gyp
@@ -150,9 +150,6 @@ 'src/mojo/public/cpp/bindings/lib/shared_ptr.h', 'src/mojo/public/cpp/bindings/lib/string_serialization.h', 'src/mojo/public/cpp/bindings/lib/string_serialization.cc', - 'src/mojo/public/cpp/bindings/lib/thread_checker.h', - 'src/mojo/public/cpp/bindings/lib/thread_checker_posix.cc', - 'src/mojo/public/cpp/bindings/lib/thread_checker_posix.h', 'src/mojo/public/cpp/bindings/lib/validate_params.h', 'src/mojo/public/cpp/bindings/lib/validation_errors.cc', 'src/mojo/public/cpp/bindings/lib/validation_errors.h',
diff --git a/third_party/mojo/src/mojo/public/cpp/bindings/BUILD.gn b/third_party/mojo/src/mojo/public/cpp/bindings/BUILD.gn index c91bdd5..14eb550c 100644 --- a/third_party/mojo/src/mojo/public/cpp/bindings/BUILD.gn +++ b/third_party/mojo/src/mojo/public/cpp/bindings/BUILD.gn
@@ -81,9 +81,6 @@ "lib/shared_data.h", "lib/shared_ptr.h", "lib/template_util.h", - "lib/thread_checker.h", - "lib/thread_checker_posix.cc", - "lib/thread_checker_posix.h", ] mojo_sdk_deps = [ "mojo/public/cpp/system" ]
diff --git a/third_party/mojo/src/mojo/public/cpp/bindings/lib/router.cc b/third_party/mojo/src/mojo/public/cpp/bindings/lib/router.cc index d384541f..1e81c39 100644 --- a/third_party/mojo/src/mojo/public/cpp/bindings/lib/router.cc +++ b/third_party/mojo/src/mojo/public/cpp/bindings/lib/router.cc
@@ -93,13 +93,11 @@ } bool Router::Accept(Message* message) { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); MOJO_DCHECK(!message->has_flag(kMessageExpectsResponse)); return connector_.Accept(message); } bool Router::AcceptWithResponder(Message* message, MessageReceiver* responder) { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); MOJO_DCHECK(message->has_flag(kMessageExpectsResponse)); // Reserve 0 in case we want it to convey special meaning in the future. @@ -117,13 +115,11 @@ } void Router::EnableTestingMode() { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); testing_mode_ = true; connector_.set_enforce_errors_from_incoming_receiver(false); } bool Router::HandleIncomingMessage(Message* message) { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); if (message->has_flag(kMessageExpectsResponse)) { if (incoming_receiver_) { MessageReceiverWithStatus* responder = new ResponderThunk(weak_self_);
diff --git a/third_party/mojo/src/mojo/public/cpp/bindings/lib/router.h b/third_party/mojo/src/mojo/public/cpp/bindings/lib/router.h index 8f848d86..faa7ba5 100644 --- a/third_party/mojo/src/mojo/public/cpp/bindings/lib/router.h +++ b/third_party/mojo/src/mojo/public/cpp/bindings/lib/router.h
@@ -11,9 +11,7 @@ #include "mojo/public/cpp/bindings/lib/connector.h" #include "mojo/public/cpp/bindings/lib/filter_chain.h" #include "mojo/public/cpp/bindings/lib/shared_data.h" -#include "mojo/public/cpp/bindings/lib/thread_checker.h" #include "mojo/public/cpp/environment/environment.h" -#include "mojo/public/cpp/environment/logging.h" namespace mojo { namespace internal { @@ -40,24 +38,14 @@ // Returns true if an error was encountered while reading from the pipe or // waiting to read from the pipe. - bool encountered_error() const { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); - return connector_.encountered_error(); - } + bool encountered_error() const { return connector_.encountered_error(); } // Is the router bound to a MessagePipe handle? - bool is_valid() const { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); - return connector_.is_valid(); - } + bool is_valid() const { return connector_.is_valid(); } - void CloseMessagePipe() { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); - connector_.CloseMessagePipe(); - } + void CloseMessagePipe() { connector_.CloseMessagePipe(); } ScopedMessagePipeHandle PassMessagePipe() { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); return connector_.PassMessagePipe(); } @@ -69,17 +57,14 @@ // Blocks the current thread until the first incoming method call, i.e., // either a call to a client method or a callback method, or |deadline|. bool WaitForIncomingMessage(MojoDeadline deadline) { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); return connector_.WaitForIncomingMessage(deadline); } // See Binding for details of pause/resume. void PauseIncomingMethodCallProcessing() { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); connector_.PauseIncomingMethodCallProcessing(); } void ResumeIncomingMethodCallProcessing() { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); connector_.ResumeIncomingMethodCallProcessing(); } @@ -93,10 +78,7 @@ MessagePipeHandle handle() const { return connector_.handle(); } // Returns true if this Router has any pending callbacks. - bool has_pending_responders() const { - MOJO_DCHECK(thread_checker_.CalledOnValidThread()); - return !responders_.empty(); - } + bool has_pending_responders() const { return !responders_.empty(); } private: typedef std::map<uint64_t, MessageReceiver*> ResponderMap; @@ -125,7 +107,6 @@ ResponderMap responders_; uint64_t next_request_id_; bool testing_mode_; - ThreadChecker thread_checker_; }; } // namespace internal
diff --git a/third_party/mojo/src/mojo/public/cpp/bindings/lib/shared_data.h b/third_party/mojo/src/mojo/public/cpp/bindings/lib/shared_data.h index 2676224c..54c9346 100644 --- a/third_party/mojo/src/mojo/public/cpp/bindings/lib/shared_data.h +++ b/third_party/mojo/src/mojo/public/cpp/bindings/lib/shared_data.h
@@ -5,9 +5,6 @@ #ifndef MOJO_PUBLIC_CPP_BINDINGS_LIB_SHARED_DATA_H_ #define MOJO_PUBLIC_CPP_BINDINGS_LIB_SHARED_DATA_H_ -#include <assert.h> - -#include "mojo/public/cpp/bindings/lib/thread_checker.h" #include "mojo/public/cpp/system/macros.h" namespace mojo { @@ -56,12 +53,8 @@ Holder() : value(), ref_count_(1) {} Holder(const T& value) : value(value), ref_count_(1) {} - void Retain() { - assert(thread_checker_.CalledOnValidThread()); - ++ref_count_; - } + void Retain() { ++ref_count_; } void Release() { - assert(thread_checker_.CalledOnValidThread()); if (--ref_count_ == 0) delete this; } @@ -70,7 +63,6 @@ private: int ref_count_; - ThreadChecker thread_checker_; MOJO_DISALLOW_COPY_AND_ASSIGN(Holder); };
diff --git a/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker.h b/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker.h deleted file mode 100644 index da45d0a..0000000 --- a/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker.h +++ /dev/null
@@ -1,37 +0,0 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef MOJO_PUBLIC_CPP_BINDINGS_LIB_THREAD_CHECKER_H_ -#define MOJO_PUBLIC_CPP_BINDINGS_LIB_THREAD_CHECKER_H_ - -#include "mojo/public/cpp/system/macros.h" - -#if !defined(_WIN32) -#include "mojo/public/cpp/bindings/lib/thread_checker_posix.h" -#endif - -namespace mojo { -namespace internal { - -class ThreadCheckerDoNothing { - public: - bool CalledOnValidThread() const MOJO_WARN_UNUSED_RESULT { - return true; - } -}; - -// ThreadChecker is a class used to verify that some methods of a class are -// called from the same thread. It is meant to be a member variable of a class. -// The entire lifecycle of a ThreadChecker must occur on a single thread. -// In Release mode (without dcheck_always_on), ThreadChecker does nothing. -#if !defined(_WIN32) && (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)) -using ThreadChecker = ThreadCheckerPosix; -#else -using ThreadChecker = ThreadCheckerDoNothing; -#endif - -} // namespace internal -} // namespace mojo - -#endif // MOJO_PUBLIC_CPP_BINDINGS_LIB_THREAD_CHECKER_H_
diff --git a/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker_posix.cc b/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker_posix.cc deleted file mode 100644 index c8a16e2..0000000 --- a/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker_posix.cc +++ /dev/null
@@ -1,24 +0,0 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "mojo/public/cpp/bindings/lib/thread_checker_posix.h" - -#include <assert.h> - -namespace mojo { -namespace internal { - -ThreadCheckerPosix::ThreadCheckerPosix() : attached_thread_id_(pthread_self()) { -} - -ThreadCheckerPosix::~ThreadCheckerPosix() { - assert(CalledOnValidThread()); -} - -bool ThreadCheckerPosix::CalledOnValidThread() const { - return pthread_equal(pthread_self(), attached_thread_id_); -} - -} // namespace internal -} // namespace mojo
diff --git a/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker_posix.h b/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker_posix.h deleted file mode 100644 index 4701b889..0000000 --- a/third_party/mojo/src/mojo/public/cpp/bindings/lib/thread_checker_posix.h +++ /dev/null
@@ -1,31 +0,0 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef MOJO_PUBLIC_CPP_BINDINGS_LIB_THREAD_CHECKER_POSIX_H_ -#define MOJO_PUBLIC_CPP_BINDINGS_LIB_THREAD_CHECKER_POSIX_H_ - -#include <pthread.h> - -#include "mojo/public/cpp/system/macros.h" - -namespace mojo { -namespace internal { - -// An implementation of ThreadChecker for POSIX systems. -class ThreadCheckerPosix { - public: - ThreadCheckerPosix(); - ~ThreadCheckerPosix(); - - bool CalledOnValidThread() const MOJO_WARN_UNUSED_RESULT; - private: - const pthread_t attached_thread_id_; - - MOJO_DISALLOW_COPY_AND_ASSIGN(ThreadCheckerPosix); -}; - -} // namespace internal -} // namespace mojo - -#endif // MOJO_PUBLIC_CPP_BINDINGS_LIB_THREAD_CHECKER_POSIX_H_
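The ThreadCheckerPosix being deleted above reduces to remembering pthread_self() at construction and comparing with pthread_equal() on later calls. A minimal C sketch of that idea (illustration only; the Mojo code above is C++ and compiles the check out in release builds):

    #include <pthread.h>

    struct thread_checker {
        pthread_t attached_thread_id;
    };

    static void
    thread_checker_init(struct thread_checker *tc)
    {
        tc->attached_thread_id = pthread_self();  /* remember the creating thread */
    }

    static int
    called_on_valid_thread(const struct thread_checker *tc)
    {
        /* pthread_t is opaque, so pthread_equal() is the portable comparison. */
        return pthread_equal(pthread_self(), tc->attached_thread_id);
    }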
diff --git a/tools/checklicenses/checklicenses.py b/tools/checklicenses/checklicenses.py index 68998d6f..54081745 100755 --- a/tools/checklicenses/checklicenses.py +++ b/tools/checklicenses/checklicenses.py
@@ -151,6 +151,14 @@ 'GPL (v3 or later)', 'MPL (v1.1) LGPL (unversioned/unknown version)', ], + + # The project is BSD-licensed but the individual files do not have + # consistent license headers. Also, this is just used in a utility + # and not shipped. https://github.com/waylan/Python-Markdown/issues/435 + 'third_party/Python-Markdown': [ + 'UNKNOWN', + ], + 'third_party/WebKit': [ 'UNKNOWN', ],
diff --git a/tools/md_browser/OWNERS b/tools/md_browser/OWNERS new file mode 100644 index 0000000..3fc266c --- /dev/null +++ b/tools/md_browser/OWNERS
@@ -0,0 +1,2 @@ +dpranke@chromium.org +nodir@chromium.org
diff --git a/tools/md_browser/README.md b/tools/md_browser/README.md new file mode 100644 index 0000000..05d6b9d6 --- /dev/null +++ b/tools/md_browser/README.md
@@ -0,0 +1,27 @@ +# md_browser + +This is a simple tool to render the markdown docs in a chromium checkout +locally. It is written in Python and uses the Python 'markdown' package, +which is checked into src/third_party. + +md_browser attempts to emulate the flavor of Markdown implemented by +[Gitiles](https://gerrit.googlesource.com/gitiles/+/master/Documentation/markdown.md). + +Gitiles is the source browser running on https://chromium.googlesource.com, +and can be run locally, but to do so requires a Java install and a Buck +install, which can be slightly annoying to set up on Mac or Windows. + +This is a lighter-weight solution, which also allows you to preview uncommitted +changes (i.e., it just serves files out of the filesystem, and is not a +full Git repo browser like Gitiles is). + +To run md_browser: + +1. cd to the top of your chromium checkout + +2. run `python tools/md_browser/md_browser.py` + +3. There is no step three. + +This will run a local web server on port 8080 that points to the top +of the repo. You can specify a different port with the `-p` flag.
diff --git a/tools/md_browser/doc.css b/tools/md_browser/doc.css new file mode 100644 index 0000000..93ea5fc --- /dev/null +++ b/tools/md_browser/doc.css
@@ -0,0 +1,298 @@ +/** + * Copyright 2015 The Chromium Authors. All rights reserved. + * Use of this source code is governed by a BSD-style license that can be + * found in the LICENSE file. + */ + +/* This file is cloned from + * https://gerrit.googlesource.com/gitiles/+/master/gitiles-servlet/src/main/resources/com/google/gitiles/static/doc.css + */ + +html.doc-page, .doc { + font-family: arial,sans-serif; +} +.doc-page body { + margin: 0; +} + +.banner { + min-height: 44px; + margin: 0; + padding: 14px 15px 13px; + border-bottom: 1px solid #eee; +} +.banner h1, .banner h2 { + float: left; + font-size: 32px; + font-weight: 300; + line-height: 1.375; + margin: 0; +} +.banner img { + margin: -1px 10px -4px 0px; + vertical-align: middle; +} +.banner a, .banner a:hover { + text-decoration: none; +} +.banner, .banner a:link, .banner a:visited { + color: #777; +} +.banner h2:before { + border-right: 1px solid #eee; + content: ""; + float: left; + height: 44px; + margin: 0 12px 0 14px; +} + +.nav, .footer-line { + color: #333; + padding: 0 15px; + background: #eee; +} +.nav ul { + list-style: none; + margin: 0; + padding: 6px 0; +} +.nav li { + float: left; + font-size: 14px; + line-height: 1.43; + margin: 0 20px 0 0; + padding: 6px 0; +} +.nav li a, .footer-line a { + color: #7a7af9; +} +.nav li a:hover { + color: #0000f9; +} +.banner:after, .nav ul:after, .cols:after { + clear: both; + content: ""; + display: block; +} + +.nav-aux, .doc { + max-width: 978px; +} +.nav-aux, .doc-page .doc { + margin: auto; +} + +.footer-break { + clear: both; + margin: 120px 0 0 0; +} +.footer-line { + font-size: 13px; + line-height: 30px; + height: 30px; +} +.footer-line ul { + list-style: none; + margin: 0; + padding: 0; +} +.footer-line li { + display: inline; +} +.footer-line li+li:before { + content: "·"; + padding: 0 5px; +} +.footer-line .nav-aux { + position: relative; +} +.gitiles-att { + color: #A0ADCC; + position: absolute; + top: 0; + right: 0; +} +.gitiles-att a { + font-style: italic; +} + +/* Markdown rendered in /+doc/ or tree view page . 
*/ + +.doc { + color: #444; + font-size: 13px; + line-height: normal; +} + +.doc h1, .doc h2, .doc h3, .doc h4, .doc h5, .doc h6 { + font-family: "open sans",arial,sans-serif; + font-weight: bold; + color: #444; + height: auto; + white-space: normal; + overflow: visible; + margin: 0.67em 0 0.67em 0; +} +.doc h1 { + font-size: 20px; + margin: 0.67em 0 0.67em 0; +} +.doc h2 { + font-size: 16px; + margin: 0.67em 0 0.67em 0; +} +.doc h3 { + font-size: 14px; + margin: 0.67em 0 0.67em 0; +} +.doc h4 { + font-size: 13px; + margin: 1em 0 1em 0; +} +.doc h5 { + font-size: 13px; + margin: 1.3em 0 1.3em 0; +} +.doc h6 { + font-size: 13px; + margin: 1.6em 0 1.6em 0; +} + +.doc a { text-decoration: none; } +.doc a:link { color: #245dc1; } +.doc a:visited { color: #7759ae; } +.doc a:hover { text-decoration: underline; } + +.doc ul, .doc ol { + margin: 10px 10px 10px 30px; + padding: 0; +} + +.doc img { + border: 0; + max-width: 100%; +} +.doc iframe { + min-width: 100px; + min-height: 30px; +} +iframe.noborder { + border: 0; +} + +.doc em { + font-weight: normal; + font-style: italic; +} +.doc strong { + font-weight: bold; + color: inherit; +} + +.doc pre { + border: 1px solid silver; + background: #fafafa; + margin: 0 2em 0 2em; + padding: 2px; +} +.doc code, .doc .code { + color: #060; + font: 13px/1.54 "courier new",courier,monospace; +} + +.doc dl dt { + margin-top: 1em; +} + +.doc table { + border-collapse: collapse; + border-spacing: 0; +} +.doc th { + text-align: center; +} +.doc th, .doc td { + border: 1px solid #eee; + padding: 4px 12px; + vertical-align: top; +} +.doc th { + background-color: #f5f5f5; +} + +.toc { + margin-top: 30px; +} +.toc-aux { + padding: 2px; + background: #f9f9f9; + border: 1px solid #f2f2f2; + border-radius: 4px; +} +.toc h2 { + margin: 0 0 5px 0; +} +.toc ul { + margin: 0 0 0 30px; +} +.toc ul li { + margin-left: 0px; + list-style: disc; +} +.toc ul ul li { + list-style: circle; +} + +.note, .promo, .aside { + border: 1px solid; + border-radius: 4px; + margin: 10px 0; + padding: 10px; +} +.note { + background: #fffbe4; + border-color: #f8f6e6; +} +.promo { + background: #f6f9ff; + border-color: #eff2f9; +} +.aside { + background: #f9f9f9; + border-color: #f2f2f2; +} +.note :first-child, +.promo :first-child, +.aside :first-child { + margin-top: 0; +} +.note p:last-child, +.promo p:last-child, +.aside p:last-child { + margin-bottom: 0; +} + +.cols { + margin: 0 -1.533%; + width: 103.067%; +} +.col-1, .col-2, .col-3, .col-4, .col-5, .col-6, +.col-7, .col-8, .col-9, .col-10, .col-11, .col-12 { + float: left; + margin: 0 1.488% 20px; +} +.col-1 { width: 5.357%; } +.col-2 { width: 13.690%; } +.col-3 { width: 22.024%; } +.col-4 { width: 30.357%; } +.col-5 { width: 38.690%; } +.col-6 { width: 47.024%; } +.col-7 { width: 55.357%; } +.col-8 { width: 63.690%; } +.col-9 { width: 72.024%; } +.col-10 { width: 80.357%; } +.col-11 { width: 88.690%; } +.col-12 { width: 97.024%; } +.cols hr { + width: 80%; +}
diff --git a/tools/md_browser/footer.html b/tools/md_browser/footer.html new file mode 100644 index 0000000..8aab624 --- /dev/null +++ b/tools/md_browser/footer.html
@@ -0,0 +1,8 @@ +<div class="footer-break"></div> +<div class="footer-line"> +<div class="nav-aux"> +<div class="gitiles-att"></div> +</div> +</div> +</body> +</html>
diff --git a/tools/md_browser/header.html b/tools/md_browser/header.html new file mode 100644 index 0000000..939eb943 --- /dev/null +++ b/tools/md_browser/header.html
@@ -0,0 +1,7 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> +<html class="doc-page"> +<head> +<link rel="stylesheet" type="text/css" href="/doc.css" /> +</head> +<body> +<div class="doc">
diff --git a/tools/md_browser/md_browser.py b/tools/md_browser/md_browser.py new file mode 100644 index 0000000..7fb51eb2 --- /dev/null +++ b/tools/md_browser/md_browser.py
@@ -0,0 +1,102 @@ +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Simple Markdown browser for a Git checkout.""" +from __future__ import print_function + +import SimpleHTTPServer +import SocketServer +import argparse +import codecs +import os +import socket +import sys + + +THIS_DIR = os.path.abspath(os.path.dirname(__file__)) +SRC_DIR = os.path.dirname(os.path.dirname(THIS_DIR)) +sys.path.append(os.path.join(SRC_DIR, 'third_party', 'Python-Markdown')) +import markdown + + +def main(argv): + parser = argparse.ArgumentParser(prog='md_browser') + parser.add_argument('-p', '--port', type=int, default=8080, + help='port to run on (default = %(default)s)') + args = parser.parse_args(argv) + + try: + s = Server(args.port, SRC_DIR) + print("Listening on http://localhost:%s/" % args.port) + s.serve_forever() + s.shutdown() + return 0 + except KeyboardInterrupt: + return 130 + + +class Server(SocketServer.TCPServer): + def __init__(self, port, top_level): + SocketServer.TCPServer.__init__(self, ('0.0.0.0', port), Handler) + self.port = port + self.top_level = top_level + + def server_bind(self): + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.socket.bind(self.server_address) + + +class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler): + def do_GET(self): + full_path = os.path.abspath(os.path.join(self.server.top_level, + self.path[1:])) + if not full_path.startswith(SRC_DIR): + self._DoUnknown() + elif self.path == '/doc.css': + self._WriteTemplate('doc.css') + elif not os.path.exists(full_path): + self._DoNotFound() + elif self.path.lower().endswith('.md'): + self._DoMD() + else: + self._DoUnknown() + + def _DoMD(self): + extensions = [ + 'markdown.extensions.fenced_code', + 'markdown.extensions.tables', + 'markdown.extensions.toc', + ] + + contents = self._Read(self.path[1:]) + md_fragment = markdown.markdown(contents, + extensions=extensions, + output_format='html4').encode('utf-8') + try: + self._WriteTemplate('header.html') + self.wfile.write(md_fragment) + self._WriteTemplate('footer.html') + except: + raise + + def _DoNotFound(self): + self.wfile.write('<html><body>%s not found</body></html>' % self.path) + + def _DoUnknown(self): + self.wfile.write('<html><body>I do not know how to serve %s.</body>' + '</html>' % self.path) + + def _Read(self, relpath): + assert not relpath.startswith(os.sep) + path = os.path.join(self.server.top_level, relpath) + with codecs.open(path, encoding='utf-8') as fp: + return fp.read() + + def _WriteTemplate(self, template): + contents = self._Read(os.path.join('tools', 'md_browser', template)) + self.wfile.write(contents.encode('utf-8')) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:]))
diff --git a/tools/metrics/histograms/histograms.xml b/tools/metrics/histograms/histograms.xml index 5d5ffb4..9ed7a2a3 100644 --- a/tools/metrics/histograms/histograms.xml +++ b/tools/metrics/histograms/histograms.xml
@@ -5838,12 +5838,27 @@ </summary> </histogram> +<histogram name="DataReductionProxy.HistoricalDataUsageLoadTime" + units="milliseconds"> + <owner>kundaji@chromium.org</owner> + <summary> + Time taken to load historical data usage from Level DB into memory. This UMA + is reported each time the method to load historical data usage is called, + which happens when user views data usage history. + </summary> +</histogram> + <histogram name="DataReductionProxy.LevelDBOpenStatus" enum="DataReductionProxyStoreStatus"> <owner>kundaji@chromium.org</owner> <summary>Status of calling Open() on Data Reduction Proxy LevelDB.</summary> </histogram> +<histogram name="DataReductionProxy.LevelDBSize" units="KB"> + <owner>kundaji@chromium.org</owner> + <summary>Size of Data Reduction Proxy LevelDB measured at startup.</summary> +</histogram> + <histogram name="DataReductionProxy.LoFi.ImplicitOptOutAction" enum="DataReductionProxyLoFiImplicitOptOutAction"> <owner>bengr@chromium.org</owner> @@ -37159,6 +37174,33 @@ </summary> </histogram> +<histogram name="RendererScheduler.UserModel.GestureDuration" + units="milliseconds"> + <owner>alexclarke@chromium.org</owner> + <summary>Duration of gestures (scrolls and pinches).</summary> +</histogram> + +<histogram name="RendererScheduler.UserModel.GesturePredictedCorrectly" + units="GesturePredictionResult"> + <owner>alexclarke@chromium.org</owner> + <summary>Whether a user gesture was predicted correctly.</summary> +</histogram> + +<histogram name="RendererScheduler.UserModel.GestureStartTimeSinceModelReset" + units="milliseconds"> + <owner>alexclarke@chromium.org</owner> + <summary> + Time between when the UserModel was last reset (which happens on navigation) + and a gesture starting. + </summary> +</histogram> + +<histogram name="RendererScheduler.UserModel.TimeBetweenGestures" + units="milliseconds"> + <owner>alexclarke@chromium.org</owner> + <summary>Time between subsequent gestures (scrolls and pinches).</summary> +</histogram> + <histogram name="RendererSyncIPC.ElapsedTime" units="milliseconds"> <owner>ppi@chromium.org</owner> <summary> @@ -61378,6 +61420,12 @@ <int value="20" label="Window resized double tap"/> </enum> +<enum name="GesturePredictionResult" type="int"> + <int value="0" label="Gesture occured and was predicted"/> + <int value="1" label="Gesture occured but was not predicted"/> + <int value="2" label="Gesture predicted but didn't occur"/> +</enum> + <enum name="GetPerfDataOutcome" type="int"> <int value="0" label="Success."> Perf data was collected, parsed and attached to the UMA protobuf
diff --git a/tools/metrics/rappor/rappor.xml b/tools/metrics/rappor/rappor.xml index e8ce7fc..f0d662f 100644 --- a/tools/metrics/rappor/rappor.xml +++ b/tools/metrics/rappor/rappor.xml
@@ -469,6 +469,42 @@ </summary> </rappor-metric> +<rappor-metric name="PageLoad.CoarseTiming.NavigationToFirstLayout" + type="UMA_RAPPOR_TYPE"> + <owner>csharrison@chromium.org</owner> + <owner>bmcquade@chromium.org</owner> + <summary> + The eTLD+1 of the website visited, along with a bitfield representing + bucket membership in a coarse histogram. The bucket's name is its lower + bound. + </summary> + <string-field name="Domain"> + <summary> + The domain+registry of the URL. + </summary> + </string-field> + <flags-field name="Bucket"> + <flag>Bit 0: 0_2_SECONDS</flag> + <flag>Bit 1: 2_4_SECONDS</flag> + <flag>Bit 2: 4_8_SECONDS</flag> + <flag>Bit 3: 8_16_SECONDS</flag> + <flag>Bit 4: 16_32_SECONDS</flag> + <flag>Bit 5: 32_INF_SECONDS</flag> + <summary> + Bitfield representing the bucket when the time to first layout occurred + </summary> + </flags-field> + <flags-field name="IsSlow"> + <flag>Bit 0</flag> + <summary> + Whether or not the navigation to first layout took over ten seconds. + We send this over to ease in analysis. The Bucket field has many + dimensions of noise (i.e. bits flipped), so it will be much harder + to de-noise than this one. + </summary> + </flags-field> +</rappor-metric> + <rappor-metric name="Permissions.Action.DurableStorage" type="SAFEBROWSING_RAPPOR_TYPE"> <owner>kcarattini@chromium.org</owner>
diff --git a/tools/perf/benchmarks/power.py b/tools/perf/benchmarks/power.py index f0c882b6..a5c70f8 100644 --- a/tools/perf/benchmarks/power.py +++ b/tools/perf/benchmarks/power.py
@@ -81,6 +81,21 @@ def Name(cls): return 'power.top_10' +@benchmark.Enabled('mac') +class PowerGpuRasterizationTop10(perf_benchmark.PerfBenchmark): + """Top 10 quiescent power test with GPU rasterization enabled.""" + tag = 'gpu_rasterization' + test = power.QuiescentPower + page_set = page_sets.Top10PageSet + + def SetExtraBrowserOptions(self, options): + silk_flags.CustomizeBrowserOptionsForGpuRasterization(options) + options.full_performance_mode = False + + @classmethod + def Name(cls): + return 'power.gpu_rasterization.top_10' + @benchmark.Enabled('mac') class PowerTop25(perf_benchmark.PerfBenchmark): @@ -104,6 +119,29 @@ stories.RemoveStory(found) return stories +@benchmark.Enabled('mac') +class PowerGpuRasterizationTop25(perf_benchmark.PerfBenchmark): + """Top 25 quiescent power test with GPU rasterization enabled.""" + tag = 'gpu_rasterization' + test = power.QuiescentPower + page_set = page_sets.Top25PageSet + + def SetExtraBrowserOptions(self, options): + silk_flags.CustomizeBrowserOptionsForGpuRasterization(options) + options.full_performance_mode = False + + @classmethod + def Name(cls): + return 'power.gpu_rasterization.top_25' + + def CreateStorySet(self, _): + # Exclude techcrunch.com. It is not suitable for this benchmark because it + # does not consistently become quiescent within 60 seconds. + stories = self.page_set() + found = next((x for x in stories if 'techcrunch.com' in x.url), None) + if found: + stories.RemoveStory(found) + return stories @benchmark.Enabled('linux', 'mac', 'win', 'chromeos') class PowerPPSControlDisabled(perf_benchmark.PerfBenchmark):
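PowerTop25 and the new PowerGpuRasterizationTop25 now carry identical CreateStorySet bodies for the techcrunch.com exclusion. A small shared helper could remove that duplication; this is a sketch under the assumption that the story set exposes iteration and RemoveStory exactly as used above, and the helper name is invented.

def _RemoveStoryByUrlFragment(stories, fragment):
  """Drops the first story whose URL contains fragment, if present."""
  found = next((x for x in stories if fragment in x.url), None)
  if found:
    stories.RemoveStory(found)
  return stories

Both CreateStorySet overrides could then reduce to a single call such as _RemoveStoryByUrlFragment(self.page_set(), 'techcrunch.com').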
diff --git a/tools/perf/benchmarks/smoothness.py b/tools/perf/benchmarks/smoothness.py index 0dd6d9afe..2e88fc5 100644 --- a/tools/perf/benchmarks/smoothness.py +++ b/tools/perf/benchmarks/smoothness.py
@@ -153,7 +153,7 @@
   return stories
 
 
-@benchmark.Enabled('android')
+@benchmark.Enabled('android', 'mac')
 class SmoothnessGpuRasterizationTop25(_Smoothness):
   """Measures rendering statistics for the top 25 with GPU rasterization.
   """
@@ -260,7 +260,7 @@
 
 @benchmark.Enabled('android', 'chromeos', 'mac')
 class SmoothnessToughPinchZoomCases(_Smoothness):
-  """Measures rendering statistics for pinch-zooming into the tough pinch zoom
+  """Measures rendering statistics for pinch-zooming in the tough pinch zoom
   cases.
   """
   page_set = page_sets.ToughPinchZoomCasesPageSet
@@ -270,6 +270,23 @@
     return 'smoothness.tough_pinch_zoom_cases'
 
 
+@benchmark.Enabled('android', 'chromeos', 'mac')
+class SmoothnessGpuRasterizationToughPinchZoomCases(_Smoothness):
+  """Measures rendering statistics for pinch-zooming in the tough pinch zoom
+  cases with GPU rasterization.
+  """
+  tag = 'gpu_rasterization'
+  test = smoothness.Smoothness
+  page_set = page_sets.ToughPinchZoomCasesPageSet
+
+  def SetExtraBrowserOptions(self, options):
+    silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
+
+  @classmethod
+  def Name(cls):
+    return 'smoothness.gpu_rasterization.tough_pinch_zoom_cases'
+
+
 @benchmark.Enabled('android', 'chromeos')
 class SmoothnessToughScrollingWhileZoomedInCases(_Smoothness):
   """Measures rendering statistics for pinch-zooming then diagonal scrolling"""
@@ -313,6 +330,20 @@
   def Name(cls):
     return 'smoothness.tough_scrolling_cases'
 
+
+@benchmark.Enabled('android', 'mac')
+class SmoothnessGpuRasterizationToughScrollingCases(_Smoothness):
+  tag = 'gpu_rasterization'
+  test = smoothness.Smoothness
+  page_set = page_sets.ToughScrollingCasesPageSet
+
+  def SetExtraBrowserOptions(self, options):
+    silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
+
+  @classmethod
+  def Name(cls):
+    return 'smoothness.gpu_rasterization.tough_scrolling_cases'
+
 @benchmark.Disabled('android')  # http://crbug.com/531593
 class SmoothnessToughImageDecodeCases(_Smoothness):
   page_set = page_sets.ToughImageDecodeCasesPageSet
diff --git a/tools/perf/clear_system_cache/BUILD.gn b/tools/perf/clear_system_cache/BUILD.gn index c4d736534..dad3319 100644 --- a/tools/perf/clear_system_cache/BUILD.gn +++ b/tools/perf/clear_system_cache/BUILD.gn
@@ -12,5 +12,6 @@ deps = [ "//base", "//base/test:test_support", + "//build/config/sanitizers:deps", ] }
diff --git a/tools/perf/page_sets/data/top_10.json b/tools/perf/page_sets/data/top_10.json index 61acd27..621d86f4 100644 --- a/tools/perf/page_sets/data/top_10.json +++ b/tools/perf/page_sets/data/top_10.json
@@ -1,6 +1,9 @@ { - "description": "Describes the Web Page Replay archives for a page set. Don't edit by hand! Use record_wpr for updating.", + "description": "Describes the Web Page Replay archives for a story set. Don't edit by hand! Use record_wpr for updating.", "archives": { + "top_10_001.wpr": [ + "Facebook" + ], "top_10_000.wpr": [ "https://www.google.com/#hl=en&q=barack+obama", "https://mail.google.com/mail/", @@ -14,4 +17,4 @@ "http://www.ask.com/" ] } -} +} \ No newline at end of file
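The archives map ties each WPR archive to the stories it can replay, which is how one story set is sharded across several .wpr files; the Facebook story now gets its own top_10_001.wpr. A minimal sketch of resolving a story's archive from this JSON, assuming only the structure shown above (Telemetry's own archive-info code performs the real lookup and is not reproduced here).

import json

def ArchiveForStory(archive_json_path, story_name):
  """Returns the .wpr file that serves story_name, or None if it is unlisted."""
  with open(archive_json_path) as f:
    data = json.load(f)
  for archive, stories in data['archives'].items():
    if story_name in stories:
      return archive
  return None

# Per the file above, ArchiveForStory('top_10.json', 'Facebook') would return
# 'top_10_001.wpr'.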
diff --git a/tools/perf/page_sets/data/top_10_001.wpr.sha1 b/tools/perf/page_sets/data/top_10_001.wpr.sha1 new file mode 100644 index 0000000..40eb46a --- /dev/null +++ b/tools/perf/page_sets/data/top_10_001.wpr.sha1
@@ -0,0 +1 @@ +e9417ee214dd7d9fe3263b8b159ceb6e35a8c7ed \ No newline at end of file
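The new .sha1 file is the usual placeholder for a binary kept in cloud storage: it records the SHA-1 digest of the real top_10_001.wpr archive so that checkout tooling can fetch the binary on demand instead of it being committed. A sketch of computing such a digest locally, assuming the .wpr file is present on disk.

import hashlib

def Sha1OfFile(path, chunk_size=1 << 20):
  """Returns the hex SHA-1 of a file, reading it in 1 MiB chunks."""
  digest = hashlib.sha1()
  with open(path, 'rb') as f:
    for chunk in iter(lambda: f.read(chunk_size), b''):
      digest.update(chunk)
  return digest.hexdigest()

# The content of top_10_001.wpr.sha1 should match
# Sha1OfFile('tools/perf/page_sets/data/top_10_001.wpr').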
diff --git a/tools/perf/page_sets/top_10.py b/tools/perf/page_sets/top_10.py index bd57bd8..7e4402a0 100644 --- a/tools/perf/page_sets/top_10.py +++ b/tools/perf/page_sets/top_10.py
@@ -76,9 +76,9 @@ class Facebook(SimplePage): def __init__(self, page_set): super(Facebook, self).__init__( - url='http://www.facebook.com/barackobama', + url='https://www.facebook.com/barackobama', page_set=page_set, - credentials='facebook', + credentials='facebook2', name='Facebook') def RunNavigateSteps(self, action_runner):
diff --git a/tools/telemetry/telemetry/internal/backends/chrome_inspector/devtools_client_backend.py b/tools/telemetry/telemetry/internal/backends/chrome_inspector/devtools_client_backend.py index 9fd994f..a7930810 100644 --- a/tools/telemetry/telemetry/internal/backends/chrome_inspector/devtools_client_backend.py +++ b/tools/telemetry/telemetry/internal/backends/chrome_inspector/devtools_client_backend.py
@@ -4,6 +4,7 @@
 import logging
 import re
+import socket
 import sys
 
 from telemetry.core import exceptions
@@ -11,13 +12,18 @@
 from telemetry.internal.backends import browser_backend
 from telemetry.internal.backends.chrome_inspector import devtools_http
 from telemetry.internal.backends.chrome_inspector import inspector_backend
+from telemetry.internal.backends.chrome_inspector import inspector_websocket
 from telemetry.internal.backends.chrome_inspector import tracing_backend
+from telemetry.internal.backends.chrome_inspector import websocket
 from telemetry.internal.platform.tracing_agent import chrome_tracing_agent
 from telemetry.internal.platform.tracing_agent import (
     chrome_tracing_devtools_manager)
 from telemetry.timeline import trace_data as trace_data_module
 
 
+BROWSER_INSPECTOR_WEBSOCKET_URL = 'ws://127.0.0.1:%i/devtools/browser'
+
+
 class TabNotFoundError(exceptions.Error):
   pass
 
@@ -26,8 +32,12 @@
   """Returns True if a DevTools agent is available on the given port."""
   if (isinstance(app_backend, browser_backend.BrowserBackend) and
       app_backend.supports_tracing):
-    if not tracing_backend.IsInspectorWebsocketAvailable(port):
-      return False
+    inspector_websocket_instance = inspector_websocket.InspectorWebsocket()
+    try:
+      if not _IsInspectorWebsocketAvailable(inspector_websocket_instance, port):
+        return False
+    finally:
+      inspector_websocket_instance.Disconnect()
 
   devtools_http_instance = devtools_http.DevToolsHttp(port)
   try:
@@ -36,6 +46,22 @@
     devtools_http_instance.Disconnect()
 
 
+def _IsInspectorWebsocketAvailable(inspector_websocket_instance, port):
+  try:
+    inspector_websocket_instance.Connect(BROWSER_INSPECTOR_WEBSOCKET_URL % port)
+  except websocket.WebSocketException:
+    return False
+  except socket.error:
+    return False
+  except Exception as e:
+    sys.stderr.write('Unidentified exception while checking if websocket is '
+                     'available on port %i. Exception message: %s\n' %
+                     (port, e.message))
+    return False
+  else:
+    return True
+
+
 # TODO(nednguyen): Find a more reliable way to check whether the devtool agent
 # is still alive.
 def _IsDevToolsAgentAvailable(devtools_http_instance):
@@ -69,6 +95,7 @@
     self._devtools_port = devtools_port
     self._remote_devtools_port = remote_devtools_port
     self._devtools_http = devtools_http.DevToolsHttp(devtools_port)
+    self._browser_inspector_websocket = None
     self._tracing_backend = None
     self._app_backend = app_backend
     self._devtools_context_map_backend = _DevToolsContextMapBackend(
@@ -84,17 +111,14 @@
     trace_config = (self._app_backend.platform_backend
                     .tracing_controller_backend.GetChromeTraceConfig())
     if not trace_config:
-      self._tracing_backend = tracing_backend.TracingBackend(
-          self._devtools_port, False)
+      self._CreateTracingBackendIfNeeded(is_tracing_running=False)
      return
 
     if self.support_startup_tracing:
-      self._tracing_backend = tracing_backend.TracingBackend(
-          self._devtools_port, True)
+      self._CreateTracingBackendIfNeeded(is_tracing_running=True)
      return
 
-    self._tracing_backend = tracing_backend.TracingBackend(
-        self._devtools_port, False)
+    self._CreateTracingBackendIfNeeded(is_tracing_running=False)
     self.StartChromeTracing(
         trace_options=trace_config.tracing_options,
         custom_categories=trace_config.tracing_category_filter.filter_string)
@@ -144,6 +168,11 @@
     if self._tracing_backend:
       self._tracing_backend.Close()
       self._tracing_backend = None
+    # Close the browser inspector socket last (in case the backend needs to
+    # interact with it before closing).
+ if self._browser_inspector_websocket: + self._browser_inspector_websocket.Disconnect() + self._browser_inspector_websocket = None @decorators.Cache def GetChromeBranchNumber(self): @@ -233,11 +262,19 @@ self._devtools_context_map_backend._Update(contexts) return self._devtools_context_map_backend - def _CreateTracingBackendIfNeeded(self): + def _CreateTracingBackendIfNeeded(self, is_tracing_running=False): assert self.supports_tracing if not self._tracing_backend: + self._CreateAndConnectBrowserInspectorWebsocketIfNeeded() self._tracing_backend = tracing_backend.TracingBackend( - self._devtools_port) + self._browser_inspector_websocket, is_tracing_running) + + def _CreateAndConnectBrowserInspectorWebsocketIfNeeded(self): + if not self._browser_inspector_websocket: + self._browser_inspector_websocket = ( + inspector_websocket.InspectorWebsocket()) + self._browser_inspector_websocket.Connect( + BROWSER_INSPECTOR_WEBSOCKET_URL % self._devtools_port) def IsChromeTracingSupported(self): if not self.supports_tracing:
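The net effect of the devtools_client_backend.py changes is an ownership shift: DevToolsClientBackend now lazily opens a single browser-level inspector websocket, shares it with TracingBackend, and disconnects it only after the tracing backend has been closed. A condensed sketch of that create/share/teardown ordering, using a stand-in class and omitting the is_tracing_running plumbing; the imports and constant are the real ones introduced in this change.

from telemetry.internal.backends.chrome_inspector import inspector_websocket
from telemetry.internal.backends.chrome_inspector import tracing_backend

BROWSER_INSPECTOR_WEBSOCKET_URL = 'ws://127.0.0.1:%i/devtools/browser'

class _ClientBackendSketch(object):
  """Stand-in showing the socket ownership used by DevToolsClientBackend."""

  def __init__(self, devtools_port):
    self._port = devtools_port
    self._browser_socket = None   # Owned here, created lazily.
    self._tracing_backend = None  # Borrows the socket, never closes it.

  def _CreateTracingBackendIfNeeded(self):
    if not self._browser_socket:
      self._browser_socket = inspector_websocket.InspectorWebsocket()
      self._browser_socket.Connect(BROWSER_INSPECTOR_WEBSOCKET_URL % self._port)
    if not self._tracing_backend:
      self._tracing_backend = tracing_backend.TracingBackend(
          self._browser_socket)

  def Close(self):
    if self._tracing_backend:
      self._tracing_backend.Close()      # Only unregisters its domain.
      self._tracing_backend = None
    if self._browser_socket:
      self._browser_socket.Disconnect()  # The owner closes the socket last.
      self._browser_socket = None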
diff --git a/tools/telemetry/telemetry/internal/backends/chrome_inspector/devtools_client_backend_unittest.py b/tools/telemetry/telemetry/internal/backends/chrome_inspector/devtools_client_backend_unittest.py index 1eb928e..c8aaa7a 100644 --- a/tools/telemetry/telemetry/internal/backends/chrome_inspector/devtools_client_backend_unittest.py +++ b/tools/telemetry/telemetry/internal/backends/chrome_inspector/devtools_client_backend_unittest.py
@@ -4,6 +4,9 @@ from telemetry import decorators from telemetry.testing import browser_test_case +from telemetry.timeline import model +from telemetry.timeline import trace_data +from telemetry.timeline import tracing_options class DevToolsClientBackendTest(browser_test_case.BrowserTestCase): @@ -72,3 +75,18 @@ self.assertEqual(len(c2.contexts), 1) self.assertTrue('blank.html' in c2.contexts[0]['url']) self.assertEqual(c2.GetInspectorBackend(context_id), backend) + + def testTracing(self): + devtools_client = self._devtools_client + if not devtools_client.IsChromeTracingSupported(): + self.skipTest('Browser does not support tracing, skipping test.') + + # Start Chrome tracing. + options = tracing_options.TracingOptions() + options.enable_chrome_trace = True + devtools_client.StartChromeTracing(options) + + # Stop Chrome tracing and check that the resulting data is valid. + builder = trace_data.TraceDataBuilder() + devtools_client.StopChromeTracing(builder) + model.TimelineModel(builder.AsData())
diff --git a/tools/telemetry/telemetry/internal/backends/chrome_inspector/tracing_backend.py b/tools/telemetry/telemetry/internal/backends/chrome_inspector/tracing_backend.py index b2f23c97..8b96e6a 100644 --- a/tools/telemetry/telemetry/internal/backends/chrome_inspector/tracing_backend.py +++ b/tools/telemetry/telemetry/internal/backends/chrome_inspector/tracing_backend.py
@@ -5,7 +5,6 @@ import json import logging import socket -import sys import time from telemetry import decorators @@ -34,32 +33,6 @@ pass - -def IsInspectorWebsocketAvailable(port): - """Returns True if inspector websocket is available on the given port.""" - inspector_websocket_instance = inspector_websocket.InspectorWebsocket() - try: - return _IsInspectorWebsocketAvailable(inspector_websocket_instance, port) - finally: - inspector_websocket_instance.Disconnect() - -def _IsInspectorWebsocketAvailable(inspector_websocket_instance, port): - try: - inspector_websocket_instance.Connect( - 'ws://127.0.0.1:%i/devtools/browser' % port) - except websocket.WebSocketException: - return False - except socket.error: - return False - except Exception as e: - sys.stderr.write('Unidentified exception while checking if wesocket is' - 'available on port %i. Exception message: %s\n' % - (port, e.message)) - return False - else: - return True - - class _DevToolsStreamReader(object): def __init__(self, inspector_socket, stream_handle): self._inspector_websocket = inspector_socket @@ -98,13 +71,13 @@ class TracingBackend(object): - def __init__(self, devtools_port, is_tracing_running=False): - self._inspector_websocket = inspector_websocket.InspectorWebsocket() - self._inspector_websocket.RegisterDomain( - 'Tracing', self._NotificationHandler) - self._inspector_websocket.Connect( - 'ws://127.0.0.1:%i/devtools/browser' % devtools_port) + _TRACING_DOMAIN = 'Tracing' + + def __init__(self, inspector_socket, is_tracing_running=False): + self._inspector_websocket = inspector_socket + self._inspector_websocket.RegisterDomain( + self._TRACING_DOMAIN, self._NotificationHandler) self._trace_events = [] self._is_tracing_running = is_tracing_running self._has_received_all_tracing_data = False @@ -287,7 +260,8 @@ self._has_received_all_tracing_data = True def Close(self): - self._inspector_websocket.Disconnect() + self._inspector_websocket.UnregisterDomain(self._TRACING_DOMAIN) + self._inspector_websocket = None @decorators.Cache def IsTracingSupported(self):
diff --git a/tools/telemetry/telemetry/internal/backends/chrome_inspector/tracing_backend_unittest.py b/tools/telemetry/telemetry/internal/backends/chrome_inspector/tracing_backend_unittest.py index 822ac2d..ccafea56 100644 --- a/tools/telemetry/telemetry/internal/backends/chrome_inspector/tracing_backend_unittest.py +++ b/tools/telemetry/telemetry/internal/backends/chrome_inspector/tracing_backend_unittest.py
@@ -87,12 +87,6 @@ else: raise Exception('Unexpected response type') - def CreateTracingBackend(self): - with mock.patch('telemetry.internal.backends.chrome_inspector.' - 'inspector_websocket.InspectorWebsocket') as mock_class: - mock_class.return_value = self - return tracing_backend.TracingBackend(devtools_port=65000) - class TracingBackendTest(tab_test_case.TabTestCase): @@ -224,7 +218,7 @@ inspector.AddEvent('Tracing.dataCollected', {'value': [{'ph': 'B'}]}, 9) inspector.AddEvent('Tracing.dataCollected', {'value': [{'ph': 'E'}]}, 19) inspector.AddEvent('Tracing.tracingComplete', {}, 35) - backend = inspector.CreateTracingBackend() + backend = tracing_backend.TracingBackend(inspector) # The third response is 16 seconds after the second response, so we expect # a TracingTimeoutException. @@ -238,7 +232,7 @@ inspector.AddEvent('Tracing.dataCollected', {'value': [{'ph': 'B'}]}, 9) inspector.AddEvent('Tracing.dataCollected', {'value': [{'ph': 'E'}]}, 14) inspector.AddEvent('Tracing.tracingComplete', {}, 19) - backend = inspector.CreateTracingBackend() + backend = tracing_backend.TracingBackend(inspector) backend._CollectTracingData(10) self.assertEqual(2, len(backend._trace_events)) @@ -249,7 +243,7 @@ inspector.AddEvent('Tracing.tracingComplete', {'stream': '42'}, 1) inspector.AddAsyncResponse('IO.read', {'data': '[{},{},{'}, 2) inspector.AddAsyncResponse('IO.read', {'data': '},{},{}]', 'eof': True}, 3) - backend = inspector.CreateTracingBackend() + backend = tracing_backend.TracingBackend(inspector) backend._CollectTracingData(10) self.assertEqual(5, len(backend._trace_events)) @@ -260,7 +254,7 @@ inspector.AddResponseHandler( 'Tracing.requestMemoryDump', lambda req: {'result': {'success': True, 'dumpGuid': '42abc'}}) - backend = inspector.CreateTracingBackend() + backend = tracing_backend.TracingBackend(inspector) self.assertEqual(backend.DumpMemory(), '42abc') @@ -269,7 +263,7 @@ inspector.AddResponseHandler( 'Tracing.requestMemoryDump', lambda req: {'result': {'success': False, 'dumpGuid': '42abc'}}) - backend = inspector.CreateTracingBackend() + backend = tracing_backend.TracingBackend(inspector) self.assertIsNone(backend.DumpMemory()) @@ -278,7 +272,7 @@ inspector = FakeInspectorWebsocket(self._mock_timer) inspector.AddResponseHandler( 'Memory.setPressureNotificationsSuppressed', response_handler) - backend = inspector.CreateTracingBackend() + backend = tracing_backend.TracingBackend(inspector) backend.SetMemoryPressureNotificationsSuppressed(True) self.assertEqual(1, response_handler.call_count) @@ -291,7 +285,7 @@ def testSetMemoryPressureNotificationsSuppressedFailure(self): response_handler = mock.Mock() inspector = FakeInspectorWebsocket(self._mock_timer) - backend = inspector.CreateTracingBackend() + backend = tracing_backend.TracingBackend(inspector) inspector.AddResponseHandler( 'Memory.setPressureNotificationsSuppressed', response_handler)
diff --git a/tools/telemetry/telemetry/testing/run_chromeos_tests.py b/tools/telemetry/telemetry/testing/run_chromeos_tests.py index ff331ed..1afc16d2 100644 --- a/tools/telemetry/telemetry/testing/run_chromeos_tests.py +++ b/tools/telemetry/telemetry/testing/run_chromeos_tests.py
@@ -46,7 +46,8 @@ def _RunOneSetOfTests(browser_type, top_level_dir, tests, stream): args = ['--browser', browser_type, '--top-level-dir', top_level_dir, - '--jobs', '1'] + tests + '--jobs', '1', + '--disable-logging-config'] + tests return run_tests.RunTestsCommand.main(args, stream=stream)
diff --git a/tools/telemetry/telemetry/testing/run_tests.py b/tools/telemetry/telemetry/testing/run_tests.py index 15c7c9bb..e40fda2a 100644 --- a/tools/telemetry/telemetry/testing/run_tests.py +++ b/tools/telemetry/telemetry/testing/run_tests.py
@@ -49,6 +49,8 @@
                       help='Treat test filter as exact matches (default is '
                       'substring matches).')
     parser.add_option('--client-config', dest='client_config', default=None)
+    parser.add_option('--disable-logging-config', action='store_true',
+                      default=False, help='Skip the default logging setup.')
 
     typ.ArgumentParser.add_option_group(parser,
                                         "Options for running the tests",
@@ -210,12 +212,13 @@
   binary_manager.InitDependencyManager(context.client_config)
   # We need to reset the handlers in case some other parts of telemetry already
   # set it to make this work.
-  logging.getLogger().handlers = []
-  logging.basicConfig(
-      level=logging.INFO,
-      format='(%(levelname)s) %(asctime)s %(module)s.%(funcName)s:%(lineno)d '
-      '%(message)s')
   args = context
+  if not args.disable_logging_config:
+    logging.getLogger().handlers = []
+    logging.basicConfig(
+        level=logging.INFO,
+        format='(%(levelname)s) %(asctime)s %(module)s.%(funcName)s:%(lineno)d'
+        ' %(message)s')
   if args.device and args.device == 'android':
     android_devices = device_finder.GetDevicesMatchingOptions(args)
     args.device = android_devices[child.worker_num-1].guid
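With the new flag, a wrapper script can install its own logging configuration and ask run_tests not to replace it, which is exactly what the run_chromeos_tests.py change above relies on. A sketch of such a caller; the import path follows the telemetry/testing file layout shown above, the stream handling is simplified, and the browser type and test name are placeholders.

import logging
import sys

from telemetry.testing import run_tests


def main():
  # The wrapper owns the logging setup...
  logging.basicConfig(level=logging.WARNING,
                      format='%(asctime)s %(levelname)s %(message)s')
  # ...so it passes --disable-logging-config to keep run_tests from resetting
  # the handlers and re-applying its INFO-level format.
  args = ['--browser', 'system',        # Placeholder browser type.
          '--jobs', '1',
          '--disable-logging-config',
          'some_test_module']           # Placeholder test selector.
  return run_tests.RunTestsCommand.main(args, stream=sys.stdout)


if __name__ == '__main__':
  sys.exit(main())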
diff --git a/ui/compositor/compositor.cc b/ui/compositor/compositor.cc index 7f16e283..e648c8a 100644 --- a/ui/compositor/compositor.cc +++ b/ui/compositor/compositor.cc
@@ -175,7 +175,7 @@ base::TimeTicks::Now() - before_create); host_->SetRootLayer(root_web_layer_); host_->set_surface_id_namespace(surface_id_allocator_->id_namespace()); - host_->SetLayerTreeHostClientReady(); + host_->SetVisible(true); } Compositor::~Compositor() {