Updated to arc-runtime-43.4410.295.0
diff --git a/.gitmodules b/.gitmodules
index c38c48d..d8b804e 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -126,7 +126,7 @@
url = https://android.googlesource.com/platform/external/junit
[submodule "third_party/tools/flake8"]
path = third_party/tools/flake8
- url = https://chromium.googlesource.com/external/bmcustodio/flake8.git
+ url = https://chromium.googlesource.com/external/gitlab.com/pycqa/flake8
[submodule "third_party/tools/pep8"]
path = third_party/tools/pep8
url = https://chromium.googlesource.com/external/jcrocholl/pep8
@@ -403,3 +403,6 @@
[submodule "third_party/android/packages/providers/DownloadProvider"]
path = third_party/android/packages/providers/DownloadProvider
url = https://android.googlesource.com/platform/packages/providers/DownloadProvider
+[submodule "third_party/tools/mccabe"]
+ path = third_party/tools/mccabe
+ url = https://chromium.googlesource.com/external/github.com/flintwork/mccabe
diff --git a/mods/graphics_translation/egl/native_pepper.cpp b/mods/graphics_translation/egl/native_pepper.cpp
index e0f28ea..f93f005 100644
--- a/mods/graphics_translation/egl/native_pepper.cpp
+++ b/mods/graphics_translation/egl/native_pepper.cpp
@@ -25,114 +25,6 @@
#include "ppapi/c/ppb_opengles2.h"
#include "ppapi/cpp/module.h"
-static arc::GPURendererInterface* GetGPURenderer() {
- arc::PluginHandle handle;
- arc::GPURendererInterface* renderer = handle.GetGPURenderer();
- LOG_ALWAYS_FATAL_IF(!renderer, "No renderer?");
- return renderer;
-}
-
-template<typename T>
-struct Command {
- virtual void Run(arc::GPURendererInterface* renderer) = 0;
-
- void Dispatch() {
- arc::PluginHandle handle;
- if (handle.GetPluginUtil()->IsRendererThread()) {
- Handler(this);
- } else {
- handle.GetGPURenderer()->WaitForSwapBuffers();
- handle.GetPluginUtil()->RunOnRendererThread(&Handler, this);
- }
- }
-
- static void* Handler(void* arg) {
- T* cmd = static_cast<T*>(arg);
- arc::GPURendererInterface* renderer = GetGPURenderer();
- cmd->Run(renderer);
- return NULL;
- }
-};
-
-struct CreateContextCmd : Command<CreateContextCmd> {
- arc::ContextGPU* context;
- arc::ContextGPU* shared;
- const std::vector<int32_t>* attribs;
-
- virtual void Run(arc::GPURendererInterface* renderer) {
- context = renderer->CreateContext(*attribs, shared);
- }
-};
-
-arc::ContextGPU* CreateContextImpl(arc::ContextGPU* shared,
- const std::vector<int32_t>* attribs) {
- CreateContextCmd cmd;
- cmd.context = NULL;
- cmd.shared = shared;
- cmd.attribs = attribs;
- cmd.Dispatch();
- return cmd.context;
-}
-
-struct BindGraphicsCmd : Command<BindGraphicsCmd> {
- arc::ContextGPU* context;
- bool result;
-
- virtual void Run(arc::GPURendererInterface* renderer) {
- result = renderer->BindContext(context);
- }
-};
-
-bool BindGraphicsImpl(arc::ContextGPU* context) {
- BindGraphicsCmd cmd;
- cmd.context = context;
- cmd.Dispatch();
- return cmd.result;
-}
-
-struct DestroyContextCmd : Command<DestroyContextCmd> {
- arc::ContextGPU* context;
-
- virtual void Run(arc::GPURendererInterface* renderer) {
- renderer->DestroyContext(context);
- }
-};
-
-void DestroyContextImpl(arc::ContextGPU* context) {
- DestroyContextCmd cmd;
- cmd.context = context;
- cmd.Dispatch();
-}
-
-struct SwapBuffersCmd : Command<SwapBuffersCmd> {
- arc::ContextGPU* context;
- bool result;
-
- virtual void Run(arc::GPURendererInterface* renderer) {
- result = renderer->SwapBuffers(context);
- }
-};
-
-bool SwapBuffersImpl(arc::ContextGPU* context) {
- SwapBuffersCmd cmd;
- cmd.context = context;
- cmd.Dispatch();
- return cmd.result;
-}
-
-// ----
-
-namespace {
-
-void InitPepperApis(PepperApis* apis) {
- apis->gles2 = static_cast<const PPB_OpenGLES2*>(
- ::pp::Module::Get()->GetBrowserInterface(PPB_OPENGLES2_INTERFACE));
- apis->mapsub = static_cast<const PPB_OpenGLES2ChromiumMapSub *>(
- ::pp::Module::Get()->GetBrowserInterface(
- PPB_OPENGLES2_CHROMIUMMAPSUB_INTERFACE));
-}
-}
-
struct NativeConfig {
NativeConfig(int red_size, int green_size, int blue_size, int alpha_size,
int depth_size, int stencil_size) :
@@ -187,7 +79,11 @@
explicit NativeContext(arc::ContextGPU* ctx)
: underlying_(ctx),
apis_() {
- InitPepperApis(&apis_);
+ apis_.gles2 = static_cast<const PPB_OpenGLES2*>(
+ ::pp::Module::Get()->GetBrowserInterface(PPB_OPENGLES2_INTERFACE));
+ apis_.mapsub = static_cast<const PPB_OpenGLES2ChromiumMapSub *>(
+ ::pp::Module::Get()->GetBrowserInterface(
+ PPB_OPENGLES2_CHROMIUMMAPSUB_INTERFACE));
}
arc::ContextGPU* underlying_;
@@ -273,7 +169,10 @@
// draw into that surface, we use this opportunity to bind it to
// the instance.
win->underlying_ = ctx->underlying_;
- if (!BindGraphicsImpl(win->underlying_)) {
+
+ arc::PluginHandle handle;
+ arc::RendererInterface* renderer = handle.GetRenderer();
+ if (!renderer->BindContext(win->underlying_)) {
LOG_ALWAYS_FATAL("Binding Graphics3D to the plugin failed");
return false;
}
@@ -282,7 +181,9 @@
bool SwapBuffers(NativeWindow* win) {
LOG_ALWAYS_FATAL_IF(win == NULL && win != s_window);
- return SwapBuffersImpl(win->underlying_);
+ arc::PluginHandle handle;
+ arc::RendererInterface* renderer = handle.GetRenderer();
+ return renderer->SwapBuffers(win->underlying_);
}
void DestroyNativeWindow(NativeWindow* win) {
@@ -297,7 +198,10 @@
shared_underlying = shared->underlying_;
}
- arc::ContextGPU* ctx = CreateContextImpl(shared_underlying, &cfg->attribs_);
+ arc::PluginHandle handle;
+ arc::RendererInterface* renderer = handle.GetRenderer();
+ arc::ContextGPU* ctx =
+ renderer->CreateContext(cfg->attribs_, shared_underlying);
if (!ctx) {
return NULL;
}
@@ -316,7 +220,9 @@
void DestroyContext(NativeContext* ctx) {
if (ctx) {
- DestroyContextImpl(ctx->underlying_);
+ arc::PluginHandle handle;
+ arc::RendererInterface* renderer = handle.GetRenderer();
+ renderer->DestroyContext(ctx->underlying_);
}
}
diff --git a/mods/graphics_translation/hwcomposer/hwcomposer.cpp b/mods/graphics_translation/hwcomposer/hwcomposer.cpp
index e5cf358..692bd27 100644
--- a/mods/graphics_translation/hwcomposer/hwcomposer.cpp
+++ b/mods/graphics_translation/hwcomposer/hwcomposer.cpp
@@ -363,7 +363,7 @@
static int hwc_device_open(const hw_module_t* module, const char* name,
hw_device_t** device) {
arc::PluginHandle handle;
- if (!handle.GetGPURenderer() || !handle.GetGPURenderer()->GetCompositor()) {
+ if (!handle.GetRenderer() || !handle.GetRenderer()->GetCompositor()) {
return -ENODEV;
}
@@ -386,7 +386,7 @@
dev->device.getDisplayConfigs = hwc_get_display_configs;
dev->device.getDisplayAttributes = hwc_get_display_attributes;
dev->device.registerProcs = hwc_register_procs;
- dev->compositor = handle.GetGPURenderer()->GetCompositor();
+ dev->compositor = handle.GetRenderer()->GetCompositor();
dev->width = params.width;
dev->height = params.height;
// TODO(crbug.com/459280): Get this information from the RenderParams.
diff --git a/src/build/build_common.py b/src/build/build_common.py
index be94fb1..3e80963 100644
--- a/src/build/build_common.py
+++ b/src/build/build_common.py
@@ -183,6 +183,10 @@
'AndroidConfig.h')
+def get_android_deps_file():
+ return 'src/build/DEPS.android'
+
+
def get_android_fs_path(filename):
return os.path.join(get_android_fs_root(), filename.lstrip(os.sep))
@@ -275,7 +279,7 @@
return objects
-def get_bionic_arch_subdir_name():
+def get_bionic_arch_name():
"""Returns Bionic's architecture sub directory name.
The architecture name is used in sub directories like
@@ -385,7 +389,8 @@
# This ARC tree should be a stashed copy. Return a fake version.
return '0'
return subprocess.check_output(
- ['git', 'describe', '--match', 'arc-runtime-*', commit]).strip()
+ ['git', 'describe', '--first-parent',
+ '--match', 'arc-runtime-*', commit]).strip()
def get_build_version_path():
@@ -399,9 +404,10 @@
def get_chrome_default_user_data_dir():
- return '%s/%s/%s' % (os.getenv('TMPDIR', '/tmp'),
- os.getenv('USER'),
- CHROME_USER_DATA_DIR_PREFIX)
+ return os.path.join(os.getenv('TMPDIR', '/tmp'),
+ os.getenv('USER'),
+ CHROME_USER_DATA_DIR_PREFIX,
+ get_target_dir_name())
def get_chrome_deps_file():
@@ -524,7 +530,8 @@
def get_runtime_version():
runtime_tag = subprocess.check_output(
- ['git', 'describe', '--abbrev=0', '--match', 'arc-runtime-*']).strip()
+ ['git', 'describe', '--first-parent', '--abbrev=0',
+ '--match', 'arc-runtime-*']).strip()
version_string = runtime_tag.replace('arc-runtime-', '')
for part in version_string.split('.'):
num = int(part)
diff --git a/src/build/build_options.py b/src/build/build_options.py
index 8c21d4f..70fee69 100644
--- a/src/build/build_options.py
+++ b/src/build/build_options.py
@@ -333,10 +333,6 @@
parser.add_argument('--enable-valgrind', action='store_true',
help='Run unit tests under Valgrind.')
- parser.add_argument('--enable-config-cache', action='store_true',
- help='[EXPERIMENTAL] Cache configuration result and'
- 'skip the config.py invocation if possible.')
-
parser.add_argument('--goma-dir', help='The directory for goma.')
parser.add_argument('--java-dir',
diff --git a/src/build/config.py b/src/build/config.py
index 67ca68c..0b5cf42 100644
--- a/src/build/config.py
+++ b/src/build/config.py
@@ -18,13 +18,15 @@
import ninja_generator_runner
import open_source
import staging
+import toolchain
from build_options import OPTIONS
from ninja_generator import ArchiveNinjaGenerator
from ninja_generator import NinjaGenerator
-_ANDROID_SYSTEM_IMAGE_DIR = 'ndk/platforms/android-19'
+_ANDROID_SYSTEM_IMAGE_DIR = ('ndk/platforms/android-' +
+ toolchain.get_android_api_level())
def _generate_test_framework_ninjas():
@@ -248,6 +250,7 @@
# important.
if not build_common.use_ndk_direct_execution():
return
+ assert OPTIONS.is_arm(), 'Only ARM supports NDK direct execution'
n = ninja_generator.NinjaGenerator('check_symbols')
script = staging.as_staging('src/build/check_symbols.py')
@@ -257,7 +260,6 @@
script, build_common.get_test_output_handler())),
description=(rule_name + ' $in'))
- assert OPTIONS.is_arm(), 'Only ARM supports NDK direct execution'
arch_subdir = 'arch-arm'
lib_dir = os.path.join(_ANDROID_SYSTEM_IMAGE_DIR, arch_subdir, 'usr/lib')
for so_file in build_common.find_all_files(lib_dir, suffixes='.so'):
diff --git a/src/build/config_runner.py b/src/build/config_runner.py
index f3565b1..6820de9 100644
--- a/src/build/config_runner.py
+++ b/src/build/config_runner.py
@@ -371,9 +371,7 @@
for config_context, generator in generator_list:
cache_path = _get_cache_file_path(config_context.config_name,
config_context.entry_point)
- config_cache = None
- if OPTIONS.enable_config_cache():
- config_cache = _load_config_cache_from_file(cache_path)
+ config_cache = _load_config_cache_from_file(cache_path)
if config_cache is not None and config_cache.check_cache_freshness():
cached_result_list.append(config_cache.to_config_result())
@@ -399,15 +397,14 @@
for cached_result in cached_result_list:
ninja_list.extend(cached_result.generated_ninjas)
- if OPTIONS.enable_config_cache():
- for cache_path, config_result in aggregated_result.iteritems():
- config_cache = cache_miss[cache_path]
- if config_cache is None:
- config_cache = _config_cache_from_config_result(config_result)
- else:
- config_cache.refresh_with_config_result(config_result)
+ for cache_path, config_result in aggregated_result.iteritems():
+ config_cache = cache_miss[cache_path]
+ if config_cache is None:
+ config_cache = _config_cache_from_config_result(config_result)
+ else:
+ config_cache.refresh_with_config_result(config_result)
- config_cache.save_to_file(cache_path)
+ config_cache.save_to_file(cache_path)
ninja_list.sort(key=lambda ninja: ninja.get_module_name())
timer.done()
diff --git a/src/build/filtered_subprocess.py b/src/build/filtered_subprocess.py
index 727547c..46bf809 100644
--- a/src/build/filtered_subprocess.py
+++ b/src/build/filtered_subprocess.py
@@ -116,18 +116,21 @@
self._stop_on_done = False
def _are_all_pipes_closed(self):
- return self.stdout.closed and self.stderr.closed
+ return self.stdout.closed and (not self.stderr or self.stderr.closed)
def _close_all_pipes(self):
if not self.stdout.closed:
self.stdout.close()
- if not self.stderr.closed:
+ if self.stderr and not self.stderr.closed:
self.stderr.close()
def _handle_output(self):
# Consume output from any streams.
- stderr_read = _handle_stream_output(
- self.stderr, self._output_handler.handle_stderr)
+ if not self.stderr:
+ stderr_read = False
+ else:
+ stderr_read = _handle_stream_output(
+ self.stderr, self._output_handler.handle_stderr)
stdout_read = _handle_stream_output(
self.stdout, self._output_handler.handle_stdout)
@@ -201,7 +204,7 @@
# Filter out any that have been closed.
if not self.stdout.closed:
streams_to_block_reading_on.append(self.stdout)
- if not self.stderr.closed:
+ if self.stderr and not self.stderr.closed:
streams_to_block_reading_on.append(self.stderr)
# If we have nothing to wait on, we're on our way out.
@@ -210,8 +213,12 @@
return
try:
- select.select(
- streams_to_block_reading_on, [], [], self._compute_timeout())[0]
+      # Note: we have hit timeout cases that this function does not handle.
+      # To improve debuggability in such a case, we assign the
+      # timeout value to a variable, so that the util.debug module outputs
+      # the value to the log on failure.
+ timeout = self._compute_timeout()
+ select.select(streams_to_block_reading_on, [], [], timeout)
except select.error as e:
if e[0] == errno.EINTR:
logging.info("select has been interrupted, exit normally.")
diff --git a/src/build/flake8 b/src/build/flake8
index fe5e872..8844cec 100755
--- a/src/build/flake8
+++ b/src/build/flake8
@@ -6,36 +6,75 @@
"""Runner for flake8 and its dependencies, using a source checkout."""
+import imp
import sys
-import warnings
-# Modify the Python module search path to add the appropriate source code
-# subdirectories for the various tools that make up flake8.
-sys.path.insert(0, 'third_party/tools/pyflakes')
-sys.path.insert(0, 'third_party/tools/pep8')
-sys.path.insert(0, 'third_party/tools/flake8')
-# We turn off writing of .pyc files here to avoid polluting the git submodule
-# checkouts with .pyc files, which would then show up as being modified from
-# the main repository.
-# TODO(lpique) See if the flake8 and pyflakes projects will take an
-# appropriate ignore configuration change.
-sys.dont_write_bytecode = True
+# This is a minimal fake for flake8
+class _EntryPoint(object):
+ def __init__(self, name, checker):
+ self.name = name
+ self._checker = checker
-# We disable this warning in case someone has one of these modules already
-# installed globally. Otherwise one or more messages might be displayed like
-# this one:
-#
-# /usr/lib/python2.7/dist-packages/setuptools/command/install_scripts.py:3:
-# UserWarning: Module pep8 was already imported from
-# third_party/tools/pep8/pep8.pyc, but
-# /usr/local/lib/python2.7/dist-packages/pep8-1.4.2-py2.7.egg is being
-# added to sys.path
-# from pkg_resources import Distribution, PathMetadata, ensure_directory
-warnings.filterwarnings('ignore', r'Module \w+ was already imported from')
+ def load(self):
+ return self._checker
-# This must happen after the modifications above.
-from flake8.run import main
+
+def _fake_iter_entry_points(key):
+ import mccabe
+ import flake8._pyflakes
+
+ # Assert that key is the expected 'flake8.extension' string.
+ assert key == 'flake8.extension', 'key was unexpectedly %s' % key
+
+ return [
+    # This replicates the entry in the mccabe/setup.py file to define mccabe
+ # as an extension.
+ _EntryPoint(mccabe.McCabeChecker._code, mccabe.McCabeChecker),
+ # This replicates the entry in the flake8/setup.py file to define pyflakes
+ # as an extension.
+ _EntryPoint('F', flake8._pyflakes.FlakesChecker)]
+
+
+class _StubCommand(object):
+ pass
+
+
+def _create_fake_pkg_resources_module():
+ pkg_resources = imp.new_module('pkg_resources')
+
+ # Flake8 uses this function to enumerate all "plugins" compatible with flake8.
+ pkg_resources.iter_entry_points = _fake_iter_entry_points
+
+ return pkg_resources
+
+
+def _create_fake_setuptools_module():
+ setuptools = imp.new_module('setuptools')
+
+  # Flake8 requires this command class to create a Flake8Command, but it is
+  # only needed when running under setuptools (which we are faking out).
+ setuptools.Command = _StubCommand
+
+ return setuptools
+
if __name__ == '__main__':
+ # We fake out having the pkg_resource and setuptools modules. These are not
+ # part of the standard Python package, and we actually need only a minimal set
+ # of functionality.
+ sys.modules['pkg_resources'] = _create_fake_pkg_resources_module()
+ sys.modules['setuptools'] = _create_fake_setuptools_module()
+
+ # Modify the Python module search path to add the appropriate source code
+ # subdirectories for the various tools that make up flake8.
+ sys.path[0:0] = [
+ 'third_party/tools/flake8',
+ 'third_party/tools/mccabe',
+ 'third_party/tools/pep8',
+ 'third_party/tools/pyflakes',
+ ]
+
+ # This must happen after the modifications above.
+ from flake8.main import main
main()
diff --git a/src/build/generate_build_prop.py b/src/build/generate_build_prop.py
index 3bd1c5e..5dcc684 100755
--- a/src/build/generate_build_prop.py
+++ b/src/build/generate_build_prop.py
@@ -15,6 +15,7 @@
import subprocess
import build_common
+import toolchain
from build_options import OPTIONS
OPTIONS.parse_configure_file()
@@ -47,7 +48,7 @@
os.environ['PLATFORM_VERSION'] = '4.4'
# SDK has to be pinned to correct level to avoid loading
# unsupported featured from app's APK file.
-os.environ['PLATFORM_SDK_VERSION'] = '19'
+os.environ['PLATFORM_SDK_VERSION'] = toolchain.get_android_api_level()
# By convention, ro.product.brand, ro.product.manufacturer and ro.product.name
# are always in lowercase.
diff --git a/src/build/git_pre_push.py b/src/build/git_pre_push.py
index bc47c22..28f5c75 100755
--- a/src/build/git_pre_push.py
+++ b/src/build/git_pre_push.py
@@ -21,6 +21,11 @@
"""A git pre-push hook script."""
+def _is_patch_to_next_pastry():
+ """Determines if the current patch is to arc/next-pastry or not."""
+ return util.git.get_branch_tracked_remote_url().endswith('arc/next-pastry')
+
+
def _check_uncommitted_change():
uncommitted_files = util.git.get_uncommitted_files()
if uncommitted_files:
@@ -34,6 +39,11 @@
def _check_lint(push_files):
ignore_file = os.path.join('src', 'build', 'lint_ignore.txt')
+  # If push_files contains any directories (representing submodules), filter
+  # them out. Passing a directory to lint_source.process would cause all
+  # the files in that directory to be checked for lint errors, when those files
+  # may not even conform to the standards of the current project.
+ push_files = filter(os.path.isfile, push_files)
result = lint_source.process(push_files, ignore_file)
if result != 0:
print ''
@@ -73,6 +83,28 @@
return 0
+def _check_android_deps(push_files):
+ android_deps_file = build_common.get_android_deps_file()
+ if android_deps_file not in push_files:
+ return 0
+ with open(android_deps_file) as f:
+ linecount = len(f.readlines())
+ # For normal purposes, we expect that the DEPS.android file will contain just
+ # a single line, naming a release branch tag.
+ # For arc/next-pastry we allow it to be an Android manifest file, which
+ # identifies git hash tags for each Android sub-project. However these
+ # sub-project names may be confidential, and should not be published, so we
+ # have this check to help ensure that does not happen accidentally.
+ if linecount > 1:
+ if not _is_patch_to_next_pastry():
+ print ''
+ print 'DEPS.android appears to be something other than a one-line file'
+ print 'naming a publicly visible branch tag. Any other content should'
+ print 'be restricted to arc/next-pastry.'
+ print ''
+ return -1
+
+
def _check_ninja_lint_clean_after_deps_change(push_files):
# Watch out for any deps changes that could impact mods
prefix_pattern = os.path.join('src', 'build', 'DEPS.')
@@ -171,33 +203,29 @@
if not push_files:
return 0
- result = _check_uncommitted_change()
- if result != 0:
- return result
+ non_push_file_checks = [
+ _check_uncommitted_change,
+ _check_commit_messages,
+ _check_build_steps,
+ ]
- result = _check_lint(push_files)
- if result != 0:
- return result
+ push_file_checks = [
+ _check_lint,
+ _check_prebuilt_chrome_deps,
+ _check_android_deps,
+ _check_ninja_lint_clean_after_deps_change,
+ _check_docs,
+ ]
- result = _check_build_steps()
- if result != 0:
- return result
+ for check in non_push_file_checks:
+ result = check()
+ if result != 0:
+ return result
- result = _check_prebuilt_chrome_deps(push_files)
- if result != 0:
- return result
-
- result = _check_ninja_lint_clean_after_deps_change(push_files)
- if result != 0:
- return result
-
- result = _check_commit_messages()
- if result != 0:
- return result
-
- result = _check_docs(push_files)
- if result != 0:
- return result
+ for check in push_file_checks:
+ result = check(push_files)
+ if result != 0:
+ return result
if _has_file_list_changed_since_last_push(push_files):
suggest_reviewers.suggest_reviewer_set_for_in_flight_commits(False)
diff --git a/src/build/lint_source.py b/src/build/lint_source.py
index 00d70ef..ee3a03e 100755
--- a/src/build/lint_source.py
+++ b/src/build/lint_source.py
@@ -540,7 +540,14 @@
return _filter_files(None)
-def read_ignore_rule(path):
+def _expand_path_list(path_list):
+ result = []
+ for path in path_list:
+ result.extend(_walk(path) if os.path.isdir(path) else [path])
+ return result
+
+
+def _read_ignore_rule(path):
"""Reads the mapping of paths to lint checks to ignore from a file.
The ignore file is expected to define a simple mapping between file paths
@@ -570,8 +577,9 @@
return result
-def process(target_file_list, ignore_file=None, output_file=None):
- ignore_rule = read_ignore_rule(ignore_file)
+def process(target_path_list, ignore_file=None, output_file=None):
+ target_file_list = _expand_path_list(target_path_list)
+ ignore_rule = _read_ignore_rule(ignore_file)
target_file_list = _filter_files(target_file_list)
# Create a temporary directory as the output dir of the analyze_diffs.py,
@@ -651,19 +659,24 @@
def main():
parser = ResponseFileArgumentParser()
- parser.add_argument('files', nargs='*', help='The list of files to lint. If '
- 'no files provided, will lint all files.')
- parser.add_argument('--ignore', '-i', dest='ignore_file', help='A text file '
- 'containting list of files to ignore.')
+ parser.add_argument('files', nargs='*',
+ help='The list of files to lint. If no files provided, '
+ 'will lint all files.')
+ parser.add_argument('--ignore', '-i', dest='ignore_file',
+                      help='A text file containing list of files to ignore.')
parser.add_argument('--merge', action='store_true', help='Merge results.')
parser.add_argument('--output', '-o', help='Output file for storing results.')
- parser.add_argument('--verbose', '-v', action='store_true', help='Prints '
- 'additional output.')
+ parser.add_argument('--verbose', '-v', action='store_true',
+ help='Prints additional output.')
args = parser.parse_args()
log_level = logging.DEBUG if args.verbose else logging.WARNING
logging.basicConfig(format='%(message)s', level=log_level)
+ if not args.ignore_file and not args.files:
+ args.ignore_file = os.path.join(
+ os.path.dirname(__file__), 'lint_ignore.txt')
+
if args.merge:
return merge_results(args.files, args.output)
else:
diff --git a/src/build/ninja_generator.py b/src/build/ninja_generator.py
index cadfaf3..90e1291 100644
--- a/src/build/ninja_generator.py
+++ b/src/build/ninja_generator.py
@@ -1160,10 +1160,10 @@
'android/bionic/libc/kernel/common') +
' -isystem ' + staging.as_staging(
'android/bionic/libc/kernel/%s' %
- build_common.get_bionic_arch_subdir_name()) +
+ build_common.get_bionic_arch_name()) +
' -isystem ' + staging.as_staging(
'android/bionic/libc/%s/include' %
- build_common.get_bionic_arch_subdir_name()) +
+ build_common.get_bionic_arch_name()) +
' -isystem ' + staging.as_staging('android/bionic/libc/include') +
' -isystem ' + staging.as_staging('android/bionic/libm/include') +
' -isystem ' + staging.as_staging(
diff --git a/src/build/perf_test.py b/src/build/perf_test.py
index 76e8dcb..9419579 100755
--- a/src/build/perf_test.py
+++ b/src/build/perf_test.py
@@ -310,12 +310,12 @@
'401-perf', config={'flags': flags.PASS})
args = _prepare_integration_tests_args(100)
- # Call set_up_common_test_directory and prepare_to_run iff source files
+ # Call setup_work_root() and prepare_to_run() iff source files
# to build tests exist. Perf builders do not have them, and can skip it.
- # The buidlers have downloaded pre-built files.
+ # The builders have downloaded pre-built files.
if os.path.exists(os.path.join(
dalvik_vm_test_runner.DalvikVMTestRunner.DALVIK_TESTS_DIR, 'etc')):
- runner.set_up_common_test_directory()
+ runner.setup_work_root()
runner.prepare_to_run([benchmark], args)
output, _ = runner.run_with_setup([benchmark], args)
diff --git a/src/build/prepare_open_source_commit.py b/src/build/prepare_open_source_commit.py
index 60d06dc..72684b6 100755
--- a/src/build/prepare_open_source_commit.py
+++ b/src/build/prepare_open_source_commit.py
@@ -32,10 +32,19 @@
def _add_and_sync_submodule(dest, force, src_submodule, dest_submodule):
- if dest_submodule is None:
+  # If dest_submodule is None, it means the open source repo does not contain
+  # the submodule, and we've never before tried to check it out on this machine.
+  # Conversely, if dest_submodule exists but its path is missing on disk, the
+  # submodule is configured but not checked out: we have checked it out on this
+  # machine before (there is data in .git/modules/... for it, but nothing is
+  # actually checked out to the working tree).
+ if (dest_submodule is None or
+ not os.path.exists(os.path.join(dest, dest_submodule.path))):
logging.info('Adding submodule for %s' % src_submodule.path)
- subprocess.check_call(['git', 'submodule', 'add', src_submodule.url,
- src_submodule.path],
+ # We need to use --force for the second case of it already being a
+ # submodule. This ensures we get a checkout in the working tree.
+ subprocess.check_call(['git', 'submodule', 'add', '--force',
+ src_submodule.url, src_submodule.path],
cwd=dest)
if dest_submodule is None or dest_submodule.head != src_submodule.head:
logging.warning('Updating repository %s' % src_submodule.path)
@@ -43,6 +52,17 @@
logging.info('Repository was at %s' % dest_submodule.head)
logging.info('Checking out to %s' % src_submodule.head)
submodule_path = os.path.join(dest, src_submodule.path)
+ if dest_submodule is not None and src_submodule.url != dest_submodule.url:
+ logging.info('Updating repository url to %s', src_submodule.url)
+ # Replace the url in the .gitmodules file with the updated url.
+ subprocess.check_call(['git', 'config', '-f', '.gitmodules',
+ '--replace-all',
+ 'submodule.%s.url' % src_submodule.path,
+ src_submodule.url], cwd=dest)
+      # Synchronize the new URL in .gitmodules with the actual submodule
+      # configuration.
+ subprocess.check_call(['git', 'submodule', 'sync', src_submodule.path],
+ cwd=dest)
subprocess.check_call(['git', 'submodule', 'update', '--init',
src_submodule.path],
cwd=dest)
diff --git a/src/build/toolchain.py b/src/build/toolchain.py
index 2db11f8..0998949 100644
--- a/src/build/toolchain.py
+++ b/src/build/toolchain.py
@@ -38,6 +38,11 @@
return os.path.join('third_party', 'android', 'build', 'tools')
+def get_android_api_level():
+ """Returns the pinned version of the Android API."""
+ return _ANDROID_SDK_BUILD_TOOLS_PINNED_VERSION.split('.')[0]
+
+
def get_android_sdk_build_tools_pinned_version():
"""Returns the pinned version of the Android SDK's build tools."""
return _ANDROID_SDK_BUILD_TOOLS_PINNED_VERSION
diff --git a/src/build/util/bare_metal_gdb.py b/src/build/util/bare_metal_gdb.py
index cf5c0c8..7bbcde2 100644
--- a/src/build/util/bare_metal_gdb.py
+++ b/src/build/util/bare_metal_gdb.py
@@ -44,7 +44,7 @@
# This will be like: $5 = 0x357bc "libc.so"
matched = re.search(r'^.*"(.*)"', name, re.M)
if not matched:
- print('Failed to retrieve the name of the shared object: %s' % name)
+ print('Failed to retrieve the name of the shared object: "%s"' % name)
return None
path = matched.group(1)
@@ -74,7 +74,7 @@
# This will be like: $3 = 4148191232
matched = re.search(r'^.* = (\d+)', base_addr_line, re.M)
if not matched:
- print('Failed to retrieve the address of the shared object: %s' %
+ print('Failed to retrieve the address of the shared object: "%s"' %
base_addr_line)
return None
base_addr = int(matched.group(1))
diff --git a/src/build/util/git.py b/src/build/util/git.py
index fa933ff..85b9ee2 100644
--- a/src/build/util/git.py
+++ b/src/build/util/git.py
@@ -59,12 +59,15 @@
orig_path = '.'.join(url_key.split('.')[1:-1])
# As for the submodule's path in the internal modules directory for it.
config_path = os.path.join('.git', 'modules', orig_path, 'config')
- out = subprocess.check_output(['git', 'config', '-f', config_path,
- '--get', 'core.worktree'],
- cwd=base_path,
- stderr=subprocess.STDOUT)
+ try:
+ worktree = subprocess.check_output(['git', 'config', '-f', config_path,
+ '--get', 'core.worktree'],
+ cwd=base_path,
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError:
+ continue
path = os.path.normpath(os.path.join(os.path.dirname(config_path),
- out.rstrip()))
+ worktree.rstrip()))
# Read the submodule's HEAD commit.
head = _read_submodule_head(base_path, orig_path)
submodules.append(Submodule(url, path, head))
@@ -212,6 +215,40 @@
stderr=devnull) == 0
+def get_branch_tracked_remote_url(cwd=None):
+ remote_name = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref',
+ '--symbolic-full-name', '@{u}'],
+ cwd=cwd)
+ if '/' in remote_name:
+ remote_name = remote_name.split('/', 1)[0]
+ url = subprocess.check_output(['git', 'config',
+ 'remote.%s.url' % remote_name], cwd=cwd)
+ return url.strip()
+
+
def reset_to_revision(revision, cwd=None):
subprocess.check_call(['git', 'fetch'], cwd=cwd)
subprocess.check_call(['git', 'reset', '--hard', revision], cwd=cwd)
+
+
+def force_checkout_revision(revision, cwd=None):
+ subprocess.check_call(['git', 'fetch'], cwd=cwd)
+ subprocess.check_call(['git', 'checkout', '-f', revision], cwd=cwd)
+
+
+def get_head_revision(cwd=None):
+ return subprocess.check_output(['git', 'rev-parse', 'HEAD'])
+
+
+def add_submodule(url, path):
+ subprocess.check_call(['git', 'submodule', 'add', '-f', url, path])
+
+
+def remove_submodule(path):
+ subprocess.check_call(['git', 'submodule', 'deinit', path])
+ subprocess.check_call(['git', 'rm', path])
+
+
+def get_origin_url(cwd=None):
+ return subprocess.check_output(['git', 'config', '--get',
+ 'remote.origin.url'], cwd=cwd)
diff --git a/src/build/util/git_test.py b/src/build/util/git_test.py
index df531a6..a97c933 100755
--- a/src/build/util/git_test.py
+++ b/src/build/util/git_test.py
@@ -27,16 +27,25 @@
self.assertFalse(self._ignore_checker.matches(
'mods/android/dalvik/vm/mterp/out/foo.cc'))
- def test_get_submodules_succeeds(self):
- submodules = util.git.get_submodules('.', True)
- self.assertTrue(any(['third_party/android' in s.path
- for s in submodules]))
- submodules = util.git.get_submodules('.', False)
- self.assertTrue(any(['third_party/android' in s.path
- for s in submodules]))
+ def test_get_submodules_succeeds(self):
+ submodules = util.git.get_submodules('.', True)
+ self.assertTrue(any(['third_party/android' in s.path
+ for s in submodules]))
+ # The following test essentially tests if you have stale entries
+ # in .git/config. Removing entries from .gitmodules and running
+ # "git submodule sync" does not remove entries from .git/config.
+ submodules = util.git.get_submodules('.', False)
+ self.assertTrue(any(['third_party/android' in s.path
+ for s in submodules]))
- def test_is_not_initial_commit(self):
- self.assertTrue(util.git.has_initial_commit())
+ def test_is_not_initial_commit(self):
+ self.assertTrue(util.git.has_initial_commit())
+
+ def test_get_head_revision(self):
+ self.assertIsNotNone(util.git.get_head_revision())
+
+ def test_get_origin_url(self):
+ self.assertIsNotNone(util.git.get_origin_url())
if __name__ == '__main__':
diff --git a/src/build/util/test/dalvik_vm_test_runner.py b/src/build/util/test/dalvik_vm_test_runner.py
index a8790e0..896973d 100644
--- a/src/build/util/test/dalvik_vm_test_runner.py
+++ b/src/build/util/test/dalvik_vm_test_runner.py
@@ -96,7 +96,7 @@
self._test_arg_map = test_arg_map
@staticmethod
- def set_up_common_test_directory():
+ def setup_work_root():
test_root = os.path.join(
build_common.get_target_common_dir(), 'dalvik_tests')
file_util.makedirs_safely(test_root)
diff --git a/src/build/util/test/suite_results.py b/src/build/util/test/suite_results.py
index 0a7b503..71f4d25 100644
--- a/src/build/util/test/suite_results.py
+++ b/src/build/util/test/suite_results.py
@@ -399,7 +399,9 @@
def _write_raw_output(self):
for suite in self._suite_states:
- if suite.scoreboard.unexpected_failed or suite.scoreboard.incompleted:
+ if (suite.scoreboard.unexpected_failed or
+ suite.scoreboard.expected_failed or
+ suite.scoreboard.incompleted):
self._writer.header(_NORMAL, 'Raw Output: %s' % (suite.name))
self._writer.write(_NORMAL, suite.raw_output)
self._writer.write(_NORMAL, '\n')
diff --git a/src/build/util/test/system_mode.py b/src/build/util/test/system_mode.py
index 7ce0a9e..85d85b3 100644
--- a/src/build/util/test/system_mode.py
+++ b/src/build/util/test/system_mode.py
@@ -271,7 +271,7 @@
return False
return True
- def run_adb(self, commands):
+ def run_adb(self, commands, **kwargs):
"""Runs an adb command and returns output.
Returns single adb command's output. The output is also appended to
@@ -281,11 +281,13 @@
if not self._thread.is_ready():
raise SystemModeError('adb is not currently serving.')
+ kwargs.setdefault('omit_xvfb', True)
+
try:
args = [self._adb, '-s', self._thread.get_android_serial()] + commands
self._logs.add_to_adb_log('SystemMode.run_adb: ' +
' '.join(args) + '\n')
- result = self._suite_runner.run_subprocess(args, omit_xvfb=True)
+ result = self._suite_runner.run_subprocess(args, **kwargs)
self._logs.add_to_adb_log(result + '\n')
return result
except BaseException as e:
diff --git a/src/build/util/test/unittest_util.py b/src/build/util/test/unittest_util.py
index 8e39376..858287c 100644
--- a/src/build/util/test/unittest_util.py
+++ b/src/build/util/test/unittest_util.py
@@ -68,7 +68,11 @@
gdb = toolchain.get_tool(build_options.OPTIONS.target(), 'gdb')
irt = toolchain.get_nacl_irt_core(build_options.OPTIONS.get_target_bitsize())
- subprocess.call([
+ # Note GDB uses NaCl manifest for arc.nexe so we do not need the library
+ # search paths for launch_chrome.
+ solib_paths = [build_common.get_load_library_path()]
+
+ args = [
gdb,
'-ex', 'target remote :4014',
'-ex', 'nacl-irt %s' % irt,
@@ -76,14 +80,11 @@
# debugger. Fixing this issue by modifying the Bionic loader
# will need a bunch of ARC MOD. We work-around the issue by
# passing the path of shared objects here.
- #
- # GDB uses NaCl Manifest file for arc.nexe so we do not need
- # this for launch_chrome.
- '-ex', 'set solib-search-path %s' %
- build_common.get_load_library_path(),
+ '-ex', 'set solib-search-path %s' % ':'.join(solib_paths),
'-ex',
'echo \n*** Type \'continue\' or \'c\' to start debugging ***\n\n',
- runnable_ld])
+ runnable_ld]
+ subprocess.call(args)
sel_ldr_proc.kill()
diff --git a/src/common/dlfcn_injection.cc b/src/common/dlfcn_injection.cc
index fc541ff..99b85e3 100644
--- a/src/common/dlfcn_injection.cc
+++ b/src/common/dlfcn_injection.cc
@@ -16,7 +16,8 @@
#include "base/strings/string_split.h"
#include "common/alog.h"
#include "common/android_static_libraries.h"
-#include "common/arm_syscall.h"
+#include "common/ndk_support/arm_syscall.h"
+#include "common/ndk_support/mmap.h"
#include "common/wrapped_functions.h"
namespace arc {
@@ -74,6 +75,9 @@
// of host's, we inject the syscall function for ARM.
(*g_wrapped_symbol_map)["syscall"] =
reinterpret_cast<void*>(&RunArmLibcSyscall);
+ // See src/common/ndk_support/mmap.cc for detail.
+ (*g_wrapped_symbol_map)["mmap"] =
+ reinterpret_cast<void*>(&MmapForNdk);
#endif
// Inject the custom symbol resolver and posix_translation based
diff --git a/src/common/ndk_support/OWNERS b/src/common/ndk_support/OWNERS
new file mode 100644
index 0000000..42cdddd
--- /dev/null
+++ b/src/common/ndk_support/OWNERS
@@ -0,0 +1,5 @@
+dpolukhin@google.com
+eaeltsin@google.com
+olonho@google.com
+levarum@google.com
+khim@google.com
diff --git a/src/common/ndk_support/README b/src/common/ndk_support/README
new file mode 100644
index 0000000..96401ea
--- /dev/null
+++ b/src/common/ndk_support/README
@@ -0,0 +1,2 @@
+This directory contains some functions which are shared by both NDK
+translation and NDK direct execution on Bare Metal ARM.
diff --git a/src/common/arm_syscall.cc b/src/common/ndk_support/arm_syscall.cc
similarity index 98%
rename from src/common/arm_syscall.cc
rename to src/common/ndk_support/arm_syscall.cc
index ad79076..b0444b3 100644
--- a/src/common/arm_syscall.cc
+++ b/src/common/ndk_support/arm_syscall.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "common/arm_syscall.h"
+#include "common/ndk_support/arm_syscall.h"
#include <errno.h>
#include <stdarg.h>
diff --git a/src/common/arm_syscall.h b/src/common/ndk_support/arm_syscall.h
similarity index 84%
rename from src/common/arm_syscall.h
rename to src/common/ndk_support/arm_syscall.h
index 5aa2c26..681416d 100644
--- a/src/common/arm_syscall.h
+++ b/src/common/ndk_support/arm_syscall.h
@@ -4,8 +4,8 @@
//
// Handles syscalls using ARM's syscall numbers.
-#ifndef COMMON_ARM_SYSCALL_H_
-#define COMMON_ARM_SYSCALL_H_
+#ifndef COMMON_NDK_SUPPORT_ARM_SYSCALL_H_
+#define COMMON_NDK_SUPPORT_ARM_SYSCALL_H_
#include <stdarg.h>
#include <stdint.h>
@@ -27,4 +27,4 @@
} // namespace arc
-#endif // COMMON_ARM_SYSCALL_H_
+#endif // COMMON_NDK_SUPPORT_ARM_SYSCALL_H_
diff --git a/src/common/ndk_support/mmap.cc b/src/common/ndk_support/mmap.cc
new file mode 100644
index 0000000..fa4e89a
--- /dev/null
+++ b/src/common/ndk_support/mmap.cc
@@ -0,0 +1,49 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "common/ndk_support/mmap.h"
+
+#include <pthread.h>
+#include <stdint.h>
+#include <sys/mman.h>
+#include <unistd.h>
+
+#include "common/scoped_pthread_mutex_locker.h"
+
+namespace arc {
+
+namespace {
+
+uintptr_t g_mmap_hint_addr = 0x70000000;
+
+} // namespace
+
+void* MmapForNdk(void* addr, size_t length, int prot, int flags,
+ int fd, off_t offset) {
+ if (!addr && !(flags & MAP_FIXED)) {
+ // We use 0x70000000 as the first hint address. Then, the next
+ // hint address will be increased by |length|, so this function
+ // will likely keep satisfying the limitation of old Bionic's
+ // loader which some NDKs have. On SFI NaCl, always specifying
+ // 0x70000000 works, but that does not work in Bare Metal
+ // mode. As SFI NaCl may change its internal implementation in
+ // future, it would be better to always update the address hint
+ // which is more likely used.
+ //
+ // Such NDK apps call mmap with NULL |addr| only twice at their
+ // start-ups. On SFI NaCl these addresses are always not used.
+ // TODO(hamaji): Check that ASLR on BMM never uses this region
+ // and update the comment and/or code.
+ //
+ // Essentially this way we emulate Android's mmap() behavior
+ // better, by hinting where it shall allocate, if application has
+ // no preferences.
+ addr = reinterpret_cast<char*>(
+ __sync_fetch_and_add(&g_mmap_hint_addr,
+ (length + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1)));
+ }
+ return mmap(addr, length, prot, flags, fd, offset);
+}
+
+} // namespace arc
diff --git a/src/common/ndk_support/mmap.h b/src/common/ndk_support/mmap.h
new file mode 100644
index 0000000..2dab9b8
--- /dev/null
+++ b/src/common/ndk_support/mmap.h
@@ -0,0 +1,24 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef COMMON_NDK_SUPPORT_MMAP_H_
+#define COMMON_NDK_SUPPORT_MMAP_H_
+
+#include <unistd.h>
+
+namespace arc {
+
+// When |addr| is NULL, this mmap automatically fills a hint address
+// to return values in certain range. Some old NDK applications
+// require this behavior because they have a copy of old Bionic
+// loader.
+// https://android.googlesource.com/platform/bionic/+/gingerbread/linker/linker.c
+ // TODO(olonho): investigate why, even with hint 0, the linker expects
+ // memory in a certain range. Short ARM branches?
+void* MmapForNdk(void* addr, size_t length, int prot, int flags,
+ int fd, off_t offset);
+
+} // namespace arc
+
+#endif // COMMON_NDK_SUPPORT_MMAP_H_
diff --git a/src/common/plugin_handle.h b/src/common/plugin_handle.h
index b75f4cc..5173dbe 100644
--- a/src/common/plugin_handle.h
+++ b/src/common/plugin_handle.h
@@ -22,10 +22,6 @@
LOG_ALWAYS_FATAL_IF(!plugin_);
return plugin_->GetRenderer();
}
- GPURendererInterface* GetGPURenderer() const {
- LOG_ALWAYS_FATAL_IF(!plugin_);
- return plugin_->GetGPURenderer();
- }
InputManagerInterface* GetInputManager() const {
LOG_ALWAYS_FATAL_IF(!plugin_);
return plugin_->GetInputManager();
diff --git a/src/common/plugin_interface.h b/src/common/plugin_interface.h
index b4ef605..6ba5c23 100644
--- a/src/common/plugin_interface.h
+++ b/src/common/plugin_interface.h
@@ -24,42 +24,6 @@
class InputManagerInterface;
-class ResizeObserver {
- public:
- virtual void OnResize(int width, int height) = 0;
-};
-
-class RendererInterface {
- public:
- struct RenderParams {
- // Width of display in actual pixels.
- int width;
- // Width of display in actual pixels.
- int height;
- // Device scale from device independent pixels to actual pixels.
- float device_render_to_view_pixels;
- // Like crx_render_to_view_pixels, controls the size of the
- // Graphics3D/Image2D resource. See also common/options.h.
- float crx_render_to_view_pixels;
-
- bool operator==(const RenderParams& params) const {
- return width == params.width && height == params.height &&
- device_render_to_view_pixels == params.device_render_to_view_pixels &&
- crx_render_to_view_pixels == params.crx_render_to_view_pixels;
- }
- bool operator!=(const RenderParams& params) const {
- return !operator==(params);
- }
- };
- virtual ~RendererInterface() {}
-
- // Get the plugin's render characteristics.
- virtual void GetRenderParams(RenderParams* params) const = 0;
-
- virtual void AddResizeObserver(ResizeObserver* observer) = 0;
- virtual void RemoveResizeObserver(ResizeObserver* observer) = 0;
-};
-
class CompositorInterface {
public:
struct Size {
@@ -144,20 +108,73 @@
// Opaque type of GPU context pointers.
struct ContextGPU;
-class GPURendererInterface {
+class ResizeObserver {
+ public:
+ virtual void OnResize(int width, int height) = 0;
+};
+
+class RendererInterface {
public:
typedef std::vector<int32_t> Attributes;
- virtual ~GPURendererInterface() {}
+ struct RenderParams {
+ RenderParams()
+ : width(0),
+ height(0),
+ device_render_to_view_pixels(0.f),
+ crx_render_to_view_pixels(0.f) {
+ }
+
+ // Width of display in actual pixels.
+ int width;
+ // Height of display in actual pixels.
+ int height;
+ // Device scale from device independent pixels to actual pixels.
+ float device_render_to_view_pixels;
+ // Like device_render_to_view_pixels, controls the size of the
+ // Graphics3D/Image2D resource. See also common/options.h.
+ float crx_render_to_view_pixels;
+
+ bool operator==(const RenderParams& rhs) const {
+ return width == rhs.width &&
+ height == rhs.height &&
+ device_render_to_view_pixels == rhs.device_render_to_view_pixels &&
+ crx_render_to_view_pixels == rhs.crx_render_to_view_pixels;
+ }
+ bool operator!=(const RenderParams& rhs) const {
+ return !operator==(rhs);
+ }
+
+ int ConvertFromDIP(float xy_in_dip) const {
+ float scale = device_render_to_view_pixels * crx_render_to_view_pixels;
+ return xy_in_dip * scale;
+ }
+ float ConvertToDIP(int xy) const {
+ float scale = device_render_to_view_pixels * crx_render_to_view_pixels;
+ return static_cast<float>(xy) / scale;
+ }
+ };
+
+ virtual ~RendererInterface() {}
+
+ virtual void GetRenderParams(RenderParams* params) const = 0;
+
+ virtual CompositorInterface* GetCompositor() = 0;
+
+ virtual void AddResizeObserver(ResizeObserver* observer) = 0;
+
+ virtual void RemoveResizeObserver(ResizeObserver* observer) = 0;
virtual ContextGPU* CreateContext(const Attributes& attribs,
ContextGPU* shared_context) = 0;
- virtual bool BindContext(ContextGPU* context) = 0;
- virtual bool SwapBuffers(ContextGPU* context) = 0;
- virtual void WaitForSwapBuffers() = 0;
- virtual void DestroyContext(ContextGPU* context) = 0;
- virtual CompositorInterface* GetCompositor() = 0;
+ virtual bool BindContext(ContextGPU* context) = 0;
+
+ virtual bool SwapBuffers(ContextGPU* context) = 0;
+
+ virtual void WaitForSwapBuffers() = 0;
+
+ virtual void DestroyContext(ContextGPU* context) = 0;
};
class AudioOutPollDataCallback {
@@ -459,7 +476,6 @@
class PluginInterface {
public:
virtual RendererInterface* GetRenderer() = 0;
- virtual GPURendererInterface* GetGPURenderer() = 0;
virtual InputManagerInterface* GetInputManager() = 0;
virtual AudioManagerInterface* GetAudioManager() = 0;
virtual CameraManagerInterface* GetCameraManager() = 0;
diff --git a/src/posix_translation/udp_socket.cc b/src/posix_translation/udp_socket.cc
index 582137a..054098a 100644
--- a/src/posix_translation/udp_socket.cc
+++ b/src/posix_translation/udp_socket.cc
@@ -89,16 +89,6 @@
} // namespace
-// A message unit which is sent to or received from the peer.
-struct UDPSocket::Message {
- // The address where this message is being sent to or where the message
- // comes from.
- sockaddr_storage addr;
-
- // Sent or received data.
- std::vector<char> data;
-};
-
// Thin wrapper of pp::UDPSocket. This is introduced to manage the lifetime of
// pp::UDPSocket instance correctly, and resolve race condition.
// The concept of this class is as same as TCPSocket::SocketWrapper. Please
diff --git a/src/posix_translation/udp_socket.h b/src/posix_translation/udp_socket.h
index 6ef55ab..d31d027 100644
--- a/src/posix_translation/udp_socket.h
+++ b/src/posix_translation/udp_socket.h
@@ -58,7 +58,18 @@
virtual void OnLastFileRef() OVERRIDE;
private:
- struct Message;
+ // A message unit which is sent to or received from the peer.
+ // Note: in libcxx, deque implementation uses sizeof(T) in the inlined
+ // initialization "const static" member, so we cannot use forward declaration
+ // here. cf. android/external/libcxx/include/deque.
+ struct Message {
+ // The address where this message is being sent to or where the message
+ // comes from.
+ sockaddr_storage addr;
+
+ // Sent or received data.
+ std::vector<char> data;
+ };
typedef std::deque<Message> MessageQueue;
class SocketWrapper;
diff --git a/third_party/tools/flake8 b/third_party/tools/flake8
index 708c653..bc18d8e 160000
--- a/third_party/tools/flake8
+++ b/third_party/tools/flake8
@@ -1 +1 @@
-Subproject commit 708c6536bf84d8f1ccf408f55e98d76dceb9453d
+Subproject commit bc18d8e2d1cb621ceaa554fe8ca64a720009d880
diff --git a/third_party/tools/mccabe b/third_party/tools/mccabe
new file mode 160000
index 0000000..e8aea16
--- /dev/null
+++ b/third_party/tools/mccabe
@@ -0,0 +1 @@
+Subproject commit e8aea16d28e92bd3c62601275762fc9c16808f6c