blob: cb18c96ef6fec5a91cc92b25071d2e48051c431a [file] [log] [blame]
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Aggregates Jacoco coverage files to produce output."""
from __future__ import print_function
import argparse
import fnmatch
import json
import os
import sys
import tempfile
import xml.etree.ElementTree as ET
import devil_chromium
from devil.utils import cmd_helper
from pylib.constants import host_paths
# Source paths should be passed to Jacoco in a way that the relative file paths
# reflect the class package name.
# These are the path fragments where the Java package hierarchy begins inside
# a source directory; everything before the fragment is the "source root".
_PARTIAL_PACKAGE_NAMES = ['com/google', 'org/chromium']
# The sources_json_file is generated by jacoco_instr.py with source directories
# and input path to non-instrumented jars.
# e.g.
# 'source_dirs': [
# "chrome/android/java/src/org/chromium/chrome/browser/toolbar/bottom",
# "chrome/android/java/src/org/chromium/chrome/browser/ui/system",
# ...]
# 'input_path':
# '$CHROMIUM_OUTPUT_DIR/\
# obj/chrome/android/features/tab_ui/java__process_prebuilt-filtered.jar'
# Filename suffix used to locate those generated JSON files on disk.
_SOURCES_JSON_FILES_SUFFIX = '__jacoco_sources.json'
def _GetFilesWithSuffix(root_dir, suffix):
"""Gets all files with a given suffix.
Args:
root_dir: Directory in which to search for files.
suffix: Suffix to look for.
Returns:
A list of absolute paths to files that match.
"""
files = []
for root, _, filenames in os.walk(root_dir):
basenames = fnmatch.filter(filenames, '*' + suffix)
files.extend([os.path.join(root, basename) for basename in basenames])
return files
def _ParseArguments(parser):
"""Parses the command line arguments.
Args:
parser: ArgumentParser object.
Returns:
The parsed arguments.
"""
parser.add_argument(
'--format',
required=True,
choices=['html', 'xml', 'csv', 'json'],
help='Output report format. Choose one from html, xml, csv and json.'
'json format conforms to '
'//infra/appengine/findit/model/proto/code_coverage.proto')
parser.add_argument('--output-dir', help='html report output directory.')
parser.add_argument(
'--output-file', help='xml, csv or json report output file.')
parser.add_argument(
'--coverage-dir',
required=True,
help='Root of the directory in which to search for '
'coverage data (.exec) files.')
parser.add_argument(
'--sources-json-dir',
help='Root of the directory in which to search for '
'*__jacoco_sources.json files.')
parser.add_argument(
'--class-files',
nargs='+',
help='Location of Java non-instrumented class files. '
'Use non-instrumented jars instead of instrumented jars. '
'e.g. use chrome_java__process_prebuilt-filtered.jar instead of'
'chrome_java__process_prebuilt-instrumented.jar')
parser.add_argument(
'--sources',
nargs='+',
help='Location of the source files. '
'Specified source folders must be the direct parent of the folders '
'that define the Java packages.'
'e.g. <src_dir>/chrome/android/java/src/')
parser.add_argument(
'--cleanup',
action='store_true',
help='If set, removes coverage files generated at '
'runtime.')
args = parser.parse_args()
if args.format == 'html':
if not args.output_dir:
parser.error('--output-dir needed for html report.')
elif not args.output_file:
parser.error('--output-file needed for xml, csv or json report.')
if not (args.sources_json_dir or args.class_files):
parser.error('At least either --sources-json-dir or --class-files needed.')
if args.format == 'json' and not args.sources_json_dir:
parser.error('--sources-json-dir needed for json report')
return args
def _GenerateJsonCoverageMetadata(out_file_path, jacoco_xml_path, source_dirs):
"""Generates a JSON representation based on Jacoco xml report.
JSON format conforms to the proto:
//infra/appengine/findit/model/proto/code_coverage.proto
Writes the results of the coverage analysis to the file specified by
|out_file_path|.
Args:
out_file_path: A string representing the location to write JSON metadata.
jacoco_xml_path: A string representing the file path to Jacoco xml report.
source_dirs: A list of source directories of Java source files.
Raises:
Exception: No Jacoco xml report found or
cannot find package directory according to src root.
"""
if not os.path.exists(jacoco_xml_path):
raise Exception('No Jacoco xml report found on %s' % jacoco_xml_path)
data = {}
data['files'] = []
tree = ET.parse(jacoco_xml_path)
root = tree.getroot()
for package in root.iter('package'):
package_path = package.attrib['name']
print('Processing package %s' % package_path)
# Find package directory according to src root.
package_source_dir = ''
for source_dir in source_dirs:
if package_path in source_dir:
package_source_dir = source_dir
break
if not package_source_dir:
raise Exception('Cannot find package directory according to src root')
for sourcefile in package.iter('sourcefile'):
sourcefile_name = sourcefile.attrib['name']
path = os.path.join(package_source_dir, sourcefile_name)
print('Processing file %s' % path)
file_coverage = {}
file_coverage['path'] = path
file_coverage['lines'] = []
file_coverage['branches'] = []
# Calculate file's total lines.
abs_path = os.path.join(host_paths.DIR_SOURCE_ROOT, path)
if os.path.exists(abs_path):
with open(abs_path, 'r') as f:
file_coverage['total_lines'] = sum(1 for _ in f)
for line in sourcefile.iter('line'):
line_number = int(line.attrib['nr'])
covered_instructions = int(line.attrib['ci'])
missed_branches = int(line.attrib['mb'])
covered_branches = int(line.attrib['cb'])
is_branch = False
if missed_branches > 0 or covered_branches > 0:
is_branch = True
line_coverage = {}
line_coverage['first'] = line_number
line_coverage['last'] = line_number
line_coverage['count'] = covered_instructions
file_coverage['lines'].append(line_coverage)
if is_branch:
branch_coverage = {}
branch_coverage['line'] = line_number
branch_coverage['total'] = covered_branches + missed_branches
branch_coverage['covered'] = covered_branches
file_coverage['branches'].append(branch_coverage)
data['files'].append(file_coverage)
with open(out_file_path, 'w') as f:
json.dump(data, f)
def main():
  """Builds and runs the jacococli 'report' command, then validates output.

  Collects .exec coverage files, class files and source dirs (from
  *__jacoco_sources.json files and/or flags), invokes jacococli.jar, and for
  the json format converts the intermediate xml report to JSON metadata.

  Returns:
    The process exit code: the jacococli exit status, or 1 when the command
    reported success but produced no report.
  """
  parser = argparse.ArgumentParser()
  args = _ParseArguments(parser)
  devil_chromium.Initialize()
  coverage_files = _GetFilesWithSuffix(args.coverage_dir, '.exec')
  # No .exec files means there is nothing to report on.
  if not coverage_files:
    parser.error('No coverage file found under %s' % args.coverage_dir)
  print('Found coverage files: %s' % str(coverage_files))
  class_files = []
  source_dirs = []
  if args.sources_json_dir:
    # Each JSON file contributes one non-instrumented jar and its source dirs.
    sources_json_files = _GetFilesWithSuffix(args.sources_json_dir,
                                             _SOURCES_JSON_FILES_SUFFIX)
    for f in sources_json_files:
      with open(f, 'r') as json_file:
        data = json.load(json_file)
        class_files.append(data['input_path'])
        source_dirs.extend(data['source_dirs'])
  # Fix source directories as direct parent of Java packages.
  fixed_source_dirs = set()
  for path in source_dirs:
    for partial in _PARTIAL_PACKAGE_NAMES:
      if partial in path:
        # Strip the package part so jacococli sees the package-root directory.
        fixed_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
                                 path[:path.index(partial)])
        fixed_source_dirs.add(fixed_dir)
        break
  if args.class_files:
    class_files += args.class_files
  if args.sources:
    fixed_source_dirs.update(args.sources)
  # Base command; every .exec file is a positional argument to 'report'.
  cmd = [
      'java', '-jar',
      os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'jacoco', 'lib',
                   'jacococli.jar'), 'report'
  ] + coverage_files
  for f in class_files:
    cmd += ['--classfiles', f]
  for source in fixed_source_dirs:
    cmd += ['--sourcefiles', source]
  # For json format, xml report will be generated first temporarily
  # then parsed to json metadata to --output-file.
  with tempfile.NamedTemporaryFile() as temp:
    if args.format == 'html':
      out_cmd = ['--html', args.output_dir]
    elif args.format == 'xml':
      out_cmd = ['--xml', args.output_file]
    elif args.format == 'csv':
      out_cmd = ['--csv', args.output_file]
    else:
      # json: write xml to the temp file, converted to JSON below.
      out_cmd = ['--xml', temp.name]
    cmd += out_cmd
    exit_code = cmd_helper.RunCmd(cmd)
    # NOTE(review): cleanup runs regardless of the command's exit status.
    if args.cleanup:
      for f in coverage_files:
        os.remove(f)
    # Command tends to exit with status 0 when it actually failed.
    if not exit_code:
      if args.format == 'html':
        # html success means a non-empty output directory exists.
        if not os.path.exists(args.output_dir) or not os.listdir(
            args.output_dir):
          print('No report generated at %s' % args.output_dir)
          exit_code = 1
      # out_cmd[1] is the report path for xml/csv/json formats.
      elif not os.path.exists(out_cmd[1]):
        print('No report generated at %s' % args.output_file)
        exit_code = 1
    if args.format == 'json':
      # Passes the raw (unfixed) source_dirs: the metadata generator matches
      # package paths against the full dirs, not the package roots.
      _GenerateJsonCoverageMetadata(args.output_file, temp.name, source_dirs)
    return exit_code
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == '__main__':
  sys.exit(main())