# blob: 65922445037061d9165b5a21ae55591834cfeee9
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implementation of BenchmarkResults
Includes definition of MeasurementList, which is a list of Measurement
instances.
"""
from abc import abstractmethod, abstractproperty
from collections import defaultdict, namedtuple
import itertools
from safetynet import Optional, TypecheckMeta, List
import numpy as np
from optofidelity.detection import Event
def SummarizeList(values):
  """Format a list of numbers as "mean (std=..., N=...)" for reports."""
  mean, std = np.mean(values), np.std(values)
  return "%.2f (std=%.2f, N=%d)" % (mean, std, len(values))
# Static description of one measurement series: its name, the
# MeasurementSeries subclass that produces it, the units of its values, and
# the minimum number of samples the series is expected to contain.
MeasurementSeriesInfo = namedtuple(
    "MeasurementSeriesInfo",
    ("series_name", "series_type", "units", "min_num_samples"))
class MeasurementSeries(list):
  """A list of measurements collected during a single benchmark pass.

  Abstract base class: each subclass decides what type of item the list
  holds, and must implement the ``values`` property to expose the
  measurements as floating point numbers, as well as ``begin_time`` and
  ``end_time``.
  """
  DEFAULT_UNITS = "ms"
  DEFAULT_MIN_NUM_SAMPLES = 100
  __metaclass__ = TypecheckMeta

  def __init__(self, series_name, pass_num, ms_per_frame):
    self.series_name = series_name  # human-readable series identifier
    self.pass_num = pass_num  # which benchmark pass produced this series
    self.type_name = type(self).__name__  # remembers the concrete subclass
    self.ms_per_frame = ms_per_frame  # duration of one camera frame in ms

  def __str__(self):
    description = "%s (Pass #%d): %s"
    return description % (self.series_name, self.pass_num, self.values)

  @abstractproperty
  def begin_time(self):
    """:returns int: time at which the first measurement starts."""

  @abstractproperty
  def end_time(self):
    """:returns int: time at which the last measurement ends."""

  @abstractproperty
  def values(self):
    """:returns List[float]: values of this series of measurements"""

  def SanityCheck(self):
    """Hook for subclasses to validate their measurements; no-op here."""
    pass
# Pairs an input event with the draw event it eventually triggered.
DrawEventLatencyMeasurement = namedtuple(
    "DrawEventLatencyMeasurement", ("input_event", "output_event"))
class DrawEventLatencySeries(MeasurementSeries):
  """A series that measures the latency between an input and a draw event."""
  # SanityCheck bounds for a single latency value, in ms.
  MIN_EXPECTED_LATENCY = 0
  MAX_EXPECTED_LATENCY = 500

  def AddMeasurement(self, input_event, output_event):
    """Record one (input event, draw event) pair.

    :type input_event: Event
    :type output_event: Event
    """
    measurement = DrawEventLatencyMeasurement(input_event, output_event)
    self.append(measurement)

  @property
  def begin_time(self):
    return min(pair.input_event.time for pair in self)

  @property
  def end_time(self):
    return max(pair.output_event.time for pair in self)

  @property
  def values(self):
    """Latencies in ms: camera-frame deltas scaled by ms_per_frame."""
    frame_deltas = (pair.output_event.time - pair.input_event.time
                    for pair in self)
    return [float(delta) * self.ms_per_frame for delta in frame_deltas]

  def SanityCheck(self):
    """Raises if any latency falls outside the expected range."""
    for latency in self.values:
      if not self.MIN_EXPECTED_LATENCY <= latency <= self.MAX_EXPECTED_LATENCY:
        raise Exception("Calculated latency is out of expected range")
class DrawEventStartLatencySeries(DrawEventLatencySeries):
  """Measures latency from the input event to the start of the draw event."""

  @property
  def values(self):
    # Same as the parent, but measured to the draw event's start_time
    # instead of its (end) time.
    deltas = (pair.output_event.start_time - pair.input_event.time
              for pair in self)
    return [float(delta) * self.ms_per_frame for delta in deltas]
class DroppedFramesSeries(MeasurementSeries):
  """Measures the percent of dropped frames assuming a 60Hz refresh rate.

  Items in this list are the observed draw events (times in camera frames).
  A new frame is expected every FRAMES_PER_VSYNC camera frames; larger gaps
  between consecutive draw events are counted as dropped frames.
  """
  DEFAULT_UNITS = "%"
  DEFAULT_MIN_NUM_SAMPLES = 0
  # Expected number of camera frames between consecutive draw events.
  # Previously a magic "5" repeated inline; with ms_per_frame of 3.33 this
  # corresponds to ~16.7ms, i.e. one 60Hz vsync interval.
  FRAMES_PER_VSYNC = 5
  # Camera-frame gap between draw events above which SanityCheck fails.
  MAX_EXPECTED_FRAME_DELTA = 20

  def AddMeasurement(self, draw_event):
    """Record one observed draw event.

    :type draw_event: Event
    """
    self.append(draw_event)

  @property
  def begin_time(self):
    return min(e.time for e in self)

  @property
  def end_time(self):
    return max(e.time for e in self)

  @property
  def dropped_times(self):
    """Times (in camera frames) at which an expected frame did not appear."""
    dropped_times = []
    last_time = None
    for event in self:
      if last_time is None:
        last_time = event.time
        continue
      expected_frame = last_time + self.FRAMES_PER_VSYNC
      # The "- 1" gives one camera frame of tolerance before counting a
      # frame as dropped.
      while expected_frame < event.time - 1:
        dropped_times.append(expected_frame)
        expected_frame += self.FRAMES_PER_VSYNC
      last_time = event.time
    return dropped_times

  @property
  def values(self):
    """Single-element list: percentage of frames that were dropped."""
    if len(self):
      num_frames = len(self.dropped_times) + len(self)
      return [float(len(self.dropped_times)) / float(num_frames) * 100.0]
    # No draw events observed at all counts as everything dropped.
    return [100.0]

  def SanityCheck(self):
    """Raises if the gap between consecutive draw events is too large."""
    times = [e.time for e in self]
    if len(times) < 2:
      # np.diff yields an empty array for fewer than two samples and
      # max() would raise ValueError; there is nothing to check here.
      return
    if max(np.diff(times)) > self.MAX_EXPECTED_FRAME_DELTA:
      raise Exception("Time between frames higher than expected")
class TimeValueMeasurementSeries(MeasurementSeries):
  """Holds simple (time, value) measurement pairs."""
  DEFAULT_UNITS = "ms"
  DEFAULT_MIN_NUM_SAMPLES = 0

  def AddMeasurement(self, time, value):
    """Record a single measurement.

    :param time: time at which the value was measured.
    :param value: measured value; stored as a float.
    """
    self.append((time, float(value)))

  @property
  def begin_time(self):
    return min(pair[0] for pair in self)

  @property
  def end_time(self):
    return max(pair[0] for pair in self)

  @property
  def values(self):
    # Values are scaled from camera frames to ms like the other series.
    return [pair[1] * self.ms_per_frame for pair in self]
class PassMeasurements(dict):
  """A dictionary of MeasurementSeries made in a single pass.

  This dictionary maps the series name to the instance of that series.
  """

  @property
  def begin_time(self):
    """:returns int: time at which the first measurement starts."""
    # .values() rather than the py2-only .itervalues() keeps this consistent
    # with the rest of the file (e.g. BenchmarkMeasurements.SanityCheck) and
    # portable; the iteration result is identical.
    return min(series.begin_time for series in self.values())

  @property
  def end_time(self):
    """:returns int: time at which the last measurement ends."""
    return max(series.end_time for series in self.values())
class BenchmarkMeasurements(dict):
  """A dictionary of all MeasurementSeries made in one benchmark.

  The dictionary maps a pass number to a PassMeasurements instance, which
  contains the MeasurementSeries made in the benchmark.
  """
  __metaclass__ = TypecheckMeta
  DEFAULT_MIN_NUM_SAMPLES = 100

  def __setstate__(self, state):
    """For compatibility with older pickled versions."""
    self.series_infos = state.get("series_infos", dict())
    self.ms_per_frame = state.get("ms_per_frame", 3.33)

  def __init__(self, ms_per_frame=None, series_infos=None):
    """
    :param float ms_per_frame: duration of one camera frame in ms
        (defaults to 3.33).
    :param dict series_infos: optional mapping of series name to
        MeasurementSeriesInfo.
    """
    super(BenchmarkMeasurements, self).__init__()
    self.series_infos = series_infos or dict()
    self.ms_per_frame = ms_per_frame or 3.33

  def GetSeries(self, series_name, pass_num, series_type):
    """Returns the series for (series_name, pass_num), creating it if needed.

    Also registers a MeasurementSeriesInfo for series_name on first use.
    """
    if series_name not in self.series_infos:
      series_info = MeasurementSeriesInfo(series_name, series_type,
                                          series_type.DEFAULT_UNITS,
                                          series_type.DEFAULT_MIN_NUM_SAMPLES)
      self.series_infos[series_name] = series_info
    if pass_num not in self:
      self[pass_num] = PassMeasurements()
    series = self[pass_num].get(series_name)
    # Compare against None explicitly: an existing but still empty series is
    # falsy (MeasurementSeries is a list subclass), and "if not series" would
    # silently replace it, orphaning any reference a caller already holds.
    if series is None:
      series = series_type(series_name, pass_num, self.ms_per_frame)
      self[pass_num][series_name] = series
    return series

  def AddDrawEventMeasurement(self, name, input_event, output_event,
                              pass_num=0):
    """Records both the start-latency and end-latency of a draw event."""
    start_series = self.GetSeries(name + "Start", pass_num,
                                  DrawEventStartLatencySeries)
    start_series.AddMeasurement(input_event, output_event)
    end_series = self.GetSeries(name, pass_num, DrawEventLatencySeries)
    end_series.AddMeasurement(input_event, output_event)

  def AddTimeValueMeasurement(self, name, time, value, pass_num=0):
    """Records a plain (time, value) measurement."""
    series = self.GetSeries(name, pass_num, TimeValueMeasurementSeries)
    series.AddMeasurement(time, value)

  def AddDroppedFramesMeasurement(self, name, draw_event, pass_num=0):
    """Records a draw event for dropped-frame accounting."""
    series = self.GetSeries(name, pass_num, DroppedFramesSeries)
    series.AddMeasurement(draw_event)

  def SanityCheck(self):
    """Checks measurements for sanity."""
    for pass_msrmnts in self.values():
      for series in pass_msrmnts.values():
        series.SanityCheck()

  def SummarizeValues(self, series_name):
    """Collects all MeasurementSeries.values for the specified series_name.

    This summarizes the values from multiple passes.
    :param str series_name: Name of the measurement series.
    :returns List[float]: List of values.
    """
    series_list = (pass_msrmnts.get(series_name)
                   for pass_msrmnts in self.values()
                   if series_name in pass_msrmnts)
    return [value for series in series_list
            for value in series.values]

  def __str__(self):
    lines = []
    def add_line(indent, fmt, *params):
      lines.append(" " * indent + (fmt % params))
    add_line(1, "Benchmark Measurements:")
    # .items()/.values() instead of the py2-only iter* variants, consistent
    # with SanityCheck/SummarizeValues above; iteration order is unchanged.
    for pass_num, pass_msrmnts in self.items():
      add_line(2, "Pass %d:", pass_num)
      for series in pass_msrmnts.values():
        info = SummarizeList(series.values)
        add_line(3, "%s: %s", series.series_name, info)
    return "\n".join(lines)
class AggregateMeasurements(dict):
  """A dictionary of measurements from multiple benchmarks.

  Maps a repetition index to the BenchmarkMeasurements of that repetition.
  """
  __metaclass__ = TypecheckMeta

  def HasMinNumSamples(self):
    """Returns True if every series has at least its min_num_samples."""
    # .items()/.values() instead of the py2-only iter* variants, for
    # consistency with the rest of the file; behavior is identical.
    for series_name, series_info in self.series_infos.items():
      series_values = self.SummarizeValues(series_name)
      if len(series_values) < series_info.min_num_samples:
        return False
    return True

  @property
  def series_infos(self):
    """Aggregated series_infos from all BenchmarkMeasurements."""
    return dict((name, info) for benchmark_msrmnts in self.values()
                for name, info in benchmark_msrmnts.series_infos.items())

  def SummarizeValues(self, series_name):
    """Collects all MeasurementSeries.values for the specified series_name.

    This summarizes the values from multiple benchmarks and passes.
    :param str series_name: Name of the measurement series.
    :returns List[float]: List of values.
    """
    return [value for benchmark_msrmnts in self.values()
            for value in benchmark_msrmnts.SummarizeValues(series_name)]

  def __str__(self):
    lines = []
    def add_line(indent, fmt, *params):
      lines.append(" " * indent + (fmt % params))
    add_line(1, "Aggregate Measurements:")
    for series_name in self.series_infos:
      info = SummarizeList(self.SummarizeValues(series_name))
      add_line(2, "%s: %s", series_name, info)
    return "\n".join(lines)
class Measurement(object):
  """Stub kept so that older pickled result files can still be loaded."""
class MeasurementList(list):
  """Stub kept so that older pickled result files can still be loaded."""
class BenchmarkResults(object):
  """Results of a benchmark.

  The results include a BenchmarkMeasurements instance with the measurements
  made by the benchmark, an optional error string describing why the
  measurement might have failed, a UID and a report URL.
  """
  __metaclass__ = TypecheckMeta

  def __init__(self, benchmark_name, metadata):
    """
    :param str benchmark_name: name of the benchmark that produced this.
    :param dict metadata: free-form metadata included in the string report.
    """
    self.benchmark_name = benchmark_name
    self.metadata = metadata
    self.uid = ""
    self.report_url = ""
    self.error = ""
    self._counter = 0
    self.measurements = None

  def AddCalibration(self, input_event, output_event, pass_num=0):
    """Add a calibration measurement to the results.

    Currently a no-op; kept for interface compatibility.

    :param Event input_event: Event at which the calibration measurement starts.
    :param Event output_event: Event at which the calibration measurement ends.
    :param int pass_num: Measurements can be made in multiple passes,
        this field identifies which pass the calibration belongs to.
    """

  def __setstate__(self, state):
    """For compatibility with older pickled versions.

    Older pickles stored a flat "measurement_list"; migrate those items
    into a BenchmarkMeasurements instance.
    """
    self.__init__(state.get("benchmark_name", ""),
                  state.get("metadata", {}))
    old_list = state.get("measurement_list", None)
    if old_list:
      self.measurements = BenchmarkMeasurements(state.get("ms_per_frame"))
      for item in old_list:
        # GetSeries registers the series info and creates the pass/series
        # entries as needed. The previous code called AddSeriesInfo and
        # AddSeries, which do not exist on BenchmarkMeasurements, and would
        # have raised an AttributeError on any old-style pickle.
        series = self.measurements.GetSeries(item.measurement_id,
                                             item.pass_num,
                                             DrawEventLatencySeries)
        series.AddMeasurement(item.input_event, item.output_event)
    self.__dict__.update(state)

  def __str__(self):
    lines = []
    def add_line(indent, fmt, *params):
      lines.append(" " * indent + (fmt % params))
    def add_field(field_name):
      add_line(1, "%s: %s", field_name, str(getattr(self, field_name)))
    add_line(0, "%s Benchmark Results", self.benchmark_name)
    add_line(1, "metadata:")
    # .items() instead of the py2-only .iteritems(), consistent with the
    # rest of the file; output is unchanged.
    for key, value in self.metadata.items():
      add_line(2, "%s: %s", key, value)
    add_field("uid")
    add_field("report_url")
    add_field("error")
    if self.measurements:
      lines.append(str(self.measurements))
    return "\n".join(lines)
class AggregateResults(BenchmarkResults):
  """Results aggregated over several repetitions of the same benchmark."""

  def __init__(self, benchmark_name, metadata):
    super(AggregateResults, self).__init__(benchmark_name, metadata)
    self.repetitions = []  # BenchmarkResults of each repetition, in order
    self.measurements = AggregateMeasurements()

  def AddRepetition(self, results):
    """Append one repetition's results and index its measurements.

    :param BenchmarkResults results: results of a single repetition.
    """
    self.repetitions.append(results)
    repetition_index = len(self.repetitions) - 1
    self.measurements[repetition_index] = results.measurements