Merge pull request #177 from bjackman/hwmon-no-permissions

hwmon: Disable if no permissions
diff --git a/devlib/__init__.py b/devlib/__init__.py
index b1b4fa3..2b3f3b6 100644
--- a/devlib/__init__.py
+++ b/devlib/__init__.py
@@ -13,14 +13,15 @@
 from devlib.instrument import MEASUREMENT_TYPES, INSTANTANEOUS, CONTINUOUS
 from devlib.instrument.daq import DaqInstrument
 from devlib.instrument.energy_probe import EnergyProbeInstrument
-from devlib.instrument.frames import GfxInfoFramesInstrument
+from devlib.instrument.frames import GfxInfoFramesInstrument, SurfaceFlingerFramesInstrument
 from devlib.instrument.hwmon import HwmonInstrument
 from devlib.instrument.monsoon import MonsoonInstrument
 from devlib.instrument.netstats import NetstatsInstrument
 from devlib.instrument.gem5power import Gem5PowerInstrument
 
-from devlib.derived import DerivedMeasurements
-from devlib.derived.derived_measurements import DerivedEnergyMeasurements
+from devlib.derived import DerivedMeasurements, DerivedMetric
+from devlib.derived.energy import DerivedEnergyMeasurements
+from devlib.derived.fps import DerivedGfxInfoStats, DerivedSurfaceFlingerStats
 
 from devlib.trace.ftrace import FtraceCollector
 
diff --git a/devlib/derived/__init__.py b/devlib/derived/__init__.py
index 5689a58..24ac060 100644
--- a/devlib/derived/__init__.py
+++ b/devlib/derived/__init__.py
@@ -12,8 +12,49 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+from devlib.instrument import MeasurementType, MEASUREMENT_TYPES
+
+
+class DerivedMetric(object):
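+    """
+    A metric computed from previously collected ``Measurement``s rather than
+    measured directly from the target (e.g. average power derived from a
+    power trace).
+    """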
+
+    __slots__ = ['name', 'value', 'measurement_type']
+
+    @property
+    def units(self):
+        return self.measurement_type.units
+
+    def __init__(self, name, value, measurement_type):
+        self.name = name
+        self.value = value
+        if isinstance(measurement_type, MeasurementType):
+            self.measurement_type = measurement_type
+        else:
+            try:
+                self.measurement_type = MEASUREMENT_TYPES[measurement_type]
+            except KeyError:
+                msg = 'Unknown measurement type:  {}'
+                raise ValueError(msg.format(measurement_type))
+
+    def __cmp__(self, other):
+        if hasattr(other, 'value'):
+            return cmp(self.value, other.value)
+        else:
+            return cmp(self.value, other)
+
+    def __str__(self):
+        if self.units:
+            return '{}: {} {}'.format(self.name, self.value, self.units)
+        else:
+            return '{}: {}'.format(self.name, self.value)
+
+    __repr__ = __str__
+
+
 class DerivedMeasurements(object):
 
-    @staticmethod
-    def process(measurements_csv):
-        raise NotImplementedError()
+    def process(self, measurements_csv):
+        return []
+
+    def process_raw(self, *args):
+        return []
diff --git a/devlib/derived/derived_measurements.py b/devlib/derived/energy.py
similarity index 86%
rename from devlib/derived/derived_measurements.py
rename to devlib/derived/energy.py
index 770db88..84d3d7c 100644
--- a/devlib/derived/derived_measurements.py
+++ b/devlib/derived/energy.py
@@ -15,8 +15,8 @@
 from __future__ import division
 from collections import defaultdict
 
-from devlib import DerivedMeasurements
-from devlib.instrument import Measurement, MEASUREMENT_TYPES, InstrumentChannel
+from devlib import DerivedMeasurements, DerivedMetric
+from devlib.instrument import MEASUREMENT_TYPES, InstrumentChannel
 
 
 class DerivedEnergyMeasurements(DerivedMeasurements):
@@ -56,7 +56,7 @@
         power_results = defaultdict(float)
 
         # Process data
-        for count, row in enumerate(measurements_csv.itermeasurements()):
+        for count, row in enumerate(measurements_csv.iter_measurements()):
             if use_timestamp:
                 last_ts = row_ts
                 row_ts = time_measurment.convert(float(row[ts_index].value), 'time')
@@ -86,12 +86,12 @@
         derived_measurements = []
         for site in energy_results:
             total_energy = energy_results[site]['end'] - energy_results[site]['start']
-            instChannel = InstrumentChannel('cum_energy', site, MEASUREMENT_TYPES['energy'])
-            derived_measurements.append(Measurement(total_energy, instChannel))
+            name = '{}_total_energy'.format(site)
+            derived_measurements.append(DerivedMetric(name, total_energy, MEASUREMENT_TYPES['energy']))
 
         for site in power_results:
             power = power_results[site] / (count + 1)  #pylint: disable=undefined-loop-variable
-            instChannel = InstrumentChannel('avg_power', site, MEASUREMENT_TYPES['power'])
-            derived_measurements.append(Measurement(power, instChannel))
+            name = '{}_average_power'.format(site)
+            derived_measurements.append(DerivedMetric(name, power, MEASUREMENT_TYPES['power']))
 
         return derived_measurements
diff --git a/devlib/derived/fps.py b/devlib/derived/fps.py
new file mode 100644
index 0000000..2695c4c
--- /dev/null
+++ b/devlib/derived/fps.py
@@ -0,0 +1,214 @@
+from __future__ import division
+import csv
+import os
+import re
+
+try:
+    import pandas as pd
+except ImportError:
+    pd = None
+
+from devlib import DerivedMeasurements, DerivedMetric, MeasurementsCsv, InstrumentChannel
+from devlib.exception import HostError
+from devlib.utils.rendering import gfxinfo_get_last_dump, VSYNC_INTERVAL
+from devlib.utils.types import numeric
+
+
+class DerivedFpsStats(DerivedMeasurements):
+
+    def __init__(self, drop_threshold=5, suffix=None, filename=None, outdir=None):
+        self.drop_threshold = drop_threshold
+        self.suffix = suffix
+        self.filename = filename
+        self.outdir = outdir
+        if (filename is None) and (suffix is None):
+            self.suffix = '-fps'
+        elif (filename is not None) and (suffix is not None):
+            raise ValueError('suffix and filename cannot be specified at the same time.')
+        if filename is not None and os.sep in filename:
+            raise ValueError('filename cannot be a path (cannot contain "{}")'.format(os.sep))
+
+    def process(self, measurements_csv):
+        if isinstance(measurements_csv, basestring):
+            measurements_csv = MeasurementsCsv(measurements_csv)
+        if pd is not None:
+            return self._process_with_pandas(measurements_csv)
+        return self._process_without_pandas(measurements_csv)
+
+    def _get_csv_file_name(self, frames_file):
+        outdir = self.outdir or os.path.dirname(frames_file)
+        if self.filename:
+            return os.path.join(outdir, self.filename)
+
+        frames_basename = os.path.basename(frames_file)
+        rest, ext = os.path.splitext(frames_basename)
+        csv_basename = rest + self.suffix + ext
+        return os.path.join(outdir, csv_basename)
+
+
+class DerivedGfxInfoStats(DerivedFpsStats):
+
+    @staticmethod
+    def process_raw(filepath, *args):
+        metrics = []
+        dump = gfxinfo_get_last_dump(filepath)
+        seen_stats = False
+        for line in dump.split('\n'):
+            if seen_stats and not line.strip():
+                break
+            elif line.startswith('Janky frames:'):
+                text = line.split(': ')[-1]
+                val_text, pc_text = text.split('(')
+                metrics.append(DerivedMetric('janks', numeric(val_text.strip()), 'count'))
+                metrics.append(DerivedMetric('janks_pc', numeric(pc_text[:-3]), 'percent'))
+            elif ' percentile: ' in line:
+                ptile, val_text = line.split(' percentile: ')
+                name = 'render_time_{}_ptile'.format(ptile)
+                value = numeric(val_text.strip()[:-2])
+                metrics.append(DerivedMetric(name, value, 'time_ms'))
+            elif line.startswith('Number '):
+                name_text, val_text = line.strip().split(': ')
+                name = name_text[7:].lower().replace(' ', '_')
+                value = numeric(val_text)
+                metrics.append(DerivedMetric(name, value, 'count'))
+            else:
+                continue
+            seen_stats = True
+        return metrics
+
+    def _process_without_pandas(self, measurements_csv):
+        per_frame_fps = []
+        start_vsync, end_vsync = None, None
+        frame_count = 0
+
+        for frame_data in measurements_csv.iter_values():
+            if frame_data.Flags_flags != 0:
+                continue
+            frame_count += 1
+
+            if start_vsync is None:
+                start_vsync = frame_data.Vsync_time_us
+            end_vsync = frame_data.Vsync_time_us
+
+            frame_time = frame_data.FrameCompleted_time_us - frame_data.IntendedVsync_time_us
+            pff = 1e9 / frame_time
+            if pff > self.drop_threshold:
+                per_frame_fps.append([pff])
+
+        if frame_count:
+            duration = end_vsync - start_vsync
+            fps = (1e6 * frame_count) / float(duration)
+        else:
+            duration = 0
+            fps = 0
+
+        csv_file = self._get_csv_file_name(measurements_csv.path)
+        with open(csv_file, 'wb') as wfh:
+            writer = csv.writer(wfh)
+            writer.writerow(['fps'])
+            writer.writerows(per_frame_fps)
+
+        return [DerivedMetric('fps', fps, 'fps'),
+                DerivedMetric('total_frames', frame_count, 'frames'),
+                MeasurementsCsv(csv_file)]
+
+    def _process_with_pandas(self, measurements_csv):
+        data = pd.read_csv(measurements_csv.path)
+        data = data[data.Flags_flags == 0]
+        frame_time = data.FrameCompleted_time_us - data.IntendedVsync_time_us
+        per_frame_fps = (1e6 / frame_time)
+        keep_filter = per_frame_fps > self.drop_threshold
+        per_frame_fps = per_frame_fps[keep_filter]
+        per_frame_fps.name = 'fps'
+
+        frame_count = data.index.size
+        if frame_count > 1:
+            duration = data.Vsync_time_us.iloc[-1] - data.Vsync_time_us.iloc[0]
+            fps = (1e9 * frame_count) / float(duration)
+        else:
+            duration = 0
+            fps = 0
+
+        csv_file = self._get_csv_file_name(measurements_csv.path)
+        per_frame_fps.to_csv(csv_file, index=False, header=True)
+
+        return [DerivedMetric('fps', fps, 'fps'),
+                DerivedMetric('total_frames', frame_count, 'frames'),
+                MeasurementsCsv(csv_file)]
+
+
+class DerivedSurfaceFlingerStats(DerivedFpsStats):
+
+    def _process_with_pandas(self, measurements_csv):
+        data = pd.read_csv(measurements_csv.path)
+
+        # filter out bogus frames.
+        bogus_frames_filter = data.actual_present_time_us != 0x7fffffffffffffff
+        actual_present_times = data.actual_present_time_us[bogus_frames_filter]
+        actual_present_time_deltas = actual_present_times.diff().dropna()
+
+        vsyncs_to_compose = actual_present_time_deltas.div(VSYNC_INTERVAL)
+        vsyncs_to_compose = vsyncs_to_compose.apply(lambda x: int(round(x, 0)))
+
+        # drop values lower than drop_threshold FPS as real in-game frame
+        # rate is unlikely to drop below that (except on loading screens
+        # etc, which should not be factored in frame rate calculation).
+        per_frame_fps = (1.0 / (vsyncs_to_compose.multiply(VSYNC_INTERVAL / 1e9)))
+        keep_filter = per_frame_fps > self.drop_threshold
+        filtered_vsyncs_to_compose = vsyncs_to_compose[keep_filter]
+        per_frame_fps.name = 'fps'
+
+        csv_file = self._get_csv_file_name(measurements_csv.path)
+        per_frame_fps.to_csv(csv_file, index=False, header=True)
+
+        if not filtered_vsyncs_to_compose.empty:
+            fps = 0
+            total_vsyncs = filtered_vsyncs_to_compose.sum()
+            frame_count = filtered_vsyncs_to_compose.size
+
+            if total_vsyncs:
+                fps = 1e9 * frame_count / (VSYNC_INTERVAL * total_vsyncs)
+
+            janks = self._calc_janks(filtered_vsyncs_to_compose)
+            not_at_vsync = self._calc_not_at_vsync(vsyncs_to_compose)
+        else:
+            fps = 0
+            frame_count = 0
+            janks = 0
+            not_at_vsync = 0
+
+        return [DerivedMetric('fps', fps, 'fps'),
+                DerivedMetric('total_frames', frame_count, 'frames'),
+                MeasurementsCsv(csv_file),
+                DerivedMetric('janks', janks, 'count'),
+                DerivedMetric('janks_pc', janks * 100 / frame_count, 'percent'),
+                DerivedMetric('missed_vsync', not_at_vsync, 'count')]
+
+    def _process_without_pandas(self, measurements_csv):
+        # Given that SurfaceFlinger has been deprecated in favor of GfxInfo,
+        # it does not seem worth it implementing this.
+        raise HostError('Please install "pandas" Python package to process SurfaceFlinger frames')
+
+    @staticmethod
+    def _calc_janks(filtered_vsyncs_to_compose):
+        """
+        Internal method for calculating jank frames.
+        """
+        pause_latency = 20
+        vtc_deltas = filtered_vsyncs_to_compose.diff().dropna()
+        vtc_deltas = vtc_deltas.abs()
+        janks = vtc_deltas.apply(lambda x: (pause_latency > x > 1.5) and 1 or 0).sum()
+
+        return janks
+
+    @staticmethod
+    def _calc_not_at_vsync(vsyncs_to_compose):
+        """
+        Internal method for calculating the number of frames that did not
+        render in a single vsync cycle.
+        """
+        epsilon = 0.0001
+        func = lambda x: (abs(x - 1.0) > epsilon) and 1 or 0
+        not_at_vsync = vsyncs_to_compose.apply(func).sum()
+
+        return not_at_vsync
diff --git a/devlib/instrument/__init__.py b/devlib/instrument/__init__.py
index 77ba1d3..0d2c1ed 100644
--- a/devlib/instrument/__init__.py
+++ b/devlib/instrument/__init__.py
@@ -72,38 +72,60 @@
             return text.format(self.name, self.units)
 
 
-# Standard measures
+# Standard measures. In order to make sure that downstream data processing is not tied
+# to particular instruments (e.g. a particular method of measuring power), instruments
+# must, where possible, report their measurements formatted as one of the standard types
+# defined here.
 _measurement_types = [
+    # For whatever reason, the type of measurement could not be established.
     MeasurementType('unknown', None),
-    MeasurementType('time', 'seconds',
+
+    # Generic measurements
+    MeasurementType('count', 'count'),
+    MeasurementType('percent', 'percent'),
+
+    # Time measurement. While there is typically a single "canonical" unit
+    # used for each type of measurement, time may be measured for a wide variety
+    # of events occurring at a wide range of scales. Forcing everything into a
+    # single scale would lead to inefficient and awkward-to-work-with result tables.
+    # Conversion functions between the formats are specified, so that downstream
+    # processors that expect all times to be at a particular scale can convert
+    # automatically without being familiar with individual instruments.
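+    # For example (a sketch, assuming the conversions defined below), a value in
+    # seconds could be rescaled with MEASUREMENT_TYPES['time'].convert(1.5, 'time_ms'),
+    # yielding 1500.0.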
+    MeasurementType('time', 'seconds', 'time',
         conversions={
             'time_us': lambda x: x * 1000000,
             'time_ms': lambda x: x * 1000,
         }
     ),
-    MeasurementType('time_us', 'microseconds',
+    MeasurementType('time_us', 'microseconds', 'time',
         conversions={
             'time': lambda x: x / 1000000,
             'time_ms': lambda x: x / 1000,
         }
     ),
-    MeasurementType('time_ms', 'milliseconds',
+    MeasurementType('time_ms', 'milliseconds', 'time',
         conversions={
             'time': lambda x: x / 1000,
             'time_us': lambda x: x * 1000,
         }
     ),
-    MeasurementType('temperature', 'degrees'),
 
+    # Measurements related to thermals.
+    MeasurementType('temperature', 'degrees', 'thermal'),
+
+    # Measurements related to power and energy consumption.
     MeasurementType('power', 'watts', 'power/energy'),
     MeasurementType('voltage', 'volts', 'power/energy'),
     MeasurementType('current', 'amps', 'power/energy'),
     MeasurementType('energy', 'joules', 'power/energy'),
 
+    # Measurements related to data transfer, e.g. networking,
+    # memory, or backing storage.
     MeasurementType('tx', 'bytes', 'data transfer'),
     MeasurementType('rx', 'bytes', 'data transfer'),
     MeasurementType('tx/rx', 'bytes', 'data transfer'),
 
+    MeasurementType('fps', 'fps', 'ui render'),
     MeasurementType('frames', 'frames', 'ui render'),
 ]
 for m in _measurement_types:
@@ -127,7 +149,7 @@
         self.channel = channel
 
     def __cmp__(self, other):
-        if isinstance(other, Measurement):
+        if hasattr(other, 'value'):
             return cmp(self.value, other.value)
         else:
             return cmp(self.value, other)
@@ -147,26 +169,32 @@
         self.path = path
         self.channels = channels
         self.sample_rate_hz = sample_rate_hz
-        self._fh = open(path, 'rb')
         if self.channels is None:
             self._load_channels()
+        headings = [chan.label for chan in self.channels]
+        self.data_tuple = collections.namedtuple('csv_entry', headings)
 
     def measurements(self):
-        return list(self.itermeasurements())
+        return list(self.iter_measurements())
 
-    def itermeasurements(self):
-        self._fh.seek(0)
-        reader = csv.reader(self._fh)
-        reader.next()  # headings
-        for row in reader:
+    def iter_measurements(self):
+        for row in self._iter_rows():
             values = map(numeric, row)
             yield [Measurement(v, c) for (v, c) in zip(values, self.channels)]
 
+    def values(self):
+        return list(self.iter_values())
+
+    def iter_values(self):
+        for row in self._iter_rows():
+            values = map(numeric, row)
+            yield self.data_tuple(*values)
+
     def _load_channels(self):
-        self._fh.seek(0)
-        reader = csv.reader(self._fh)
-        header = reader.next()
-        self._fh.seek(0)
+        header = []
+        with open(self.path, 'rb') as fh:
+            reader = csv.reader(fh)
+            header = reader.next()
 
         self.channels = []
         for entry in header:
@@ -175,22 +203,35 @@
                 if entry.endswith(suffix):
                     site =  entry[:-len(suffix)]
                     measure = mt
-                    name = '{}_{}'.format(site, measure)
                     break
             else:
-                site = entry
-                measure = 'unknown'
-                name = entry
+                if entry in MEASUREMENT_TYPES:
+                    site = None
+                    measure = entry
+                else:
+                    site = entry
+                    measure = 'unknown'
 
-            chan = InstrumentChannel(name, site, measure)
+            chan = InstrumentChannel(site, measure)
             self.channels.append(chan)
 
+    def _iter_rows(self):
+        with open(self.path, 'rb') as fh:
+            reader = csv.reader(fh)
+            reader.next()  # headings
+            for row in reader:
+                yield row
+
 
 class InstrumentChannel(object):
 
     @property
     def label(self):
-        return '{}_{}'.format(self.site, self.kind)
+        if self.site is not None:
+            return '{}_{}'.format(self.site, self.kind)
+        return self.kind
+
+    name = label
 
     @property
     def kind(self):
@@ -200,8 +241,7 @@
     def units(self):
         return self.measurement_type.units
 
-    def __init__(self, name, site, measurement_type, **attrs):
-        self.name = name
+    def __init__(self, site, measurement_type, **attrs):
         self.site = site
         if isinstance(measurement_type, MeasurementType):
             self.measurement_type = measurement_type
@@ -243,10 +283,8 @@
             measure = measure.name
         return [c for c in self.list_channels() if c.kind == measure]
 
-    def add_channel(self, site, measure, name=None, **attrs):
-        if name is None:
-            name = '{}_{}'.format(site, measure)
-        chan = InstrumentChannel(name, site, measure, **attrs)
+    def add_channel(self, site, measure, **attrs):
+        chan = InstrumentChannel(site, measure, **attrs)
         self.channels[chan.label] = chan
 
     # initialization and teardown
@@ -297,3 +335,6 @@
 
     def get_data(self, outfile):
         pass
+
+    def get_raw(self):
+        return []
diff --git a/devlib/instrument/acmecape.py b/devlib/instrument/acmecape.py
index e1bb6c1..1053c9d 100644
--- a/devlib/instrument/acmecape.py
+++ b/devlib/instrument/acmecape.py
@@ -121,3 +121,6 @@
                             output_row.append(float(row[i])/1000)
                     writer.writerow(output_row)
         return MeasurementsCsv(outfile, self.active_channels, self.sample_rate_hz)
+
+    def get_raw(self):
+        return [self.raw_data_file]
diff --git a/devlib/instrument/daq.py b/devlib/instrument/daq.py
index 75e854d..d497151 100644
--- a/devlib/instrument/daq.py
+++ b/devlib/instrument/daq.py
@@ -33,6 +33,7 @@
         # pylint: disable=no-member
         super(DaqInstrument, self).__init__(target)
         self._need_reset = True
+        self._raw_files = []
         if execute_command is None:
             raise HostError('Could not import "daqpower": {}'.format(import_error_mesg))
         if labels is None:
@@ -68,6 +69,7 @@
         if not result.status == Status.OK:  # pylint: disable=no-member
             raise HostError(result.message)
         self._need_reset = False
+        self._raw_files = []
 
     def start(self):
         if self._need_reset:
@@ -86,6 +88,7 @@
             site = os.path.splitext(entry)[0]
             path = os.path.join(tempdir, entry)
             raw_file_map[site] = path
+            self._raw_files.append(path)
 
         active_sites = unique([c.site for c in self.active_channels])
         file_handles = []
@@ -131,6 +134,9 @@
             for fh in file_handles:
                 fh.close()
 
+    def get_raw(self):
+        return self._raw_files
+
     def teardown(self):
         self.execute('close')
 
diff --git a/devlib/instrument/energy_probe.py b/devlib/instrument/energy_probe.py
index 5f47430..c8f179e 100644
--- a/devlib/instrument/energy_probe.py
+++ b/devlib/instrument/energy_probe.py
@@ -52,6 +52,7 @@
         self.raw_output_directory = None
         self.process = None
         self.sample_rate_hz = 10000 # Determined empirically
+        self.raw_data_file = None
 
         for label in self.labels:
             for kind in self.attributes:
@@ -64,6 +65,7 @@
                  for i, rval in enumerate(self.resistor_values)]
         rstring = ''.join(parts)
         self.command = '{} -d {} -l {} {}'.format(self.caiman, self.device_entry, rstring, self.raw_output_directory)
+        self.raw_data_file = None
 
     def start(self):
         self.logger.debug(self.command)
@@ -92,10 +94,10 @@
         num_of_ports = len(self.resistor_values)
         struct_format = '{}I'.format(num_of_ports * self.attributes_per_sample)
         not_a_full_row_seen = False
-        raw_data_file = os.path.join(self.raw_output_directory, '0000000000')
+        self.raw_data_file = os.path.join(self.raw_output_directory, '0000000000')
 
-        self.logger.debug('Parsing raw data file: {}'.format(raw_data_file))
-        with open(raw_data_file, 'rb') as bfile:
+        self.logger.debug('Parsing raw data file: {}'.format(self.raw_data_file))
+        with open(self.raw_data_file, 'rb') as bfile:
             with open(outfile, 'wb') as wfh:
                 writer = csv.writer(wfh)
                 writer.writerow(active_channels)
@@ -114,3 +116,6 @@
                         else:
                             not_a_full_row_seen = True
         return MeasurementsCsv(outfile, self.active_channels, self.sample_rate_hz)
+
+    def get_raw(self):
+        return [self.raw_data_file]
diff --git a/devlib/instrument/frames.py b/devlib/instrument/frames.py
index d1899fb..a2e06b3 100644
--- a/devlib/instrument/frames.py
+++ b/devlib/instrument/frames.py
@@ -1,3 +1,4 @@
+from __future__ import division
 from devlib.instrument import (Instrument, CONTINUOUS,
                                MeasurementsCsv, MeasurementType)
 from devlib.utils.rendering import (GfxinfoFrameCollector,
@@ -20,6 +21,7 @@
         self.collector = None
         self.header = None
         self._need_reset = True
+        self._raw_file = None
         self._init_channels()
 
     def reset(self, sites=None, kinds=None, channels=None):
@@ -27,6 +29,7 @@
         self.collector = self.collector_cls(self.target, self.period,
                                             self.collector_target, self.header)
         self._need_reset = False
+        self._raw_file = None
 
     def start(self):
         if self._need_reset:
@@ -38,14 +41,16 @@
         self._need_reset = True
 
     def get_data(self, outfile):
-        raw_outfile = None
         if self.keep_raw:
-            raw_outfile = outfile + '.raw'
-        self.collector.process_frames(raw_outfile)
+            self._raw_file = outfile + '.raw'
+        self.collector.process_frames(self._raw_file)
         active_sites = [chan.label for chan in self.active_channels]
         self.collector.write_frames(outfile, columns=active_sites)
         return MeasurementsCsv(outfile, self.active_channels, self.sample_rate_hz)
 
+    def get_raw(self):
+        return [self._raw_file] if self._raw_file else []
+
     def _init_channels(self):
         raise NotImplementedError()
 
diff --git a/devlib/instrument/hwmon.py b/devlib/instrument/hwmon.py
index ae49f40..5a9d8af 100644
--- a/devlib/instrument/hwmon.py
+++ b/devlib/instrument/hwmon.py
@@ -45,7 +45,7 @@
                 measure = self.measure_map.get(ts.kind)[0]
                 if measure:
                     self.logger.debug('\tAdding sensor {}'.format(ts.name))
-                    self.add_channel(_guess_site(ts), measure, name=ts.name, sensor=ts)
+                    self.add_channel(_guess_site(ts), measure, sensor=ts)
                 else:
                     self.logger.debug('\tSkipping sensor {} (unknown kind "{}")'.format(ts.name, ts.kind))
             except ValueError:
diff --git a/devlib/platform/arm.py b/devlib/platform/arm.py
index e760eaf..76b58a4 100644
--- a/devlib/platform/arm.py
+++ b/devlib/platform/arm.py
@@ -210,22 +210,22 @@
     mode = CONTINUOUS | INSTANTANEOUS
 
     _channels = [
-        InstrumentChannel('sys_curr', 'sys', 'current'),
-        InstrumentChannel('a57_curr', 'a57', 'current'),
-        InstrumentChannel('a53_curr', 'a53', 'current'),
-        InstrumentChannel('gpu_curr', 'gpu', 'current'),
-        InstrumentChannel('sys_volt', 'sys', 'voltage'),
-        InstrumentChannel('a57_volt', 'a57', 'voltage'),
-        InstrumentChannel('a53_volt', 'a53', 'voltage'),
-        InstrumentChannel('gpu_volt', 'gpu', 'voltage'),
-        InstrumentChannel('sys_pow', 'sys', 'power'),
-        InstrumentChannel('a57_pow', 'a57', 'power'),
-        InstrumentChannel('a53_pow', 'a53', 'power'),
-        InstrumentChannel('gpu_pow', 'gpu', 'power'),
-        InstrumentChannel('sys_cenr', 'sys', 'energy'),
-        InstrumentChannel('a57_cenr', 'a57', 'energy'),
-        InstrumentChannel('a53_cenr', 'a53', 'energy'),
-        InstrumentChannel('gpu_cenr', 'gpu', 'energy'),
+        InstrumentChannel('sys', 'current'),
+        InstrumentChannel('a57', 'current'),
+        InstrumentChannel('a53', 'current'),
+        InstrumentChannel('gpu', 'current'),
+        InstrumentChannel('sys', 'voltage'),
+        InstrumentChannel('a57', 'voltage'),
+        InstrumentChannel('a53', 'voltage'),
+        InstrumentChannel('gpu', 'voltage'),
+        InstrumentChannel('sys', 'power'),
+        InstrumentChannel('a57', 'power'),
+        InstrumentChannel('a53', 'power'),
+        InstrumentChannel('gpu', 'power'),
+        InstrumentChannel('sys', 'energy'),
+        InstrumentChannel('a57', 'energy'),
+        InstrumentChannel('a53', 'energy'),
+        InstrumentChannel('gpu', 'energy'),
     ]
 
     def __init__(self, target):
diff --git a/devlib/target.py b/devlib/target.py
index 51826fb..4b2da42 100644
--- a/devlib/target.py
+++ b/devlib/target.py
@@ -1011,11 +1011,12 @@
             self.uninstall_executable(name)
 
     def get_pids_of(self, process_name):
-        result = self.execute('ps {}'.format(process_name[-15:]), check_exit_code=False).strip()
-        if result and 'not found' not in result:
-            return [int(x.split()[1]) for x in result.split('\n')[1:]]
-        else:
-            return []
+        result = []
+        search_term = process_name[-15:]
+        for entry in self.ps():
+            if search_term in entry.name:
+                result.append(entry.pid)
+        return result
 
     def ps(self, **kwargs):
         lines = iter(convert_new_lines(self.execute('ps')).split('\n'))
@@ -1023,8 +1024,12 @@
         result = []
         for line in lines:
             parts = line.split(None, 8)
-            if parts:
-                result.append(PsEntry(*(parts[0:1] + map(int, parts[1:5]) + parts[5:])))
+            if not parts:
+                continue
+            if len(parts) == 8:
+                # wchan was blank; insert an empty field where it should be.
+                parts.insert(5, '')
+            result.append(PsEntry(*(parts[0:1] + map(int, parts[1:5]) + parts[5:])))
         if not kwargs:
             return result
         else:
diff --git a/devlib/utils/android.py b/devlib/utils/android.py
index 170fda0..ce1ab0b 100644
--- a/devlib/utils/android.py
+++ b/devlib/utils/android.py
@@ -286,6 +286,10 @@
     """
     # TODO this is a hacky way to issue a adb command to all listed devices
 
+    # Ensure server is started so the 'daemon started successfully' message
+    # doesn't confuse the parsing below
+    adb_command(None, 'start-server', adb_server=adb_server)
+
     # The output of calling adb devices consists of a heading line then
     # a list of the devices sperated by new line
     # The last line is a blank new line. in otherwords, if there is a device found
@@ -533,6 +537,16 @@
         aapt = _env.aapt
 
 class LogcatMonitor(threading.Thread):
+    """
+    Helper class for monitoring Android's logcat.
+
+    :param target: Android target to monitor
+    :type target: :class:`AndroidTarget`
+
+    :param regexps: List of regexps used to filter logcat entries on the
+                    device. Logcat entries that don't match any will not be
+                    seen. If omitted, all entries will be sent to host.
+    :type regexps: list(str)
+    """
 
     FLUSH_SIZE = 1000
 
@@ -545,6 +559,7 @@
 
         self.target = target
 
+        self._started = threading.Event()
         self._stopped = threading.Event()
         self._match_found = threading.Event()
 
@@ -556,6 +571,12 @@
         self._regexps = regexps
 
     def start(self, outfile=None):
+        """
+        Start logcat and begin monitoring
+
+        :param outfile: Optional path to file to store all logcat entries
+        :type outfile: str
+        """
         if outfile:
             self._logfile = outfile
         else:
@@ -580,12 +601,21 @@
         logger.debug('logcat command ="{}"'.format(logcat_cmd))
         self._logcat = self.target.background(logcat_cmd)
 
+        self._started.set()
+
         while not self._stopped.is_set():
             line = self._logcat.stdout.readline(1024)
             if line:
                 self._add_line(line)
 
     def stop(self):
+        if not self.is_alive():
+            logger.warning('LogcatMonitor.stop called before start')
+            return
+
+        # Make sure we've started before we try to kill anything
+        self._started.wait()
+
         # Kill the underlying logcat process
         # This will unblock self._logcat.stdout.readline()
         host.kill_children(self._logcat.pid)
@@ -616,7 +646,7 @@
         with self._datalock:
             while not self._lines.empty():
                 self._lines.get()
-                
+
             with open(self._logfile, 'w') as fh:
                 pass
 
@@ -653,6 +683,15 @@
         """
         Search a line that matches a regexp in the logcat log
         Wait for it to appear if it's not found
+
+        :param regexp: regexp to search
+        :type regexp: str
+
+        :param timeout: Timeout in seconds, before raising RuntimeError.
+                        ``None`` means wait indefinitely
+        :type timeout: number
+
+        :returns: List of matched strings
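+
+        Example (the regexp, timeout and filter are illustrative only)::
+
+            monitor = LogcatMonitor(target, regexps=[r'ActivityManager: .*'])
+            monitor.start()
+            matches = monitor.wait_for(r'Displayed .*', timeout=30)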
         """
         res = self.search(regexp)
 
diff --git a/devlib/utils/misc.py b/devlib/utils/misc.py
index f601686..8cfd59f 100644
--- a/devlib/utils/misc.py
+++ b/devlib/utils/misc.py
@@ -103,6 +103,9 @@
         0x211: {0x1: 'KryoGold'},
         0x800: {None: 'Falkor'},
     },
+    0x53: {  # Samsung LSI
+        0x001: {0x1: 'MongooseM1'},
+    },
     0x56: {  # Marvell
         0x131: {
             0x2: 'Feroceon 88F6281',
diff --git a/devlib/utils/rendering.py b/devlib/utils/rendering.py
index 3b7b6c4..6c3909d 100644
--- a/devlib/utils/rendering.py
+++ b/devlib/utils/rendering.py
@@ -18,6 +18,8 @@
 SurfaceFlingerFrame = namedtuple('SurfaceFlingerFrame',
                                  'desired_present_time actual_present_time frame_ready_time')
 
+VSYNC_INTERVAL = 16666667
+
 
 class FrameCollector(threading.Thread):
 
@@ -83,9 +85,14 @@
             header = self.header
             frames = self.frames
         else:
-            header = [c for c in self.header if c in columns]
-            indexes = [self.header.index(c) for c in header]
+            indexes = []
+            for c in columns:
+                if c not in self.header:
+                    msg = 'Invalid column "{}"; must be in {}'
+                    raise ValueError(msg.format(c, self.header))
+                indexes.append(self.header.index(c))
             frames = [[f[i] for i in indexes] for f in self.frames]
+            header = columns
         with open(outfile, 'w') as wfh:
             writer = csv.writer(wfh)
             if header:
@@ -122,7 +129,8 @@
         return self.target.execute(cmd.format(activity))
 
     def list(self):
-        return self.target.execute('dumpsys SurfaceFlinger --list').split('\r\n')
+        text = self.target.execute('dumpsys SurfaceFlinger --list')
+        return text.replace('\r\n', '\n').replace('\r', '\n').split('\n')
 
     def _process_raw_file(self, fh):
         text = fh.read().replace('\r\n', '\n').replace('\r', '\n')
@@ -187,6 +195,7 @@
     def _process_raw_file(self, fh):
         found = False
         try:
+            last_vsync = 0
             while True:
                 for line in fh:
                     if line.startswith('---PROFILEDATA---'):
@@ -197,9 +206,53 @@
                 for line in fh:
                     if line.startswith('---PROFILEDATA---'):
                         break
-                    self.frames.append(map(int, line.strip().split(',')[:-1]))  # has a trailing ','
+                    entries = map(int, line.strip().split(',')[:-1])  # has a trailing ','
+                    if entries[1] <= last_vsync:
+                        continue  # repeat frame
+                    last_vsync = entries[1]
+                    self.frames.append(entries)
         except StopIteration:
             pass
         if not found:
             logger.warning('Could not find frames data in gfxinfo output')
             return
+
+
+def _file_reverse_iter(fh, buf_size=1024):
+    fh.seek(0, os.SEEK_END)
+    offset = 0
+    file_size = remaining_size = fh.tell()
+    while remaining_size > 0:
+        offset = min(file_size, offset + buf_size)
+        fh.seek(file_size - offset)
+        buf = fh.read(min(remaining_size, buf_size))
+        remaining_size -= buf_size
+        yield buf
+
+
+def gfxinfo_get_last_dump(filepath):
+    """
+    Return the last gfxinfo dump from the frame collector's raw output.
+
+    """
+    record = ''
+    with open(filepath, 'r') as fh:
+        fh_iter = _file_reverse_iter(fh)
+        try:
+            while True:
+                buf = fh_iter.next()
+                ix = buf.find('** Graphics')
+                if ix >= 0:
+                    return buf[ix:] + record
+
+                ix = buf.find(' **\n')
+                if ix >= 0:
+                    buf =  fh_iter.next() + buf
+                    ix = buf.find('** Graphics')
+                    if ix < 0:
+                        msg = '"{}" appears to be corrupted'
+                        raise RuntimeError(msg.format(filepath))
+                    return buf[ix:] + record
+                record = buf + record
+        except StopIteration:
+            pass
diff --git a/doc/derived_measurements.rst b/doc/derived_measurements.rst
index fcd497c..d56f94b 100644
--- a/doc/derived_measurements.rst
+++ b/doc/derived_measurements.rst
@@ -9,7 +9,7 @@
 -------
 
 The following example shows how to use an implementation of a
-:class:`DerivedMeasurement` to obtain a list of calculated ``Measurements``.
+:class:`DerivedMeasurements` to obtain a list of calculated ``DerivedMetric`` objects.
 
 .. code-block:: ipython
 
@@ -35,35 +35,187 @@
 Derived Measurements
 ~~~~~~~~~~~~~~~~~~~~
 
-.. class:: DerivedMeasurements()
+.. class:: DerivedMeasurements
 
-   The ``DerivedMeasurements`` class is an abstract base for implementing
-   additional classes to calculate various metrics.
+   The ``DerivedMeasurements`` class provides an API for post-processing
+   instrument output offline (i.e. without a connection to the target device) to
+   generate additional metrics.
 
 .. method:: DerivedMeasurements.process(measurement_csv)
 
-   Returns a list of :class:`Measurement` objects that have been calculated.
+   Process a :class:`MeasurementsCsv`, returning a list of
+   :class:`DerivedMetric` and/or :class:`MeasurementsCsv` objects that have been
+   derived from the input. The exact nature and ordering of the list members
+   is specific to individual :class:`DerivedMeasurements` implementations.
 
+.. method:: DerivedMeasurements.process_raw(\*args)
+
+   Process raw output from an instrument, returning a list of :class:`DerivedMetric`
+   and/or :class:`MeasurementsCsv` objects that have been derived from the
+   input. The exact nature and ordering of the list members is specific to
+   individual :class:`DerivedMeasurements` implementations.
+
+   The arguments to this method should be paths to raw output files generated by
+   an instrument. The number and order of expected arguments is specific to
+   particular implementations.
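+
+   For illustration, a minimal sketch using :class:`DerivedGfxInfoStats`
+   (described below); the path to the raw gfxinfo dump is hypothetical:
+
+   .. code-block:: python
+
+      from devlib import DerivedGfxInfoStats
+
+      for metric in DerivedGfxInfoStats().process_raw('gfxinfo_frames.csv.raw'):
+          print(metric)   # e.g. "janks: 4 count"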
+
+
+Derived Metric
+~~~~~~~~~~~~~~
+
+.. class:: DerivedMetric
+
+  Represents a metric derived from previously collected ``Measurement``\ s.
+  Unlike a ``Measurement``, it is not measured directly from the target.
+
+
+.. attribute:: DerivedMetric.name
+
+   The name of the derived metric. This uniquely defines a metric -- two
+   ``DerivedMetric`` objects with the same ``name`` represent two instances of
+   the same metric (e.g. computed from two different inputs).
+
+.. attribute:: DerivedMetric.value
+
+   The ``numeric`` value of the metric that has been computed for a particular
+   input.
+
+.. attribute:: DerivedMetric.measurement_type
+
+   The ``MeasurementType`` of the metric. This indicates which conceptual
+   category the metric falls into, its units, and conversions to other
+   measurement types.
+
+.. attribute:: DerivedMetric.units
+
+   The units in which the metric's value is expressed.
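+
+For example, a metric returned by one of the implementations below might be
+inspected as follows (the values shown are purely illustrative):
+
+.. code-block:: python
+
+   metric = results[0]       # a DerivedMetric returned by process()
+   print(metric.name)        # e.g. 'device_total_energy'
+   print(metric.units)       # e.g. 'joules'
+   print(metric)             # e.g. 'device_total_energy: 74.3 joules'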
 
 
 Available Derived Measurements
 -------------------------------
-.. class:: DerivedEnergyMeasurements()
 
-  The ``DerivedEnergyMeasurements`` class is used to calculate average power and
-  cumulative energy for each site if the required data is present.
+.. note:: If a method of the API is not documented for a particular
+          implementation, that means that it is not overridden by that
+          implementation. It is still safe to call it -- an empty list will be
+          returned.
 
-  The calculation of cumulative energy can occur in 3 ways. If a
-  ``site`` contains ``energy`` results, the first and last measurements are extracted
-  and the delta calculated. If not, a ``timestamp`` channel will be used to calculate
-  the energy from the power channel, failing back to using the sample rate attribute
-  of the :class:`MeasurementCsv` file if timestamps are not available. If neither
-  timestamps or a sample rate are available then an error will be raised.
+Energy
+~~~~~~
+
+.. class:: DerivedEnergyMeasurements
+
+   The ``DerivedEnergyMeasurements`` class is used to calculate average power and
+   cumulative energy for each site if the required data is present.
+
+   The calculation of cumulative energy can occur in 3 ways. If a
+   ``site`` contains ``energy`` results, the first and last measurements are extracted
+   and the delta calculated. If not, a ``timestamp`` channel will be used to calculate
+   the energy from the power channel, falling back to using the sample rate attribute
+   of the :class:`MeasurementsCsv` file if timestamps are not available. If neither
+   timestamps nor a sample rate are available, an error will be raised.
 
 
 .. method:: DerivedEnergyMeasurements.process(measurement_csv)
 
-  Returns a list of :class:`Measurement` objects that have been calculated for
-  the average power and cumulative energy for each site.
+   This will return total cumulative energy for each energy channel, and the
+   average power for each power channel in the input CSV. The output will contain
+   all energy metrics followed by power metrics. The ordering of both will match
+   the ordering of channels in the input. The metrics will be named based on the
+   sites of the corresponding channels according to the following patterns:
+   ``"<site>_total_energy"`` and ``"<site>_average_power"``.
 
 
+FPS / Rendering
+~~~~~~~~~~~~~~~
+
+.. class:: DerivedGfxInfoStats(drop_threshold=5, suffix='-fps', filename=None, outdir=None)
+
+   Produces FPS (frames-per-second) and other derived statistics from
+   :class:`GfxInfoFramesInstrument` output. This takes several optional
+   parameters on creation:
+
+   :param drop_threshold: The FPS of an application, such as a game, which this
+                          processor is primarily targeted at, cannot reasonably
+                          drop to a very low value. This parameter specifies
+                          that threshold. If the FPS computed for a frame is
+                          lower than this threshold, the frame will be dropped
+                          on the assumption that frame rendering was suspended
+                          by the system (e.g. when idling), or that there was
+                          some sort of error, and therefore it should not be
+                          used in performance calculations. Defaults to ``5``.
+   :param suffix: The name of the generated per-frame FPS csv file will be
+                   derived from the input frames csv file by appending this
+                   suffix. This cannot be specified at the same time as
+                   a ``filename``.
+   :param filename: As an alternative to the suffix, a complete file name for
+                    FPS csv can be specified. This cannot be used at the same
+                    time as the ``suffix``.
+   :param outdir: By default, the FPS csv file will be placed in the same
+                  directory as the input frames csv file. This can be changed
+                  by specifying an alternate directory here.
+
+   .. warning:: Specifying both ``filename`` and ``outdir`` will mean that exactly
+                the same file will be used for FPS output on each invocation of
+                ``process()`` (even for different inputs) resulting in previous
+                results being overwritten.
+
+.. method:: DerivedGfxInfoStats.process(measurement_csv)
+
+   Process the frames csv generated by :class:`GfxInfoFramesInstrument` and
+   return a list containing exactly three entries: :class:`DerivedMetric`\ s
+   ``fps`` and ``total_frames``, followed by a :class:`MeasurementsCsv` containing
+   per-frame FPS values.
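+
+   For instance (the input path is hypothetical):
+
+   .. code-block:: python
+
+      from devlib import DerivedGfxInfoStats
+
+      d = DerivedGfxInfoStats(drop_threshold=5, suffix='-fps')
+      fps, total_frames, fps_csv = d.process('gfxinfo_frames.csv')
+      # fps and total_frames are DerivedMetric objects; fps_csv is a
+      # MeasurementsCsv of per-frame FPS values written next to the input.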
+
+.. method:: DerivedGfxInfoStats.process_raw(gfxinfo_frame_raw_file)
+
+   As input, this takes a single argument, which should be the path to the raw
+   output file of :class:`GfxInfoFramesInstrument`. This returns the stats
+   accumulated by gfxinfo. At the time of writing, the stats (in order) are:
+   ``janks``, ``janks_pc`` (percentage of all frames),
+   ``render_time_50th_ptile`` (50th percentile, or median, for time to render a
+   frame), ``render_time_90th_ptile``, ``render_time_95th_ptile``,
+   ``render_time_99th_ptile``, ``missed_vsync``, ``high_input_latency``,
+   ``slow_ui_thread``, ``slow_bitmap_uploads``, ``slow_issue_draw_commands``.
+   Please see the `gfxinfo documentation`_ for details.
+
+.. _gfxinfo documentation: https://developer.android.com/training/testing/performance.html
+
+
+.. class:: DerivedSurfaceFlingerStats(drop_threshold=5, suffix='-fps', filename=None, outdir=None)
+
+   Produces FPS (frames-per-second) and other derived statistics from
+   :class:`SurfaceFlingerFramesInstrument` output. This takes several optional
+   parameters on creation:
+
+   :param drop_threshold: The FPS of an application, such as a game, which this
+                          processor is primarily targeted at, cannot reasonably
+                          drop to a very low value. This parameter specifies
+                          that threshold. If the FPS computed for a frame is
+                          lower than this threshold, the frame will be dropped
+                          on the assumption that frame rendering was suspended
+                          by the system (e.g. when idling), or that there was
+                          some sort of error, and therefore it should not be
+                          used in performance calculations. Defaults to ``5``.
+   :param suffix: The name of the generated per-frame FPS csv file will be
+                   derived from the input frames csv file by appending this
+                   suffix. This cannot be specified at the same time as
+                   a ``filename``.
+   :param filename: As an alternative to the suffix, a complete file name for
+                    FPS csv can be specified. This cannot be used at the same
+                    time as the ``suffix``.
+   :param outdir: By default, the FPS csv file will be placed in the same
+                  directory as the input frames csv file. This can be changed
+                  by specifying an alternate directory here.
+
+   .. warning:: Specifying both ``filename`` and ``outdir`` will mean that exactly
+                the same file will be used for FPS output on each invocation of
+                ``process()`` (even for different inputs) resulting in previous
+                results being overwritten.
+
+.. method:: DerivedSurfaceFlingerStats.process(measurement_csv)
+
+   Process the frames csv generated by :class:`SurfaceFlingerFramesInstrument` and
+   return a list containing exactly six entries: :class:`DerivedMetric`\ s
+   ``fps`` and ``total_frames``, followed by a :class:`MeasurementsCsv` containing
+   per-frame FPS values, followed by the ``janks``, ``janks_pc``, and
+   ``missed_vsync`` metrics.
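+
+   For instance (the path is hypothetical, and ``pandas`` must be installed
+   for SurfaceFlinger processing):
+
+   .. code-block:: python
+
+      from devlib import DerivedSurfaceFlingerStats
+
+      d = DerivedSurfaceFlingerStats()
+      fps, total_frames, fps_csv, janks, janks_pc, missed_vsync = \
+          d.process('surfaceflinger_frames.csv')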
diff --git a/doc/instrumentation.rst b/doc/instrumentation.rst
index 8aee1ce..0d4a6ce 100644
--- a/doc/instrumentation.rst
+++ b/doc/instrumentation.rst
@@ -65,8 +65,8 @@
    :INSTANTANEOUS: The instrument supports taking a single sample via
                    ``take_measurement()``.
    :CONTINUOUS: The instrument supports collecting measurements over a
-                period of time via ``start()``, ``stop()``, and
-                ``get_data()`` methods.
+                period of time via ``start()``, ``stop()``, ``get_data()``,
+                and (optionally) ``get_raw()`` methods.
 
    .. note:: It's possible for one instrument to support more than a single
              mode.
@@ -161,6 +161,13 @@
    .. note:: This method is only implemented by :class:`Instrument`\ s that
              support ``CONTINUOUS`` measurement.
 
+.. method:: Instrument.get_raw()
+
+   Returns a list of paths to files containing raw output from the underlying
+   source(s) that is used to produce the data CSV. If no raw output is
+   generated or saved, an empty list will be returned. The format of the
+   contents of the raw files is entirely source-dependent.
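+
+   For example, a hedged sketch of a typical ``CONTINUOUS`` workflow (the
+   instrument construction and the workload invocation are hypothetical):
+
+   .. code-block:: python
+
+      instrument.reset()
+      instrument.start()
+      run_workload()                      # placeholder for the workload under test
+      instrument.stop()
+      csv = instrument.get_data('measurements.csv')
+      raw_files = instrument.get_raw()    # [] if no raw output was generated or kept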
+
 .. attribute:: Instrument.sample_rate_hz
 
    Sample rate of the instrument in Hz. Assumed to be the same for all channels.
@@ -229,13 +236,15 @@
 +-------------+-------------+---------------+
 | name        | units       | category      |
 +=============+=============+===============+
-| time        | seconds     |               |
+| count       | count       |               |
 +-------------+-------------+---------------+
-| time        | microseconds|               |
+| percent     | percent     |               |
 +-------------+-------------+---------------+
-| time        | milliseconds|               |
+| time_us     | microseconds|  time         |
 +-------------+-------------+---------------+
-| temperature | degrees     |               |
+| time_ms     | milliseconds|  time         |
++-------------+-------------+---------------+
+| temperature | degrees     |  thermal      |
 +-------------+-------------+---------------+
 | power       | watts       | power/energy  |
 +-------------+-------------+---------------+