tauto - remove more test libs now that cros_host is gone

BUG=None
TEST=dummy_Pass, unittest_suite

Change-Id: I693a03c87568a5c31159807eee8ebaf149bbc6fe
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/tauto/+/3125466
Tested-by: Derek Beckett <dbeckett@chromium.org>
Reviewed-by: C Shapiro <shapiroc@chromium.org>
diff --git a/server/cros/camerabox_utils.py b/server/cros/camerabox_utils.py
deleted file mode 100644
index 95204d9..0000000
--- a/server/cros/camerabox_utils.py
+++ /dev/null
@@ -1,174 +0,0 @@
-# Lint as: python2, python3
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import contextlib
-import json
-import logging
-import os
-import time
-
-from autotest_lib.client.common_lib import error, utils
-
-
-class ChartFixture:
-    """Sets up chart tablet to display placeholder scene image."""
-    DISPLAY_SCRIPT = '/usr/local/autotest/bin/display_chart.py'
-    OUTPUT_LOG = '/tmp/chart_service.log'
-
-    def __init__(self, chart_host, scene_uri):
-        self.host = chart_host
-        self.scene_uri = scene_uri
-        self.display_pid = None
-        self.host.run(['rm', '-f', self.OUTPUT_LOG])
-
-    def initialize(self):
-        """Prepare scene file and display it on chart host."""
-        logging.info('Prepare scene file')
-        tmpdir = self.host.get_tmp_dir()
-        scene_path = os.path.join(
-                tmpdir, self.scene_uri[self.scene_uri.rfind('/') + 1:])
-        self.host.run('wget', args=('-O', scene_path, self.scene_uri))
-
-        logging.info('Display scene file')
-        self.display_pid = self.host.run_background(
-                'python2 {script} {scene} >{log} 2>&1'.format(
-                        script=self.DISPLAY_SCRIPT,
-                        scene=scene_path,
-                        log=self.OUTPUT_LOG))
-
-        logging.info(
-                'Poll for "is ready" message for ensuring chart is ready.')
-        timeout = 60
-        poll_time_step = 0.1
-        while timeout > 0:
-            if self.host.run(
-                    'grep',
-                    args=('-q', 'Chart is ready.', self.OUTPUT_LOG),
-                    ignore_status=True).exit_status == 0:
-                break
-            time.sleep(poll_time_step)
-            timeout -= poll_time_step
-        else:
-            raise error.TestError('Timeout waiting for chart ready')
-
-    def cleanup(self):
-        """Cleanup display script."""
-        if self.display_pid is not None:
-            self.host.run(
-                    'kill',
-                    args=('-2', str(self.display_pid)),
-                    ignore_status=True)
-            self.host.get_file(self.OUTPUT_LOG, '.')
-
-
-def get_chart_address(host_address, args):
-    """Get address of chart tablet from commandline args or mapping logic in
-    test lab.
-
-    @param host_address: a list of hostname strings.
-    @param args: a dict parse from commandline args.
-    @return:
-        A list of strings for chart tablet addresses.
-    """
-    address = utils.args_to_dict(args).get('chart')
-    if address is not None:
-        return address.split(',')
-    elif utils.is_in_container():
-        return [utils.get_lab_chart_address(host) for host in host_address]
-    else:
-        return None
-
-
-class DUTFixture:
-    """Sets up camera filter for target camera facing on DUT."""
-    TEST_CONFIG_PATH = '/var/cache/camera/test_config.json'
-    CAMERA_SCENE_LOG = '/tmp/scene.jpg'
-
-    def __init__(self, test, host, facing):
-        self.test = test
-        self.host = host
-        self.facing = facing
-
-    @contextlib.contextmanager
-    def _set_selinux_permissive(self):
-        selinux_mode = self.host.run_output('getenforce')
-        self.host.run('setenforce 0')
-        yield
-        self.host.run('setenforce', args=(selinux_mode, ))
-
-    def _write_file(self, filepath, content, permission=None, owner=None):
-        """Write content to filepath on remote host.
-        @param permission: set permission to 0xxx octal number of remote file.
-        @param owner: set owner of remote file.
-        """
-        tmp_path = os.path.join(self.test.tmpdir, os.path.basename(filepath))
-        with open(tmp_path, 'w') as f:
-            f.write(content)
-        if permission is not None:
-            os.chmod(tmp_path, permission)
-        self.host.send_file(tmp_path, filepath, delete_dest=True)
-        if owner is not None:
-            self.host.run('chown', args=(owner, filepath))
-
-    def initialize(self):
-        """Filter out camera other than target facing on DUT."""
-        self._write_file(
-                self.TEST_CONFIG_PATH,
-                json.dumps({
-                        'enable_back_camera': self.facing == 'back',
-                        'enable_front_camera': self.facing == 'front',
-                        'enable_external_camera': False
-                }),
-                owner='arc-camera')
-
-        # cros_camera_service will reference the test config to filter out
-        # undesired cameras.
-        logging.info('Restart camera service with filter option')
-        self.host.upstart_restart('cros-camera')
-
-        # arc_setup will reference the test config to filter out the media
-        # profile of undesired cameras.
-        logging.info('Restart ARC++ container with camera test config')
-        self.host.run('restart ui')
-
-    @contextlib.contextmanager
-    def _stop_camera_service(self):
-        # Ensure camera service is running or the
-        # upstart_stop()/upstart_restart() may failed due to in
-        # "start|post-stop" sleep for respawning state. See b/183904344 for
-        # detail.
-        logging.info('Wait for presence of camera service')
-        self.host.wait_for_service('cros-camera')
-
-        self.host.upstart_stop('cros-camera')
-        yield
-        self.host.upstart_restart('cros-camera')
-
-    def log_camera_scene(self):
-        """Capture an image from camera as the log for debugging scene related
-        problem."""
-
-        gtest_filter = (
-                'Camera3StillCaptureTest/'
-                'Camera3DumpSimpleStillCaptureTest.DumpCaptureResult/0')
-        with self._stop_camera_service():
-            self.host.run(
-                    'sudo',
-                    args=('--user=arc-camera', 'cros_camera_test',
-                          '--gtest_filter=' + gtest_filter,
-                          '--camera_facing=' + self.facing,
-                          '--dump_still_capture_path=' +
-                          self.CAMERA_SCENE_LOG))
-
-        self.host.get_file(self.CAMERA_SCENE_LOG, '.')
-
-    def cleanup(self):
-        """Cleanup camera filter."""
-        logging.info('Remove filter option and restore camera service')
-        with self._stop_camera_service():
-            self.host.run('rm', args=('-f', self.TEST_CONFIG_PATH))
-
-        logging.info('Restore camera profile in ARC++ container')
-        self.host.run('restart ui')
diff --git a/server/cros/chaos_ap_list.conf b/server/cros/chaos_ap_list.conf
deleted file mode 100644
index 1361c36..0000000
--- a/server/cros/chaos_ap_list.conf
+++ /dev/null
@@ -1,3292 +0,0 @@
-# Copyright (c) 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Note - APs commented out in this config are either not configured correctly
-# or tested and verified to work as expected.
-
-# TODO (harpreet): Test / verify and uncomment or remove all the commented AP
-# entries below.
-
-# Row 2 Rack 1
-
-[40:b0:76:c0:34:00]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host1
-ssid = asus_gtax11000_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A1
-bss = 40:b0:76:c0:34:00
-wan mac = 40:b0:76:c0:34:00
-model = gt-ax11000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[40:b0:76:c0:34:04]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host1
-ssid = asus_gtax11000_n_ac_ax_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A1
-bss = 40:b0:76:c0:34:04
-wan mac = 40:b0:76:c0:34:00
-model = gt-ax11000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[40:b0:76:c0:34:08]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host1
-ssid = asus_gtax11000_nac_ax_ch128_wpa2
-frequency = 5640
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A1
-bss = 40:b0:76:c0:34:08
-wan mac = 40:b0:76:c0:34:00
-model = gt-ax11000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### [20:c9:d0:19:1d:df]
-### brand = apple
-### wan_hostname = chromeos3-row2-rack1-host2
-### ssid = apple_airport_extreme_n_1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### bss = 20:c9:d0:19:1d:df
-### wan mac = 20:c9:d0:10:50:63
-### model = airport_extreme
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[14:59:c0:a8:fc:47]
-brand = netgear
-wan_hostname = chromeos3-row2-rack1-host3
-ssid = netgear_rax80_n_ax_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A3
-bss = 14:59:c0:a8:fc:47
-wan mac = 14:59:c0:a8:fc:48
-model = rax80
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### b/153560792
-### [14:59:c0:a8:fc:49]
-### brand = netgear
-### wan_hostname = chromeos3-row2-rack1-host3
-### ssid = netgear_rax80_ac_ax_ch44_wpa2
-### frequency = 5220
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack1-rpm1
-### rpm_outlet = .A3
-### bss = 14:59:c0:a8:fc:49
-### wan mac = 14:59:c0:a8:fc:48
-### model = rax80
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [d0:17:c2:3d:16:50]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack1-host4
-### ssid = asus_ac5300_n_ch6_wpa2
-### frequency = 2437
-### rpm_managed = True
-### bss = d0:17:c2:3d:16:50
-### wan mac = d0:17:c2:3d:16:50
-### model = rtac5300
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [d0:17:c2:3d:16:54]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack1-host4
-### ssid = asus_ac5300_ac_ch40_wpa2
-### frequency = 5200
-### rpm_managed = True
-### bss5 = d0:17:c2:3d:16:54
-### wan mac = d0:17:c2:3d:16:50
-### model = rtac5300
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [d0:17:c2:3d:16:58]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack1-host4
-### ssid = asus_ac5300_ac_ch157_wpa2
-### frequency = 5785
-### rpm_managed = True
-### bss5 = d0:17:c2:3d:16:58
-### wan mac = d0:17:c2:3d:16:50
-### model = rtac5300
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [50:46:5d:00:bc:b0]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack1-host5
-### ssid = asus_n66u_n_1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### bss = 50:46:5d:00:bc:b0
-### wan mac = 50:46:5d:00:bc:b0
-### model = n66u
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [50:46:5d:00:bc:b4]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack1-host5
-### ssid = asus_n66u_n_48_wpa2
-### frequency = 5240
-### rpm_managed = True
-### bss = 50:46:5d:00:bc:b4
-### wan mac = 50:46:5d:00:bc:b0
-### model = n66u
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[50:46:5D:5C:D7:28]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host6
-ssid = asus_ac66r_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A6
-bss = 50:46:5D:5C:D7:28
-wan mac = c0:d8:19:d0:90:40
-model = ac66r
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[50:46:5D:5C:D7:2C]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host6
-ssid = asus_ac66r_ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A6
-bss5 = 50:46:5D:5C:D7:2C
-wan mac = c0:d8:19:d0:90:40
-model = ac66r
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[10:bf:48:e7:2e:88]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host7
-ssid = asus_rtn16_n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A7
-bss = 10:bf:48:e7:2e:88
-wan mac = 10:bf:48:e7:2e:88
-model = rt_n16
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:22:75:a2:3b:08]
-brand = belkin
-wan_hostname = chromeos3-row2-rack1-host8
-ssid = belkin_f5d8235_n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A8
-bss = 00:22:75:a2:3b:08
-wan mac = 00:22:75:a2:3b:0a
-model = f5d8235
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[0c:9d:92:02:40:f1]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host9
-ssid = asus_ax88u_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A9
-bss = 0c:9d:92:02:40:f1
-wan mac = 0c:9d:92:02:40:f0
-model = ax88u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[0c:9d:92:02:40:f4]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host9
-ssid = asus_ax88u_ac_ax_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A9
-bss = 0c:9d:92:02:40:f4
-wan mac = 0c:9d:92:02:40:f0
-model = ax88u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-
-[00:11:32:a5:90:c2]
-brand = synology
-wan_hostname = chromeos3-row2-rack1-host10
-ssid = synology_mr2200__n_ch6_wpa2_3
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A10
-bss = 00:11:32:a5:90:c2
-wan mac = 00:11:32:a5:90:c0
-model = mr2200
-security = wpa2_3
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:11:32:a5:90:c3]
-brand = synology
-wan_hostname = chromeos3-row2-rack1-host10
-ssid = synology_mr2200__n_ac_ch153_wpa2_3
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A10
-bss = 00:11:32:a5:90:c3
-wan mac = 00:11:32:a5:90:c0
-model = mr2200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:11:32:a5:90:c4]
-brand = synology
-wan_hostname = chromeos3-row2-rack1-host10
-ssid = synology_mr2200__ac_ch40_wpa2_3
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A10
-bss = 00:11:32:a5:90:c4
-wan mac = 00:11:32:a5:90:c0
-model = mr2200
-security = wpa2_3
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[94:44:52:18:f0:a7]
-brand = belkin
-wan_hostname = chromeos3-row2-rack1-host11
-ssid = belkin_f7d5301_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A11
-bss = 94:44:52:18:f0:a7
-wan mac = 94:44:52:18:f0:a8
-model = f7d5301_v1_1
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[08:86:3b:b3:50:c4]
-brand = belkin
-wan_hostname = chromeos3-row2-rack1-host12
-ssid = belkin_f9k1103v1_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A12
-bss = 08:86:3b:b3:50:c4
-wan mac = 08:86:3b:b3:50:c5
-model = f9k1103_v1
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[08:86:3b:b3:50:c6]
-brand = belkin
-wan_hostname = chromeos3-row2-rack1-host12
-ssid = belkin_f9k1103v1_n_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A12
-bss5 = 08:86:3b:b3:50:c6
-wan mac = 08:86:3b:b3:50:c5
-model = f9k1103_v1
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[10:6f:3f:f6:89:10]
-brand = buffalo
-wan_hostname = chromeos3-row2-rack1-host13
-ssid = buffalo_wzr-hp-g450h_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A13
-bss = 10:6f:3f:f6:89:10
-wan mac = 12:6f:32:f6:89:02
-model = wzr-hp-g450h
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### [c8:d7:19:06:1b:8f]
-### brand = cisco
-### wan_hostname = chromeos3-row2-rack1-host14
-### ssid = cisco_linksys_ea3500_n_1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### bss = c8:d7:19:06:1b:8f
-### wan mac = a8:b7:19:d1:92:12
-### model = ea3500
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [c8:d7:19:06:1b:91]
-### brand = cisco
-### wan_hostname = chromeos3-row2-rack1-host14
-### ssid = cisco_linksys_ea3500_n_153_wpa2
-### frequency = 5765
-### rpm_managed = True
-### bss5 = c8:d7:19:06:1b:91
-### wan mac = a8:b7:19:d1:92:12
-### model = ea3500
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[c0:c1:c0:8b:59:8b]
-brand = cisco
-wan_hostname = chromeos3-row2-rack1-host15
-ssid = cisco_e1000_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A15
-bss = c0:c1:c0:8b:59:8b
-wan mac = c0:c1:c0:8b:59:8a
-model = e1000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[68:7f:74:0a:97:bc]
-brand = cisco
-wan_hostname = chromeos3-row2-rack1-host16
-ssid = cisco_wrt610n_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A16
-bss = 68:7f:74:0a:97:bc
-wan mac = 68:7f:74:0a:97:bb
-model = linksys_wrt610n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[68:7f:74:0a:97:bd]
-brand = cisco
-wan_hostname = chromeos3-row2-rack1-host16
-ssid = cisco_wrt610n_n_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack1-rpm1
-rpm_outlet = .A16
-bss5 = 68:7f:74:0a:97:bd
-wan mac = 68:7f:74:0a:97:bb
-model = linksys_wrt610n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[14:dd:a9:9a:97:08]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host17
-ssid = asus_rtac56u__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A21
-bss = 14:dd:a9:9a:97:08
-wan mac = 10:2d:3a:04:b5:11
-model = rtac56u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[14:dd:a9:9a:97:0c]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host17
-ssid = asus_rtac56u__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A21
-bss = 14:dd:a9:9a:97:0c
-wan mac = 10:2d:3a:04:b5:11
-model = rtac56u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[30:5a:3a:6e:88:40]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host18
-ssid = asus_ac2400__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A22
-bss = 30:5a:3a:6e:88:40
-wan mac = 30:5a:3a:6e:88:44
-model = ac2400
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[30:5a:3a:6e:88:44]
-brand = asus
-wan_hostname = chromeos3-row2-rack1-host18
-ssid = asus_ac2400__ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A22
-bss = 30:5a:3a:6e:88:44
-wan mac = 30:5a:3a:6e:88:44
-model = ac2400
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[ec:1a:59:97:a8:30]
-brand = belkin
-wan_hostname = chromeos3-row2-rack1-host19
-ssid = belkin_n600__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A23
-bss = ec:1a:59:97:a8:30
-wan mac = ec:1a:59:97:a8:31
-model = n600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[ec:1a:59:97:a8:30]
-brand = belkin
-wan_hostname = chromeos3-row2-rack1-host19
-ssid = belkin_n600__n_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A23
-bss = ec:1a:59:97:a8:30
-wan mac = ec:1a:59:97:a8:31
-model = n600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:24:a5:b5:a3:17]
-brand = buffalo
-wan_hostname = chromeos3-row2-rack1-host20
-ssid = buffalo_whrhpg300n__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A24
-bss = 00:24:a5:b5:a3:17
-wan mac = 00:24:a5:b5:a3:17
-model = whrhpg300n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-# Row 2 Rack 2
-
-[68:7f:74:31:6a:ae]
-brand = cisco
-wan_hostname = chromeos3-row2-rack2-host1
-ssid = cisco_valet_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A1
-bss = 68:7f:74:31:6a:ae
-wan mac = 68:7f:74:31:6a:ad
-model = linksys_m10
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[84:c9:b2:50:9a:c7]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host2
-ssid = dlink_dir_655_n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A2
-bss = 84:c9:b2:50:9a:c7
-wan mac = 84:c9:b2:50:9a:c8
-model = dir_655
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[48:ee:0c:ec:81:ce]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host3
-ssid = dlink_dir866l_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A3
-bss = 48:ee:0c:ec:81:ce
-wan mac = 48:ee:0c:ec:81:cf
-model = dir866l
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[48:ee:0c:ec:81:d0]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host3
-ssid = dlink_dir866l_ac_ch161_wpa2
-frequency = 5805
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A3
-bss5 = 48:ee:0c:ec:81:d0
-wan mac = 48:ee:0c:ec:81:cf
-model = dir866l
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### b/153560792
-### [1c:7e:e5:fd:48:d4]
-### brand = dlink
-### wan_hostname = chromeos3-row2-rack2-host4
-### ssid = dlink_dir_600_n_ch11_wpa2
-### frequency = 2462
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack2-rpm1
-### rpm_outlet = .A4
-### bss = 1c:7e:e5:fd:48:d4
-### wan mac = 1c:7e:e5:fd:48:d5
-### model = dir600
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[00:18:e7:de:af:12]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host5
-ssid = dlink_dir_855_n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A5
-bss = 00:18:e7:de:af:12
-wan mac = 00:18:e7:de:af:13
-model = dir600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:18:e7:de:af:14]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host5
-ssid = dlink_dir_855_n_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A5
-bss5 = 00:18:e7:de:af:14
-wan mac = 00:18:e7:de:af:13
-model = dir600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:18:e7:e4:51:3a]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host6
-ssid = dlink_dgl_4500_n_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A6
-bss = 00:18:e7:e4:51:3a
-wan mac = 00:18:e7:e4:51:3a
-model = dgl_4500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[cc:b2:55:de:85:4a]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host7
-ssid = dlink_dir_628_n_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A7
-bss = cc:b2:55:de:85:4a
-wan mac = cc:b2:55:de:85:4b
-model = dir_628
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c8:d3:a3:5f:c9:0c]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host8
-ssid = dlink_dir_825_n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A8
-bss = c8:d3:a3:5f:c9:0c
-wan mac = c8:d3:a3:5f:c9:0d
-model = dir_825
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c8:d3:a3:5f:c9:0e]
-brand = dlink
-wan_hostname = chromeos3-row2-rack2-host8
-ssid = dlink_dir_825_n_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A8
-bss5 = c8:d3:a3:5f:c9:0e
-wan mac = c8:d3:a3:5f:c9:0d
-model = dir_825
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### [00:9c:02:65:f2:5c]
-### brand = hp
-### wan_hostname = chromeos3-row2-rack2-host9
-### ssid = hp_v_m200_n_6_wpa2
-### frequency = 2437
-### rpm_managed = True
-### bss = 00:9c:02:65:f2:5c
-### wan mac = 2c:41:38:28:f7:cf
-### model = v-m200
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [00:9c:02:65:f2:5d]
-### brand = hp
-### wan_hostname = chromeos3-row2-rack2-host9
-### ssid = hp_v_m200_n_36_wpa2
-### frequency = 5180
-### rpm_managed = True
-### bss5 = 00:9c:02:65:f2:5d
-### wan mac = 2c:41:38:28:f7:cf
-### model = v-m200
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### b/153560792
-### [00:11:6b:4a:a3:7c]
-### brand = levelone
-### wan_hostname = chromeos3-row2-rack2-host10
-### ssid = level_one_wbr_6002_n_ch11_wpa2
-### frequency = 2462
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack2-rpm1
-### rpm_outlet = .A10
-### bss = 00:11:6b:4a:a3:7c
-### wan mac = 00:11:6b:4a:a3:7d
-### model = wbr_6002
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [c0:56:27:40:d3:f6]
-### brand = linksys
-### wan_hostname = chromeos3-row2-rack2-host11
-### ssid = linksys_ea_8500_n_1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### bss = c0:56:27:40:d3:f6
-### wan mac = c0:56:27:40:d3:f5
-### model = ea8500
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [c0:56:27:40:d3:f7]
-### brand = linksys
-### wan_hostname = chromeos3-row2-rack2-host11
-### ssid = linksys_ea_8500_ac_161_wpa2
-### frequency = 5805
-### rpm_managed = True
-### bss5 = c0:56:27:40:d3:f7
-### wan mac = c0:56:27:40:d3:f5
-### model = ea8500
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [00:18:f8:33:38:36]
-### brand = linksys
-### wan_hostname = chromeos3-row2-rack2-host12
-### ssid = linksys_wasp_54_g_6_wpa2
-### frequency = 2437
-### rpm_managed = True
-### bss = 00:18:f8:33:38:36
-### wan mac = 00:18:f8:33:38:36
-### model = wasp_54g
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [00:25:9c:d2:32:30]
-### brand = linksys
-### wan_hostname = chromeos3-row2-rack2-host13
-### ssid = linksys_wrt_610_n_11_wpa2
-### frequency = 2462
-### rpm_managed = True
-### bss = 00:25:9c:d2:32:30
-### wan mac = 68:7f:74:0a:97:bb
-### model = wrt610n
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [00:25:9c:d2:32:31]
-### brand = linksys
-### wan_hostname = chromeos3-row2-rack2-host13
-### ssid = linksys_wrt_610_n_40_wpa2
-### frequency = 5200
-### rpm_managed = True
-### bss5 = 00:25:9c:d2:32:31
-### wan mac = 68:7f:74:0a:97:bb
-### model = wrt610n
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### [00:22:6b:67:15:18]
-### brand = linksys
-### wan_hostname = chromeos3-row2-rack2-host14
-### ssid = linksys_wrt_310_n_1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### bss = 00:22:6b:67:15:18
-### wan mac = 20:aa:4b:2c:0d:0f
-### model = wrt310n
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[00:25:9c:a0:75:f3]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host15
-ssid = linksys_wrt_400_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A15
-bss = 00:25:9c:a0:75:f3
-wan mac = 00:25:9c:a0:75:f5
-model = wrt400n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:25:9c:a0:75:f2]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host15
-ssid = linksys_wrt_400_n_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A15
-bss5 = 00:25:9c:a0:75:f2
-wan mac = 00:25:9c:a0:75:f5
-model = wrt400n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c8:d7:19:d4:c5:56]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host16
-ssid = linksys_e3200_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A16
-bss = c8:d7:19:d4:c5:56
-wan mac = c8:d7:19:d4:c5:55
-model = e3200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c8:d7:19:d4:c5:57]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host16
-ssid = linksys_e3200_n_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A16
-bss5 = c8:d7:19:d4:c5:57
-wan mac = c8:d7:19:d4:c5:55
-model = e3200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[94:10:3e:b8:77:a0]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host17
-ssid = linksys_ea4500__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A17
-bss = 94:10:3e:b8:77:a0
-wan mac = 94:10:3e:b8:77:ab
-model = ea4500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[94:10:3e:b8:77:a2]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host17
-ssid = linksys_ea4500__n_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A17
-bss = 94:10:3e:b8:77:a2
-wan mac = 94:10:3e:b8:77:ab
-model = ea4500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:21:29:6c:23:8a]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host18
-ssid = linksys_wrt600n__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A18
-bss = 00:21:29:6c:23:8a
-wan mac = 00:21:29:6c:23:8b
-model = wrt600n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:21:29:6c:6c:bd]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host18
-ssid = linksys_wrt600n__n_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A18
-bss = 00:21:29:6c:6c:bd
-wan mac = 00:21:29:6c:23:8b
-model = wrt600n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[48:f8:b3:ec:97:12]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host20
-ssid = linksys_ea9200__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A20
-bss = 48:f8:b3:ec:97:12
-wan mac = 48:f8:b3:ec:97:10
-model = ea9200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[48:f8:b3:ec:97:14]
-brand = linksys
-wan_hostname = chromeos3-row2-rack2-host20
-ssid = linksys_ea9200__ac_ch161_wpa2
-frequency = 5805
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack2-rpm1
-rpm_outlet = .A20
-bss = 48:f8:b3:ec:97:14
-wan mac = 48:f8:b3:ec:97:10
-model = ea9200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-# Row 2 Rack 3
-
-[14:35:8b:0c:01:9c]
-brand = medialink
-wan_hostname = chromeos3-row2-rack3-host1
-ssid = medialink_mwn_wapr300_n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A1
-bss = 14:35:8b:0c:01:9c
-wan mac = 14:35:8b:0c:01:9c
-model = mwn_wapr300n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[14:35:8b:0b:6c:80]
-brand = medialink
-wan_hostname = chromeos3-row2-rack3-host2
-ssid = medialink_mwnwapr150__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A2
-bss = 14:35:8b:0b:6c:80
-wan mac = 14:35:8b:0b:6c:80
-model = mwn_wapr150n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c0:3f:0e:ba:a8:e1]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host3
-ssid = netgear_wndr3300__g_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A3
-bss = c0:3f:0e:ba:a8:e1
-wan mac = c0:3f:0e:ba:a8:e2
-model = wndr3300
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c0:3f:0e:ba:a8:e0]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host3
-ssid = netgear_wndr3300__n_ch161_wpa2
-frequency = 5805
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A3
-bss5 = c0:3f:0e:ba:a8:e0
-wan mac = c0:3f:0e:ba:a8:e2
-model = wndr3300
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:1e:2a:08:65:a8]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host4
-ssid = netgear_wnr834b__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A4
-bss = 00:1e:2a:08:65:a8
-wan mac = 00:1e:2a:08:65:a9
-model = wnr834bv2
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[2c:30:33:df:80:21]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host5
-ssid = netgear_r7900_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A5
-bss = 2c:30:33:df:80:21
-wan mac = 2c:30:33:df:80:23
-model = r7900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[2c:30:33:df:80:20]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host5
-ssid = netgear_r7900_ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A5
-bss5 = 2c:30:33:df:80:20
-wan mac = 2c:30:33:df:80:23
-model = r7900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[2c:30:33:df:80:22]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host5
-ssid = netgear_r7900_ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A5
-bss5 = 2c:30:33:df:80:22
-wan mac = 2c:30:33:df:80:23
-model = r7900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:1f:33:2a:0c:d1]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host6
-ssid = netgear_wnd3500_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A6
-bss = 00:1f:33:2a:0c:d1
-wan mac = 00:1f:33:2a:0c:d3
-model = wnr3500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[a0:63:91:ea:2e:5c]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host7
-ssid = netgear_ac1750__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A7
-bss5 = a0:63:91:ea:2e:5c
-wan mac = a0:63:91:ea:2e:5c
-model = ac1750
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[a0:63:91:ea:2e:5b]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host7
-ssid = netgear_ac1750__ac_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A7
-bss5 = a0:63:91:ea:2e:5b
-wan mac = a0:63:91:ea:2e:5c
-model = ac1750
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[50:6a:03:f3:a2:80]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host8
-ssid = netgear_wnr_10000_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A8
-bss = 50:6a:03:f3:a2:80
-wan mac = 50:6a:03:f3:a2:81
-model = wnr1000v4
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[20:4e:7f:49:86:8f]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host9
-ssid = netgear_wpn824_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A9
-bss = 20:4e:7f:49:86:8f
-wan mac = 20:4e:7f:49:86:90
-model = wpn824n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[20:4e:7f:47:af:c6]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host10
-ssid = netgear_n600_n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A10
-bss5 = 20:4e:7f:47:af:c6
-wan mac = 20:4e:7f:47:af:c7
-model = n600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[20:4e:7f:47:af:c8]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host10
-ssid = netgear_n600_n_ch161_wpa2
-frequency = 5805
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A10
-bss5 = 20:4e:7f:47:af:c8
-wan mac = 20:4e:7f:47:af:c7
-model = n600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[14:59:c0:3e:05:16]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host11
-ssid = netgear_r9000__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A11
-bss = 14:59:c0:3e:05:16
-wan mac = 14:59:c0:3e:05:17
-model = R9000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### b/153560792
-### [14:59:c0:3e:05:18]
-### brand = netgear
-### wan_hostname = chromeos3-row2-rack3-host11
-### ssid = netgear_r9000__ac_ch36_wpa2
-### frequency = 5180
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack3-rpm1
-### rpm_outlet = .A11
-### bss5 = 14:59:c0:3e:05:18
-### wan mac = 14:59:c0:3e:05:17
-### model = R9000
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-# Exclude; ch11 is set to hidden, not used
-# [2c:30:33:3f:3b:7a]
-# brand = netgear
-# wan_hostname = chromeos3-row2-rack3-host12
-# ssid = netgear_r6400_n_ch11_wpa2
-# frequency = 2462
-# rpm_managed = True
-# rpm_hostname = chromeos3-row2-rack3-rpm1
-# rpm_outlet = .A12
-# bss = 2c:30:33:3f:3b:7a
-# wan mac = 2c:30:33:3f:3b:7b
-# model = ac1750r6400
-# security = wpa2
-# psk = chromeos
-# class_name = StaticAPConfigurator
-
-[2c:30:33:3f:3b:79]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host12
-ssid = netgear_r6400_ac_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A12
-bss5 = 2c:30:33:3f:3b:79
-wan mac = 2c:30:33:3f:3b:7b
-model = ac1750r6400
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:8e:f2:fc:ff:8f]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host13
-ssid = netgear_wndr4300_n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A13
-bss = 00:8e:f2:fc:ff:8f
-wan mac = 44:94:fc:71:88:9c
-model = wndr4300
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:8e:f2:fc:ff:91]
-brand = netgear
-wan_hostname = chromeos3-row2-rack3-host13
-ssid = netgear_wndr4300_n_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A13
-bss5 = 00:8e:f2:fc:ff:91
-wan mac = 44:94:fc:71:88:9c
-model = wndr4300
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### #crbug.com/388887
-### [84:1b:5e:54:de:4b]
-### brand = netgear
-### wan_hostname = chromeos3-row2-rack3-host14
-### ssid = netgear_wndr_3400_n_ch6_wpa2
-### frequency = 2437
-### rpm_managed = True
-### bss = 84:1b:5e:54:de:4b
-### wan mac = 84:1b:5e:54:de:4c
-### model = dgnd3800b
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### #crbug.com/388887
-### [84:1b:5e:54:de:4c]
-### brand = netgear
-### wan_hostname = chromeos3-row2-rack3-host14
-### ssid = netgear_wndr_3400_n_ch44_wpa2
-### frequency = 5220
-### rpm_managed = True
-### bss5 = 84:1b:5e:54:de:4c
-### wan mac = 84:1b:5e:54:de:4c
-### model = dgnd3800b
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[68:1c:a2:02:14:8d]
-brand = Rosewill
-wan_hostname = chromeos3-row2-rack3-host15
-ssid = rosewill_l600_n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A15
-bss = 68:1c:a2:02:14:8d
-wan mac = 68:1c:a2:02:14:8c
-model = l600n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[4c:1c:a2:02:14:8d]
-brand = Rosewill
-wan_hostname = chromeos3-row2-rack3-host15
-ssid = rosewill_l600_n_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack3-rpm1
-rpm_outlet = .A15
-bss5 = 4c:1c:a2:02:14:8d
-wan mac = 68:1c:a2:02:14:8c
-model = l600n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### [f8:1a:67:99:58:90]
-### brand = TP-Link
-### wan_hostname = chromeos3-row2-rack3-host16
-### ssid = tplink_mr3420_n_1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### bss = f8:1a:67:99:58:90
-### wan mac = f8:1a:67:99:58:91
-### model = mr3420
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[20:aa:4b:cd:67:e5]
-brand = linksys
-wan_hostname = chromeos3-row2-rack3-host17
-ssid = linksys_e2500__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A21
-bss = 20:aa:4b:cd:67:e5
-wan mac = 20:aa:4b:cd:67:e4
-model = e2500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[20:aa:4b:cd:67:e6]
-brand = linksys
-wan_hostname = chromeos3-row2-rack3-host17
-ssid = linksys_e2500__n_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A21
-bss = 20:aa:4b:cd:67:e6
-wan mac = 20:aa:4b:cd:67:e4
-model = e2500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[98:fc:11:78:64:c0]
-brand = linksys
-wan_hostname = chromeos3-row2-rack3-host18
-ssid = linksys_e3000__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A22
-bss = 98:fc:11:78:64:c0
-wan mac = 98:fc:11:78:64:bf
-model = e3000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[98:fc:11:78:64:c1]
-brand = linksys
-wan_hostname = chromeos3-row2-rack3-host18
-ssid = linksys_e3000__n_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A22
-bss = 98:fc:11:78:64:c1
-wan mac = 98:fc:11:78:64:bf
-model = e3000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:21:91:5f:59:f6]
-brand = dlink
-wan_hostname = chromeos3-row2-rack3-host19
-ssid = dlink_dir300__g_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A23
-bss = 00:21:91:5f:59:f6
-wan mac = 00:21:91:5f:59:f6
-model = dir300
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[80:1f:02:0e:17:ab]
-brand = edimax
-wan_hostname = chromeos3-row2-rack3-host20
-ssid = edimax_br6428n__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A24
-bss = 80:1f:02:0e:17:ab
-wan mac = 80:1f:02:0e:17:ab
-model = br6428n
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-# Row 2 Rack 4
-
-### [10:fe:ed:68:29:a2]
-### brand = TP-Link
-### wan_hostname = chromeos3-row2-rack4-host1
-### ssid = tplink_mr4320_n_6_wpa2
-### frequency = 2437
-### rpm_managed = True
-### bss = 10:fe:ed:68:29:a2
-### wan mac = 10:fe:ed:68:29:a3
-### model = mr3420
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-# Missing AP chromeos3-row2-rack4-host2
-
-[d8:eb:97:2a:c7:ce]
-brand = trendnet
-wan_hostname = chromeos3-row2-rack4-host3
-ssid = trendnet_tew823dru__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A3
-bss = d8:eb:97:2a:c7:ce
-wan mac = d8:eb:97:2a:c7:cf
-model = tew823dru
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[d8:eb:97:2a:c7:d0]
-brand = trendnet
-wan_hostname = chromeos3-row2-rack4-host3
-ssid = trendnet_tew823dru__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A3
-bss5 = d8:eb:97:2a:c7:d0
-wan mac = d8:eb:97:2a:c7:cf
-model = tew823dru
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### [d8:eb:97:18:e2:d6]
-### brand = trendnet
-### wan_hostname = chromeos3-row2-rack4-host4
-### ssid = trendnet_tew654tr_n_1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### bss = d8:eb:97:18:e2:d6
-### wan mac = d8:eb:97:18:e2:d7
-### model = tew_654tr
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[00:14:d1:c5:68:94]
-brand = trendnet
-wan_hostname = chromeos3-row2-rack4-host5
-ssid = trendnet_tew639gr__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A5
-bss = 00:14:d1:c5:68:94
-wan mac = 00:14:d1:c5:68:94
-model = tew_639gr
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[d8:eb:97:c8:d9:fb]
-brand = trendnet
-wan_hostname = chromeos3-row2-rack4-host6
-ssid = trendnet_828dru__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A6
-bss = d8:eb:97:c8:d9:fb
-wan mac = d8:eb:97:c8:d9:f7
-model = tew828dru
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[d8:eb:97:c8:d9:ff]
-brand = trendnet
-wan_hostname = chromeos3-row2-rack4-host6
-ssid = trendnet_828dru__ac_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A6
-bss5 = d8:eb:97:c8:d9:ff
-wan mac = d8:eb:97:c8:d9:f7
-model = tew828dru
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[d8:eb:97:c8:da:03]
-brand = trendnet
-wan_hostname = chromeos3-row2-rack4-host6
-ssid = trendnet_828dru__ac_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A6
-bss5 = d8:eb:97:c8:da:03
-wan mac = d8:eb:97:c8:d9:f7
-model = tew828dru
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:90:a9:05:2e:31]
-brand = WD
-wan_hostname = chromeos3-row2-rack4-host7
-ssid = wd_n750__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A7
-bss = 00:90:a9:05:2e:31
-wan mac = 00:90:a9:05:2e:30
-model = mynet_n750
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:90:a9:05:2e:33]
-brand = WD
-wan_hostname = chromeos3-row2-rack4-host7
-ssid = wd_n750__n_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A7
-bss5 = 00:90:a9:05:2e:33
-wan mac = 00:90:a9:05:2e:30
-model = mynet_n750
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[fc:f5:28:d4:b5:a8]
-brand = zyxel
-wan_hostname = chromeos3-row2-rack4-host8
-ssid = zyxel_nbg6716__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A8
-bss = fc:f5:28:d4:b5:a8
-wan mac = fc:f5:28:d4:b5:ab
-model = nbg6716
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[fc:f5:28:d4:b5:a9]
-brand = zyxel
-wan_hostname = chromeos3-row2-rack4-host8
-ssid = zyxel_nbg6716__ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A8
-bss5 = fc:f5:28:d4:b5:a9
-wan mac = fc:f5:28:d4:b5:ab
-model = nbg6716
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[68:1c:a2:00:93:18]
-brand = rosewill
-wan_hostname = chromeos3-row2-rack4-host9
-ssid = rosewill_rnxn150rt__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A9
-bss = 68:1c:a2:00:93:18
-wan mac = 68:1c:a2:00:93:19
-model = rnxn150rt
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:19:9d:74:f0:29]
-brand = vizio
-wan_hostname = chromeos3-row2-rack4-host10
-ssid = vizio_xwr100__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A10
-bss = 00:19:9d:74:f0:29
-wan mac = 00:19:9d:74:f0:2b
-model = xwr100
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:19:9d:74:f0:28]
-brand = vizio
-wan_hostname = chromeos3-row2-rack4-host10
-ssid = vizio_xwr100__n_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A10
-bss = 00:19:9d:74:f0:28
-wan mac = 00:19:9d:74:f0:2b
-model = xwr100
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[d8:0d:17:b2:8e:37]
-brand = tp-link
-wan_hostname = chromeos3-row2-rack4-host11
-ssid = tplink_a9__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A11
-bss = d8:0d:17:b2:8e:37
-wan mac = d8:0d:17:b2:8e:39
-model = tplink_a9
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[d8:0d:17:b2:8e:36]
-brand = tp-link
-wan_hostname = chromeos3-row2-rack4-host11
-ssid = tplink_a9__ac_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A11
-bss = d8:0d:17:b2:8e:36
-wan mac = d8:0d:17:b2:8e:39
-model = tplink_a9
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c4:71:54:ad:f8:50]
-brand = tp-link
-wan_hostname = chromeos3-row2-rack4-host12
-ssid = tplink_c3200__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A12
-bss = c4:71:54:ad:f8:50
-wan mac = c4:71:54:ad:f8:51
-model = c3200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c4:71:54:ad:f8:4e]
-brand = tp-link
-wan_hostname = chromeos3-row2-rack4-host12
-ssid = tplink_c3200__ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A12
-bss = c4:71:54:ad:f8:4e
-wan mac = c4:71:54:ad:f8:51
-model = c3200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[98:da:c4:82:ca:a9]
-brand = tp-link
-wan_hostname = chromeos3-row2-rack4-host13
-ssid = tplink_a7__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A13
-bss = 98:da:c4:82:ca:a9
-wan mac = 98:da:c4:82:ca:aa
-model = a7
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[98:da:c4:82:ca:a8]
-brand = tp-link
-wan_hostname = chromeos3-row2-rack4-host13
-ssid = tplink_a7__ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A13
-bss = 98:da:c4:82:ca:a8
-wan mac = 98:da:c4:82:ca:aa
-model = a7
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:95:e6:23:82:a1]
-brand = tenda
-wan_hostname = chromeos3-row2-rack4-host14
-ssid = tenda_ac18__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A14
-bss = 04:95:e6:23:82:a1
-wan mac = 04:95:e6:23:82:a9
-model = ac18
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:95:e6:23:82:a5]
-brand = tenda
-wan_hostname = chromeos3-row2-rack4-host14
-ssid = tenda_ac18__ac_ch48_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A14
-bss = 04:95:e6:23:82:a5
-wan mac = 04:95:e6:23:82:a9
-model = ac18
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[d8:32:14:ac:30:91]
-brand = tenda
-wan_hostname = chromeos3-row2-rack4-host15
-ssid = tenda_ac6__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A15
-bss = d8:32:14:ac:30:91
-wan mac = d8:32:14:ac:30:99
-model = ac6
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[d8:32:14:ac:30:95]
-brand = tenda
-wan_hostname = chromeos3-row2-rack4-host15
-ssid = tenda_ac6__ac_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A15
-bss = d8:32:14:ac:30:95
-wan mac = d8:32:14:ac:30:99
-model = ac6
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[50:0f:f5:65:ef:10]
-brand = tenda
-wan_hostname = chromeos3-row2-rack4-host16
-ssid = tenda_n301__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A16
-bss = 50:0f:f5:65:ef:10
-wan mac = 50:0f:f5:65:ef:10
-model = n301
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d4:c4:08:67:08]
-brand = asus
-wan_hostname = chromeos3-row2-rack4-host17
-ssid = asus_ac88u__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A17
-bss = 04:d4:c4:08:67:08
-wan mac = 04:d4:c4:08:67:08
-model = ac3100
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### b/153560792
-### [04:d4:c4:08:67:0c]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack4-host17
-### ssid = asus_ac88u__n_ch153_wpa2
-### frequency = 5765
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack4-rpm1
-### rpm_outlet = .A17
-### bss = 04:d4:c4:08:67:0c
-### wan mac = 04:d4:c4:08:67:08
-### model = ac3100
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[04:d4:c4:d7:20:48]
-brand = asus
-wan_hostname = chromeos3-row2-rack4-host18
-ssid = asus_ac51u__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A18
-bss = 04:d4:c4:d7:20:48
-wan mac = 04:d4:c4:d7:20:48
-model = ac51u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d4:c4:d7:20:4c]
-brand = asus
-wan_hostname = chromeos3-row2-rack4-host18
-ssid = asus_ac51u__n_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row2-rack4-rpm1
-rpm_outlet = .A18
-bss = 04:d4:c4:d7:20:4c
-wan mac = 04:d4:c4:d7:20:48
-model = ac51u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### b/153560792
-### [4c:ed:fb:7b:a2:88]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack4-host19
-### ssid = asus_ac1900p__n_ch1_wpa2
-### frequency = 2412
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack4-rpm1
-### rpm_outlet = .A19
-### bss = 4c:ed:fb:7b:a2:88
-### wan mac = 4c:ed:fb:7b:a2:88
-### model = ac1900p
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-### b/153560792
-### [4c:ed:fb:7b:a2:8c]
-### brand = asus
-### wan_hostname = chromeos3-row2-rack4-host19
-### ssid = asus_ac1900p__ac_ch36_wpa2
-### frequency = 5180
-### rpm_managed = True
-### rpm_hostname = chromeos3-row2-rack4-rpm1
-### rpm_outlet = .A19
-### bss = 4c:ed:fb:7b:a2:8c
-### wan mac = 4c:ed:fb:7b:a2:88
-### model = ac1900p
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[74:03:bd:00:8a:60]
-brand = buffalo
-wan_hostname = chromeos3-row2-rack4-host20
-ssid = buffalo_whr1166d__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack4-rpm1
-rpm_outlet = .A20
-bss = 74:03:bd:00:8a:60
-wan mac = 74:03:bd:00:8a:60
-model = whr1166d
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[74:03:bd:00:8a:64]
-brand = buffalo
-wan_hostname = chromeos3-row2-rack4-host20
-ssid = buffalo_whr1166d__ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack4-rpm1
-rpm_outlet = .A20
-bss = 74:03:bd:00:8a:64
-wan mac = 74:03:bd:00:8a:60
-model = whr1166d
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[98:bb:99:16:cf:e4]
-brand = wisetiger
-wan_hostname = chromeos3-row3-rack1-host1
-ssid = wisetiger_rt8501__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A1
-bss = 98:bb:99:16:cf:e4
-wan mac = 98:bb:99:16:cf:e5
-model = rt8501
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[98:bb:99:16:cf:e4]
-brand = wisetiger
-wan_hostname = chromeos3-row3-rack1-host1
-ssid = wisetiger_rt8501__ac_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A1
-bss = 98:bb:99:16:cf:e4
-wan mac = 98:bb:99:16:cf:e5
-model = rt8501
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[14:35:8b:1c:af:01]
-brand = medialink
-wan_hostname = chromeos3-row3-rack1-host2
-ssid = medialink_ac1200r__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A2
-bss = 14:35:8b:1c:af:01
-wan mac = 14:35:8b:1c:af:09
-model = ac1200r
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[14:35:8b:1c:af:05]
-brand = medialink
-wan_hostname = chromeos3-row3-rack1-host2
-ssid = medialink_ac1200r__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A2
-bss = 14:35:8b:1c:af:05
-wan mac = 14:35:8b:1c:af:09
-model = ac1200r
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[cc:40:d0:97:ed:0a]
-brand = netgear
-wan_hostname = chromeos3-row3-rack1-host3
-ssid = netgear_r6230__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A3
-bss = cc:40:d0:97:ed:0a
-wan mac = cc:40:d0:97:ed:0b
-model = r6230
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[cc:40:d0:97:ed:0a]
-brand = netgear
-wan_hostname = chromeos3-row3-rack1-host3
-ssid = netgear_r6230__ac_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A3
-bss = cc:40:d0:97:ed:0a
-wan mac = cc:40:d0:97:ed:0b
-model = r6230
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[80:3f:5d:b4:f1:1c]
-brand = wavlink
-wan_hostname = chromeos3-row3-rack1-host4
-ssid = wavlink_wn531g3__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A4
-bss = 80:3f:5d:b4:f1:1c
-wan mac = 80:3f:5d:b4:f1:1d
-model = wn531g3
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[80:3f:5d:b4:f1:1c]
-brand = wavlink
-wan_hostname = chromeos3-row3-rack1-host4
-ssid = wavlink_wn531g3__ac_ch161_wpa2
-frequency = 5805
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A4
-bss = 80:3f:5d:b4:f1:1c
-wan mac = 80:3f:5d:b4:f1:1d
-model = wn531g3
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:11:32:b3:ff:d1]
-brand = synology
-wan_hostname = chromeos3-row3-rack1-host5
-ssid = synology_rt2600__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A5
-bss = 00:11:32:b3:ff:d1
-wan mac = 00:11:32:b3:ff:ce
-model = rt2600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:11:32:b3:ff:d2]
-brand = synology
-wan_hostname = chromeos3-row3-rack1-host5
-ssid = synology_rt2600__ac_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A5
-bss = 00:11:32:b3:ff:d2
-wan mac = 00:11:32:b3:ff:ce
-model = rt2600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### b/153560792
-### [74:da:38:f6:62:6a]
-### brand = edimax
-### wan_hostname = chromeos3-row3-rack1-host6
-### ssid = edimax_br6478ac__n_ch6_wpa2
-### frequency = 2437
-### rpm_managed = True
-### rpm_hostname = chromeos3-row3-rack1-rpm1
-### rpm_outlet = .A6
-### bss = 74:da:38:f6:62:6a
-### wan mac = 74:da:38:f6:62:6c
-### model = br6478ac
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[74:da:38:f6:62:6b]
-brand = edimax
-wan_hostname = chromeos3-row3-rack1-host6
-ssid = edimax_br6478ac__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A6
-bss = 74:da:38:f6:62:6b
-wan mac = 74:da:38:f6:62:6c
-model = br6478ac
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[e4:90:7e:f5:67:b4]
-brand = motorola
-wan_hostname = chromeos3-row3-rack1-host7
-ssid = motorola_mr1900__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A7
-bss = e4:90:7e:f5:67:b4
-wan mac = e4:90:7e:f5:67:b0
-model = mr1900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[e4:90:7e:f5:67:ba]
-brand = motorola
-wan_hostname = chromeos3-row3-rack1-host7
-ssid = motorola_mr1900__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A7
-bss = e4:90:7e:f5:67:ba
-wan mac = e4:90:7e:f5:67:b0
-model = mr1900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[50:c7:bf:de:3f:75]
-brand = iqrouter
-wan_hostname = chromeos3-row3-rack1-host8
-ssid = iqrouter_ac1750__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A8
-bss = 50:c7:bf:de:3f:75
-wan mac = 50:c7:bf:de:3f:76
-model = ac1750
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[50:c7:bf:de:3f:76]
-brand = iqrouter
-wan_hostname = chromeos3-row3-rack1-host8
-ssid = iqrouter_ac1750__ac_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A8
-bss = 50:c7:bf:de:3f:76
-wan mac = 50:c7:bf:de:3f:76
-model = ac1750
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[80:3f:5d:50:57:46]
-brand = meco
-wan_hostname = chromeos3-row3-rack1-host9
-ssid = meco_n300__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A9
-bss = 80:3f:5d:50:57:46
-wan mac = 80:3f:5d:50:57:47
-model = n300
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[b4:4b:d6:20:94:d0]
-brand = cudy
-wan_hostname = chromeos3-row3-rack1-host10
-ssid = cudy_wr1000__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A10
-bss = b4:4b:d6:20:94:d0
-wan mac = b4:4b:d6:20:94:d0
-model = wr1000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[b4:4b:d6:20:94:d4]
-brand = cudy
-wan_hostname = chromeos3-row3-rack1-host10
-ssid = cudy_wr1000__n_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A10
-bss = b4:4b:d6:20:94:d4
-wan mac = b4:4b:d6:20:94:d0
-model = wr1000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[bc:64:4b:af:22:d5]
-brand = arris
-wan_hostname = chromeos3-row3-rack1-host11
-ssid = arris_sbrac1750__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A11
-bss = bc:64:4b:af:22:d5
-wan mac = bc:64:4b:af:22:d4
-model = sbrac1750
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[bc:64:4b:af:22:d6]
-brand = arris
-wan_hostname = chromeos3-row3-rack1-host11
-ssid = arris_sbrac1750__ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A11
-bss = bc:64:4b:af:22:d6
-wan mac = bc:64:4b:af:22:d4
-model = sbrac1750
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-### b/153560792
-### [e4:95:6e:4a:92:fe]
-### brand = glinet
-### wan_hostname = chromeos3-row3-rack1-host12
-### ssid = glinet_ar750s__n_ch11_wpa2
-### frequency = 2462
-### rpm_managed = True
-### rpm_hostname = chromeos3-row3-rack1-rpm1
-### rpm_outlet = .A12
-### bss = e4:95:6e:4a:92:fe
-### wan mac = e4:95:6e:4a:92:fe
-### model = ar750s
-### security = wpa2
-### psk = chromeos
-### class_name = StaticAPConfigurator
-
-[e4:95:6e:4a:92:ff]
-brand = glinet
-wan_hostname = chromeos3-row3-rack1-host12
-ssid = glinet_ar750s__ac_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A12
-bss = e4:95:6e:4a:92:ff
-wan mac = e4:95:6e:4a:92:fe
-model = ar750s
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[8c:59:73:fe:4a:dc]
-brand = zyxel
-wan_hostname = chromeos3-row3-rack1-host13
-ssid = zyxel_nbg6817__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A13
-bss = 8c:59:73:fe:4a:dc
-wan mac = 8c:59:73:fe:4a:df
-model = nbg6817
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[8c:59:73:fe:4a:dd]
-brand = zyxel
-wan_hostname = chromeos3-row3-rack1-host13
-ssid = zyxel_nbg6817__ac_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A13
-bss = 8c:59:73:fe:4a:dd
-wan mac = 8c:59:73:fe:4a:df
-model = nbg6817
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[88:dc:96:6e:15:9a]
-brand = engenius
-wan_hostname = chromeos3-row3-rack1-host14
-ssid = engenius_eap1300ext__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A14
-bss = 88:dc:96:6e:15:9a
-wan mac = 88:dc:96:6e:15:99
-model = eap1300ext
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[88:dc:96:6e:15:9b]
-brand = engenius
-wan_hostname = chromeos3-row3-rack1-host14
-ssid = engenius_eap1300ext__ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A14
-bss = 88:dc:96:6e:15:9b
-wan mac = 88:dc:96:6e:15:99
-model = eap1300ext
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d9:f5:77:fa:d8]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host15
-ssid = asus_rtac68u__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A15
-bss = 04:d9:f5:77:fa:d8
-wan mac = 04:d9:f5:77:fa:d8
-model = rtac68u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d9:f5:77:fa:dc]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host15
-ssid = asus_rtac68u__ac_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A15
-bss = 04:d9:f5:77:fa:dc
-wan mac = 04:d9:f5:77:fa:d8
-model = rtac68u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d9:f5:2b:28:50]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host16
-ssid = asus_rtax92u__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A16
-bss = 04:d9:f5:2b:28:50
-wan mac = 68:7f:74:0a:97:bb
-model = rtax92u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d9:f5:2b:28:54]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host16
-ssid = asus_rtax92u__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack1-rpm1
-rpm_outlet = .A16
-bss = 04:d9:f5:2b:28:54
-wan mac = 04:d9:f5:2b:28:50
-model = rtax92u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[a8:5e:45:88:21:c0]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host17
-ssid = asus_rtac86u__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A21
-bss = a8:5e:45:88:21:c0
-wan mac = a8:5e:45:88:21:c0
-model = rtac86u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[a8:5e:45:88:21:c4]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host17
-ssid = asus_rtac86u__ac_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A21
-bss = a8:5e:45:88:21:c4
-wan mac = a8:5e:45:88:21:c0
-model = rtac86u
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d4:c4:c7:35:e0]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host18
-ssid = asus_rtacrh17__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A22
-bss = 04:d4:c4:c7:35:e0
-wan mac = 04:d4:c4:c7:35:e0
-model = rtacrh17
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d4:c4:c7:35:e4]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host18
-ssid = asus_rtacrh17__ac_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A22
-bss = 04:d4:c4:c7:35:e4
-wan mac = 04:d4:c4:c7:35:e0
-model = rtacrh17
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d9:f5:c2:38:68]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host19
-ssid = asus_rtacrh13__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A23
-bss = 04:d9:f5:c2:38:68
-wan mac = 04:d9:f5:c2:38:68
-model = rtacrh13
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[04:d9:f5:c2:38:6c]
-brand = asus
-wan_hostname = chromeos3-row3-rack1-host19
-ssid = asus_rtacrh13__ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A23
-bss = 04:d9:f5:c2:38:6c
-wan mac = 04:d9:f5:c2:38:68
-model = rtacrh13
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[14:91:82:f4:84:f5]
-brand = belkin
-wan_hostname = chromeos3-row3-rack2-host1
-ssid = belkin_ac1600__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A1
-bss = 14:91:82:f4:84:f5
-wan mac = 14:91:82:f4:84:f5
-model = ac1600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[14:91:82:f4:84:f4]
-brand = belkin
-wan_hostname = chromeos3-row3-rack2-host1
-ssid = belkin_ac1600__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A1
-bss = 14:91:82:f4:84:f4
-wan mac = 14:91:82:f4:84:f5
-model = ac1600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[00:30:44:1d:ee:7b]
-brand = cradlepoint
-wan_hostname = chromeos3-row3-rack2-host2
-ssid = cradlepoint_mbr95__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A2
-bss = 00:30:44:1d:ee:7b
-wan mac = 00:30:44:1d:ee:7b
-model = mbr95
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[60:63:4c:79:dc:b5]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host3
-ssid = dlink_dir867__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A3
-bss = 60:63:4c:79:dc:b5
-wan mac = 60:63:4c:79:dc:b5
-model = dir867
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[60:63:4c:79:dc:b2]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host3
-ssid = dlink_dir867__ac_ch161_wpa2
-frequency = 5805
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A3
-bss = 60:63:4c:79:dc:b2
-wan mac = 60:63:4c:79:dc:b5
-model = dir867
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[ec:ad:e0:a7:48:2e]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host4
-ssid = dlink_dir842__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A4
-bss = ec:ad:e0:a7:48:2e
-wan mac = ec:ad:e0:a7:48:2e
-model = dir842
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[ec:ad:eo:a7:48:2c]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host4
-ssid = dlink_dir842__ac_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A4
-bss = ec:ad:eo:a7:48:2c
-wan mac = ec:ad:e0:a7:48:2e
-model = dir842
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[ec:ad:e0:2f:04:db]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host5
-ssid = dlink_dir882__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A5
-bss = ec:ad:e0:2f:04:db
-wan mac = ec:ad:e0:2f:04:db
-model = dir882
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[ec:ad:e0:2f:04:d8]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host5
-ssid = dlink_dir882__ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A5
-bss = ec:ad:e0:2f:04:d8
-wan mac = ec:ad:e0:2f:04:db
-model = dir882
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[10:be:f5:21:ed:6b]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host6
-ssid = dlink_dir879__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A6
-bss = 10:be:f5:21:ed:6b
-wan mac = 10:be:f5:21:ed:6b
-model = dir879
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[10:be:f5:21:ed:68]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host6
-ssid = dlink_dir879__ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A6
-bss = 10:be:f5:21:ed:68
-wan mac = 10:be:f5:21:ed:6b
-model = dir879
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[78:32:1b:61:1b:53]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host7
-ssid = dlink_dir878__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A7
-bss = 78:32:1b:61:1b:53
-wan mac = 78:32:1b:61:1b:53
-model = dir878
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-
-[78:32:1b:61:1b:50]
-brand = dlink
-wan_hostname = chromeos3-row3-rack2-host7
-ssid = dlink_dir878__ac_ch161_wpa2
-frequency = 5805
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A7
-bss = 78:32:1b:61:1b:50
-wan mac = 78:32:1b:61:1b:53
-model = dir878
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[60:38:e0:ac:7a:78]
-brand = linksys
-wan_hostname = chromeos3-row3-rack2-host8
-ssid = linksys_ea9500__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A8
-bss = 60:38:e0:ac:7a:78
-wan mac = 60:38:e0:ac:7a:76
-model = ea9500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[60:38:e0:ac:7a:79]
-brand = linksys
-wan_hostname = chromeos3-row3-rack2-host8
-ssid = linksys_ea9500__ac_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A8
-bss = 60:38:e0:ac:7a:79
-wan mac = 60:38:e0:ac:7a:76
-model = ea9500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[30:23:03:25:21:8a]
-brand = linksys
-wan_hostname = chromeos3-row3-rack2-host9
-ssid = linksys_ea6900__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A9
-bss = 30:23:03:25:21:8a
-wan mac = 30:23:03:25:21:89
-model = ea6900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[30:23:03:25:21:8b]
-brand = linksys
-wan_hostname = chromeos3-row3-rack2-host9
-ssid = linksys_ea6900__ac_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A9
-bss = 30:23:03:25:21:8b
-wan mac = 30:23:03:25:21:89
-model = ea6900
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[60:38:e0:bf:c9:e9]
-brand = linksys
-wan_hostname = chromeos3-row3-rack2-host10
-ssid = linksys_wrt32x__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A10
-bss = 60:38:e0:bf:c9:e9
-wan mac = 60:38:e0:bf:c9:e8
-model = wrt32x
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[60:38:e0:bf:c9:ea]
-brand = linksys
-wan_hostname = chromeos3-row3-rack2-host10
-ssid = linksys_wrt32x__ac_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A10
-bss = 60:38:e0:bf:c9:ea
-wan mac = 60:38:e0:bf:c9:e8
-model = wrt32x
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[30:23:03:72:77:b4]
-brand = linksys
-wan_hostname = chromeos3-row3-rack2-host11
-ssid = linksys_ea8250__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A11
-bss = 30:23:03:72:77:b4
-wan mac = 30:23:03:72:77:b4
-model = ea8250
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[30:23:03:72:77:b6]
-brand = linksys
-wan_hostname = chromeos3-row3-rack2-host11
-ssid = linksys_ea8250__ac_ch40_wpa2
-frequency = 5200
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A11
-bss = 30:23:03:72:77:b6
-wan mac = 30:23:03:72:77:b4
-model = ea8250
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c4:ad:34:6d:e6:c8]
-brand = microtik
-wan_hostname = chromeos3-row3-rack2-host12
-ssid = microtik_2hnd__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A12
-bss = c4:ad:34:6d:e6:c8
-wan mac = c4:ad:34:6d:e6:c8
-model = 2hnd
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[c4:ad:34:6d:e6:cd]
-brand = microtik
-wan_hostname = chromeos3-row3-rack2-host12
-ssid = microtik_2hnd__ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A12
-bss = c4:ad:34:6d:e6:cd
-wan mac = c4:ad:34:6d:e6:c8
-model = 2hnd
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[28:80:88:23:8c:e6]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host13
-ssid = netgear_x6r8000__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A13
-bss = 28:80:88:23:8c:e6
-wan mac = 28:80:88:23:8c:e6
-model = x6r8000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[28:80:88:23:8c:e5]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host13
-ssid = netgear_x6r8000__ac_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A13
-bss = 28:80:88:23:8c:e5
-wan mac = 28:80:88:23:8c:e6
-model = x6r8000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[28:80:88:20:ba:6d]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host14
-ssid = netgear_r7000__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A14
-bss = 28:80:88:20:ba:6d
-wan mac = 28:80:88:20:ba:6e
-model = r7000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[28:80:88:20:ba:6c]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host14
-ssid = netgear_r7000__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A14
-bss = 28:80:88:20:ba:6c
-wan mac = 28:80:88:20:ba:6e
-model = r7000
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[3c:37:86:da:78:13]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host15
-ssid = netgear_rax200__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A15
-bss = 3c:37:86:da:78:13
-wan mac = 3c:37:86:da:78:14
-model = rax200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[3c:37:86:da:78:11]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host15
-ssid = netgear_rax200__ac_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A15
-bss = 3c:37:86:da:78:11
-wan mac = 3c:37:86:da:78:14
-model = rax200
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[bc:a5:11:13:c0:f8]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host16
-ssid = netgear_rax120__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A16
-bss = bc:a5:11:13:c0:f8
-wan mac = bc:a5:11:13:c0:f9
-model = rax120
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[bc:a5:11:13:c0:fa]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host16
-ssid = netgear_rax120__ac_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A16
-bss = bc:a5:11:13:c0:fa
-wan mac = bc:a5:11:13:c0:f9
-model = rax120
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[38:94:ed:fb:cc:1e]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host17
-ssid = netgear_r6080__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A17
-bss = 38:94:ed:fb:cc:1e
-wan mac = 38:94:ed:fb:cc:1f
-model = r6080
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[38:94:ed:fb:cc:20]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host17
-ssid = netgear_r6080__ac_ch161_wpa2
-frequency = 5805
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A17
-bss = 38:94:ed:fb:cc:20
-wan mac = 38:94:ed:fb:cc:1f
-model = r6080
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[78:d2:94:e8:62:c8]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host18
-ssid = netgear_rax40__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A18
-bss = 78:d2:94:e8:62:c8
-wan mac = 78:d2:94:e8:62:c2
-model = rax40
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[78:d2:94:e8:62:ca]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host18
-ssid = netgear_rax40__ac_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A18
-bss = 78:d2:94:e8:62:ca
-wan mac = 78:d2:94:e8:62:c2
-model = rax40
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[38:94:ed:b8:b0:73]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host19
-ssid = netgear_xr500__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A19
-bss = 38:94:ed:b8:b0:73
-wan mac = 38:94:ed:b8:b0:74
-model = xr500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[38:94:ed:b8:b0:75]
-brand = netgear
-wan_hostname = chromeos3-row3-rack2-host19
-ssid = netgear_xr500__ac_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A19
-bss = 38:94:ed:b8:b0:75
-wan mac = 38:94:ed:b8:b0:74
-model = xr500
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[1c:3b:f3:94:02:eb]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack2-host20
-ssid = tplink_archerax10__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A20
-bss = 1c:3b:f3:94:02:eb
-wan mac = 1c:3b:f3:94:02:ed
-model = archerax10
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[1c:3b:f3:94:02:ea]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack2-host20
-ssid = tplink_archerax10__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack2-rpm1
-rpm_outlet = .A20
-bss = 1c:3b:f3:94:02:ea
-wan mac = 1c:3b:f3:94:02:ed
-model = archerax10
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[98:da:c4:c9:7a:f0]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack3-host1
-ssid = tplink_ac2600__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A1
-bss = 98:da:c4:c9:7a:f0
-wan mac = 98:da:c4:c9:7a:f1
-model = ac2600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[98:da:c4:c9:7a:ef]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack3-host1
-ssid = tplink_ac2600__ac_ch149_wpa2
-frequency = 5745
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A1
-bss = 98:da:c4:c9:7a:ef
-wan mac = 98:da:c4:c9:7a:f1
-model = ac2600
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[cc:32:e5:49:cf:82]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack3-host2
-ssid = tplink_archera5__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A2
-bss = cc:32:e5:49:cf:82
-wan mac = cc:32:e5:49:cf:83
-model = archera5
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[cc:32:e5:49:cf:81]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack3-host2
-ssid = tplink_archera5__ac_ch44_wpa2
-frequency = 5220
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A2
-bss = cc:32:e5:49:cf:81
-wan mac = cc:32:e5:49:cf:83
-model = archera5
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[cc:32:e5:e6:f5:ef]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack3-host4
-ssid = tplink_ax50__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A4
-bss = cc:32:e5:e6:f5:ef
-wan mac = cc:32:e5:e6:f5:f1
-model = ax50
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[cc:32:e5:e6:f5:ee]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack3-host4
-ssid = tplink_ax50__ac_ch48_wpa2
-frequency = 5240
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A4
-bss = cc:32:e5:e6:f5:ee
-wan mac = cc:32:e5:e6:f5:f1
-model = ax50
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[50:d4:f7:0f:b5:1d]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack3-host5
-ssid = tplink_archera6__n_ch11_wpa2
-frequency = 2462
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A5
-bss = 50:d4:f7:0f:b5:1d
-wan mac = 50:d4:f7:0f:b5:1e
-model = archera6
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[50:d4:f7:0f:b5:1c]
-brand = tp-link
-wan_hostname = chromeos3-row3-rack3-host5
-ssid = tplink_archera6__ac_ch157_wpa2
-frequency = 5785
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A5
-bss = 50:d4:f7:0f:b5:1c
-wan mac = 50:d4:f7:0f:b5:1e
-model = archera6
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[80:3f:5d:f8:68:6a]
-brand = wavlink
-wan_hostname = chromeos3-row3-rack3-host7
-ssid = wavlink_arialg__n_ch6_wpa2
-frequency = 2437
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A7
-bss = 80:3f:5d:f8:68:6a
-wan mac = 80:3f:5d:f8:68:68
-model = arialg
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[80:3f:5d:f8:68:6b]
-brand = wavlink
-wan_hostname = chromeos3-row3-rack3-host7
-ssid = wavlink_arialg__ac_ch36_wpa2
-frequency = 5180
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A7
-bss = 80:3f:5d:f8:68:6b
-wan mac = 80:3f:5d:f8:68:68
-model = arialg
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[5c:4a:1f:fc:ca:ec]
-brand = juplink
-wan_hostname = chromeos3-row3-rack3-host8
-ssid = vanin_juplinkrx4__n_ch1_wpa2
-frequency = 2412
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A8
-bss = 5c:4a:1f:fc:ca:ec
-wan mac = 5c:4a:1f:fc:ca:ee
-model = juplinkrx4
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
-[85c:4a:1f:fc:ca:ed]
-brand = juplink
-wan_hostname = chromeos3-row3-rack3-host8
-ssid = vanin_juplinkrx4__ac_ch153_wpa2
-frequency = 5765
-rpm_managed = True
-rpm_hostname = chromeos3-row3-rack3-rpm1
-rpm_outlet = .A8
-bss = 5c:4a:1f:fc:ca:ed
-wan mac = 5c:4a:1f:fc:ca:ee
-model = juplinkrx4
-security = wpa2
-psk = chromeos
-class_name = StaticAPConfigurator
-
diff --git a/server/cros/device_health_profile/__init__.py b/server/cros/device_health_profile/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/server/cros/device_health_profile/__init__.py
+++ /dev/null
diff --git a/server/cros/device_health_profile/common.py b/server/cros/device_health_profile/common.py
deleted file mode 100644
index 7dfe2af..0000000
--- a/server/cros/device_health_profile/common.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-autotest_dir = os.path.abspath(os.path.join(dirname, '../../..'))
-client_dir = os.path.join(autotest_dir, 'client')
-sys.path.insert(0, client_dir)
-import setup_modules
-sys.path.pop(0)
-setup_modules.setup(base_path=autotest_dir, root_module_name='autotest_lib')
diff --git a/server/cros/device_health_profile/device_health_profile.py b/server/cros/device_health_profile/device_health_profile.py
deleted file mode 100644
index 378ea71..0000000
--- a/server/cros/device_health_profile/device_health_profile.py
+++ /dev/null
@@ -1,502 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import copy
-import json
-import time
-import logging
-
-from autotest_lib.server.cros.device_health_profile.profile_constants import *
-
-
-class DeviceHealthProfileError(Exception):
-    """
-    Generic Exception for failures from DeviceHealthProfile object.
-    """
-
-
-class InvalidDeviceHealthProfileKeyError(DeviceHealthProfileError):
-    """
-    Exception to throw when trying to get an invalid health profile key.
-    """
-
-
-class DeviceHealthProfile(object):
-    """This class provide interfaces to access device health profile
-    that cached on profile host(usually labstation).
-    """
-
-    def __init__(self, hostname, host_info=None, result_dir=None):
-        """Initialize the class.
-
-        @param hostname:    The device hostaname or identification.
-        @param host_info:   A HostInfo object of the device of the profile.
-        @param result_dir:  A result directory where we can keep local copy of
-                            device profile.
-        """
-        self._hostname = hostname
-        # Cache host-info data
-        self._device_board = host_info.board if host_info else ''
-        self._device_model = host_info.model if host_info else ''
-        # the profile is located on servo-host as temporally location.
-        # The servo-host will be provided later
-        self._profile_host = None
-        self._health_profile = None
-
-        # Construct remote and local file path.
-        profile_filename = self._hostname + '.profile'
-        self._remote_path = os.path.join(PROFILE_FILE_DIR, profile_filename)
-        result_dir = result_dir or '/tmp'
-        self._local_path = os.path.join(result_dir, profile_filename)
-
-    def init_profile(self, profile_host):
-        """Initialize device health profile data.
-
-        If the cached file exists on profile host the method will download
-        file to a local path and read data, otherwise create a profile data
-        from template.
-
-        @param profile_host: An ServoHost object, where is the location
-                             we store device health for device.
-        """
-        if not profile_host:
-            raise DeviceHealthProfileError('The profile host is not provided.')
-        self._profile_host = profile_host
-        # Do a lightweighted check to make sure the machine is up
-        # (by ping), as we don't waste time on unreachable DUT.
-        if not self._profile_host.check_cached_up_status():
-            raise DeviceHealthProfileError(
-                'The profile host %s is not reachable via ping.'
-                % self._profile_host.hostname)
-
-        # We also want try to check if the DUT is available for ssh.
-        if not self._profile_host.is_up():
-            raise DeviceHealthProfileError(
-                'The profile host %s is pingable but not sshable.'
-                % self._profile_host.hostname)
-
-        if not self._sync_existing_profile():
-            self._create_profile_from_template()
-
-    def is_loaded(self):
-        """Check if device profile was loaded on not."""
-        return self._health_profile is not None
-
-    def _sync_existing_profile(self):
-        """Sync health profile from remote profile host(servohost) and
-        validate profile data is not corrupted or outdated.
-
-        @returns True if sync and validate succeed otherwise False.
-        """
-        if not self._profile_host.is_file_exists(self._remote_path):
-            logging.debug('%s not exists on %s.', self._remote_path,
-                          self._profile_host.hostname)
-            return False
-        self._download_profile()
-        self._read_profile()
-        return self._validate_profile_data(self._health_profile)
-
-    def _download_profile(self):
-        """Copy profile file from remote profile host to local path.
-        """
-        logging.debug('Downloading profile file from %s:%s to local path: %s',
-                      self._profile_host.hostname,
-                      self._remote_path,
-                      self._local_path)
-        self._profile_host.get_file(source=self._remote_path,
-                                    dest=self._local_path)
-
-    def _upload_profile(self):
-        """Copy profile file from local path to remote profile host.
-        """
-        # Make sure the device health profile directory exists on profile host.
-        self._profile_host.run('mkdir -p %s' % PROFILE_FILE_DIR,
-                               ignore_status=True)
-
-        logging.debug('Uploading profile from local path: %s to remote %s:%s',
-                      self._local_path,
-                      self._profile_host.hostname,
-                      self._remote_path)
-        self._profile_host.send_file(source=self._local_path,
-                                     dest=self._remote_path)
-
-    def _read_profile(self):
-        """Read profile data from local path and convert it into json format.
-        """
-        logging.debug('Reading device health profile from: %s',
-                      self._local_path)
-        with open(self._local_path, 'r') as f:
-            try:
-                self._health_profile = json.load(f)
-            except Exception as e:
-                logging.warning('Could not decode %s to json format, the file'
-                                ' may be corrupted; %s',
-                                self._local_path, str(e))
-
-    def _dump_profile(self):
-        """Dump profile data into local file.
-        """
-        logging.debug('Dumping device health profile to: %s', self._local_path)
-        with open(self._local_path, 'w') as f:
-            json.dump(self._health_profile, f)
-
-    def _create_profile_from_template(self):
-        """Create a new health profile dict from template.
-        """
-        logging.info('Creating new health profile from template for %s.',
-                     self._hostname)
-        self._health_profile = copy.deepcopy(DEVICE_HEALTH_PROFILE_TEMPLATE)
-        if self._device_board or self._device_model:
-            self._set_board(self._device_board)
-            self._set_model(self._device_model)
-        self.refresh_update_time()
-
-    def _validate_profile_data(self, data):
-        """Validate the given profile data is in good state.
-        """
-        logging.debug('Validating health profile data.')
-        if not isinstance(data, dict):
-            logging.debug('Non-dict type detected, the profile data'
-                          ' may be corrupted.')
-            return False
-
-        # Validate that cached health profile version is not outdated.
-        input_version = data.get(PROFILE_VERSION_KEY)
-        if input_version != PROFILE_VERSION:
-            logging.info('The input profile version: %s is outdated,'
-                         ' expected version: %s', input_version,
-                         PROFILE_VERSION)
-            return False
-
-        # Validate that cached board/model is match with device, in case
-        # there is was decom/redeploy.
-        cached_board = data.get(BOARD_KEY)
-        cached_model = data.get(MODEL_KEY)
-        if (self._device_board and cached_board
-                    and (self._device_board != cached_board)):
-            logging.info(
-                    'The board: %s from host_info does not match board: %s'
-                    ' from cached profile, the device hardware probably has'
-                    ' been changed.', self._device_board, cached_board)
-            return False
-        if (self._device_model and cached_model
-                    and (self._device_model != cached_model)):
-            logging.info(
-                    'The model: %s from host_info does not match model: %s'
-                    ' from cached profile, the device hardware probably has'
-                    ' been changed.', self._device_model, cached_model)
-            return False
-        return True
-
-    def _is_validate_profile_key(self, key):
-        return key in DEVICE_HEALTH_PROFILE_TEMPLATE
-
-    def _update_profile(self, key, value):
-        if not self._is_validate_profile_key(key):
-            logging.info('%s is an invalid health profile key.', key)
-            return
-        logging.debug('Updating health profile key %s to %s', key, value)
-        self._health_profile[key] = value
-
-    def _get_value(self, key):
-        """The basic interface to get a value from health profile dictionary.
-
-        @raises InvalidDeviceHealthProfileKeyError if the input key is
-                not a valid device health profile key.
-        """
-        if not self._is_validate_profile_key(key):
-            raise InvalidDeviceHealthProfileKeyError(
-                '%s is not a valid device health profile key' % key)
-        return self._health_profile.get(key)
-
-    def _set_board(self, board):
-        # pylint: disable=missing-docstring
-        self._update_profile(BOARD_KEY, board)
-
-    def _set_model(self, model):
-        # pylint: disable=missing-docstring
-        self._update_profile(MODEL_KEY, model)
-
-    @property
-    def health_profile(self):
-        # pylint: disable=missing-docstring
-        return self._health_profile
-
-    def get_board(self):
-        """Get device board from cached device health profile.
-        """
-        return self._get_value(BOARD_KEY)
-
-    def get_model(self):
-        """Get device model from cached device health profile.
-        """
-        return self._get_value(MODEL_KEY)
-
-    def get_profile_version(self):
-        """Get the version of cached device health profile.
-        """
-        return self._get_value(PROFILE_VERSION_KEY)
-
-    def get_dut_state(self):
-        """Get most recent dut state from device health profile.
-        """
-        return self._get_value(DUT_STATE_KEY)
-
-    def get_servo_state(self):
-        """Get most recent servo state from device health profile.
-        """
-        return self._get_value(SERVO_STATE_KEY)
-
-    def get_cros_stable_version(self):
-        """Get the most recent used cros image during repair.
-        """
-        return self._get_value(CROS_STABLE_VERSION_KEY)
-
-    def get_firmware_stable_version(self):
-        """Get the most recent used firmware image during repair, we only
-        expect to see this on non-faft pool device.
-        """
-        return self._get_value(FIRMWARE_STABLE_VERSION_KEY)
-
-    def get_last_update_time(self):
-        """Get the timestamp of when device health profile file received
-        the most recent updates. Example "2020-01-01 15:05:05"
-        """
-        return self._get_value(LAST_UPDATE_TIME_KEY)
-
-    def get_last_update_time_epoch(self):
-        """Get the unix time in int of when device health profile file
-        received the most recent updates.
-        """
-        return int(time.mktime(time.strptime(
-            self.get_last_update_time(), TIME_PATTERN)))
-
-    def get_enter_current_state_time(self):
-        """Get the timestamp of when DUT enter current state.
-        Example "2020-01-01 15:05:05"
-        """
-        return self._get_value(TIME_ENTER_CURRENT_STATE_KEY)
-
-    def get_enter_current_state_time_epoch(self):
-        """Get the unix time in int of when DUT enter current state.
-        """
-        return int(time.mktime(time.strptime(
-            self.get_enter_current_state_time(), TIME_PATTERN)))
-
-    def get_repair_fail_count(self):
-        """Get repair fail count since enter current state.
-        """
-        return self._get_value(REPAIR_FAIL_COUNT_KEY)
-
-    def get_provision_fail_count(self):
-        """Get provision fail count since enter current state.
-        """
-        return self._get_value(PROVISION_FAIL_COUNT_KEY)
-
-    def get_failed_verifiers(self):
-        """Get all failed verifiers.
-
-        @returns a dict represents all failed verifiers and
-                 their fail count.
-        """
-        return self._get_value(FAILED_VERIFIERS_KEY)
-
-    def get_failed_verifier(self, tag):
-        """Get fail count of a specific verifier.
-
-        @param tag: the short identifier of the verifier.
-
-        @returns the fail count of the specified verifier.
-        """
-        return self.get_failed_verifiers().get(tag, 0)
-
-    def get_succeed_repair_actions(self):
-        """Get all repair actions that has been applied and succeed.
-
-        @returns a dict represents all succeed repair actions
-                 and their success count.
-        """
-        return self._get_value(SUCCEED_REPAIR_ACTIONS_KEY)
-
-    def get_succeed_repair_action(self, tag):
-        """Get success count of a specific repair action.
-
-        @param tag: the short identifier of the repair action.
-
-        @returns the success count of the specified repair action.
-        """
-        return self.get_succeed_repair_actions().get(tag, 0)
-
-    def get_failed_repair_actions(self):
-        """Get all repair actions that has been applied and failed.
-
-        @returns a dict represents all failed repair actions
-                 and their fail count.
-        """
-        return self._get_value(FAILED_REPAIR_ACTIONS_KEY)
-
-    def get_failed_repair_action(self, tag):
-        """Get fail count of a specific repair action.
-
-        @param tag: the short identifier of the repair action.
-
-        @returns the failed count of the specified repair action.
-        """
-        return self.get_failed_repair_actions().get(tag, 0)
-
-    def get_badblocks_ro_run_time(self):
-        """Get the timestamp of when run last read-only badblocks check
-        on the device. Example "2020-01-01 15:05:05"
-        """
-        last_time = self._get_value(LAST_BADBLOCKS_RO_RUN_TIME_KEY)
-        return last_time or DEFAULT_TIMESTAMP
-
-    def get_badblocks_ro_run_time_epoch(self):
-        """Get the unix time of when run last read-only badblocks check
-        on the device."
-        """
-        last_time = self.get_badblocks_ro_run_time()
-        return int(time.mktime(time.strptime(last_time, TIME_PATTERN)))
-
-    def get_badblocks_rw_run_time(self):
-        """Get the timestamp of when run last read-write badblocks check
-        on the device. Example "2020-01-01 15:05:05"
-        """
-        last_time = self._get_value(LAST_BADBLOCKS_RW_RUN_TIME_KEY)
-        return last_time or DEFAULT_TIMESTAMP
-
-    def get_badblocks_rw_run_time_epoch(self):
-        """Get the unix time of when run last read-write badblocks check
-        on the device."
-        """
-        last_time = self.get_badblocks_rw_run_time()
-        return int(time.mktime(time.strptime(last_time, TIME_PATTERN)))
-
-    def get_servo_micro_fw_update_time(self):
-        """Get the timestamp of when run last fw update for servo_micro.
-        Example "2020-01-01 15:05:05"
-        """
-        last_time = self._get_value(LAST_SERVO_MICRO_FW_UPDATE_RUN_TIME_KEY)
-        return last_time or DEFAULT_TIMESTAMP
-
-    def get_servo_micro_fw_update_time_epoch(self):
-        """Get the unix time of when run last fw update for servo_micro.
-        """
-        last_time = self.get_servo_micro_fw_update_time()
-        return int(time.mktime(time.strptime(last_time, TIME_PATTERN)))
-
-    def set_cros_stable_version(self, build):
-        """Set the most recent used cros image during repair.
-        """
-        self._update_profile(CROS_STABLE_VERSION_KEY, build)
-
-    def set_firmware_stable_version(self, build):
-        """Set the most recent used firmware image during repair, we only
-        expect to see this on non-faft pool device.
-        """
-        self._update_profile(FIRMWARE_STABLE_VERSION_KEY, build)
-
-    def refresh_badblocks_ro_run_time(self):
-        """Get the timestamp of when run last read-only badblocks check
-        on the device.
-        """
-        return self._update_profile(
-                LAST_BADBLOCKS_RO_RUN_TIME_KEY,
-                time.strftime(TIME_PATTERN, time.localtime()))
-
-    def refresh_badblocks_rw_run_time(self):
-        """Get the timestamp of when run last read-write badblocks check
-        on the device.
-        """
-        return self._update_profile(
-                LAST_BADBLOCKS_RW_RUN_TIME_KEY,
-                time.strftime(TIME_PATTERN, time.localtime()))
-
-    def refresh_servo_miro_fw_update_run_time(self):
-        """Get the timestamp of when run last fw update for servo_micro.
-        """
-        return self._update_profile(
-                LAST_SERVO_MICRO_FW_UPDATE_RUN_TIME_KEY,
-                time.strftime(TIME_PATTERN, time.localtime()))
-
-    def refresh_update_time(self):
-        """Update last_update_time to current timestamp in UTC.
-        """
-        self._update_profile(LAST_UPDATE_TIME_KEY,
-                             time.strftime(TIME_PATTERN, time.localtime()))
-
-    def increase_repair_fail_count(self):
-        # pylint: disable=missing-docstring
-        self._update_profile(REPAIR_FAIL_COUNT_KEY,
-                             self.get_repair_fail_count() + 1)
-
-    def increase_provision_fail_count(self):
-        # pylint: disable=missing-docstring
-        self._update_profile(PROVISION_FAIL_COUNT_KEY,
-                             self.get_provision_fail_count() + 1)
-
-    def insert_failed_verifier(self, tag):
-        """Increase fail count for a specific verifier by 1.
-        """
-        verifiers = self.get_failed_verifiers()
-        if tag not in verifiers:
-            verifiers[tag] = 0
-        verifiers[tag] += 1
-        self._update_profile(FAILED_VERIFIERS_KEY, verifiers)
-
-    def insert_succeed_repair_action(self, tag):
-        """Increase succeed count for a specific repair action by 1.
-        """
-        actions = self.get_succeed_repair_actions()
-        if tag not in actions:
-            actions[tag] = 0
-        actions[tag] += 1
-        self._update_profile(SUCCEED_REPAIR_ACTIONS_KEY, actions)
-
-    def insert_failed_repair_action(self, tag):
-        """Increase fail count for a specific repair action by 1.
-        """
-        actions = self.get_failed_repair_actions()
-        if tag not in actions:
-            actions[tag] = 0
-        actions[tag] += 1
-        self._update_profile(FAILED_REPAIR_ACTIONS_KEY, actions)
-
-    def update_dut_state(self, state, reset_counters=False):
-        """Update state of the device, this will also reset all fail counts.
-
-        @param state: the new dut state to update.
-        @param reset_counts: a boolean to indicate whether we want to reset
-                             all counters.
-        """
-        if state == self.get_dut_state():
-            logging.debug('The host is already in %s state.', state)
-            if state == DUT_STATE_REPAIR_FAILED:
-                self.increase_repair_fail_count()
-            return
-        # Reset some records when dut state changes.
-        if reset_counters:
-            self._update_profile(REPAIR_FAIL_COUNT_KEY, 0)
-            self._update_profile(PROVISION_FAIL_COUNT_KEY, 0)
-            self._update_profile(FAILED_VERIFIERS_KEY, {})
-            self._update_profile(SUCCEED_REPAIR_ACTIONS_KEY, {})
-            self._update_profile(FAILED_REPAIR_ACTIONS_KEY, {})
-        self._update_profile(TIME_ENTER_CURRENT_STATE_KEY,
-                             time.strftime(TIME_PATTERN, time.localtime()))
-        self._update_profile(DUT_STATE_KEY, state)
-
-    def update_servo_state(self, state):
-        # pylint: disable=missing-docstring
-        if state == self.get_servo_state():
-            logging.debug('The servo is already in %s state.', state)
-            return
-        self._update_profile(SERVO_STATE_KEY, state)
-
-    def close(self):
-        # pylint: disable=missing-docstring
-        self.refresh_update_time()
-        self._dump_profile()
-        self._upload_profile()
diff --git a/server/cros/device_health_profile/device_health_profile_unittest.py b/server/cros/device_health_profile/device_health_profile_unittest.py
deleted file mode 100644
index 77176a5..0000000
--- a/server/cros/device_health_profile/device_health_profile_unittest.py
+++ /dev/null
@@ -1,230 +0,0 @@
-#!/usr/bin/python3
-# pylint: disable=missing-docstring
-
-import time
-import unittest
-
-import common
-from autotest_lib.server.cros.device_health_profile import device_health_profile
-from autotest_lib.server.cros.device_health_profile import profile_constants
-
-
-class MockHostInfoStore(object):
-    def __init__(self):
-        self.board = 'mock_board'
-        self.model = 'mock_model'
-
-
-class MockHost(object):
-    def __init__(self, hostname):
-        self.hostname = hostname
-
-    def check_cached_up_status(self):
-        return True
-
-    def is_up(self):
-        return True
-
-    def send_file(self, source, dest):
-        return True
-
-    def get_file(self, source, dest):
-        return True
-
-    def is_file_exists(self, file_path):
-        return False
-
-
-def create_device_health_profile():
-    servohost = MockHost('placeholder_servohost_hostname')
-    host_info = MockHostInfoStore()
-    dhp = device_health_profile.DeviceHealthProfile(
-            hostname='placeholder_dut_hostname',
-            host_info=host_info,
-            result_dir=None)
-    dhp.init_profile(servohost)
-    return dhp
-
-
-class DeviceHealthProfileTestCase(unittest.TestCase):
-    dhp = create_device_health_profile()
-
-    def test_shows_not_loaded_till_profile_host_provided(self):
-        host_info = MockHostInfoStore()
-        dhp = device_health_profile.DeviceHealthProfile(
-                hostname='placeholder_dut_hostname',
-                host_info=host_info,
-                result_dir=None)
-        self.assertFalse(dhp.is_loaded())
-
-    def test_set_loaded_when_provide_profile_host_provided(self):
-        dhp = create_device_health_profile()
-        self.assertTrue(dhp.is_loaded())
-
-    def test_validate_device_health_profile_data(self):
-        profile_data = self.dhp.health_profile
-        self.assertEqual(self.dhp._validate_profile_data(profile_data), True)
-
-    def test_get_board(self):
-        self.assertEqual(self.dhp.get_board(), 'mock_board')
-
-    def test_get_model(self):
-        self.assertEqual(self.dhp.get_model(), 'mock_model')
-
-    def test_get_profile_version(self):
-        self.assertEqual(self.dhp.get_profile_version(),
-                         profile_constants.PROFILE_VERSION)
-
-    def test_dut_state(self):
-        self.assertEqual(self.dhp.get_dut_state(),
-                         profile_constants.DEFAULT_STRING_VALUE)
-        self.dhp.update_dut_state('test_state_1')
-        self.assertEqual(self.dhp.get_dut_state(), 'test_state_1')
-
-    def test_servo_state(self):
-        self.assertEqual(self.dhp.get_servo_state(),
-                         profile_constants.DEFAULT_STRING_VALUE)
-        self.dhp.update_servo_state('servod_issue')
-        self.assertEqual(self.dhp.get_servo_state(), 'servod_issue')
-
-    def test_cros_stable_version(self):
-        self.assertEqual(self.dhp.get_cros_stable_version(),
-                         profile_constants.DEFAULT_STRING_VALUE)
-        self.dhp.set_cros_stable_version('placeholder-release/R80-10000.0.0')
-        self.assertEqual(self.dhp.get_cros_stable_version(),
-                         'placeholder-release/R80-10000.0.0')
-
-    def test_firmware_stable_version(self):
-        self.assertEqual(self.dhp.get_firmware_stable_version(),
-                         profile_constants.DEFAULT_STRING_VALUE)
-        self.dhp.set_firmware_stable_version('placeholder_firmware_release')
-        self.assertEqual(self.dhp.get_firmware_stable_version(),
-                         'placeholder_firmware_release')
-
-    def test_last_update_time(self):
-        cached_time = self.dhp.get_last_update_time()
-        self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.refresh_update_time()
-        self.assertNotEqual(cached_time, self.dhp.get_last_update_time())
-
-    def test_last_update_time_epoch(self):
-        cached_time_epoch = self.dhp.get_last_update_time_epoch()
-        self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.refresh_update_time()
-        self.assertGreater(self.dhp.get_last_update_time_epoch(),
-                           cached_time_epoch)
-
-    def test_enter_current_state_time(self):
-        cached_time = self.dhp.get_enter_current_state_time()
-        self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.update_dut_state('test_state_2')
-        self.assertNotEqual(cached_time,
-                            self.dhp.get_enter_current_state_time())
-
-    def test_enter_current_state_time_epoch(self):
-        cached_time_epoch = self.dhp.get_enter_current_state_time_epoch()
-        self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.update_dut_state('test_state_3')
-        self.assertGreater(self.dhp.get_enter_current_state_time_epoch(),
-                           cached_time_epoch)
-
-    def test_repair_fail_count(self):
-        cached_count = self.dhp.get_repair_fail_count()
-        self.dhp.increase_repair_fail_count()
-        self.assertEqual(self.dhp.get_repair_fail_count(), cached_count + 1)
-
-    def test_provision_fail_count(self):
-        cached_count = self.dhp.get_provision_fail_count()
-        self.dhp.increase_provision_fail_count()
-        self.assertEqual(self.dhp.get_provision_fail_count(), cached_count + 1)
-
-    def test_failed_verifiers(self):
-        tag = 'placeholder_verifier'
-        self.assertEqual(self.dhp.get_failed_verifiers(), {})
-        self.assertEqual(self.dhp.get_failed_verifier(tag), 0)
-        self.dhp.insert_failed_verifier(tag)
-        self.assertEqual(self.dhp.get_failed_verifier(tag), 1)
-        self.assertEqual(self.dhp.get_failed_verifiers(),
-                         {'placeholder_verifier': 1})
-
-    def test_succeed_repair_action(self):
-        tag = 'placeholder_succeed_action'
-        self.assertEqual(self.dhp.get_succeed_repair_actions(), {})
-        self.assertEqual(self.dhp.get_succeed_repair_action(tag), 0)
-        self.dhp.insert_succeed_repair_action(tag)
-        self.assertEqual(self.dhp.get_succeed_repair_action(tag), 1)
-        self.assertEqual(self.dhp.get_succeed_repair_actions(),
-                         {'placeholder_succeed_action': 1})
-
-    def test_failed_repair_action(self):
-        tag = 'placeholder_failed_action'
-        self.assertEqual(self.dhp.get_failed_repair_actions(), {})
-        self.assertEqual(self.dhp.get_failed_repair_action(tag), 0)
-        self.dhp.insert_failed_repair_action(tag)
-        self.assertEqual(self.dhp.get_failed_repair_action(tag), 1)
-        self.assertEqual(self.dhp.get_failed_repair_actions(),
-                         {'placeholder_failed_action': 1})
-
-    def test_get_badblocks_ro_run_time(self):
-        cached_time = self.dhp.get_badblocks_ro_run_time()
-        self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.refresh_badblocks_ro_run_time()
-        self.assertNotEqual(cached_time, self.dhp.get_badblocks_ro_run_time())
-
-    def test_get_badblocks_ro_run_time_epoch(self):
-        cached_time_epoch = self.dhp.get_badblocks_ro_run_time_epoch()
-        self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.refresh_badblocks_ro_run_time()
-        self.assertGreater(self.dhp.get_badblocks_ro_run_time_epoch(),
-                           cached_time_epoch)
-
-    def test_get_badblocks_rw_run_time(self):
-        cached_time = self.dhp.get_badblocks_rw_run_time()
-        self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.refresh_badblocks_rw_run_time()
-        self.assertNotEqual(cached_time, self.dhp.get_badblocks_rw_run_time())
-
-    def test_get_badblocks_rw_run_time_epoch(self):
-        cached_time_epoch = self.dhp.get_badblocks_rw_run_time_epoch()
-        self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.refresh_badblocks_rw_run_time()
-        self.assertGreater(self.dhp.get_badblocks_rw_run_time_epoch(),
-                           cached_time_epoch)
-
-    def test_get_servo_micro_fw_update_time(self):
-        cached_time = self.dhp.get_servo_micro_fw_update_time()
-        self.assertRegexpMatches(cached_time, r'\d{4}[-/]\d{2}[-/]\d{2}')
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.refresh_servo_miro_fw_update_run_time()
-        self.assertNotEqual(cached_time,
-                            self.dhp.get_servo_micro_fw_update_time())
-
-    def test_get_servo_micro_fw_update_time_epoch(self):
-        cached_time_epoch = self.dhp.get_servo_micro_fw_update_time_epoch()
-        self.assertEqual(type(cached_time_epoch), int)
-        # Sleep 1 second so updated timestamp is different than current one.
-        time.sleep(1)
-        self.dhp.refresh_servo_miro_fw_update_run_time()
-        self.assertGreater(self.dhp.get_servo_micro_fw_update_time_epoch(),
-                           cached_time_epoch)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/cros/device_health_profile/profile_constants.py b/server/cros/device_health_profile/profile_constants.py
deleted file mode 100644
index 0984399..0000000
--- a/server/cros/device_health_profile/profile_constants.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-PROFILE_FILE_DIR = '/var/lib/device_health_profile/'
-
-# Constants that will be used as key name in device health profile.
-BOARD_KEY = 'board'
-MODEL_KEY = 'model'
-LAST_UPDATE_TIME_KEY = 'last_update_time'
-REPAIR_FAIL_COUNT_KEY = 'repair_fail_count'
-PROVISION_FAIL_COUNT_KEY = 'provision_fail_count'
-DUT_STATE_KEY = 'dut_state'
-SERVO_STATE_KEY = 'servo_state'
-FAILED_VERIFIERS_KEY = 'failed_verifiers'
-SUCCEED_REPAIR_ACTIONS_KEY = 'succeed_repair_actions'
-FAILED_REPAIR_ACTIONS_KEY = 'failed_repair_actions'
-TIME_ENTER_CURRENT_STATE_KEY = 'time_enter_current_state'
-PROFILE_VERSION_KEY = 'profile_verision'
-CROS_STABLE_VERSION_KEY = 'last_used_cros_stable_version'
-FIRMWARE_STABLE_VERSION_KEY = 'last_used_firmware_stable_version'
-FAFT_STABLE_VERSION_KEY = 'last_used_faft_stable_version'
-LAST_BADBLOCKS_RO_RUN_TIME_KEY = 'last_badblocks_ro_run_time'
-LAST_BADBLOCKS_RW_RUN_TIME_KEY = 'last_badblocks_rw_run_time'
-LAST_SERVO_MICRO_FW_UPDATE_RUN_TIME_KEY = 'last_servo_micro_flash_run_time'
-
-# Constant for values
-DEFAULT_STRING_VALUE = ''
-DEFAULT_TIMESTAMP = '1970-01-01 00:00:00'
-TIME_PATTERN = '%Y-%m-%d %H:%M:%S'
-DUT_STATE_READY = 'ready'
-DUT_STATE_REPAIR_FAILED = 'repair_failed'
-STATES_NEED_RESET_COUNTER = (DUT_STATE_READY, DUT_STATE_REPAIR_FAILED)
-
-# Please update profile version when you update any key name or template.
-PROFILE_VERSION = '0.0.1'
-
-DEVICE_HEALTH_PROFILE_TEMPLATE = {
-        BOARD_KEY: DEFAULT_STRING_VALUE,
-        MODEL_KEY: DEFAULT_STRING_VALUE,
-        LAST_UPDATE_TIME_KEY: DEFAULT_TIMESTAMP,
-        REPAIR_FAIL_COUNT_KEY: 0,
-        PROVISION_FAIL_COUNT_KEY: 0,
-        DUT_STATE_KEY: DEFAULT_STRING_VALUE,
-        SERVO_STATE_KEY: DEFAULT_STRING_VALUE,
-        FAILED_VERIFIERS_KEY: {},
-        SUCCEED_REPAIR_ACTIONS_KEY: {},
-        FAILED_REPAIR_ACTIONS_KEY: {},
-        TIME_ENTER_CURRENT_STATE_KEY: DEFAULT_TIMESTAMP,
-        PROFILE_VERSION_KEY: PROFILE_VERSION,
-        CROS_STABLE_VERSION_KEY: DEFAULT_STRING_VALUE,
-        FIRMWARE_STABLE_VERSION_KEY: DEFAULT_STRING_VALUE,
-        FAFT_STABLE_VERSION_KEY: DEFAULT_STRING_VALUE,
-        LAST_BADBLOCKS_RO_RUN_TIME_KEY: DEFAULT_TIMESTAMP,
-        LAST_BADBLOCKS_RW_RUN_TIME_KEY: DEFAULT_TIMESTAMP,
-        LAST_SERVO_MICRO_FW_UPDATE_RUN_TIME_KEY: DEFAULT_TIMESTAMP
-}
diff --git a/server/cros/dnsname_mangler.py b/server/cros/dnsname_mangler.py
deleted file mode 100644
index 9312611..0000000
--- a/server/cros/dnsname_mangler.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import socket
-
-from autotest_lib.client.common_lib import error
-
-# See server/cros/network/wifi_test_context_manager.py for commandline
-# flags to control IP addresses in WiFi tests.
-DEFAULT_FAILURE_MESSAGE = (
-        'Cannot infer DNS name of companion device from an IP address.')
-ATTENUATOR_FAILURE_MESSAGE = (
-        'Cannot infer DNS name of WiFi variable attenuator from a client IP '
-        'address.  Use --atten_addr=<ip or dns name>')
-ROUTER_FAILURE_MESSAGE = (
-        'Cannot infer DNS name of WiFi router from a client IP address.')
-PCAP_FAILURE_MESSAGE = (
-        'Cannot infer DNS name of Packet Capturer from a client IP address.')
-
-
-def is_ip_address(hostname):
-    """Infers whether |hostname| could be an IP address.
-
-    @param hostname: string DNS name or IP address.
-    @return True iff hostname is a valid IP address.
-
-    """
-    try:
-        socket.inet_aton(hostname)
-        return True
-    except socket.error:
-        return False
-
-
-def get_companion_device_addr(client_hostname,
-                              suffix,
-                              cmdline_override=None,
-                              not_dnsname_msg=DEFAULT_FAILURE_MESSAGE,
-                              allow_failure=False):
-    """Build a usable hostname for a test companion device from the client name.
-
-    Optionally, override the generated name with a commandline provided version.
-
-    @param client_hostname: string DNS name of device under test (the client).
-    @param suffix: string suffix to append to the client hostname.
-    @param cmdline_override: optional DNS name of companion device.  If this is
-            given, it overrides the generated client based hostname.
-    @param not_dnsname_msg: string message to include in the exception raised
-            if the client hostname is found to be an IP address rather than a
-            DNS name.
-    @param allow_failure: boolean True iff we should return None on failure to
-            infer a DNS name.
-    @return string DNS name of companion device or None if |allow_failure|
-            is True and no DNS name can be inferred.
-
-    """
-    if cmdline_override is not None:
-        return cmdline_override
-    if is_ip_address(client_hostname):
-        logging.error('%r looks like an IP address?', client_hostname)
-        if allow_failure:
-            return None
-        raise error.TestError(not_dnsname_msg)
-    parts = client_hostname.split('.', 1)
-    parts[0] = parts[0] + suffix
-    return '.'.join(parts)
-
-
-def get_router_addr(client_hostname, cmdline_override=None):
-    """Build a hostname for a WiFi router from the client hostname.
-
-    Optionally override that hostname with the provided command line hostname.
-
-    @param client_hostname: string DNS name of the client.
-    @param cmdline_override: string DNS name of the router provided
-            via commandline arguments.
-    @return usable DNS name for router host.
-
-    """
-    return get_companion_device_addr(
-            client_hostname,
-            '-router',
-            cmdline_override=cmdline_override,
-            not_dnsname_msg=ROUTER_FAILURE_MESSAGE)
-
-
-def get_pcap_addr(client_hostname,
-                  cmdline_override=None,
-                  allow_failure=False):
-    """Build a hostname for a packet capturer from the client hostname.
-
-    @param client_hostname: string DNS name of the client.
-    @param cmdline_override: string DNS name of the packet capturer provided
-            via commandline arguments.
-    @return usable DNS name for capturer host or None.
-
-    """
-    return get_companion_device_addr(
-            client_hostname,
-            '-pcap',
-            cmdline_override=cmdline_override,
-            not_dnsname_msg=PCAP_FAILURE_MESSAGE,
-            allow_failure=allow_failure)
-
-
-def get_attenuator_addr(client_hostname,
-                        cmdline_override=None,
-                        allow_failure=False):
-    """Build a hostname for a WiFi variable attenuator from the client hostname.
-
-    Optionally override that hostname with the provided command line hostname.
-
-    @param client_hostname: string DNS name of the client.
-    @param cmdline_override: string DNS name of the variable attenuator
-            controller provided via commandline arguments.
-    @param allow_failure: boolean True iff we should return None on failure to
-            infer a DNS name.
-    @return usable DNS name for attenuator controller.
-
-    """
-    return get_companion_device_addr(
-            client_hostname,
-            '-attenuator',
-            cmdline_override=cmdline_override,
-            not_dnsname_msg=ATTENUATOR_FAILURE_MESSAGE,
-            allow_failure=allow_failure)
diff --git a/server/cros/dnsname_mangler_unittest.py b/server/cros/dnsname_mangler_unittest.py
deleted file mode 100755
index d9d758a..0000000
--- a/server/cros/dnsname_mangler_unittest.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import unittest
-
-import common
-from autotest_lib.server.cros import dnsname_mangler
-
-HOST = 'chromeos1-row1-rack1-host1'
-ROUTER = 'chromeos1-row1-rack1-host1-router'
-ATTENUATOR = 'chromeos1-row1-rack1-host1-attenuator'
-
-HOST_FROM_OUTSIDE_LAB = HOST + '.cros'
-ROUTER_FROM_OUTSIDE_LAB = ROUTER + '.cros'
-ATTENUATOR_FROM_OUTSIDE_LAB = ATTENUATOR + '.cros'
-
-
-class DnsnameMangerUnittest(unittest.TestCase):
-    """Check that we're correctly mangling DNS names."""
-
-
-    def testRouterNamesCorrect(self):
-        """Router names should look like <dut_dns_name>-router[.cros]"""
-        self.assertEquals(ROUTER, dnsname_mangler.get_router_addr(HOST))
-        self.assertEquals(
-                ROUTER_FROM_OUTSIDE_LAB,
-                dnsname_mangler.get_router_addr(HOST_FROM_OUTSIDE_LAB))
-
-
-    def testAttenuatorNamesCorrect(self):
-        """Router names should look like <dut_dns_name>-attenuator[.cros]"""
-        self.assertEquals(ATTENUATOR, dnsname_mangler.get_attenuator_addr(HOST))
-        self.assertEquals(
-                ATTENUATOR_FROM_OUTSIDE_LAB,
-                dnsname_mangler.get_attenuator_addr(HOST_FROM_OUTSIDE_LAB))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/cros/factory_install_test.py b/server/cros/factory_install_test.py
deleted file mode 100644
index 2e6b967..0000000
--- a/server/cros/factory_install_test.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# Lint as: python2, python3
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Factory install tests.
-
-FactoryInstallTest is an abstract superclass; factory_InstallVM and
-factory_InstallServo are two concrete implementations.
-
-Subclasses of FactoryInstallTest supports the following flags:
-
-    factory_install_image: (required) path to factory install shim
-    factory_test_image: (required) path to factory test image
-    test_image: (required) path to ChromeOS test image
-    miniomaha_port: port for miniomaha
-    debug_make_factory_package: whether to re-make the factory package before
-        running tests (defaults to true; may be set to false for debugging
-        only)
-"""
-
-import glob, logging, os, re, shutil, socket, sys, six.moves._thread, time, traceback
-from abc import abstractmethod
-from six import StringIO
-
-from autotest_lib.client.bin import utils as client_utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test, utils
-
-
-# How long to wait for the mini-Omaha server to come up.
-_MINIOMAHA_TIMEOUT_SEC = 50
-
-# Path to make_factory_package.sh within the source root.
-_MAKE_FACTORY_PACKAGE_PATH = \
-    "platform/factory-utils/factory_setup/make_factory_package.sh"
-
-# Path to miniomaha.py within the source root.
-_MINIOMAHA_PATH = "platform/factory-utils/factory_setup/miniomaha.py"
-
-# Sleep interval for nontrivial operations (like rsyncing).
-_POLL_SLEEP_INTERVAL_SEC = 2
-
-# The hwid_updater script (run in the factory install shim).  This is a format
-# string with a single argument (the name of the HWID cfg).
-_HWID_UPDATER_SH_TEMPLATE = """
-echo Running hwid_updater "$@" >&2
-set -ex
-MOUNT_DIR=$(mktemp -d --tmpdir)
-mount "$1" "$MOUNT_DIR"
-ls -l "$MOUNT_DIR"
-mkdir -p "$MOUNT_DIR/dev_image/share/chromeos-hwid"
-echo %s > "$MOUNT_DIR/dev_image/share/chromeos-hwid/cfg"
-umount "$MOUNT_DIR"
-"""
-
-
-class FactoryInstallTest(test.test):
-    """
-    Factory install VM tests.
-
-    See file-level docstring for details.
-    """
-
-    version = 1
-
-    # How long to wait for the factory tests to install.
-    FACTORY_INSTALL_TIMEOUT_SEC = 1800
-
-    # How long to wait for the factory test image to come up.
-    WAIT_UP_TIMEOUT_SEC = 30
-
-    # How long to wait for the factory tests to run.
-    FACTORY_TEST_TIMEOUT_SEC = 240
-
-    # How long to wait for the ChromeOS image to run.
-    FIRST_BOOT_TIMEOUT_SEC = 480
-
-    #
-    # Abstract functions that must be overridden by subclasses.
-    #
-
-    @abstractmethod
-    def get_hwid_cfg(self):
-        """
-        Returns the HWID cfg, used to select a test list.
-        """
-        pass
-
-    @abstractmethod
-    def run_factory_install(self, shim_image):
-        """
-        Performs the factory install and starts the factory tests.
-
-        When this returns, the DUT should be starting up (or have already
-        started up) in factory test mode.
-        """
-        pass
-
-    @abstractmethod
-    def get_dut_client(self):
-        """
-        Returns a client (subclass of CrosHost) to control the DUT.
-        """
-        pass
-
-    @abstractmethod
-    def reboot_for_wipe(self):
-        """
-        Reboots the machine after preparing to wipe the hard drive.
-        """
-        pass
-
-    #
-    # Utility methods that may be used by subclasses.
-    #
-
-    def src_root(self):
-        """
-        Returns the CrOS source root.
-        """
-        return os.path.join(os.environ["CROS_WORKON_SRCROOT"], "src")
-
-    def parse_boolean(self, val):
-        """
-        Parses a string as a Boolean value.
-        """
-        # Insist on True or False, because (e.g.) bool('false') == True.
-        if str(val) not in ["True", "False"]:
-            raise error.TestError("Not a boolean: '%s'" % val)
-        return str(val) == "True"
-
-    #
-    # Private utility methods.
-    #
-
-    def _modify_file(self, path, func):
-        """
-        Modifies a file as the root user.
-
-        @param path: The path to the file to modify.
-        @param func: A function that will be invoked with a single argument
-            (the current contents of the file, or None if the file does not
-            exist) and which should return the new contents.
-        """
-        if os.path.exists(path):
-            contents = utils.system_output("sudo cat %s" % path)
-        else:
-            contents = func(None)
-
-        utils.run("sudo dd of=%s" % path, stdin=func(contents))
-
-    def _mount_partition(self, image, index):
-        """
-        Mounts a partition of an image temporarily using loopback.
-
-        The partition will be automatically unmounted when the test exits.
-
-        @param image: The image to mount.
-        @param index: The partition number to mount.
-        @return: The mount point.
-        """
-        mount_point = os.path.join(self.tmpdir,
-                                   "%s_%d" % (image, index))
-        if not os.path.exists(mount_point):
-            os.makedirs(mount_point)
-        common_args = "cgpt show -i %d %s" % (index, image)
-        offset = int(utils.system_output(common_args + " -b")) * 512
-        size = int(utils.system_output(common_args + " -s")) * 512
-        utils.run("sudo mount -o rw,loop,offset=%d,sizelimit=%d %s %s" % (
-                offset, size, image, mount_point))
-        self.cleanup_tasks.append(lambda: self._umount_partition(mount_point))
-        return mount_point
-
-    def _umount_partition(self, mount_point):
-        """
-        Unmounts the mount at the given mount point.
-
-        Also deletes the mount point directory.  Does not raise an
-        exception if the mount point does not exist or the mount fails.
-        """
-        if os.path.exists(mount_point):
-            utils.run("sudo umount -d %s" % mount_point)
-            os.rmdir(mount_point)
-
-    def _make_factory_package(self, factory_test_image, test_image):
-        """
-        Makes the factory package.
-        """
-        # Create a pseudo-HWID-updater that merely sets the HWID to "vm" or
-        # "servo" so that the appropriate test list will run.  (This gets run by
-        # the factory install shim.)
-        hwid_updater = os.path.join(self.tmpdir, "hwid_updater.sh")
-        with open(hwid_updater, "w") as f:
-            f.write(_HWID_UPDATER_SH_TEMPLATE % self.get_hwid_cfg())
-
-        utils.run("%s --factory=%s --release=%s "
-                  "--firmware_updater=none --hwid_updater=%s " %
-                  (os.path.join(self.src_root(), _MAKE_FACTORY_PACKAGE_PATH),
-                   factory_test_image, test_image, hwid_updater))
-
-    def _start_miniomaha(self):
-        """
-        Starts a mini-Omaha server and drains its log output.
-        """
-        def is_miniomaha_up():
-            try:
-                utils.urlopen(
-                    "http://localhost:%d" % self.miniomaha_port).read()
-                return True
-            except:
-                return False
-
-        assert not is_miniomaha_up()
-
-        self.miniomaha_output = os.path.join(self.outputdir, "miniomaha.out")
-
-        # TODO(jsalz): Add cwd to BgJob rather than including the 'cd' in the
-        # command.
-        bg_job = utils.BgJob(
-            "cd %s; exec ./%s --port=%d --factory_config=miniomaha.conf"
-            % (os.path.join(self.src_root(),
-                            os.path.dirname(_MINIOMAHA_PATH)),
-               os.path.basename(_MINIOMAHA_PATH),
-               self.miniomaha_port), verbose=True,
-            stdout_tee=utils.TEE_TO_LOGS,
-            stderr_tee=open(self.miniomaha_output, "w"))
-        self.cleanup_tasks.append(lambda: utils.nuke_subprocess(bg_job.sp))
-        six.moves._thread.start_new_thread(utils.join_bg_jobs, ([bg_job],))
-
-        client_utils.poll_for_condition(is_miniomaha_up,
-                                        timeout=_MINIOMAHA_TIMEOUT_SEC,
-                                        desc="Miniomaha server")
-
-    def _prepare_factory_install_shim(self, factory_install_image):
-        # Make a copy of the factory install shim image (to use as hdb).
-        modified_image = os.path.join(self.tmpdir, "shim.bin")
-        logging.info("Creating factory install image: %s", modified_image)
-        shutil.copyfile(factory_install_image, modified_image)
-
-        # Mount partition 1 of the modified_image and set the mini-Omaha server.
-        mount = self._mount_partition(modified_image, 1)
-        self._modify_file(
-            os.path.join(mount, "dev_image/etc/lsb-factory"),
-            lambda contents: re.sub(
-                r"^(CHROMEOS_(AU|DEV)SERVER)=.+",
-                r"\1=http://%s:%d/update" % (
-                    socket.gethostname(), self.miniomaha_port),
-                contents,
-                re.MULTILINE))
-        self._umount_partition(mount)
-
-        return modified_image
-
-    def _run_factory_tests_and_prepare_wipe(self):
-        """
-        Runs the factory tests and prepares the machine for wiping.
-        """
-        dut_client = self.get_dut_client()
-        if not dut_client.wait_up(FactoryInstallTest.WAIT_UP_TIMEOUT_SEC):
-            raise error.TestFail("DUT never came up to run factory tests")
-
-        # Poll the factory log, and wait for the factory_Review test to become
-        # active.
-        local_factory_log = os.path.join(self.outputdir, "factory.log")
-        remote_factory_log = "/var/log/factory.log"
-
-        # Wait for factory.log file to exist
-        dut_client.run(
-            "while ! [ -e %s ]; do sleep 1; done" % remote_factory_log,
-            timeout=FactoryInstallTest.FACTORY_TEST_TIMEOUT_SEC)
-
-        status_map = {}
-
-        def wait_for_factory_logs():
-            dut_client.get_file(remote_factory_log, local_factory_log)
-            data = open(local_factory_log).read()
-            new_status_map = dict(
-                re.findall(r"status change for (\S+) : \S+ -> (\S+)", data))
-            if status_map != new_status_map:
-                logging.info("Test statuses: %s", status_map)
-                # Can't assign directly since it's in a context outside
-                # this function.
-                status_map.clear()
-                status_map.update(new_status_map)
-            return status_map.get("factory_Review.z") == "ACTIVE"
-
-        client_utils.poll_for_condition(
-            wait_for_factory_logs,
-            timeout=FactoryInstallTest.FACTORY_TEST_TIMEOUT_SEC,
-            sleep_interval=_POLL_SLEEP_INTERVAL_SEC,
-            desc="Factory logs")
-
-        # All other statuses should be "PASS".
-        expected_status_map = {
-            "memoryrunin": "PASS",
-            "factory_Review.z": "ACTIVE",
-            "factory_Start.e": "PASS",
-            "hardware_SAT.memoryrunin_s1": "PASS",
-        }
-        if status_map != expected_status_map:
-            raise error.TestFail("Expected statuses of %s but found %s" % (
-                    expected_status_map, status_map))
-
-        dut_client.run("cd /usr/local/factory/bin; "
-                       "./gooftool --prepare_wipe --verbose")
-
-    def _complete_install(self):
-        """
-        Completes the install, resulting in a full ChromeOS image.
-        """
-        # Restart the SSH client: with a new OS, some configuration
-        # properties (e.g., availability of rsync) may have changed.
-        dut_client = self.get_dut_client()
-
-        if not dut_client.wait_up(FactoryInstallTest.FIRST_BOOT_TIMEOUT_SEC):
-            raise error.TestFail("DUT never came up after install")
-
-        # Check lsb-release to make sure we have a real live ChromeOS image
-        # (it should be the test build).
-        lsb_release = os.path.join(self.tmpdir, "lsb-release")
-        dut_client.get_file("/etc/lsb-release", lsb_release)
-        expected_re = r"^CHROMEOS_RELEASE_DESCRIPTION=.*Test Build"
-        data = open(lsb_release).read()
-        assert re.search(
-            "^CHROMEOS_RELEASE_DESCRIPTION=.*Test Build", data, re.MULTILINE), (
-            "Didn't find expected regular expression %s in lsb-release: " % (
-                expected_re, data))
-        logging.info("Install succeeded!  lsb-release is:\n%s", data)
-
-        dut_client.halt()
-        if not dut_client.wait_down(
-            timeout=FactoryInstallTest.WAIT_UP_TIMEOUT_SEC):
-            raise error.TestFail("Client never went down after ChromeOS boot")
-
-    #
-    # Autotest methods.
-    #
-
-    def setup(self):
-        self.cleanup_tasks = []
-        self.ssh_tunnel_port = utils.get_unused_port()
-
-    def run_once(self, factory_install_image, factory_test_image, test_image,
-                 miniomaha_port=None, debug_make_factory_package=True,
-                 **args):
-        """
-        Runs the test once.
-
-        See the file-level comments for an explanation of the test arguments.
-
-        @param args: Must be empty (present as a check against misspelled
-            arguments on the command line)
-        """
-        assert not args, "Unexpected arguments %s" % args
-
-        self.miniomaha_port = (
-            int(miniomaha_port) if miniomaha_port else utils.get_unused_port())
-
-        if self.parse_boolean(debug_make_factory_package):
-            self._make_factory_package(factory_test_image, test_image)
-        self._start_miniomaha()
-        shim_image = self._prepare_factory_install_shim(factory_install_image)
-        self.run_factory_install(shim_image)
-        self._run_factory_tests_and_prepare_wipe()
-        self.reboot_for_wipe()
-        self._complete_install()
-
-    def cleanup(self):
-        for task in self.cleanup_tasks:
-            try:
-                task()
-            except:
-                logging.info("Exception in cleanup task:")
-                traceback.print_exc(file=sys.stdout)
diff --git a/server/cros/faft/__init__.py b/server/cros/faft/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/server/cros/faft/__init__.py
+++ /dev/null
diff --git a/server/cros/faft/utils/__init__.py b/server/cros/faft/utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/server/cros/faft/utils/__init__.py
+++ /dev/null
diff --git a/server/cros/faft/utils/common.py b/server/cros/faft/utils/common.py
deleted file mode 100644
index 7915157..0000000
--- a/server/cros/faft/utils/common.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-autotest_dir = os.path.abspath(
-        os.path.join(dirname, os.pardir, os.pardir, os.pardir, os.pardir))
-client_dir = os.path.join(autotest_dir, 'client')
-sys.path.insert(0, client_dir)
-import setup_modules
-sys.path.pop(0)
-setup_modules.setup(base_path=autotest_dir, root_module_name='autotest_lib')
diff --git a/server/cros/faft/utils/config.py b/server/cros/faft/utils/config.py
deleted file mode 100644
index f5cc67c..0000000
--- a/server/cros/faft/utils/config.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-
-import common
-from autotest_lib.client.common_lib import error
-
-
-# Path to the local checkout of the fw-testing-configs repo
-_CONFIG_DIR = os.path.abspath(os.path.join(
-        os.path.dirname(os.path.realpath(__file__)), os.pardir,
-        'fw-testing-configs'))
-_CONSOLIDATED_JSON_BASENAME = 'CONSOLIDATED.json'
-
-
-def _consolidated_json_fp():
-    """Get the absolute path to CONSOLIDATED.json."""
-    return os.path.join(_CONFIG_DIR, _CONSOLIDATED_JSON_BASENAME)
-
-
-class Config(object):
-    """Configuration for FAFT tests.
-
-    This object is meant to be the interface to all configuration required
-    by FAFT tests, including device specific overrides.
-
-    It gets the values from the JSON files in _CONFIG_DIR.
-    Default values are declared in the DEFAULTS.json.
-    Platform-specific overrides come from <platform>.json.
-    If the platform has model-specific overrides, then those take precedence
-    over the platform's config.
-    If the platform inherits overrides from a parent platform, then the child
-    platform's overrides take precedence over the parent's.
-
-    @ivar platform: string containing the board name being tested.
-    @ivar model: string containing the model name being tested
-    """
-
-    def __init__(self, platform, model=None):
-        """Initialize an object with FAFT settings.
-        Load JSON in order of importance (model, platform, parent/s, DEFAULTS).
-
-        @param platform: The name of the platform being tested.
-        """
-        self._precedence_list = []
-        self._precedence_names = []
-        with open(_consolidated_json_fp()) as f:
-            consolidated_json = json.load(f)
-        # Load the most specific JSON config possible by splitting `platform`
-        # at '_'/'-' and reversing ([::-1]). For example, veyron_minnie should
-        # load minnie.json. octopus_fleex should look for fleex.json. It
-        # doesn't exist, so instead it loads octopus.json.
-        platform = platform.lower().replace('-', '_')
-        for p in platform.rsplit('_')[::-1]:
-            logging.debug('Looking for %s config', p)
-            if p in consolidated_json:
-                logging.info('Found %s config', p)
-                self.platform = p
-                break
-        else:
-            self.platform = platform
-        if self.platform in consolidated_json:
-            platform_config = consolidated_json[self.platform]
-            seen_platforms = [self.platform]
-            self._add_cfg_to_precedence(self.platform, platform_config)
-            model_configs = platform_config.get('models', {})
-            model_config = model_configs.get(model, None)
-            if model_config is not None:
-                self._add_cfg_to_precedence(
-                        'MODEL:%s' % model, model_config, prepend=True)
-                logging.debug('Using model override for %s', model)
-            parent_platform = self._precedence_list[-1].get('parent', None)
-            while parent_platform is not None:
-                if parent_platform in seen_platforms:
-                    loop = ' -> '.join(seen_platforms + [parent_platform])
-                    raise error.TestError('fw-testing-configs for platform %s '
-                                          'contains an inheritance loop: %s' % (
-                                          self.platform, loop))
-                parent_config = consolidated_json[parent_platform]
-                seen_platforms.append(parent_platform)
-                self._add_cfg_to_precedence(parent_platform, parent_config)
-                parent_platform = self._precedence_list[-1].get('parent', None)
-        else:
-            logging.debug('Platform %s not found in %s. Using DEFAULTS.',
-                          self.platform, consolidated_json)
-        default_config = consolidated_json['DEFAULTS']
-        self._add_cfg_to_precedence('DEFAULTS', default_config)
-
-        # Set attributes
-        all_attributes = self._precedence_list[-1].keys()
-        self.attributes = {}
-        self.attributes['platform'] = self.platform
-        for attribute in all_attributes:
-            if attribute.endswith('.DOC') or attribute == 'models':
-                continue
-            for config_dict in self._precedence_list:
-                if attribute in config_dict:
-                    self.attributes[attribute] = config_dict[attribute]
-                    break
-
-    def _add_cfg_to_precedence(self, cfg_name, cfg, prepend=False):
-        """Add a configuration to self._precedence_list.
-
-        @ivar cfg_name: The name of the config.
-        @ivar cfg: The config dict.
-        @ivar prepend: If true, add to the beginning of self._precedence_list.
-                       Otherwise, add it to the end.
-        """
-        position = 0 if prepend else len(self._precedence_list)
-        self._precedence_list.insert(position, cfg)
-        self._precedence_names.insert(position, cfg_name)
-
-    def __getattr__(self, attr):
-        if attr in self.attributes:
-            return self.attributes[attr]
-        raise AttributeError('FAFT config has no attribute named %s' % attr)
-
-    def __str__(self):
-        str_list = []
-        str_list.append('----------[ FW Testing Config Variables ]----------')
-        str_list.append('--- Precedence list: %s ---' % self._precedence_names)
-        for attr in sorted(self.attributes):
-            str_list.append('  %s: %s' % (attr, self.attributes[attr]))
-        str_list.append('---------------------------------------------------')
-        return '\n'.join(str_list)
diff --git a/server/cros/faft/utils/config_unittest.py b/server/cros/faft/utils/config_unittest.py
deleted file mode 100755
index e50b838..0000000
--- a/server/cros/faft/utils/config_unittest.py
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import os
-import tempfile
-import unittest
-
-import common
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros.faft.utils import config
-
-
-class CanLoadDefaultTestCase(unittest.TestCase):
-    """Ensure that configs can load the default JSON"""
-
-    def runTest(self):
-        """Main test logic"""
-        # TODO, re-sync the faft config repo into tauto.
-        # platform = "foo"
-        # cfg = config.Config(platform)
-        # self.assertIsInstance(cfg.has_keyboard, bool)
-
-
-class _MockConfigTestCaseBaseClass(unittest.TestCase):
-    """
-    Base class which handles the setup/teardown of mock config files.
-
-    Sub-classes should declare a class attribute, mock_configs,
-    as a dict representing all platforms to be written as JSON files.
-    This class writes those JSON files during setUp() and deletes them
-    during tearDown().
-    During runTest(), sub-classes can create config.Config instances by name
-    and run assertions as normal.
-
-    """
-
-    mock_configs = None
-
-    def setUp(self):
-        """Set up a tempfile containing the test data"""
-        if self.mock_configs is None:
-            return
-
-        # Setup mock config._CONFIG_DIR, but remember the original.
-        self.mock_config_dir = tempfile.mkdtemp()
-        self.original_config_dir = config._CONFIG_DIR
-        config._CONFIG_DIR = self.mock_config_dir
-
-        # Write mock config file.
-        with open(config._consolidated_json_fp(), 'w') as f:
-            json.dump(self.mock_configs, f)
-
-    def tearDown(self):
-        """After tests are complete, delete the tempfile"""
-        if self.mock_configs is None:
-            return
-        os.remove(config._consolidated_json_fp())
-        os.rmdir(self.mock_config_dir)
-        config._CONFIG_DIR = self.original_config_dir
-
-
-class InheritanceTestCase(_MockConfigTestCaseBaseClass):
-    """Ensure that platforms inherit attributes correctly"""
-
-    mock_configs = {
-            'DEFAULTS': {
-                    'no_override': 'default',
-                    'parent_override': 'default',
-                    'child_override': 'default',
-                    'both_override': 'default',
-                    'parent': None
-            },
-            'childboard': {
-                    'child_override': 'child',
-                    'both_override': 'child',
-                    'parent': 'parentboard'
-            },
-            'parentboard': {
-                    'parent_override': 'parent',
-                    'both_override': 'parent'
-            }
-    }
-
-    def runTest(self):
-        """
-        Verify that the following situations resolve correctly:
-            A platform that inherit some overridess from another platform
-            A platform that does not inherit from another platform
-            A platform not found in the config file
-        """
-        child_config = config.Config('childboard')
-        #print(child_config)
-        self.assertEqual(child_config.no_override, 'default')
-        self.assertEqual(child_config.parent_override, 'parent')
-        self.assertEqual(child_config.child_override, 'child')
-        self.assertEqual(child_config.both_override, 'child')
-        with self.assertRaises(AttributeError):
-            child_config.foo  # pylint: disable=pointless-statement
-
-        parent_config = config.Config('parentboard')
-        self.assertEqual(parent_config.no_override, 'default')
-        self.assertEqual(parent_config.parent_override, 'parent')
-        self.assertEqual(parent_config.child_override, 'default')
-        self.assertEqual(parent_config.both_override, 'parent')
-
-        foo_config = config.Config('foo')
-        self.assertEqual(foo_config.no_override, 'default')
-        self.assertEqual(foo_config.parent_override, 'default')
-        self.assertEqual(foo_config.child_override, 'default')
-        self.assertEqual(foo_config.both_override, 'default')
-
-        # While we're here, verify that str(config) doesn't break
-        str(child_config)  # pylint: disable=pointless-statement
-
-
-class ModelOverrideTestCase(_MockConfigTestCaseBaseClass):
-    """Verify that models of boards inherit overrides with proper precedence"""
-    mock_configs = {
-            'parentboard': {
-                    'attr1': 'parent_attr1',
-                    'attr2': 'parent_attr2',
-                    'models': {
-                            'modelA': {
-                                    'attr1': 'parent_modelA_attr1'
-                            }
-                    }
-            },
-            'childboard': {
-                    'parent': 'parentboard',
-                    'attr1': 'child_attr1',
-                    'models': {
-                            'modelA': {
-                                    'attr1': 'child_modelA_attr1'
-                            }
-                    }
-            },
-            'DEFAULTS': {
-                    'models': None,
-                    'attr1': 'default',
-                    'attr2': 'default'
-            }
-    }
-
-    def runTest(self):
-        """Run assertions on test data"""
-        child_config = config.Config('childboard')
-        child_modelA_config = config.Config('childboard', 'modelA')
-        child_modelB_config = config.Config('childboard', 'modelB')
-        parent_config = config.Config('parentboard')
-        parent_modelA_config = config.Config('parentboard', 'modelA')
-        parent_modelB_config = config.Config('parentboard', 'modelB')
-
-        self.assertEqual(child_config.attr1, 'child_attr1')
-        self.assertEqual(child_config.attr2, 'parent_attr2')
-        self.assertEqual(child_modelA_config.attr1, 'child_modelA_attr1')
-        self.assertEqual(child_modelA_config.attr2, 'parent_attr2')
-        self.assertEqual(child_modelB_config.attr1, 'child_attr1')
-        self.assertEqual(child_modelB_config.attr2, 'parent_attr2')
-        self.assertEqual(parent_config.attr1, 'parent_attr1')
-        self.assertEqual(parent_config.attr2, 'parent_attr2')
-        self.assertEqual(parent_modelA_config.attr1, 'parent_modelA_attr1')
-        self.assertEqual(parent_modelA_config.attr2, 'parent_attr2')
-        self.assertEqual(parent_modelB_config.attr1, 'parent_attr1')
-        self.assertEqual(parent_modelB_config.attr2, 'parent_attr2')
-
-
-class DirectSelfInheritanceTestCase(_MockConfigTestCaseBaseClass):
-    """Ensure that a config which inherits from itself raises an error."""
-
-    mock_configs = {
-        'selfloop': {
-            'parent': 'selfloop',
-        },
-    }
-
-    def runTest(self):
-        """Run assertions on test data."""
-        with self.assertRaises(error.TestError):
-            config.Config('selfloop')
-
-
-class IndirectSelfInheritanceTestCase(_MockConfigTestCaseBaseClass):
-    """Ensure that configs which inherit from each other raise an error."""
-
-    mock_configs = {
-        'indirectloop1': {
-            'parent': 'indirectloop2',
-        },
-        'indirectloop2': {
-            'parent': 'indirectloop1',
-        },
-        'indirectloop3': {
-            'parent': 'indirectloop1',
-        },
-    }
-
-    def runTest(self):
-        """Run assertions on test data."""
-        with self.assertRaises(error.TestError):
-            config.Config('indirectloop1')
-        with self.assertRaises(error.TestError):
-            config.Config('indirectloop3')
-
-
-class FindMostSpecificConfigTestCase(_MockConfigTestCaseBaseClass):
-    """Ensure that configs named like $BOARD-kernelnext load $BOARD.json."""
-
-    mock_configs = {
-            'DEFAULTS': {},
-            'samus': {},
-            'veyron': {},
-            'minnie': {'parent': 'veyron'},
-    }
-
-    def runTest(self):
-        cfg = config.Config('samus-kernelnext')
-        self.assertEqual(config.Config('samus-kernelnext').platform, 'samus')
-        self.assertEqual(config.Config('samus-arc-r').platform, 'samus')
-        self.assertEqual(config.Config('veyron_minnie').platform, 'minnie')
-        self.assertEqual(config.Config('veyron_monroe').platform, 'veyron')
-        self.assertEqual(config.Config('veyron_minnie-arc-r').platform, 'minnie')
-        self.assertEqual(config.Config('veyron_monroe-arc-r').platform, 'veyron')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/cros/faft/utils/faft_checkers.py b/server/cros/faft/utils/faft_checkers.py
deleted file mode 100644
index 027483d..0000000
--- a/server/cros/faft/utils/faft_checkers.py
+++ /dev/null
@@ -1,218 +0,0 @@
-# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import re
-import logging
-
-from autotest_lib.client.common_lib import error
-
-
-class FAFTCheckers(object):
-    """Class that contains FAFT checkers."""
-    version = 1
-
-    def __init__(self, faft_framework):
-        self.faft_framework = faft_framework
-        self.faft_client = faft_framework.faft_client
-        self.faft_config = faft_framework.faft_config
-        self.fw_vboot2 = self.faft_client.system.get_fw_vboot2()
-
-    def _parse_crossystem_output(self, lines):
-        """Parse the crossystem output into a dict.
-
-        @param lines: The list of crossystem output strings.
-        @return: A dict which contains the crossystem keys/values.
-        @raise TestError: If wrong format in crossystem output.
-
-        >>> seq = FAFTSequence()
-        >>> seq._parse_crossystem_output([ \
-                "arch          = x86    # Platform architecture", \
-                "cros_debug    = 1      # OS should allow debug", \
-            ])
-        {'cros_debug': '1', 'arch': 'x86'}
-        >>> seq._parse_crossystem_output([ \
-                "arch=x86", \
-            ])
-        Traceback (most recent call last):
-            ...
-        TestError: Failed to parse crossystem output: arch=x86
-        >>> seq._parse_crossystem_output([ \
-                "arch          = x86    # Platform architecture", \
-                "arch          = arm    # Platform architecture", \
-            ])
-        Traceback (most recent call last):
-            ...
-        TestError: Duplicated crossystem key: arch
-        """
-        pattern = "^([^ =]*) *= *(.*[^ ]) *# [^#]*$"
-        parsed_list = {}
-        for line in lines:
-            matched = re.match(pattern, line.strip())
-            if not matched:
-                raise error.TestError("Failed to parse crossystem output: %s"
-                                      % line)
-            (name, value) = (matched.group(1), matched.group(2))
-            if name in parsed_list:
-                raise error.TestError("Duplicated crossystem key: %s" % name)
-            parsed_list[name] = value
-        return parsed_list
-
-    def crossystem_checker(self, expected_dict, suppress_logging=False):
-        """Check the crossystem values matched.
-
-        Given an expect_dict which describes the expected crossystem values,
-        this function check the current crossystem values are matched or not.
-
-        @param expected_dict: A dict which contains the expected values.
-        @param suppress_logging: True to suppress any logging messages.
-        @return: True if the crossystem value matched; otherwise, False.
-        """
-        succeed = True
-        lines = self.faft_client.system.run_shell_command_get_output(
-                'crossystem')
-        got_dict = self._parse_crossystem_output(lines)
-        for key in expected_dict:
-            if key not in got_dict:
-                logging.warn('Expected key %r not in crossystem result', key)
-                succeed = False
-                continue
-            if isinstance(expected_dict[key], str):
-                if got_dict[key] != expected_dict[key]:
-                    message = ('Expected %r value %r but got %r' % (
-                               key, expected_dict[key], got_dict[key]))
-                    succeed = False
-                else:
-                    message = ('Expected %r value %r == real value %r' % (
-                               key, expected_dict[key], got_dict[key]))
-
-            elif isinstance(expected_dict[key], tuple):
-                # Expected value is a tuple of possible actual values.
-                if got_dict[key] not in expected_dict[key]:
-                    message = ('Expected %r values %r but got %r' % (
-                               key, expected_dict[key], got_dict[key]))
-                    succeed = False
-                else:
-                    message = ('Expected %r values %r == real value %r' % (
-                               key, expected_dict[key], got_dict[key]))
-            else:
-                logging.warn('The expected value of %r is neither a str nor a '
-                             'dict: %r', key, expected_dict[key])
-                succeed = False
-                continue
-            if not suppress_logging:
-                logging.info(message)
-        return succeed
-
-    def mode_checker(self, mode):
-        """Check whether the DUT is in the given firmware boot mode.
-
-        @param mode: A string of the expected boot mode: normal, rec, or dev.
-        @return: True if the system in the given mode; otherwise, False.
-        @raise ValueError: If the expected boot mode is not one of normal, rec,
-                           or dev.
-        """
-        if mode not in ('normal', 'rec', 'dev'):
-            raise ValueError(
-                    'Unexpected boot mode %s: want normal, rec, or dev' % mode)
-        return self.faft_client.system.get_boot_mode() == mode
-
-    def fw_tries_checker(self,
-                         expected_mainfw_act,
-                         expected_fw_tried=True,
-                         expected_try_count=0):
-        """Check the current FW booted and try_count
-
-        Mainly for dealing with the vboot1-specific flags fwb_tries and
-        tried_fwb fields in crossystem.  In vboot2, fwb_tries is meaningless and
-        is ignored while tried_fwb is translated into fw_try_count.
-
-        @param expected_mainfw_act: A string of expected firmware, 'A', 'B', or
-                       None if don't care.
-        @param expected_fw_tried: True if tried expected FW at last boot.
-                       This means that mainfw_act=A,tried_fwb=0 or
-                       mainfw_act=B,tried_fwb=1. Set to False if want to
-                       check the opposite case for the mainfw_act.  This
-                       check is only performed in vboot1 as tried_fwb is
-                       never set in vboot2.
-        @param expected_try_count: Number of times to try a FW slot.
-
-        @return: True if the correct boot firmware fields matched.  Otherwise,
-                       False.
-        """
-        crossystem_dict = {'mainfw_act': expected_mainfw_act.upper()}
-
-        if not self.fw_vboot2:
-            if expected_mainfw_act == 'B':
-                tried_fwb_val = True
-            else:
-                tried_fwb_val = False
-            if not expected_fw_tried:
-                tried_fwb_val = not tried_fwb_val
-            crossystem_dict['tried_fwb'] = '1' if tried_fwb_val else '0'
-
-            crossystem_dict['fwb_tries'] = str(expected_try_count)
-        else:
-            crossystem_dict['fw_try_count'] = str(expected_try_count)
-        return self.crossystem_checker(crossystem_dict)
-
-    def dev_boot_usb_checker(self, dev_boot_usb=True, kernel_key_hash=False):
-        """Check the current boot is from a developer USB (Ctrl-U trigger).
-
-        @param dev_boot_usb: True to expect an USB boot;
-                             False to expect an internal device boot.
-        @param kernel_key_hash: True to expect an USB boot with kernkey_vfy
-                                value as 'hash';
-                                False to expect kernkey_vfy value as 'sig'.
-        @return: True if the current boot device matched; otherwise, False.
-        """
-        assert (dev_boot_usb or not kernel_key_hash), ("Invalid condition "
-            "dev_boot_usb_checker(%s, %s). kernel_key_hash should not be "
-            "True in internal disk boot.") % (dev_boot_usb, kernel_key_hash)
-        # kernkey_vfy value will be 'sig', when device booted in internal
-        # disk or booted in USB image signed with SSD key(Ctrl-U trigger).
-        expected_kernkey_vfy = 'sig'
-        if kernel_key_hash:
-            expected_kernkey_vfy = 'hash'
-        return (self.crossystem_checker({'mainfw_type': 'developer',
-                                         'kernkey_vfy':
-                                             expected_kernkey_vfy}) and
-                self.faft_client.system.is_removable_device_boot() ==
-                dev_boot_usb)
-
-    def root_part_checker(self, expected_part):
-        """Check the partition number of the root device matched.
-
-        @param expected_part: A string containing the number of the expected
-                              root partition.
-        @return: True if the currect root  partition number matched;
-                 otherwise, False.
-        """
-        part = self.faft_client.system.get_root_part()[-1]
-        if self.faft_framework.ROOTFS_MAP[expected_part] != part:
-            logging.info("Expected root part %s but got %s",
-                         self.faft_framework.ROOTFS_MAP[expected_part], part)
-            return False
-        return True
-
-    def ec_act_copy_checker(self, expected_copy):
-        """Check the EC running firmware copy matches.
-
-        @param expected_copy: A string containing 'RO', 'A', or 'B' indicating
-                              the expected copy of EC running firmware.
-        @return: True if the current EC running copy matches; otherwise, False.
-        """
-        cmd = 'ectool version'
-        lines = self.faft_client.system.run_shell_command_get_output(cmd)
-        pattern = re.compile("Firmware copy: (.*)")
-        for line in lines:
-            matched = pattern.match(line)
-            if matched:
-                if matched.group(1) == expected_copy:
-                    return True
-                else:
-                    logging.info("Expected EC in %s but now in %s",
-                                 expected_copy, matched.group(1))
-                    return False
-        logging.info("Wrong output format of '%s':\n%s", cmd, '\n'.join(lines))
-        return False
diff --git a/server/cros/faft/utils/mode_switcher.py b/server/cros/faft/utils/mode_switcher.py
deleted file mode 100644
index e0953d8..0000000
--- a/server/cros/faft/utils/mode_switcher.py
+++ /dev/null
@@ -1,1059 +0,0 @@
-# Copyright 2015 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import time
-
-from autotest_lib.client.common_lib import error
-from autotest_lib.server.cros import vboot_constants as vboot
-
-DEBOUNCE_STATE = 'debouncing'
-
-class ConnectionError(Exception):
-    """Raised on an error of connecting DUT."""
-    pass
-
-
-class _BaseFwBypasser(object):
-    """Base class that controls bypass logic for firmware screens."""
-
-    # Duration of holding Volume down button to quickly bypass the developer
-    # warning screen in tablets/detachables.
-    HOLD_VOL_DOWN_BUTTON_BYPASS = 3
-
-    def __init__(self, faft_framework):
-        self.faft_framework = faft_framework
-        self.servo = faft_framework.servo
-        self.faft_config = faft_framework.faft_config
-        self.client_host = faft_framework._client
-        self.ec = getattr(faft_framework, 'ec', None)
-
-
-    def bypass_dev_mode(self):
-        """Bypass the dev mode firmware logic to boot internal image."""
-        raise NotImplementedError
-
-
-    def bypass_dev_boot_usb(self):
-        """Bypass the dev mode firmware logic to boot USB."""
-        raise NotImplementedError
-
-
-    def bypass_rec_mode(self):
-        """Bypass the rec mode firmware logic to boot USB."""
-        raise NotImplementedError
-
-
-    def trigger_dev_to_rec(self):
-        """Trigger to the rec mode from the dev screen."""
-        raise NotImplementedError
-
-
-    def trigger_rec_to_dev(self):
-        """Trigger to the dev mode from the rec screen."""
-        raise NotImplementedError
-
-
-    def trigger_dev_to_normal(self):
-        """Trigger to the normal mode from the dev screen."""
-        raise NotImplementedError
-
-
-    # This is used as a workaround of a bug in RO - DUT does not supply
-    # Vbus when in SRC_ACCESSORY state (we set servo to snk before booting
-    # to recovery due to the assumption of no PD in RO). It causes that DUT can
-    # not see USB Stick in recovery mode(RO) despite being DFP(b/159938441).
-    # The bug in RO has been fixed in 251212fb.
-    # Some boards already have it in RO, so the issue does not appear
-    def check_vbus_and_pd_state(self):
-        """Perform PD power and data swap, if DUT is SRC and doesn't supply
-        Vbus"""
-        if (self.ec and self.faft_config.ec_ro_vbus_bug
-                    and self.servo.is_servo_v4_type_c()):
-            time.sleep(self.faft_framework.PD_RESYNC_DELAY)
-            servo_pr_role = self.servo.get_servo_v4_role()
-            if servo_pr_role == 'snk':
-                mv = self.servo.get_vbus_voltage()
-                # Despite the faft_config, make sure the issue occurs -
-                # servo is snk and vbus is not supplied.
-                if mv is not None and mv < self.servo.VBUS_THRESHOLD:
-                    # Make servo SRC to supply Vbus correctly
-                    self.servo.set_servo_v4_role('src')
-                    time.sleep(self.faft_framework.PD_RESYNC_DELAY)
-
-            # After reboot, EC can be UFP so check that
-            MAX_PORTS = 2
-            ec_is_dfp = False
-            for port in range(0, MAX_PORTS):
-                if self.ec.is_dfp(port):
-                    ec_is_dfp = True
-                    break
-
-            if not ec_is_dfp:
-                # EC is UFP, perform PD Data Swap
-                for port in range(0, MAX_PORTS):
-                    self.ec.send_command("pd %d swap data" % port)
-                    time.sleep(self.faft_framework.PD_RESYNC_DELAY)
-                    # Make sure EC is DFP now
-                    if self.ec.is_dfp(port):
-                        ec_is_dfp = True
-                        break
-
-            if not ec_is_dfp:
-                # EC is still UFP
-                raise error.TestError('DUT is not DFP in recovery mode.')
-
-
-class _KeyboardBypasser(_BaseFwBypasser):
-    """Controls bypass logic via keyboard shortcuts for menu UI."""
-
-    def bypass_dev_mode(self):
-        """Bypass the dev mode firmware logic to boot internal image.
-
-        Press Ctrl-D repeatedly. To obtain a low firmware boot time, pressing
-        Ctrl+D for every half second until firmware_screen delay has been
-        reached.
-        """
-        logging.info("Pressing Ctrl-D.")
-        # At maximum, device waits for twice of firmware_screen delay to
-        # bypass the Dev screen.
-        timeout = time.time() + (self.faft_config.firmware_screen * 2)
-        while time.time() < timeout:
-            self.servo.ctrl_d()
-            time.sleep(0.5)
-            if self.client_host.ping_wait_up(timeout=0.1):
-                break
-
-
-    def bypass_dev_boot_usb(self):
-        """Bypass the dev mode firmware logic to boot USB."""
-        self.faft_framework.wait_for('firmware_screen', 'Pressing ctrl+u')
-        self.servo.ctrl_u()
-
-
-    def bypass_dev_default_boot(self):
-        """Bypass the dev mode firmware logic to boot from default target."""
-        self.faft_framework.wait_for('firmware_screen', 'Pressing enter')
-        self.servo.enter_key()
-
-
-    def bypass_rec_mode(self):
-        """Bypass the rec mode firmware logic to boot USB."""
-        self.servo.switch_usbkey('host')
-        self.faft_framework.wait_for('usb_plug', 'Switching usb key to DUT')
-        self.check_vbus_and_pd_state()
-        self.servo.switch_usbkey('dut')
-        logging.info('Enabled dut_sees_usb')
-        if not self.client_host.ping_wait_up(
-                timeout=self.faft_config.delay_reboot_to_ping):
-            logging.info('ping timed out, try REC_ON')
-            psc = self.servo.get_power_state_controller()
-            psc.power_on(psc.REC_ON)
-            # Check Vbus after reboot again
-            self.check_vbus_and_pd_state()
-
-
-    def trigger_dev_to_rec(self):
-        """Trigger to the to-norm screen from the dev screen."""
-        self.faft_framework.wait_for('firmware_screen', 'Pressing ctrl+s')
-        self.servo.ctrl_s()
-
-
-    def trigger_rec_to_dev(self):
-        """Trigger to the dev mode from the rec screen."""
-        self.faft_framework.wait_for('firmware_screen', 'Pressing ctrl+d')
-        self.servo.ctrl_d()
-        self.faft_framework.wait_for('keypress_delay',
-                                     'Pressing button to switch to dev mode')
-        if self.faft_config.rec_button_dev_switch:
-            logging.info('RECOVERY button pressed to switch to dev mode')
-            self.servo.toggle_recovery_switch()
-        elif self.faft_config.power_button_dev_switch:
-            logging.info('POWER button pressed to switch to dev mode')
-            self.servo.power_normal_press()
-        else:
-            logging.info('ENTER pressed to switch to dev mode')
-            self.servo.enter_key()
-
-
-    def trigger_dev_to_normal(self):
-        """Trigger to the normal mode from the dev screen."""
-        # Navigate to to-norm screen
-        self.faft_framework.wait_for('firmware_screen', 'Pressing ctrl+s')
-        self.servo.ctrl_s()
-        # Select "Confirm"
-        self.faft_framework.wait_for('keypress_delay', 'Pressing enter')
-        self.servo.enter_key()
-
-
-class _LegacyKeyboardBypasser(_KeyboardBypasser):
-    """Controls bypass logic via keyboard shortcuts for legacy clamshell UI."""
-
-    def trigger_dev_to_rec(self):
-        """Trigger to the to-norm screen from the dev screen."""
-        self.faft_framework.wait_for('firmware_screen', 'Pressing enter')
-        self.servo.enter_key()
-
-    def trigger_dev_to_normal(self):
-        """Trigger to the normal mode from the dev screen."""
-        self.faft_framework.wait_for('firmware_screen', 'Pressing enter')
-        self.servo.enter_key()
-        self.faft_framework.wait_for('keypress_delay', 'Pressing enter')
-        self.servo.enter_key()
-
-
-class _JetstreamBypasser(_BaseFwBypasser):
-    """Controls bypass logic of Jetstream devices."""
-
-    def bypass_dev_mode(self):
-        """Bypass the dev mode firmware logic to boot internal image."""
-        # Jetstream does nothing to bypass.
-        pass
-
-
-    def bypass_dev_boot_usb(self):
-        """Bypass the dev mode firmware logic to boot USB."""
-        self.servo.switch_usbkey('dut')
-        self.faft_framework.wait_for('firmware_screen', 'Toggling development switch')
-        self.servo.toggle_development_switch()
-
-
-    def bypass_rec_mode(self):
-        """Bypass the rec mode firmware logic to boot USB."""
-        self.servo.switch_usbkey('host')
-        self.faft_framework.wait_for('usb_plug', 'Switching usb key to DUT')
-        self.check_vbus_and_pd_state()
-        self.servo.switch_usbkey('dut')
-        if not self.client_host.ping_wait_up(
-                timeout=self.faft_config.delay_reboot_to_ping):
-            psc = self.servo.get_power_state_controller()
-            psc.power_on(psc.REC_ON)
-            # Check Vbus after reboot again
-            self.check_vbus_and_pd_state()
-
-
-    def trigger_dev_to_rec(self):
-        """Trigger to the rec mode from the dev screen."""
-        # Jetstream does not have this triggering logic.
-        raise NotImplementedError
-
-
-    def trigger_rec_to_dev(self):
-        """Trigger to the dev mode from the rec screen."""
-        self.servo.disable_development_mode()
-        self.faft_framework.wait_for('firmware_screen', 'Toggling development switch')
-        self.servo.toggle_development_switch()
-
-
-    def trigger_dev_to_normal(self):
-        """Trigger to the normal mode from the dev screen."""
-        # Jetstream does not have this triggering logic.
-        raise NotImplementedError
-
-
-class _TabletDetachableBypasser(_BaseFwBypasser):
-    """Controls bypass logic of tablet/ detachable chromebook devices."""
-
-    def set_button(self, button, duration, info):
-        """Helper method that sets the button hold time for UI selections"""
-        self.servo.set_nocheck(button, duration)
-        self.faft_framework.wait_for('keypress_delay')
-        logging.info(info)
-
-
-    def bypass_dev_boot_usb(self):
-        """Bypass the dev mode firmware logic to boot USB.
-
-        On tablets/ detachables, recovery entered by pressing pwr, vol up
-        & vol down buttons for 10s.
-           Menu options seen in DEVELOPER WARNING screen:
-                 Developer Options
-                 Show Debug Info
-                 Enable Root Verification
-                 Power Off*
-                 Language
-           Menu options seen in DEV screen:
-                 Boot legacy BIOS
-                 Boot USB image
-                 Boot developer image*
-                 Cancel
-                 Power off
-                 Language
-
-        Vol up button selects previous item, vol down button selects
-        next item and pwr button selects current activated item.
-
-        Note: if dev_default_boot=usb, the default selection will start on USB,
-        and this will move up one to legacy boot instead.
-        """
-        self.trigger_dev_screen()
-        self.faft_framework.wait_for('firmware_screen', 'Pressing volume up')
-        self.set_button('volume_up_hold', 100, ('Selecting power as'
-                        ' enter key to select Boot USB Image'))
-        self.servo.power_short_press()
-
-    def bypass_dev_default_boot(self):
-        """Open the Developer Options menu, and accept the default boot device
-
-           Menu options seen in DEVELOPER WARNING screen:
-                 Developer Options
-                 Show Debug Info
-                 Enable Root Verification
-                 Power Off*
-                 Language
-           Menu options seen in DEV screen:
-                 Boot legacy BIOS*      (default if dev_default_boot=legacy)
-                 Boot USB image*        (default if dev_default_boot=usb)
-                 Boot developer image*  (default if dev_default_boot=disk)
-                 Cancel
-                 Power off
-                 Language
-
-        Vol up button selects previous item, vol down button selects
-        next item and pwr button selects current activated item.
-        """
-        self.trigger_dev_screen()
-        self.faft_framework.wait_for('firmware_screen', 'Pressing power button')
-        logging.info('Selecting power as enter key to accept the default'
-                     ' boot option.')
-        self.servo.power_short_press()
-
-    def bypass_rec_mode(self):
-        """Bypass the rec mode firmware logic to boot USB."""
-        self.servo.switch_usbkey('host')
-        self.faft_framework.wait_for('usb_plug', 'Switching usb key to DUT')
-        self.check_vbus_and_pd_state()
-        self.servo.switch_usbkey('dut')
-        logging.info('Enabled dut_sees_usb')
-        if not self.client_host.ping_wait_up(
-                timeout=self.faft_config.delay_reboot_to_ping):
-            logging.info('ping timed out, try REC_ON')
-            psc = self.servo.get_power_state_controller()
-            psc.power_on(psc.REC_ON)
-            # Check Vbus after reboot again
-            self.check_vbus_and_pd_state()
-
-
-    def bypass_dev_mode(self):
-        """Bypass the developer warning screen immediately to boot into
-        internal disk.
-
-        On tablets/detachables, press & holding the Volume down button for
-        3-seconds will quickly bypass the developer warning screen.
-        """
-        # Unit for the "volume_down_hold" console command is msec.
-        duration = (self.HOLD_VOL_DOWN_BUTTON_BYPASS + 0.1) * 1000
-        logging.info("Press and hold volume down button for %.1f seconds to "
-                     "immediately bypass the Developer warning screen.",
-                     self.HOLD_VOL_DOWN_BUTTON_BYPASS + 0.1)
-        # At maximum, device waits for twice of firmware_screen delay to
-        # bypass the Dev screen.
-        timeout = time.time() + (self.faft_config.firmware_screen * 2)
-        # To obtain a low firmware boot time, volume_down button pressed for
-        # every 3.1 seconds until firmware_screen delay has been reached.
-        while time.time() < timeout:
-            self.servo.set_nocheck('volume_down_hold', duration)
-            # After pressing 'volume_down_hold' button, wait for 0.1 seconds
-            # before start pressing the button for next iteration.
-            time.sleep(0.1)
-            if self.client_host.ping_wait_up(timeout=0.1):
-                break
-
-
-    def trigger_dev_screen(self):
-        """Helper method that transitions from DEVELOPER WARNING to DEV screen
-
-           Menu options seen in DEVELOPER WARNING screen:
-                 Developer Options
-                 Show Debug Info
-                 Enable Root Verification
-                 Power Off*
-                 Language
-           Menu options seen in DEV screen:
-                 Boot legacy BIOS
-                 Boot USB image
-                 Boot developer image*
-                 Cancel
-                 Power off
-                 Language
-        Vol up button selects previous item, vol down button selects
-        next item and pwr button selects current activated item.
-        """
-        self.faft_framework.wait_for('firmware_screen', 'Pressing volume up')
-        self.servo.set_nocheck('volume_up_hold', 100)
-        self.faft_framework.wait_for('keypress_delay', 'Pressing volume up')
-        self.servo.set_nocheck('volume_up_hold', 100)
-        self.faft_framework.wait_for('keypress_delay', 'Pressing volume up')
-        self.set_button('volume_up_hold', 100, ('Selecting power '
-                        'as enter key to select Developer Options'))
-        self.servo.power_short_press()
-
-
-    def trigger_rec_to_dev(self):
-        """Trigger to the dev mode from the rec screen using vol up button.
-
-        On tablets/ detachables, recovery entered by pressing pwr, vol up
-        & vol down buttons for 10s. TO_DEV screen is entered by pressing
-        vol up & vol down buttons together on the INSERT screen.
-           Menu options seen in TO_DEV screen:
-                 Confirm enabling developer mode
-                 Cancel*
-                 Power off
-                 Language
-        Vol up button selects previous item, vol down button selects
-        next item and pwr button selects current activated item.
-        """
-        self.faft_framework.wait_for('firmware_screen', 'Pressing volume up + volume down')
-        self.set_button('volume_up_down_hold', 100, ('Enter Recovery Menu.'))
-        self.faft_framework.wait_for('keypress_delay', 'Pressing volume up')
-        self.set_button('volume_up_hold', 100, ('Selecting power as '
-                        'enter key to select Confirm Enabling Developer Mode'))
-        self.servo.power_short_press()
-        self.faft_framework.wait_for('firmware_screen')
-
-
-    def trigger_dev_to_normal(self):
-        """Trigger to the normal mode from the dev screen.
-
-           Menu options seen in DEVELOPER WARNING screen:
-                 Developer Options
-                 Show Debug Info
-                 Enable Root Verification
-                 Power Off*
-                 Language
-           Menu options seen in TO_NORM screen:
-                 Confirm Enabling Verified Boot*
-                 Cancel
-                 Power off
-                 Language
-        Vol up button selects previous item, vol down button selects
-        next item and pwr button selects current activated item.
-        """
-        self.faft_framework.wait_for('firmware_screen', 'Pressing volume up')
-        self.set_button('volume_up_hold', 100, ('Selecting '
-                        'Enable Root Verification using pwr '
-                        'button to enter TO_NORM screen'))
-        self.servo.power_short_press()
-        logging.info('Transitioning from DEV to TO_NORM screen.')
-        self.faft_framework.wait_for('firmware_screen', 'Pressing power button')
-        logging.info('Selecting Confirm Enabling Verified '
-                        'Boot using pwr button in '
-                        'TO_NORM screen')
-        self.servo.power_short_press()
-
-    def trigger_dev_to_rec(self):
-        """Trigger to the TO_NORM screen from the dev screen.
-           Menu options seen in DEVELOPER WARNING screen:
-                 Developer Options
-                 Show Debug Info
-                 Enable Root Verification
-                 Power Off*
-                 Language
-           Menu options seen in TO_NORM screen:
-                 Confirm Enabling Verified Boot*
-                 Cancel
-                 Power off
-                 Language
-        Vol up button selects previous item, vol down button selects
-        next item and pwr button selects current activated item.
-        """
-        self.faft_framework.wait_for('firmware_screen', 'Pressing volume up')
-        self.set_button('volume_up_hold', 100, ('Selecting '
-                        'Enable Root Verification using pwr '
-                        'button to enter TO_NORM screen'))
-        self.servo.power_short_press()
-        logging.info('Transitioning from DEV to TO_NORM screen.')
-        self.faft_framework.wait_for('firmware_screen', 'Pressing volume down')
-
-        # In firmware_FwScreenPressPower, test will power off the DUT using
-        # Power button in second screen (TO_NORM screen) so scrolling to
-        # Power-off is necessary in this case. Hence scroll to Power-off as
-        # a generic action and wait for next action of either Lid close or
-        # power button press.
-        self.servo.set_nocheck('volume_down_hold', 100)
-        self.faft_framework.wait_for('keypress_delay', 'Pressing volume down')
-        self.servo.set_nocheck('volume_down_hold', 100)
-        self.faft_framework.wait_for('keypress_delay')
-
-
-class _BaseModeSwitcher(object):
-    """Base class that controls firmware mode switching."""
-
-    HOLD_VOL_DOWN_BUTTON_BYPASS = _BaseFwBypasser.HOLD_VOL_DOWN_BUTTON_BYPASS
-
-    FW_BYPASSER_CLASS = _BaseFwBypasser
-
-    def __init__(self, faft_framework):
-        self.faft_framework = faft_framework
-        self.client_host = faft_framework._client
-        self.faft_client = faft_framework.faft_client
-        self.servo = faft_framework.servo
-        self.faft_config = faft_framework.faft_config
-        self.checkers = faft_framework.checkers
-        self.bypasser = self._create_fw_bypasser()
-        original_boot_mode = self.faft_client.system.get_boot_mode()
-        # Only resume to normal/dev mode after test, not recovery.
-        self._backup_mode = 'dev' if original_boot_mode == 'dev' else 'normal'
-
-    def _create_fw_bypasser(self):
-        """Creates a proper firmware bypasser.
-
-        @rtype: _BaseFwBypasser
-        """
-        return self.FW_BYPASSER_CLASS(self.faft_framework)
-
-    def setup_mode(self, to_mode, allow_gbb_force=False):
-        """Setup for the requested boot mode.
-
-        It makes sure the system in the requested mode. If not, it tries to
-        do so.
-
-        @param to_mode: A string of boot mode, one of 'normal', 'dev', or 'rec'.
-        @param allow_gbb_force: Bool. If True, allow forcing dev mode via GBB
-                                flags. This is more reliable, but it can prevent
-                                testing other mode-switch workflows.
-        @raise TestFail: If the system not switched to expected mode after
-                         reboot_to_mode.
-
-        """
-        current_mode = self.faft_client.system.get_boot_mode()
-        if current_mode == to_mode:
-            logging.debug('System already in expected %s mode.', to_mode)
-            return
-        logging.info('System not in expected %s mode. Reboot into it.',
-                     to_mode)
-
-        self.reboot_to_mode(to_mode, allow_gbb_force=allow_gbb_force)
-        current_mode = self.faft_client.system.get_boot_mode()
-        if current_mode != to_mode:
-            raise error.TestFail(
-                    'After setup_mode, wanted mode=%s but got %s' %
-                    (to_mode, current_mode))
-
-    def restore_mode(self):
-        """Restores original dev mode status if it has changed.
-
-        @raise TestFail: If the system not restored to expected mode.
-        """
-        if self._backup_mode is None:
-            logging.debug('No backup mode to restore.')
-            return
-        current_mode = self.faft_client.system.get_boot_mode()
-        if current_mode == self._backup_mode:
-            logging.debug('System already in backup %s mode.', current_mode)
-            return
-
-        self.reboot_to_mode(self._backup_mode, allow_gbb_force=True)
-        current_mode = self.faft_client.system.get_boot_mode()
-        if current_mode != self._backup_mode:
-            raise error.TestFail(
-                    'After restore_mode, wanted mode=%s but got %s' %
-                    (self._backup_mode, current_mode))
-        self._backup_mode = None
-
-    def reboot_to_mode(self,
-                       to_mode,
-                       allow_gbb_force=False,
-                       sync_before_boot=True,
-                       wait_for_dut_up=True):
-        """Reboot and execute the mode switching sequence.
-
-        Normally this method simulates what a user would do to switch between
-        different modes of ChromeOS. However, if allow_gbb_force is True, then
-        booting to dev mode will instead be forced by GBB flags.
-
-        Note that the modes are end-states where the OS is booted up
-        to the Welcome screen, so it takes care of navigating through
-        intermediate steps such as various boot confirmation screens.
-
-        From the user perspective, these are the states (note that there's also
-        a rec_force_mrc mode which is like rec mode but forces MRC retraining):
-
-        normal <-----> dev <------ rec
-          ^                         ^
-          |                         |
-          +-------------------------+
-
-        Normal <-----> Dev:
-          _enable_dev_mode_and_reboot()
-
-        Rec,normal -----> Dev:
-          disable_rec_mode_and_reboot()
-
-        Any -----> normal:
-          _enable_normal_mode_and_reboot()
-
-        Normal <-----> rec:
-          enable_rec_mode_and_reboot(usb_state='dut')
-
-        Normal <-----> rec_force_mrc:
-          _enable_rec_mode_force_mrc_and_reboot(usb_state='dut')
-
-        Note that one shouldn't transition to dev again without going through the
-        normal mode.  This is because trying to disable os_verification when it's
-        already off is not supported by reboot_to_mode.
-
-        @param to_mode: The target mode, one of 'normal', 'dev', or 'rec'.
-        @param allow_gbb_force: Bool. If True, allow forcing dev mode via GBB
-                                flags. This is more reliable, but it can prevent
-                                testing other mode-switch workflows.
-        @param sync_before_boot: True to sync to disk before booting.
-        @param wait_for_dut_up: True to wait DUT online again. False to do the
-                                reboot and mode switching sequence only and may
-                                need more operations to pass the firmware
-                                screen.
-        """
-        logging.info(
-                '-[ModeSwitcher]-[ start reboot_to_mode(%r, %r, %r, %r)]-',
-                to_mode, sync_before_boot, allow_gbb_force, wait_for_dut_up)
-
-        from_mode = self.faft_client.system.get_boot_mode()
-        note = 'reboot_to_mode: from=%s, to=%s' % (from_mode, to_mode)
-
-        if sync_before_boot:
-            lines = self.faft_client.system.run_shell_command_get_output(
-                'crossystem')
-            logging.debug('-[ModeSwitcher]- crossystem output:\n%s',
-                          '\n'.join(lines))
-            devsw_cur = self.faft_client.system.get_crossystem_value(
-                'devsw_cur')
-            note += ', devsw_cur=%s' % devsw_cur
-            self.faft_framework.blocking_sync(freeze_for_reset=True)
-        note += '.'
-
-        # If booting to anything but dev mode, make sure we're not forcing dev.
-        # This is irrespective of allow_gbb_force: disabling the flag doesn't
-        # force a mode, it just stops forcing dev.
-        if to_mode != 'dev':
-            self.faft_framework.clear_set_gbb_flags(
-                    vboot.GBB_FLAG_FORCE_DEV_SWITCH_ON, 0, reboot=False)
-
-        if to_mode == 'rec':
-            self.enable_rec_mode_and_reboot(usb_state='dut')
-        elif to_mode == 'rec_force_mrc':
-            self._enable_rec_mode_force_mrc_and_reboot(usb_state='dut')
-        elif to_mode == 'dev':
-            if allow_gbb_force:
-                self.faft_framework.clear_set_gbb_flags(
-                        0, vboot.GBB_FLAG_FORCE_DEV_SWITCH_ON, reboot=True)
-            else:
-                self._enable_dev_mode_and_reboot()
-                if wait_for_dut_up:
-                    self.bypass_dev_mode()
-        elif to_mode == 'normal':
-            self._enable_normal_mode_and_reboot()
-        else:
-            raise NotImplementedError('Unexpected boot mode param: %s',
-                                      to_mode)
-        if wait_for_dut_up:
-            self.wait_for_client(retry_power_on=True, note=note)
-
-        logging.info('-[ModeSwitcher]-[ end reboot_to_mode(%r, %r, %r, %r)]-',
-                     to_mode, sync_before_boot, allow_gbb_force,
-                     wait_for_dut_up)
-
-    def simple_reboot(self, reboot_type='warm', sync_before_boot=True):
-        """Simple reboot method
-
-        Just reboot the DUT using either cold or warm reset.  Does not wait for
-        DUT to come back online.  Will wait for test to handle this.
-
-        @param reboot_type: A string of reboot type, 'warm' or 'cold'.
-                            If reboot_type != warm/cold, raise exception.
-        @param sync_before_boot: True to sync to disk before booting.
-                                 If sync_before_boot=False, DUT offline before
-                                 calling mode_aware_reboot.
-        """
-        if reboot_type == 'warm':
-            reboot_method = self.servo.get_power_state_controller().warm_reset
-        elif reboot_type == 'cold':
-            reboot_method = self.servo.get_power_state_controller().reset
-        else:
-            raise NotImplementedError('Not supported reboot_type: %s',
-                                      reboot_type)
-        if sync_before_boot:
-            boot_id = self.faft_framework.get_bootid()
-            self.faft_framework.blocking_sync(freeze_for_reset=True)
-        logging.info("-[ModeSwitcher]-[ start simple_reboot(%r) ]-",
-                     reboot_type)
-        reboot_method()
-        if sync_before_boot:
-            self.wait_for_client_offline(orig_boot_id=boot_id)
-        logging.info("-[ModeSwitcher]-[ end simple_reboot(%r) ]-",
-                     reboot_type)
-
-    def mode_aware_reboot(self, reboot_type=None, reboot_method=None,
-                          sync_before_boot=True, wait_for_dut_up=True):
-        """Uses a mode-aware way to reboot DUT.
-
-        For example, if DUT is in dev mode, it requires pressing Ctrl-D to
-        bypass the developer screen.
-
-        @param reboot_type: A string of reboot type, one of 'warm', 'cold', or
-                            'custom'. Default is a warm reboot.
-        @param reboot_method: A custom method to do the reboot. Only use it if
-                              reboot_type='custom'.
-        @param sync_before_boot: True to sync to disk before booting.
-                                 If sync_before_boot=False, DUT offline before
-                                 calling mode_aware_reboot.
-        @param wait_for_dut_up: True to wait DUT online again. False to do the
-                                reboot only.
-        """
-        if reboot_type is None or reboot_type == 'warm':
-            reboot_method = self.servo.get_power_state_controller().warm_reset
-        elif reboot_type == 'cold':
-            reboot_method = self.servo.get_power_state_controller().reset
-        elif reboot_type != 'custom':
-            raise NotImplementedError('Not supported reboot_type: %s',
-                                      reboot_type)
-
-        logging.info("-[ModeSwitcher]-[ start mode_aware_reboot(%r, %s, ..) ]-",
-                     reboot_type, reboot_method.__name__)
-        is_dev = is_rec = is_devsw_boot = False
-        if sync_before_boot:
-            is_dev = self.checkers.mode_checker('dev')
-            is_rec = self.checkers.mode_checker('rec')
-            is_devsw_boot = self.checkers.crossystem_checker(
-                                               {'devsw_boot': '1'}, True)
-            boot_id = self.faft_framework.get_bootid()
-
-            self.faft_framework.blocking_sync(reboot_type != 'custom')
-        if is_rec:
-            logging.info("-[mode_aware_reboot]-[ is_rec=%s is_dev_switch=%s ]-",
-                         is_rec, is_devsw_boot)
-        else:
-            logging.info("-[mode_aware_reboot]-[ is_dev=%s ]-", is_dev)
-        reboot_method()
-        if sync_before_boot:
-            self.wait_for_client_offline(orig_boot_id=boot_id)
-        # Encapsulating the behavior of skipping dev firmware screen,
-        # hitting ctrl-D
-        # Note that if booting from recovery mode, we can predict the next
-        # boot based on the developer switch position at boot (devsw_boot).
-        # If devsw_boot is True, we will call bypass_dev_mode after reboot.
-        if is_dev or is_devsw_boot:
-            self.bypass_dev_mode()
-        if wait_for_dut_up:
-            self.wait_for_client()
-        logging.info("-[ModeSwitcher]-[ end mode_aware_reboot(%r, %s, ..) ]-",
-                     reboot_type, reboot_method.__name__)
-
-
-    def enable_rec_mode_and_reboot(self, usb_state=None):
-        """Switch to rec mode and reboot.
-
-        This method emulates the behavior of the old physical recovery switch,
-        i.e. switch ON + reboot + switch OFF, and the new keyboard controlled
-        recovery mode, i.e. just press Power + Esc + Refresh.
-
-        @param usb_state: A string, one of 'dut', 'host', or 'off'.
-        """
-        psc = self.servo.get_power_state_controller()
-        # Switch the USB key when AP is on, because there is a
-        # bug (b/172909077) - using "image_usbkey_direction:usb_state", when
-        # AP if off may cause not recognizing the file system,
-        # so system won't boot in recovery from USB.
-        # When the issue is fixed, it can be done when AP is off.
-        if usb_state:
-            self.servo.switch_usbkey(usb_state)
-        psc.power_off()
-        psc.power_on(psc.REC_ON)
-        # Check VBUS and pd state only if we are going to boot
-        # to ChromeOS in the recovery mode
-        if usb_state == 'dut':
-            self.bypasser.check_vbus_and_pd_state()
-
-
-    def _enable_rec_mode_force_mrc_and_reboot(self, usb_state=None):
-        """Switch to rec mode, enable force mrc cache retraining, and reboot.
-
-        This method emulates the behavior of the old physical recovery switch,
-        i.e. switch ON + reboot + switch OFF, and the new keyboard controlled
-        recovery mode, i.e. just press Power + Esc + Refresh.
-
-        @param usb_state: A string, one of 'dut', 'host', or 'off'.
-        """
-        psc = self.servo.get_power_state_controller()
-        # Switch the USB key when AP is on, because there is a
-        # bug (b/172909077) - using "image_usbkey_direction:usb_state", when
-        # AP if off may cause not recognizing the file system,
-        # so system won't boot in recovery from USB.
-        # When the issue is fixed, it can be done when AP is off.
-        if usb_state:
-            self.servo.switch_usbkey(usb_state)
-        psc.power_off()
-        psc.power_on(psc.REC_ON_FORCE_MRC)
-
-    def disable_rec_mode_and_reboot(self, usb_state=None):
-        """Disable the rec mode and reboot.
-
-        It is achieved by calling power state controller to do a normal
-        power on.
-        """
-        psc = self.servo.get_power_state_controller()
-        psc.power_off()
-        self.faft_framework.wait_for('ec_boot_to_pwr_button', 'Powering on')
-        psc.power_on(psc.REC_OFF)
-
-
-    def _enable_dev_mode_and_reboot(self):
-        """Switch to developer mode and reboot."""
-        raise NotImplementedError
-
-
-    def _enable_normal_mode_and_reboot(self):
-        """Switch to normal mode and reboot."""
-        raise NotImplementedError
-
-
-    # Redirects the following methods to FwBypasser
-    def bypass_dev_mode(self):
-        """Bypass the dev mode firmware logic to boot internal image."""
-        logging.info("-[bypass_dev_mode]-")
-        self.bypasser.bypass_dev_mode()
-
-
-    def bypass_dev_boot_usb(self):
-        """Bypass the dev mode firmware logic to boot USB."""
-        logging.info("-[bypass_dev_boot_usb]-")
-        self.bypasser.bypass_dev_boot_usb()
-
-
-    def bypass_dev_default_boot(self):
-        """Bypass the dev mode firmware logic to boot from default target."""
-        logging.info("-[bypass_dev_default_boot]-")
-        self.bypasser.bypass_dev_default_boot()
-
-
-    def bypass_rec_mode(self):
-        """Bypass the rec mode firmware logic to boot USB."""
-        logging.info("-[bypass_rec_mode]-")
-        self.bypasser.bypass_rec_mode()
-
-
-    def trigger_dev_to_rec(self):
-        """Trigger to the rec mode from the dev screen."""
-        self.bypasser.trigger_dev_to_rec()
-
-
-    def trigger_rec_to_dev(self):
-        """Trigger to the dev mode from the rec screen."""
-        self.bypasser.trigger_rec_to_dev()
-
-
-    def trigger_dev_to_normal(self):
-        """Trigger to the normal mode from the dev screen."""
-        self.bypasser.trigger_dev_to_normal()
-
-
-    def wait_for_client(self, timeout=180, retry_power_on=False,
-                        debounce_power_state=True, note=''):
-        """Wait for the client to come back online.
-
-        New remote processes will be launched if their used flags are enabled.
-
-        @param timeout: Time in seconds to wait for the client SSH daemon to
-                        come up.
-        @param retry_power_on: Try to power on the DUT if it isn't in S0.
-        @param debounce_power_state: Wait until power_state is the same two
-                                     times in a row to determine the actual
-                                     power_state.
-        @param note: Extra note to add to the end of the error text
-        @raise ConnectionError: Failed to connect DUT.
-        """
-        logging.info("-[FAFT]-[ start wait_for_client(%ds) ]---",
-                     timeout if retry_power_on else 0)
-        # Wait for the system to be powered on before trying the network
-        # Skip "None" result because that indicates lack of EC or problem
-        # querying the power state.
-        current_timer = 0
-        self.faft_framework.wait_for('delay_powerinfo_stable',
-                                     'checking power state')
-        power_state = self.faft_framework.get_power_state()
-
-        # The device may transition between states. Wait until the power state
-        # is stable for two seconds before determining the state.
-        if debounce_power_state:
-            last_state = power_state
-            power_state = DEBOUNCE_STATE
-
-        while (timeout > current_timer and
-               power_state not in (self.faft_framework.POWER_STATE_S0, None)):
-            time.sleep(2)
-            current_timer += 2
-            power_state = self.faft_framework.get_power_state()
-
-            # If the state changed, debounce it.
-            if debounce_power_state and power_state != last_state:
-                last_state = power_state
-                power_state = DEBOUNCE_STATE
-
-            logging.info('power state: %s', power_state)
-
-            # Only power-on the device if it has been consistently out of
-            # S0.
-            if (retry_power_on and
-                power_state not in (self.faft_framework.POWER_STATE_S0,
-                                    None, DEBOUNCE_STATE)):
-                logging.info("-[FAFT]-[ retry powering on the DUT ]---")
-                psc = self.servo.get_power_state_controller()
-                psc.retry_power_on()
-
-        # Use the last state if the device didn't reach a stable state in
-        # timeout seconds.
-        if power_state == DEBOUNCE_STATE:
-            power_state = last_state
-        if power_state not in (self.faft_framework.POWER_STATE_S0, None):
-            msg = 'DUT unexpectedly down, power state is %s.' % power_state
-            if note:
-                msg += ' %s' % note
-            raise ConnectionError(msg)
-
-        # Wait for the system to respond to ping before attempting ssh
-        if self.client_host.use_icmp and not self.client_host.ping_wait_up(
-                timeout):
-            logging.warning("-[FAFT]-[ system did not respond to ping ]")
-        if self.client_host.wait_up(timeout, host_is_down=True):
-            # Check the FAFT client is avaiable.
-            self.faft_client.system.is_available()
-            # Stop update-engine as it may change firmware/kernel.
-            self.faft_framework.faft_client.updater.stop_daemon()
-        else:
-            logging.error('wait_for_client() timed out.')
-            power_state = self.faft_framework.get_power_state()
-            msg = 'DUT is still down unexpectedly.'
-            if power_state:
-                msg += ' Power state: %s.' % power_state
-            if note:
-                msg += ' %s' % note
-            raise ConnectionError(msg)
-        logging.info("-[FAFT]-[ end wait_for_client ]-----")
-
-
-    def wait_for_client_offline(self, timeout=60, orig_boot_id=None):
-        """Wait for the client to come offline.
-
-        @param timeout: Time in seconds to wait the client to come offline.
-        @param orig_boot_id: A string containing the original boot id.
-        @raise ConnectionError: Failed to wait DUT offline.
-        """
-        if not self.client_host.ping_wait_down(timeout):
-            if orig_boot_id and self.client_host.get_boot_id() != orig_boot_id:
-                logging.warn('Reboot done very quickly.')
-                return
-            raise ConnectionError('DUT is still up unexpectedly')
-
-
-class _MenuSwitcher(_BaseModeSwitcher):
-    """Mode switcher via keyboard shortcuts for menu UI."""
-
-    FW_BYPASSER_CLASS = _KeyboardBypasser
-
-    def _enable_dev_mode_and_reboot(self):
-        """Switch to developer mode and reboot."""
-        logging.info("Enabling keyboard controlled developer mode")
-        # Rebooting EC with rec mode on. Should power on AP.
-        # Plug out USB disk for preventing recovery boot without warning
-        self.enable_rec_mode_and_reboot(usb_state='host')
-        self.wait_for_client_offline()
-        self.bypasser.trigger_rec_to_dev()
-
-    def _enable_normal_mode_and_reboot(self):
-        """Switch to normal mode and reboot."""
-        logging.info("Disabling keyboard controlled developer mode")
-        self.disable_rec_mode_and_reboot()
-        self.wait_for_client_offline()
-        self.bypasser.trigger_dev_to_normal()
-
-
-class _KeyboardDevSwitcher(_MenuSwitcher):
-    """Mode switcher via keyboard shortcuts for legacy clamshell UI."""
-
-    FW_BYPASSER_CLASS = _LegacyKeyboardBypasser
-
-
-class _JetstreamSwitcher(_BaseModeSwitcher):
-    """Mode switcher for Jetstream devices."""
-
-    FW_BYPASSER_CLASS = _JetstreamBypasser
-
-    def _enable_dev_mode_and_reboot(self):
-        """Switch to developer mode and reboot."""
-        logging.info("Enabling Jetstream developer mode")
-        self.enable_rec_mode_and_reboot(usb_state='host')
-        self.wait_for_client_offline()
-        self.bypasser.trigger_rec_to_dev()
-
-    def _enable_normal_mode_and_reboot(self):
-        """Switch to normal mode and reboot."""
-        logging.info("Disabling Jetstream developer mode")
-        self.servo.disable_development_mode()
-        self.enable_rec_mode_and_reboot(usb_state='host')
-        self.faft_framework.wait_for('firmware_screen', 'Disabling rec and rebooting')
-        self.disable_rec_mode_and_reboot(usb_state='host')
-
-
-class _TabletDetachableSwitcher(_BaseModeSwitcher):
-    """Mode switcher for legacy menu UI."""
-
-    FW_BYPASSER_CLASS = _TabletDetachableBypasser
-
-    def _enable_dev_mode_and_reboot(self):
-        """Switch to developer mode and reboot.
-
-        On tablets/ detachables, recovery entered by pressing pwr, vol up
-        & vol down buttons for 10s.
-           Menu options seen in RECOVERY screen:
-                 Enable Developer Mode
-                 Show Debug Info
-                 Power off*
-                 Language
-        """
-        logging.info('Enabling tablets/detachable recovery mode')
-        self.enable_rec_mode_and_reboot(usb_state='host')
-        self.wait_for_client_offline()
-        self.bypasser.trigger_rec_to_dev()
-
-    def _enable_normal_mode_and_reboot(self):
-        """Switch to normal mode and reboot.
-
-           Menu options seen in DEVELOPER WARNING screen:
-                 Developer Options
-                 Show Debug Info
-                 Enable Root Verification
-                 Power Off*
-                 Language
-           Menu options seen in TO_NORM screen:
-                 Confirm Enabling Verified Boot
-                 Cancel
-                 Power off*
-                 Language
-        Vol up button selects previous item, vol down button selects
-        next item and pwr button selects current activated item.
-        """
-        self.disable_rec_mode_and_reboot()
-        self.wait_for_client_offline()
-        self.bypasser.trigger_dev_to_normal()
-
-
-_SWITCHER_CLASSES = {
-    'menu_switcher': _MenuSwitcher,
-    'keyboard_dev_switcher': _KeyboardDevSwitcher,
-    'jetstream_switcher': _JetstreamSwitcher,
-    'tablet_detachable_switcher': _TabletDetachableSwitcher,
-}
-
-
-def create_mode_switcher(faft_framework):
-    """Creates a proper mode switcher.
-
-    @param faft_framework: The main FAFT framework object.
-    """
-    switcher_type = faft_framework.faft_config.mode_switcher_type
-    switcher_class = _SWITCHER_CLASSES.get(switcher_type, None)
-    if switcher_class is None:
-        raise NotImplementedError('Not supported mode_switcher_type: %s',
-                                  switcher_type)
-    else:
-        return switcher_class(faft_framework)
diff --git a/server/cros/provision_unittest.py b/server/cros/provision_unittest.py
deleted file mode 100755
index d593bf3..0000000
--- a/server/cros/provision_unittest.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/python3
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import unittest
-
-import common
-from autotest_lib.server.cros import provision
-
-_CROS_VERSION_SAMPLES = [
-    'cave-release/R57-9030.0.0',
-    'grunt-llvm-next-toolchain-tryjob/R69-10851.0.0-b2726174'
-    'eve-tot-chrome-pfq-informational/R69-10822.0.0-b2700960',
-]
-_CROS_ANDROID_VERSION_SAMPLES = [
-    'git_nyc-mr1-arc/cheets_arm-user/4866647',
-    'git_nyc-mr1-arc/cheets_arm-user/P6244267',
-    'git_nyc-mr1-arc/cheets_x86-user/P6256537',
-]
-
-
-class ActionTestCase(unittest.TestCase):
-    """Tests for Action functions."""
-    #pylint:disable=missing-docstring
-
-    def test__get_label_action_with_keyval_label(self):
-        got = provision._get_label_action('cros-version:foo')
-        self.assertEqual(got, provision._Action('cros-version', 'foo'))
-
-    def test__get_label_action_with_plain_label(self):
-        got = provision._get_label_action('webcam')
-        self.assertEqual(got, provision._Action('webcam', None))
-
-    def test__get_label_action_with_empty_string(self):
-        got = provision._get_label_action('')
-        self.assertEqual(got, provision._Action('', None))
-
-
-class ImageParsingTests(unittest.TestCase):
-    """Unit tests for `provision.get_version_label_prefix()`."""
-
-    def _do_test_prefixes(self, expected, version_samples):
-        for v in version_samples:
-            prefix = provision.get_version_label_prefix(v)
-            self.assertEqual(prefix, expected)
-
-    def test_cros_prefix(self):
-        """Test handling of Chrome OS version strings."""
-        self._do_test_prefixes(provision.CROS_VERSION_PREFIX,
-                               _CROS_VERSION_SAMPLES)
-
-    def test_cros_android_prefix(self):
-        """Test handling of Chrome OS version strings."""
-        self._do_test_prefixes(provision.CROS_ANDROID_VERSION_PREFIX,
-                               _CROS_ANDROID_VERSION_SAMPLES)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/cros/repair/__init__.py b/server/cros/repair/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/server/cros/repair/__init__.py
+++ /dev/null
diff --git a/server/cros/repair/common.py b/server/cros/repair/common.py
deleted file mode 100644
index 90fbb54..0000000
--- a/server/cros/repair/common.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/python3
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Build relative paths for files with root of autotest_lib.
-
-import os, sys
-dirname = os.path.dirname(sys.modules[__name__].__file__)
-autotest_dir = os.path.abspath(os.path.join(dirname, '../../../..'))
-client_dir = os.path.join(autotest_dir, 'client')
-sys.path.insert(0, client_dir)
-import setup_modules
-sys.path.pop(0)
-setup_modules.setup(base_path=autotest_dir, root_module_name='autotest_lib')
diff --git a/server/cros/repair/mac_address_helper.py b/server/cros/repair/mac_address_helper.py
deleted file mode 100644
index 2d63d8c..0000000
--- a/server/cros/repair/mac_address_helper.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-import re
-
-import common
-from autotest_lib.client.common_lib import error
-
-
-class MacAddressHelper():
-    """Verify and update cached NIC mac address on servo.
-
-    Servo_v4 plugged to the DUT and providing NIC for that. We caching mac
-    address on servod side to better debugging.
-    """
-
-    # HUB and NIC VID/PID.
-    # Values presented as the string of the hex without 0x to match
-    # representation in sysfs (idVendor/idProduct).
-    HUB_VID = '04b4'
-    HUB_PID = '6502'
-    NIC_VID = '0bda'
-    NIC_PID = '8153'
-
-    # Regex to check mac address format.
-    # eg: f4:f5:e8:50:e9:45
-    RE_MACADDR = re.compile('^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$')
-
-    def is_supported(self, host):
-        """Verify if setup is support cached NIC mac address on servo
-
-        @param host:    CrosHost instance
-        """
-        if not host._servo_host.is_labstation():
-            logging.info('Only servo_v4 has NIC; Skipping the action')
-            return False
-        if not host.servo.has_control('macaddr'):
-            logging.info('"macaddr" control not supported;'
-                         'Skipping the action')
-            return False
-        return True
-
-    def update_if_needed(self, host):
-        """Update the cached NIC mac address on servo
-
-        The process will verify if NIC mac changes and update only if
-        it required.
-
-        @param host:    CrosHost instance
-        """
-
-        if not self.is_supported(host):
-            return
-
-        servo = host.servo
-        # Path to the NIC has to be located in the HUB.
-        # eg.
-        # HUB: /sys/bus/usb/devices/1-1
-        # NIC: /sys/bus/usb/devices/1-1.1
-        hub_path = self._get_device_path(host, None, self.HUB_VID,
-                                         self.HUB_PID)
-        if not hub_path or hub_path == '.':
-            raise Exception('The servo_v4 HUB not detected from DUT.')
-        logging.debug('Path to the servo_v4 HUB device: %s', hub_path)
-        nic_path = self._get_device_path(host, hub_path, self.NIC_VID,
-                                         self.NIC_PID)
-        if not nic_path or nic_path == '.':
-            raise Exception('The servo_v4 NIC not detected in HUB folder.')
-        logging.debug('Path to the servo_v4 NIC device: %s', nic_path)
-        if hub_path == nic_path or not nic_path.startswith(hub_path):
-            raise Exception('The servo_v4 NIC was detect out of servo_v4 HUB')
-
-        macaddr = self._get_mac_address(host, nic_path)
-        if not macaddr:
-            raise Exception('Failed to extract mac address from host.')
-
-        cached_mac = self._get_cached_mac_address(host)
-        if not cached_mac or macaddr != cached_mac:
-            try:
-                servo.set('macaddr', macaddr)
-                logging.info('Successfully updated the servo "macaddr"!')
-            except error.TestFail as e:
-                logging.debug('Fail to update macaddr value; %s', e)
-                raise Exception('Fail to update the "macaddr" value!')
-        else:
-            logging.info('The servo "macaddr" doe not need update.')
-
-    def _get_cached_mac_address(self, host):
-        """Get NIC mac address from servo cache"""
-        try:
-            return host.servo.get('macaddr')
-        except error.TestFail as e:
-            logging.debug('(Non-critical) Fail to get macaddr: %s', e)
-            return None
-
-    def _get_mac_address(self, host, nic_path):
-        """Get NIC mac address from host
-
-        @param host:        CrosHost instance
-        @param nic_path:    Path to network device on the host
-        """
-        cmd = r'find %s/ | grep /net/ | grep /address' % nic_path
-        res = host.run(cmd,
-                       timeout=30,
-                       ignore_status=True,
-                       ignore_timeout=True)
-        if not res:
-            logging.info('Timeout during retriving NIC address files.')
-            return None
-        addrs = res.stdout.splitlines()
-        if not addrs or len(addrs) == 0:
-            logging.info('No NIC address file found.')
-            return None
-        if len(addrs) > 1:
-            logging.info('More than one NIC address file found.')
-            return None
-        logging.info('Found NIC address file: %s', addrs[0])
-        cmd = r'cat %s' % addrs[0]
-        res = host.run(cmd,
-                       timeout=30,
-                       ignore_status=True,
-                       ignore_timeout=True)
-        if not res:
-            logging.info('Timeout during attemp read NIC address file: %s',
-                         addrs[0])
-            return None
-        mac_addr = res.stdout.strip()
-        if not self.RE_MACADDR.match(mac_addr):
-            logging.info('incorrect format of the mac address: %s', mac_addr)
-            return None
-        logging.info('Servo_v4 NIC mac address from DUT side: %s', mac_addr)
-        return mac_addr
-
-    def _get_device_path(self, host, base_path, vid, pid):
-        """Find a device by VID/PID under particular path.
-
-        1) Get path to the unique idVendor file with VID
-        2) Get path to the unique idProduct file with PID
-        3) Get directions of both file and compare them
-
-        @param host:        CrosHost instance
-        @param base_path:   Path to the directory where to look for the device.
-        @param vid:         Vendor ID of the looking device.
-        @param pid:         Product ID of the looking device.
-
-        @returns: path to the folder of the device
-        """
-
-        def _run(cmd):
-            res = host.run(cmd,
-                           timeout=30,
-                           ignore_status=True,
-                           ignore_timeout=True)
-            l = res.stdout.splitlines()
-            if not l or len(l) != 1:
-                return None
-            return l[0]
-
-        if not base_path:
-            base_path = '/sys/bus/usb/devices/*/'
-        else:
-            base_path += '*/'
-        cmd_template = 'grep -l %s $(find %s -maxdepth 1 -name %s)'
-        vid_path = _run(cmd_template % (vid, base_path, 'idVendor'))
-        if not vid_path:
-            return None
-
-        pid_path = _run(cmd_template % (pid, base_path, 'idProduct'))
-        if not pid_path:
-            return None
-
-        # check if both files locates in the same folder
-        return _run('LC_ALL=C comm -12 <(dirname %s) <(dirname %s)' %
-                    (vid_path, pid_path))
diff --git a/server/cros/resource_monitor.py b/server/cros/resource_monitor.py
deleted file mode 100644
index c8ec5f5..0000000
--- a/server/cros/resource_monitor.py
+++ /dev/null
@@ -1,304 +0,0 @@
-# Lint as: python2, python3
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import logging
-import csv
-import six
-import random
-import re
-import collections
-
-from autotest_lib.client.common_lib.cros import path_utils
-
-class ResourceMonitorRawResult(object):
-    """Encapsulates raw resource_monitor results."""
-
-    def __init__(self, raw_results_filename):
-        self._raw_results_filename = raw_results_filename
-
-
-    def get_parsed_results(self):
-        """Constructs parsed results from the raw ones.
-
-        @return ResourceMonitorParsedResult object
-
-        """
-        return ResourceMonitorParsedResult(self.raw_results_filename)
-
-
-    @property
-    def raw_results_filename(self):
-        """@return string filename storing the raw top command output."""
-        return self._raw_results_filename
-
-
-class IncorrectTopFormat(Exception):
-    """Thrown if top output format is not as expected"""
-    pass
-
-
-def _extract_value_before_single_keyword(line, keyword):
-    """Extract word occurring immediately before the specified keyword.
-
-    @param line string the line in which to search for the keyword.
-    @param keyword string the keyword to look for. Can be a regexp.
-    @return string the word just before the keyword.
-
-    """
-    pattern = ".*?(\S+) " + keyword
-    matches = re.match(pattern, line)
-    if matches is None or len(matches.groups()) != 1:
-        raise IncorrectTopFormat
-
-    return matches.group(1)
-
-
-def _extract_values_before_keywords(line, *args):
-    """Extract the words occurring immediately before each specified
-        keyword in args.
-
-    @param line string the string to look for the keywords.
-    @param args variable number of string args the keywords to look for.
-    @return string list the words occurring just before each keyword.
-
-    """
-    line_nocomma = re.sub(",", " ", line)
-    line_singlespace = re.sub("\s+", " ", line_nocomma)
-
-    return [_extract_value_before_single_keyword(
-            line_singlespace, arg) for arg in args]
-
-
-def _find_top_output_identifying_pattern(line):
-    """Return true iff the line looks like the first line of top output.
-
-    @param line string to look for the pattern
-    @return boolean
-
-    """
-    pattern ="\s*top\s*-.*up.*users.*"
-    matches = re.match(pattern, line)
-    return matches is not None
-
-
-class ResourceMonitorParsedResult(object):
-    """Encapsulates logic to parse and represent top command results."""
-
-    _columns = ["Time", "UserCPU", "SysCPU", "NCPU", "Idle",
-            "IOWait", "IRQ", "SoftIRQ", "Steal",
-            "MemUnits", "UsedMem", "FreeMem",
-            "SwapUnits", "UsedSwap", "FreeSwap"]
-    UtilValues = collections.namedtuple('UtilValues', ' '.join(_columns))
-
-    def __init__(self, raw_results_filename):
-        """Construct a ResourceMonitorResult.
-
-        @param raw_results_filename string filename of raw batch top output.
-
-        """
-        self._raw_results_filename = raw_results_filename
-        self.parse_resource_monitor_results()
-
-
-    def parse_resource_monitor_results(self):
-        """Extract utilization metrics from output file."""
-        self._utils_over_time = []
-
-        with open(self._raw_results_filename, "r") as results_file:
-            while True:
-                curr_line = '\n'
-                while curr_line != '' and \
-                        not _find_top_output_identifying_pattern(curr_line):
-                    curr_line = results_file.readline()
-                if curr_line == '':
-                    break
-                try:
-                    time, = _extract_values_before_keywords(curr_line, "up")
-
-                    # Ignore one line.
-                    _ = results_file.readline()
-
-                    # Get the cpu usage.
-                    curr_line = results_file.readline()
-                    (cpu_user, cpu_sys, cpu_nice, cpu_idle, io_wait, irq, sirq,
-                            steal) = _extract_values_before_keywords(curr_line,
-                            "us", "sy", "ni", "id", "wa", "hi", "si", "st")
-
-                    # Get memory usage.
-                    curr_line = results_file.readline()
-                    (mem_units, mem_free,
-                            mem_used) = _extract_values_before_keywords(
-                            curr_line, "Mem", "free", "used")
-
-                    # Get swap usage.
-                    curr_line = results_file.readline()
-                    (swap_units, swap_free,
-                            swap_used) = _extract_values_before_keywords(
-                            curr_line, "Swap", "free", "used")
-
-                    curr_util_values = ResourceMonitorParsedResult.UtilValues(
-                            Time=time, UserCPU=cpu_user,
-                            SysCPU=cpu_sys, NCPU=cpu_nice, Idle=cpu_idle,
-                            IOWait=io_wait, IRQ=irq, SoftIRQ=sirq, Steal=steal,
-                            MemUnits=mem_units, UsedMem=mem_used,
-                            FreeMem=mem_free,
-                            SwapUnits=swap_units, UsedSwap=swap_used,
-                            FreeSwap=swap_free)
-                    self._utils_over_time.append(curr_util_values)
-                except IncorrectTopFormat:
-                    logging.error(
-                            "Top output format incorrect. Aborting parse.")
-                    return
-
-
-    def __repr__(self):
-        output_stringfile = six.StringIO()
-        self.save_to_file(output_stringfile)
-        return output_stringfile.getvalue()
-
-
-    def save_to_file(self, file):
-        """Save parsed top results to file
-
-        @param file file object to write to
-
-        """
-        if len(self._utils_over_time) < 1:
-            logging.warning("Tried to save parsed results, but they were "
-                    "empty. Skipping the save.")
-            return
-        csvwriter = csv.writer(file, delimiter=',')
-        csvwriter.writerow(self._utils_over_time[0]._fields)
-        for row in self._utils_over_time:
-            csvwriter.writerow(row)
-
-
-    def save_to_filename(self, filename):
-        """Save parsed top results to filename
-
-        @param filename string filepath to write to
-
-        """
-        out_file = open(filename, "wb")
-        self.save_to_file(out_file)
-        out_file.close()
-
-
-class ResourceMonitorConfig(object):
-    """Defines a single top run."""
-
-    DEFAULT_MONITOR_PERIOD = 3
-
-    def __init__(self, monitor_period=DEFAULT_MONITOR_PERIOD,
-            rawresult_output_filename=None):
-        """Construct a ResourceMonitorConfig.
-
-        @param monitor_period float seconds between successive top refreshes.
-        @param rawresult_output_filename string filename to output the raw top
-                                                results to
-
-        """
-        if monitor_period < 0.1:
-            logging.info('Monitor period must be at least 0.1s.'
-                    ' Given: %r. Defaulting to 0.1s', monitor_period)
-            monitor_period = 0.1
-
-        self._monitor_period = monitor_period
-        self._server_outfile = rawresult_output_filename
-
-
-class ResourceMonitor(object):
-    """Delegate to run top on a client.
-
-    Usage example (call from a test):
-    rmc = resource_monitor.ResourceMonitorConfig(monitor_period=1,
-            rawresult_output_filename=os.path.join(self.resultsdir,
-                                                    'topout.txt'))
-    with resource_monitor.ResourceMonitor(self.context.client.host, rmc) as rm:
-        rm.start()
-        <operation_to_monitor>
-        rm_raw_res = rm.stop()
-        rm_res = rm_raw_res.get_parsed_results()
-        rm_res.save_to_filename(
-                os.path.join(self.resultsdir, 'resource_mon.csv'))
-
-    """
-
-    def __init__(self, client_host, config):
-        """Construct a ResourceMonitor.
-
-        @param client_host: SSHHost object representing a remote ssh host
-
-        """
-        self._client_host = client_host
-        self._config = config
-        self._command_top = path_utils.must_be_installed(
-                'top', host=self._client_host)
-        self._top_pid = None
-
-
-    def __enter__(self):
-        return self
-
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        if self._top_pid is not None:
-            self._client_host.run('kill %s && rm %s' %
-                    (self._top_pid, self._client_outfile), ignore_status=True)
-        return True
-
-
-    def start(self):
-        """Run top and save results to a temp file on the client."""
-        if self._top_pid is not None:
-            logging.debug("Tried to start monitoring before stopping. "
-                    "Ignoring request.")
-            return
-
-        # Decide where to write top's output to (on the client).
-        random_suffix = random.random()
-        self._client_outfile = '/tmp/topcap-%r' % random_suffix
-
-        # Run top on the client.
-        top_command = '%s -b -d%d > %s' % (self._command_top,
-                self._config._monitor_period, self._client_outfile)
-        logging.info('Running top.')
-        self._top_pid = self._client_host.run_background(top_command)
-        logging.info('Top running with pid %s', self._top_pid)
-
-
-    def stop(self):
-        """Stop running top and return the results.
-
-        @return ResourceMonitorRawResult object
-
-        """
-        logging.debug("Stopping monitor")
-        if self._top_pid is None:
-            logging.debug("Tried to stop monitoring before starting. "
-                    "Ignoring request.")
-            return
-
-        # Stop top on the client.
-        self._client_host.run('kill %s' % self._top_pid, ignore_status=True)
-
-        # Get the top output file from the client onto the server.
-        if self._config._server_outfile is None:
-            self._config._server_outfile = self._client_outfile
-        self._client_host.get_file(
-                self._client_outfile, self._config._server_outfile)
-
-        # Delete the top output file from client.
-        self._client_host.run('rm %s' % self._client_outfile,
-                ignore_status=True)
-
-        self._top_pid = None
-        logging.info("Saved resource monitor results at %s",
-                self._config._server_outfile)
-        return ResourceMonitorRawResult(self._config._server_outfile)
diff --git a/server/cros/resource_monitor_unittest.py b/server/cros/resource_monitor_unittest.py
deleted file mode 100644
index 4196885..0000000
--- a/server/cros/resource_monitor_unittest.py
+++ /dev/null
@@ -1,305 +0,0 @@
-# Lint as: python2, python3
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import logging
-import unittest
-import re
-import csv
-import common
-import os
-
-
-from autotest_lib.server.cros import resource_monitor
-from autotest_lib.server.hosts import abstract_ssh
-from autotest_lib.server import utils
-import six
-from six.moves import map
-from six.moves import range
-
-class HostMock(abstract_ssh.AbstractSSHHost):
-    """Mocks a host object."""
-
-    TOP_PID = '1234'
-
-    def _initialize(self, test_env):
-        self.top_is_running = False
-
-        # Keep track of whether the top raw output file exists on the system,
-        # and if it does, where it is.
-        self.top_output_file_path = None
-
-        # Keep track of whether the raw top output file is currently being
-        # written to by top.
-        self.top_output_file_is_open = False
-        self.test_env = test_env
-
-
-    def get_file(self, src, dest):
-        pass
-
-
-    def called_unsupported_command(self, command):
-        """Raises assertion error when called.
-
-        @param command string the unsupported command called.
-
-        """
-        raise AssertionError(
-                "ResourceMonitor called unsupported command %s" % command)
-
-
-    def _process_top(self, cmd_args, cmd_line):
-        """Process top command.
-
-        @param cmd_args string_list of command line args.
-        @param cmd_line string the command to run.
-
-        """
-        self.test_env.assertFalse(self.top_is_running,
-                msg="Top must not already be running.")
-        self.test_env.assertFalse(self.top_output_file_is_open,
-                msg="The top output file should not be being written "
-                "to before top is started")
-        self.test_env.assertIsNone(self.top_output_file_path,
-                msg="The top output file should not exist"
-                "before top is started")
-        try:
-            self.redirect_index = cmd_args.index(">")
-            self.top_output_file_path = cmd_args[self.redirect_index + 1]
-        except (ValueError, IndexError):
-            self.called_unsupported_command(cmd_line)
-
-        self.top_is_running = True
-        self.top_output_file_is_open = True
-
-        return HostMock.TOP_PID
-
-
-    def _process_kill(self, cmd_args, cmd_line):
-        """Process kill command.
-
-        @param cmd_args string_list of command line args.
-        @param cmd_line string the command to run.
-
-        """
-        try:
-            if cmd_args[1].startswith('-'):
-                pid_to_kill = cmd_args[2]
-            else:
-                pid_to_kill = cmd_args[1]
-        except IndexError:
-            self.called_unsupported_command(cmd_line)
-
-        self.test_env.assertEqual(pid_to_kill, HostMock.TOP_PID,
-                msg="Wrong pid (%r) killed . Top pid is %r." % (pid_to_kill,
-                HostMock.TOP_PID))
-        self.test_env.assertTrue(self.top_is_running,
-                msg="Top must be running before we try to kill it")
-
-        self.top_is_running = False
-        self.top_output_file_is_open = False
-
-
-    def _process_rm(self, cmd_args, cmd_line):
-        """Process rm command.
-
-        @param cmd_args string list list of command line args.
-        @param cmd_line string the command to run.
-
-        """
-        try:
-            if cmd_args[1].startswith('-'):
-                file_to_rm = cmd_args[2]
-            else:
-                file_to_rm = cmd_args[1]
-        except IndexError:
-            self.called_unsupported_command(cmd_line)
-
-        self.test_env.assertEqual(file_to_rm, self.top_output_file_path,
-                msg="Tried to remove file that is not the top output file.")
-        self.test_env.assertFalse(self.top_output_file_is_open,
-                msg="Tried to remove top output file while top is still "
-                "writing to it.")
-        self.test_env.assertFalse(self.top_is_running,
-                msg="Top was still running when we tried to remove"
-                "the top output file.")
-        self.test_env.assertIsNotNone(self.top_output_file_path)
-
-        self.top_output_file_path = None
-
-
-    def _run_single_cmd(self, cmd_line, *args, **kwargs):
-        """Run a single command on host.
-
-        @param cmd_line command to run on the host.
-
-        """
-        # Make the input a little nicer.
-        cmd_line = cmd_line.strip()
-        cmd_line = re.sub(">", " > ", cmd_line)
-
-        cmd_args = re.split("\s+", cmd_line)
-        self.test_env.assertTrue(len(cmd_args) >= 1)
-        command = cmd_args[0]
-        if (command == "top"):
-            return self._process_top(cmd_args, cmd_line)
-        elif (command == "kill"):
-            return self._process_kill(cmd_args, cmd_line)
-        elif(command == "rm"):
-            return self._process_rm(cmd_args, cmd_line)
-        else:
-            logging.warning("Called unemulated command %r", cmd_line)
-            return None
-
-
-    def run(self, cmd_line, *args, **kwargs):
-        """Run command(s) on host.
-
-        @param cmd_line command to run on the host.
-        @return CmdResult object.
-
-        """
-        cmds = re.split("&&", cmd_line)
-        for cmd in cmds:
-            self._run_single_cmd(cmd)
-        return utils.CmdResult(exit_status=0)
-
-
-    def run_background(self, cmd_line, *args, **kwargs):
-        """Run command in background on host.
-
-        @param cmd_line command to run on the host.
-
-        """
-        return self._run_single_cmd(cmd_line, args, kwargs)
-
-
-    def is_monitoring(self):
-        """Return true iff host is currently running top and writing output
-            to a file.
-        """
-        return self.top_is_running and self.top_output_file_is_open and (
-            self.top_output_file_path is not None)
-
-
-    def monitoring_stopped(self):
-        """Return true iff host is not running top and all top output files are
-            closed.
-        """
-        return not self.is_monitoring()
-
-
-class ResourceMonitorTest(unittest.TestCase):
-    """Tests the non-trivial functionality of ResourceMonitor."""
-
-    def setUp(self):
-        self.topoutfile = '/tmp/resourcemonitorunittest-1234'
-        self.monitor_period = 1
-        self.rm_conf = resource_monitor.ResourceMonitorConfig(
-                monitor_period=self.monitor_period,
-                rawresult_output_filename=self.topoutfile)
-        self.host = HostMock(self)
-
-
-    def test_normal_operation(self):
-        """Checks that normal (i.e. no exceptions, etc.) execution works."""
-        self.assertFalse(self.host.is_monitoring())
-        with resource_monitor.ResourceMonitor(self.host, self.rm_conf) as rm:
-            self.assertFalse(self.host.is_monitoring())
-            for i in range(3):
-                rm.start()
-                self.assertTrue(self.host.is_monitoring())
-                rm.stop()
-                self.assertTrue(self.host.monitoring_stopped())
-        self.assertTrue(self.host.monitoring_stopped())
-
-
-    def test_forgot_to_stop_monitor(self):
-        """Checks that resource monitor is cleaned up even if user forgets to
-            explicitly stop it.
-        """
-        self.assertFalse(self.host.is_monitoring())
-        with resource_monitor.ResourceMonitor(self.host, self.rm_conf) as rm:
-            self.assertFalse(self.host.is_monitoring())
-            rm.start()
-            self.assertTrue(self.host.is_monitoring())
-        self.assertTrue(self.host.monitoring_stopped())
-
-
-    def test_unexpected_interruption_while_monitoring(self):
-        """Checks that monitor is cleaned up upon unexpected interrupt."""
-        self.assertFalse(self.host.is_monitoring())
-
-        with resource_monitor.ResourceMonitor(self.host, self.rm_conf) as rm:
-            self.assertFalse(self.host.is_monitoring())
-            rm.start()
-            self.assertTrue(self.host.is_monitoring())
-            raise KeyboardInterrupt
-
-        self.assertTrue(self.host.monitoring_stopped())
-
-
-class ResourceMonitorResultTest(unittest.TestCase):
-    """Functional tests for ResourceMonitorParsedResult."""
-
-    def setUp(self):
-        self._res_dir = os.path.join(
-                            os.path.dirname(os.path.realpath(__file__)),
-                            'res_resource_monitor')
-
-
-    def run_with_test_data(self, testdata_file, testans_file):
-        """Parses testdata_file with the parses, and checks that results
-            are the same as those in testans_file.
-
-        @param testdata_file string filename containing top output to test.
-        @param testans_file string filename containing answers to the test.
-
-        """
-        parsed_results = resource_monitor.ResourceMonitorParsedResult(
-                testdata_file)
-        with open(testans_file, "r") as testans:
-            csvreader = csv.reader(testans)
-            columns = next(csvreader)
-            self.assertEqual(list(columns),
-                    resource_monitor.ResourceMonitorParsedResult._columns)
-            utils_over_time = []
-            for util_val in map(
-                    resource_monitor.
-                            ResourceMonitorParsedResult.UtilValues._make,
-                    csvreader):
-                utils_over_time.append(util_val)
-            self.assertEqual(utils_over_time, parsed_results._utils_over_time)
-
-
-    def test_full_data(self):
-        """General test with many possible changes to input."""
-        self.run_with_test_data(
-                os.path.join(self._res_dir, 'top_test_data.txt'),
-                os.path.join(self._res_dir, 'top_test_data_ans.csv'))
-
-
-    def test_whitespace_ridden(self):
-        """Tests resilience to arbitrary whitespace characters between fields"""
-        self.run_with_test_data(
-                os.path.join(self._res_dir, 'top_whitespace_ridden.txt'),
-                os.path.join(self._res_dir, 'top_whitespace_ridden_ans.csv'))
-
-
-    def test_field_order_changed(self):
-        """Tests resilience to changes in the order of fields
-            (e.g., if the Mem free/used fields change order in the input).
-        """
-        self.run_with_test_data(
-                os.path.join(self._res_dir, 'top_field_order_changed.txt'),
-                os.path.join(self._res_dir, 'top_field_order_changed_ans.csv'))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/cros/telemetry_runner.py b/server/cros/telemetry_runner.py
deleted file mode 100644
index b8d4e0e..0000000
--- a/server/cros/telemetry_runner.py
+++ /dev/null
@@ -1,706 +0,0 @@
-# Lint as: python2, python3
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import abc
-import json
-import logging
-import numbers
-import os
-import tempfile
-import six
-
-import numpy
-
-import common
-from autotest_lib.client.common_lib import error, utils
-from autotest_lib.server.cros import telemetry_setup
-
-TELEMETRY_RUN_BENCHMARKS_SCRIPT = 'tools/perf/run_benchmark'
-TELEMETRY_RUN_TESTS_SCRIPT = 'tools/telemetry/run_tests'
-TELEMETRY_RUN_GPU_TESTS_SCRIPT = 'content/test/gpu/run_gpu_integration_test.py'
-TELEMETRY_TIMEOUT_MINS = 150
-
-DUT_CHROME_ROOT = '/usr/local/telemetry/src'
-
-CHART_JSON_RESULT = 'results-chart.json'
-HISTOGRAM_SET_RESULT = 'histograms.json'
-PROFILE_ARTIFACTS = 'artifacts'
-
-# Result Statuses
-SUCCESS_STATUS = 'SUCCESS'
-WARNING_STATUS = 'WARNING'
-FAILED_STATUS = 'FAILED'
-
-# A list of telemetry tests that cannot run on dut.
-ON_DUT_BLOCKLIST = [
-        'loading.desktop',  # crbug/882299
-        'rendering.desktop',  # crbug/882291
-]
-
-
-class TelemetryResult(object):
-    """Class to represent the results of a telemetry run.
-
-    This class represents the results of a telemetry run, whether it ran
-    successfully, failed, or had warnings.
-    """
-
-    def __init__(self, exit_code=0, stdout='', stderr=''):
-        """Initializes this TelemetryResultObject instance.
-
-        @param exit_code: Exit code of the telemetry run.
-        @param stdout: Stdout of the telemetry run.
-        @param stderr: Stderr of the telemetry run.
-        """
-        if exit_code == 0:
-            self.status = SUCCESS_STATUS
-        else:
-            self.status = FAILED_STATUS
-
-        self._stdout = stdout
-        self._stderr = stderr
-        self.output = '\n'.join([stdout, stderr])
-
-
-class TelemetryRunnerFactory(object):
-    """A factory class to determine TelemetryRunner subclass to be used.
-
-    The TelemetryRunner class, today, has various ways to execute the telemetry
-    test. The test can be executed locally (using a tool like test_that) or can
-    be executed in the Lab environment - for this usecase, either the drone OR
-    the devserver can be used.
-
-    A Factory class offloads this determination overhead from the clients. Users
-    of the TelemetryRunner class are highly encouraged to go through this
-    Factory class while determining the correct TelemetryRunner subclass.
-    """
-
-    def get_runner(self, host, local=False, telemetry_on_dut=True):
-        """Method to determine which TelemetryRunner subclass to use."""
-        if local:
-            return LocalTelemetryRunner(host, telemetry_on_dut)
-        else:
-            return DroneTelemetryRunner(host, telemetry_on_dut)
-
-
-class TelemetryRunner(six.with_metaclass(abc.ABCMeta, object)):
-    """Class responsible for telemetry for a given build.
-
-    This class will extract and install telemetry environment and is
-    responsible for executing the telemetry benchmarks and returning their
-    output to the caller.
-    """
-
-    def __init__(self, host, telemetry_on_dut=True):
-        """Initializes this telemetry runner instance.
-
-        If telemetry is not installed for this build, it will be.
-
-        @param host: Host where the test will be run.
-        @param telemetry_on_dut: If set, telemetry itself (the test harness)
-                                 will run on dut.
-                                 It decides browser=[system|cros-chrome]
-        """
-        self._host = host
-        self._telemetry_path = None
-        self._perf_value_writer = None
-        self._setup_telemetry()
-        self._telemetry_on_dut = telemetry_on_dut
-        self._benchmark_deps = None
-        logging.debug('Telemetry Path: %s', self._telemetry_path)
-
-    def __enter__(self):
-        """Called while entering context manager; does nothing."""
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        """Called while exiting context manager."""
-
-    @abc.abstractmethod
-    def _setup_telemetry(self):
-        """Set up telemetry environment."""
-
-    def _get_telemetry_cmd(self, script, test_or_benchmark, output_format,
-                           *args, **kwargs):
-        """Build command to execute telemetry based on script and benchmark.
-
-        @param script: Telemetry script we want to run. For example:
-                       [path_to_telemetry_src]/src/tools/telemetry/run_tests.
-        @param test_or_benchmark: Name of the test or benchmark we want to run,
-                                  with the page_set (if required) as part of
-                                  the string.
-        @param output_format: Format of the json result file: histogram or
-                              chart-json.
-        @param args: additional list of arguments to pass to the script.
-        @param kwargs: additional list of keyword arguments to pass to the
-                       script.
-
-        @returns Full telemetry command to execute the script.
-        """
-        telemetry_cmd = []
-        no_verbose = kwargs.get('no_verbose', False)
-
-        output_dir = (DUT_CHROME_ROOT
-                      if self._telemetry_on_dut else self._telemetry_path)
-        # Create a temp directory to hold single test run.
-        if self._perf_value_writer:
-            output_dir = os.path.join(
-                    output_dir, self._perf_value_writer.tmpdir.strip('/'))
-
-        if self._telemetry_on_dut:
-            telemetry_cmd.extend([
-                    self._host.ssh_command(
-                            alive_interval=900, connection_attempts=4),
-                    'python2',
-                    script,
-                    '--output-format=%s' % output_format,
-                    '--output-dir=%s' % output_dir,
-                    '--browser=system',
-            ])
-        else:
-            telemetry_cmd.extend([
-                    'python2',
-                    script,
-                    '--browser=cros-chrome',
-                    '--output-format=%s' % output_format,
-                    '--output-dir=%s' % output_dir,
-                    '--remote=%s' % self._host.hostname,
-            ])
-            if self._host.host_port != self._host.hostname:
-                # If the user specify a different port for the DUT, we should
-                # use different telemetry argument to set it up.
-                #
-                # e.g. When user is running experiments with ssh port
-                # forwarding, they specify remote as 127.0.0.1:2222. Now
-                # host_port is 127.0.0.1:2222 and hostname is 127.0.0.1
-                # port is 2222
-                telemetry_cmd.append('--remote-ssh-port=%s' % self._host.port)
-
-        if not no_verbose:
-            telemetry_cmd.append('--verbose')
-        telemetry_cmd.extend(args)
-        telemetry_cmd.append(test_or_benchmark)
-
-        return ' '.join(telemetry_cmd)
-
-    def _scp_telemetry_results_cmd(self, perf_results_dir, output_format,
-                                   artifacts):
-        """Build command to copy the telemetry results from the work directory.
-
-        @param perf_results_dir: directory path where test output is to be
-                                 collected.
-        @param output_format: Format of the json result file: histogram or
-                              chart-json.
-        @param artifacts: Whether we want to copy artifacts directory.
-
-        @returns SCP command to copy the results json to the specified
-                 directory.
-        """
-        if not perf_results_dir:
-            return ''
-
-        output_filename = CHART_JSON_RESULT
-        if output_format == 'histograms':
-            output_filename = HISTOGRAM_SET_RESULT
-        scp_cmd = []
-        if self._telemetry_on_dut:
-            scp_cmd.extend(['scp', '-r'])
-            scp_cmd.append(
-                    self._host.make_ssh_options(
-                            alive_interval=900, connection_attempts=4))
-            if not self._host.is_default_port:
-                scp_cmd.append('-P %d' % self._host.port)
-            src = 'root@%s:%s' % (self._host.hostname, DUT_CHROME_ROOT)
-        else:
-            # Use rsync --remove-source-file to move rather than copy from
-            # work dir. This is because each run will generate certain artifacts
-            # and will not be removed after, making result size getting larger.
-            # We don't do this for results on DUT because 1) rsync doesn't work
-            # 2) DUT will be reflashed frequently and no need to worry about
-            # result size.
-            scp_cmd.extend(['rsync', '-avz', '--remove-source-files'])
-            src = self._telemetry_path
-
-        if self._perf_value_writer:
-            src = os.path.join(src, self._perf_value_writer.tmpdir.strip('/'))
-
-        scp_cmd.append(os.path.join(src, output_filename))
-
-        # Copy artifacts back to result directory if needed.
-        if artifacts:
-            scp_cmd.append(os.path.join(src, PROFILE_ARTIFACTS))
-
-        scp_cmd.append(perf_results_dir)
-        return ' '.join(scp_cmd)
-
-    def _run_cmd(self, cmd):
-        """Execute a command in an external shell and capture the output.
-
-        @param cmd: String of a valid shell command.
-
-        @returns The standard out, standard error and the integer exit code of
-                 the executed command.
-        """
-        logging.debug('Running: %s', cmd)
-
-        output = six.StringIO()
-        error_output = six.StringIO()
-        exit_code = 0
-        try:
-            result = utils.run(
-                    cmd,
-                    stdout_tee=output,
-                    stderr_tee=error_output,
-                    timeout=TELEMETRY_TIMEOUT_MINS * 60)
-            exit_code = result.exit_status
-        except error.CmdError as e:
-            logging.debug('Error occurred executing.')
-            exit_code = e.result_obj.exit_status
-
-        stdout = output.getvalue()
-        stderr = error_output.getvalue()
-        logging.debug('Completed with exit code: %d.\nstdout:%s\n'
-                      'stderr:%s', exit_code, stdout, stderr)
-        return stdout, stderr, exit_code
-
-    def _run_telemetry(self, script, test_or_benchmark, output_format, *args,
-                       **kwargs):
-        """Runs telemetry on a dut.
-
-        @param script: Telemetry script we want to run. For example:
-                       [path_to_telemetry_src]/src/tools/telemetry/run_tests.
-        @param test_or_benchmark: Name of the test or benchmark we want to run,
-                                 with the page_set (if required) as part of the
-                                 string.
-        @param args: additional list of arguments to pass to the script.
-        @param kwargs: additional list of keyword arguments to pass to the
-                       script.
-
-        @returns A TelemetryResult Instance with the results of this telemetry
-                 execution.
-        """
-        # TODO (sbasi crbug.com/239933) add support for incognito mode.
-
-        telemetry_cmd = self._get_telemetry_cmd(script, test_or_benchmark,
-                                                output_format, *args, **kwargs)
-        logging.info('Running Telemetry: %s', telemetry_cmd)
-
-        stdout, stderr, exit_code = self._run_cmd(telemetry_cmd)
-
-        return TelemetryResult(
-                exit_code=exit_code, stdout=stdout, stderr=stderr)
-
-    def _run_scp(self, perf_results_dir, output_format, artifacts=False):
-        """Runs telemetry on a dut.
-
-        @param perf_results_dir: The local directory that results are being
-                                 collected.
-        @param output_format: Format of the json result file.
-        @param artifacts: Whether we want to copy artifacts directory.
-        """
-        scp_cmd = self._scp_telemetry_results_cmd(perf_results_dir,
-                                                  output_format, artifacts)
-        logging.debug('Retrieving Results: %s', scp_cmd)
-        _, _, exit_code = self._run_cmd(scp_cmd)
-        if exit_code != 0:
-            raise error.TestFail('Unable to retrieve results.')
-
-        if output_format == 'histograms':
-            # Converts to chart json format.
-            input_filename = os.path.join(perf_results_dir,
-                                          HISTOGRAM_SET_RESULT)
-            output_filename = os.path.join(perf_results_dir, CHART_JSON_RESULT)
-            histograms = json.loads(open(input_filename).read())
-            chartjson = TelemetryRunner.convert_chart_json(histograms)
-            with open(output_filename, 'w') as fout:
-                fout.write(json.dumps(chartjson, indent=2))
-
-    def _run_test(self, script, test, *args):
-        """Runs a telemetry test on a dut.
-
-        @param script: Which telemetry test script we want to run. Can be
-                       telemetry's base test script or the Chrome OS specific
-                       test script.
-        @param test: Telemetry test we want to run.
-        @param args: additional list of arguments to pass to the script.
-
-        @returns A TelemetryResult Instance with the results of this telemetry
-                 execution.
-        """
-        logging.debug('Running telemetry test: %s', test)
-        telemetry_script = os.path.join(self._telemetry_path, script)
-        result = self._run_telemetry(telemetry_script, test, 'chartjson',
-                                     *args)
-        if result.status is FAILED_STATUS:
-            raise error.TestFail('Telemetry test %s failed.' % test)
-        return result
-
-    def run_telemetry_test(self, test, *args):
-        """Runs a telemetry test on a dut.
-
-        @param test: Telemetry test we want to run.
-        @param args: additional list of arguments to pass to the telemetry
-                     execution script.
-
-        @returns A TelemetryResult Instance with the results of this telemetry
-                 execution.
-        """
-        return self._run_test(TELEMETRY_RUN_TESTS_SCRIPT, test, *args)
-
-    def run_telemetry_benchmark(self,
-                                benchmark,
-                                perf_value_writer=None,
-                                *args,
-                                **kwargs):
-        """Runs a telemetry benchmark on a dut.
-
-        @param benchmark: Benchmark we want to run.
-        @param perf_value_writer: Should be an instance with the function
-                                  output_perf_value(), if None, no perf value
-                                  will be written. Typically this will be the
-                                  job object from an autotest test.
-        @param args: additional list of arguments to pass to the telemetry
-                     execution script.
-        @param kwargs: additional list of keyword arguments to pass to the
-                       telemetry execution script.
-
-        @returns A TelemetryResult Instance with the results of this telemetry
-                 execution.
-        """
-        logging.debug('Running telemetry benchmark: %s', benchmark)
-
-        self._perf_value_writer = perf_value_writer
-
-        if benchmark in ON_DUT_BLOCKLIST:
-            self._telemetry_on_dut = False
-
-        output_format = kwargs.get('ex_output_format', '')
-
-        if not output_format:
-            output_format = 'histograms'
-
-        if self._telemetry_on_dut:
-            telemetry_script = os.path.join(DUT_CHROME_ROOT,
-                                            TELEMETRY_RUN_BENCHMARKS_SCRIPT)
-            self._ensure_deps(self._host, benchmark)
-        else:
-            telemetry_script = os.path.join(self._telemetry_path,
-                                            TELEMETRY_RUN_BENCHMARKS_SCRIPT)
-
-        result = self._run_telemetry(telemetry_script, benchmark,
-                                     output_format, *args, **kwargs)
-
-        if result.status is WARNING_STATUS:
-            raise error.TestWarn('Telemetry Benchmark: %s'
-                                 ' exited with Warnings.\nOutput:\n%s\n' %
-                                 (benchmark, result.output))
-        elif result.status is FAILED_STATUS:
-            raise error.TestFail('Telemetry Benchmark: %s'
-                                 ' failed to run.\nOutput:\n%s\n' %
-                                 (benchmark, result.output))
-        elif '[  PASSED  ] 0 tests.' in result.output:
-            raise error.TestWarn('Telemetry Benchmark: %s exited successfully,'
-                                 ' but no test actually passed.\nOutput\n%s\n'
-                                 % (benchmark, result.output))
-        if perf_value_writer:
-            artifacts = kwargs.get('artifacts', False)
-            self._run_scp(perf_value_writer.resultsdir, output_format,
-                          artifacts)
-        return result
-
-    def run_gpu_integration_test(self, test, *args):
-        """Runs a gpu test on a dut.
-
-        @param test: Gpu test we want to run.
-        @param args: additional list of arguments to pass to the telemetry
-                     execution script.
-
-        @returns A TelemetryResult instance with the results of this telemetry
-                 execution.
-        """
-        script = os.path.join(DUT_CHROME_ROOT, TELEMETRY_RUN_GPU_TESTS_SCRIPT)
-        cmd = [
-                self._host.ssh_command(alive_interval=900,
-                                       connection_attempts=4), 'python2',
-                script
-        ]
-        cmd.extend(args)
-        cmd.append(test)
-        cmd = ' '.join(cmd)
-        stdout, stderr, exit_code = self._run_cmd(cmd)
-
-        if exit_code:
-            raise error.TestFail('Gpu Integration Test: %s'
-                                 ' failed to run.' % test)
-
-        return TelemetryResult(
-                exit_code=exit_code, stdout=stdout, stderr=stderr)
-
-    def _ensure_deps(self, dut, test_name):
-        """
-        Ensure the dependencies are locally available on DUT.
-
-        @param dut: The autotest host object representing DUT.
-        @param test_name: Name of the telemetry test.
-        """
-        # Get DEPs using host's telemetry.
-        # Example output, fetch_benchmark_deps.py --output-deps=deps octane:
-        # {'octane': ['tools/perf/page_sets/data/octane_002.wprgo']}
-        fetch_path = os.path.join(self._telemetry_path, 'tools', 'perf',
-                                  'fetch_benchmark_deps.py')
-        # Use a temporary file for |deps_path| to avoid race conditions. The
-        # created temporary file is assigned to |self._benchmark_deps| to make
-        # it valid until |self| is destroyed.
-        self._benchmark_deps = tempfile.NamedTemporaryFile(
-                prefix='fetch_benchmark_deps_result.', suffix='.json')
-        deps_path = self._benchmark_deps.name
-        format_fetch = ('python2 %s --output-deps=%s %s')
-        command_fetch = format_fetch % (fetch_path, deps_path, test_name)
-        command_get = 'cat %s' % deps_path
-
-        logging.info('Getting DEPs: %s', command_fetch)
-        _, _, exit_code = self._run_cmd(command_fetch)
-        if exit_code != 0:
-            raise error.TestFail('Error occurred while fetching DEPs.')
-        stdout, _, exit_code = self._run_cmd(command_get)
-        if exit_code != 0:
-            raise error.TestFail('Error occurred while getting DEPs.')
-
-        # Download DEPs to DUT.
-        # send_file() relies on rsync over ssh. Couldn't be better.
-        deps = json.loads(stdout)
-        for dep in deps[test_name]:
-            src = os.path.join(self._telemetry_path, dep)
-            dst = os.path.join(DUT_CHROME_ROOT, dep)
-            if not os.path.isfile(src):
-                raise error.TestFail('Error occurred while saving DEPs.')
-            logging.info('Copying: %s -> %s', src, dst)
-            dut.send_file(src, dst)
-
-    @staticmethod
-    def convert_chart_json(histogram_set):
-        """
-        Convert from histogram set to chart json format.
-
-        @param histogram_set: result in histogram set format.
-
-        @returns result in chart json format.
-        """
-        value_map = {}
-
-        # Gets generic set values.
-        for obj in histogram_set:
-            if 'type' in obj and obj['type'] == 'GenericSet':
-                value_map[obj['guid']] = obj['values']
-
-        charts = {}
-        benchmark_name = ''
-        benchmark_desc = ''
-
-        # Checks the unit test for how this conversion works.
-        for obj in histogram_set:
-            if 'name' not in obj or 'sampleValues' not in obj:
-                continue
-            metric_name = obj['name']
-            diagnostics = obj['diagnostics']
-            if 'stories' in diagnostics:
-                story_name = value_map[diagnostics['stories']][0]
-            else:
-                story_name = 'default'
-            local_benchmark_name = value_map[diagnostics['benchmarks']][0]
-            if benchmark_name == '':
-                benchmark_name = local_benchmark_name
-                if 'benchmarkDescriptions' in diagnostics:
-                    benchmark_desc = value_map[
-                            diagnostics['benchmarkDescriptions']][0]
-            if benchmark_name != local_benchmark_name:
-                logging.warning(
-                        'There are more than 1 benchmark names in the'
-                        'result. old: %s, new: %s', benchmark_name,
-                        local_benchmark_name)
-                continue
-
-            unit = obj['unit']
-            smaller_postfixes = ('_smallerIsBetter', '-')
-            bigger_postfixes = ('_biggerIsBetter', '+')
-            all_postfixes = smaller_postfixes + bigger_postfixes
-
-            improvement = 'up'
-            for postfix in smaller_postfixes:
-                if unit.endswith(postfix):
-                    improvement = 'down'
-            for postfix in all_postfixes:
-                if unit.endswith(postfix):
-                    unit = unit[:-len(postfix)]
-                    break
-
-            if unit == 'unitless':
-                unit = 'score'
-
-            values = [
-                    x for x in obj['sampleValues']
-                    if isinstance(x, numbers.Number)
-            ]
-            if metric_name not in charts:
-                charts[metric_name] = {}
-            charts[metric_name][story_name] = {
-                    'improvement_direction': improvement,
-                    'name': metric_name,
-                    'std': numpy.std(values),
-                    'type': 'list_of_scalar_values',
-                    'units': unit,
-                    'values': values
-            }
-
-        # Adds summaries.
-        for metric_name in charts:
-            values = []
-            metric_content = charts[metric_name]
-            for story_name in metric_content:
-                story_content = metric_content[story_name]
-                values += story_content['values']
-                metric_type = story_content['type']
-                units = story_content['units']
-                improvement = story_content['improvement_direction']
-            values.sort()
-            std = numpy.std(values)
-            metric_content['summary'] = {
-                    'improvement_direction': improvement,
-                    'name': metric_name,
-                    'std': std,
-                    'type': metric_type,
-                    'units': units,
-                    'values': values
-            }
-
-        benchmark_metadata = {
-                'description': benchmark_desc,
-                'name': benchmark_name,
-                'type': 'telemetry_benchmark'
-        }
-        return {
-                'benchmark_description': benchmark_desc,
-                'benchmark_metadata': benchmark_metadata,
-                'benchmark_name': benchmark_name,
-                'charts': charts,
-                'format_version': 1.0
-        }
-
-
-class LocalTelemetryRunner(TelemetryRunner):
-    """Specialized TelemetryRunner to handle local telemetry test runs."""
-
-    def __init__(self, *args, **kwargs):
-        """Initialize LocalTelemetryRunner.
-
-        The telemetry test will run locally. Depending on whether
-        telemetry_on_dut is True or False, there can be possible combinations
-        for the execution of this test:
-
-        telemetry_on_dut=False:
-        python2 run_benchmark --browser=cros-chrome --remote=[dut] [test]
-
-        telemetry_on_dut=True:
-        ssh [dut] python2 run_benchmark --browser=system [test]
-
-        @param args: The list of arguments to be passed. See Base class for a
-                     complete list of accepted arguments.
-        @param kwargs: Any keyword arguments to be passed. See Base class for a
-                       complete list of accepted keyword arguments.
-        """
-        super(LocalTelemetryRunner, self).__init__(*args, **kwargs)
-
-    def _setup_telemetry(self):
-        """Setup Telemetry to use local path to its sources.
-
-        First look for chrome source root, either externally mounted, or inside
-        the chroot.  Prefer chrome-src-internal source tree to chrome-src.
-        """
-        TELEMETRY_DIR = 'src'
-        CHROME_LOCAL_SRC = '/var/cache/chromeos-cache/distfiles/target/'
-        CHROME_EXTERNAL_SRC = os.path.expanduser('~/chrome_root/')
-
-        logging.debug('Setting up telemetry for local testing')
-
-        sources_list = ('chrome-src-internal', 'chrome-src')
-        dir_list = [CHROME_EXTERNAL_SRC]
-        dir_list.extend(
-                [os.path.join(CHROME_LOCAL_SRC, x) for x in sources_list])
-        if 'CHROME_ROOT' in os.environ:
-            dir_list.insert(0, os.environ['CHROME_ROOT'])
-
-        telemetry_src = ''
-        for dir in dir_list:
-            if os.path.exists(dir):
-                telemetry_src = os.path.join(dir, TELEMETRY_DIR)
-                break
-        else:
-            raise error.TestError('Telemetry source directory not found.')
-
-        self._telemetry_path = telemetry_src
-
-
-class DroneTelemetryRunner(TelemetryRunner):
-    """Handle telemetry test setup on the drone.
-
-    Users of this class are strongly advised to use this class as a context
-    manager. Since the setup for telemetry environment happens on the drone, it
-    is imperative that this setup be cleaned up once the test is done. Using
-    this class as a context manager will transfer the burden of clean up from
-    the user to Python.
-    """
-
-    def __init__(self, *args, **kwargs):
-        """Initialize DroneTelemetryRunner.
-
-        The telemetry test will run on the drone. Depending on whether
-        telemetry_on_dut is True or False, there can be possible combinations
-        for the execution of this test:
-
-        telemetry_on_dut=False:
-        python2 run_benchmark --browser=cros-chrome --remote=[dut] [test]
-
-        telemetry_on_dut=True:
-        ssh [dut] python2 run_benchmark --browser=system [test]
-
-        @param args: The list of arguments to be passed. See Base class for a
-                     complete list of accepted arguments.
-        @param kwargs: Any keyword arguments to be passed. See Base class for a
-                       complete list of accepted keyword arguments.
-        """
-        self._telemetry_setup = None
-        super(DroneTelemetryRunner, self).__init__(*args, **kwargs)
-
-    def __enter__(self):
-        """Called while entering context manager; does nothing."""
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        """Called while exiting context manager; cleans up temp files."""
-        logging.info('Cleaning up the telemetry environment on the drone.')
-        self._telemetry_setup.Cleanup()
-
-    def _setup_telemetry(self):
-        """Setup Telemetry on the drone."""
-        logging.debug('Setting up telemetry on the drone')
-        info = self._host.host_info_store.get()
-        if not info.build:
-            logging.error('Unable to locate build label for host: %s.',
-                          self._host.host_port)
-            raise error.AutotestError('Failed to grab build for host %s.' %
-                                      self._host.host_port)
-
-        logging.debug('Setting up telemetry for build: %s', info.build)
-        try:
-            self._telemetry_setup = telemetry_setup.TelemetrySetup(
-                    hostname=self._host.hostname, build=info.build)
-            self._telemetry_path = self._telemetry_setup.Setup()
-        except telemetry_setup.TelemetrySetupError as e:
-            raise error.AutotestError('Telemetry Environment could not be '
-                                      'setup: %s.' % e)
diff --git a/server/cros/telemetry_runner_unittest.py b/server/cros/telemetry_runner_unittest.py
deleted file mode 100755
index 3014f5d..0000000
--- a/server/cros/telemetry_runner_unittest.py
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/python3
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import unittest
-
-import common
-from autotest_lib.server.cros import telemetry_runner
-
-histograms_sample = [
-    {
-        'values': [
-            'story1'
-        ],
-        'guid': '00000001-...',
-        'type': 'GenericSet'
-    },
-    {
-        'values': [
-            'story2'
-        ],
-        'guid': '00000002-...',
-        'type': 'GenericSet'
-    },
-    {
-        'values': [
-            'benchmark1'
-        ],
-        'guid': 'a0000001-...',
-        'type': 'GenericSet'
-    },
-    {
-        'values': [
-            'benchmark_desc1'
-        ],
-        'guid': 'b0000001-...',
-        'type': 'GenericSet'
-    },
-    {
-        'sampleValues': [1.0, 2.0],
-        'name': 'metric1',
-        'diagnostics': {
-            'stories': '00000001-...',
-            'benchmarks': 'a0000001-...',
-            'benchmarkDescriptions': 'b0000001-...'
-        },
-        'unit': 'ms_smallerIsBetter'
-    },
-    {
-        'sampleValues': [1.0, 2.0],
-        'name': 'metric1',
-        'diagnostics': {
-            'stories': '00000002-...',
-            'benchmarks': 'a0000001-...',
-            'benchmarkDescriptions': 'b0000001-...'
-        },
-        'unit': 'ms_smallerIsBetter'
-    }
-]
-
-chartjson_sample = {
-    'format_version': 1.0,
-    'benchmark_name': 'benchmark1',
-    'benchmark_description': 'benchmark_desc1',
-    'benchmark_metadata': {
-        'type': 'telemetry_benchmark',
-        'name': 'benchmark1',
-        'description': 'benchmark_desc1'
-    },
-    'charts': {
-        'metric1': {
-            'story1': {
-                'std': 0.5,
-                'name': 'metric1',
-                'type': 'list_of_scalar_values',
-                'values': [1.0, 2.0],
-                'units': 'ms',
-                'improvement_direction': 'down'
-            },
-            'story2': {
-                'std': 0.5,
-                'name': 'metric1',
-                'type': 'list_of_scalar_values',
-                'values': [1.0, 2.0],
-                'units': 'ms',
-                'improvement_direction': 'down'
-            },
-            'summary': {
-                'std': 0.5,
-                'name': 'metric1',
-                'type': 'list_of_scalar_values',
-                'values': [1.0, 1.0, 2.0, 2.0],
-                'units': 'ms',
-                'improvement_direction': 'down'
-            }
-        },
-    }
-}
-
-class TelemetryRunnerTestCase(unittest.TestCase):
-    """Test telemetry runner module."""
-
-    def test_convert_chart_json(self):
-        # Deep comparison of 2 objects with json dumps.
-        converted = telemetry_runner.TelemetryRunner.convert_chart_json(
-            histograms_sample)
-        chartjson_dumps = json.dumps(chartjson_sample, sort_keys=True)
-        chartjson_dumps2 = json.dumps(converted, sort_keys=True)
-        self.assertEqual(chartjson_dumps, chartjson_dumps2)
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/server/cros/telemetry_setup.py b/server/cros/telemetry_setup.py
deleted file mode 100644
index 289c2d5..0000000
--- a/server/cros/telemetry_setup.py
+++ /dev/null
@@ -1,247 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2021 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""A class that sets up the environment for telemetry testing."""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from autotest_lib.client.common_lib.cros import dev_server
-
-import contextlib
-import errno
-import fcntl
-import logging
-import os
-import shutil
-import subprocess
-import tempfile
-
-import requests
-
-_READ_BUFFER_SIZE_BYTES = 1024 * 1024  # 1 MB
-
-
-@contextlib.contextmanager
-def lock_dir(dir_name):
-    """Lock a directory exclusively by placing a file lock in it.
-
-    Args:
-      dir_name: the directory name to be locked.
-    """
-    lock_file = os.path.join(dir_name, '.lock')
-    with open(lock_file, 'w+') as f:
-        fcntl.flock(f, fcntl.LOCK_EX)
-        try:
-            yield
-        finally:
-            fcntl.flock(f, fcntl.LOCK_UN)
-
-
-class TelemetrySetupError(Exception):
-    """Exception class used by this module."""
-    pass
-
-
-class TelemetrySetup(object):
-    """Class that sets up the environment for telemetry testing."""
-
-    # Relevant directory paths.
-    _BASE_DIR_PATH = '/tmp/telemetry-workdir'
-    _PARTIAL_DEPENDENCY_DIR_PATH = 'autotest/packages'
-
-    # Relevant directory names.
-    _TELEMETRY_SRC_DIR_NAME = 'telemetry_src'
-    _TEST_SRC_DIR_NAME = 'test_src'
-    _SRC_DIR_NAME = 'src'
-
-    # Names of the telemetry dependency tarballs.
-    _DEPENDENCIES = [
-            'dep-telemetry_dep.tar.bz2',
-            'dep-page_cycler_dep.tar.bz2',
-            'dep-chrome_test.tar.bz2',
-            'dep-perf_data_dep.tar.bz2',
-    ]
-
-    # Partial devserver URLs.
-    _STATIC_URL_TEMPLATE = '%s/static/%s/autotest/packages/%s'
-
-    def __init__(self, hostname, build):
-        """Initializes the TelemetrySetup class.
-
-        Args:
-        hostname: The host for which telemetry environment should be setup. This
-            is important for devserver resolution.
-        build: The build for which telemetry environment should be setup. It is
-            typically in the format <board>/<version>.
-        """
-        self._build = build
-        self._ds = dev_server.ImageServer.resolve(self._build,
-                                                  hostname=hostname)
-        self._setup_dir_path = tempfile.mkdtemp(prefix='telemetry-setupdir_')
-        self._tmp_build_dir = os.path.join(self._BASE_DIR_PATH, self._build)
-        self._tlm_src_dir_path = os.path.join(self._tmp_build_dir,
-                                              self._TELEMETRY_SRC_DIR_NAME)
-
-    def Setup(self):
-        """Sets up the environment for telemetry testing.
-
-        This method downloads the telemetry dependency tarballs and extracts
-        them into a 'src' directory.
-
-        Returns:
-        Path to the src directory where the telemetry dependencies have been
-            downloaded and extracted.
-        """
-        src_folder = os.path.join(self._tlm_src_dir_path, self._SRC_DIR_NAME)
-        test_src = os.path.join(self._tlm_src_dir_path,
-                                self._TEST_SRC_DIR_NAME)
-        self._MkDirP(self._tlm_src_dir_path)
-        with lock_dir(self._tlm_src_dir_path):
-            if not os.path.exists(src_folder):
-                # Download the required dependency tarballs.
-                for dep in self._DEPENDENCIES:
-                    dep_path = self._DownloadFilesFromDevserver(
-                            dep, self._setup_dir_path)
-                    if os.path.exists(dep_path):
-                        self._ExtractTarball(dep_path, self._tlm_src_dir_path)
-
-                # By default all the tarballs extract to test_src but some parts
-                # of the telemetry code specifically hardcoded to exist inside
-                # of 'src'.
-                try:
-                    shutil.move(test_src, src_folder)
-                except shutil.Error:
-                    raise TelemetrySetupError(
-                            'Failure in telemetry setup for build %s. Appears '
-                            'that the test_src to src move failed.' %
-                            self._build)
-        return src_folder
-
-    def _DownloadFilesFromDevserver(self, filename, dest_path):
-        """Downloads the given tar.bz2 file from the devserver.
-
-        Args:
-          filename: Name of the tar.bz2 file to be downloaded.
-          dest_path: Full path to the directory where it should be downloaded.
-
-        Returns:
-            Full path to the downloaded file.
-
-        Raises:
-          TelemetrySetupError when the download cannot be completed for any
-              reason.
-        """
-        dep_path = os.path.join(dest_path, filename)
-        url = (self._STATIC_URL_TEMPLATE %
-               (self._ds.url(), self._build, filename))
-        try:
-            resp = requests.get(url)
-            resp.raise_for_status()
-            with open(dep_path, 'w') as f:
-                for content in resp.iter_content(_READ_BUFFER_SIZE_BYTES):
-                    f.write(content)
-        except Exception as e:
-            if (isinstance(e, requests.exceptions.HTTPError)
-                        and resp.status_code == 404):
-                logging.error(
-                        'The request %s returned a 404 Not Found status.'
-                        'This dependency could be new and therefore does not '
-                        'exist. Hence, squashing the exception and proceeding.',
-                        url)
-            elif isinstance(e, requests.exceptions.ConnectionError):
-                logging.warning(
-                        'The request failed because a connection to the devserver '
-                        '%s could not be established. Attempting to execute the '
-                        'request %s once by SSH-ing into the devserver.',
-                        self._ds.url(), url)
-                return self._DownloadFilesFromDevserverViaSSH(url, dep_path)
-            else:
-                raise TelemetrySetupError(
-                        'An error occurred while trying to complete  %s: %s' %
-                        (url, e))
-        return dep_path
-
-    def _DownloadFilesFromDevserverViaSSH(self, url, dep_path):
-        """Downloads the file at the URL from the devserver by SSH-ing into it.
-
-        Args:
-          url: URL of the location of the tar.bz2 file on the devserver.
-          dep_path: Full path to the file where it will be downloaded.
-
-        Returns:
-            Full path to the downloaded file.
-
-        Raises:
-          TelemetrySetupError when the download cannot be completed for any
-              reason.
-        """
-        cmd = ['ssh', self._ds.hostname, 'curl', url]
-        with open(dep_path, 'w') as f:
-            proc = subprocess.Popen(cmd, stdout=f, stderr=subprocess.PIPE)
-            _, err = proc.communicate()
-            if proc.returncode != 0:
-                raise TelemetrySetupError(
-                        'The command: %s finished with returncode %s and '
-                        'errors as following: %s. The telemetry dependency '
-                        'could not be downloaded.' %
-                        (' '.join(cmd), proc.returncode, err))
-        return dep_path
-
-    def _ExtractTarball(self, tarball_path, dest_path):
-        """Extracts the given tarball into the destination directory.
-
-        Args:
-          tarball_path: Full path to the tarball to be extracted.
-          dest_path: Full path to the directory where the tarball should be
-              extracted.
-
-        Raises:
-          TelemetrySetupError if the method is unable to extract the tarball for
-              any reason.
-        """
-        cmd = ['tar', 'xf', tarball_path, '--directory', dest_path]
-        try:
-            proc = subprocess.Popen(cmd,
-                                    stdout=subprocess.PIPE,
-                                    stderr=subprocess.PIPE)
-            proc.communicate()
-        except Exception as e:
-            shutil.rmtree(dest_path)
-            raise TelemetrySetupError(
-                    'An exception occurred while trying to untar %s into %s: %s'
-                    % (tarball_path, dest_path, str(e)))
-
-    def _MkDirP(self, path):
-        """Recursively creates the given directory.
-
-        Args:
-          path: Full path to the directory that needs to the created.
-
-        Raises:
-          TelemetrySetupError is the method is unable to create directories for
-              any reason except OSError EEXIST which indicates that the
-              directory already exists.
-        """
-        try:
-            os.makedirs(path)
-        except Exception as e:
-            if not isinstance(e, OSError) or e.errno != errno.EEXIST:
-                raise TelemetrySetupError(
-                        'Could not create directory %s due to %s.' %
-                        (path, str(e)))
-
-    def Cleanup(self):
-        """Cleans up telemetry setup and work environment."""
-        try:
-            shutil.rmtree(self._setup_dir_path)
-        except Exception as e:
-            logging.error('Something went wrong. Could not delete %s: %s',
-                          self._setup_dir_path, e)
-        try:
-            shutil.rmtree(self._tlm_src_dir_path)
-        except Exception as e:
-            logging.error('Something went wrong. Could not delete %s: %s',
-                          self._tlm_src_dir_path, e)
diff --git a/server/cros/usb_mux_controller.py b/server/cros/usb_mux_controller.py
deleted file mode 100644
index 64198f1..0000000
--- a/server/cros/usb_mux_controller.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Lint as: python2, python3
-# Copyriht (c) 2014 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-import logging
-import six
-
-# Following Beaglebone GPIO pins are used to control the 8 port multiplexer.
-MUX_EN = '20'
-MUX_S0 = '115'
-MUX_S1 = '117'
-MUX_S2 = '49'
-
-ENABLE_MUX = '1'
-DISABLE_MUX = '0'
-
-# Various commands used to control the GPIO pins at the kernel level.
-LS_GPIO_DIRECTORY = 'ls /sys/class/gpio'
-EXPORT_GPIO_PIN = 'echo %s > /sys/class/gpio/export'
-SET_GPIO_DIRECTION = 'echo high > /sys/class/gpio/gpio%s/direction'
-SET_GPIO_VALUE = 'echo %s > /sys/class/gpio/gpio%s/value'
-UNEXPORT_GPIO_PIN = 'echo %s > /sys/class/gpio/unexport'
-
-# Values passed to each GPIO pin to enable a specific port.
-# Bit sequence: MUX_S2, MUX_S1, MUX_S0
-# Example: To enable port 5, MUX_S2 will be set to 1, MUX_S1 will be set to 0
-# and MUX_S0 will be set to 1
-ports = {0:'000', 1:'001', 2:'010', 3:'011', 4:'100', 5:'101', 6:'110', 7:'111'}
-
-class USBMuxController(object):
-    """Class to control individual ports on a 8 port USB switch/hub.
-
-    This class is responsible for enabling all the GPIO pins on the beaglebone
-    needed to control the 8 port USB switch/hub. In order to use this USB mux
-    controller you need custom hardware setup which connects to the beaglebone
-    and drives the 8 port relay switch to turn the individual ports on the USB
-    hub 'on'/'off'.
-
-    TODO(harpreet) Write a USB mux hardware design document and provide a link
-    here.
-
-    """
-
-    version = 1
-
-    def __init__(self, host):
-        """Initializes this USB Mux Controller instance.
-
-        @param host: Host where the test will be run.
-
-        """
-        self.host = host
-
-    def __del__(self):
-        """Destructor of USBMuxController.
-
-        Disables all GPIO pins used that control the multiplexer.
-
-        """
-        self.mux_teardown()
-
-    def mux_setup(self):
-        """
-        Enable GPIO pins that control the multiplexer.
-
-        """
-        logging.info('Enable GPIO pins that control the multiplexer.')
-        self.enable_gpio_pins(MUX_EN)
-        self.disable_all_ports()
-        self.enable_gpio_pins(MUX_S2)
-        self.enable_gpio_pins(MUX_S1)
-        self.enable_gpio_pins(MUX_S0)
-
-    def mux_teardown(self):
-        """
-        Disable the multiplexer and unexport all GPIO pins.
-
-        """
-        logging.info('Start USB multiplexer teardown.')
-        self.disable_all_ports()
-        # unexport gpio pins
-        logging.info('Unexport all GPIO pins.')
-        self.host.servo.system(UNEXPORT_GPIO_PIN % MUX_S0)
-        self.host.servo.system(UNEXPORT_GPIO_PIN % MUX_S1)
-        self.host.servo.system(UNEXPORT_GPIO_PIN % MUX_S2)
-        self.host.servo.system(UNEXPORT_GPIO_PIN % MUX_EN)
-        logging.info('Completed USB multiplexer teardown. All USB ports should'
-                     'now be turned off.')
-
-    def enable_gpio_pins(self, pin):
-        """
-        Enables the given GPIO pin by exporting the pin and setting the
-        direction.
-
-        @param pin: GPIO pin to be enabled.
-
-        """
-        if 'gpio' + pin not in self.host.servo.system_output(LS_GPIO_DIRECTORY):
-            self.host.servo.system(EXPORT_GPIO_PIN % pin)
-            self.host.servo.system(SET_GPIO_DIRECTION % pin)
-
-    def enable_port(self, usb_port):
-        """
-        Enables the given port on the USB hub.
-
-        @param usb_port: USB port to be enabled.
-
-        """
-        port = ports[usb_port]
-        logging.info('Enable port %s.', port)
-        self.mux_setup()
-        self.disable_all_ports()
-
-        logging.info('Set GPIO pins to correct logic levels.')
-        self.host.servo.system(SET_GPIO_VALUE % (port[0], MUX_S2))
-        self.host.servo.system(SET_GPIO_VALUE % (port[1], MUX_S1))
-        self.host.servo.system(SET_GPIO_VALUE % (port[2], MUX_S0))
-
-        logging.info('Enable USB multiplexer. Appropriate port should now be'
-                     'enabled')
-        self.host.servo.system(SET_GPIO_VALUE % (ENABLE_MUX, MUX_EN))
-
-    def disable_all_ports(self):
-        """
-        Disables all USB ports that are currently enabled.
-
-        """
-        if 'gpio20' in self.host.servo.system_output(LS_GPIO_DIRECTORY):
-            logging.info('Disable USB ports.')
-            self.host.servo.system(SET_GPIO_VALUE % (DISABLE_MUX, MUX_EN))
diff --git a/server/hosts/factory.py b/server/hosts/factory.py
index 752718e..bad7614 100644
--- a/server/hosts/factory.py
+++ b/server/hosts/factory.py
@@ -256,7 +256,6 @@
 
     # create a custom host class for this machine and return an instance of it
     classes = (host_class, connectivity_class)
-    logging.info("!!!!!!!! {}".format(classes))
     custom_host_class = type("%s_host" % hostname, classes, {})
     host_instance = custom_host_class(hostname, **args)