Bug 1403131 - Run linters against mozharness scripts and configs. r?rail
author: Steve Armand <stevea1@mac.com>
date: Thu, 05 Oct 2017 21:52:57 -0400
changeset: 685624 18de5fbbba440c9c57cf1ea577f86c0fe4b18ff0
parent: 684481 8ccbd32831c7c7e9a253aaa35834f1d7b7244672
child: 737218 0c650862591ca6bcc51a28c78930e8e33578e81e
push id: 86005
push user: bmo:stevea1@mac.com
push date: Tue, 24 Oct 2017 23:23:20 +0000
reviewers: rail
bugs: 1403131
milestone: 58.0a1
Bug 1403131 - Run linters against mozharness scripts and configs. r?rail MozReview-Commit-ID: GX7w7zGem8j
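
For context: everything below is a mechanical flake8 cleanup. A minimal sketch of the recurring PEP 8 conventions the patch applies (illustrative code only, not taken from any of the files below):

    from os.path import *  # noqa  (F403: intentional star import, silenced)

    DELIMITERS = {
        'vertical_line': '|',  # E203: no whitespace before ':'
        'item_marker': '+',
    }


    def retry(max_attempts, interval=30):  # E251: no spaces around '=' in defaults
        """E302: two blank lines separate top-level definitions."""
        return max_attempts, interval  # E261: two spaces before inline comments

With the tools/lint/flake8.yml update (its hunk is not part of this excerpt) adding these directories to the linter's paths, the same checks should be reproducible locally via ./mach lint -l flake8 testing/mozharness, assuming the standard mach lint setup.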
testing/mozharness/mozfile/__init__.py
testing/mozharness/mozfile/mozfile.py
testing/mozharness/mozinfo/__init__.py
testing/mozharness/mozinfo/mozinfo.py
testing/mozharness/scripts/android_emulator_unittest.py
testing/mozharness/scripts/awsy_script.py
testing/mozharness/scripts/bouncer_submitter.py
testing/mozharness/scripts/configtest.py
testing/mozharness/scripts/desktop_l10n.py
testing/mozharness/scripts/desktop_partner_repacks.py
testing/mozharness/scripts/desktop_unittest.py
testing/mozharness/scripts/fx_desktop_build.py
testing/mozharness/scripts/l10n_bumper.py
testing/mozharness/scripts/marionette.py
testing/mozharness/scripts/merge_day/gecko_migration.py
testing/mozharness/scripts/mobile_l10n.py
testing/mozharness/scripts/mobile_partner_repack.py
testing/mozharness/scripts/release/antivirus.py
testing/mozharness/scripts/release/beet_mover.py
testing/mozharness/scripts/release/generate-checksums.py
testing/mozharness/scripts/release/postrelease_version_bump.py
testing/mozharness/scripts/release/publish_balrog.py
testing/mozharness/scripts/release/push-candidate-to-releases.py
testing/mozharness/scripts/release/submit-to-ct.py
testing/mozharness/scripts/release/updates.py
testing/mozharness/scripts/spidermonkey_build.py
testing/mozharness/scripts/telemetry/telemetry_client.py
testing/mozharness/scripts/web_platform_tests.py
tools/lint/flake8.yml
--- a/testing/mozharness/mozfile/__init__.py
+++ b/testing/mozharness/mozfile/__init__.py
@@ -1,5 +1,5 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from mozfile import *
+from mozfile import *  # noqa
--- a/testing/mozharness/mozfile/mozfile.py
+++ b/testing/mozharness/mozfile/mozfile.py
@@ -24,20 +24,20 @@ import time
            'rmtree',
            'tree',
            'NamedTemporaryFile',
            'TemporaryDirectory']
 
 try:
     WindowsError
 except NameError:
-    WindowsError = None # so we can unconditionally catch it later...
+    WindowsError = None  # so we can unconditionally catch it later...
 
 
-### utilities for extracting archives
+# utilities for extracting archives
 
 def extract_tarball(src, dest):
     """extract a .tar file"""
 
     bundle = tarfile.open(src)
     namelist = bundle.getnames()
 
     for name in namelist:
@@ -49,17 +49,17 @@ def extract_tarball(src, dest):
 def extract_zip(src, dest):
     """extract a zip file"""
 
     if isinstance(src, zipfile.ZipFile):
         bundle = src
     else:
         try:
             bundle = zipfile.ZipFile(src)
-        except Exception, e:
+        except Exception:
             print "src: %s" % src
             raise
 
     namelist = bundle.namelist()
 
     for name in namelist:
         filename = os.path.realpath(os.path.join(dest, name))
         if name.endswith('/'):
@@ -113,17 +113,17 @@ def extract(src, dest=None):
         if index != -1:
             root = os.path.join(dest, name[:index])
             if root not in top_level_files:
                 top_level_files.append(root)
 
     return top_level_files
 
 
-### utilities for removal of files and directories
+# utilities for removal of files and directories
 
 def rmtree(dir):
     """Deprecated wrapper method to remove a directory tree.
 
     Ensure to update your code to use mozfile.remove() directly
 
     :param dir: directory to be removed
     """
@@ -174,42 +174,45 @@ def remove(path):
         os.chmod(path, path_stats.st_mode | stat.S_IRUSR | stat.S_IWUSR)
         _call_with_windows_retry(os.remove, path)
 
     elif os.path.isdir(path):
         # Verify the directory is read/write/execute for the current user
         os.chmod(path, path_stats.st_mode | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
         _call_with_windows_retry(shutil.rmtree, path)
 
+
 def depth(directory):
     """returns the integer depth of a directory or path relative to '/' """
 
     directory = os.path.abspath(directory)
     level = 0
     while True:
         directory, remainder = os.path.split(directory)
         level += 1
         if not remainder:
             break
     return level
 
+
 # ASCII delimeters
 ascii_delimeters = {
-    'vertical_line' : '|',
-    'item_marker'   : '+',
-    'last_child'    : '\\'
+    'vertical_line': '|',
+    'item_marker': '+',
+    'last_child': '\\'
     }
 
 # unicode delimiters
 unicode_delimeters = {
-    'vertical_line' : '│',
-    'item_marker'   : '├',
-    'last_child'    : '└'
+    'vertical_line': '│',
+    'item_marker': '├',
+    'last_child': '└'
     }
 
+
 def tree(directory,
          item_marker=unicode_delimeters['item_marker'],
          vertical_line=unicode_delimeters['vertical_line'],
          last_child=unicode_delimeters['last_child'],
          sort_key=lambda x: x.lower()):
     """
     display tree directory structure for `directory`
     """
@@ -225,18 +228,17 @@ def tree(directory,
         basename = os.path.basename(abspath)
         parent = os.path.dirname(abspath)
         level = depth(abspath) - top
 
         # sort articles of interest
         for resource in (dirnames, filenames):
             resource[:] = sorted(resource, key=sort_key)
 
-        files_end =  item_marker
-        dirpath_marker = item_marker
+        files_end = item_marker
 
         if level > len(indent):
             indent.append(vertical_line)
         indent = indent[:level]
 
         if dirnames:
             files_end = item_marker
             last[abspath] = dirnames[-1]
@@ -249,31 +251,29 @@ def tree(directory,
             indent[-1] = ' '
         elif not indent:
             dirpath_mark = ''
         else:
             dirpath_mark = item_marker
 
         # append the directory and piece of tree structure
         # if the top-level entry directory, print as passed
-        retval.append('%s%s%s'% (''.join(indent[:-1]),
-                                 dirpath_mark,
-                                 basename if retval else directory))
+        retval.append('%s%s%s' % (''.join(indent[:-1]),
+                      dirpath_mark, basename if retval else directory))
         # add the files
         if filenames:
             last_file = filenames[-1]
             retval.extend([('%s%s%s' % (''.join(indent),
-                                        files_end if filename == last_file else item_marker,
-                                        filename))
-                                        for index, filename in enumerate(filenames)])
+                          files_end if filename == last_file else item_marker, filename))
+                          for index, filename in enumerate(filenames)])
 
     return '\n'.join(retval)
 
 
-### utilities for temporary resources
+# utilities for temporary resources
 
 class NamedTemporaryFile(object):
     """
     Like tempfile.NamedTemporaryFile except it works on Windows
     in the case where you open the created file a second time.
 
     This behaves very similarly to tempfile.NamedTemporaryFile but may
     not behave exactly the same. For example, this function does not
@@ -335,38 +335,38 @@ def TemporaryDirectory():
     """
     tempdir = tempfile.mkdtemp()
     try:
         yield tempdir
     finally:
         shutil.rmtree(tempdir)
 
 
-### utilities dealing with URLs
+# utilities dealing with URLs
 
 def is_url(thing):
     """
     Return True if thing looks like a URL.
     """
 
     parsed = urlparse.urlparse(thing)
     if 'scheme' in parsed:
         return len(parsed.scheme) >= 2
     else:
         return len(parsed[0]) >= 2
 
+
 def load(resource):
     """
     open a file or URL for reading.  If the passed resource string is not a URL,
     or begins with 'file://', return a ``file``.  Otherwise, return the
     result of urllib2.urlopen()
     """
 
     # handle file URLs separately due to python stdlib limitations
     if resource.startswith('file://'):
         resource = resource[len('file://'):]
 
     if not is_url(resource):
         # if no scheme is given, it is a file path
         return file(resource)
 
     return urllib2.urlopen(resource)
-
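
One cleanup above replaces "except Exception, e:" with a bare "except Exception:", presumably because the bound name was unused; the comma form is also Python-2-only syntax. Where a handler genuinely needs the exception object, the portable spelling is "as" (a generic sketch, not code from the patch):

    import zipfile


    def open_bundle(src):
        try:
            return zipfile.ZipFile(src)
        except Exception as e:  # 'as' parses on Python 2.6+ and 3.x
            print("failed to open %s: %s" % (src, e))
            raise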
--- a/testing/mozharness/mozinfo/__init__.py
+++ b/testing/mozharness/mozinfo/__init__.py
@@ -46,11 +46,11 @@ Module variables:
 
    * :attr:`bits`
    * :attr:`os`
    * :attr:`processor`
    * :attr:`version`
 
 """
 
-import mozinfo
-from mozinfo import *
+import mozinfo  # noqa
+
 __all__ = mozinfo.__all__
--- a/testing/mozharness/mozinfo/mozinfo.py
+++ b/testing/mozharness/mozinfo/mozinfo.py
@@ -14,29 +14,34 @@ import platform
 import re
 import sys
 
 import mozfile
 
 # keep a copy of the os module since updating globals overrides this
 _os = os
 
+
 class unknown(object):
     """marker class for unknown information"""
+
     def __nonzero__(self):
         return False
+
     def __str__(self):
         return 'UNKNOWN'
-unknown = unknown() # singleton
+
+
+unknown = unknown()  # singleton
 
 # get system information
 info = {'os': unknown,
         'processor': unknown,
         'version': unknown,
-        'bits': unknown }
+        'bits': unknown}
 (system, node, release, version, machine, processor) = platform.uname()
 (bits, linkage) = platform.architecture()
 
 # get os information and related data
 if system in ["Microsoft", "Windows"]:
     info['os'] = 'win'
     # There is a Python bug on Windows to determine platform values
     # http://bugs.python.org/issue7860
@@ -61,52 +66,53 @@ elif system in ['DragonFly', 'FreeBSD', 
     version = sys.platform
 elif system == "Darwin":
     (release, versioninfo, machine) = platform.mac_ver()
     version = "OS X %s" % release
     info['os'] = 'mac'
 elif sys.platform in ('solaris', 'sunos5'):
     info['os'] = 'unix'
     version = sys.platform
-info['version'] = version # os version
+info['version'] = version  # os version
 
 # processor type and bits
 if processor in ["i386", "i686"]:
     if bits == "32bit":
         processor = "x86"
     elif bits == "64bit":
         processor = "x86_64"
 elif processor.upper() == "AMD64":
     bits = "64bit"
     processor = "x86_64"
 elif processor == "Power Macintosh":
     processor = "ppc"
 bits = re.search('(\d+)bit', bits).group(1)
 info.update({'processor': processor,
              'bits': int(bits),
-            })
+             })
 
 # standard value of choices, for easy inspection
 choices = {'os': ['linux', 'bsd', 'win', 'mac', 'unix'],
            'bits': [32, 64],
            'processor': ['x86', 'x86_64', 'ppc']}
 
 
 def sanitize(info):
     """Do some sanitization of input values, primarily
     to handle universal Mac builds."""
     if "processor" in info and info["processor"] == "universal-x86-x86_64":
         # If we're running on OS X 10.6 or newer, assume 64-bit
-        if release[:4] >= "10.6": # Note this is a string comparison
+        if release[:4] >= "10.6":  # Note this is a string comparison
             info["processor"] = "x86_64"
             info["bits"] = 64
         else:
             info["processor"] = "x86"
             info["bits"] = 32
 
+
 # method for updating information
 def update(new_info):
     """
     Update the info.
 
     :param new_info: Either a dict containing the new info or a path/url
                      to a json file containing the new info.
     """
@@ -119,19 +125,20 @@ def update(new_info):
     info.update(new_info)
     sanitize(info)
     globals().update(info)
 
     # convenience data for os access
     for os_name in choices['os']:
         globals()['is' + os_name.title()] = info['os'] == os_name
     # unix is special
-    if isLinux or isBsd:
+    if isLinux or isBsd:  # noqa
         globals()['isUnix'] = True
 
+
 def find_and_update_from_json(*dirs):
     """
     Find a mozinfo.json file, load it, and update the info with the
     contents.
 
     :param dirs: Directories in which to look for the file. They will be
                  searched after first looking in the root of the objdir
                  if the current script is being run from a Mozilla objdir.
@@ -153,30 +160,32 @@ def find_and_update_from_json(*dirs):
         d = _os.path.abspath(d)
         json_path = _os.path.join(d, "mozinfo.json")
         if _os.path.isfile(json_path):
             update(json_path)
             return json_path
 
     return None
 
+
 update({})
 
 # exports
 __all__ = info.keys()
 __all__ += ['is' + os_name.title() for os_name in choices['os']]
 __all__ += [
     'info',
     'unknown',
     'main',
     'choices',
     'update',
     'find_and_update_from_json',
     ]
 
+
 def main(args=None):
 
     # parse the command line
     from optparse import OptionParser
     parser = OptionParser(description=__doc__)
     for key in choices:
         parser.add_option('--%s' % key, dest=key,
                           action='store_true', default=False,
@@ -194,16 +203,18 @@ def main(args=None):
 
     # print out choices if requested
     flag = False
     for key, value in options.__dict__.items():
         if value is True:
             print '%s choices: %s' % (key, ' '.join([str(choice)
                                                      for choice in choices[key]]))
             flag = True
-    if flag: return
+    if flag:
+        return
 
     # otherwise, print out all info
     for key, value in info.items():
         print '%s: %s' % (key, value)
 
+
 if __name__ == '__main__':
     main()
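
The "# noqa" added to "if isLinux or isBsd:" above is needed because those names never appear as assignments in the source: update() injects them into the module's globals at runtime, so pyflakes reports them as undefined (F821). A minimal illustration of the pattern, using hypothetical names rather than the mozinfo ones:

    def update(new_info):
        # The names become module globals only at runtime,
        # which static analysis cannot see.
        globals().update(new_info)


    update({'isLinux': True, 'isBsd': False})
    print(isLinux or isBsd)  # noqa -- defined dynamically by update() above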
--- a/testing/mozharness/scripts/android_emulator_unittest.py
+++ b/testing/mozharness/scripts/android_emulator_unittest.py
@@ -7,17 +7,16 @@
 
 import copy
 import datetime
 import glob
 import os
 import re
 import sys
 import signal
-import socket
 import subprocess
 import time
 import tempfile
 
 # load modules from parent dir
 sys.path.insert(1, os.path.dirname(sys.path[0]))
 
 from mozprocess import ProcessHandler
@@ -27,44 +26,45 @@ from mozharness.base.script import BaseS
 from mozharness.base.vcs.vcsbase import VCSMixin
 from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
 from mozharness.mozilla.buildbot import TBPL_RETRY, EXIT_STATUS_DICT
 from mozharness.mozilla.mozbase import MozbaseMixin
 from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
 from mozharness.mozilla.testing.unittest import EmulatorMixin
 
 
-class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin, BaseScript, MozbaseMixin):
+class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin, BaseScript,
+                          MozbaseMixin):
     config_options = [[
         ["--test-suite"],
         {"action": "store",
          "dest": "test_suite",
          "default": None
-        }
+         }
     ], [
         ["--adb-path"],
         {"action": "store",
          "dest": "adb_path",
          "default": None,
          "help": "Path to adb",
-        }
+         }
     ], [
         ["--total-chunk"],
         {"action": "store",
          "dest": "total_chunks",
          "default": None,
          "help": "Number of total chunks",
-        }
+         }
     ], [
         ["--this-chunk"],
         {"action": "store",
          "dest": "this_chunk",
          "default": None,
          "help": "Number of this chunk",
-        }
+         }
     ]] + copy.deepcopy(testing_config_options) + \
         copy.deepcopy(blobupload_config_options)
 
     error_list = [
     ]
 
     virtualenv_requirements = [
     ]
@@ -81,25 +81,25 @@ class AndroidEmulatorTest(BlobUploadMixi
                          'read-buildbot-config',
                          'setup-avds',
                          'start-emulator',
                          'download-and-extract',
                          'create-virtualenv',
                          'verify-emulator',
                          'install',
                          'run-tests',
-                        ],
+                         ],
             default_actions=['clobber',
                              'start-emulator',
                              'download-and-extract',
                              'create-virtualenv',
                              'verify-emulator',
                              'install',
                              'run-tests',
-                            ],
+                             ],
             require_config_file=require_config_file,
             config={
                 'virtualenv_modules': self.virtualenv_modules,
                 'virtualenv_requirements': self.virtualenv_requirements,
                 'require_test_zip': True,
                 # IP address of the host as seen from the emulator
                 'remote_webserver': '10.0.2.2',
             }
@@ -173,21 +173,21 @@ class AndroidEmulatorTest(BlobUploadMixi
 
     @PreScriptAction('create-virtualenv')
     def _pre_create_virtualenv(self, action):
         dirs = self.query_abs_dirs()
         requirements = None
         if self.test_suite == 'mochitest-media':
             # mochitest-media is the only thing that needs this
             requirements = os.path.join(dirs['abs_mochitest_dir'],
-                        'websocketprocessbridge',
-                        'websocketprocessbridge_requirements.txt')
+                                        'websocketprocessbridge',
+                                        'websocketprocessbridge_requirements.txt')
         elif self.test_suite == 'marionette':
             requirements = os.path.join(dirs['abs_test_install_dir'],
-                                    'config', 'marionette_requirements.txt')
+                                        'config', 'marionette_requirements.txt')
         if requirements:
             self.register_virtualenv_module(requirements=[requirements],
                                             two_pass=True)
 
     def _launch_emulator(self):
         env = self.query_env()
 
         # Set $LD_LIBRARY_PATH to self.dirs['abs_work_dir'] so that
@@ -221,33 +221,34 @@ class AndroidEmulatorTest(BlobUploadMixi
         self.info("Created temp file %s." % tmp_file.name)
         self.info("Trying to start the emulator with this command: %s" % ' '.join(command))
         proc = subprocess.Popen(command, stdout=tmp_stdout, stderr=tmp_stdout, env=env)
         return {
             "process": proc,
             "tmp_file": tmp_file,
         }
 
-    def _retry(self, max_attempts, interval, func, description, max_time = 0):
+    def _retry(self, max_attempts, interval, func, description, max_time=0):
         '''
         Execute func until it returns True, up to max_attempts times, waiting for
         interval seconds between each attempt. description is logged on each attempt.
         If max_time is specified, no further attempts will be made once max_time
         seconds have elapsed; this provides some protection for the case where
         the run-time for func is long or highly variable.
         '''
         status = False
         attempts = 0
         if max_time > 0:
-            end_time = datetime.datetime.now() + datetime.timedelta(seconds = max_time)
+            end_time = datetime.datetime.now() + datetime.timedelta(seconds=max_time)
         else:
             end_time = None
         while attempts < max_attempts and not status:
             if (end_time is not None) and (datetime.datetime.now() > end_time):
-                self.info("Maximum retry run-time of %d seconds exceeded; remaining attempts abandoned" % max_time)
+                self.info("Maximum retry run-time of %d seconds exceeded; "
+                          "remaining attempts abandoned" % max_time)
                 break
             if attempts != 0:
                 self.info("Sleeping %d seconds" % interval)
                 time.sleep(interval)
             attempts += 1
             self.info(">> %s: Attempt #%d of %d" % (description, attempts, max_attempts))
             status = func()
         return status
@@ -285,21 +286,23 @@ class AndroidEmulatorTest(BlobUploadMixi
             return True
         return False
 
     def _verify_emulator(self):
         adb_ok = self._verify_adb()
         if not adb_ok:
             self.warning('Unable to communicate with adb')
             return False
-        adb_device_ok = self._retry(4, 30, self._verify_adb_device, "Verify emulator visible to adb")
+        adb_device_ok = self._retry(4, 30, self._verify_adb_device,
+                                    "Verify emulator visible to adb")
         if not adb_device_ok:
             self.warning('Unable to communicate with emulator via adb')
             return False
-        boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed", max_time = 330)
+        boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed",
+                              max_time=330)
         if not boot_ok:
             self.warning('Unable to verify Android boot completion')
             return False
         return True
 
     def _verify_emulator_and_restart_on_fail(self):
         emulator_ok = self._verify_emulator()
         if not emulator_ok:
@@ -314,30 +317,34 @@ class AndroidEmulatorTest(BlobUploadMixi
             self._restart_adbd()
             time.sleep(5)
             self.emulator_proc = self._launch_emulator()
         return emulator_ok
 
     def _install_fennec_apk(self):
         install_ok = False
         if int(self.sdk_level) >= 23:
-            cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.installer_path]
+            cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g',
+                   self.installer_path]
         else:
-            cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.installer_path]
+            cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r',
+                   self.installer_path]
         out = self._run_with_timeout(300, cmd, True)
         if 'Success' in out:
             install_ok = True
         return install_ok
 
     def _install_robocop_apk(self):
         install_ok = False
         if int(self.sdk_level) >= 23:
-            cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.robocop_path]
+            cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g',
+                   self.robocop_path]
         else:
-            cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.robocop_path]
+            cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r',
+                   self.robocop_path]
         out = self._run_with_timeout(300, cmd, True)
         if 'Success' in out:
             install_ok = True
         return install_ok
 
     def _dump_host_state(self):
         self._run_proc(['ps', '-ef'])
         self._run_proc(['netstat', '-a', '-p', '-n', '-t', '-u'])
@@ -369,17 +376,17 @@ class AndroidEmulatorTest(BlobUploadMixi
         """
         dirs = self.query_abs_dirs()
         utility = os.path.join(self.xre_path, "screentopng")
         if not os.path.exists(utility):
             self.warning("Unable to take screenshot: %s does not exist" % utility)
             return
         try:
             tmpfd, filename = tempfile.mkstemp(prefix=prefix, suffix='.png',
-                dir=dirs['abs_blob_upload_dir'])
+                                               dir=dirs['abs_blob_upload_dir'])
             os.close(tmpfd)
             self.info("Taking screenshot with %s; saving to %s" % (utility, filename))
             subprocess.call([utility, filename], env=self.query_env())
         except OSError, err:
             self.warning("Failed to take screenshot: %s" % err.strerror)
 
     def _query_package_name(self):
         if self.app_name is None:
@@ -450,28 +457,28 @@ class AndroidEmulatorTest(BlobUploadMixi
             opt = option.split('=')[0]
             # override configured chunk options with script args, if specified
             if opt == '--this-chunk' and self.this_chunk is not None:
                 continue
             if opt == '--total-chunks' and self.total_chunks is not None:
                 continue
             if '%(app)' in option:
                 # only query package name if requested
-                cmd.extend([option % {'app' : self._query_package_name()}])
+                cmd.extend([option % {'app': self._query_package_name()}])
             else:
                 cmd.extend([option % str_format_values])
 
         if self.this_chunk is not None:
             cmd.extend(['--this-chunk', self.this_chunk])
         if self.total_chunks is not None:
             cmd.extend(['--total-chunks', self.total_chunks])
 
         try_options, try_tests = self.try_args(self.test_suite)
         cmd.extend(try_options)
-        if self.config.get('verify') != True:
+        if self.config.get('verify') is not True:
             cmd.extend(self.query_tests_args(
                 self.config["suite_definitions"][self.test_suite].get("tests"),
                 None,
                 try_tests))
 
         return cmd
 
     def _get_repo_url(self, path):
@@ -489,17 +496,18 @@ class AndroidEmulatorTest(BlobUploadMixi
         elif self.buildbot_config and 'properties' in self.buildbot_config:
             # probably buildbot
             repo = 'https://hg.mozilla.org/%s' % self.buildbot_config['properties']['repo_path']
             revision = self.buildbot_config['properties']['revision']
         else:
             # something unexpected!
             repo = 'https://hg.mozilla.org/mozilla-central'
             revision = 'default'
-            self.warning('Unable to find repo/revision for manifest; using mozilla-central/default')
+            self.warning('Unable to find repo/revision for manifest; '
+                         'using mozilla-central/default')
         url = '%s/raw-file/%s/%s' % (
             repo,
             revision,
             path)
         return url
 
     def _tooltool_fetch(self, url, dir):
         c = self.config
@@ -514,17 +522,17 @@ class AndroidEmulatorTest(BlobUploadMixi
             self.fatal("Could not retrieve manifest needed to retrieve "
                        "artifacts from %s" % manifest_path)
 
         self.tooltool_fetch(manifest_path,
                             output_dir=dir,
                             cache=c.get("tooltool_cache", None))
 
     ##########################################
-    ### Actions for AndroidEmulatorTest ###
+    # Actions for AndroidEmulatorTest        #
     ##########################################
     def setup_avds(self):
         '''
         If tooltool cache mechanism is enabled, the cached version is used by
         the fetch command. If the manifest includes an "unpack" field, tooltool
         will unpack all compressed archives mentioned in the manifest.
         '''
         c = self.config
@@ -556,17 +564,18 @@ class AndroidEmulatorTest(BlobUploadMixi
             proc = ProcessHandler(cmd)
             proc.run()
             proc.wait()
 
     def start_emulator(self):
         '''
         Starts the emulator
         '''
-        if 'emulator_url' in self.config or 'emulator_manifest' in self.config or 'tools_manifest' in self.config:
+        if 'emulator_url' in self.config or 'emulator_manifest' in self.config or \
+           'tools_manifest' in self.config:
             self.install_emulator()
 
         if not os.path.isfile(self.adb_path):
             self.fatal("The adb binary '%s' is not a valid file!" % self.adb_path)
         self._restart_adbd()
 
         if not self.config.get("developer_mode"):
             # We kill compiz because it sometimes prevents us from starting the emulator
@@ -613,63 +622,65 @@ class AndroidEmulatorTest(BlobUploadMixi
             f.write(out)
 
             f.write('\n\nHost process list:\n')
             out = self._run_proc(['ps', '-ef'], quiet=True)
             f.write(out)
 
             f.write('\n\nEmulator /proc/cpuinfo:\n')
             cmd = [self.adb_path, '-s', self.emulator['device_id'],
-                    'shell', 'cat', '/proc/cpuinfo']
+                   'shell', 'cat', '/proc/cpuinfo']
             out = self._run_with_timeout(30, cmd, quiet=True)
             f.write(out)
 
             f.write('\n\nEmulator /proc/meminfo:\n')
             cmd = [self.adb_path, '-s', self.emulator['device_id'],
-                    'shell', 'cat', '/proc/meminfo']
+                   'shell', 'cat', '/proc/meminfo']
             out = self._run_with_timeout(30, cmd, quiet=True)
             f.write(out)
 
             f.write('\n\nEmulator process list:\n')
             cmd = [self.adb_path, '-s', self.emulator['device_id'],
-                    'shell', 'ps']
+                   'shell', 'ps']
             out = self._run_with_timeout(30, cmd, quiet=True)
             f.write(out)
 
     def verify_emulator(self):
         '''
         Check to see if the emulator can be contacted via adb.
         If any communication attempt fails, kill the emulator, re-launch, and re-check.
         '''
         self.mkdir_p(self.query_abs_dirs()['abs_blob_upload_dir'])
         max_restarts = 5
-        emulator_ok = self._retry(max_restarts, 10, self._verify_emulator_and_restart_on_fail, "Check emulator")
+        emulator_ok = self._retry(max_restarts, 10, self._verify_emulator_and_restart_on_fail,
+                                  "Check emulator")
         if not emulator_ok:
             self.fatal('INFRA-ERROR: Unable to start emulator after %d attempts' % max_restarts,
-                EXIT_STATUS_DICT[TBPL_RETRY])
+                       EXIT_STATUS_DICT[TBPL_RETRY])
         self._dump_perf_info()
         # Start logcat for the emulator. The adb process runs until the
         # corresponding emulator is killed. Output is written directly to
         # the blobber upload directory so that it is uploaded automatically
         # at the end of the job.
         logcat_filename = 'logcat-%s.log' % self.emulator["device_id"]
         logcat_path = os.path.join(self.abs_dirs['abs_blob_upload_dir'], logcat_filename)
-        logcat_cmd = '%s -s %s logcat -v threadtime Trace:S StrictMode:S ExchangeService:S > %s &' % \
-            (self.adb_path, self.emulator["device_id"], logcat_path)
+        logcat_cmd = '%s -s %s logcat -v threadtime Trace:S StrictMode:S '\
+            ' ExchangeService:S > %s &' % (self.adb_path, self.emulator["device_id"], logcat_path)
         self.info(logcat_cmd)
         os.system(logcat_cmd)
         # Get a post-boot emulator process list for diagnostics
         ps_cmd = [self.adb_path, '-s', self.emulator["device_id"], 'shell', 'ps']
         self._run_with_timeout(30, ps_cmd)
 
     def download_and_extract(self):
         """
         Download and extract fennec APK, tests.zip, host utils, and robocop (if required).
         """
-        super(AndroidEmulatorTest, self).download_and_extract(suite_categories=self._query_suite_categories())
+        super(AndroidEmulatorTest, self).download_and_extract(
+            suite_categories=self._query_suite_categories())
         dirs = self.query_abs_dirs()
         if self.test_suite and self.test_suite.startswith('robocop'):
             robocop_url = self.installer_url[:self.installer_url.rfind('/')] + '/robocop.apk'
             self.info("Downloading robocop...")
             self.download_file(robocop_url, 'robocop.apk', dirs['abs_work_dir'], error_level=FATAL)
         self.rmtree(dirs['abs_xre_dir'])
         self.mkdir_p(dirs['abs_xre_dir'])
         if self.config["hostutils_manifest_path"]:
@@ -682,39 +693,43 @@ class AndroidEmulatorTest(BlobUploadMixi
                 self.fatal("xre path not found in %s" % dirs['abs_xre_dir'])
         else:
             self.fatal("configure hostutils_manifest_path!")
 
     def install(self):
         """
         Install APKs on the emulator
         """
-        install_needed = (not self.test_suite) or self.config["suite_definitions"][self.test_suite].get("install")
-        if install_needed == False:
+        install_needed = (not self.test_suite) or \
+            self.config["suite_definitions"][self.test_suite].get("install")
+        if install_needed is False:
             self.info("Skipping apk installation for %s" % self.test_suite)
             return
 
         assert self.installer_path is not None, \
             "Either add installer_path to the config or use --installer-path."
 
-        self.sdk_level = self._run_with_timeout(30, [self.adb_path, '-s', self.emulator['device_id'],
-            'shell', 'getprop', 'ro.build.version.sdk'])
+        self.sdk_level = self._run_with_timeout(30, [self.adb_path, '-s',
+                                                     self.emulator['device_id'],
+                                                'shell', 'getprop', 'ro.build.version.sdk'])
 
         # Install Fennec
         install_ok = self._retry(3, 30, self._install_fennec_apk, "Install app APK")
         if not install_ok:
             self.fatal('INFRA-ERROR: Failed to install %s on %s' %
-                (self.installer_path, self.emulator["name"]), EXIT_STATUS_DICT[TBPL_RETRY])
+                       (self.installer_path, self.emulator["name"]),
+                       EXIT_STATUS_DICT[TBPL_RETRY])
 
         # Install Robocop if required
         if self.test_suite and self.test_suite.startswith('robocop'):
             install_ok = self._retry(3, 30, self._install_robocop_apk, "Install Robocop APK")
             if not install_ok:
                 self.fatal('INFRA-ERROR: Failed to install %s on %s' %
-                    (self.robocop_path, self.emulator["name"]), EXIT_STATUS_DICT[TBPL_RETRY])
+                           (self.robocop_path, self.emulator["name"]),
+                           EXIT_STATUS_DICT[TBPL_RETRY])
 
         self.info("Finished installing apps for %s" % self.emulator["name"])
 
     def _query_suites(self):
         if self.test_suite:
             return [(self.test_suite, self.test_suite)]
         # test-verification: determine test suites to be verified
         all = [('mochitest', {'plain': 'mochitest',
@@ -768,17 +783,18 @@ class AndroidEmulatorTest(BlobUploadMixi
             env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
             env['RUST_BACKTRACE'] = 'full'
 
             for verify_args in self.query_verify_args(verify_suite):
                 if (datetime.datetime.now() - self.start_time) > max_verify_time:
                     # Verification has run out of time. That is okay! Stop running
                     # tests so that a task timeout is not triggered, and so that
                     # (partial) results are made available in a timely manner.
-                    self.info("TinderboxPrint: Verification too long: Not all tests were verified.<br/>")
+                    self.info("TinderboxPrint: Verification too long: "
+                              "Not all tests were verified.<br/>")
                     # Signal verify time exceeded, to break out of suites and
                     # suite categories loops also.
                     return False
 
                 final_cmd = copy.copy(cmd)
                 if len(verify_args) > 0:
                     # in verify mode, remove any chunk arguments from command
                     for arg in final_cmd:
@@ -810,17 +826,16 @@ class AndroidEmulatorTest(BlobUploadMixi
                     self._dump_emulator_log()
                     self.buildbot_status(tbpl_status, level=log_level)
                     self.log("The %s suite: %s ran with return status: %s" %
                              (suite_category, suite, tbpl_status), level=log_level)
 
         if len(verify_args) > 0:
             self._dump_emulator_log()
 
-
     @PostScriptAction('run-tests')
     def stop_emulator(self, action, success=None):
         '''
         Report emulator health, then make sure that the emulator has been stopped
         '''
         self._verify_emulator()
         self._kill_processes(self.config["emulator_process_name"])
 
@@ -832,11 +847,12 @@ class AndroidEmulatorTest(BlobUploadMixi
         '''
         if self.config.get('blob_upload_branch'):
             # Except on interactive workers, we want the emulator to keep running
             # after the script is finished. So only kill it if blobber would otherwise
             # have run anyway (it doesn't get run on interactive workers).
             self._kill_processes(self.config["emulator_process_name"])
         super(AndroidEmulatorTest, self).upload_blobber_files()
 
+
 if __name__ == '__main__':
     emulatorTest = AndroidEmulatorTest()
     emulatorTest.run_and_exit()
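
Most of the long-line (E501) fixes in this file split message strings across continuation lines using implicit concatenation of adjacent literals, rather than '+' or backslash continuations. A quick sketch of the idiom (illustrative; the line-length limit is whatever tools/lint/flake8.yml specifies):

    max_time = 330
    message = ("Maximum retry run-time of %d seconds exceeded; "
               "remaining attempts abandoned" % max_time)
    print(message)

Adjacent string literals inside the parentheses are joined at compile time, so the split costs nothing at runtime.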
--- a/testing/mozharness/scripts/awsy_script.py
+++ b/testing/mozharness/scripts/awsy_script.py
@@ -24,42 +24,43 @@ from mozharness.base.vcs.vcsbase import 
 from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
 from mozharness.mozilla.tooltool import TooltoolMixin
 from mozharness.mozilla.structuredlog import StructuredOutputParser
 from mozharness.mozilla.testing.codecoverage import (
     CodeCoverageMixin,
     code_coverage_config_options
 )
 
+
 class AWSY(TestingMixin, MercurialScript, BlobUploadMixin, TooltoolMixin, CodeCoverageMixin):
     config_options = [
         [["--e10s"],
-        {"action": "store_true",
-         "dest": "e10s",
-         "default": False,
-         "help": "Run tests with multiple processes. (Desktop builds only)",
-         }],
+         {"action": "store_true",
+          "dest": "e10s",
+          "default": False,
+          "help": "Run tests with multiple processes. (Desktop builds only)",
+          }],
         [["--enable-stylo"],
-        {"action": "store_true",
-         "dest": "enable_stylo",
-         "default": False,
-         "help": "Run tests with Stylo enabled.",
-         }],
+         {"action": "store_true",
+          "dest": "enable_stylo",
+          "default": False,
+          "help": "Run tests with Stylo enabled.",
+          }],
         [["--disable-stylo"],
-        {"action": "store_true",
-         "dest": "disable_stylo",
-         "default": False,
-         "help": "Run tests with Stylo disabled.",
-         }],
+         {"action": "store_true",
+          "dest": "disable_stylo",
+          "default": False,
+          "help": "Run tests with Stylo disabled.",
+          }],
         [["--single-stylo-traversal"],
-        {"action": "store_true",
-         "dest": "single_stylo_traversal",
-         "default": False,
-         "help": "Set STYLO_THREADS=1.",
-         }]
+         {"action": "store_true",
+          "dest": "single_stylo_traversal",
+          "default": False,
+          "help": "Set STYLO_THREADS=1.",
+          }]
     ] + testing_config_options + copy.deepcopy(blobupload_config_options) \
                                + copy.deepcopy(code_coverage_config_options)
 
     error_list = [
         {'regex': re.compile(r'''(TEST-UNEXPECTED|PROCESS-CRASH)'''), 'level': ERROR},
     ]
 
     def __init__(self, **kwargs):
@@ -116,17 +117,16 @@ class AWSY(TestingMixin, MercurialScript
                                            'marionette_requirements.txt')]
 
         for requirements_file in requirements_files:
             self.register_virtualenv_module(requirements=[requirements_file],
                                             two_pass=True)
 
         self.register_virtualenv_module('awsy', self.awsy_path)
 
-
     def populate_webroot(self):
         """Populate the production test slaves' webroots"""
         self.info("Downloading pageset with tooltool...")
         manifest_file = os.path.join(self.awsy_path, 'tp5n-pageset.manifest')
         page_load_test_dir = os.path.join(self.webroot_dir, 'page_load_test')
         if not os.path.isdir(page_load_test_dir):
             self.mkdir_p(page_load_test_dir)
         self.tooltool_fetch(
@@ -135,17 +135,16 @@ class AWSY(TestingMixin, MercurialScript
             cache=self.config.get('tooltool_cache')
         )
         archive = os.path.join(page_load_test_dir, 'tp5n.zip')
         unzip = self.query_exe('unzip')
         unzip_cmd = [unzip, '-q', '-o', archive, '-d', page_load_test_dir]
         self.run_command(unzip_cmd, halt_on_failure=True)
         self.run_command("ls %s" % page_load_test_dir)
 
-
     def run_tests(self, args=None, **kw):
         '''
         AWSY test should be implemented here
         '''
         dirs = self.abs_dirs
         env = {}
         error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
                                           'marionette_errorsummary.log')
--- a/testing/mozharness/scripts/bouncer_submitter.py
+++ b/testing/mozharness/scripts/bouncer_submitter.py
@@ -58,36 +58,40 @@ class BouncerSubmitter(BaseScript, Purge
                                 'submit',
                             ],
                             default_actions=[
                                 'clobber',
                                 'download-shipped-locales',
                                 'submit',
                             ],
                             config={
-                                 'buildbot_json_path' : 'buildprops.json'
+                                 'buildbot_json_path': 'buildprops.json'
                             }
                             )
         self.locales = None
         self.credentials = None
 
     def _pre_config_lock(self, rw_config):
         super(BouncerSubmitter, self)._pre_config_lock(rw_config)
 
-        #override properties from buildbot properties here as defined by taskcluster properties
+        # override properties from buildbot properties here as defined by taskcluster properties
         self.read_buildbot_config()
 
-        #check if release promotion is true first before overwriting these properties
+        # check if release promotion is true first before overwriting these properties
         if self.buildbot_config["properties"].get("release_promotion"):
-            for prop in ['product', 'version', 'build_number', 'revision', 'bouncer_submitter_config', ]:
+            for prop in \
+                    ['product', 'version', 'build_number', 'revision',
+                     'bouncer_submitter_config', ]:
                 if self.buildbot_config["properties"].get(prop):
-                    self.info("Overriding %s with %s" % (prop,  self.buildbot_config["properties"].get(prop)))
+                    self.info("Overriding %s with %s" %
+                              (prop,  self.buildbot_config["properties"].get(prop)))
                     self.config[prop] = self.buildbot_config["properties"].get(prop)
             if self.buildbot_config["properties"].get("partial_versions"):
-                self.config["prev_versions"] = self.buildbot_config["properties"].get("partial_versions").split(", ")
+                self.config["prev_versions"] = \
+                    self.buildbot_config["properties"].get("partial_versions").split(", ")
 
         for opt in ["version", "credentials_file", "bouncer-api-prefix"]:
             if opt not in self.config:
                 self.fatal("%s must be specified" % opt)
         if self.need_shipped_locales():
             for opt in ["shipped-locales-url", "repo", "revision"]:
                 if opt not in self.config:
                     self.fatal("%s must be specified" % opt)
--- a/testing/mozharness/scripts/configtest.py
+++ b/testing/mozharness/scripts/configtest.py
@@ -19,38 +19,39 @@ try:
     import simplejson as json
 except ImportError:
     import json
 
 sys.path.insert(1, os.path.dirname(sys.path[0]))
 
 from mozharness.base.script import BaseScript
 
+
 # ConfigTest {{{1
 class ConfigTest(BaseScript):
     config_options = [[
-     ["--test-file",],
+     ["--test-file", ],
      {"action": "extend",
       "dest": "test_files",
       "help": "Specify which config files to test"
-     }
+      }
     ]]
 
     def __init__(self, require_config_file=False):
         self.config_files = []
         BaseScript.__init__(self, config_options=self.config_options,
                             all_actions=['list-config-files',
                                          'test-json-configs',
                                          'test-python-configs',
                                          'summary',
                                          ],
                             default_actions=['test-json-configs',
                                              'test-python-configs',
                                              'summary',
-                                            ],
+                                             ],
                             require_config_file=require_config_file)
 
     def query_config_files(self):
         """This query method, much like others, caches its runtime
         settings in self.VAR so we don't have to figure out config_files
         multiple times.
         """
         if self.config_files:
@@ -123,20 +124,22 @@ class ConfigTest(BaseScript):
                     self.add_summary("%s is invalid python." % config_file,
                                      level="error")
                     self.error(pprint.pformat(sys.exc_info()[1]))
                 else:
                     if 'config' in local_dict and isinstance(local_dict['config'], dict):
                         self.info("Good.")
                         filecount[1] += 1
                     else:
-                        self.add_summary("%s is valid python, but doesn't create a config dictionary." %
+                        self.add_summary("%s is valid python, "
+                                         "but doesn't create a config dictionary." %
                                          config_file, level="error")
         if filecount[0]:
             self.add_summary("%d of %d python config files were good." %
                              (filecount[1], filecount[0]))
         else:
             self.add_summary("No python config files to test.")
 
+
 # __main__ {{{1
 if __name__ == '__main__':
     config_test = ConfigTest()
     config_test.run_and_exit()
--- a/testing/mozharness/scripts/desktop_l10n.py
+++ b/testing/mozharness/scripts/desktop_l10n.py
@@ -29,17 +29,16 @@ from mozharness.mozilla.building.buildba
 from mozharness.mozilla.l10n.locales import LocalesMixin
 from mozharness.mozilla.mar import MarMixin
 from mozharness.mozilla.mock import MockMixin
 from mozharness.mozilla.release import ReleaseMixin
 from mozharness.mozilla.signing import SigningMixin
 from mozharness.mozilla.updates.balrog import BalrogMixin
 from mozharness.mozilla.taskcluster_helper import Taskcluster
 from mozharness.base.python import VirtualenvMixin
-from mozharness.mozilla.mock import ERROR_MSGS
 
 try:
     import simplejson as json
     assert json
 except ImportError:
     import json
 
 
@@ -153,19 +152,19 @@ class DesktopSingleLocale(LocalesMixin, 
     ], [
         ['--en-us-installer-url', ],
         {"action": "store",
          "dest": "en_us_installer_url",
          "type": "string",
          "help": "Specify the url of the en-us binary"}
     ], [
         ["--disable-mock"], {
-        "dest": "disable_mock",
-        "action": "store_true",
-        "help": "do not run under mock despite what gecko-config says"}
+         "dest": "disable_mock",
+         "action": "store_true",
+         "help": "do not run under mock despite what gecko-config says"}
     ]]
 
     def __init__(self, require_config_file=True):
         # fxbuild style:
         buildscript_kwargs = {
             'all_actions': [
                 "clobber",
                 "pull",
@@ -479,17 +478,17 @@ class DesktopSingleLocale(LocalesMixin, 
         self.read_buildbot_config()
         config = self.config
         revision = None
         if config.get("revision"):
             revision = config["revision"]
         elif 'revision' in self.buildbot_properties:
             revision = self.buildbot_properties['revision']
         elif (self.buildbot_config and
-                  self.buildbot_config.get('sourcestamp', {}).get('revision')):
+              self.buildbot_config.get('sourcestamp', {}).get('revision')):
             revision = self.buildbot_config['sourcestamp']['revision']
         elif self.buildbot_config and self.buildbot_config.get('revision'):
             revision = self.buildbot_config['revision']
         elif config.get("update_gecko_source_to_enUS", True):
             revision = self._query_enUS_revision()
 
         if not revision:
             self.fatal("Can't determine revision!")
@@ -613,17 +612,18 @@ class DesktopSingleLocale(LocalesMixin, 
             current_repo = {}
             for key, value in repository.iteritems():
                 try:
                     current_repo[key] = value % replace_dict
                 except TypeError:
                     # pass through non-interpolables, like booleans
                     current_repo[key] = value
                 except KeyError:
-                    self.error('not all the values in "{0}" can be replaced. Check your configuration'.format(value))
+                    self.error('not all the values in "{0}" can be replaced. Check your '
+                               'configuration'.format(value))
                     raise
             repos.append(current_repo)
         self.info("repositories: %s" % repos)
         self.vcs_checkout_repos(repos, parent_dir=dirs['abs_work_dir'],
                                 tag_override=config.get('tag_override'))
 
     def clone_locales(self):
         self.pull_locale_source()
@@ -814,17 +814,17 @@ class DesktopSingleLocale(LocalesMixin, 
                        glob.glob(os.path.join(upload_target, 'setup-stub.exe')))
             targets_exts = ["tar.bz2", "dmg", "langpack.xpi",
                             "complete.mar", "checksums", "zip",
                             "installer.exe", "installer-stub.exe"]
             targets = ["target.%s" % ext for ext in targets_exts]
             targets.extend(['setup.exe', 'setup-stub.exe'])
             for f in matches:
                 target_file = next(target_file for target_file in targets
-                                    if f.endswith(target_file[6:]))
+                                   if f.endswith(target_file[6:]))
                 if target_file:
                     # Remove from list of available options for this locale
                     targets.remove(target_file)
                 else:
                     # wasn't valid (or already matched)
                     raise RuntimeError("Unexpected matching file name encountered: %s"
                                        % f)
                 self.move(os.path.join(f),
@@ -966,17 +966,18 @@ class DesktopSingleLocale(LocalesMixin, 
         c_marfile = self._query_complete_mar_filename(locale)
         c_mar_url = self._query_complete_mar_url(locale)
 
         # Set other necessary properties for Balrog submission. None need to
         # be passed back to buildbot, so we won't write them to the properties
         # files
         # Locale is hardcoded to en-US, for silly reasons
         # The Balrog submitter translates this platform into a build target
-        # via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
+        # via
+        # https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
         self.set_buildbot_property("completeMarSize", self.query_filesize(c_marfile))
         self.set_buildbot_property("completeMarHash", self.query_sha512sum(c_marfile))
         self.set_buildbot_property("completeMarUrl", c_mar_url)
         self.set_buildbot_property("locale", locale)
         if "partialInfo" in self.package_urls[locale]:
             self.set_buildbot_property("partialInfo",
                                        self.package_urls[locale]["partialInfo"])
         ret = FAILURE
@@ -1092,17 +1093,18 @@ class DesktopSingleLocale(LocalesMixin, 
         """Set buildbot properties required to trigger funsize tasks
          responsible to generate partial updates for successfully generated locales"""
         locales = self.query_locales()
         funsize_info = {
             'locales': locales,
             'branch': self.config['branch'],
             'appName': self.config['appName'],
             'platform': self.config['platform'],
-            'completeMarUrls':  {locale: self._query_complete_mar_url(locale) for locale in locales},
+            'completeMarUrls': {locale: self._query_complete_mar_url(locale)
+                                for locale in locales},
         }
         self.info('funsize info: %s' % funsize_info)
         self.set_buildbot_property('funsize_info', json.dumps(funsize_info),
                                    write_to_file=True)
 
     def taskcluster_upload(self):
         auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
         credentials = {}
@@ -1167,17 +1169,17 @@ class DesktopSingleLocale(LocalesMixin, 
                     'build_type': self.query_build_type(),
                     'locale': locale,
                 }
                 fmt.update(self.buildid_to_dict(self._query_buildid()))
                 routes.append(template.format(**fmt))
 
             self.info('Using routes: %s' % routes)
             tc = Taskcluster(branch,
-                             pushinfo.pushdate, # Use pushdate as the rank
+                             pushinfo.pushdate,  # Use pushdate as the rank
                              client_id,
                              access_token,
                              self.log_obj,
                              )
             task = tc.create_task(routes)
             tc.claim_task(task)
 
             for upload_file in files:
--- a/testing/mozharness/scripts/desktop_partner_repacks.py
+++ b/testing/mozharness/scripts/desktop_partner_repacks.py
@@ -93,27 +93,28 @@ class DesktopPartnerRepacks(ReleaseMixin
         #
 
         BaseScript.__init__(
             self,
             config_options=self.config_options,
             **buildscript_kwargs
         )
 
-
     def _pre_config_lock(self, rw_config):
         self.read_buildbot_config()
         if not self.buildbot_config:
             self.warning("Skipping buildbot properties overrides")
         else:
             if self.config.get('require_buildprops', False) is True:
                 if not self.buildbot_config:
-                    self.fatal("Unable to load properties from file: %s" % self.config.get('buildbot_json_path'))
+                    self.fatal("Unable to load properties from file: %s" %
+                               self.config.get('buildbot_json_path'))
             props = self.buildbot_config["properties"]
-            for prop in ['version', 'build_number', 'revision', 'repo_file', 'repack_manifests_url', 'partner']:
+            for prop in ['version', 'build_number', 'revision', 'repo_file',
+                         'repack_manifests_url', 'partner']:
                 if props.get(prop):
                     self.info("Overriding %s with %s" % (prop, props[prop]))
                     self.config[prop] = props.get(prop)
 
         if 'version' not in self.config:
             self.fatal("Version (-v) not supplied.")
         if 'build_number' not in self.config:
             self.fatal("Build number (-n) not supplied.")
@@ -186,12 +187,13 @@ class DesktopPartnerRepacks(ReleaseMixin
         if self.config.get('hgrepo'):
             repack_cmd.extend(["--repo", self.config['hgrepo']])
         if self.config.get('revision'):
             repack_cmd.extend(["--tag", self.config["revision"]])
 
         return self.run_command(repack_cmd,
                                 cwd=self.query_abs_dirs()['abs_scripts_dir'])
 
+
 # main {{{
 if __name__ == '__main__':
     partner_repacks = DesktopPartnerRepacks()
     partner_repacks.run_and_exit()
--- a/testing/mozharness/scripts/desktop_unittest.py
+++ b/testing/mozharness/scripts/desktop_unittest.py
@@ -19,38 +19,40 @@ import glob
 import imp
 
 from datetime import datetime, timedelta
 
 # load modules from parent dir
 sys.path.insert(1, os.path.dirname(sys.path[0]))
 
 from mozharness.base.errors import BaseErrorList
-from mozharness.base.log import INFO, ERROR
+from mozharness.base.log import INFO
 from mozharness.base.script import PreScriptAction
 from mozharness.base.vcs.vcsbase import MercurialScript
 from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
 from mozharness.mozilla.buildbot import TBPL_EXCEPTION
 from mozharness.mozilla.mozbase import MozbaseMixin
 from mozharness.mozilla.structuredlog import StructuredOutputParser
 from mozharness.mozilla.testing.errors import HarnessErrorList
 from mozharness.mozilla.testing.unittest import DesktopUnittestOutputParser
 from mozharness.mozilla.testing.codecoverage import (
     CodeCoverageMixin,
     code_coverage_config_options
 )
 from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
 
-SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell', 'mozbase', 'mozmill']
+SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell',
+                    'mozbase', 'mozmill']
 SUITE_DEFAULT_E10S = ['mochitest', 'reftest']
 SUITE_NO_E10S = ['xpcshell']
 
 
 # DesktopUnittest {{{1
-class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin, CodeCoverageMixin):
+class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin,
+                      CodeCoverageMixin):
     config_options = [
         [['--mochitest-suite', ], {
             "action": "extend",
             "dest": "specified_mochitest_suites",
             "type": "string",
             "help": "Specify which mochi suite to run. "
                     "Suites are defined in the config file.\n"
                     "Examples: 'all', 'plain1', 'plain5', 'chrome', or 'a11y'"}
@@ -146,17 +148,18 @@ class DesktopUnittest(TestingMixin, Merc
             "action": "store",
             "dest": "this_chunk",
             "help": "Number of this chunk"}
          ],
         [["--allow-software-gl-layers"], {
             "action": "store_true",
             "dest": "allow_software_gl_layers",
             "default": False,
-            "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}
+            "help": "Permits a software GL implementation (such as LLVMPipe) to use "
+                    "the GL compositor."}
          ],
         [["--single-stylo-traversal"], {
             "action": "store_true",
             "dest": "single_stylo_traversal",
             "default": False,
             "help": "Forcibly enable single thread traversal in Stylo with STYLO_THREADS=1"}
          ],
         [["--enable-stylo"], {
@@ -276,18 +279,20 @@ class DesktopUnittest(TestingMixin, Merc
                                                         'plugins')
         dirs['abs_test_bin_components_dir'] = os.path.join(dirs['abs_test_bin_dir'],
                                                            'components')
         dirs['abs_mochitest_dir'] = os.path.join(dirs['abs_test_install_dir'], "mochitest")
         dirs['abs_reftest_dir'] = os.path.join(dirs['abs_test_install_dir'], "reftest")
         dirs['abs_xpcshell_dir'] = os.path.join(dirs['abs_test_install_dir'], "xpcshell")
         dirs['abs_cppunittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "cppunittest")
         dirs['abs_gtest_dir'] = os.path.join(dirs['abs_test_install_dir'], "gtest")
-        dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
-        dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "jit-test", "jit-test")
+        dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'],
+                                                   'blobber_upload_dir')
+        dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'],
+                                               "jit-test", "jit-test")
         dirs['abs_mozbase_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozbase")
         dirs['abs_mozmill_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozmill")
 
         if os.path.isabs(c['virtualenv_path']):
             dirs['abs_virtualenv_dir'] = c['virtualenv_path']
         else:
             dirs['abs_virtualenv_dir'] = os.path.join(abs_dirs['abs_work_dir'],
                                                       c['virtualenv_path'])
@@ -330,20 +335,18 @@ class DesktopUnittest(TestingMixin, Merc
     def _pre_create_virtualenv(self, action):
         dirs = self.query_abs_dirs()
 
         self.register_virtualenv_module(name='pip>=1.5')
         self.register_virtualenv_module('psutil==3.1.1', method='pip')
         self.register_virtualenv_module(name='mock')
         self.register_virtualenv_module(name='simplejson')
 
-        requirements_files = [
-                os.path.join(dirs['abs_test_install_dir'],
-                    'config',
-                    'marionette_requirements.txt')]
+        requirements_files = [os.path.join(dirs['abs_test_install_dir'],
+                                           'config', 'marionette_requirements.txt')]
 
         if self._query_specified_suites('mochitest') is not None:
             # mochitest is the only thing that needs this
             requirements_files.append(
                 os.path.join(dirs['abs_mochitest_dir'],
                              'websocketprocessbridge',
                              'websocketprocessbridge_requirements.txt'))
 
@@ -386,17 +389,17 @@ class DesktopUnittest(TestingMixin, Merc
             raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
                                         '%s_raw.log' % suite)
 
             error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
                                               '%s_errorsummary.log' % suite)
             str_format_values = {
                 'binary_path': self.binary_path,
                 'symbols_path': self._query_symbols_url(),
-                'abs_work_dir' : dirs['abs_work_dir'],
+                'abs_work_dir': dirs['abs_work_dir'],
                 'abs_app_dir': abs_app_dir,
                 'abs_res_dir': abs_res_dir,
                 'raw_log_file': raw_log_file,
                 'error_summary_file': error_summary_file,
                 'gtest_dir': os.path.join(dirs['abs_test_install_dir'],
                                           'gtest'),
             }
 
@@ -414,21 +417,21 @@ class DesktopUnittest(TestingMixin, Merc
             if c.get('total_chunks') and c.get('this_chunk'):
                 base_cmd.extend(['--total-chunks', c['total_chunks'],
                                  '--this-chunk', c['this_chunk']])
 
             if c['no_random']:
                 if suite_category == "mochitest":
                     base_cmd.append('--bisect-chunk=default')
                 else:
-                    self.warning("--no-random does not currently work with suites other than mochitest.")
-
+                    self.warning("--no-random does not currently work with suites other than "
+                                 "mochitest.")
 
             if c['headless']:
-                base_cmd.append('--headless');
+                base_cmd.append('--headless')
 
             # set pluginsPath
             abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
             str_format_values['test_plugin_path'] = abs_res_plugins_dir
 
             if suite_category not in c["suite_definitions"]:
                 self.fatal("'%s' not defined in the config!")
 
@@ -509,17 +512,18 @@ class DesktopUnittest(TestingMixin, Merc
                 return flavor
 
     def structured_output(self, suite_category, flavor=None):
         unstructured_flavors = self.config.get('unstructured_flavors')
         if not unstructured_flavors:
             return False
         if suite_category not in unstructured_flavors:
             return True
-        if not unstructured_flavors.get(suite_category) or flavor in unstructured_flavors.get(suite_category):
+        if not unstructured_flavors.get(suite_category) or \
+                flavor in unstructured_flavors.get(suite_category):
             return False
         return True
 
     def get_test_output_parser(self, suite_category, flavor=None, strict=False,
                                **kwargs):
         if not self.structured_output(suite_category, flavor):
             return DesktopUnittestOutputParser(suite_category=suite_category, **kwargs)
         self.info("Structured output parser in use for %s." % suite_category)
@@ -552,20 +556,19 @@ class DesktopUnittest(TestingMixin, Merc
             suites = self._query_specified_suites(category) or []
             for suite in suites:
                 if any([suite.startswith(c) for c in compiled_code_suites]):
                     rejected.append(suite)
                     break
         if rejected:
             self.buildbot_status(TBPL_EXCEPTION)
             self.fatal("There are specified suites that are incompatible with "
-                      "--artifact try syntax flag: {}".format(', '.join(rejected)),
+                       "--artifact try syntax flag: {}".format(', '.join(rejected)),
                        exit_code=self.return_code)
 
-
     def download_and_extract(self):
         """
         download and extract test zip / download installer
         optimizes which subfolders to extract from tests zip
         """
         c = self.config
 
         extract_dirs = None
@@ -693,17 +696,17 @@ class DesktopUnittest(TestingMixin, Merc
                     # Mac specific, but points to abs_app_dir on other
                     # platforms.
                     'abs_res_dir': abs_res_dir,
                 }
                 options_list = []
                 env = {}
                 if isinstance(suites[suite], dict):
                     options_list = suites[suite].get('options', [])
-                    if self.config.get('verify') == True:
+                    if self.config.get('verify') is True:
                         tests_list = []
                     else:
                         tests_list = suites[suite].get('tests', [])
                     env = copy.deepcopy(suites[suite].get('env', {}))
                 else:
                     options_list = suites[suite]
                     tests_list = []
 
@@ -768,17 +771,18 @@ class DesktopUnittest(TestingMixin, Merc
                 env = self.query_env(partial_env=env, log_level=INFO)
                 cmd_timeout = self.get_timeout_for_category(suite_category)
 
                 for verify_args in self.query_verify_args(suite):
                     if (datetime.now() - self.start_time) > max_verify_time:
                         # Verification has run out of time. That is okay! Stop running
                         # tests so that a task timeout is not triggered, and so that
                         # (partial) results are made available in a timely manner.
-                        self.info("TinderboxPrint: Verification too long: Not all tests were verified.<br/>")
+                        self.info("TinderboxPrint: Verification too long: Not all tests "
+                                  "were verified.<br/>")
                         # Signal verify time exceeded, to break out of suites and
                         # suite categories loops also.
                         return False
 
                     final_cmd = copy.copy(cmd)
                     final_cmd.extend(verify_args)
                     return_code = self.run_command(final_cmd, cwd=dirs['abs_work_dir'],
                                                    output_timeout=cmd_timeout,
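The one behavior-adjacent change in desktop_unittest.py is the E712 fix: `self.config.get('verify') == True` becomes an identity check against the True singleton. A reduced sketch with a made-up config dict:

    config = {'verify': True}

    # E712: compare to True by identity, not equality
    if config.get('verify') is True:
        tests_list = []
    else:
        tests_list = ['test_a.py', 'test_b.py']

Note that `is True` is stricter than `== True`: an equal-but-distinct value such as 1 would now take the else branch, which matches the config's use of a real boolean.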
--- a/testing/mozharness/scripts/fx_desktop_build.py
+++ b/testing/mozharness/scripts/fx_desktop_build.py
@@ -73,17 +73,18 @@ class FxDesktopBuild(BuildScript, TryToo
                     "%(objdir)s/dist/firefox-*",
                     "%(objdir)s/dist/fennec*",
                     "%(objdir)s/dist/seamonkey*",
                     "%(objdir)s/dist/thunderbird*",
                     "%(objdir)s/dist/install/sea/*.exe"
                 ],
                 'stage_product': 'firefox',
                 'platform_supports_post_upload_to_latest': True,
-                'build_resources_path': '%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
+                'build_resources_path':
+                    '%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
                 'nightly_promotion_branches': ['mozilla-central', 'mozilla-aurora'],
 
                 # try will overwrite these
                 'clone_with_purge': False,
                 'clone_by_revision': False,
                 'tinderbox_build_dir': None,
                 'to_tinderbox_dated': True,
                 'release_to_try_builds': False,
@@ -200,17 +201,16 @@ class FxDesktopBuild(BuildScript, TryToo
                 rw_config.volatile_config['actions'])
             )
         # replace rw_config as well to set actions as in BaseScript
         rw_config.set_config(c, overwrite=True)
         rw_config.update_actions()
         self.actions = tuple(rw_config.actions)
         self.all_actions = tuple(rw_config.all_actions)
 
-
     def query_abs_dirs(self):
         if self.abs_dirs:
             return self.abs_dirs
         c = self.config
         abs_dirs = super(FxDesktopBuild, self).query_abs_dirs()
         if not c.get('app_ini_path'):
             self.fatal('"app_ini_path" is needed in your config for this '
                        'script.')
@@ -252,11 +252,12 @@ class FxDesktopBuild(BuildScript, TryToo
 
     @script.PreScriptRun
     def suppress_windows_modal_dialogs(self, *args, **kwargs):
         if self._is_windows():
             # Suppress Windows modal dialogs to avoid hangs
             import ctypes
             ctypes.windll.kernel32.SetErrorMode(0x8001)
 
+
 if __name__ == '__main__':
     fx_desktop_build = FxDesktopBuild()
     fx_desktop_build.run_and_exit()
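fx_desktop_build.py wraps an over-long default-config value onto its own line; inside a dict literal the braces keep the statement open, so no backslash is needed. A sketch of the same shape (the key and path come from the hunk, the surrounding dict is invented):

    default_config = {
        # inside braces, a long value can simply continue on the next line (E501 fix)
        'build_resources_path':
            '%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
    }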
--- a/testing/mozharness/scripts/l10n_bumper.py
+++ b/testing/mozharness/scripts/l10n_bumper.py
@@ -207,21 +207,23 @@ class L10nBumper(VCSScript):
         dirs = self.query_abs_dirs()
         tree = c.get('treestatus_tree', os.path.basename(c['gecko_pull_url'].rstrip("/")))
         treestatus_url = "%s/trees/%s" % (c['treestatus_base_url'], tree)
         treestatus_json = os.path.join(dirs['abs_work_dir'], 'treestatus.json')
         if not os.path.exists(dirs['abs_work_dir']):
             self.mkdir_p(dirs['abs_work_dir'])
         self.rmtree(treestatus_json)
 
-        self.run_command(["curl", "--retry", "4", "-o", treestatus_json, treestatus_url], throw_exception=True)
+        self.run_command(["curl", "--retry", "4", "-o", treestatus_json, treestatus_url],
+                         throw_exception=True)
 
         treestatus = self._read_json(treestatus_json)
         if treestatus['result']['status'] != 'closed':
-            self.info("treestatus is %s - assuming we can land" % repr(treestatus['result']['status']))
+            self.info("treestatus is %s - assuming we can land" %
+                      repr(treestatus['result']['status']))
             return True
 
         return False
 
     # Actions {{{1
     def check_treestatus(self):
         if not self.config['ignore_closed_tree'] and not self.query_treestatus():
             self.info("breaking early since treestatus is closed")
--- a/testing/mozharness/scripts/marionette.py
+++ b/testing/mozharness/scripts/marionette.py
@@ -2,24 +2,23 @@
 # ***** BEGIN LICENSE BLOCK *****
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 # ***** END LICENSE BLOCK *****
 
 import copy
 import os
-import re
 import sys
 
 # load modules from parent dir
 sys.path.insert(1, os.path.dirname(sys.path[0]))
 
 from mozharness.base.errors import BaseErrorList, TarErrorList
-from mozharness.base.log import INFO, ERROR, WARNING
+from mozharness.base.log import INFO
 from mozharness.base.script import PreScriptAction
 from mozharness.base.transfer import TransferMixin
 from mozharness.base.vcs.vcsbase import MercurialScript
 from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
 from mozharness.mozilla.testing.errors import LogcatErrorList
 from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
 from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
 from mozharness.mozilla.testing.codecoverage import (
@@ -29,17 +28,18 @@ from mozharness.mozilla.testing.codecove
 from mozharness.mozilla.testing.errors import HarnessErrorList
 
 from mozharness.mozilla.structuredlog import StructuredOutputParser
 
 # TODO: we could remove emulator specific code after B2G ICS emulator buildbot
 #       builds are turned off, Bug 1209180.
 
 
-class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMixin, CodeCoverageMixin):
+class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMixin,
+                     CodeCoverageMixin):
     config_options = [[
         ["--application"],
         {"action": "store",
          "dest": "application",
          "default": None,
          "help": "application name of binary"
          }
     ], [
@@ -49,17 +49,18 @@ class MarionetteTest(TestingMixin, Mercu
          "default": None,
          "help": "Optional command-line argument to pass to the browser"
          }
     ], [
         ["--marionette-address"],
         {"action": "store",
          "dest": "marionette_address",
          "default": None,
-         "help": "The host:port of the Marionette server running inside Gecko.  Unused for emulator testing",
+         "help": "The host:port of the Marionette server running inside Gecko. "
+                 "Unused for emulator testing",
          }
     ], [
         ["--emulator"],
         {"action": "store",
          "type": "choice",
          "choices": ['arm', 'x86'],
          "dest": "emulator",
          "default": None,
@@ -93,17 +94,17 @@ class MarionetteTest(TestingMixin, Mercu
          "help": "Run tests with multiple processes. (Desktop builds only)",
          }
     ], [
         ["--headless"],
         {"action": "store_true",
          "dest": "headless",
          "default": False,
          "help": "Run tests in headless mode.",
-        }
+         }
     ], [
        ["--allow-software-gl-layers"],
        {"action": "store_true",
         "dest": "allow_software_gl_layers",
         "default": False,
         "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."
         }
     ], [
@@ -149,17 +150,18 @@ class MarionetteTest(TestingMixin, Mercu
         if c.get('structured_output'):
             self.parser_class = StructuredOutputParser
         else:
             self.parser_class = TestSummaryOutputParserHelper
 
     def _pre_config_lock(self, rw_config):
         super(MarionetteTest, self)._pre_config_lock(rw_config)
         if not self.config.get('emulator') and not self.config.get('marionette_address'):
-                self.fatal("You need to specify a --marionette-address for non-emulator tests! (Try --marionette-address localhost:2828 )")
+                self.fatal("You need to specify a --marionette-address for non-emulator tests! "
+                           "(Try --marionette-address localhost:2828 )")
 
     def query_abs_dirs(self):
         if self.abs_dirs:
             return self.abs_dirs
         abs_dirs = super(MarionetteTest, self).query_abs_dirs()
         dirs = {}
         dirs['abs_test_install_dir'] = os.path.join(
             abs_dirs['abs_work_dir'], 'tests')
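marionette.py drops `re` and the ERROR/WARNING log levels because flake8 flags imports that are never used (F401); the cure is importing only what the module touches. A stdlib sketch, where query_env() is a hypothetical stand-in rather than the mozharness method:

    from logging import INFO  # ERROR and WARNING were imported too, but unused (F401)


    def query_env(log_level=INFO):
        # hypothetical stand-in that just records the chosen level
        return {'LOG_LEVEL': str(log_level)}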
--- a/testing/mozharness/scripts/merge_day/gecko_migration.py
+++ b/testing/mozharness/scripts/merge_day/gecko_migration.py
@@ -15,24 +15,22 @@ http://hg.mozilla.org/build/tools/file/0
 and
 http://hg.mozilla.org/build/tools/file/084bc4e2fc76/release/merge_helper.py
 """
 
 import os
 import pprint
 import subprocess
 import sys
-from getpass import getpass
 
 sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
 
 from mozharness.base.errors import HgErrorList
 from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
 from mozharness.base.vcs.vcsbase import MercurialScript
-from mozharness.mozilla.selfserve import SelfServeMixin
 from mozharness.mozilla.updates.balrog import BalrogMixin
 from mozharness.mozilla.buildbot import BuildbotMixin
 from mozharness.mozilla.repo_manipulation import MercurialRepoManipulationMixin
 
 VALID_MIGRATION_BEHAVIORS = (
     "beta_to_release", "central_to_beta", "release_to_esr", "bump_second_digit",
 )
 
@@ -99,23 +97,26 @@ class GeckoMigration(MercurialScript, Ba
         self.run_sanity_check()
 
 # Helper methods {{{1
     def run_sanity_check(self):
         """ Verify the configs look sane before proceeding.
             """
         message = ""
         if self.config['migration_behavior'] not in VALID_MIGRATION_BEHAVIORS:
-            message += "%s must be one of %s!\n" % (self.config['migration_behavior'], VALID_MIGRATION_BEHAVIORS)
+            message += "%s must be one of %s!\n" % (self.config['migration_behavior'],
+                                                    VALID_MIGRATION_BEHAVIORS)
         if self.config['migration_behavior'] == 'beta_to_release':
-            if self.config.get("require_remove_locales") and not self.config.get("remove_locales") and 'migrate' in self.actions:
+            if self.config.get("require_remove_locales") \
+                    and not self.config.get("remove_locales") and 'migrate' in self.actions:
                 message += "You must specify --remove-locale!\n"
         else:
             if self.config.get("require_remove_locales") or self.config.get("remove_locales"):
-                self.warning("--remove-locale isn't valid unless you're using beta_to_release migration_behavior!\n")
+                self.warning("--remove-locale isn't valid unless you're using beta_to_release "
+                             "migration_behavior!\n")
         if message:
             self.fatal(message)
 
     def query_abs_dirs(self):
         """ Allow for abs_from_dir and abs_to_dir
             """
         if self.abs_dirs:
             return self.abs_dirs
@@ -216,17 +217,18 @@ class GeckoMigration(MercurialScript, Ba
             # I'm reverting .hgtags to old_head, then appending the new tags
             # from new_head to .hgtags, and hoping nothing goes wrong.
             # I'd rather not write patch files from scratch, so this seems
             # like a slightly more complex but less objectionable method?
             self.info("Trying to preserve tags from before debugsetparents...")
             dirs = self.query_abs_dirs()
             patch_file = os.path.join(dirs['abs_work_dir'], 'patch_file')
             self.run_command(
-                subprocess.list2cmdline(hg + ['diff', '-r', old_head, '.hgtags', '-U9', '>', patch_file]),
+                subprocess.list2cmdline(hg + ['diff', '-r', old_head, '.hgtags',
+                                              '-U9', '>', patch_file]),
                 cwd=cwd,
             )
             self.run_command(
                 ['patch', '-R', '-p1', '-i', patch_file],
                 cwd=cwd,
                 halt_on_failure=True,
             )
             tag_diff = self.read_from_file(patch_file)
@@ -319,31 +321,31 @@ class GeckoMigration(MercurialScript, Ba
 
             We could have all of these individually toggled by flags, but
             by separating into workflow methods we can be more precise about
             what happens in each workflow, while allowing for things like
             staging beta user repo migrations.
             """
         dirs = self.query_abs_dirs()
         next_mb_version = self.get_version(dirs['abs_to_dir'])[0]
-        self.bump_version(dirs['abs_to_dir'], next_mb_version, next_mb_version, "a1", "", use_config_suffix=True)
+        self.bump_version(dirs['abs_to_dir'], next_mb_version, next_mb_version, "a1", "",
+                          use_config_suffix=True)
         self.apply_replacements()
         # bump m-c version
         curr_mc_version = self.get_version(dirs['abs_from_dir'])[0]
         next_mc_version = str(int(curr_mc_version) + 1)
         self.bump_version(
             dirs['abs_from_dir'], curr_mc_version, next_mc_version, "a1", "a1",
             bump_major=True,
             use_config_suffix=False
         )
         # touch clobber files
         self.touch_clobber_file(dirs['abs_from_dir'])
         self.touch_clobber_file(dirs['abs_to_dir'])
 
-
     def beta_to_release(self, *args, **kwargs):
         """ mozilla-beta -> mozilla-release behavior.
 
             We could have all of these individually toggled by flags, but
             by separating into workflow methods we can be more precise about
             what happens in each workflow, while allowing for things like
             staging beta user repo migrations.
             """
@@ -487,16 +489,18 @@ class GeckoMigration(MercurialScript, Ba
         if end_tag:
             end_tag = end_tag % {'major_version': to_fx_major_version}
             self.hg_tag(
                 dirs['abs_to_dir'], end_tag, user=self.config['hg_user'],
                 revision=base_to_rev, force=True,
             )
         # Call beta_to_release etc.
         if not hasattr(self, self.config['migration_behavior']):
-            self.fatal("Don't know how to proceed with migration_behavior %s !" % self.config['migration_behavior'])
+            self.fatal("Don't know how to proceed with migration_behavior %s !" %
+                       self.config['migration_behavior'])
         getattr(self, self.config['migration_behavior'])(end_tag=end_tag)
-        self.info("Verify the diff, and apply any manual changes, such as disabling features, and --commit-changes")
+        self.info("Verify the diff, and apply any manual changes, such as disabling features, "
+                  "and --commit-changes")
 
 
 # __main__ {{{1
 if __name__ == '__main__':
     GeckoMigration().run_and_exit()
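run_sanity_check() in gecko_migration.py splits its over-long conditional with a backslash, indenting the continuation an extra level so it is not mistaken for the body. A reduced, runnable sketch with dummy data:

    config = {'require_remove_locales': True, 'remove_locales': None}
    actions = ['clobber', 'migrate']

    message = ""
    if config.get("require_remove_locales") \
            and not config.get("remove_locales") and 'migrate' in actions:
        message += "You must specify --remove-locale!\n"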
--- a/testing/mozharness/scripts/mobile_l10n.py
+++ b/testing/mozharness/scripts/mobile_l10n.py
@@ -106,17 +106,17 @@ class MobileSingleLocale(MockMixin, Loca
          "type": "int",
          "help": "Specify the total number of chunks of locales"
          }
     ], [
         ["--disable-mock"],
         {"dest": "disable_mock",
          "action": "store_true",
          "help": "do not run under mock despite what gecko-config says",
-        }
+         }
     ], [
         ['--revision', ],
         {"action": "store",
          "dest": "revision",
          "type": "string",
          "help": "Override the gecko revision to use (otherwise use buildbot supplied"
                  " value, or en-US revision) "}
     ]]
@@ -178,17 +178,18 @@ class MobileSingleLocale(MockMixin, Loca
                 'buildnum': rc['buildnum']
             }
         repack_env = self.query_env(partial_env=c.get("repack_env"),
                                     replace_dict=replace_dict)
         if c.get('base_en_us_binary_url') and c.get('release_config_file'):
             rc = self.query_release_config()
             repack_env['EN_US_BINARY_URL'] = c['base_en_us_binary_url'] % replace_dict
         if 'MOZ_SIGNING_SERVERS' in os.environ:
-            repack_env['MOZ_SIGN_CMD'] = subprocess.list2cmdline(self.query_moz_sign_cmd(formats=['jar']))
+            repack_env['MOZ_SIGN_CMD'] = \
+                subprocess.list2cmdline(self.query_moz_sign_cmd(formats=['jar']))
         self.repack_env = repack_env
         return self.repack_env
 
     def query_l10n_env(self):
         return self.query_env()
 
     def query_upload_env(self):
         if self.upload_env:
@@ -266,17 +267,17 @@ class MobileSingleLocale(MockMixin, Loca
         self.read_buildbot_config()
         config = self.config
         revision = None
         if config.get("revision"):
             revision = config["revision"]
         elif 'revision' in self.buildbot_properties:
             revision = self.buildbot_properties['revision']
         elif (self.buildbot_config and
-                  self.buildbot_config.get('sourcestamp', {}).get('revision')):
+                self.buildbot_config.get('sourcestamp', {}).get('revision')):
             revision = self.buildbot_config['sourcestamp']['revision']
         elif self.buildbot_config and self.buildbot_config.get('revision'):
             revision = self.buildbot_config['revision']
         elif config.get("update_gecko_source_to_enUS", True):
             revision = self._query_enUS_revision()
 
         if not revision:
             self.fatal("Can't determine revision!")
@@ -342,30 +343,28 @@ class MobileSingleLocale(MockMixin, Loca
 
     def query_upload_url(self, locale):
         if locale in self.upload_urls:
             return self.upload_urls[locale]
         else:
             self.error("Can't determine the upload url for %s!" % locale)
 
     def query_abs_dirs(self):
-         if self.abs_dirs:
-             return self.abs_dirs
-         abs_dirs = super(MobileSingleLocale, self).query_abs_dirs()
+        if self.abs_dirs:
+            return self.abs_dirs
+        abs_dirs = super(MobileSingleLocale, self).query_abs_dirs()
 
-         dirs = {
-             'abs_tools_dir':
-                 os.path.join(abs_dirs['base_work_dir'], 'tools'),
-             'build_dir':
-                 os.path.join(abs_dirs['base_work_dir'], 'build'),
-         }
+        dirs = {
+            'abs_tools_dir': os.path.join(abs_dirs['base_work_dir'], 'tools'),
+            'build_dir': os.path.join(abs_dirs['base_work_dir'], 'build'),
+        }
 
-         abs_dirs.update(dirs)
-         self.abs_dirs = abs_dirs
-         return self.abs_dirs
+        abs_dirs.update(dirs)
+        self.abs_dirs = abs_dirs
+        return self.abs_dirs
 
     def add_failure(self, locale, message, **kwargs):
         self.locales_property[locale] = "Failed"
         prop_key = "%s_failure" % locale
         prop_value = self.query_buildbot_property(prop_key)
         if prop_value:
             prop_value = "%s  %s" % (prop_value, message)
         else:
@@ -374,17 +373,18 @@ class MobileSingleLocale(MockMixin, Loca
         MercurialScript.add_failure(self, locale, message=message, **kwargs)
 
     def summary(self):
         MercurialScript.summary(self)
         # TODO we probably want to make this configurable on/off
         locales = self.query_locales()
         for locale in locales:
             self.locales_property.setdefault(locale, "Success")
-        self.set_buildbot_property("locales", json.dumps(self.locales_property), write_to_file=True)
+        self.set_buildbot_property("locales", json.dumps(self.locales_property),
+                                   write_to_file=True)
 
     # Actions {{{2
     def clobber(self):
         self.read_buildbot_config()
         dirs = self.query_abs_dirs()
         c = self.config
         objdir = os.path.join(dirs['abs_work_dir'], c['mozilla_dir'],
                               c['objdir'])
@@ -405,30 +405,30 @@ class MobileSingleLocale(MockMixin, Loca
             current_repo = {}
             for key, value in repository.iteritems():
                 try:
                     current_repo[key] = value % replace_dict
                 except TypeError:
                     # pass through non-interpolables, like booleans
                     current_repo[key] = value
                 except KeyError:
-                    self.error('not all the values in "{0}" can be replaced. Check your configuration'.format(value))
+                    self.error('not all the values in "{0}" can be replaced. Check '
+                               'your configuration'.format(value))
                     raise
             repos.append(current_repo)
         self.info("repositories: %s" % repos)
         self.vcs_checkout_repos(repos, parent_dir=dirs['abs_work_dir'],
                                 tag_override=c.get('tag_override'))
 
     def clone_locales(self):
         self.pull_locale_source()
 
     # list_locales() is defined in LocalesMixin.
 
     def _setup_configure(self, buildid=None):
-        c = self.config
         dirs = self.query_abs_dirs()
         env = self.query_repack_env()
         make = self.query_exe("make")
         if self.run_command_m([make, "-f", "client.mk", "configure"],
                               cwd=dirs['abs_mozilla_dir'],
                               env=env,
                               error_list=MakefileErrorList):
             self.fatal("Configure failed!")
@@ -505,17 +505,18 @@ class MobileSingleLocale(MockMixin, Loca
         success_count = total_count = 0
         for locale in locales:
             total_count += 1
             if self.run_command_m([make, "installers-%s" % locale],
                                   cwd=dirs['abs_locales_dir'],
                                   env=repack_env,
                                   error_list=MakefileErrorList,
                                   halt_on_failure=False):
-                self.add_failure(locale, message="%s failed in make installers-%s!" % (locale, locale))
+                self.add_failure(locale, message="%s failed in make installers-%s!" %
+                                 (locale, locale))
                 continue
             success_count += 1
         self.summarize_success_count(success_count, total_count,
                                      message="Repacked %d of %d binaries successfully.")
 
     def validate_repacks_signed(self):
         c = self.config
         dirs = self.query_abs_dirs()
@@ -539,17 +540,18 @@ class MobileSingleLocale(MockMixin, Loca
             )
             self.disable_mock()
             if status:
                 self.add_failure(locale, message="Errors verifying %s binary!" % locale)
                 # No need to rm because upload is per-locale
                 continue
             success_count += 1
         self.summarize_success_count(success_count, total_count,
-                                     message="Validated signatures on %d of %d binaries successfully.")
+                                     message="Validated signatures on %d of %d "
+                                             "binaries successfully.")
 
     def taskcluster_upload(self):
         auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
         credentials = {}
         execfile(auth, credentials)
         client_id = credentials.get('taskcluster_clientId')
         access_token = credentials.get('taskcluster_accessToken')
         if not client_id or not access_token:
@@ -597,17 +599,17 @@ class MobileSingleLocale(MockMixin, Loca
                 'build_type': self.query_build_type(),
                 'locale': locale,
             }
             for template in templates:
                 routes.append(template.format(**fmt))
 
             self.info('Using routes: %s' % routes)
             tc = Taskcluster(branch,
-                             pushinfo.pushdate, # Use pushdate as the rank
+                             pushinfo.pushdate,  # Use pushdate as the rank
                              client_id,
                              access_token,
                              self.log_obj,
                              )
             task = tc.create_task(routes)
             tc.claim_task(task)
 
             for upload_file in abs_files:
@@ -628,17 +630,19 @@ class MobileSingleLocale(MockMixin, Loca
             rc = self.query_release_config()
             buildnum = rc['buildnum']
         for locale in locales:
             if self.query_failure(locale):
                 self.warning("Skipping previously failed locale %s." % locale)
                 continue
             total_count += 1
             if c.get('base_post_upload_cmd'):
-                upload_env['POST_UPLOAD_CMD'] = c['base_post_upload_cmd'] % {'version': version, 'locale': locale, 'buildnum': str(buildnum), 'post_upload_extra': ' '.join(c.get('post_upload_extra', []))}
+                upload_env['POST_UPLOAD_CMD'] = c['base_post_upload_cmd'] % \
+                    {'version': version, 'locale': locale, 'buildnum': str(buildnum),
+                     'post_upload_extra': ' '.join(c.get('post_upload_extra', []))}
             output = self.get_output_from_command_m(
                 # Ugly hack to avoid |make upload| stderr from showing up
                 # as get_output_from_command errors
                 "%s upload AB_CD=%s 2>&1" % (make, locale),
                 cwd=dirs['abs_locales_dir'],
                 env=upload_env,
                 silent=True
             )
@@ -682,28 +686,29 @@ class MobileSingleLocale(MockMixin, Loca
             'repo': self.config['tools_repo'],
             'vcs': "hg",
             'branch': "default",
             'dest': dirs['abs_tools_dir'],
         }]
         rev = self.vcs_checkout(**repos[0])
         self.set_buildbot_property("tools_revision", rev, write_to_file=True)
 
-    def query_apkfile_path(self,locale):
+    def query_apkfile_path(self, locale):
 
         dirs = self.query_abs_dirs()
         apkdir = os.path.join(dirs['abs_objdir'], 'dist')
-        r  = r"(\.)" + re.escape(locale) + r"(\.*)"
+        r = r"(\.)" + re.escape(locale) + r"(\.*)"
 
         apks = []
         for f in os.listdir(apkdir):
             if f.endswith(".apk") and re.search(r, f):
                 apks.append(f)
         if len(apks) == 0:
-            self.fatal("Found no apks files in %s, don't know what to do:\n%s" % (apkdir, apks), exit_code=1)
+            self.fatal("Found no apks files in %s, don't know what to do:\n%s" %
+                       (apkdir, apks), exit_code=1)
 
         return os.path.join(apkdir, apks[0])
 
     def query_is_release_or_beta(self):
 
         return bool(self.config.get("is_release_or_beta"))
 
     def submit_to_balrog(self):
@@ -716,44 +721,46 @@ class MobileSingleLocale(MockMixin, Loca
 
         dirs = self.query_abs_dirs()
         locales = self.query_locales()
         if not self.config.get('taskcluster_nightly'):
             balrogReady = True
             for locale in locales:
                 apk_url = self.query_upload_url(locale)
                 if not apk_url:
-                    self.add_failure(locale, message="Failed to detect %s url in make upload!" % (locale))
+                    self.add_failure(locale,
+                                     message="Failed to detect %s url in make upload!" % locale)
                     balrogReady = False
                     continue
             if not balrogReady:
-                return self.fatal(message="Not all repacks successful, abort without submitting to balrog")
+                return self.fatal(message="Not all repacks successful, abort without "
+                                          "submitting to balrog.")
 
         env = self.query_upload_env()
         for locale in locales:
             apkfile = self.query_apkfile_path(locale)
             if self.config.get('taskcluster_nightly'):
                 # Taskcluster needs stage_platform
                 self.set_buildbot_property("stage_platform",
                                            self.config.get("stage_platform"))
                 self.set_buildbot_property("branch", self.config.get("branch"))
             else:
                 apk_url = self.query_upload_url(locale)
                 self.set_buildbot_property("completeMarUrl", apk_url)
 
                 # The Balrog submitter translates this platform into a build target
-                # via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
+                # via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23  # noqa
                 self.set_buildbot_property(
                     "platform",
                     self.buildbot_config["properties"]["platform"])
-                #TODO: Is there a better way to get this?
+                # TODO: Is there a better way to get this?
 
             # Set other necessary properties for Balrog submission. None need to
             # be passed back to buildbot, so we won't write them to the properties
-            #files.
+            # files.
             self.set_buildbot_property("locale", locale)
 
             self.set_buildbot_property("appVersion", self.query_version())
 
             self.set_buildbot_property("appName", "Fennec")
             # TODO: don't hardcode
             self.set_buildbot_property("hashType", "sha512")
             self.set_buildbot_property("completeMarSize", self.query_filesize(apkfile))
@@ -773,12 +780,13 @@ class MobileSingleLocale(MockMixin, Loca
                 if self.query_is_nightly():
                     self.submit_balrog_updates(release_type="nightly")
                 else:
                     self.submit_balrog_updates(release_type="release")
 
                 if not self.query_is_nightly():
                     self.submit_balrog_release_pusher(dirs)
 
+
 # main {{{1
 if __name__ == '__main__':
     single_locale = MobileSingleLocale()
     single_locale.run_and_exit()
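mobile_l10n.py also shows the escape hatch for lines that cannot reasonably be wrapped: the build-tools platforms.py URL keeps its length and gains a trailing `# noqa`, silencing flake8 for that one line instead of relaxing the limit globally. Sketch (the platform value is invented):

    # The Balrog submitter translates this platform into a build target
    # via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23  # noqa
    platform = 'android-api-16'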
--- a/testing/mozharness/scripts/mobile_partner_repack.py
+++ b/testing/mozharness/scripts/mobile_partner_repack.py
@@ -168,17 +168,18 @@ class MobilePartnerRepack(LocalesMixin, 
                 installer_name = base_installer_name % replace_dict
                 parent_dir = '%s/original/%s/%s' % (dirs['abs_work_dir'],
                                                     platform, locale)
                 file_path = '%s/%s' % (parent_dir, installer_name)
                 self.mkdir_p(parent_dir)
                 total_count += 1
                 if not self.download_file(url, file_path):
                     self.add_failure(platform, locale,
-                                     message="Unable to download %(platform)s:%(locale)s installer!")
+                                     message="Unable to "
+                                             "download %(platform)s:%(locale)s installer!")
                 else:
                     success_count += 1
         self.summarize_success_count(success_count, total_count,
                                      message="Downloaded %d of %d installers successfully.")
 
     def _repack_apk(self, partner, orig_path, repack_path):
         """ Repack the apk with a partner update channel.
         Returns True for success, None for failure
@@ -232,29 +233,33 @@ class MobilePartnerRepack(LocalesMixin, 
     def repack(self):
         c = self.config
         rc = self.query_release_config()
         dirs = self.query_abs_dirs()
         locales = self.query_locales()
         success_count = total_count = 0
         for platform in c['platforms']:
             for locale in locales:
-                installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
+                installer_name = c['installer_base_names'][platform] % \
+                    {'version': rc['version'], 'locale': locale}
                 if self.query_failure(platform, locale):
                     self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
                     continue
-                original_path = '%s/original/%s/%s/%s' % (dirs['abs_work_dir'], platform, locale, installer_name)
+                original_path = '%s/original/%s/%s/%s' % \
+                    (dirs['abs_work_dir'], platform, locale, installer_name)
                 for partner in c['partner_config'].keys():
-                    repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
+                    repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % \
+                        (dirs['abs_work_dir'], partner, platform, locale, installer_name)
                     total_count += 1
                     if self._repack_apk(partner, original_path, repack_path):
                         success_count += 1
                     else:
                         self.add_failure(platform, locale,
-                                         message="Unable to repack %(platform)s:%(locale)s installer!")
+                                         message="Unable to repack %(platform)s:%(locale)s "
+                                                 "installer!")
         self.summarize_success_count(success_count, total_count,
                                      message="Repacked %d of %d installers successfully.")
 
     def _upload(self, dir_name="unsigned/partner-repacks"):
         c = self.config
         dirs = self.query_abs_dirs()
         local_path = os.path.join(dirs['abs_work_dir'], dir_name)
         rc = self.query_release_config()
@@ -282,33 +287,37 @@ class MobilePartnerRepack(LocalesMixin, 
     def sign(self):
         c = self.config
         rc = self.query_release_config()
         dirs = self.query_abs_dirs()
         locales = self.query_locales()
         success_count = total_count = 0
         for platform in c['platforms']:
             for locale in locales:
-                installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
+                installer_name = c['installer_base_names'][platform] % \
+                    {'version': rc['version'], 'locale': locale}
                 if self.query_failure(platform, locale):
                     self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
                     continue
                 for partner in c['partner_config'].keys():
-                    unsigned_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
-                    signed_dir = '%s/partner-repacks/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale)
+                    unsigned_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % \
+                        (dirs['abs_work_dir'], partner, platform, locale, installer_name)
+                    signed_dir = '%s/partner-repacks/%s/%s/%s' % \
+                        (dirs['abs_work_dir'], partner, platform, locale)
                     signed_path = "%s/%s" % (signed_dir, installer_name)
                     total_count += 1
                     self.info("Signing %s %s." % (platform, locale))
                     if not os.path.exists(unsigned_path):
                         self.error("Missing apk %s!" % unsigned_path)
                         continue
                     if self.sign_apk(unsigned_path, c['keystore'],
                                      self.store_passphrase, self.key_passphrase,
                                      c['key_alias']) != 0:
-                        self.add_summary("Unable to sign %s:%s apk!" % (platform, locale), level=FATAL)
+                        self.add_summary("Unable to sign %s:%s apk!" % (platform, locale),
+                                         level=FATAL)
                     else:
                         self.mkdir_p(signed_dir)
                         if self.align_apk(unsigned_path, signed_path):
                             self.add_failure(platform, locale,
                                              message="Unable to align %(platform)s%(locale)s apk!")
                             self.rmtree(signed_dir)
                         else:
                             success_count += 1
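mobile_partner_repack.py repeatedly breaks long %-interpolations with a backslash before the value tuple. A minimal sketch with invented partner data:

    work_dir, partner, platform, locale = 'build', 'acme', 'android', 'en-US'
    installer_name = 'fennec-58.0.%s.android-arm.apk' % locale

    repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % \
        (work_dir, partner, platform, locale, installer_name)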
--- a/testing/mozharness/scripts/release/antivirus.py
+++ b/testing/mozharness/scripts/release/antivirus.py
@@ -70,44 +70,44 @@ class AntivirusScan(BaseScript, Virtuale
         r"^.*/mar-tools/.*$",
         r"^.*robocop.apk$",
         r"^.*contrib.*"
     ]
     CACHE_DIR = 'cache'
 
     def __init__(self):
         BaseScript.__init__(self,
-            config_options=self.config_options,
-            require_config_file=False,
-            config={
-                "virtualenv_modules": [
-                    "pip==1.5.5",
-                    "boto",
-                    "redo",
-                    "mar",
-                ],
-                "virtualenv_path": "venv",
-            },
-            all_actions=[
-                "create-virtualenv",
-                "activate-virtualenv",
-                "get-extract-script",
-                "get-files",
-                "scan-files",
-                "cleanup-cache",
-            ],
-            default_actions=[
-                "create-virtualenv",
-                "activate-virtualenv",
-                "get-extract-script",
-                "get-files",
-                "scan-files",
-                "cleanup-cache",
-            ],
-        )
+                            config_options=self.config_options,
+                            require_config_file=False,
+                            config={
+                                "virtualenv_modules": [
+                                    "pip==1.5.5",
+                                    "boto",
+                                    "redo",
+                                    "mar",
+                                ],
+                                "virtualenv_path": "venv",
+                            },
+                            all_actions=[
+                                "create-virtualenv",
+                                "activate-virtualenv",
+                                "get-extract-script",
+                                "get-files",
+                                "scan-files",
+                                "cleanup-cache",
+                            ],
+                            default_actions=[
+                                "create-virtualenv",
+                                "activate-virtualenv",
+                                "get-extract-script",
+                                "get-files",
+                                "scan-files",
+                                "cleanup-cache",
+                            ],
+                            )
         self.excludes = self.config.get('excludes', self.DEFAULT_EXCLUDES)
         self.dest_dir = self.CACHE_DIR
 
     def _get_candidates_prefix(self):
         return "pub/{}/candidates/{}-candidates/build{}/".format(
             self.config['product'],
             self.config["version"],
             self.config["build_number"]
@@ -117,18 +117,18 @@ class AntivirusScan(BaseScript, Virtuale
         for exclude in self.excludes:
             if re.search(exclude, keyname):
                 return True
         return False
 
     def get_extract_script(self):
         """Gets a copy of extract_and_run_command.py from tools, and the supporting mar.py,
         so that we can unpack various files for clam to scan them."""
-        remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py".format(self.config["tools_repo"],
-                                                                               self.config["tools_revision"])
+        remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py".format(
+            self.config["tools_repo"], self.config["tools_revision"])
         self.download_file(remote_file, file_name="extract_and_run_command.py")
 
     def get_files(self):
         """Pull the candidate files down from S3 for scanning, using parallel requests"""
         from boto.s3.connection import S3Connection
         from boto.exception import S3CopyError, S3ResponseError
         from redo import retry
         from httplib import HTTPException
@@ -161,17 +161,18 @@ class AntivirusScan(BaseScript, Virtuale
         def find_release_files():
             candidates_prefix = self._get_candidates_prefix()
             self.info("Getting key names from candidates")
             for key in bucket.list(prefix=candidates_prefix):
                 keyname = key.name
                 if self._matches_exclude(keyname):
                     self.debug("Excluding {}".format(keyname))
                 else:
-                    destination = os.path.join(self.dest_dir, keyname.replace(candidates_prefix, ''))
+                    destination = os.path.join(self.dest_dir,
+                                               keyname.replace(candidates_prefix, ''))
                     dest_dir = os.path.dirname(destination)
                     if not os.path.isdir(dest_dir):
                         os.makedirs(dest_dir)
                     yield (keyname, destination)
 
         pool = ThreadPool(self.config["download_parallelization"])
         pool.map(worker, find_release_files())
 
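The AntivirusScan constructor is re-indented so every argument lines up under the first one, pycodestyle's visual indent for continuation lines. A toy version of the same layout, with an invented Base class standing in for BaseScript:

    class Base(object):
        def __init__(self, require_config_file=False, config=None):
            self.config = config or {}


    script = Base(require_config_file=False,
                  config={"virtualenv_path": "venv"},
                  )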
--- a/testing/mozharness/scripts/release/beet_mover.py
+++ b/testing/mozharness/scripts/release/beet_mover.py
@@ -132,46 +132,48 @@ class BeetMover(BaseScript, VirtualenvMi
             'config_options': CONFIG_OPTIONS,
             'all_actions': [
                 # 'clobber',
                 'create-virtualenv',
                 'activate-virtualenv',
                 'generate-candidates-manifest',
                 'refresh-antivirus',
                 'verify-bits',  # beets
-                'download-bits', # beets
+                'download-bits',  # beets
                 'scan-bits',     # beets
                 'upload-bits',  # beets
             ],
             'require_config_file': False,
             # Default configuration
             'config': {
                 # base index url where to find taskcluster artifact based on taskid
-                "artifact_base_url": 'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
+                "artifact_base_url": \
+                'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
                 "virtualenv_modules": [
                     "boto",
                     "PyYAML",
                     "Jinja2",
                     "redo",
                     "cryptography==2.0.3",
                     "mar",
                 ],
                 "virtualenv_path": "venv",
             },
         }
-        #todo do excludes need to be configured via command line for specific builds?
+        # TODO: do excludes need to be configured via command line for specific builds?
         super(BeetMover, self).__init__(**beetmover_kwargs)
 
         c = self.config
         self.manifest = {}
         # assigned in _post_create_virtualenv
         self.virtualenv_imports = None
         self.bucket = c['bucket']
         if not all(aws_creds):
-            self.fatal('credentials must be passed in env: "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
+            self.fatal('credentials must be passed in env: '
+                       '"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
         self.aws_key_id, self.aws_secret_key = aws_creds
         # if excludes is set from command line, use it otherwise use defaults
         self.excludes = self.config.get('excludes', DEFAULT_EXCLUDES)
         dirs = self.query_abs_dirs()
         self.dest_dir = os.path.join(dirs['abs_work_dir'], CACHE_DIR)
         self.mime_fix()
 
     def activate_virtualenv(self):
@@ -251,17 +253,18 @@ class BeetMover(BaseScript, VirtualenvMi
         """
         downloads list of artifacts to self.dest_dir dir based on a given manifest
         """
         self.log('downloading and uploading artifacts to self_dest_dir...')
         dirs = self.query_abs_dirs()
 
         for locale in self.manifest['mapping']:
             for deliverable in self.manifest['mapping'][locale]:
-                self.log("downloading '{}' deliverable for '{}' locale".format(deliverable, locale))
+                self.log("downloading '{}' deliverable for '{}' locale".format(deliverable,
+                                                                               locale))
                 source = self.manifest['mapping'][locale][deliverable]['artifact']
                 self.retry(
                     self.download_file,
                     args=[source],
                     kwargs={'parent_dir': dirs['abs_work_dir']},
                     error_level=FATAL)
         self.log('Success!')
 
@@ -283,17 +286,18 @@ class BeetMover(BaseScript, VirtualenvMi
         bucket = conn.get_bucket(self.bucket)
 
         for locale in self.manifest['mapping']:
             for deliverable in self.manifest['mapping'][locale]:
                 self.log("uploading '{}' deliverable for '{}' locale".format(deliverable, locale))
                 # we have already downloaded the files locally so we can use that version
                 source = self.manifest['mapping'][locale][deliverable]['artifact']
                 s3_key = self.manifest['mapping'][locale][deliverable]['s3_key']
-                downloaded_file = os.path.join(dirs['abs_work_dir'], self.get_filename_from_url(source))
+                downloaded_file = os.path.join(dirs['abs_work_dir'],
+                                               self.get_filename_from_url(source))
                 # generate checksums for every uploaded file
                 beet_file_name = '{}.beet'.format(downloaded_file)
                 # upload checksums to a separate subdirectory
                 beet_dest = '{prefix}beetmover-checksums/{f}.beet'.format(
                     prefix=self._get_template_vars()["s3_prefix"],
                     f=self._strip_prefix(s3_key)
                 )
                 beet_contents = '\n'.join([
@@ -305,67 +309,70 @@ class BeetMover(BaseScript, VirtualenvMi
                 ])
                 self.write_to_file(beet_file_name, beet_contents)
                 self.upload_bit(source=downloaded_file, s3_key=s3_key,
                                 bucket=bucket)
                 self.upload_bit(source=beet_file_name, s3_key=beet_dest,
                                 bucket=bucket)
         self.log('Success!')
 
-
     def upload_bit(self, source, s3_key, bucket):
         boto = self.virtualenv_imports['boto']
         self.info('uploading to s3 with key: {}'.format(s3_key))
         key = boto.s3.key.Key(bucket)  # create new key
         key.key = s3_key  # set key name
 
         self.info("Checking if `{}` already exists".format(s3_key))
         key = bucket.get_key(s3_key)
         if not key:
             self.info("Uploading to `{}`".format(s3_key))
             key = bucket.new_key(s3_key)
             # set key value
             mime_type, _ = mimetypes.guess_type(source)
-            self.retry(lambda: key.set_contents_from_filename(source, headers={'Content-Type': mime_type}),
-                       error_level=FATAL),
+            self.retry(lambda: key.set_contents_from_filename(
+                source, headers={'Content-Type': mime_type}), error_level=FATAL)
         else:
             if not get_hash(key.get_contents_as_string()) == get_hash(open(source).read()):
                 # for now, let's halt. If necessary, we can revisit this and allow for overwrites
                 #  to the same buildnum release with different bits
                 self.fatal("`{}` already exists with different checksum.".format(s3_key))
             self.log("`{}` has the same MD5 checksum, not uploading".format(s3_key))
 
     def scan_bits(self):
 
         dirs = self.query_abs_dirs()
 
-        filenames = [f for f in listdir(dirs['abs_work_dir']) if isfile(join(dirs['abs_work_dir'], f))]
+        filenames = [f for f in listdir(dirs['abs_work_dir'])
+                     if isfile(join(dirs['abs_work_dir'], f))]
         self.mkdir_p(self.dest_dir)
         for file_name in filenames:
             if self._matches_exclude(file_name):
                 self.info("Excluding {} from virus scan".format(file_name))
             else:
-                self.info('Copying {} to {}'.format(file_name,self.dest_dir))
-                self.copyfile(os.path.join(dirs['abs_work_dir'], file_name), os.path.join(self.dest_dir,file_name))
+                self.info('Copying {} to {}'.format(file_name, self.dest_dir))
+                self.copyfile(os.path.join(dirs['abs_work_dir'], file_name),
+                              os.path.join(self.dest_dir, file_name))
         self._scan_files()
         self.info('Emptying {}'.format(self.dest_dir))
         self.rmtree(self.dest_dir)
 
     def _scan_files(self):
         """Scan the files we've collected. We do the download and scan concurrently to make
         it easier to have a coherent log afterwards. Uses the venv python."""
-        external_tools_path = os.path.join(
-                              os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))), 'external_tools')
-        self.run_command([self.query_python_path(), os.path.join(external_tools_path,'extract_and_run_command.py'),
-                         '-j{}'.format(self.config['scan_parallelization']),
-                         'clamscan', '--no-summary', '--', self.dest_dir])
+        external_tools_path = os.path.join(os.path.abspath(
+            os.path.dirname(os.path.dirname(mozharness.__file__))), 'external_tools')
+        self.run_command([self.query_python_path(),
+                          os.path.join(external_tools_path, 'extract_and_run_command.py'),
+                          '-j{}'.format(self.config['scan_parallelization']),
+                          'clamscan', '--no-summary', '--', self.dest_dir])
 
     def _matches_exclude(self, keyname):
-         return any(re.search(exclude, keyname) for exclude in self.excludes)
+        return any(re.search(exclude, keyname) for exclude in self.excludes)
 
     def mime_fix(self):
         """ Add mimetypes for custom extensions """
         mimetypes.init()
         map(lambda (ext, mime_type,): mimetypes.add_type(mime_type, ext), MIME_MAP.items())
 
+
 if __name__ == '__main__':
     beet_mover = BeetMover(pop_aws_auth_from_env())
     beet_mover.run_and_exit()
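For context on upload_bit() above: it only uploads when the key is absent, and otherwise compares MD5 digests before deciding to skip or abort. A hedged sketch of that pattern, assuming boto 2.x and that the script's get_hash() is an MD5 hexdigest (its actual definition is outside this patch):

    import hashlib
    import mimetypes

    def upload_if_absent(bucket, s3_key, source):
        """Upload `source` under `s3_key` unless an identical object exists."""
        key = bucket.get_key(s3_key)
        if key is None:
            new_key = bucket.new_key(s3_key)
            mime_type, _ = mimetypes.guess_type(source)
            new_key.set_contents_from_filename(
                source, headers={'Content-Type': mime_type})
            return True
        # assumption: get_hash() in the script is an MD5 hexdigest like this
        remote = hashlib.md5(key.get_contents_as_string()).hexdigest()
        local = hashlib.md5(open(source).read()).hexdigest()
        if remote != local:
            raise RuntimeError('%s already exists with a different checksum' % s3_key)
        return False  # identical content already present; nothing to do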
--- a/testing/mozharness/scripts/release/generate-checksums.py
+++ b/testing/mozharness/scripts/release/generate-checksums.py
@@ -11,37 +11,41 @@ sys.path.insert(1, os.path.dirname(os.pa
 from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
 from mozharness.base.script import BaseScript
 from mozharness.base.vcs.vcsbase import VCSMixin
 from mozharness.mozilla.checksums import parse_checksums_file
 from mozharness.mozilla.signing import SigningMixin
 from mozharness.mozilla.buildbot import BuildbotMixin
 from mozharness.mozilla.merkle import MerkleTree
 
+
 class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, BuildbotMixin):
     config_options = [
         [["--stage-product"], {
             "dest": "stage_product",
-            "help": "Name of product used in file server's directory structure, eg: firefox, mobile",
+            "help": "Name of product used in file server's directory structure, "
+                    "e.g.: firefox, mobile",
         }],
         [["--version"], {
             "dest": "version",
-            "help": "Version of release, eg: 39.0b5",
+            "help": "Version of release, e.g.: 39.0b5",
         }],
         [["--build-number"], {
             "dest": "build_number",
-            "help": "Build number of release, eg: 2",
+            "help": "Build number of release, e.g.: 2",
         }],
         [["--bucket-name-prefix"], {
             "dest": "bucket_name_prefix",
-            "help": "Prefix of bucket name, eg: net-mozaws-prod-delivery. This will be used to generate a full bucket name (such as net-mozaws-prod-delivery-{firefox,archive}.",
+            "help": "Prefix of bucket name, e.g.: net-mozaws-prod-delivery. This will be used to "
+                    "generate a full bucket name (such as "
+                    "net-mozaws-prod-delivery-{firefox,archive}.",
         }],
         [["--bucket-name-full"], {
             "dest": "bucket_name_full",
-            "help": "Full bucket name, eg: net-mozaws-prod-delivery-firefox",
+            "help": "Full bucket name, e.g.: net-mozaws-prod-delivery-firefox",
         }],
         [["-j", "--parallelization"], {
             "dest": "parallelization",
             "default": 20,
             "type": int,
             "help": "Number of checksums file to download concurrently",
         }],
         [["-f", "--format"], {
@@ -49,58 +53,59 @@ class ChecksumsGenerator(BaseScript, Vir
             "default": [],
             "action": "append",
             "help": "Format(s) to generate big checksums file for. Default: sha512",
         }],
         [["--include"], {
             "dest": "includes",
             "default": [],
             "action": "append",
-            "help": "List of patterns to include in big checksums file. See script source for default.",
+            "help": "List of patterns to include in big checksums file. See script "
+                    "source for default.",
         }],
         [["--tools-repo"], {
             "dest": "tools_repo",
             "default": "https://hg.mozilla.org/build/tools",
         }],
         [["--credentials"], {
             "dest": "credentials",
             "help": "File containing access key and secret access key for S3",
         }],
     ] + virtualenv_config_options
 
     def __init__(self):
         BaseScript.__init__(self,
-            config_options=self.config_options,
-            require_config_file=False,
-            config={
-                "virtualenv_modules": [
-                    "pip==1.5.5",
-                    "boto",
-                ],
-                "virtualenv_path": "venv",
-                'buildbot_json_path': 'buildprops.json',
-            },
-            all_actions=[
-                "create-virtualenv",
-                "collect-individual-checksums",
-                "create-big-checksums",
-                "create-summary",
-                "sign",
-                "upload",
-                "copy-info-files",
-            ],
-            default_actions=[
-                "create-virtualenv",
-                "collect-individual-checksums",
-                "create-big-checksums",
-                "create-summary",
-                "sign",
-                "upload",
-            ],
-        )
+                            config_options=self.config_options,
+                            require_config_file=False,
+                            config={
+                                "virtualenv_modules": [
+                                    "pip==1.5.5",
+                                    "boto",
+                                ],
+                                "virtualenv_path": "venv",
+                                'buildbot_json_path': 'buildprops.json',
+                            },
+                            all_actions=[
+                                "create-virtualenv",
+                                "collect-individual-checksums",
+                                "create-big-checksums",
+                                "create-summary",
+                                "sign",
+                                "upload",
+                                "copy-info-files",
+                            ],
+                            default_actions=[
+                                "create-virtualenv",
+                                "collect-individual-checksums",
+                                "create-big-checksums",
+                                "create-summary",
+                                "sign",
+                                "upload",
+                            ],
+                            )
 
         self.checksums = {}
         self.bucket = None
         self.bucket_name = self._get_bucket_name()
         self.file_prefix = self._get_file_prefix()
         # set the env var for boto to read our special config file
         # rather than anything else we have at ~/.boto
         os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
@@ -139,17 +144,18 @@ class ChecksumsGenerator(BaseScript, Vir
                 r"^.*/jsshell.*$",
             ]
 
     def _get_bucket_name(self):
         if self.config.get('bucket_name_full'):
             return self.config['bucket_name_full']
 
         suffix = "archive"
-        # Firefox has a special bucket, per https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
+        # Firefox has a special bucket, per
+        # https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
         if self.config["stage_product"] == "firefox":
             suffix = "firefox"
 
         return "{}-{}".format(self.config["bucket_name_prefix"], suffix)
 
     def _get_file_prefix(self):
         return "pub/{}/candidates/{}-candidates/build{}/".format(
             self.config["stage_product"], self.config["version"], self.config["build_number"]
@@ -183,16 +189,17 @@ class ChecksumsGenerator(BaseScript, Vir
         """This step grabs all of the small checksums files for the release,
         filters out any unwanted files from within them, and adds the remainder
         to self.checksums for subsequent steps to use."""
         bucket = self._get_bucket()
         self.info("File prefix is: {}".format(self.file_prefix))
 
         # Temporary holding place for checksums
         raw_checksums = []
+
         def worker(item):
             self.debug("Downloading {}".format(item))
             # TODO: It would be nice to download the associated .asc file
             # and verify against it.
             sums = bucket.get_key(item).get_contents_as_string()
             raw_checksums.append(sums)
 
         def find_checksums_files():
@@ -217,17 +224,18 @@ class ChecksumsGenerator(BaseScript, Vir
         pool = ThreadPool(self.config["parallelization"])
         pool.map(worker, find_checksums_files())
 
         for c in raw_checksums:
             for f, info in parse_checksums_file(c).iteritems():
                 for pattern in self.config["includes"]:
                     if re.search(pattern, f):
                         if f in self.checksums:
-                            self.fatal("Found duplicate checksum entry for {}, don't know which one to pick.".format(f))
+                            self.fatal("Found duplicate checksum entry for {}, "
+                                       "don't know which one to pick.".format(f))
                         if not set(self.config["formats"]) <= set(info["hashes"]):
                             self.fatal("Missing necessary format for file {}".format(f))
                         self.debug("Adding checksums for file: {}".format(f))
                         self.checksums[f] = info
                         break
                 else:
                     self.debug("Ignoring checksums for file: {}".format(f))
 
@@ -239,17 +247,18 @@ class ChecksumsGenerator(BaseScript, Vir
         """
         for fmt in self.config["formats"]:
             hash_fn = self._get_hash_function(fmt)
             files = [fn for fn in sorted(self.checksums)]
             data = [self.checksums[fn]["hashes"][fmt] for fn in files]
 
             tree = MerkleTree(hash_fn, data)
             head = tree.head().encode("hex")
-            proofs = [tree.inclusion_proof(i).to_rfc6962_bis().encode("hex") for i in range(len(files))]
+            proofs = [tree.inclusion_proof(i).to_rfc6962_bis().encode("hex")
+                      for i in range(len(files))]
 
             summary = self._get_summary_filename(fmt)
             self.info("Creating summary file: {}".format(summary))
 
             content = "{} TREE_HEAD\n".format(head)
             for i in range(len(files)):
                 content += "{} {}\n".format(proofs[i], files[i])
 
--- a/testing/mozharness/scripts/release/postrelease_version_bump.py
+++ b/testing/mozharness/scripts/release/postrelease_version_bump.py
@@ -180,11 +180,12 @@ class PostReleaseVersionBump(MercurialSc
         message = "No bug - Tagging {revision} with {tags} a=release CLOSED TREE"
         message = message.format(
             revision=self.config["revision"],
             tags=', '.join(tags))
         self.hg_tag(cwd=dirs["abs_gecko_dir"], tags=tags,
                     revision=self.config["revision"], message=message,
                     user=self.config["hg_user"], force=True)
 
+
 # __main__ {{{1
 if __name__ == '__main__':
     PostReleaseVersionBump().run_and_exit()
--- a/testing/mozharness/scripts/release/publish_balrog.py
+++ b/testing/mozharness/scripts/release/publish_balrog.py
@@ -161,12 +161,11 @@ class PublishBalrog(MercurialScript, Bui
             cmd.extend(["--schedule-at", self.config["schedule_at"]])
         if self.config.get("background_rate"):
             cmd.extend(["--background-rate", str(self.config["background_rate"])])
 
         self.retry(lambda: self.run_command(cmd, halt_on_failure=True, env=env),
                    error_level=FATAL)
 
 
-
 # __main__ {{{1
 if __name__ == '__main__':
     PublishBalrog().run_and_exit()
--- a/testing/mozharness/scripts/release/push-candidate-to-releases.py
+++ b/testing/mozharness/scripts/release/push-candidate-to-releases.py
@@ -61,37 +61,37 @@ class ReleasePusher(BaseScript, Virtuale
             "default": 20,
             "type": "int",
             "help": "Number of copy requests to run concurrently",
         }],
     ] + virtualenv_config_options
 
     def __init__(self, aws_creds):
         BaseScript.__init__(self,
-            config_options=self.config_options,
-            require_config_file=False,
-            config={
-                "virtualenv_modules": [
-                    "pip==1.5.5",
-                    "boto",
-                    "redo",
-                ],
-                "virtualenv_path": "venv",
-            },
-            all_actions=[
-                "create-virtualenv",
-                "activate-virtualenv",
-                "push-to-releases",
-            ],
-            default_actions=[
-                "create-virtualenv",
-                "activate-virtualenv",
-                "push-to-releases",
-            ],
-        )
+                            config_options=self.config_options,
+                            require_config_file=False,
+                            config={
+                                    "virtualenv_modules": [
+                                        "pip==1.5.5",
+                                        "boto",
+                                        "redo",
+                                    ],
+                                    "virtualenv_path": "venv",
+                                   },
+                            all_actions=[
+                                "create-virtualenv",
+                                "activate-virtualenv",
+                                "push-to-releases",
+                            ],
+                            default_actions=[
+                                "create-virtualenv",
+                                "activate-virtualenv",
+                                "push-to-releases",
+                            ],
+                            )
 
         # validate aws credentials
         if not (all(aws_creds) or self.config.get('credentials')):
             self.fatal("aws creds not defined. please add them to your config or env.")
         if any(aws_creds) and self.config.get('credentials'):
             self.fatal("aws creds found in env and self.config. please declare in one place only.")
 
         # set aws credentials
@@ -147,38 +147,40 @@ class ReleasePusher(BaseScript, Virtuale
             self.warning("Destination already exists with %s keys" % len(keys))
 
         def worker(item):
             source, destination = item
 
             def copy_key():
                 source_key = bucket.get_key(source)
                 dest_key = bucket.get_key(destination)
-                # According to http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
+                # According to
+                # http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
                 # S3 key MD5 is represented as ETag, except when objects are
                 # uploaded using multipart method. In this case objects's ETag
                 # is constructed using its MD5, minus symbol, and number of
-                # part. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823
+                # part. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823  # noqa
                 source_md5 = source_key.etag.split("-")[0]
                 if dest_key:
                     dest_md5 = dest_key.etag.split("-")[0]
                 else:
                     dest_md5 = None
 
                 if not dest_key:
                     self.info("Copying {} to {}".format(source, destination))
                     bucket.copy_key(destination, self.config["bucket_name"],
                                     source)
                 elif source_md5 == dest_md5:
                     self.warning(
                         "{} already exists with the same content ({}), skipping copy".format(
                             destination, dest_md5))
                 else:
                     self.fatal(
-                        "{} already exists with the different content (src ETag: {}, dest ETag: {}), aborting".format(
+                        "{} already exists with the different content "
+                        "(src ETag: {}, dest ETag: {}), aborting".format(
                             destination, source_key.etag, dest_key.etag))
 
             return retry(copy_key, sleeptime=5, max_sleeptime=60,
                          retry_exceptions=(S3CopyError, S3ResponseError))
 
         def find_release_files():
             candidates_prefix = self._get_candidates_prefix()
             release_prefix = self._get_releases_prefix()
@@ -190,11 +192,12 @@ class ReleasePusher(BaseScript, Virtuale
                 else:
                     destination = keyname.replace(candidates_prefix,
                                                   release_prefix)
                     yield (keyname, destination)
 
         pool = ThreadPool(self.config["parallelization"])
         pool.map(worker, find_release_files())
 
+
 if __name__ == "__main__":
     myScript = ReleasePusher(pop_aws_auth_from_env())
     myScript.run_and_exit()
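The copy_key() closure above short-circuits on matching ETags before copying within the bucket. The comparison logic, extracted as a hedged standalone sketch (boto 2.x assumed; names are placeholders):

    def needs_copy(source_key, dest_key):
        """Return True if the copy should happen, False if it can be skipped."""
        if dest_key is None:
            return True
        # Multipart uploads produce an ETag of "<md5>-<parts>", so only the
        # MD5 portion before the dash is comparable.
        source_md5 = source_key.etag.split('-')[0]
        dest_md5 = dest_key.etag.split('-')[0]
        if source_md5 == dest_md5:
            return False
        raise RuntimeError('destination exists with different content')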
--- a/testing/mozharness/scripts/release/submit-to-ct.py
+++ b/testing/mozharness/scripts/release/submit-to-ct.py
@@ -11,43 +11,44 @@ from mozharness.mozilla.signed_certifica
 
 
 class CTSubmitter(BaseScript, VirtualenvMixin):
     config_options = virtualenv_config_options
 
     config_options = [
         [["--chain"], {
             "dest": "chain",
-            "help": "URL from which to download the cert chain to be submitted to CT (in PEM format)"
+            "help": "URL from which to download the cert chain to be "
+                    "submitted to CT (in PEM format)"
         }],
         [["--log"], {
             "dest": "log",
             "help": "URL for the log to which the chain should be submitted"
         }],
         [["--sct"], {
             "dest": "sct",
             "help": "File where the SCT from the log should be written"
         }],
     ]
 
     def __init__(self):
         BaseScript.__init__(self,
-            config_options=self.config_options,
-            config={
-                "virtualenv_modules": [
-                    "pem",
-                    "redo",
-                    "requests",
-                ],
-                "virtualenv_path": "venv",
-            },
-            require_config_file=False,
-            all_actions=["add-chain"],
-            default_actions=["add-chain"],
-        )
+                            config_options=self.config_options,
+                            config={
+                                "virtualenv_modules": [
+                                    "pem",
+                                    "redo",
+                                    "requests",
+                                ],
+                                "virtualenv_path": "venv",
+                            },
+                            require_config_file=False,
+                            all_actions=["add-chain"],
+                            default_actions=["add-chain"],
+                            )
 
         self.chain_url = self.config["chain"]
         self.log_url = self.config["log"]
         self.sct_filename = self.config["sct"]
 
     def add_chain(self):
         from redo import retry
         import requests
@@ -55,27 +56,28 @@ class CTSubmitter(BaseScript, Virtualenv
 
         def get_chain():
             r = requests.get(self.chain_url)
             r.raise_for_status()
             return r.text
 
         chain = retry(get_chain)
 
-        req = { "chain": [] }
+        req = {"chain": []}
         chain = pem.parse(chain)
         for i in range(len(chain)):
             cert = crypto.load_certificate(crypto.FILETYPE_PEM, str(chain[i]))
             der = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert)
             req["chain"].append(base64.b64encode(der))
 
         def post_chain():
             r = requests.post(self.log_url + '/ct/v1/add-chain', json=req)
             r.raise_for_status()
             return r.json()
 
         resp = retry(post_chain)
         sct = SignedCertificateTimestamp(resp)
         self.write_to_file(self.sct_filename, sct.to_rfc6962())
 
+
 if __name__ == "__main__":
     myScript = CTSubmitter()
     myScript.run_and_exit()
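add_chain() above implements the RFC 6962 add-chain call: each certificate in the PEM chain is converted to DER, base64-encoded, and POSTed to the log. A self-contained sketch, under the assumption that the module-level crypto import (not shown in this hunk) is pyOpenSSL's:

    import base64

    import pem
    import requests
    from OpenSSL import crypto

    def submit_chain(log_url, pem_text):
        req = {'chain': []}
        for cert_pem in pem.parse(pem_text):
            cert = crypto.load_certificate(crypto.FILETYPE_PEM, str(cert_pem))
            der = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert)
            req['chain'].append(base64.b64encode(der))
        resp = requests.post(log_url + '/ct/v1/add-chain', json=req)
        resp.raise_for_status()
        return resp.json()  # SCT fields, consumed by SignedCertificateTimestamp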
old mode 100644
new mode 100755
--- a/testing/mozharness/scripts/release/updates.py
+++ b/testing/mozharness/scripts/release/updates.py
@@ -89,18 +89,17 @@ class UpdatesBumper(MercurialScript, Bui
         # taskcluster properties
         self.read_buildbot_config()
         if not self.buildbot_config:
             self.warning("Skipping buildbot properties overrides")
             return
         # TODO: version and appVersion should come from repo
         props = self.buildbot_config["properties"]
         for prop in ['product', 'version', 'build_number', 'revision',
-                     'appVersion', 'balrog_api_root', "channels",
-                     'generate_bz2_blob']:
+                     'appVersion', 'balrog_api_root', "channels"]:
             if props.get(prop):
                 self.info("Overriding %s with %s" % (prop, props[prop]))
                 self.config[prop] = props.get(prop)
 
         partials = [v.strip() for v in props["partial_versions"].split(",")]
         self.config["partial_versions"] = [v.split("build") for v in partials]
         self.config["platforms"] = [p.strip() for p in
                                     props["platforms"].split(",")]
@@ -265,20 +264,16 @@ class UpdatesBumper(MercurialScript, Bui
                          build_number=self.config["build_number"])
                 for t in tags]
         self.hg_tag(cwd=dirs["abs_tools_dir"], tags=tags,
                     user=self.config["hg_user"], force=True)
 
     def submit_to_balrog(self):
         for _, channel_config in self.query_channel_configs():
             self._submit_to_balrog(channel_config)
-        if 'generate_bz2_blob' in self.config and \
-                self.config['generate_bz2_blob']:
-            for _, channel_config in self.query_channel_configs():
-                self._submit_to_balrog_bz2(channel_config)
 
     def _submit_to_balrog(self, channel_config):
         dirs = self.query_abs_dirs()
         auth = os.path.join(os.getcwd(), self.config['credentials_file'])
         cmd = [
             sys.executable,
             os.path.join(dirs["abs_tools_dir"],
                          "scripts/build-promotion/balrog-release-pusher.py")]
@@ -306,65 +301,12 @@ class UpdatesBumper(MercurialScript, Bui
             cmd.extend(["--partial-update", partial])
         if channel_config["requires_mirrors"]:
             cmd.append("--requires-mirrors")
         if self.config["balrog_use_dummy_suffix"]:
             cmd.append("--dummy")
 
         self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
 
-    def _submit_to_balrog_bz2(self, channel_config):
-        if "bz2_blob_suffix" not in channel_config:
-            self.info("No need to generate BZ2 blob")
-            return
-
-        dirs = self.query_abs_dirs()
-        # Use env varialbe instead of command line to avoid issues with blob
-        # names starting with "-", e.g. "-bz2"
-        env = {"BALROG_BLOB_SUFFIX": channel_config["bz2_blob_suffix"]}
-        auth = os.path.join(os.getcwd(), self.config['credentials_file'])
-        cmd = [
-            sys.executable,
-            os.path.join(dirs["abs_tools_dir"],
-                         "scripts/build-promotion/balrog-release-pusher.py")]
-        cmd.extend([
-            "--api-root", self.config["balrog_api_root"],
-            "--download-domain", self.config["download_domain"],
-            "--archive-domain", self.config["archive_domain"],
-            "--credentials-file", auth,
-            "--product", self.config["product"],
-            "--version", self.config["version"],
-            "--build-number", str(self.config["build_number"]),
-            "--app-version", self.config["appVersion"],
-            "--username", self.config["balrog_username"],
-            "--complete-mar-filename-pattern",
-            channel_config["complete_mar_filename_pattern"],
-            "--complete-mar-bouncer-product-pattern",
-            channel_config["complete_mar_bouncer_product_pattern"],
-            "--verbose",
-        ])
-
-        for v, build_number in self.query_matching_partials(channel_config):
-            if v < "56.0":
-                self.info("Adding %s to partials" % v)
-                partial = "{version}build{build_number}".format(
-                    version=v, build_number=build_number)
-                cmd.extend(["--partial-update", partial])
-            else:
-                self.info("Not adding %s to partials" % v)
-
-        for c in channel_config["channel_names"]:
-            cmd.extend(["--channel", c])
-        for r in channel_config["bz2_rules_to_update"]:
-            cmd.extend(["--rule-to-update", r])
-        for p in self.config["platforms"]:
-            cmd.extend(["--platform", p])
-        if channel_config["requires_mirrors"]:
-            cmd.append("--requires-mirrors")
-        if self.config["balrog_use_dummy_suffix"]:
-            cmd.append("--dummy")
-
-        self.retry(lambda: self.run_command(cmd, halt_on_failure=True, env=env))
-
 
 # __main__ {{{1
 if __name__ == '__main__':
     UpdatesBumper().run_and_exit()
--- a/testing/mozharness/scripts/spidermonkey_build.py
+++ b/testing/mozharness/scripts/spidermonkey_build.py
@@ -146,32 +146,33 @@ class SpidermonkeyBuild(MockMixin,
 
         if self.buildbot_config is None:
             self.info("Reading buildbot build properties...")
             self.read_buildbot_config()
 
         if self.buildbot_config:
             bb_props = [('mock_target', 'mock_target', None),
                         ('hgurl', 'hgurl', None),
-                        ('clobberer_url', 'clobberer_url', 'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
+                        ('clobberer_url', 'clobberer_url',
+                         'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
                         ('force_clobber', 'force_clobber', None),
                         ('branch', 'blob_upload_branch', None),
                         ]
             buildbot_props = self.buildbot_config.get('properties', {})
             for bb_prop, cfg_prop, default in bb_props:
                 if not self.config.get(cfg_prop) and buildbot_props.get(bb_prop, default):
                     self.config[cfg_prop] = buildbot_props.get(bb_prop, default)
             self.config['is_automation'] = True
         else:
             self.config['is_automation'] = False
             self.config.setdefault('blob_upload_branch', 'devel')
 
         dirs = self.query_abs_dirs()
         replacements = self.config['env_replacements'].copy()
-        for k,v in replacements.items():
+        for k, v in replacements.items():
             replacements[k] = v % dirs
 
         self.env = self.query_env(replace_dict=replacements,
                                   partial_env=self.config['partial_env'],
                                   purge_env=nuisance_env_vars)
         self.env['MOZ_UPLOAD_DIR'] = dirs['abs_blob_upload_dir']
         self.env['TOOLTOOL_DIR'] = dirs['abs_work_dir']
 
@@ -231,24 +232,26 @@ class SpidermonkeyBuild(MockMixin,
         elif 'branch' in self.config:
             # Used for locally testing try vs non-try
             return self.config['branch']
         else:
             return os.path.basename(self.query_repo())
 
     def query_compiler_manifest(self):
         dirs = self.query_abs_dirs()
-        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['compiler_manifest'])
+        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'],
+                                self.config['compiler_manifest'])
         if os.path.exists(manifest):
             return manifest
         return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])
 
     def query_sixgill_manifest(self):
         dirs = self.query_abs_dirs()
-        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
+        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'],
+                                self.config['sixgill_manifest'])
         if os.path.exists(manifest):
             return manifest
         return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])
 
     def query_buildid(self):
         if self.buildid:
             return self.buildid
         if self.buildbot_config and 'properties' in self.buildbot_config:
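query_compiler_manifest() and query_sixgill_manifest() above share a lookup-with-fallback shape: prefer the manifest under the analysis script directory, else fall back to the work dir. A tiny sketch (directory names hypothetical):

    import os

    def find_manifest(work_dir, analysis_scriptdir, name):
        candidate = os.path.join(work_dir, analysis_scriptdir, name)
        if os.path.exists(candidate):
            return candidate
        return os.path.join(work_dir, name)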
--- a/testing/mozharness/scripts/telemetry/telemetry_client.py
+++ b/testing/mozharness/scripts/telemetry/telemetry_client.py
@@ -13,17 +13,17 @@ import sys
 # load modules from parent dir
 sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
 
 GECKO_SRCDIR = os.path.join(os.path.expanduser('~'), 'checkouts', 'gecko')
 
 TELEMETRY_TEST_HOME = os.path.join(GECKO_SRCDIR, 'toolkit', 'components', 'telemetry',
                                    'tests', 'marionette')
 
-from mozharness.base.python import PostScriptRun, PreScriptAction
+from mozharness.base.python import PreScriptAction
 from mozharness.mozilla.structuredlog import StructuredOutputParser
 from mozharness.mozilla.testing.testbase import (
     TestingMixin,
     testing_config_options,
 )
 from mozharness.mozilla.testing.codecoverage import (
     CodeCoverageMixin,
     code_coverage_config_options
@@ -31,17 +31,18 @@ from mozharness.mozilla.testing.codecove
 from mozharness.mozilla.vcstools import VCSToolsScript
 
 # General command line arguments for Firefox ui tests
 telemetry_tests_config_options = [
     [["--allow-software-gl-layers"], {
         "action": "store_true",
         "dest": "allow_software_gl_layers",
         "default": False,
-        "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor.",
+        "help": "Permits a software GL implementation (such as LLVMPipe) "
+                "to use the GL compositor.",
     }],
     [["--enable-webrender"], {
         "action": "store_true",
         "dest": "enable_webrender",
         "default": False,
         "help": "Tries to enable the WebRender compositor.",
     }],
     [['--dry-run'], {
@@ -96,17 +97,16 @@ class TelemetryTests(TestingMixin, VCSTo
         self.test_url = self.config.get('test_url')
 
         if not self.test_url and not self.test_packages_url:
             self.fatal(
                 'You must use --test-url, or --test-packages-url')
 
     @PreScriptAction('create-virtualenv')
     def _pre_create_virtualenv(self, action):
-        dirs = self.query_abs_dirs()
 
         requirements = os.path.join(GECKO_SRCDIR, 'testing',
                                     'config', 'telemetry_tests_requirements.txt')
         self.register_virtualenv_module(requirements=[requirements], two_pass=True)
 
     def query_abs_dirs(self):
         if self.abs_dirs:
             return self.abs_dirs
@@ -153,17 +153,18 @@ class TelemetryTests(TestingMixin, VCSTo
             '-vv',
         ]
 
         parser = StructuredOutputParser(config=self.config,
                                         log_obj=self.log_obj,
                                         strict=False)
 
         # Add the default tests to run
-        tests = [os.path.join(dirs['abs_telemetry_dir'], 'tests', test) for test in self.default_tests]
+        tests = [os.path.join(dirs['abs_telemetry_dir'], 'tests', test)
+                 for test in self.default_tests]
         cmd.extend(tests)
 
         # Set further environment settings
         env = env or self.query_env()
         env.update({'MINIDUMP_SAVE_PATH': dirs['abs_blob_upload_dir']})
         if self.query_minidump_stackwalk():
             env.update({'MINIDUMP_STACKWALK': self.minidump_stackwalk_path})
         env['RUST_BACKTRACE'] = '1'
--- a/testing/mozharness/scripts/web_platform_tests.py
+++ b/testing/mozharness/scripts/web_platform_tests.py
@@ -1,37 +1,36 @@
 #!/usr/bin/env python
 # ***** BEGIN LICENSE BLOCK *****
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 # ***** END LICENSE BLOCK *****
 import copy
-import glob
-import json
 import os
 import sys
 
 # load modules from parent dir
 sys.path.insert(1, os.path.dirname(sys.path[0]))
 
 from mozharness.base.errors import BaseErrorList
 from mozharness.base.script import PreScriptAction
 from mozharness.base.vcs.vcsbase import MercurialScript
 from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
-from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options, TOOLTOOL_PLATFORM_DIR
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
 from mozharness.mozilla.testing.codecoverage import (
     CodeCoverageMixin,
     code_coverage_config_options
 )
 from mozharness.mozilla.testing.errors import HarnessErrorList
 
 from mozharness.mozilla.structuredlog import StructuredOutputParser
 from mozharness.base.log import INFO
 
+
 class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCoverageMixin):
     config_options = [
         [['--test-type'], {
             "action": "extend",
             "dest": "test_type",
             "help": "Specify the test types to run."}
          ],
         [['--e10s'], {
@@ -49,17 +48,18 @@ class WebPlatformTest(TestingMixin, Merc
             "action": "store",
             "dest": "this_chunk",
             "help": "Number of this chunk"}
          ],
         [["--allow-software-gl-layers"], {
             "action": "store_true",
             "dest": "allow_software_gl_layers",
             "default": False,
-            "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}
+            "help": "Permits a software GL implementation (such as LLVMPipe) "
+                    "to use the GL compositor."}
          ],
         [["--enable-webrender"], {
             "action": "store_true",
             "dest": "enable_webrender",
             "default": False,
             "help": "Tries to enable the WebRender compositor."}
          ],
         [["--headless"], {
@@ -163,17 +163,17 @@ class WebPlatformTest(TestingMixin, Merc
                                     'marionette_requirements.txt')
 
         self.register_virtualenv_module(requirements=[requirements],
                                         two_pass=True)
 
     def _query_cmd(self):
         if not self.binary_path:
             self.fatal("Binary path could not be determined")
-            #And exit
+            # And exit
 
         c = self.config
         dirs = self.query_abs_dirs()
         abs_app_dir = self.query_abs_app_dir()
         run_file_name = "runtests.py"
 
         cmd = [self.query_python_path('python'), '-u']
         cmd.append(os.path.join(dirs["abs_wpttest_dir"], run_file_name))
@@ -261,17 +261,18 @@ class WebPlatformTest(TestingMixin, Merc
 
     def _install_fonts(self):
         # Ensure the Ahem font is available
         dirs = self.query_abs_dirs()
 
         if not sys.platform.startswith("darwin"):
             font_path = os.path.join(os.path.dirname(self.binary_path), "fonts")
         else:
-            font_path = os.path.join(os.path.dirname(self.binary_path), os.pardir, "Resources", "res", "fonts")
+            font_path = os.path.join(os.path.dirname(self.binary_path), os.pardir,
+                                     "Resources", "res", "fonts")
         if not os.path.exists(font_path):
             os.makedirs(font_path)
         ahem_src = os.path.join(dirs["abs_wpttest_dir"], "tests", "fonts", "Ahem.ttf")
         ahem_dest = os.path.join(font_path, "Ahem.ttf")
         with open(ahem_src, "rb") as src, open(ahem_dest, "wb") as dest:
             dest.write(src.read())
 
     def run_tests(self):
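_install_fonts() above resolves a per-platform font directory and copies Ahem.ttf into it. A minimal standalone sketch of the same steps (paths are placeholders):

    import os
    import sys

    def install_ahem(binary_path, wpt_dir):
        if not sys.platform.startswith('darwin'):
            font_path = os.path.join(os.path.dirname(binary_path), 'fonts')
        else:
            font_path = os.path.join(os.path.dirname(binary_path), os.pardir,
                                     'Resources', 'res', 'fonts')
        if not os.path.exists(font_path):
            os.makedirs(font_path)
        src = os.path.join(wpt_dir, 'tests', 'fonts', 'Ahem.ttf')
        dest = os.path.join(font_path, 'Ahem.ttf')
        with open(src, 'rb') as s, open(dest, 'wb') as d:
            d.write(s.read())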
--- a/tools/lint/flake8.yml
+++ b/tools/lint/flake8.yml
@@ -15,18 +15,21 @@ flake8:
         - python/mozversioncontrol
         - security/manager
         - taskcluster
         - testing/firefox-ui
         - testing/mach_commands.py
         - testing/marionette/client
         - testing/marionette/harness
         - testing/marionette/puppeteer
+        - testing/mochitest
         - testing/mozbase
-        - testing/mochitest
+        - testing/mozharness/mozfile
+        - testing/mozharness/mozinfo
+        - testing/mozharness/scripts
         - testing/remotecppunittests.py
         - testing/runcppunittests.py
         - testing/talos/
         - testing/xpcshell
         - tools/git
         - tools/lint
         - tools/mercurial
         - tools/tryselect
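With these paths added, the newly covered directories can be linted locally; assuming the standard in-tree mozlint entry point:

    ./mach lint -l flake8 testing/mozharness/mozfile testing/mozharness/mozinfo testing/mozharness/scripts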