Bug 1438839 - Make tools/ flake8/pep8 compatible r?ahal
author: Sylvestre Ledru <sledru@mozilla.com>
Fri, 16 Feb 2018 11:45:31 +0100
changeset 756131 ef1e44a22641c41f65b840a37bf0ead0e810b0b1
parent 756119 a274eb9c8f1f980668fb53a60e10df2b3cbb718e
child 756132 fc78cbc89204c7c7940d5ed8892282e7260c2949
push id: 99389
push user: sledru@mozilla.com
push date: Fri, 16 Feb 2018 14:22:33 +0000
reviewers: ahal
bugs: 1438839
milestone: 60.0a1
Bug 1438839 - Make tools/ flake8/pep8 compatible r?ahal

MozReview-Commit-ID: PhyBiDyiug
tools/.flake8
tools/compare-locales/mach_commands.py
tools/docs/conf.py
tools/docs/mach_commands.py
tools/docs/moztreedocs/__init__.py
tools/jprof/split-profile.py
tools/lint/docs/conf.py
tools/lint/eslint/__init__.py
tools/lint/eslint/setup_helper.py
tools/lint/python/__init__.py
tools/lint/python/compat.py
tools/lint/python/flake8.py
tools/lint/wpt/wpt.py
tools/lint/yamllint_/__init__.py
tools/mach_commands.py
tools/power/mach_commands.py
tools/profiler/merge-profiles.py
tools/profiler/nm-symbolicate.py
tools/rb/find_leakers.py
tools/rb/fix_linux_stack.py
tools/rb/fix_macosx_stack.py
tools/rb/fix_stack_using_bpsyms.py
tools/tryselect/cli.py
tools/tryselect/mach_commands.py
tools/tryselect/selectors/fuzzy.py
tools/tryselect/selectors/syntax.py
tools/tryselect/tasks.py
tools/tryselect/templates.py
tools/tryselect/test/test_fuzzy.py
tools/tryselect/test/test_templates.py
tools/tryselect/vcs.py
tools/update-packaging/generatesnippet.py
tools/update-packaging/make_incremental_updates.py
tools/update-packaging/test_make_incremental_updates.py
new file mode 100644
--- /dev/null
+++ b/tools/.flake8
@@ -0,0 +1,6 @@
+[flake8]
+ignore =
+    E121, E123, E126, E129, E133, E226, E241, E242, E704, W503, E402,
+    # The following errors should be fixed eventually
+    # line too long
+    E501,
--- a/tools/compare-locales/mach_commands.py
+++ b/tools/compare-locales/mach_commands.py
@@ -39,16 +39,17 @@ class CompareLocales(object):
                      help="Compare projects that are disabled")
     def compare(self, **kwargs):
         from compare_locales.commands import CompareLocales
 
         class ErrorHelper(object):
             '''Dummy ArgumentParser to marshall compare-locales
             commandline errors to mach exceptions.
             '''
+
             def error(self, msg):
                 raise FailedCommandError(msg)
 
             def exit(self, message=None, status=0):
                 raise FailedCommandError(message, exit_code=status)
 
         cmd = CompareLocales()
         cmd.parser = ErrorHelper()
--- a/tools/docs/conf.py
+++ b/tools/docs/conf.py
@@ -51,17 +51,17 @@ js_source_path = [
 ]
 root_for_relative_js_paths = '.'
 jsdoc_config_path = 'tools/docs/jsdoc.json'
 
 templates_path = ['_templates']
 source_suffix = '.rst'
 source_suffix = ['.rst', '.md']
 source_parsers = {
-   '.md': CommonMarkParser,
+    '.md': CommonMarkParser,
 }
 master_doc = 'index'
 project = u'Mozilla Source Tree Docs'
 year = datetime.now().year
 
 # Grab the version from the source tree's milestone.
 # FUTURE Use Python API from bug 941299.
 with open(os.path.join(topsrcdir, 'config', 'milestone.txt'), 'rt') as fh:
--- a/tools/docs/mach_commands.py
+++ b/tools/docs/mach_commands.py
@@ -1,16 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
-import platform
 import sys
 
 from mach.decorators import (
     Command,
     CommandArgument,
     CommandProvider,
 )
 
@@ -42,22 +41,23 @@ class Documentation(MachCommandBase):
     @CommandArgument('--http', const=':6666', metavar='ADDRESS', nargs='?',
                      help='Serve documentation on an HTTP server, '
                           'e.g. ":6666".')
     @CommandArgument('--upload', action='store_true',
                      help='Upload generated files to S3')
     def build_docs(self, what=None, format=None, outdir=None, auto_open=True,
                    http=None, archive=False, upload=False):
         try:
-            jsdoc = which.which('jsdoc')
+            which.which('jsdoc')
         except which.WhichError:
             return die('jsdoc not found - please install from npm.')
 
         self._activate_virtualenv()
-        self.virtualenv_manager.install_pip_requirements(os.path.join(here, 'requirements.txt'))
+        self.virtualenv_manager.install_pip_requirements(
+            os.path.join(here, 'requirements.txt'))
 
         import sphinx
         import webbrowser
         import moztreedocs
 
         if not outdir:
             outdir = os.path.join(self.topobjdir, 'docs')
         if not what:
@@ -171,9 +171,8 @@ class Documentation(MachCommandBase):
         if project == 'main':
             s3_upload(files)
 
 
 def die(msg, exit_code=1):
     msg = '%s: %s' % (sys.argv[0], msg)
     print(msg, file=sys.stderr)
     return exit_code
-
--- a/tools/docs/moztreedocs/__init__.py
+++ b/tools/docs/moztreedocs/__init__.py
@@ -53,17 +53,17 @@ class SphinxManager(object):
 
             if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
                 self.add_python_package_dir(os.path.join(reldir, value))
 
     def add_tree(self, source_dir, dest_dir):
         """Add a directory from where docs should be sourced."""
         if dest_dir in self._trees:
             raise Exception('%s has already been registered as a destination.'
-                % dest_dir)
+                            % dest_dir)
 
         self._trees[dest_dir] = source_dir
 
     def add_python_package_dir(self, source_dir):
         """Add a directory containing Python packages.
 
         Added directories will have Python API docs generated automatically.
         """
--- a/tools/jprof/split-profile.py
+++ b/tools/jprof/split-profile.py
@@ -50,69 +50,75 @@
 # functions in the input file can lead to a logical splitting of the
 # profile into segments.
 
 import sys
 import subprocess
 import os.path
 
 if len(sys.argv) < 5:
-    sys.stderr.write("Expected arguments: <jprof> <split-file> <program> <jprof-log>\n")
+    sys.stderr.write(
+        "Expected arguments: <jprof> <split-file> <program> <jprof-log>\n")
     sys.exit(1)
 
 jprof = sys.argv[1]
 splitfile = sys.argv[2]
 passthrough = sys.argv[3:]
 
 for f in [jprof, splitfile]:
     if not os.path.isfile(f):
         sys.stderr.write("could not find file: {0}\n".format(f))
         sys.exit(1)
 
+
 def read_splits(splitfile):
     """
     Read splitfile (each line of which contains a name, a space, and
     then a function name to split on), and return a list of pairs
     representing exactly that.  (Note that the name cannot contain
     spaces, but the function name can, and often does.)
     """
     def line_to_split(line):
         line = line.strip("\r\n")
         idx = line.index(" ")
-        return (line[0:idx], line[idx+1:])
+        return (line[0:idx], line[idx + 1:])
 
     io = open(splitfile, "r")
     result = [line_to_split(line) for line in io]
     io.close()
     return result
 
+
 splits = read_splits(splitfile)
 
+
 def generate_profile(options, destfile):
     """
     Run jprof to generate one split of the profile.
     """
     args = [jprof] + options + passthrough
-    print "Generating {0}".format(destfile)
+    print("Generating {0}".format(destfile))
     destio = open(destfile, "w")
     # jprof expects the "jprof-map" file to be in its current working directory
     cwd = None
     for option in passthrough:
         if option.find("jprof-log"):
             cwd = os.path.dirname(option)
     if cwd is None:
-        raise StandardError("no jprof-log option given")
+        raise StandardError("no jprof-log option given")  # noqa: F821
     process = subprocess.Popen(args, stdout=destio, cwd=cwd)
     process.wait()
     destio.close()
     if process.returncode != 0:
         os.remove(destfile)
-        sys.stderr.write("Error {0} from command:\n  {1}\n".format(process.returncode, " ".join(args)))
+        sys.stderr.write("Error {0} from command:\n  {1}\n".format(
+            process.returncode, " ".join(args)))
         sys.exit(process.returncode)
 
+
 def output_filename(number, splitname):
     """
     Return the filename (absolute path) we should use to output the
     profile segment with the given number and splitname.  Splitname
     should be None for the complete profile and the remainder.
     """
     def pad_count(i):
         result = str(i)
@@ -122,16 +128,17 @@ def output_filename(number, splitname):
 
     name = pad_count(number)
     if splitname is not None:
         name += "-" + splitname
 
     return os.path.join(os.path.dirname(splitfile),
                         "jprof-{0}.html".format(name))
 
+
 # generate the complete profile
 generate_profile([], output_filename(0, None))
 
 # generate the listed splits
 count = 1
 excludes = []
 for (splitname, splitfunction) in splits:
     generate_profile(excludes + ["-i" + splitfunction],
--- a/tools/lint/docs/conf.py
+++ b/tools/lint/docs/conf.py
@@ -79,18 +79,18 @@ htmlhelp_basename = 'mozlintdoc'
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {}
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-  (master_doc, 'mozlint.tex', u'mozlint Documentation',
-   u'Andrew Halberstadt', 'manual'),
+    (master_doc, 'mozlint.tex', u'mozlint Documentation',
+     u'Andrew Halberstadt', 'manual'),
 ]
 
 # -- Options for manual page output ---------------------------------------
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
     (master_doc, 'mozlint', u'mozlint Documentation',
@@ -98,15 +98,15 @@ man_pages = [
 ]
 
 # -- Options for Texinfo output -------------------------------------------
 
 # Grouping the document tree into Texinfo files. List of tuples
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-  (master_doc, 'mozlint', u'mozlint Documentation',
-   author, 'mozlint', 'One line description of project.',
-   'Miscellaneous'),
+    (master_doc, 'mozlint', u'mozlint Documentation',
+     author, 'mozlint', 'One line description of project.',
+     'Miscellaneous'),
 ]
 
 # Example configuration for intersphinx: refer to the Python standard library.
 intersphinx_mapping = {'https://docs.python.org/': None}
--- a/tools/lint/eslint/__init__.py
+++ b/tools/lint/eslint/__init__.py
@@ -50,17 +50,18 @@ def lint(paths, config, binary=None, fix
     #  - Any provided by the binary argument.
     #  - Any pointed at by the ESLINT environmental variable.
     #  - Those provided by |mach lint --setup|.
 
     if not binary:
         binary = os.environ.get('ESLINT', None)
 
         if not binary:
-            binary = os.path.join(module_path, "node_modules", ".bin", "eslint")
+            binary = os.path.join(
+                module_path, "node_modules", ".bin", "eslint")
             if not os.path.isfile(binary):
                 binary = None
 
     if not binary:
         print(ESLINT_NOT_FOUND_MESSAGE)
         return 1
 
     extra_args = lintargs.get('extra_args') or []
--- a/tools/lint/eslint/setup_helper.py
+++ b/tools/lint/eslint/setup_helper.py
@@ -94,31 +94,33 @@ def eslint_setup(should_clobber=False):
 
     # Install ESLint and external plugins
     cmd = [npm_path, "install"]
     cmd.extend(extra_parameters)
     print("Installing eslint for mach using \"%s\"..." % (" ".join(cmd)))
     if not call_process("eslint", cmd):
         return 1
 
-    eslint_path = os.path.join(get_project_root(), "node_modules", ".bin", "eslint")
+    eslint_path = os.path.join(
+        get_project_root(), "node_modules", ".bin", "eslint")
 
     print("\nESLint and approved plugins installed successfully!")
     print("\nNOTE: Your local eslint binary is at %s\n" % eslint_path)
 
     os.chdir(orig_cwd)
 
 
 def call_process(name, cmd, cwd=None):
     try:
         with open(os.devnull, "w") as fnull:
             subprocess.check_call(cmd, cwd=cwd, stdout=fnull)
     except subprocess.CalledProcessError:
         if cwd:
-            print("\nError installing %s in the %s folder, aborting." % (name, cwd))
+            print("\nError installing %s in the %s folder, aborting." %
+                  (name, cwd))
         else:
             print("\nError installing %s, aborting." % name)
 
         return False
 
     return True
 
 
@@ -180,17 +182,18 @@ def eslint_module_needs_setup():
         if "version" in expected_data:
             version_range = expected_data["version"]
         else:
             version_range = expected_data
 
         path = os.path.join(node_modules_path, name, "package.json")
 
         if not os.path.exists(path):
-            print("%s v%s needs to be installed locally." % (name, version_range))
+            print("%s v%s needs to be installed locally." %
+                  (name, version_range))
             has_issues = True
             continue
 
         data = json.load(open(path))
 
         if version_range.startswith("file:"):
             # We don't need to check local file installations for versions, as
             # these are symlinked, so we'll always pick up the latest.
@@ -198,34 +201,36 @@ def eslint_module_needs_setup():
 
         if name == "eslint" and LooseVersion("4.0.0") > LooseVersion(data["version"]):
             print("ESLint is an old version, clobbering node_modules directory")
             needs_clobber = True
             has_issues = True
             continue
 
         if not version_in_range(data["version"], version_range):
-            print("%s v%s should be v%s." % (name, data["version"], version_range))
+            print("%s v%s should be v%s." %
+                  (name, data["version"], version_range))
             has_issues = True
             continue
 
     return has_issues, needs_clobber
 
 
 def version_in_range(version, version_range):
     """
     Check if a module version is inside a version range.  Only supports explicit versions and
     caret ranges for the moment, since that's all we've used so far.
     """
     if version == version_range:
         return True
 
     version_match = VERSION_RE.match(version)
     if not version_match:
-        raise RuntimeError("mach eslint doesn't understand module version %s" % version)
+        raise RuntimeError(
+            "mach eslint doesn't understand module version %s" % version)
     version = LooseVersion(version)
 
     # Caret ranges as specified by npm allow changes that do not modify the left-most non-zero
     # digit in the [major, minor, patch] tuple.  The code below assumes the major digit is
     # non-zero.
     range_match = CARET_VERSION_RANGE_RE.match(version_range)
     if range_match:
         range_version = range_match.group(1)
@@ -313,19 +318,21 @@ def get_node_or_npm_path(filename, minve
     version_str = get_version(node_or_npm_path).lstrip('v')
 
     version = LooseVersion(version_str)
 
     if version > minversion:
         return node_or_npm_path
 
     if filename == "npm":
-        print(NPM_MACHING_VERSION_NOT_FOUND_MESSAGE % (version_str.strip(), minversion))
+        print(NPM_MACHING_VERSION_NOT_FOUND_MESSAGE %
+              (version_str.strip(), minversion))
     else:
-        print(NODE_MACHING_VERSION_NOT_FOUND_MESSAGE % (version_str.strip(), minversion))
+        print(NODE_MACHING_VERSION_NOT_FOUND_MESSAGE %
+              (version_str.strip(), minversion))
 
     return None
 
 
 def get_version(path):
     try:
         version_str = subprocess.check_output([path, "--version"],
                                               stderr=subprocess.STDOUT)
--- a/tools/lint/python/__init__.py
+++ b/tools/lint/python/__init__.py
@@ -156,17 +156,18 @@ def lint(paths, config, **lintargs):
 
     # Run any paths with a .flake8 file in the directory separately so
     # it gets picked up. This means only .flake8 files that live in
     # directories that are explicitly included will be considered.
     # See bug 1277851
     paths_by_config = defaultdict(list)
     for path in paths:
         configs = get_ancestors_by_name('.flake8', path, lintargs['root'])
-        paths_by_config[os.pathsep.join(configs) if configs else 'default'].append(path)
+        paths_by_config[os.pathsep.join(
+            configs) if configs else 'default'].append(path)
 
     for configs, paths in paths_by_config.items():
         cmd = cmdargs[:]
 
         if configs != 'default':
             configs = reversed(configs.split(os.pathsep))
             cmd.extend(['--append-config={}'.format(c) for c in configs])
 
--- a/tools/lint/python/compat.py
+++ b/tools/lint/python/compat.py
@@ -25,17 +25,18 @@ class PyCompatProcess(ProcessHandlerMixi
         self.config = config
         kwargs['processOutputLine'] = [self.process_line]
         ProcessHandlerMixin.__init__(self, *args, **kwargs)
 
     def process_line(self, line):
         try:
             res = json.loads(line)
         except ValueError:
-            print('Non JSON output from {} linter: {}'.format(self.config['name'], line))
+            print('Non JSON output from {} linter: {}'.format(
+                self.config['name'], line))
             return
 
         res['level'] = 'error'
         results.append(result.from_config(self.config, **res))
 
 
 def setup(python):
     """Setup doesn't currently do any bootstrapping. For now, this function
--- a/tools/lint/python/flake8.py
+++ b/tools/lint/python/flake8.py
@@ -148,17 +148,18 @@ def lint(paths, config, **lintargs):
 
     # Run any paths with a .flake8 file in the directory separately so
     # it gets picked up. This means only .flake8 files that live in
     # directories that are explicitly included will be considered.
     # See bug 1277851
     paths_by_config = defaultdict(list)
     for path in paths:
         configs = get_ancestors_by_name('.flake8', path, lintargs['root'])
-        paths_by_config[os.pathsep.join(configs) if configs else 'default'].append(path)
+        paths_by_config[os.pathsep.join(
+            configs) if configs else 'default'].append(path)
 
     for configs, paths in paths_by_config.items():
         cmd = cmdargs[:]
 
         if configs != 'default':
             configs = reversed(configs.split(os.pathsep))
             cmd.extend(['--append-config={}'.format(c) for c in configs])
 
--- a/tools/lint/wpt/wpt.py
+++ b/tools/lint/wpt/wpt.py
@@ -12,25 +12,27 @@ import sys
 from mozprocess import ProcessHandler
 
 from mozlint import result
 
 results = []
 
 
 def lint(files, config, **kwargs):
-    tests_dir = os.path.join(kwargs['root'], 'testing', 'web-platform', 'tests')
+    tests_dir = os.path.join(
+        kwargs['root'], 'testing', 'web-platform', 'tests')
 
     def process_line(line):
         try:
             data = json.loads(line)
         except ValueError:
             return
         data["level"] = "error"
-        data["path"] = os.path.relpath(os.path.join(tests_dir, data["path"]), kwargs['root'])
+        data["path"] = os.path.relpath(os.path.join(
+            tests_dir, data["path"]), kwargs['root'])
         results.append(result.from_config(config, **data))
 
     if files == [tests_dir]:
         print >> sys.stderr, ("No specific files specified, running the full wpt lint"
                               " (this is slow)")
         files = ["--all"]
     cmd = [os.path.join(tests_dir, 'wpt'), 'lint', '--json'] + files
     if platform.system() == 'Windows':
--- a/tools/lint/yamllint_/__init__.py
+++ b/tools/lint/yamllint_/__init__.py
@@ -20,17 +20,18 @@ YAMLLINT_REQUIREMENTS_PATH = os.path.joi
 
 
 YAMLLINT_INSTALL_ERROR = """
 Unable to install correct version of yamllint
 Try to install it manually with:
     $ pip install -U --require-hashes -r {}
 """.strip().format(YAMLLINT_REQUIREMENTS_PATH)
 
-YAMLLINT_FORMAT_REGEX = re.compile(r'(.*):(.*):(.*): \[(error|warning)\] (.*) \((.*)\)$')
+YAMLLINT_FORMAT_REGEX = re.compile(
+    r'(.*):(.*):(.*): \[(error|warning)\] (.*) \((.*)\)$')
 
 results = []
 
 
 class YAMLLintProcess(ProcessHandlerMixin):
     def __init__(self, config, *args, **kwargs):
         self.config = config
         kwargs['processOutputLine'] = [self.process_line]
@@ -109,17 +110,17 @@ def run_process(config, cmd):
     try:
         proc.wait()
     except KeyboardInterrupt:
         proc.kill()
 
 
 def gen_yamllint_args(cmdargs, paths=None, conf_file=None):
     args = cmdargs[:]
-    if isinstance(paths, basestring):
+    if isinstance(paths, basestring):  # noqa: F821
         paths = [paths]
     if conf_file and conf_file != 'default':
         return args + ['-c', conf_file] + paths
     return args + paths
 
 
 def lint(files, config, **lintargs):
     if not reinstall_yamllint():
@@ -140,11 +141,12 @@ def lint(files, config, **lintargs):
     # it gets picked up. This means only .yamllint files that live in
     # directories that are explicitly included will be considered.
     paths_by_config = defaultdict(list)
     for f in files:
         conf_files = get_ancestors_by_name('.yamllint', f, config['root'])
         paths_by_config[conf_files[0] if conf_files else 'default'].append(f)
 
     for conf_file, paths in paths_by_config.items():
-        run_process(config, gen_yamllint_args(cmdargs, conf_file=conf_file, paths=paths))
+        run_process(config, gen_yamllint_args(
+            cmdargs, conf_file=conf_file, paths=paths))
 
     return results
--- a/tools/mach_commands.py
+++ b/tools/mach_commands.py
@@ -66,17 +66,17 @@ class UUIDProvider(object):
         u = uuid.uuid4()
         if format in [None, 'idl']:
             print(u)
             if format is None:
                 print('')
         if format in [None, 'cpp', 'c++']:
             u = u.hex
             print('{ 0x%s, 0x%s, 0x%s, \\' % (u[0:8], u[8:12], u[12:16]))
-            pairs = tuple(map(lambda n: u[n:n+2], range(16, 32, 2)))
+            pairs = tuple(map(lambda n: u[n:n + 2], range(16, 32, 2)))
             print(('  { ' + '0x%s, ' * 7 + '0x%s } }') % pairs)
 
 
 @CommandProvider
 class PastebinProvider(object):
     @Command('pastebin', category='misc',
              description='Command line interface to pastebin.mozilla.org.')
     @CommandArgument('--language', default=None,
--- a/tools/power/mach_commands.py
+++ b/tools/power/mach_commands.py
@@ -8,57 +8,57 @@ from distutils.version import StrictVers
 
 from mach.decorators import (
     Command,
     CommandArgument,
     CommandProvider,
 )
 from mozbuild.base import (
     MachCommandBase,
-    MachCommandConditions as conditions,
 )
 
 
 def is_osx_10_10_or_greater(cls):
     import platform
     release = platform.mac_ver()[0]
     return release and StrictVersion(release) >= StrictVersion('10.10')
 
 
 @CommandProvider
 class MachCommands(MachCommandBase):
     '''
     Get system power consumption and related measurements.
     '''
+
     def __init__(self, context):
         MachCommandBase.__init__(self, context)
 
     @Command('power', category='misc',
-        conditions=[is_osx_10_10_or_greater],
-        description='Get system power consumption and related measurements for '
-        'all running browsers. Available only on Mac OS X 10.10 and above. '
-        'Requires root access.')
+             conditions=[is_osx_10_10_or_greater],
+             description='Get system power consumption and related measurements for '
+             'all running browsers. Available only on Mac OS X 10.10 and above. '
+             'Requires root access.')
     @CommandArgument('-i', '--interval', type=int, default=30000,
-        help='The sample period, measured in milliseconds. Defaults to 30000.')
+                     help='The sample period, measured in milliseconds. Defaults to 30000.')
     def power(self, interval):
         import os
         import re
         import subprocess
 
         rapl = os.path.join(self.topobjdir, 'dist', 'bin', 'rapl')
 
         interval = str(interval)
 
         # Run a trivial command with |sudo| to gain temporary root privileges
         # before |rapl| and |powermetrics| are called. This ensures that |rapl|
         # doesn't start measuring while |powermetrics| is waiting for the root
         # password to be entered.
         try:
             subprocess.check_call(['sudo', 'true'])
-        except:
+        except Exception:
             print('\nsudo failed; aborting')
             return 1
 
         # This runs rapl in the background because nothing in this script
         # depends on the output. This is good because we want |rapl| and
         # |powermetrics| to run at the same time.
         subprocess.Popen([rapl, '-n', '1', '-i', interval])
 
--- a/tools/profiler/merge-profiles.py
+++ b/tools/profiler/merge-profiles.py
@@ -1,49 +1,52 @@
-#!/usr/bin/env python 
+#!/usr/bin/env python
 #
 # This script takes b2g process profiles and merged them into a single profile.
 # The meta data is taken from the first profile. The startTime for each profile
 # is used to syncronized the samples. Each thread is moved into the merged
 # profile.
 #
 import json
 import re
 import sys
 
+
 def MergeProfiles(files):
     threads = []
     fileData = []
     symTable = dict()
     meta = None
     libs = None
     videoUrl = None
     minStartTime = None
 
     for fname in files:
         if fname.startswith("--video="):
             videoUrl = fname[8:]
             continue
 
         match = re.match('profile_([0-9]+)_(.+)\.sym', fname)
         if match is None:
-            raise Exception("Filename '" + fname + "' doesn't match expected pattern")
+            raise Exception("Filename '" + fname +
+                            "' doesn't match expected pattern")
         pid = match.groups(0)[0]
         pname = match.groups(0)[1]
 
         fp = open(fname, "r")
         fileData = json.load(fp)
         fp.close()
 
         if meta is None:
             meta = fileData['profileJSON']['meta'].copy()
             libs = fileData['profileJSON']['libs']
             minStartTime = meta['startTime']
         else:
-            minStartTime = min(minStartTime, fileData['profileJSON']['meta']['startTime'])
+            minStartTime = min(
+                minStartTime, fileData['profileJSON']['meta']['startTime'])
             meta['startTime'] = minStartTime
 
         for thread in fileData['profileJSON']['threads']:
             thread['name'] = thread['name'] + " (" + pname + ":" + pid + ")"
             threads.append(thread)
 
             # Note that pid + sym, pid + location could be ambigious
             # if we had pid=11 sym=1 && pid=1 sym=11.
@@ -53,17 +56,16 @@ def MergeProfiles(files):
             if meta['version'] >= 3:
                 stringTable = thread['stringTable']
                 for i, str in enumerate(stringTable):
                     if str[:2] == '0x':
                         newLoc = pidStr + str
                         stringTable[i] = newLoc
                         symTable[newLoc] = str
             else:
-                samples = thread['samples']
                 for sample in thread['samples']:
                     for frame in sample['frames']:
                         if "location" in frame and frame['location'][0:2] == '0x':
                             oldLoc = frame['location']
                             newLoc = pidStr + oldLoc
                             frame['location'] = newLoc
                             # Default to the unprefixed symbol if no translation is
                             symTable[newLoc] = oldLoc
@@ -102,12 +104,9 @@ def MergeProfiles(files):
 
     json.dump(result, sys.stdout)
 
 
 if len(sys.argv) > 1:
     MergeProfiles(sys.argv[1:])
     sys.exit(0)
 
-print "Usage: merge-profile.py profile_<pid1>_<pname1>.sym profile_<pid2>_<pname2>.sym > merged.sym"
-
-
-
+print("Usage: merge-profile.py profile_<pid1>_<pname1>.sym profile_<pid2>_<pname2>.sym > merged.sym")
--- a/tools/profiler/nm-symbolicate.py
+++ b/tools/profiler/nm-symbolicate.py
@@ -1,48 +1,50 @@
 #!/usr/bin/env python
 
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-import sys, subprocess, os
+import sys
+import subprocess
+import os
+
 
 def NMSymbolicate(library, addresses):
-  target_tools_prefix = os.environ.get("TARGET_TOOLS_PREFIX", "")
-  args = [
-    target_tools_prefix + "nm", "-D", "-S", library
-  ]
-  nm_lines = subprocess.check_output(args).split("\n")
-  symbol_table = []
-  for line in nm_lines:
-    pieces = line.split(" ", 4)
-    if len(pieces) != 4 or pieces[2] != "T":
-      continue
-    start = int(pieces[0], 16)
-    end = int(pieces[1], 16)
-    symbol = pieces[3]
-    symbol_table.append({
-      "start": int(pieces[0], 16),
-      "end": int(pieces[0], 16) + int(pieces[1], 16),
-      "funcName": pieces[3]
-    });
+    target_tools_prefix = os.environ.get("TARGET_TOOLS_PREFIX", "")
+    args = [
+        target_tools_prefix + "nm", "-D", "-S", library
+    ]
+    nm_lines = subprocess.check_output(args).split("\n")
+    symbol_table = []
+    for line in nm_lines:
+        pieces = line.split(" ", 4)
+        if len(pieces) != 4 or pieces[2] != "T":
+            continue
+        start = int(pieces[0], 16)
+        end = int(pieces[1], 16)
+        symbol = pieces[3]
+        symbol_table.append({
+            "start": start,
+            "end": start + end,
+            "funcName": pieces[3]
+        })
 
-  for addressStr in addresses:
-    address = int(addressStr, 16)
-    symbolForAddress = None
-    for symbol in symbol_table:
-      if address >= symbol["start"] and address <= symbol["end"]:
-        symbolForAddress = symbol
-        break
-    if symbolForAddress:
-      print symbolForAddress["funcName"]
-    else:
-      print "??" # match addr2line
-    print ":0" # no line information from nm
+    for addressStr in addresses:
+        address = int(addressStr, 16)
+        symbolForAddress = None
+        for symbol in symbol_table:
+            if address >= symbol["start"] and address <= symbol["end"]:
+                symbolForAddress = symbol
+                break
+        if symbolForAddress:
+            print(symbolForAddress["funcName"])
+        else:
+            print("??")  # match addr2line
+        print(":0")  # no line information from nm
+
 
 if len(sys.argv) > 1:
     NMSymbolicate(sys.argv[1], sys.argv[2:])
     sys.exit(0)
 
-print "Usage: nm-symbolicate.py <library> <addresses> > merged.sym"
-
-
+print("Usage: nm-symbolicate.py <library> <addresses> > merged.sym")
--- a/tools/rb/find_leakers.py
+++ b/tools/rb/find_leakers.py
@@ -7,29 +7,31 @@
 # This script processes a `refcount' log, and finds out if any object leaked.
 # It simply goes through the log, finds `AddRef' or `Ctor' lines, and then
 # sees if they `Release' or `Dtor'. If not, it reports them as leaks.
 # Please see README file in the same directory.
 
 
 import sys
 
+
 def print_output(allocation, obj_to_class):
     '''Formats and prints output.'''
     items = []
     for obj, count, in allocation.iteritems():
         # Adding items to a list, so we can sort them.
         items.append((obj, count))
     # Sorting by count.
     items.sort(key=lambda item: item[1])
 
     for obj, count, in items:
-        print "{obj} ({count}) @ {class_name}".format(obj=obj,
+        print("{obj} ({count}) @ {class_name}".format(obj=obj,
                                                       count=count,
-                                                      class_name=obj_to_class[obj])
+                                                      class_name=obj_to_class[obj]))
+
 
 def process_log(log_lines):
     '''Process through the log lines, and print out the result.
 
     @param log_lines: List of strings.
     '''
     allocation = {}
     class_count = {}
@@ -44,57 +46,58 @@ def process_log(log_lines):
          ignore,
          operation,
          count,) = log_line.strip('\r\n').split(' ')[:5]
 
         # for AddRef/Release `count' is the refcount,
         # for Ctor/Dtor it's the size.
 
         if ((operation == 'AddRef' and count == '1') or
-           operation == 'Ctor'):
+                operation == 'Ctor'):
             # Examples:
             #     <nsStringBuffer> 0x01AFD3B8 1 AddRef 1
             #     <PStreamNotifyParent> 0x08880BD0 8 Ctor (20)
             class_count[class_name] = class_count.setdefault(class_name, 0) + 1
             allocation[obj] = class_count[class_name]
             obj_to_class[obj] = class_name
 
         elif ((operation == 'Release' and count == '0') or
-             operation == 'Dtor'):
+              operation == 'Dtor'):
             # Examples:
             #     <nsStringBuffer> 0x01AFD3B8 1 Release 0
             #     <PStreamNotifyParent> 0x08880BD0 8 Dtor (20)
             if obj not in allocation:
-                print "An object was released that wasn't allocated!",
-                print obj, "@", class_name
+                print("An object was released that wasn't allocated!",)
+                print(obj, "@", class_name)
             else:
                 allocation.pop(obj)
             obj_to_class.pop(obj)
 
     # Printing out the result.
     print_output(allocation, obj_to_class)
 
 
 def print_usage():
-    print
-    print "Usage: find-leakers.py [log-file]"
-    print
-    print "If `log-file' provided, it will read that as the input log."
-    print "Else, it will read the stdin as the input log."
-    print
+    print()
+    print("Usage: find-leakers.py [log-file]")
+    print()
+    print("If `log-file' provided, it will read that as the input log.")
+    print("Else, it will read the stdin as the input log.")
+    print()
+
 
 def main():
     '''Main method of the script.'''
     if len(sys.argv) == 1:
         # Reading log from stdin.
         process_log(sys.stdin.readlines())
     elif len(sys.argv) == 2:
         # Reading log from file.
         with open(sys.argv[1], 'r') as log_file:
             log_lines = log_file.readlines()
         process_log(log_lines)
     else:
-        print 'ERROR: Invalid number of arguments'
+        print('ERROR: Invalid number of arguments')
         print_usage()
 
+
 if __name__ == '__main__':
     main()
-
--- a/tools/rb/fix_linux_stack.py
+++ b/tools/rb/fix_linux_stack.py
@@ -7,21 +7,22 @@
 # This script uses addr2line (part of binutils) to post-process the entries
 # produced by NS_FormatCodeAddress(), which on Linux often lack a function
 # name, a file name and a line number.
 
 import subprocess
 import sys
 import re
 import os
-import pty
-import termios
 from StringIO import StringIO
 
-objdump_section_re = re.compile("^ [0-9a-f]* ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}).*")
+objdump_section_re = re.compile(
+    "^ [0-9a-f]* ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}).*")
+
+
 def elf_section(file, section):
     """
     Return the requested ELF section of the file as a str, representing
     a sequence of bytes.
     """
     # We can read the .gnu_debuglink section using either of:
     #   objdump -s --section=.gnu_debuglink $file
     #   readelf -x .gnu_debuglink $file
@@ -39,21 +40,22 @@ def elf_section(file, section):
     # Turn hexadecimal dump into the bytes it represents
     for line in StringIO(objdump_stdout).readlines():
         m = objdump_section_re.match(line)
         if m:
             for gnum in [0, 1, 2, 3]:
                 word = m.groups()[gnum]
                 if word != "        ":
                     for idx in [0, 2, 4, 6]:
-                        result += chr(int(word[idx:idx+2], 16))
+                        result += chr(int(word[idx:idx + 2], 16))
     return result
 
+
 # FIXME: Hard-coded to gdb defaults (works on Fedora and Ubuntu).
-global_debug_dir = '/usr/lib/debug';
+global_debug_dir = '/usr/lib/debug'
 
 endian_re = re.compile("\s*Data:\s+.*(little|big) endian.*$")
 
 # Table of 256 values, per documentation of .gnu_debuglink sections.
 gnu_debuglink_crc32_table = [
     0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
     0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
     0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
@@ -103,30 +105,33 @@ gnu_debuglink_crc32_table = [
     0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
     0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
     0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
     0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
     0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
     0x2d02ef8d
 ]
 
+
 def gnu_debuglink_crc32(stream):
     # Note that python treats bitwise operators as though integers have
     # an infinite number of bits (and thus such that negative integers
     # 1-pad out to infinity).
     crc = 0xffffffff
     while True:
         # Choose to read in 4096 byte chunks.
         bytes = stream.read(4096)
         if len(bytes) == 0:
             break
         for byte in bytes:
-            crc = gnu_debuglink_crc32_table[(crc ^ ord(byte)) & 0xff] ^ (crc >> 8)
+            crc = gnu_debuglink_crc32_table[(
+                crc ^ ord(byte)) & 0xff] ^ (crc >> 8)
     return ~crc & 0xffffffff
 
+
 def separate_debug_file_for(file):
     """
     Finds a separated file with the debug sections for a binary.  Such
     files are commonly installed by debug packages on linux distros.
     Rules for finding them are documented in:
     https://sourceware.org/gdb/current/onlinedocs/gdb/Separate-Debug-Files.html
     """
     def have_debug_file(debugfile):
@@ -142,41 +147,43 @@ def separate_debug_file_for(file):
             break
     readelf.terminate()
     if endian is None:
         sys.stderr.write("Could not determine endianness of " + file + "\n")
         return None
 
     def word32(s):
         if type(s) != str or len(s) != 4:
-            raise StandardError("expected 4 byte string input")
+            raise StandardError("expected 4 byte string input")  # noqa: F821
         s = list(s)
         if endian == "big":
             s.reverse()
         return sum(map(lambda idx: ord(s[idx]) * (256 ** idx), range(0, 4)))
 
-    buildid = elf_section(file, ".note.gnu.build-id");
+    buildid = elf_section(file, ".note.gnu.build-id")
     if buildid is not None:
         # The build ID is an ELF note section, so it begins with a
         # name size (4), a description size (size of contents), a
         # type (3), and the name "GNU\0".
         note_header = buildid[0:16]
         buildid = buildid[16:]
         if word32(note_header[0:4]) != 4 or \
            word32(note_header[4:8]) != len(buildid) or \
            word32(note_header[8:12]) != 3 or \
            note_header[12:16] != "GNU\0":
             sys.stderr.write("malformed .note.gnu.build_id in " + file + "\n")
         else:
-            buildid = "".join(map(lambda ch: "%02X" % ord(ch), buildid)).lower()
-            f = os.path.join(global_debug_dir, ".build-id", buildid[0:2], buildid[2:] + ".debug")
+            buildid = "".join(map(lambda ch: "%02X" %
+                                  ord(ch), buildid)).lower()
+            f = os.path.join(global_debug_dir, ".build-id",
+                             buildid[0:2], buildid[2:] + ".debug")
             if have_debug_file(f):
                 return f
 
-    debuglink = elf_section(file, ".gnu_debuglink");
+    debuglink = elf_section(file, ".gnu_debuglink")
     if debuglink is not None:
         # The debuglink section contains a string, ending with a
         # null-terminator and then 0 to three bytes of padding to fill the
         # current 32-bit unit.  (This padding is usually null bytes, but
         # I've seen null-null-H, on Ubuntu x86_64.)  This is followed by
         # a 4-byte CRC.
         debuglink_name = debuglink[:-4]
         null_idx = debuglink_name.find("\0")
@@ -197,18 +204,21 @@ def separate_debug_file_for(file):
             if have_debug_file(f):
                 fio = open(f, mode="r")
                 file_crc = gnu_debuglink_crc32(fio)
                 fio.close()
                 if file_crc == debuglink_crc:
                     return f
     return None
 
+
 elf_type_re = re.compile("^\s*Type:\s+(\S+)")
-elf_text_section_re = re.compile("^\s*\[\s*\d+\]\s+\.text\s+\w+\s+(\w+)\s+(\w+)\s+")
+elf_text_section_re = re.compile(
+    "^\s*\[\s*\d+\]\s+\.text\s+\w+\s+(\w+)\s+(\w+)\s+")
+
 
 def address_adjustment_for(file):
     """
     Return the address adjustment to use for a file.
 
     addr2line wants offsets relative to the base address for shared
     libraries, but it wants addresses including the base address offset
     for executables.  This returns the appropriate address adjustment to
@@ -231,27 +241,28 @@ def address_adjustment_for(file):
     adjustment = 0
     readelf = subprocess.Popen(['readelf', '-S', file],
                                stdout=subprocess.PIPE)
     for line in readelf.stdout.readlines():
         m = elf_text_section_re.match(line)
         if m:
             # Subtract the .text section's offset within the
             # file from its base address.
-            adjustment = int(m.groups()[0], 16) - int(m.groups()[1], 16);
+            adjustment = int(m.groups()[0], 16) - int(m.groups()[1], 16)
             break
     readelf.terminate()
     return adjustment
 
 
 devnull = open(os.devnull)
 file_stuff = {}
 
+
 def addressToSymbol(file, address):
-    if not file in file_stuff:
+    if file not in file_stuff:
         debug_file = separate_debug_file_for(file) or file
 
         # Start an addr2line process for this file. Note that addr2line
         # sometimes prints error messages, which we want to suppress.
         args = ['/usr/bin/addr2line', '-C', '-f', '-e', debug_file]
         addr2line = subprocess.Popen(args, stdin=subprocess.PIPE,
                                      stdout=subprocess.PIPE,
                                      stderr=devnull)
@@ -262,24 +273,26 @@ def addressToSymbol(file, address):
         (addr2line, address_adjustment, cache) = file_stuff[file]
 
     if address in cache:
         return cache[address]
 
     # For each line of input, addr2line produces two lines of output.
     addr2line.stdin.write(hex(int(address, 16) + address_adjustment) + '\n')
     addr2line.stdin.flush()
-    result = (addr2line.stdout.readline().rstrip("\r\n"), \
+    result = (addr2line.stdout.readline().rstrip("\r\n"),
               addr2line.stdout.readline().rstrip("\r\n"))
     cache[address] = result
     return result
 
+
 # Matches lines produced by NS_FormatCodeAddress().
 line_re = re.compile("^(.*#\d+: )(.+)\[(.+) \+(0x[0-9A-Fa-f]+)\](.*)$")
 
+
 def fixSymbols(line):
     result = line_re.match(line)
     if result is not None:
         (before, fn, file, address, after) = result.groups()
 
         if os.path.exists(file) and os.path.isfile(file):
             (name, fileline) = addressToSymbol(file, address)
 
@@ -287,16 +300,18 @@ def fixSymbols(line):
             if name == "??":
                 name = fn
             if fileline == "??:0" or fileline == "??:?":
                 fileline = file
 
             nl = '\n' if line[-1] == '\n' else ''
             return "%s%s (%s)%s%s" % (before, name, fileline, after, nl)
         else:
-            sys.stderr.write("Warning: File \"" + file + "\" does not exist.\n")
+            sys.stderr.write("Warning: File \"" + file +
+                             "\" does not exist.\n")
             return line
     else:
         return line
 
+
 if __name__ == "__main__":
     for line in sys.stdin:
         sys.stdout.write(fixSymbols(line))
--- a/tools/rb/fix_macosx_stack.py
+++ b/tools/rb/fix_macosx_stack.py
@@ -10,97 +10,114 @@
 
 import subprocess
 import sys
 import re
 import os
 import pty
 import termios
 
+
 class unbufferedLineConverter:
     """
     Wrap a child process that responds to each line of input with one line of
     output.  Uses pty to trick the child into providing unbuffered output.
     """
-    def __init__(self, command, args = []):
+
+    def __init__(self, command, args=[]):
         pid, fd = pty.fork()
         if pid == 0:
             # We're the child.  Transfer control to command.
             os.execvp(command, [command] + args)
         else:
             # Disable echoing.
             attr = termios.tcgetattr(fd)
             attr[3] = attr[3] & ~termios.ECHO
             termios.tcsetattr(fd, termios.TCSANOW, attr)
             # Set up a file()-like interface to the child process
             self.r = os.fdopen(fd, "r", 1)
             self.w = os.fdopen(os.dup(fd), "w", 1)
+
     def convert(self, line):
         self.w.write(line + "\n")
         return self.r.readline().rstrip("\r\n")
+
     @staticmethod
     def test():
         assert unbufferedLineConverter("rev").convert("123") == "321"
         assert unbufferedLineConverter("cut", ["-c3"]).convert("abcde") == "c"
-        print "Pass"
+        print("Pass")
+
 
 def separate_debug_file_for(file):
     return None
 
+
 address_adjustments = {}
+
+
 def address_adjustment(file):
-    if not file in address_adjustments:
+    if file not in address_adjustments:
         result = None
         otool = subprocess.Popen(["otool", "-l", file], stdout=subprocess.PIPE)
         while True:
             line = otool.stdout.readline()
             if line == "":
                 break
             if line == "  segname __TEXT\n":
                 line = otool.stdout.readline()
                 if not line.startswith("   vmaddr "):
-                    raise StandardError("unexpected otool output")
+                    raise StandardError("unexpected otool output")  # noqa: F821
                 result = int(line[10:], 16)
                 break
         otool.stdout.close()
 
         if result is None:
-            raise StandardError("unexpected otool output")
+            raise StandardError("unexpected otool output")  # noqa: F821
 
         address_adjustments[file] = result
 
     return address_adjustments[file]
 
+
 atoses = {}
+
+
 def addressToSymbol(file, address):
     converter = None
-    if not file in atoses:
+    if file not in atoses:
         debug_file = separate_debug_file_for(file) or file
-        converter = unbufferedLineConverter('/usr/bin/xcrun', ['atos', '-arch', 'x86_64', '-o', debug_file])
+        converter = unbufferedLineConverter(
+            '/usr/bin/xcrun', ['atos', '-arch', 'x86_64', '-o', debug_file])
         atoses[file] = converter
     else:
         converter = atoses[file]
     return converter.convert("0x%X" % address)
 
+
 cxxfilt_proc = None
+
+
 def cxxfilt(sym):
     if cxxfilt_proc is None:
         # --no-strip-underscores because atos already stripped the underscore
         globals()["cxxfilt_proc"] = subprocess.Popen(['c++filt',
                                                       '--no-strip-underscores',
                                                       '--format', 'gnu-v3'],
                                                      stdin=subprocess.PIPE,
                                                      stdout=subprocess.PIPE)
     cxxfilt_proc.stdin.write(sym + "\n")
     return cxxfilt_proc.stdout.readline().rstrip("\n")
 
+
 # Matches lines produced by NS_FormatCodeAddress().
 line_re = re.compile("^(.*#\d+: )(.+)\[(.+) \+(0x[0-9A-Fa-f]+)\](.*)$")
 atos_name_re = re.compile("^(.+) \(in ([^)]+)\) \((.+)\)$")
 
+
 def fixSymbols(line):
     result = line_re.match(line)
     if result is not None:
         (before, fn, file, address, after) = result.groups()
         address = int(address, 16)
 
         if os.path.exists(file) and os.path.isfile(file):
             address += address_adjustment(file)
@@ -118,16 +135,18 @@ def fixSymbols(line):
                 # up the remaining cases(!), which will begin with '_Z'.
                 if (name.startswith("_Z")):
                     name = cxxfilt(name)
                 info = "%s (%s, in %s)" % (name, fileline, library)
 
             nl = '\n' if line[-1] == '\n' else ''
             return before + info + after + nl
         else:
-            sys.stderr.write("Warning: File \"" + file + "\" does not exist.\n")
+            sys.stderr.write("Warning: File \"" + file +
+                             "\" does not exist.\n")
             return line
     else:
         return line
 
+
 if __name__ == "__main__":
     for line in sys.stdin:
         sys.stdout.write(fixSymbols(line))
--- a/tools/rb/fix_stack_using_bpsyms.py
+++ b/tools/rb/fix_stack_using_bpsyms.py
@@ -13,151 +13,163 @@ from __future__ import with_statement
 import sys
 import os
 import re
 import subprocess
 import bisect
 
 here = os.path.dirname(__file__)
 
+
 def prettyFileName(name):
-  if name.startswith("../") or name.startswith("..\\"):
-    # dom_quickstubs.cpp and many .h files show up with relative paths that are useless
-    # and/or don't correspond to the layout of the source tree.
-    return os.path.basename(name) + ":"
-  elif name.startswith("hg:"):
-    bits = name.split(":")
-    if len(bits) == 4:
-      (junk, repo, path, rev) = bits
-      # We could construct an hgweb URL with /file/ or /annotate/, like this:
-      # return "http://%s/annotate/%s/%s#l" % (repo, rev, path)
-      return path  + ":"
-  return name  + ":"
+    if name.startswith("../") or name.startswith("..\\"):
+        # dom_quickstubs.cpp and many .h files show up with relative paths that are useless
+        # and/or don't correspond to the layout of the source tree.
+        return os.path.basename(name) + ":"
+    elif name.startswith("hg:"):
+        bits = name.split(":")
+        if len(bits) == 4:
+            (junk, repo, path, rev) = bits
+            # We could construct an hgweb URL with /file/ or /annotate/, like this:
+            # return "http://%s/annotate/%s/%s#l" % (repo, rev, path)
+            return path + ":"
+    return name + ":"
+
 
 class SymbolFile:
-  def __init__(self, fn):
-    addrs = [] # list of addresses, which will be sorted once we're done initializing
-    funcs = {} # hash: address --> (function name + possible file/line)
-    files = {} # hash: filenum (string) --> prettified filename ready to have a line number appended
-    with open(fn) as f:
-      for line in f:
-        line = line.rstrip()
-        # https://chromium.googlesource.com/breakpad/breakpad/+/master/docs/symbol_files.md
-        if line.startswith("FUNC "):
-          # FUNC <address> <size> <stack_param_size> <name>
-          bits = line.split(None, 4)
-          if len(bits) < 5:
-            bits.append('unnamed_function')
-          (junk, rva, size, ss, name) = bits
-          rva = int(rva,16)
-          funcs[rva] = name
-          addrs.append(rva)
-          lastFuncName = name
-        elif line.startswith("PUBLIC "):
-          # PUBLIC <address> <stack_param_size> <name>
-          (junk, rva, ss, name) = line.split(None, 3)
-          rva = int(rva,16)
-          funcs[rva] = name
-          addrs.append(rva)
-        elif line.startswith("FILE "):
-          # FILE <number> <name>
-          (junk, filenum, name) = line.split(None, 2)
-          files[filenum] = prettyFileName(name)
-        elif line[0] in "0123456789abcdef":
-          # This is one of the "line records" corresponding to the last FUNC record
-          # <address> <size> <line> <filenum>
-          (rva, size, line, filenum) = line.split(None)
-          rva = int(rva,16)
-          file = files[filenum]
-          name = lastFuncName + " [" + file + line + "]"
-          funcs[rva] = name
-          addrs.append(rva)
-        # skip everything else
-    #print "Loaded %d functions from symbol file %s" % (len(funcs), os.path.basename(fn))
-    self.addrs = sorted(addrs)
-    self.funcs = funcs
+    def __init__(self, fn):
+        addrs = []  # list of addresses, which will be sorted once we're done initializing
+        funcs = {}  # hash: address --> (function name + possible file/line)
+        # hash: filenum (string) --> prettified filename ready to have a line number appended
+        files = {}
+        with open(fn) as f:
+            for line in f:
+                line = line.rstrip()
+                # https://chromium.googlesource.com/breakpad/breakpad/+/master/docs/symbol_files.md
+                if line.startswith("FUNC "):
+                    # FUNC <address> <size> <stack_param_size> <name>
+                    bits = line.split(None, 4)
+                    if len(bits) < 5:
+                        bits.append('unnamed_function')
+                    (junk, rva, size, ss, name) = bits
+                    rva = int(rva, 16)
+                    funcs[rva] = name
+                    addrs.append(rva)
+                    lastFuncName = name
+                elif line.startswith("PUBLIC "):
+                    # PUBLIC <address> <stack_param_size> <name>
+                    (junk, rva, ss, name) = line.split(None, 3)
+                    rva = int(rva, 16)
+                    funcs[rva] = name
+                    addrs.append(rva)
+                elif line.startswith("FILE "):
+                    # FILE <number> <name>
+                    (junk, filenum, name) = line.split(None, 2)
+                    files[filenum] = prettyFileName(name)
+                elif line[0] in "0123456789abcdef":
+                    # This is one of the "line records" corresponding to the last FUNC record
+                    # <address> <size> <line> <filenum>
+                    (rva, size, line, filenum) = line.split(None)
+                    rva = int(rva, 16)
+                    file = files[filenum]
+                    name = lastFuncName + " [" + file + line + "]"
+                    funcs[rva] = name
+                    addrs.append(rva)
+                # skip everything else
+        # print "Loaded %d functions from symbol file %s" % (len(funcs), os.path.basename(fn))
+        self.addrs = sorted(addrs)
+        self.funcs = funcs
 
-  def addrToSymbol(self, address):
-    i = bisect.bisect(self.addrs, address) - 1
-    if i > 0:
-      #offset = address - self.addrs[i]
-      return self.funcs[self.addrs[i]]
-    else:
-      return ""
+    def addrToSymbol(self, address):
+        i = bisect.bisect(self.addrs, address) - 1
+        if i > 0:
+            # offset = address - self.addrs[i]
+            return self.funcs[self.addrs[i]]
+        else:
+            return ""
+
 
 def findIdForPath(path):
-  """Finds the breakpad id for the object file at the given path."""
-  # We should always be packaged with a "fileid" executable.
-  fileid_exe = os.path.join(here, 'fileid')
-  if not os.path.isfile(fileid_exe):
-    fileid_exe = fileid_exe + '.exe'
+    """Finds the breakpad id for the object file at the given path."""
+    # We should always be packaged with a "fileid" executable.
+    fileid_exe = os.path.join(here, 'fileid')
     if not os.path.isfile(fileid_exe):
-      raise Exception("Could not find fileid executable in %s" % here)
+        fileid_exe = fileid_exe + '.exe'
+        if not os.path.isfile(fileid_exe):
+            raise Exception("Could not find fileid executable in %s" % here)
 
-  if not os.path.isfile(path):
-    for suffix in ('.exe', '.dll'):
-      if os.path.isfile(path + suffix):
-        path = path + suffix
-  try:
-    return subprocess.check_output([fileid_exe, path]).rstrip()
-  except subprocess.CalledProcessError as e:
-    raise Exception("Error getting fileid for %s: %s" %
-                    (path, e.output))
+    if not os.path.isfile(path):
+        for suffix in ('.exe', '.dll'):
+            if os.path.isfile(path + suffix):
+                path = path + suffix
+    try:
+        return subprocess.check_output([fileid_exe, path]).rstrip()
+    except subprocess.CalledProcessError as e:
+        raise Exception("Error getting fileid for %s: %s" %
+                        (path, e.output))
+
 
 def guessSymbolFile(full_path, symbolsDir):
-  """Guess a symbol file based on an object file's basename, ignoring the path and UUID."""
-  fn = os.path.basename(full_path)
-  d1 = os.path.join(symbolsDir, fn)
-  root, _ = os.path.splitext(fn)
-  if os.path.exists(os.path.join(symbolsDir, root) + '.pdb'):
-    d1 = os.path.join(symbolsDir, root) + '.pdb'
-    fn = root
-  if not os.path.exists(d1):
-    return None
-  uuids = os.listdir(d1)
-  if len(uuids) == 0:
-    raise Exception("Missing symbol file for " + fn)
-  if len(uuids) > 1:
-    uuid = findIdForPath(full_path)
-  else:
-    uuid = uuids[0]
-  return os.path.join(d1, uuid, fn + ".sym")
+    """Guess a symbol file based on an object file's basename, ignoring the path and UUID."""
+    fn = os.path.basename(full_path)
+    d1 = os.path.join(symbolsDir, fn)
+    root, _ = os.path.splitext(fn)
+    if os.path.exists(os.path.join(symbolsDir, root) + '.pdb'):
+        d1 = os.path.join(symbolsDir, root) + '.pdb'
+        fn = root
+    if not os.path.exists(d1):
+        return None
+    uuids = os.listdir(d1)
+    if len(uuids) == 0:
+        raise Exception("Missing symbol file for " + fn)
+    if len(uuids) > 1:
+        uuid = findIdForPath(full_path)
+    else:
+        uuid = uuids[0]
+    return os.path.join(d1, uuid, fn + ".sym")
+
 
 parsedSymbolFiles = {}
+
+
 def getSymbolFile(file, symbolsDir):
-  p = None
-  if not file in parsedSymbolFiles:
-    symfile = guessSymbolFile(file, symbolsDir)
-    if symfile:
-      p = SymbolFile(symfile)
+    p = None
+    if file not in parsedSymbolFiles:
+        symfile = guessSymbolFile(file, symbolsDir)
+        if symfile:
+            p = SymbolFile(symfile)
+        else:
+            p = None
+        parsedSymbolFiles[file] = p
     else:
-      p = None
-    parsedSymbolFiles[file] = p
-  else:
-    p = parsedSymbolFiles[file]
-  return p
+        p = parsedSymbolFiles[file]
+    return p
+
 
 def addressToSymbol(file, address, symbolsDir):
-  p = getSymbolFile(file, symbolsDir)
-  if p:
-    return p.addrToSymbol(address)
-  else:
-    return ""
+    p = getSymbolFile(file, symbolsDir)
+    if p:
+        return p.addrToSymbol(address)
+    else:
+        return ""
+
 
 # Matches lines produced by NS_FormatCodeAddress().
 line_re = re.compile("^(.*#\d+: )(.+)\[(.+) \+(0x[0-9A-Fa-f]+)\](.*)$")
 
+
 def fixSymbols(line, symbolsDir):
-  result = line_re.match(line)
-  if result is not None:
-    (before, fn, file, address, after) = result.groups()
-    address = int(address, 16)
-    symbol = addressToSymbol(file, address, symbolsDir)
-    if not symbol:
-      symbol = "%s + 0x%x" % (os.path.basename(file), address)
-    return before + symbol + after + "\n"
-  else:
-    return line
+    result = line_re.match(line)
+    if result is not None:
+        (before, fn, file, address, after) = result.groups()
+        address = int(address, 16)
+        symbol = addressToSymbol(file, address, symbolsDir)
+        if not symbol:
+            symbol = "%s + 0x%x" % (os.path.basename(file), address)
+        return before + symbol + after + "\n"
+    else:
+        return line
+
 
 if __name__ == "__main__":
-  symbolsDir = sys.argv[1]
-  for line in iter(sys.stdin.readline, ''):
-    print fixSymbols(line, symbolsDir),
+    symbolsDir = sys.argv[1]
+    for line in iter(sys.stdin.readline, ''):
+        # fixSymbols() returns the line with its newline intact; write it
+        # out directly (print() would add a second newline, or print a
+        # one-element tuple without the print_function future import).
+        sys.stdout.write(fixSymbols(line, symbolsDir))
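
For reviewers unfamiliar with the lookup being reindented above: addrToSymbol
maps a stack address to the function containing it via bisect. A minimal,
self-contained sketch of the same technique, with made-up addresses and names
(nothing below is part of the patch):

    import bisect

    # Hypothetical sorted function start addresses and their names,
    # mirroring SymbolFile.addrs and SymbolFile.funcs.
    addrs = [0x1000, 0x2000, 0x3000]
    funcs = {0x1000: "main", 0x2000: "helper", 0x3000: "cleanup"}

    def addr_to_symbol(address):
        # bisect() returns the insertion point for address; the function
        # containing it starts one entry earlier.
        i = bisect.bisect(addrs, address) - 1
        if i > 0:
            return funcs[addrs[i]]
        return ""

    print(addr_to_symbol(0x2010))  # -> "helper"
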
--- a/tools/tryselect/cli.py
+++ b/tools/tryselect/cli.py
@@ -97,27 +97,29 @@ class BaseTryParser(ArgumentParser):
         self.templates = {t: all_templates[t]() for t in self.templates}
         for template in self.templates.values():
             template.add_arguments(group)
 
     def validate(self, args):
         if hasattr(args, 'message'):
             if args.message == 'editor':
                 if 'EDITOR' not in os.environ:
-                    self.error("must set the $EDITOR environment variable to use blank --message")
+                    self.error(
+                        "must set the $EDITOR environment variable to use blank --message")
 
                 with tempfile.NamedTemporaryFile(mode='r') as fh:
                     subprocess.call([os.environ['EDITOR'], fh.name])
                     args.message = fh.read().strip()
 
             if '{msg}' not in args.message:
                 args.message = '{}\n\n{}'.format(args.message, '{msg}')
 
     def parse_known_args(self, *args, **kwargs):
-        args, remainder = ArgumentParser.parse_known_args(self, *args, **kwargs)
+        args, remainder = ArgumentParser.parse_known_args(
+            self, *args, **kwargs)
         self.validate(args)
 
         if self.templates:
             args.templates = {}
             for cls in self.templates.itervalues():
                 context = cls.context(**vars(args))
                 if context is not None:
                     args.templates.update(context)
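
A note on the validate() hunk above: a user message lacking a literal '{msg}'
placeholder gets one appended, so the selector can substitute the try syntax
later. A small sketch of that flow (the try string is hypothetical):

    message = "Bug 1438839 - Make tools/ flake8/pep8 compatible"
    if '{msg}' not in message:
        message = '{}\n\n{}'.format(message, '{msg}')
    # A selector later fills in the placeholder:
    print(message.format(msg='try: -b do -p linux64 -u all'))
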
--- a/tools/tryselect/mach_commands.py
+++ b/tools/tryselect/mach_commands.py
@@ -28,17 +28,18 @@ and try again.
 '''.lstrip()
 
 
 class get_parser(object):
     def __init__(self, selector):
         self.selector = selector
 
     def __call__(self):
-        mod = importlib.import_module('tryselect.selectors.{}'.format(self.selector))
+        mod = importlib.import_module(
+            'tryselect.selectors.{}'.format(self.selector))
         return getattr(mod, '{}Parser'.format(self.selector.capitalize()))()
 
 
 def generic_parser():
     from tryselect.cli import BaseTryParser
     parser = BaseTryParser()
     parser.add_argument('argv', nargs=argparse.REMAINDER)
     return parser
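
get_parser above resolves a selector's parser purely by naming convention.
Roughly, for the 'fuzzy' selector it is equivalent to this sketch (assuming
tryselect is importable and defines FuzzyParser):

    import importlib

    mod = importlib.import_module('tryselect.selectors.fuzzy')
    parser = getattr(mod, 'FuzzyParser')()  # 'fuzzy'.capitalize() + 'Parser'
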
--- a/tools/tryselect/selectors/fuzzy.py
+++ b/tools/tryselect/selectors/fuzzy.py
@@ -118,17 +118,18 @@ def fzf_bootstrap(update=False):
     the install script.
     """
     fzf_bin = find_executable('fzf')
     if fzf_bin and not update:
         return fzf_bin
 
     fzf_path = os.path.join(get_state_dir()[0], 'fzf')
     if update and not os.path.isdir(fzf_path):
-        print("fzf installed somewhere other than {}, please update manually".format(fzf_path))
+        print("fzf installed somewhere other than {}, please update manually".format(
+            fzf_path))
         sys.exit(1)
 
     def get_fzf():
         return find_executable('fzf', os.path.join(fzf_path, 'bin'))
 
     if update:
         ret = run(['git', 'pull'], cwd=fzf_path)
         if ret:
@@ -140,26 +141,27 @@ def fzf_bootstrap(update=False):
 
     if os.path.isdir(fzf_path):
         fzf_bin = get_fzf()
         if fzf_bin:
             return fzf_bin
         # Fzf is cloned, but binary doesn't exist. Try running the install script
         return fzf_bootstrap(update=True)
 
-    install = raw_input("Could not detect fzf, install it now? [y/n]: ")
+    install = raw_input("Could not detect fzf, install it now? [y/n]: ")  # noqa: F821
     if install.lower() != 'y':
         return
 
     if not find_executable('git'):
         print("Git not found.")
         print(FZF_INSTALL_FAILED)
         sys.exit(1)
 
-    cmd = ['git', 'clone', '--depth', '1', 'https://github.com/junegunn/fzf.git']
+    cmd = ['git', 'clone', '--depth', '1',
+           'https://github.com/junegunn/fzf.git']
     if subprocess.call(cmd, cwd=os.path.dirname(fzf_path)):
         print(FZF_INSTALL_FAILED)
         sys.exit(1)
 
     run_fzf_install_script(fzf_path)
 
     print("Installed fzf to {}".format(fzf_path))
     return get_fzf()
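
fzf_bootstrap's detection step leans on distutils; a minimal sketch of the
probe it performs (the messages are illustrative, not the selector's output):

    from distutils.spawn import find_executable

    fzf_bin = find_executable('fzf')
    if fzf_bin:
        print("reusing fzf at {}".format(fzf_bin))
    else:
        print("fzf missing; bootstrap would clone junegunn/fzf and install it")
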
--- a/tools/tryselect/selectors/syntax.py
+++ b/tools/tryselect/selectors/syntax.py
@@ -207,17 +207,18 @@ class TryArgumentParser(object):
     def consume(self):
         try:
             self.token = self.tokens.next()
         except StopIteration:
             self.token = (self.EOF, None)
 
     def expect(self, *types):
         if self.token[0] not in types:
-            raise ValueError("Error parsing try string, unexpected %s" % (self.token[0]))
+            raise ValueError(
+                "Error parsing try string, unexpected %s" % (self.token[0]))
 
     def item_state(self):
         self.expect("item")
         value = self.token[1].strip()
         if value not in self.data:
             self.data[value] = []
         self.current_item = value
         self.consume()
@@ -332,27 +333,30 @@ class AutoTry(object):
 
         for t in tests:
             if t['flavor'] in self.flavor_suites:
                 flavor = t['flavor']
                 if 'subsuite' in t and t['subsuite'] == 'devtools':
                     flavor = 'devtools-chrome'
 
                 if flavor in ['crashtest', 'reftest']:
-                    manifest_relpath = os.path.relpath(t['manifest'], self.topsrcdir)
-                    paths_by_flavor[flavor].add(os.path.dirname(manifest_relpath))
+                    manifest_relpath = os.path.relpath(
+                        t['manifest'], self.topsrcdir)
+                    paths_by_flavor[flavor].add(
+                        os.path.dirname(manifest_relpath))
                 elif 'dir_relpath' in t:
                     paths_by_flavor[flavor].add(t['dir_relpath'])
                 else:
                     file_relpath = os.path.relpath(t['path'], self.topsrcdir)
                     dir_relpath = os.path.dirname(file_relpath)
                     paths_by_flavor[flavor].add(dir_relpath)
 
         for flavor, path_set in paths_by_flavor.items():
-            paths_by_flavor[flavor] = self.deduplicate_prefixes(path_set, paths)
+            paths_by_flavor[flavor] = self.deduplicate_prefixes(
+                path_set, paths)
 
         return dict(paths_by_flavor)
 
     def deduplicate_prefixes(self, path_set, input_paths):
         # Removes paths redundant to test selection in the given path set.
         # If a path was passed on the commandline that is the prefix of a
         # path in our set, we only need to include the specified prefix to
         # run the intended tests (every test in "layout/base" will run if
@@ -408,27 +412,29 @@ class AutoTry(object):
             for suite in suites.keys():
                 if any([suite.startswith(c) for c in self.compiled_suites]):
                     rejected.append(suite)
             if rejected:
                 raise ValueError("You can't run {} with "
                                  "--artifact option.".format(', '.join(rejected)))
 
         if extras.get('artifact') and 'all' in suites.keys():
-            non_compiled_suites = set(self.common_suites) - set(self.compiled_suites)
+            non_compiled_suites = set(
+                self.common_suites) - set(self.compiled_suites)
             message = ('You asked for |-u all| with |--artifact| but compiled-code tests ({tests})'
                        ' can\'t run against an artifact build. Running (-u {non_compiled_suites}) '
                        'instead.')
             string_format = {
                 'tests': ','.join(self.compiled_suites),
                 'non_compiled_suites': ','.join(non_compiled_suites),
             }
             print(message.format(**string_format))
             del suites['all']
-            suites.update({suite_name: None for suite_name in non_compiled_suites})
+            suites.update(
+                {suite_name: None for suite_name in non_compiled_suites})
 
         if suites:
             parts.append("-u")
             parts.append(",".join("%s%s" % (k, "[%s]" % ",".join(v) if v else "")
                                   for k, v in sorted(suites.items())))
 
         if talos:
             parts.append("-t")
@@ -440,17 +446,18 @@ class AutoTry(object):
             parts.append(",".join(jobs))
 
         if tags:
             parts.append(' '.join('--tag %s' % t for t in tags))
 
         if paths:
             parts.append("--try-test-paths %s" % " ".join(sorted(paths)))
 
-        args_by_dest = {v['dest']: k for k, v in SyntaxParser.pass_through_arguments.items()}
+        args_by_dest = {v['dest']: k for k, v in
+                        SyntaxParser.pass_through_arguments.items()}
         for dest, value in extras.iteritems():
             assert dest in args_by_dest
             arg = args_by_dest[dest]
             action = SyntaxParser.pass_through_arguments[arg]['action']
             if action == 'store':
                 parts.append(arg)
                 parts.append(value)
             if action == 'append':
@@ -480,42 +487,45 @@ class AutoTry(object):
         tests_selected = kwargs["tests"] or kwargs["paths"] or kwargs["tags"]
         if kwargs["platforms"] is None and (kwargs["jobs"] is None or tests_selected):
             if 'AUTOTRY_PLATFORM_HINT' in os.environ:
                 kwargs["platforms"] = [os.environ['AUTOTRY_PLATFORM_HINT']]
             elif tests_selected:
                 print("Must specify platform when selecting tests.")
                 sys.exit(1)
             else:
-                print("Either platforms or jobs must be specified as an argument to autotry.")
+                print(
+                    "Either platforms or jobs must be specified as an argument to autotry.")
                 sys.exit(1)
 
         try:
             platforms = (self.normalise_list(kwargs["platforms"])
                          if kwargs["platforms"] else {})
         except ValueError as e:
             print("Error parsing -p argument:\n%s" % e.message)
             sys.exit(1)
 
         try:
             tests = (self.normalise_list(kwargs["tests"], allow_subitems=True)
                      if kwargs["tests"] else {})
         except ValueError as e:
-            print("Error parsing -u argument (%s):\n%s" % (kwargs["tests"], e.message))
+            print("Error parsing -u argument (%s):\n%s" %
+                  (kwargs["tests"], e.message))
             sys.exit(1)
 
         try:
             talos = (self.normalise_list(kwargs["talos"], allow_subitems=True)
                      if kwargs["talos"] else [])
         except ValueError as e:
             print("Error parsing -t argument:\n%s" % e.message)
             sys.exit(1)
 
         try:
-            jobs = (self.normalise_list(kwargs["jobs"]) if kwargs["jobs"] else {})
+            jobs = (self.normalise_list(
+                kwargs["jobs"]) if kwargs["jobs"] else {})
         except ValueError as e:
             print("Error parsing -j argument:\n%s" % e.message)
             sys.exit(1)
 
         paths = []
         for p in kwargs["paths"]:
             p = mozpath.normpath(os.path.abspath(p))
             if not (os.path.isdir(p) and p.startswith(self.topsrcdir)):
@@ -524,35 +534,38 @@ class AutoTry(object):
                 sys.exit(1)
             if len(p) <= len(self.topsrcdir):
                 print('Specified path "%s" is at the top of the srcdir and would'
                       ' select all tests.' % p)
                 sys.exit(1)
             paths.append(os.path.relpath(p, self.topsrcdir))
 
         try:
-            tags = self.normalise_list(kwargs["tags"]) if kwargs["tags"] else []
+            tags = self.normalise_list(
+                kwargs["tags"]) if kwargs["tags"] else []
         except ValueError as e:
             print("Error parsing --tags argument:\n%s" % e.message)
             sys.exit(1)
 
-        extra_values = {k['dest'] for k in SyntaxParser.pass_through_arguments.values()}
+        extra_values = {k['dest']
+                        for k in SyntaxParser.pass_through_arguments.values()}
         extra_args = {k: v for k, v in kwargs.items()
                       if k in extra_values and v}
 
         return kwargs["builds"], platforms, tests, talos, jobs, paths, tags, extra_args
 
     def run(self, **kwargs):
         if kwargs["mod_presets"]:
             getattr(preset, kwargs["mod_presets"])(section='try')
             sys.exit()
 
         if kwargs["preset"]:
             value = preset.load(kwargs["preset"], section='try')[0]
-            defaults = vars(SyntaxParser().parse_args(self.split_try_string(value)))
+            defaults = vars(SyntaxParser().parse_args(
+                self.split_try_string(value)))
 
             if defaults is None:
                 print("No saved configuration called %s found in autotry.ini" % kwargs["preset"],
                       file=sys.stderr)
 
             for key, value in kwargs.iteritems():
                 if value in (None, []) and key in defaults:
                     kwargs[key] = defaults[key]
@@ -561,30 +574,32 @@ class AutoTry(object):
             if kwargs['detect_paths']:
                 res = self.resolver.get_outgoing_metadata()
                 kwargs['paths'] = res['paths']
                 kwargs['tags'] = res['tags']
             else:
                 kwargs['paths'] = set()
                 kwargs['tags'] = set()
 
-        builds, platforms, tests, talos, jobs, paths, tags, extra = self.validate_args(**kwargs)
+        builds, platforms, tests, talos, jobs, paths, tags, extra = self.validate_args(
+            **kwargs)
 
         if paths or tags:
             paths = [os.path.relpath(os.path.normpath(os.path.abspath(item)), self.topsrcdir)
                      for item in paths]
             paths_by_flavor = self.paths_by_flavor(paths=paths, tags=tags)
 
             if not paths_by_flavor and not tests:
                 print("No tests were found when attempting to resolve paths:\n\n\t%s" %
                       paths)
                 sys.exit(1)
 
             if not kwargs["intersection"]:
-                paths_by_flavor = self.remove_duplicates(paths_by_flavor, tests)
+                paths_by_flavor = self.remove_duplicates(
+                    paths_by_flavor, tests)
         else:
             paths_by_flavor = {}
 
         # No point in dealing with artifacts if we aren't running any builds
         local_artifact_build = False
         if platforms:
             local_artifact_build = kwargs.get('local_artifact_build', False)
 
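
The args_by_dest comprehension rewrapped above inverts the pass-through flag
table from dest name back to flag string. A sketch with a hypothetical
pass_through_arguments table (the real one lives in SyntaxParser):

    pass_through_arguments = {
        '--setenv': {'dest': 'setenv', 'action': 'append'},
        '--interactive': {'dest': 'interactive', 'action': 'store_true'},
    }
    args_by_dest = {v['dest']: k for k, v in pass_through_arguments.items()}
    # args_by_dest == {'setenv': '--setenv', 'interactive': '--interactive'}
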
--- a/tools/tryselect/tasks.py
+++ b/tools/tryselect/tasks.py
@@ -34,17 +34,18 @@ https://firefox-source-docs.mozilla.org/
 """
 
 
 def invalidate(cache, root):
     if not os.path.isfile(cache):
         return
 
     tc_dir = os.path.join(root, 'taskcluster')
-    tmod = max(os.path.getmtime(os.path.join(tc_dir, p)) for p, _ in FileFinder(tc_dir))
+    tmod = max(os.path.getmtime(os.path.join(tc_dir, p))
+               for p, _ in FileFinder(tc_dir))
     cmod = os.path.getmtime(cache)
 
     if tmod > cmod:
         os.remove(cache)
 
 
 def generate_tasks(params, full, root):
     params = params or "project=mozilla-central"
--- a/tools/tryselect/templates.py
+++ b/tools/tryselect/templates.py
@@ -71,17 +71,18 @@ class Path(Template):
         if not paths:
             return
 
         for p in paths:
             if not os.path.exists(p):
                 print("error: '{}' is not a valid path.".format(p), file=sys.stderr)
                 sys.exit(1)
 
-        paths = [mozpath.relpath(mozpath.join(os.getcwd(), p), build.topsrcdir) for p in paths]
+        paths = [mozpath.relpath(mozpath.join(
+            os.getcwd(), p), build.topsrcdir) for p in paths]
         return {
             'env': {
                 # can't use os.pathsep as machine splitting could be a different platform
                 'MOZHARNESS_TEST_PATHS': ':'.join(paths),
             }
         }
 
 
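
As the comment in the Path template notes, paths are joined with a hard-coded
':' rather than os.pathsep, since the machine consuming MOZHARNESS_TEST_PATHS
may run a different platform. With the same paths exercised in
test_templates.py below:

    paths = ['dom/indexedDB', 'testing']
    env = {'MOZHARNESS_TEST_PATHS': ':'.join(paths)}
    # env == {'MOZHARNESS_TEST_PATHS': 'dom/indexedDB:testing'}
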
--- a/tools/tryselect/test/test_fuzzy.py
+++ b/tools/tryselect/test/test_fuzzy.py
@@ -11,24 +11,26 @@ from moztest.resolve import TestResolver
 from tryselect.selectors import fuzzy
 
 
 @pytest.fixture
 def patch_resolver(monkeypatch):
     def inner(suites, tests):
         def fake_test_metadata(*args, **kwargs):
             return suites, tests
-        monkeypatch.setattr(TestResolver, 'resolve_metadata', fake_test_metadata)
+        monkeypatch.setattr(
+            TestResolver, 'resolve_metadata', fake_test_metadata)
     return inner
 
 
 def test_filter_by_paths(patch_resolver):
     tasks = ['foobar/xpcshell-1', 'foobar/mochitest', 'foobar/xpcshell']
 
     patch_resolver(['xpcshell'], {})
     assert fuzzy.filter_by_paths(tasks, 'dummy') == []
 
     patch_resolver([], [{'flavor': 'xpcshell'}])
-    assert fuzzy.filter_by_paths(tasks, 'dummy') == ['foobar/xpcshell-1', 'foobar/xpcshell']
+    assert fuzzy.filter_by_paths(tasks, 'dummy') == [
+        'foobar/xpcshell-1', 'foobar/xpcshell']
 
 
 if __name__ == '__main__':
     mozunit.main()
--- a/tools/tryselect/test/test_templates.py
+++ b/tools/tryselect/test/test_templates.py
@@ -16,21 +16,23 @@ from tryselect.templates import all_temp
 # templates have a list of tests of the form (input, expected)
 TEMPLATE_TESTS = {
     'artifact': [
         (['--no-artifact'], None),
         (['--artifact'], {'artifact': {'enabled': '1'}}),
     ],
     'env': [
         ([], None),
-        (['--env', 'foo=bar', '--env', 'num=10'], {'env': {'foo': 'bar', 'num': '10'}}),
+        (['--env', 'foo=bar', '--env', 'num=10'],
+         {'env': {'foo': 'bar', 'num': '10'}}),
     ],
     'path': [
         ([], None),
-        (['dom/indexedDB'], {'env': {'MOZHARNESS_TEST_PATHS': 'dom/indexedDB'}}),
+        (['dom/indexedDB'],
+         {'env': {'MOZHARNESS_TEST_PATHS': 'dom/indexedDB'}}),
         (['dom/indexedDB', 'testing'],
          {'env': {'MOZHARNESS_TEST_PATHS': 'dom/indexedDB:testing'}}),
         (['invalid/path'], SystemExit),
     ],
     'rebuild': [
         ([], None),
         (['--rebuild', '10'], {'rebuild': 10}),
         (['--rebuild', '1'], SystemExit),
--- a/tools/tryselect/vcs.py
+++ b/tools/tryselect/vcs.py
@@ -50,33 +50,35 @@ class VCSHelper(object):
         # First check if we're in an hg repo, if not try git
         commands = (
             ['hg', 'root'],
             ['git', 'rev-parse', '--show-toplevel'],
         )
 
         for cmd in commands:
             try:
-                output = subprocess.check_output(cmd, stderr=open(os.devnull, 'w')).strip()
+                output = subprocess.check_output(
+                    cmd, stderr=open(os.devnull, 'w')).strip()
             except (subprocess.CalledProcessError, OSError):
                 continue
 
             return cmd[0], output
         return None, ''
 
     @classmethod
     def create(cls):
         vcs, root = cls.find_vcs()
         if not vcs:
             print(VCS_NOT_FOUND)
             sys.exit(1)
         return vcs_class[vcs](root)
 
     def run(self, cmd):
-        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        proc = subprocess.Popen(
+            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         out, err = proc.communicate()
 
         if proc.returncode:
             print("Error running `{}`:".format(' '.join(cmd)))
             if out:
                 print("stdout:\n{}".format(out))
             if err:
                 print("stderr:\n{}".format(err))
@@ -168,17 +170,18 @@ class HgHelper(VCSHelper):
         stat = [s for s in self.run(['hg', 'status', '-amrn']).split() if s]
         return len(stat) > 0
 
 
 class GitHelper(VCSHelper):
 
     def _push_to_try(self, msg, config):
         try:
-            subprocess.check_output(['git', 'cinnabar', '--version'], stderr=subprocess.STDOUT)
+            subprocess.check_output(
+                ['git', 'cinnabar', '--version'], stderr=subprocess.STDOUT)
         except subprocess.CalledProcessError:
             print(GIT_CINNABAR_NOT_FOUND)
             return 1
 
         if config:
             self.run(['git', 'add', config])
         subprocess.check_call(['git', 'commit', '--allow-empty', '-m', msg])
         try:
@@ -190,17 +193,18 @@ class GitHelper(VCSHelper):
     @property
     def files_changed(self):
         # This finds the files changed on the current branch based on the
         # diff of the current branch its merge-base base with other branches.
         current_branch = self.run(['git', 'rev-parse', 'HEAD']).strip()
         all_branches = self.run(['git', 'for-each-ref', 'refs/heads', 'refs/remotes',
                                  '--format=%(objectname)']).splitlines()
         other_branches = set(all_branches) - set([current_branch])
-        base_commit = self.run(['git', 'merge-base', 'HEAD'] + list(other_branches)).strip()
+        base_commit = self.run(
+            ['git', 'merge-base', 'HEAD'] + list(other_branches)).strip()
         return self.run(['git', 'diff', '--name-only', '-z', 'HEAD',
                          base_commit]).strip('\0').split('\0')
 
     @property
     def has_uncommitted_changes(self):
         stat = [s for s in self.run(['git', 'diff', '--cached', '--name-only',
                                      '--diff-filter=AMD']).split() if s]
         return len(stat) > 0
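
find_vcs above probes hg before git and treats any failure as "try the next
one". A self-contained sketch of that loop:

    import os
    import subprocess

    for cmd in (['hg', 'root'], ['git', 'rev-parse', '--show-toplevel']):
        try:
            with open(os.devnull, 'w') as devnull:
                root = subprocess.check_output(cmd, stderr=devnull).strip()
        except (subprocess.CalledProcessError, OSError):
            continue
        print("{} repo rooted at {}".format(cmd[0], root))
        break
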
--- a/tools/update-packaging/generatesnippet.py
+++ b/tools/update-packaging/generatesnippet.py
@@ -2,23 +2,25 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 """
 This script generates the complete snippet for a given locale or en-US
 Most of the parameters received are to generate the MAR's download URL
 and determine the MAR's filename
 """
-import sys, os, platform, sha
+import sys
+import os
+import sha
 from optparse import OptionParser
 from ConfigParser import ConfigParser
 from stat import ST_SIZE
 
+
 def main():
-    error = False
     parser = OptionParser(
         usage="%prog [options]")
     parser.add_option("--mar-path",
                       action="store",
                       dest="marPath",
                       help="[Required] Specify the absolute path where the MAR file is found.")
     parser.add_option("--application-ini-file",
                       action="store",
@@ -49,17 +51,18 @@ def main():
     parser.add_option("-v",
                       "--verbose",
                       action="store_true",
                       dest="verbose",
                       default=False,
                       help="This option increases the output of the script.")
     (options, args) = parser.parse_args()
     for req, msg in (('marPath', "the absolute path to the where the MAR file is"),
-                     ('applicationIniFile', "the absolute path to the application.ini file."),
+                     ('applicationIniFile',
+                      "the absolute path to the application.ini file."),
                      ('locale', "a locale."),
                      ('product', "specify a product."),
                      ('platform', "specify the platform.")):
         if not hasattr(options, req):
             parser.error('You must specify %s' % msg)
 
     if not options.downloadBaseURL or options.downloadBaseURL == '':
         options.downloadBaseURL = 'http://ftp.mozilla.org/pub/mozilla.org/%s/nightly' % options.product
@@ -75,26 +78,27 @@ def main():
                               options.platform,
                               options.branch)
     f = open(os.path.join(options.marPath, 'complete.update.snippet'), 'wb')
     f.write(snippet)
     f.close()
 
     if options.verbose:
         # Show in our logs what the contents of the snippet are
-        print snippet
+        print(snippet)
+
 
 def generateSnippet(abstDistDir, applicationIniFile, locale,
                     downloadBaseURL, product, platform, branch):
     # Let's extract information from application.ini
     c = ConfigParser()
     try:
         c.readfp(open(applicationIniFile))
-    except IOError, (stderror):
-       sys.exit(stderror)
+    except IOError as stderror:
+        sys.exit(stderror)
     buildid = c.get("App", "BuildID")
     appVersion = c.get("App", "Version")
     branchName = branch or c.get("App", "SourceRepository").split('/')[-1]
 
     marFileName = '%s-%s.%s.%s.complete.mar' % (
         product,
         appVersion,
         locale,
@@ -117,50 +121,53 @@ def generateSnippet(abstDistDir, applica
     snippet = """complete
 %(marDownloadURL)s
 sha1
 %(completeMarHash)s
 %(completeMarSize)s
 %(buildid)s
 %(appVersion)s
 %(appVersion)s
-""" % dict( marDownloadURL=marDownloadURL,
-            completeMarHash=completeMarHash,
-            completeMarSize=completeMarSize,
-            buildid=buildid,
-            appVersion=appVersion)
+""" % dict(marDownloadURL=marDownloadURL,
+           completeMarHash=completeMarHash,
+           completeMarSize=completeMarSize,
+           buildid=buildid,
+           appVersion=appVersion)
 
     return snippet
 
+
 def getFileHashAndSize(filepath):
     sha1Hash = 'UNKNOWN'
     size = 'UNKNOWN'
 
     try:
         # open in binary mode to make sure we get consistent results
         # across all platforms
         f = open(filepath, "rb")
         shaObj = sha.new(f.read())
         sha1Hash = shaObj.hexdigest()
         size = os.stat(filepath)[ST_SIZE]
-    except IOError, (stderror):
-       sys.exit(stderror)
+    except IOError as stderror:
+        sys.exit(stderror)
 
     return (sha1Hash, size)
 
+
 def datedDirPath(buildid, milestone):
     """
     Returns a string that will look like:
     2009/12/2009-12-31-09-mozilla-central
     """
-    year  = buildid[0:4]
+    year = buildid[0:4]
     month = buildid[4:6]
-    day   = buildid[6:8]
-    hour  = buildid[8:10]
+    day = buildid[6:8]
+    hour = buildid[8:10]
     datedDir = "%s-%s-%s-%s-%s" % (year,
                                    month,
                                    day,
                                    hour,
                                    milestone)
     return "%s/%s/%s" % (year, month, datedDir)
 
+
 if __name__ == '__main__':
     main()
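
datedDirPath above slices a 14-digit buildid into its date components; a
worked example matching the docstring's expected output:

    buildid = '20091231093000'  # hypothetical YYYYMMDDHHMMSS buildid
    year, month, day, hour = (buildid[0:4], buildid[4:6],
                              buildid[6:8], buildid[8:10])
    dated_dir = "%s-%s-%s-%s-%s" % (year, month, day, hour, 'mozilla-central')
    print("%s/%s/%s" % (year, month, dated_dir))
    # -> 2009/12/2009-12-31-09-mozilla-central
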
--- a/tools/update-packaging/make_incremental_updates.py
+++ b/tools/update-packaging/make_incremental_updates.py
@@ -1,317 +1,348 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import os
 import shutil
 import hashlib
-from os.path import join, getsize
-from stat import *
 import re
 import sys
 import getopt
 import time
-import datetime
-import string
 import tempfile
 import io
 
+
 class PatchInfo:
     """ Represents the meta-data associated with a patch
         work_dir = working dir where files are stored for this patch
         archive_files = list of files to include in this patch
         manifestv2 = set of manifest version 2 patch instructions
         manifestv3 = set of manifest version 3 patch instructions
         file_exclusion_list =
         files to exclude from this patch. names without slashes will be
         excluded anywhere in the directory hierarchy.   names with slashes
         will only be excluded at that exact path
         """
+
     def __init__(self, work_dir, file_exclusion_list, path_exclusion_list):
-        self.work_dir=work_dir
-        self.archive_files=[]
-        self.manifestv2=[]
-        self.manifestv3=[]
-        self.file_exclusion_list=file_exclusion_list
-        self.path_exclusion_list=path_exclusion_list
+        self.work_dir = work_dir
+        self.archive_files = []
+        self.manifestv2 = []
+        self.manifestv3 = []
+        self.file_exclusion_list = file_exclusion_list
+        self.path_exclusion_list = path_exclusion_list
 
     def append_add_instruction(self, filename):
         """ Appends an add instruction for this patch.
             if filename starts with distribution/extensions/.*/ this will add an
             add-if instruction that will add the file if the parent directory
             of the file exists.  This was ported from
             mozilla/tools/update-packaging/common.sh's make_add_instruction.
         """
         m = re.match("((?:|.*/)distribution/extensions/.*)/", filename)
         if m:
             # Directory immediately following extensions is used for the test
             testdir = m.group(1)
-            print('     add-if "'+testdir+'" "'+filename+'"')
-            self.manifestv2.append('add-if "'+testdir+'" "'+filename+'"')
-            self.manifestv3.append('add-if "'+testdir+'" "'+filename+'"')
+            print('     add-if "' + testdir + '" "' + filename + '"')
+            self.manifestv2.append(
+                'add-if "' + testdir + '" "' + filename + '"')
+            self.manifestv3.append(
+                'add-if "' + testdir + '" "' + filename + '"')
         else:
-            print('        add "'+filename+'"')
-            self.manifestv2.append('add "'+filename+'"')
-            self.manifestv3.append('add "'+filename+'"')
+            print('        add "' + filename + '"')
+            self.manifestv2.append('add "' + filename + '"')
+            self.manifestv3.append('add "' + filename + '"')
 
     def append_add_if_not_instruction(self, filename):
         """ Appends an add-if-not instruction to the version 3 manifest for this patch.
             This was ported from mozilla/tools/update-packaging/common.sh's
             make_add_if_not_instruction.
         """
-        print(' add-if-not "'+filename+'" "'+filename+'"')
-        self.manifestv3.append('add-if-not "'+filename+'" "'+filename+'"')
+        print(' add-if-not "' + filename + '" "' + filename + '"')
+        self.manifestv3.append(
+            'add-if-not "' + filename + '" "' + filename + '"')
 
     def append_patch_instruction(self, filename, patchname):
         """ Appends a patch instruction for this patch.
 
             filename = file to patch
             patchname = patchfile to apply to file
 
             if filename starts with distribution/extensions/.*/ this will add a
             patch-if instruction that will patch the file if the parent
             directory of the file exists. This was ported from
             mozilla/tools/update-packaging/common.sh's make_patch_instruction.
         """
         m = re.match("((?:|.*/)distribution/extensions/.*)/", filename)
         if m:
             testdir = m.group(1)
-            print('   patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"')
-            self.manifestv2.append('patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"')
-            self.manifestv3.append('patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"')
+            print('   patch-if "' + testdir + '" "' +
+                  patchname + '" "' + filename + '"')
+            self.manifestv2.append(
+                'patch-if "' + testdir + '" "' + patchname + '" "' + filename + '"')
+            self.manifestv3.append(
+                'patch-if "' + testdir + '" "' + patchname + '" "' + filename + '"')
         else:
-            print('      patch "'+patchname+'" "'+filename+'"')
-            self.manifestv2.append('patch "'+patchname+'" "'+filename+'"')
-            self.manifestv3.append('patch "'+patchname+'" "'+filename+'"')
+            print('      patch "' + patchname + '" "' + filename + '"')
+            self.manifestv2.append(
+                'patch "' + patchname + '" "' + filename + '"')
+            self.manifestv3.append(
+                'patch "' + patchname + '" "' + filename + '"')
 
     def append_remove_instruction(self, filename):
         """ Appends an remove instruction for this patch.
             This was ported from
             mozilla/tools/update-packaging/common.sh/make_remove_instruction
         """
         if filename.endswith("/"):
-            print('      rmdir "'+filename+'"')
-            self.manifestv2.append('rmdir "'+filename+'"')
-            self.manifestv3.append('rmdir "'+filename+'"')
+            print('      rmdir "' + filename + '"')
+            self.manifestv2.append('rmdir "' + filename + '"')
+            self.manifestv3.append('rmdir "' + filename + '"')
         elif filename.endswith("/*"):
             filename = filename[:-1]
-            print('    rmrfdir "'+filename+'"')
-            self.manifestv2.append('rmrfdir "'+filename+'"')
-            self.manifestv3.append('rmrfdir "'+filename+'"')
+            print('    rmrfdir "' + filename + '"')
+            self.manifestv2.append('rmrfdir "' + filename + '"')
+            self.manifestv3.append('rmrfdir "' + filename + '"')
         else:
-            print('     remove "'+filename+'"')
-            self.manifestv2.append('remove "'+filename+'"')
-            self.manifestv3.append('remove "'+filename+'"')
+            print('     remove "' + filename + '"')
+            self.manifestv2.append('remove "' + filename + '"')
+            self.manifestv3.append('remove "' + filename + '"')
 
     def create_manifest_files(self):
         """ Create the v2 manifest file in the root of the work_dir """
-        manifest_file_path = os.path.join(self.work_dir,"updatev2.manifest")
+        manifest_file_path = os.path.join(self.work_dir, "updatev2.manifest")
         manifest_file = open(manifest_file_path, "wb")
         manifest_file.writelines(io.BytesIO(b"type \"partial\"\n"))
-        manifest_file.writelines(io.BytesIO('\n'.join(self.manifestv2).encode('ascii')))
+        manifest_file.writelines(io.BytesIO(
+            '\n'.join(self.manifestv2).encode('ascii')))
         manifest_file.writelines(io.BytesIO(b"\n"))
         manifest_file.close()
 
         xz_file(manifest_file_path)
         self.archive_files.append('"updatev2.manifest"')
 
         """ Create the v3 manifest file in the root of the work_dir """
-        manifest_file_path = os.path.join(self.work_dir,"updatev3.manifest")
+        manifest_file_path = os.path.join(self.work_dir, "updatev3.manifest")
         manifest_file = open(manifest_file_path, "wb")
         manifest_file.writelines(io.BytesIO(b"type \"partial\"\n"))
-        manifest_file.writelines(io.BytesIO('\n'.join(self.manifestv3).encode('ascii')))
+        manifest_file.writelines(io.BytesIO(
+            '\n'.join(self.manifestv3).encode('ascii')))
         manifest_file.writelines(io.BytesIO(b"\n"))
         manifest_file.close()
 
         xz_file(manifest_file_path)
         self.archive_files.append('"updatev3.manifest"')
 
     def build_marfile_entry_hash(self, root_path):
         """ Iterates through the root_path, creating a MarFileEntry for each file
             and directory in that path.  Excludes any filenames in the file_exclusion_list
         """
         mar_entry_hash = {}
         filename_set = set()
         dirname_set = set()
         for root, dirs, files in os.walk(root_path):
             for name in files:
                 # filename is the relative path from root directory
-                partial_path = root[len(root_path)+1:]
+                partial_path = root[len(root_path) + 1:]
                 if name not in self.file_exclusion_list:
                     filename = os.path.join(partial_path, name)
-                    if "/"+filename not in self.path_exclusion_list:
-                        mar_entry_hash[filename]=MarFileEntry(root_path, filename)
+                    if "/" + filename not in self.path_exclusion_list:
+                        mar_entry_hash[filename] = MarFileEntry(
+                            root_path, filename)
                         filename_set.add(filename)
 
             for name in dirs:
                 # dirname is the relative path from root directory
-                partial_path = root[len(root_path)+1:]
+                partial_path = root[len(root_path) + 1:]
                 if name not in self.file_exclusion_list:
                     dirname = os.path.join(partial_path, name)
-                    if "/"+dirname not in self.path_exclusion_list:
-                        dirname = dirname+"/"
-                        mar_entry_hash[dirname]=MarFileEntry(root_path, dirname)
+                    if "/" + dirname not in self.path_exclusion_list:
+                        dirname = dirname + "/"
+                        mar_entry_hash[dirname] = MarFileEntry(
+                            root_path, dirname)
                         dirname_set.add(dirname)
 
         return mar_entry_hash, filename_set, dirname_set
 
 
 class MarFileEntry:
     """Represents a file inside a Mozilla Archive Format (MAR)
         abs_path = abspath to the file
         name =  relative path within the mar.  e.g.
           foo.mar/dir/bar.txt extracted into /tmp/foo:
             abs_path=/tmp/foo/dir/bar.txt
             name = dir/bar.txt
     """
+
     def __init__(self, root, name):
         """root = path the the top of the mar
            name = relative path within the mar"""
-        self.name=name.replace("\\", "/")
-        self.abs_path=os.path.join(root,name)
-        self.sha_cache=None
+        self.name = name.replace("\\", "/")
+        self.abs_path = os.path.join(root, name)
+        self.sha_cache = None
 
     def __str__(self):
-        return 'Name: %s FullPath: %s' %(self.name,self.abs_path)
+        return 'Name: %s FullPath: %s' % (self.name, self.abs_path)
 
     def calc_file_sha_digest(self, filename):
         """ Returns sha digest of given filename"""
         file_content = open(filename, 'rb').read()
         return hashlib.sha1(file_content).digest()
 
     def sha(self):
         """ Returns sha digest of file repreesnted by this _marfile_entry"""
         if not self.sha_cache:
-            self.sha_cache=self.calc_file_sha_digest(self.abs_path)
+            self.sha_cache = self.calc_file_sha_digest(self.abs_path)
         return self.sha_cache
 
+
 def exec_shell_cmd(cmd):
     """Execs shell cmd and raises an exception if the cmd fails"""
     if (os.system(cmd)):
-        raise Exception("cmd failed "+cmd)
+        raise Exception("cmd failed " + cmd)
 
 
 def copy_file(src_file_abs_path, dst_file_abs_path):
     """ Copies src to dst creating any parent dirs required in dst first """
-    dst_file_dir=os.path.dirname(dst_file_abs_path)
+    dst_file_dir = os.path.dirname(dst_file_abs_path)
     if not os.path.exists(dst_file_dir):
-         os.makedirs(dst_file_dir)
+        os.makedirs(dst_file_dir)
     # Copy the file over
     shutil.copy2(src_file_abs_path, dst_file_abs_path)
 
+
 def xz_file(filename):
     """ XZ compresses the file in place.  The original file is replaced with the xz compressed version of itself
         assumes the path is absolute"""
-    exec_shell_cmd('xz --compress --x86 --lzma2 --format=xz --check=crc64 "' + filename+'"')
-    os.rename(filename+".xz",filename)
+    exec_shell_cmd(
+        'xz --compress --x86 --lzma2 --format=xz --check=crc64 "' + filename + '"')
+    os.rename(filename + ".xz", filename)
+
 
 def xzunzip_file(filename):
     """ xz decompresses the file in palce.  The original file is replaced with a xz decompressed version of itself.
         doesn't matter if the filename ends in .xz or not"""
     if not filename.endswith(".xz"):
-        os.rename(filename, filename+".xz")
-        filename=filename+".xz"
-    exec_shell_cmd('xz -d "' + filename+'"')
+        os.rename(filename, filename + ".xz")
+        filename = filename + ".xz"
+    exec_shell_cmd('xz -d "' + filename + '"')
 
 
 def extract_mar(filename, work_dir):
     """ Extracts the marfile intot he work_dir
         assumes work_dir already exists otherwise will throw osError"""
-    print("Extracting "+filename+" to "+work_dir)
+    print("Extracting " + filename + " to " + work_dir)
     saved_path = os.getcwd()
     try:
         os.chdir(work_dir)
-        exec_shell_cmd("mar -x "+filename)
+        exec_shell_cmd("mar -x " + filename)
     finally:
         os.chdir(saved_path)
 
+
 def create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info):
     """ Creates the partial patch file and manifest entry for the pair of files passed in
     """
-    if not (from_marfile_entry.sha(),to_marfile_entry.sha()) in shas:
-        print('diffing "'+from_marfile_entry.name+'\"')
-        #bunzip to/from
+    if not (from_marfile_entry.sha(), to_marfile_entry.sha()) in shas:
+        print('diffing "' + from_marfile_entry.name + '\"')
+        # bunzip to/from
         xzunzip_file(from_marfile_entry.abs_path)
         xzunzip_file(to_marfile_entry.abs_path)
 
         # The patch file will be created in the working directory with the
         # name of the file in the mar + .patch
-        patch_file_abs_path = os.path.join(patch_info.work_dir,from_marfile_entry.name+".patch")
-        patch_file_dir=os.path.dirname(patch_file_abs_path)
+        patch_file_abs_path = os.path.join(
+            patch_info.work_dir, from_marfile_entry.name + ".patch")
+        patch_file_dir = os.path.dirname(patch_file_abs_path)
         if not os.path.exists(patch_file_dir):
             os.makedirs(patch_file_dir)
 
         # Create xz compressed patch file
-        exec_shell_cmd("mbsdiff "+from_marfile_entry.abs_path+" "+to_marfile_entry.abs_path+" "+patch_file_abs_path)
+        exec_shell_cmd("mbsdiff " + from_marfile_entry.abs_path +
+                       " " + to_marfile_entry.abs_path + " " + patch_file_abs_path)
         xz_file(patch_file_abs_path)
 
         # Create xz compressed full file
-        full_file_abs_path =  os.path.join(patch_info.work_dir, to_marfile_entry.name)
+        full_file_abs_path = os.path.join(
+            patch_info.work_dir, to_marfile_entry.name)
         shutil.copy2(to_marfile_entry.abs_path, full_file_abs_path)
         xz_file(full_file_abs_path)
 
         if os.path.getsize(patch_file_abs_path) < os.path.getsize(full_file_abs_path):
             # Patch is smaller than file.  Remove the file and add patch to manifest
             os.remove(full_file_abs_path)
-            file_in_manifest_name = from_marfile_entry.name+".patch"
+            file_in_manifest_name = from_marfile_entry.name + ".patch"
             file_in_manifest_abspath = patch_file_abs_path
-            patch_info.append_patch_instruction(to_marfile_entry.name, file_in_manifest_name)
+            patch_info.append_patch_instruction(
+                to_marfile_entry.name, file_in_manifest_name)
         else:
             # File is smaller than patch.  Remove the patch and add file to manifest
             os.remove(patch_file_abs_path)
             file_in_manifest_name = from_marfile_entry.name
             file_in_manifest_abspath = full_file_abs_path
             patch_info.append_add_instruction(file_in_manifest_name)
 
-        shas[from_marfile_entry.sha(),to_marfile_entry.sha()] = (file_in_manifest_name,file_in_manifest_abspath)
-        patch_info.archive_files.append('"'+file_in_manifest_name+'"')
+        shas[from_marfile_entry.sha(), to_marfile_entry.sha()] = (
+            file_in_manifest_name, file_in_manifest_abspath)
+        patch_info.archive_files.append('"' + file_in_manifest_name + '"')
     else:
-        filename, src_file_abs_path = shas[from_marfile_entry.sha(),to_marfile_entry.sha()]
+        filename, src_file_abs_path = shas[
+            from_marfile_entry.sha(), to_marfile_entry.sha()]
         # We've already calculated the patch for this pair of files.
         if (filename.endswith(".patch")):
             # print "skipping diff: "+from_marfile_entry.name
             # Patch was smaller than file - add patch instruction to manifest
-            file_in_manifest_name = to_marfile_entry.name+'.patch';
-            patch_info.append_patch_instruction(to_marfile_entry.name, file_in_manifest_name)
+            file_in_manifest_name = to_marfile_entry.name + '.patch'
+            patch_info.append_patch_instruction(
+                to_marfile_entry.name, file_in_manifest_name)
         else:
             # File was smaller than patch - add file to manifest
             file_in_manifest_name = to_marfile_entry.name
             patch_info.append_add_instruction(file_in_manifest_name)
         # Copy the pre-calculated file into our new patch work area
-        copy_file(src_file_abs_path, os.path.join(patch_info.work_dir, file_in_manifest_name))
-        patch_info.archive_files.append('"'+file_in_manifest_name+'"')
+        copy_file(src_file_abs_path, os.path.join(
+            patch_info.work_dir, file_in_manifest_name))
+        patch_info.archive_files.append('"' + file_in_manifest_name + '"')
+
 
 def create_add_patch_for_file(to_marfile_entry, patch_info):
     """  Copy the file to the working dir, add the add instruction, and add it to the list of archive files """
-    copy_file(to_marfile_entry.abs_path, os.path.join(patch_info.work_dir, to_marfile_entry.name))
+    copy_file(to_marfile_entry.abs_path, os.path.join(
+        patch_info.work_dir, to_marfile_entry.name))
     patch_info.append_add_instruction(to_marfile_entry.name)
-    patch_info.archive_files.append('"'+to_marfile_entry.name+'"')
+    patch_info.archive_files.append('"' + to_marfile_entry.name + '"')
+
 
 def create_add_if_not_patch_for_file(to_marfile_entry, patch_info):
     """  Copy the file to the working dir, add the add-if-not instruction, and add it to the list of archive files """
-    copy_file(to_marfile_entry.abs_path, os.path.join(patch_info.work_dir, to_marfile_entry.name))
+    copy_file(to_marfile_entry.abs_path, os.path.join(
+        patch_info.work_dir, to_marfile_entry.name))
     patch_info.append_add_if_not_instruction(to_marfile_entry.name)
-    patch_info.archive_files.append('"'+to_marfile_entry.name+'"')
+    patch_info.archive_files.append('"' + to_marfile_entry.name + '"')
+
 
 def process_explicit_remove_files(dir_path, patch_info):
     """ Looks for a 'removed-files' file in the dir_path.  If the removed-files does not exist
     this will throw.  If found adds the removed-files
     found in that file to the patch_info"""
 
     # Windows and linux have this file at the root of the dir
     list_file_path = os.path.join(dir_path, "removed-files")
     if not os.path.exists(list_file_path):
-        list_file_path = os.path.join(dir_path, "Contents/Resources/removed-files")
+        list_file_path = os.path.join(
+            dir_path, "Contents/Resources/removed-files")
 
     if (os.path.exists(list_file_path)):
         fd, tmppath = tempfile.mkstemp('', 'tmp', os.getcwd())
         os.close(fd)
-        exec_shell_cmd('xz -k -d --stdout "' + list_file_path + '" > "'+tmppath+'"')
+        exec_shell_cmd('xz -k -d --stdout "' +
+                       list_file_path + '" > "' + tmppath + '"')
         list_file = open(tmppath)
 
         lines = []
         for line in list_file:
             lines.append(line.strip())
 
         list_file.close()
         os.remove(tmppath)
@@ -319,74 +350,78 @@ def process_explicit_remove_files(dir_pa
         for line in lines:
             # Exclude any blank and comment lines.
             if line and not line.startswith("#"):
                 # Python on windows uses \ for path separators and the update
                 # manifests expects / for path separators on all platforms.
                 line = line.replace("\\", "/")
                 patch_info.append_remove_instruction(line)
 
+
 def create_partial_patch(from_dir_path, to_dir_path, patch_filename, shas, patch_info, forced_updates, add_if_not_list):
     """ Builds a partial patch by comparing the files in from_dir_path to those of to_dir_path"""
     # Canonicalize the paths for safety
     from_dir_path = os.path.abspath(from_dir_path)
     to_dir_path = os.path.abspath(to_dir_path)
     # Create a hashtable of the from  and to directories
-    from_dir_hash,from_file_set,from_dir_set = patch_info.build_marfile_entry_hash(from_dir_path)
-    to_dir_hash,to_file_set,to_dir_set = patch_info.build_marfile_entry_hash(to_dir_path)
+    from_dir_hash, from_file_set, from_dir_set = patch_info.build_marfile_entry_hash(
+        from_dir_path)
+    to_dir_hash, to_file_set, to_dir_set = patch_info.build_marfile_entry_hash(
+        to_dir_path)
     # Create a list of the forced updates
     forced_list = forced_updates.strip().split('|')
     # Require that the precomplete file is included in the complete update
     if "precomplete" in to_file_set:
         forced_list.append("precomplete")
     elif "Contents/Resources/precomplete" in to_file_set:
         forced_list.append("Contents/Resources/precomplete")
     # The check with \ file separators allows tests for Mac to run on Windows
     elif "Contents\Resources\precomplete" in to_file_set:
         forced_list.append("Contents\Resources\precomplete")
     else:
-        raise Exception("missing precomplete file in: "+to_dir_path)
+        raise Exception("missing precomplete file in: " + to_dir_path)
 
     if "removed-files" in to_file_set:
         forced_list.append("removed-files")
     elif "Contents/Resources/removed-files" in to_file_set:
         forced_list.append("Contents/Resources/removed-files")
     # The check with \ file separators allows tests for Mac to run on Windows
     elif "Contents\Resources\\removed-files" in to_file_set:
         forced_list.append("Contents\Resources\\removed-files")
     else:
-        raise Exception("missing removed-files file in: "+to_dir_path)
+        raise Exception("missing removed-files file in: " + to_dir_path)
 
     if "chrome.manifest" in to_file_set:
         forced_list.append("chrome.manifest")
     elif "Contents/Resources/chrome.manifest" in to_file_set:
         forced_list.append("Contents/Resources/chrome.manifest")
     # The check with \ file separators allows tests for Mac to run on Windows
     elif "Contents\Resources\\chrome.manifest" in to_file_set:
         forced_list.append("Contents\Resources\\chrome.manifest")
     else:
-        raise Exception("missing chrome.manifest file in: "+to_dir_path)
+        raise Exception("missing chrome.manifest file in: " + to_dir_path)
 
     # Files which exist in both sets need to be patched
     patch_filenames = list(from_file_set.intersection(to_file_set))
     patch_filenames.sort(reverse=True)
     for filename in patch_filenames:
         from_marfile_entry = from_dir_hash[filename]
         to_marfile_entry = to_dir_hash[filename]
         if os.path.basename(filename) in add_if_not_list:
             # This filename is in the add if not list, explicitly add-if-not
             create_add_if_not_patch_for_file(to_dir_hash[filename], patch_info)
         elif filename in forced_list:
-            print('Forcing "'+filename+'"')
+            print('Forcing "' + filename + '"')
             # This filename is in the forced list, explicitly add
             create_add_patch_for_file(to_dir_hash[filename], patch_info)
         else:
-          if from_marfile_entry.sha() != to_marfile_entry.sha():
-              # Not the same - calculate a patch
-              create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info)
+            if from_marfile_entry.sha() != to_marfile_entry.sha():
+                # Not the same - calculate a patch
+                create_partial_patch_for_file(
+                    from_marfile_entry, to_marfile_entry, shas, patch_info)
 
     # files in to_dir not in from_dir need to added
     add_filenames = list(to_file_set - from_file_set)
     add_filenames.sort(reverse=True)
     for filename in add_filenames:
         if os.path.basename(filename) in add_if_not_list:
             create_add_if_not_patch_for_file(to_dir_hash[filename], patch_info)
         else:
@@ -405,167 +440,181 @@ def create_partial_patch(from_dir_path, 
     remove_dirnames.sort(reverse=True)
     for dirname in remove_dirnames:
         patch_info.append_remove_instruction(from_dir_hash[dirname].name)
 
     # Construct the Manifest files
     patch_info.create_manifest_files()
 
     # And construct the mar
-    mar_cmd = 'mar -C '+patch_info.work_dir+' -c output.mar '+' '.join(patch_info.archive_files)
+    mar_cmd = 'mar -C ' + patch_info.work_dir + \
+        ' -c output.mar ' + ' '.join(patch_info.archive_files)
     exec_shell_cmd(mar_cmd)
 
     # Copy mar to final destination
     patch_file_dir = os.path.split(patch_filename)[0]
     if not os.path.exists(patch_file_dir):
         os.makedirs(patch_file_dir)
-    shutil.copy2(os.path.join(patch_info.work_dir,"output.mar"), patch_filename)
+    shutil.copy2(os.path.join(patch_info.work_dir,
+                              "output.mar"), patch_filename)
 
     return patch_filename
 
+
 def usage():
     print("-h for help")
     print("-f for patchlist_file")
 
+
 def get_buildid(work_dir):
     """ extracts buildid from MAR
     """
     ini = '%s/application.ini' % work_dir
     if not os.path.exists(ini):
         ini = '%s/Contents/Resources/application.ini' % work_dir
         if not os.path.exists(ini):
             print('WARNING: application.ini not found, cannot find build ID')
             return ''
 
     fd, tmppath = tempfile.mkstemp('', 'tmp', os.getcwd())
     os.close(fd)
-    exec_shell_cmd('xz -k -d --stdout "' + ini + '" > "'+tmppath+'"')
+    exec_shell_cmd('xz -k -d --stdout "' + ini + '" > "' + tmppath + '"')
     file = open(tmppath)
     for line in file:
         if line.find('BuildID') == 0:
             file.close()
             os.remove(tmppath)
             return line.strip().split('=')[1]
     print('WARNING: cannot find build ID in application.ini')
     file.close()
     os.remove(tmppath)
     return ''
 
+
 def decode_filename(filepath):
     """ Breaks filename/dir structure into component parts based on regex
         for example: firefox-3.0b3pre.en-US.linux-i686.complete.mar
         Or linux-i686/en-US/firefox-3.0b3.complete.mar
         Returns dict with keys product, version, locale, platform, type
     """
     try:
-      m = re.search(
-        '(?P<product>\w+)(-)(?P<version>\w+\.\w+(\.\w+){0,2})(\.)(?P<locale>.+?)(\.)(?P<platform>.+?)(\.)(?P<type>\w+)(.mar)',
-      os.path.basename(filepath))
-      return m.groupdict()
-    except Exception(exc):
-      try:
         m = re.search(
-          '(?P<platform>.+?)\/(?P<locale>.+?)\/(?P<product>\w+)-(?P<version>\w+\.\w+)\.(?P<type>\w+).mar',
-        filepath)
+            r'(?P<product>\w+)(-)(?P<version>\w+\.\w+(\.\w+){0,2})(\.)(?P<locale>.+?)(\.)(?P<platform>.+?)(\.)(?P<type>\w+)(\.mar)',
+            os.path.basename(filepath))
         return m.groupdict()
-      except:
-        raise Exception("could not parse filepath %s: %s" % (filepath, exc))
+    except Exception as exc:
+        try:
+            m = re.search(
+                r'(?P<platform>.+?)/(?P<locale>.+?)/(?P<product>\w+)-(?P<version>\w+\.\w+)\.(?P<type>\w+)\.mar',
+                filepath)
+            return m.groupdict()
+        except Exception:
+            raise Exception("could not parse filepath %s: %s" %
+                            (filepath, exc))
+
 
 def create_partial_patches(patches):
     """ Given the patches generates a set of partial patches"""
     shas = {}
 
     work_dir_root = None
     metadata = []
     try:
         work_dir_root = tempfile.mkdtemp('-fastmode', 'tmp', os.getcwd())
         print("Building patches using work dir: %s" % (work_dir_root))
 
         # Iterate through every patch set in the patch file
         patch_num = 1
         for patch in patches:
             startTime = time.time()
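+            # the patch descriptor parsed below is expected to be
+            # "<from mar>,<to mar>,<partial mar>,<forced updates>"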
 
-            from_filename,to_filename,patch_filename,forced_updates = patch.split(",")
-            from_filename,to_filename,patch_filename = os.path.abspath(from_filename),os.path.abspath(to_filename),os.path.abspath(patch_filename)
+            from_filename, to_filename, patch_filename, forced_updates = patch.split(",")
+            from_filename = os.path.abspath(from_filename)
+            to_filename = os.path.abspath(to_filename)
+            patch_filename = os.path.abspath(patch_filename)
 
             # Each patch iteration uses its own work dir
-            work_dir = os.path.join(work_dir_root,str(patch_num))
+            work_dir = os.path.join(work_dir_root, str(patch_num))
             os.mkdir(work_dir)
 
             # Extract from mar into from dir
-            work_dir_from =  os.path.join(work_dir,"from");
+            work_dir_from = os.path.join(work_dir, "from")
             os.mkdir(work_dir_from)
-            extract_mar(from_filename,work_dir_from)
+            extract_mar(from_filename, work_dir_from)
             from_decoded = decode_filename(from_filename)
             from_buildid = get_buildid(work_dir_from)
-            from_shasum = hashlib.sha1(open(from_filename, "rb").read()).hexdigest()
+            from_shasum = hashlib.sha1(
+                open(from_filename, "rb").read()).hexdigest()
-            from_size = str(os.path.getsize(to_filename))
+            from_size = str(os.path.getsize(from_filename))
 
             # Extract to mar into to dir
-            work_dir_to =  os.path.join(work_dir,"to")
+            work_dir_to = os.path.join(work_dir, "to")
             os.mkdir(work_dir_to)
             extract_mar(to_filename, work_dir_to)
-            to_decoded = decode_filename(from_filename)
+            to_decoded = decode_filename(to_filename)
             to_buildid = get_buildid(work_dir_to)
-            to_shasum = hashlib.sha1(open(to_filename, 'rb').read()).hexdigest()
+            to_shasum = hashlib.sha1(
+                open(to_filename, 'rb').read()).hexdigest()
             to_size = str(os.path.getsize(to_filename))
 
             mar_extract_time = time.time()
 
-            partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename, shas, PatchInfo(work_dir, ['update.manifest','updatev2.manifest','updatev3.manifest'],[]),forced_updates,['channel-prefs.js','update-settings.ini'])
-            partial_buildid = to_buildid
-            partial_shasum = hashlib.sha1(open(partial_filename, "rb").read()).hexdigest()
+            partial_filename = create_partial_patch(
+                work_dir_from, work_dir_to, patch_filename, shas,
+                PatchInfo(work_dir,
+                          ['update.manifest', 'updatev2.manifest',
+                           'updatev3.manifest'],
+                          []),
+                forced_updates,
+                ['channel-prefs.js', 'update-settings.ini'])
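+            # the three update manifests are regenerated rather than diffed,
+            # and channel-prefs.js / update-settings.ini are added with
+            # add-if-not so an existing copy on disk is left untouched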
+            partial_shasum = hashlib.sha1(
+                open(partial_filename, "rb").read()).hexdigest()
             partial_size = str(os.path.getsize(partial_filename))
 
             metadata.append({
-             'to_filename': os.path.basename(to_filename),
-             'from_filename': os.path.basename(from_filename),
-             'partial_filename': os.path.basename(partial_filename),
-             'to_buildid':to_buildid,
-             'from_buildid':from_buildid,
-             'to_sha1sum':to_shasum,
-             'from_sha1sum':from_shasum,
-             'partial_sha1sum':partial_shasum,
-             'to_size':to_size,
-             'from_size':from_size,
-             'partial_size':partial_size,
-             'to_version':to_decoded['version'],
-             'from_version':from_decoded['version'],
-             'locale':from_decoded['locale'],
-             'platform':from_decoded['platform'],
+                'to_filename': os.path.basename(to_filename),
+                'from_filename': os.path.basename(from_filename),
+                'partial_filename': os.path.basename(partial_filename),
+                'to_buildid': to_buildid,
+                'from_buildid': from_buildid,
+                'to_sha1sum': to_shasum,
+                'from_sha1sum': from_shasum,
+                'partial_sha1sum': partial_shasum,
+                'to_size': to_size,
+                'from_size': from_size,
+                'partial_size': partial_size,
+                'to_version': to_decoded['version'],
+                'from_version': from_decoded['version'],
+                'locale': from_decoded['locale'],
+                'platform': from_decoded['platform'],
             })
-            print("done with patch %s/%s time (%.2fs/%.2fs/%.2fs) (mar/patch/total)" % (str(patch_num),str(len(patches)),mar_extract_time-startTime,time.time()-mar_extract_time,time.time()-startTime))
+            print("done with patch %s/%s time (%.2fs/%.2fs/%.2fs) (mar/patch/total)" % (str(patch_num),
+                                                                                        str(len(patches)), mar_extract_time - startTime, time.time() - mar_extract_time, time.time() - startTime))
             patch_num += 1
         return metadata
     finally:
         # If we fail or get a ctrl-c during run be sure to clean up temp dir
         if (work_dir_root and os.path.exists(work_dir_root)):
             shutil.rmtree(work_dir_root)
 
+
 def main(argv):
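+    # typical invocation: make_incremental_updates.py -f <patchlist file>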
     patchlist_file = None
     try:
-         opts, args = getopt.getopt(argv, "hf:", ["help", "patchlist_file="])
-         for opt, arg in opts:
+        opts, args = getopt.getopt(argv, "hf:", ["help", "patchlist_file="])
+        for opt, arg in opts:
             if opt in ("-h", "--help"):
                 usage()
                 sys.exit()
             elif opt in ("-f", "--patchlist_file"):
                 patchlist_file = arg
     except getopt.GetoptError:
-          usage()
-          sys.exit(2)
+        usage()
+        sys.exit(2)
 
     if not patchlist_file:
         usage()
         sys.exit(2)
 
     patches = []
     f = open(patchlist_file, 'r')
     for line in f.readlines():
         patches.append(line)
     f.close()
     create_partial_patches(patches)
 
+
 if __name__ == "__main__":
     main(sys.argv[1:])
-
--- a/tools/update-packaging/test_make_incremental_updates.py
+++ b/tools/update-packaging/test_make_incremental_updates.py
@@ -1,57 +1,71 @@
 #!/usr/bin/python
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
 
 import unittest
 import make_incremental_updates as mkup
 from make_incremental_updates import PatchInfo, MarFileEntry
 
+
 class TestPatchInfo(unittest.TestCase):
     def setUp(self):
         self.work_dir = 'work_dir'
-        self.file_exclusion_list = ['update.manifest','updatev2.manifest','updatev3.manifest']
+        self.file_exclusion_list = ['update.manifest',
+                                    'updatev2.manifest', 'updatev3.manifest']
         self.path_exclusion_list = ['/readme.txt']
-        self.patch_info = PatchInfo(self.work_dir, self.file_exclusion_list, self.path_exclusion_list)
+        self.patch_info = PatchInfo(
+            self.work_dir, self.file_exclusion_list, self.path_exclusion_list)
 
     def testPatchInfo(self):
         self.assertEquals(self.work_dir, self.patch_info.work_dir)
         self.assertEquals([], self.patch_info.archive_files)
         self.assertEquals([], self.patch_info.manifestv2)
         self.assertEquals([], self.patch_info.manifestv3)
-        self.assertEquals(self.file_exclusion_list, self.patch_info.file_exclusion_list)
-        self.assertEquals(self.path_exclusion_list, self.patch_info.path_exclusion_list)
+        self.assertEquals(self.file_exclusion_list,
+                          self.patch_info.file_exclusion_list)
+        self.assertEquals(self.path_exclusion_list,
+                          self.patch_info.path_exclusion_list)
 
     def test_append_add_instruction(self):
         self.patch_info.append_add_instruction('file.test')
         self.assertEquals(['add "file.test"'], self.patch_info.manifestv2)
         self.assertEquals(['add "file.test"'], self.patch_info.manifestv3)
 
     def test_append_add_if_instruction(self):
-        self.patch_info.append_add_instruction('distribution/extensions/extension/file.test')
-        self.assertEquals(['add-if "distribution/extensions/extension" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv2)
-        self.assertEquals(['add-if "distribution/extensions/extension" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv3)
+        self.patch_info.append_add_instruction(
+            'distribution/extensions/extension/file.test')
+        self.assertEquals(
+            ['add-if "distribution/extensions/extension" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv2)
+        self.assertEquals(
+            ['add-if "distribution/extensions/extension" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv3)
 
     def test_append_add_if_not_instruction(self):
         self.patch_info.append_add_if_not_instruction('file.test')
         self.assertEquals([], self.patch_info.manifestv2)
-        self.assertEquals(['add-if-not "file.test" "file.test"'], self.patch_info.manifestv3)
+        self.assertEquals(['add-if-not "file.test" "file.test"'],
+                          self.patch_info.manifestv3)
 
     def test_append_patch_instruction(self):
         self.patch_info.append_patch_instruction('file.test', 'patchname')
-        self.assertEquals(['patch "patchname" "file.test"'], self.patch_info.manifestv2)
-        self.assertEquals(['patch "patchname" "file.test"'], self.patch_info.manifestv3)
+        self.assertEquals(['patch "patchname" "file.test"'],
+                          self.patch_info.manifestv2)
+        self.assertEquals(['patch "patchname" "file.test"'],
+                          self.patch_info.manifestv3)
 
     def test_append_patch_if_instruction(self):
-        self.patch_info.append_patch_instruction('distribution/extensions/extension/file.test', 'patchname')
-        self.assertEquals(['patch-if "distribution/extensions/extension" "patchname" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv2)
-        self.assertEquals(['patch-if "distribution/extensions/extension" "patchname" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv3)
+        self.patch_info.append_patch_instruction(
+            'distribution/extensions/extension/file.test', 'patchname')
+        self.assertEquals(
+            ['patch-if "distribution/extensions/extension" "patchname" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv2)
+        self.assertEquals(
+            ['patch-if "distribution/extensions/extension" "patchname" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv3)
 
     def test_append_remove_instruction(self):
         self.patch_info.append_remove_instruction('file.test')
         self.assertEquals(['remove "file.test"'], self.patch_info.manifestv2)
         self.assertEquals(['remove "file.test"'], self.patch_info.manifestv3)
 
     def test_append_rmdir_instruction(self):
         self.patch_info.append_remove_instruction('dirtest/')
@@ -64,17 +78,19 @@ class TestPatchInfo(unittest.TestCase):
         self.assertEquals(['rmrfdir "dirtest/"'], self.patch_info.manifestv3)
 
     """ FIXME touches the filesystem, need refactoring
     def test_create_manifest_file(self):
         self.patch_info.create_manifest_file()
     """
 
     def test_build_marfile_entry_hash(self):
-        self.assertEquals(({}, set([]), set([])), self.patch_info.build_marfile_entry_hash('root_path'))
+        self.assertEquals(({}, set([]), set([])),
+                          self.patch_info.build_marfile_entry_hash('root_path'))
+
 
 """ FIXME touches the filesystem, need refactoring
 class TestMarFileEntry(unittest.TestCase):
     def setUp(self):
         root_path = '.'
         self.filename = 'file.test'
         f = open(self.filename, 'w')
         f.write('Test data\n')
@@ -91,20 +107,22 @@ class TestMarFileEntry(unittest.TestCase
     def test_sha(self):
         f = open('test.sha', 'r')
         goodSha = f.read()
         f.close()
         sha = self.mar_file_entry.sha()
         self.assertEquals(goodSha, sha)
 """
 
+
 class TestMakeIncrementalUpdates(unittest.TestCase):
     def setUp(self):
         work_dir = '.'
-        self.patch_info = PatchInfo(work_dir, ['update.manifest','updatev2.manifest','updatev3.manifest'],['/readme.txt'])
+        self.patch_info = PatchInfo(work_dir,
+                                    ['update.manifest', 'updatev2.manifest',
+                                     'updatev3.manifest'],
+                                    ['/readme.txt'])
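+        # mirrors the exclusion lists that create_partial_patches passes to
+        # PatchInfo in make_incremental_updates.py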
         root_path = '/'
         filename = 'test.file'
         self.mar_file_entry = MarFileEntry(root_path, filename)
 
     """ FIXME makes direct shell calls, need refactoring
     def test_exec_shell_cmd(self):
         mkup.exec_shell_cmd('echo test')
 
@@ -112,40 +130,43 @@ class TestMakeIncrementalUpdates(unittes
         mkup.copy_file('src_file_abs_path', 'dst_file_abs_path')
 
     def test_bzip_file(self):
         mkup.bzip_file('filename')
 
     def test_bunzip_file(self):
         mkup.bunzip_file('filename')
 
-    def test_extract_mar(self): 
+    def test_extract_mar(self):
         mkup.extract_mar('filename', 'work_dir')
 
     def test_create_partial_patch_for_file(self):
         mkup.create_partial_patch_for_file('from_marfile_entry', 'to_marfile_entry', 'shas', self.patch_info)
 
-    def test_create_add_patch_for_file(self):           
+    def test_create_add_patch_for_file(self):
         mkup.create_add_patch_for_file('to_marfile_entry', self.patch_info)
 
-    def test_process_explicit_remove_files(self): 
+    def test_process_explicit_remove_files(self):
         mkup.process_explicit_remove_files('dir_path', self.patch_info)
 
     def test_create_partial_patch(self):
         mkup.create_partial_patch('from_dir_path', 'to_dir_path', 'patch_filename', 'shas', self.patch_info, 'forced_updates')
 
     def test_create_partial_patches(patches):
         mkup.create_partial_patches('patches')
 
     """
 
     """ FIXME touches the filesystem, need refactoring
     def test_get_buildid(self):
         mkup.get_buildid('work_dir', 'platform')
     """
 
     def test_decode_filename(self):
-        expected = {'locale': 'lang', 'platform': 'platform', 'product': 'product', 'version': '1.0', 'type': 'complete'}
-        self.assertEquals(expected, mkup.decode_filename('product-1.0.lang.platform.complete.mar'))
+        expected = {'locale': 'lang', 'platform': 'platform',
+                    'product': 'product', 'version': '1.0', 'type': 'complete'}
+        self.assertEquals(expected, mkup.decode_filename(
+            'product-1.0.lang.platform.complete.mar'))
         self.assertRaises(Exception, mkup.decode_filename, 'fail')
 
+
 if __name__ == '__main__':
     unittest.main()