Bug 1438839 - Fix the remaining flake8 issues by hand r?ahal draft
author Sylvestre Ledru <sledru@mozilla.com>
Thu, 15 Mar 2018 11:34:03 +0100
changeset 767911 2dc2d60128cd754b009e37792a885aba8a4031e2
parent 767910 c1c9f618164eea4b929232cfb105e61519794e45
child 767912 dc4c6b053aa27d7e42c27b5991ba15676f09857e
push id 102736
push user sledru@mozilla.com
push date Thu, 15 Mar 2018 10:37:33 +0000
reviewers ahal
bugs 1438839
milestone 61.0a1
Bug 1438839 - Fix the remaining flake8 issues by hand r?ahal MozReview-Commit-ID: Fv1MZIpCL8Z
tools/docs/mach_commands.py
tools/power/mach_commands.py
tools/profiler/merge-profiles.py
tools/rb/fix_linux_stack.py
tools/rb/fix_macosx_stack.py
tools/rb/fix_stack_using_bpsyms.py
tools/update-packaging/make_incremental_updates.py
tools/update-packaging/test_make_incremental_updates.py
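
A quick way to confirm the hand-fixes stick is to re-run flake8 over exactly the files listed above. The sketch below is not part of the changeset; it shells out to the flake8 command line (assumed to be installed and on PATH) and exits non-zero if any issue remains:

import subprocess
import sys

# The eight files touched by this changeset.
TOUCHED = [
    'tools/docs/mach_commands.py',
    'tools/power/mach_commands.py',
    'tools/profiler/merge-profiles.py',
    'tools/rb/fix_linux_stack.py',
    'tools/rb/fix_macosx_stack.py',
    'tools/rb/fix_stack_using_bpsyms.py',
    'tools/update-packaging/make_incremental_updates.py',
    'tools/update-packaging/test_make_incremental_updates.py',
]

# flake8 exits with a non-zero status when any check fails.
sys.exit(subprocess.call(['flake8'] + TOUCHED))
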
--- a/tools/docs/mach_commands.py
+++ b/tools/docs/mach_commands.py
@@ -1,16 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
-import platform
 import sys
 
 from mach.decorators import (
     Command,
     CommandArgument,
     CommandProvider,
 )
 
@@ -42,17 +41,17 @@ class Documentation(MachCommandBase):
     @CommandArgument('--http', const=':6666', metavar='ADDRESS', nargs='?',
                      help='Serve documentation on an HTTP server, '
                           'e.g. ":6666".')
     @CommandArgument('--upload', action='store_true',
                      help='Upload generated files to S3')
     def build_docs(self, what=None, format=None, outdir=None, auto_open=True,
                    http=None, archive=False, upload=False):
         try:
-            jsdoc = which.which('jsdoc')
+            which.which('jsdoc')
         except which.WhichError:
             return die('jsdoc not found - please install from npm.')
 
         self._activate_virtualenv()
         self.virtualenv_manager.install_pip_requirements(os.path.join(here, 'requirements.txt'))
 
         import sphinx
         import webbrowser
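
This file's diff removes the unused platform import (F401) and drops the unused jsdoc binding (F841): which.which() is still called, purely so that a missing jsdoc raises which.WhichError. A minimal sketch of that pattern, with require_tool as a hypothetical helper (the real command returns die(...) instead of raising):

import which

def require_tool(name):
    # Call which() only for its side effect: it raises WhichError when the
    # tool is missing. Binding the return value to a name that is never read
    # is exactly what flake8 F841 flags.
    try:
        which.which(name)
    except which.WhichError:
        raise RuntimeError('%s not found - please install it.' % name)
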
--- a/tools/power/mach_commands.py
+++ b/tools/power/mach_commands.py
@@ -6,20 +6,17 @@ from __future__ import print_function
 
 from distutils.version import StrictVersion
 
 from mach.decorators import (
     Command,
     CommandArgument,
     CommandProvider,
 )
-from mozbuild.base import (
-    MachCommandBase,
-    MachCommandConditions as conditions,
-)
+from mozbuild.base import MachCommandBase
 
 
 def is_osx_10_10_or_greater(cls):
     import platform
     release = platform.mac_ver()[0]
     return release and StrictVersion(release) >= StrictVersion('10.10')
 
 
@@ -49,17 +46,17 @@ class MachCommands(MachCommandBase):
         interval = str(interval)
 
         # Run a trivial command with |sudo| to gain temporary root privileges
         # before |rapl| and |powermetrics| are called. This ensures that |rapl|
         # doesn't start measuring while |powermetrics| is waiting for the root
         # password to be entered.
         try:
             subprocess.check_call(['sudo', 'true'])
-        except:
+        except Exception:
             print('\nsudo failed; aborting')
             return 1
 
         # This runs rapl in the background because nothing in this script
         # depends on the output. This is good because we want |rapl| and
         # |powermetrics| to run at the same time.
         subprocess.Popen([rapl, '-n', '1', '-i', interval])
 
--- a/tools/profiler/merge-profiles.py
+++ b/tools/profiler/merge-profiles.py
@@ -1,19 +1,20 @@
-#!/usr/bin/env python 
+#!/usr/bin/env python
 #
 # This script takes b2g process profiles and merges them into a single profile.
 # The meta data is taken from the first profile. The startTime for each profile
 # is used to synchronize the samples. Each thread is moved into the merged
 # profile.
 #
 import json
 import re
 import sys
 
+
 def MergeProfiles(files):
     threads = []
     fileData = []
     symTable = dict()
     meta = None
     libs = None
     videoUrl = None
     minStartTime = None
@@ -103,11 +104,8 @@ def MergeProfiles(files):
     json.dump(result, sys.stdout)
 
 
 if len(sys.argv) > 1:
     MergeProfiles(sys.argv[1:])
     sys.exit(0)
 
 print "Usage: merge-profile.py profile_<pid1>_<pname1>.sym profile_<pid2>_<pname2>.sym > merged.sym"
-
-
-
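
The fixes in this file are purely mechanical (trailing whitespace on the shebang, E303 blank-line spacing before the def, and stray blank lines at EOF). For context, the merge described by the header comment boils down to the shape below; this is an illustrative stand-in assuming a simplified profile layout with top-level 'meta' and 'threads' keys, not the script's real MergeProfiles, which also rebases sample times onto a common startTime and merges symbol tables:

import json

def merge_profiles(paths):
    merged = None
    for path in paths:
        with open(path) as f:
            profile = json.load(f)
        if merged is None:
            # The first profile supplies the top-level metadata.
            merged = {'meta': profile['meta'], 'threads': []}
        # Every thread from every input ends up in the merged profile.
        merged['threads'].extend(profile.get('threads', []))
    return merged
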
--- a/tools/rb/fix_linux_stack.py
+++ b/tools/rb/fix_linux_stack.py
@@ -7,18 +7,16 @@
 # This script uses addr2line (part of binutils) to post-process the entries
 # produced by NS_FormatCodeAddress(), which on Linux often lack a function
 # name, a file name and a line number.
 
 import subprocess
 import sys
 import re
 import os
-import pty
-import termios
 from StringIO import StringIO
 
 objdump_section_re = re.compile(
     "^ [0-9a-f]* ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}).*")
 
 
 def elf_section(file, section):
     """
@@ -250,17 +248,17 @@ def address_adjustment_for(file):
     return adjustment
 
 
 devnull = open(os.devnull)
 file_stuff = {}
 
 
 def addressToSymbol(file, address):
-    if not file in file_stuff:
+    if file not in file_stuff:
         debug_file = separate_debug_file_for(file) or file
 
         # Start an addr2line process for this file. Note that addr2line
         # sometimes prints error messages, which we want to suppress.
         args = ['/usr/bin/addr2line', '-C', '-f', '-e', debug_file]
         addr2line = subprocess.Popen(args, stdin=subprocess.PIPE,
                                      stdout=subprocess.PIPE,
                                      stderr=devnull)
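
This hunk only drops the unused pty and termios imports (F401) and flips "not file in" to the idiomatic "file not in" (E713); the addr2line pipeline itself is unchanged. As a rough, self-contained sketch of that pipeline, symbolize below is a hypothetical one-shot helper; the real script keeps one long-lived addr2line process per binary and caches it in file_stuff:

import os
import subprocess

def symbolize(binary, address):
    with open(os.devnull, 'w') as devnull:
        # -C demangles, -f prints the function name, -e selects the binary.
        proc = subprocess.Popen(
            ['/usr/bin/addr2line', '-C', '-f', '-e', binary],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=devnull)
        # Python 2 byte-string I/O, matching the script above.
        out, _ = proc.communicate('%x\n' % address)
    # addr2line answers with two lines: the function name, then file:line.
    lines = out.splitlines()
    return lines[0], lines[1]
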
--- a/tools/rb/fix_macosx_stack.py
+++ b/tools/rb/fix_macosx_stack.py
@@ -50,17 +50,17 @@ class unbufferedLineConverter:
 def separate_debug_file_for(file):
     return None
 
 
 address_adjustments = {}
 
 
 def address_adjustment(file):
-    if not file in address_adjustments:
+    if file not in address_adjustments:
         result = None
         otool = subprocess.Popen(["otool", "-l", file], stdout=subprocess.PIPE)
         while True:
             line = otool.stdout.readline()
             if line == "":
                 break
             if line == "  segname __TEXT\n":
                 line = otool.stdout.readline()
@@ -78,17 +78,17 @@ def address_adjustment(file):
     return address_adjustments[file]
 
 
 atoses = {}
 
 
 def addressToSymbol(file, address):
     converter = None
-    if not file in atoses:
+    if file not in atoses:
         debug_file = separate_debug_file_for(file) or file
         converter = unbufferedLineConverter(
             '/usr/bin/xcrun', ['atos', '-arch', 'x86_64', '-o', debug_file])
         atoses[file] = converter
     else:
         converter = atoses[file]
     return converter.convert("0x%X" % address)
 
--- a/tools/rb/fix_stack_using_bpsyms.py
+++ b/tools/rb/fix_stack_using_bpsyms.py
@@ -76,17 +76,17 @@ class SymbolFile:
                 # skip everything else
         # print "Loaded %d functions from symbol file %s" % (len(funcs), os.path.basename(fn))
         self.addrs = sorted(addrs)
         self.funcs = funcs
 
     def addrToSymbol(self, address):
         i = bisect.bisect(self.addrs, address) - 1
         if i > 0:
-            #offset = address - self.addrs[i]
+            # offset = address - self.addrs[i]
             return self.funcs[self.addrs[i]]
         else:
             return ""
 
 
 def findIdForPath(path):
     """Finds the breakpad id for the object file at the given path."""
     # We should always be packaged with a "fileid" executable.
@@ -127,17 +127,17 @@ def guessSymbolFile(full_path, symbolsDi
     return os.path.join(d1, uuid, fn + ".sym")
 
 
 parsedSymbolFiles = {}
 
 
 def getSymbolFile(file, symbolsDir):
     p = None
-    if not file in parsedSymbolFiles:
+    if file not in parsedSymbolFiles:
         symfile = guessSymbolFile(file, symbolsDir)
         if symfile:
             p = SymbolFile(symfile)
         else:
             p = None
         parsedSymbolFiles[file] = p
     else:
         p = parsedSymbolFiles[file]
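
Comment-spacing (E265) and membership-test (E713) fixes aside, the lookup these hunks surround is worth spelling out: SymbolFile keeps a sorted list of function start addresses plus a dict from start address to name, and bisect finds the closest symbol at or below a given address. A tiny standalone illustration of that lookup, using toy data:

import bisect

# Toy symbol table: sorted start addresses plus a name for each start.
starts = [0x1000, 0x1400, 0x2000]
names = {0x1000: 'main', 0x1400: 'helper', 0x2000: 'cleanup'}

def addr_to_symbol(address):
    # bisect() returns the insertion point, so the entry just before it is
    # the largest start address that is <= address.
    i = bisect.bisect(starts, address) - 1
    if i < 0:
        return ''
    return names[starts[i]]

print(addr_to_symbol(0x1410))  # -> 'helper'
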
--- a/tools/update-packaging/make_incremental_updates.py
+++ b/tools/update-packaging/make_incremental_updates.py
@@ -1,23 +1,19 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import os
 import shutil
 import hashlib
-from os.path import join, getsize
-from stat import *
 import re
 import sys
 import getopt
 import time
-import datetime
-import string
 import tempfile
 import io
 
 
 class PatchInfo:
     """ Represents the meta-data associated with a patch
         work_dir = working dir where files are stored for this patch
         archive_files = list of files to include in this patch
@@ -530,17 +526,16 @@ def create_partial_patches(patches):
             to_buildid = get_buildid(work_dir_to)
             to_shasum = hashlib.sha1(open(to_filename, 'rb').read()).hexdigest()
             to_size = str(os.path.getsize(to_filename))
 
             mar_extract_time = time.time()
 
             partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename, shas, PatchInfo(work_dir, [
                                                     'update.manifest', 'updatev2.manifest', 'updatev3.manifest'], []), forced_updates, ['channel-prefs.js', 'update-settings.ini'])
-            partial_buildid = to_buildid
             partial_shasum = hashlib.sha1(open(partial_filename, "rb").read()).hexdigest()
             partial_size = str(os.path.getsize(partial_filename))
 
             metadata.append({
                 'to_filename': os.path.basename(to_filename),
                 'from_filename': os.path.basename(from_filename),
                 'partial_filename': os.path.basename(partial_filename),
                 'to_buildid': to_buildid,
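
Besides dropping the wildcard and unused imports at the top of the file, this hunk removes the never-read partial_buildid binding (F841). The digest/size bookkeeping that stays looks like the sketch below in isolation; file_sha1_and_size is a hypothetical helper that additionally closes the file handle via a context manager:

import hashlib
import os

def file_sha1_and_size(path):
    # SHA-1 of the file contents plus its size as a string, matching the
    # metadata recorded for each generated update archive.
    with open(path, 'rb') as f:
        digest = hashlib.sha1(f.read()).hexdigest()
    return digest, str(os.path.getsize(path))
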
--- a/tools/update-packaging/test_make_incremental_updates.py
+++ b/tools/update-packaging/test_make_incremental_updates.py
@@ -123,26 +123,26 @@ class TestMakeIncrementalUpdates(unittes
         mkup.copy_file('src_file_abs_path', 'dst_file_abs_path')
 
     def test_bzip_file(self):
         mkup.bzip_file('filename')
 
     def test_bunzip_file(self):
         mkup.bunzip_file('filename')
 
-    def test_extract_mar(self): 
+    def test_extract_mar(self):
         mkup.extract_mar('filename', 'work_dir')
 
     def test_create_partial_patch_for_file(self):
         mkup.create_partial_patch_for_file('from_marfile_entry', 'to_marfile_entry', 'shas', self.patch_info)
 
-    def test_create_add_patch_for_file(self):           
+    def test_create_add_patch_for_file(self):
         mkup.create_add_patch_for_file('to_marfile_entry', self.patch_info)
 
-    def test_process_explicit_remove_files(self): 
+    def test_process_explicit_remove_files(self):
         mkup.process_explicit_remove_files('dir_path', self.patch_info)
 
     def test_create_partial_patch(self):
         mkup.create_partial_patch('from_dir_path', 'to_dir_path', 'patch_filename', 'shas', self.patch_info, 'forced_updates')
 
     def test_create_partial_patches(patches):
         mkup.create_partial_patches('patches')