Bug 1344834 - Enable flake8 rule E501 and fix errors; r?Dexter draft
author Erich Stoekl <ems5311@gmail.com>
Fri, 17 Mar 2017 15:08:01 -0700
changeset 500914 ae716e8055302fa2e6097130e7b011eafbc4aabb
parent 500857 23a4b7430dd7e83a2809bf3dc41471f154301eda
child 549748 b2bfca27412c65a8a19ffaa49a0e1773ca331680
push id 49841
push user bmo:ems5311@gmail.com
push date Fri, 17 Mar 2017 22:24:47 +0000
reviewers Dexter
bugs 1344834
milestone 55.0a1
Bug 1344834 - Enable flake8 rule E501 and fix errors; r?Dexter MozReview-Commit-ID: 5oJVET7FVsM
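The wrapped lines below rely on two Python 2 idioms: inside parentheses, adjacent string literals are joined into a single expression, while statement forms such as print and raise SomeError, "..." end at the newline and therefore need an explicit backslash continuation. A minimal sketch with hypothetical values (not code from this patch):

    # Hypothetical values, for illustration only.
    identifier, key = "example.event", "extra_keys"

    # Inside parentheses, adjacent literals are concatenated at parse time,
    # so the wrapped pieces remain a single expression.
    msg = ('New histogram "%s" is not permitted '
           'to have more than 100 buckets.' % identifier)

    # A Python 2 raise statement ends at the newline; the backslash
    # continuation keeps the wrapped string attached to it.
    raise ValueError, "%s: failed check for %s - " \
                      "list should not be empty" % (identifier, key)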
toolkit/components/telemetry/.flake8
toolkit/components/telemetry/gen-histogram-data.py
toolkit/components/telemetry/gen-histogram-enum.py
toolkit/components/telemetry/histogram_tools.py
toolkit/components/telemetry/parse_events.py
toolkit/components/telemetry/parse_scalars.py
toolkit/components/telemetry/shared_telemetry_utils.py
--- a/toolkit/components/telemetry/.flake8
+++ b/toolkit/components/telemetry/.flake8
@@ -1,5 +1,5 @@
 [flake8]
 # See http://pep8.readthedocs.io/en/latest/intro.html#configuration
-ignore = E121, E123, E126, E129, E133, E226, E241, E242, E704, W503, E402, E128, E501, E713, E202, W602, E127, W601
+ignore = E121, E123, E126, E129, E133, E226, E241, E242, E704, W503, E402, E128, E713, E202, W602, E127, W601
 max-line-length = 99
 filename = *.py, +.lint
--- a/toolkit/components/telemetry/gen-histogram-data.py
+++ b/toolkit/components/telemetry/gen-histogram-data.py
@@ -95,17 +95,18 @@ def shared_static_asserts(output, histog
     name = histogram.name()
     low = histogram.low()
     high = histogram.high()
     n_buckets = histogram.n_buckets()
     static_assert(output, "%s < %s" % (low, high), "low >= high for %s" % name)
     static_assert(output, "%s > 2" % n_buckets, "Not enough values for %s" % name)
     static_assert(output, "%s >= 1" % low, "Incorrect low value for %s" % name)
     static_assert(output, "%s > %s" % (high, n_buckets),
-                  "high must be > number of buckets for %s; you may want an enumerated histogram" % name)
+                  "high must be > number of buckets for %s;"
+                  "you may want an enumerated histogram" % name)
 
 
 def static_asserts_for_linear(output, histogram):
     shared_static_asserts(output, histogram)
 
 
 def static_asserts_for_exponential(output, histogram):
     shared_static_asserts(output, histogram)
--- a/toolkit/components/telemetry/gen-histogram-enum.py
+++ b/toolkit/components/telemetry/gen-histogram-enum.py
@@ -73,17 +73,18 @@ def main(output, *filenames):
                 print("#endif", file=output)
 
         if use_counter_group:
             print("  HistogramDUMMY2,", file=output)
             print("  HistogramLastUseCounter = HistogramDUMMY2 - 1,", file=output)
 
     print("  HistogramCount,", file=output)
     if seen_use_counters:
-        print("  HistogramUseCounterCount = HistogramLastUseCounter - HistogramFirstUseCounter + 1", file=output)
+        print("  HistogramUseCounterCount = HistogramLastUseCounter -"
+              "HistogramFirstUseCounter + 1", file=output)
     else:
         print("  HistogramFirstUseCounter = 0,", file=output)
         print("  HistogramLastUseCounter = 0,", file=output)
         print("  HistogramUseCounterCount = 0", file=output)
     print("};", file=output)
 
     # Write categorical label enums.
     categorical = filter(lambda h: h.kind() == "categorical", all_histograms)
@@ -94,15 +95,16 @@ def main(output, *filenames):
         print("};", file=output)
 
     print("\ntemplate<class T> struct IsCategoricalLabelEnum : FalseType {};", file=output)
     for name, _, _ in enums:
         print("template<> struct IsCategoricalLabelEnum<%s> : TrueType {};" % name, file=output)
 
     print("\ntemplate<class T> struct CategoricalLabelId {};", file=output)
     for name, _, id in enums:
-        print("template<> struct CategoricalLabelId<%s> : IntegralConstant<uint32_t, %s> {};" % (name, id), file=output)
+        print("template<> struct CategoricalLabelId<%s> :"
+              "IntegralConstant<uint32_t, %s> {};" % (name, id), file=output)
 
     # Footer.
     print(footer, file=output)
 
 if __name__ == '__main__':
     main(sys.stdout, *sys.argv[1:])
--- a/toolkit/components/telemetry/histogram_tools.py
+++ b/toolkit/components/telemetry/histogram_tools.py
@@ -75,27 +75,30 @@ def exponential_buckets(dmin, dmax, n_bu
     return ret_array
 
 always_allowed_keys = ['kind', 'description', 'cpp_guard', 'expires_in_version',
                        'alert_emails', 'keyed', 'releaseChannelCollection',
                        'bug_numbers']
 
 whitelists = None
 try:
-    whitelist_path = os.path.join(os.path.abspath(os.path.realpath(os.path.dirname(__file__))), 'histogram-whitelists.json')
+    whitelist_path = os.path.join(
+        os.path.abspath(os.path.realpath(os.path.dirname(__file__))),
+        'histogram-whitelists.json')
     with open(whitelist_path, 'r') as f:
         try:
             whitelists = json.load(f)
             for name, whitelist in whitelists.iteritems():
                 whitelists[name] = set(whitelist)
         except ValueError, e:
             raise BaseException, 'error parsing whitelist (%s)' % whitelist_path
 except IOError:
     whitelists = None
-    print 'Unable to parse whitelist (%s). Assuming all histograms are acceptable.' % whitelist_path
+    print 'Unable to parse whitelist (%s). ' \
+          'Assuming all histograms are acceptable.' % whitelist_path
 
 
 class Histogram:
     """A class for representing a histogram definition."""
 
     def __init__(self, name, definition, strict_type_checks=False):
         """Initialize a histogram named name with the given definition.
 definition is a dict-like object that must contain at least the keys:
@@ -315,17 +318,18 @@ associated with the histogram.  Returns 
         # In the pipeline we don't have whitelists available.
         if whitelists is None:
             return
 
         for field in ['alert_emails', 'bug_numbers']:
             if field not in definition and name not in whitelists[field]:
                 raise KeyError, 'New histogram "%s" must have a %s field.' % (name, field)
             if field in definition and name in whitelists[field]:
-                msg = 'Should remove histogram "%s" from the whitelist for "%s" in histogram-whitelists.json'
+                msg = 'Should remove histogram "%s" from the whitelist ' \
+                      'for "%s" in histogram-whitelists.json'
                 raise KeyError, msg % (name, field)
 
     def check_field_types(self, name, definition):
         # Define expected types for the histogram properties.
         type_checked_fields = {
             "n_buckets": int,
             "n_values": int,
             "low": int,
@@ -387,19 +391,23 @@ associated with the histogram.  Returns 
                 raise KeyError, '%s not permitted for %s' % (key, name)
 
     def set_bucket_parameters(self, low, high, n_buckets):
         self._low = low
         self._high = high
         self._n_buckets = n_buckets
         if whitelists is not None and self._n_buckets > 100 and type(self._n_buckets) is int:
             if self._name not in whitelists['n_buckets']:
-                raise KeyError, ('New histogram "%s" is not permitted to have more than 100 buckets. '
-                                'Histograms with large numbers of buckets use disproportionately high amounts of resources. '
-                                'Contact the Telemetry team (e.g. in #telemetry) if you think an exception ought to be made.' % self._name)
+                raise KeyError, ('New histogram "%s" is not permitted '
+                                 'to have more than 100 buckets. '
+                                 'Histograms with large numbers of buckets '
+                                 'use disproportionately high amounts of resources. '
+                                 'Contact the Telemetry team (e.g. '
+                                 'in #telemetry) if you think an '
+                                 'exception ought to be made.' % self._name)
 
     @staticmethod
     def boolean_flag_bucket_parameters(definition):
         return (1, 2, 3)
 
     @staticmethod
     def linear_bucket_parameters(definition):
         return (definition.get('low', 1),
@@ -409,17 +417,18 @@ associated with the histogram.  Returns 
     @staticmethod
     def enumerated_bucket_parameters(definition):
         n_values = definition['n_values']
         return (1, n_values, n_values + 1)
 
     @staticmethod
     def categorical_bucket_parameters(definition):
         # Categorical histograms default to 50 buckets to make working with them easier.
-        # Otherwise when adding labels later we run into problems with the pipeline not supporting bucket changes.
+        # Otherwise when adding labels later we run into problems
+        # with the pipeline not supporting bucket changes.
         # This can be overridden using the n_values field.
         n_values = max(len(definition['labels']),
                        definition.get('n_values', 0),
                        MIN_CATEGORICAL_BUCKET_COUNT)
         return (1, n_values, n_values + 1)
 
     @staticmethod
     def exponential_bucket_parameters(definition):
@@ -509,20 +518,23 @@ the histograms defined in filenames.
     # block.
     use_counter_indices = filter(lambda x: x[1].startswith("USE_COUNTER2_"),
                                  enumerate(all_histograms.iterkeys()))
     if use_counter_indices:
         lower_bound = use_counter_indices[0][0]
         upper_bound = use_counter_indices[-1][0]
         n_counters = upper_bound - lower_bound + 1
         if n_counters != len(use_counter_indices):
-            raise DefinitionException, "use counter histograms must be defined in a contiguous block"
+            raise DefinitionException, "use counter histograms"
+            "must be defined in a contiguous block"
 
-    # Check that histograms that were removed from Histograms.json etc. are also removed from the whitelists.
+    # Check that histograms that were removed from Histograms.json etc.
+    # are also removed from the whitelists.
     if whitelists is not None:
         all_whitelist_entries = itertools.chain.from_iterable(whitelists.itervalues())
         orphaned = set(all_whitelist_entries) - set(all_histograms.keys())
         if len(orphaned) > 0:
-            msg = 'The following entries are orphaned and should be removed from histogram-whitelists.json: %s'
+            msg = 'The following entries are orphaned and should ' \
+                  'be removed from histogram-whitelists.json: %s'
             raise BaseException, msg % (', '.join(sorted(orphaned)))
 
     for (name, definition) in all_histograms.iteritems():
         yield Histogram(name, definition, strict_type_checks=True)
--- a/toolkit/components/telemetry/parse_events.py
+++ b/toolkit/components/telemetry/parse_events.py
@@ -65,37 +65,42 @@ class TypeChecker:
             raise ValueError, "%s: failed type check for %s - expected %s, got %s" %\
                               (identifier, key,
                                nice_type_name(self._kind),
                                nice_type_name(type(value)))
 
         # Check types of values in lists.
         if self._kind is list:
             if len(value) < 1:
-                raise ValueError, "%s: failed check for %s - list should not be empty" % (identifier, key)
+                raise ValueError, "%s: failed check for %s -"
+                "list should not be empty" % (identifier, key)
             for x in value:
                 if not isinstance(x, self._args[0]):
-                    raise ValueError, "%s: failed type check for %s - expected list value type %s, got %s" %\
+                    raise ValueError, "%s: failed type check for %s"
+                    "- expected list value type %s, got %s" %\
                                       (identifier, key,
                                        nice_type_name(self._args[0]),
                                        nice_type_name(type(x)))
 
         # Check types of keys and values in dictionaries.
         elif self._kind is dict:
             if len(value.keys()) < 1:
-                    raise ValueError, "%s: failed check for %s - dict should not be empty" % (identifier, key)
+                    raise ValueError, "%s: failed check for %s"
+                    "- dict should not be empty" % (identifier, key)
             for x in value.iterkeys():
                 if not isinstance(x, self._args[0]):
-                    raise ValueError, "%s: failed dict type check for %s - expected key type %s, got %s" %\
+                    raise ValueError, "%s: failed dict type check for %s -"
+                    "expected key type %s, got %s" %\
                                       (identifier, key,
                                        nice_type_name(self._args[0]),
                                        nice_type_name(type(x)))
             for k, v in value.iteritems():
                 if not isinstance(x, self._args[1]):
-                    raise ValueError, "%s: failed dict type check for %s - expected value type %s for key %s, got %s" %\
+                    raise ValueError, "%s: failed dict type check for %s -"
+                    "expected value type %s for key %s, got %s" %\
                                       (identifier, key,
                                        nice_type_name(self._args[1]),
                                        k,
                                        nice_type_name(type(x)))
 
 
 def type_check_event_fields(identifier, name, definition):
     """Perform a type/schema check on the event definition."""
@@ -172,42 +177,46 @@ class EventData:
         if not rcc in allowed_rcc:
             raise ValueError, "%s: value for %s should be one of: %s" %\
                               (self.identifier, rcc_key, ", ".join(allowed_rcc))
 
         # Check record_in_processes.
         record_in_processes = definition.get('record_in_processes')
         for proc in record_in_processes:
             if not utils.is_valid_process_name(proc):
-                raise ValueError(self.identifier + ': unknown value in record_in_processes: ' + proc)
+                raise ValueError(self.identifier + ': unknown value in '
+                                 'record_in_processes: ' + proc)
 
         # Check extra_keys.
         extra_keys = definition.get('extra_keys', {})
         if len(extra_keys.keys()) > MAX_EXTRA_KEYS_COUNT:
             raise ValueError, "%s: number of extra_keys exceeds limit %d" %\
                               (self.identifier, MAX_EXTRA_KEYS_COUNT)
         for key in extra_keys.iterkeys():
             string_check(self.identifier, field='extra_keys', value=key,
                          min_length=1, max_length=MAX_EXTRA_KEY_NAME_LENGTH,
                          regex=IDENTIFIER_PATTERN)
 
         # Check expiry.
         if not 'expiry_version' in definition and not 'expiry_date' in definition:
-            raise KeyError, "%s: event is missing an expiration - either expiry_version or expiry_date is required" %\
+            raise KeyError, "%s: event is missing an expiration -"
+            "either expiry_version or expiry_date is required" %\
                             (self.identifier)
         expiry_date = definition.get('expiry_date')
         if expiry_date and isinstance(expiry_date, basestring) and expiry_date != 'never':
             if not re.match(DATE_PATTERN, expiry_date):
-                raise ValueError, "%s: event has invalid expiry_date, it should be either 'never' or match this format: %s" %\
+                raise ValueError, "%s: event has invalid expiry_date,"
+                "it should be either 'never' or match this format: %s" %\
                                   (self.identifier, DATE_PATTERN)
             # Parse into date.
             definition['expiry_date'] = datetime.datetime.strptime(expiry_date, '%Y-%m-%d')
 
         # Finish setup.
-        definition['expiry_version'] = utils.add_expiration_postfix(definition.get('expiry_version', 'never'))
+        definition['expiry_version'] = utils.add_expiration_postfix(
+                definition.get('expiry_version', 'never'))
 
     @property
     def category(self):
         return self._category
 
     @property
     def category_cpp(self):
         # Transform e.g. category.example into CategoryExample.
--- a/toolkit/components/telemetry/parse_scalars.py
+++ b/toolkit/components/telemetry/parse_scalars.py
@@ -50,17 +50,18 @@ class ScalarType:
             if len(n) > MAX_NAME_LENGTH:
                 raise ValueError("Name '{}' exceeds maximum name length of {} characters."
                                 .format(n, MAX_NAME_LENGTH))
 
         def check_name(name, error_msg_prefix, allowed_char_regexp):
             # Check if we only have the allowed characters.
             chars_regxp = r'^[a-zA-Z0-9' + allowed_char_regexp + r']+$'
             if not re.search(chars_regxp, name):
-                raise ValueError(error_msg_prefix + " name must be alpha-numeric. Got: '{}'".format(name))
+                raise ValueError(error_msg_prefix + " name must be"
+                    "alpha-numeric. Got: '{}'".format(name))
 
             # Don't allow leading/trailing digits, '.' or '_'.
             if re.search(r'(^[\d\._])|([\d\._])$', name):
                 raise ValueError(error_msg_prefix +
                     " name must not have a leading/trailing digit, a dot or underscore. Got: '{}'"
                     .format(name))
 
         check_name(group_name, 'Group', r'\.')
--- a/toolkit/components/telemetry/shared_telemetry_utils.py
+++ b/toolkit/components/telemetry/shared_telemetry_utils.py
@@ -85,17 +85,18 @@ class StringTable:
                     return "'\\''"
                 else:
                     return "'%s'" % s
             return ", ".join(map(toCChar, string))
 
         f.write("const char %s[] = {\n" % name)
         for (string, offset) in entries:
             if "*/" in string:
-                raise ValueError, "String in string table contains unexpected sequence '*/': %s" % string
+                raise ValueError, "String in string table contains"
+                "unexpected sequence '*/': %s" % string
 
             e = explodeToCharArray(string)
             if e:
                 f.write("  /* %5d - \"%s\" */ %s, '\\0',\n"
                         % (offset, string, explodeToCharArray(string)))
             else:
                 f.write("  /* %5d - \"%s\" */ '\\0',\n" % (offset, string))
         f.write("};\n\n")