--- a/compare_locales/checks.py
+++ b/compare_locales/checks.py
@@ -1,16 +1,20 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import re
from collections import Counter
from difflib import SequenceMatcher
from xml import sax
+import six
+from six.moves import range
+from six.moves import zip
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from fluent.syntax import ast as ftl
from compare_locales.parser import DTDParser, PropertiesEntity, FluentMessage
@@ -128,47 +132,47 @@ class PropertiesChecker(Checker):
return
if lpats - pats:
yield ('error', 0, 'unreplaced variables in l10n',
'plural')
def checkPrintf(self, refSpecs, l10nValue):
try:
l10nSpecs = self.getPrintfSpecs(l10nValue)
- except PrintfException, e:
+ except PrintfException as e:
yield ('error', e.pos, e.msg, 'printf')
return
if refSpecs != l10nSpecs:
sm = SequenceMatcher()
sm.set_seqs(refSpecs, l10nSpecs)
msgs = []
warn = None
for action, i1, i2, j1, j2 in sm.get_opcodes():
if action == 'equal':
continue
if action == 'delete':
# missing argument in l10n
if i2 == len(refSpecs):
# trailing specs missing, that's just a warning
warn = ', '.join('trailing argument %d `%s` missing' %
(i+1, refSpecs[i])
- for i in xrange(i1, i2))
+ for i in range(i1, i2))
else:
- for i in xrange(i1, i2):
+ for i in range(i1, i2):
msgs.append('argument %d `%s` missing' %
(i+1, refSpecs[i]))
continue
if action == 'insert':
# obsolete argument in l10n
- for i in xrange(j1, j2):
+ for i in range(j1, j2):
msgs.append('argument %d `%s` obsolete' %
(i+1, l10nSpecs[i]))
continue
if action == 'replace':
- for i, j in zip(xrange(i1, i2), xrange(j1, j2)):
+ for i, j in zip(range(i1, i2), range(j1, j2)):
msgs.append('argument %d `%s` should be `%s`' %
(j+1, l10nSpecs[j], refSpecs[i]))
if msgs:
yield ('error', 0, ', '.join(msgs), 'printf')
if warn is not None:
yield ('warning', 0, warn, 'printf')
def getPrintfSpecs(self, val):
@@ -285,17 +289,17 @@ class DTDChecker(Checker):
parser.setContentHandler(self.defaulthandler)
try:
parser.parse(StringIO(self.tmpl %
(entities, refValue.encode('utf-8'))))
# also catch stray %
parser.parse(StringIO(self.tmpl %
(refEnt.all.encode('utf-8') + entities,
'&%s;' % refEnt.key.encode('utf-8'))))
- except sax.SAXParseException, e:
+ except sax.SAXParseException as e:
yield ('warning',
(0, 0),
"can't parse en-US value", 'xmlparse')
# find entities the l10nValue references,
# reusing markup from DTDParser.
l10nlist = self.entities_for_value(l10nValue)
missing = sorted(l10nlist - reflist)
@@ -307,17 +311,17 @@ class DTDChecker(Checker):
parser.parse(StringIO(self.tmpl % (_entities,
l10nValue.encode('utf-8'))))
# also catch stray %
# if this fails, we need to substract the entity definition
parser.setContentHandler(self.defaulthandler)
parser.parse(StringIO(self.tmpl % (
l10nEnt.all.encode('utf-8') + _entities,
'&%s;' % l10nEnt.key.encode('utf-8'))))
- except sax.SAXParseException, e:
+ except sax.SAXParseException as e:
# xml parse error, yield error
# sometimes, the error is reported on our fake closing
# element, make that the end of the last line
lnr = e.getLineNumber() - 1
lines = l10nValue.splitlines()
if lnr > len(lines):
lnr = len(lines)
col = len(lines[lnr-1])
@@ -374,17 +378,17 @@ class DTDChecker(Checker):
if s not in refMap:
msgs.insert(0, '%s only in l10n' % s)
continue
else:
ru = refMap.pop(s)
if u != ru:
msgs.append("units for %s don't match "
"(%s != %s)" % (s, u, ru))
- for s in refMap.iterkeys():
+ for s in six.iterkeys(refMap):
msgs.insert(0, '%s only in reference' % s)
if msgs:
yield ('warning', 0, ', '.join(msgs), 'css')
if self.extra_tests is not None and 'android-dtd' in self.extra_tests:
for t in self.processAndroidContent(self.texthandler.textcontent):
yield t
@@ -402,17 +406,17 @@ class DTDChecker(Checker):
need to search for the actual error position in the original string,
as the backslashreplace code changes string positions quite badly.
See also the last check in TestAndroid.test_android_dtd, with a
lengthy chinese string.
"""
val = str.encode('ascii', 'backslashreplace')
try:
val.decode('unicode-escape')
- except UnicodeDecodeError, e:
+ except UnicodeDecodeError as e:
args = list(e.args)
badstring = args[1][args[2]:args[3]]
i = len(args[1][:args[2]].decode('unicode-escape'))
args[2] = i
args[3] = i + len(badstring)
raise UnicodeDecodeError(*args)
def processAndroidContent(self, val):
@@ -421,17 +425,17 @@ class DTDChecker(Checker):
http://developer.android.com/guide/topics/resources/string-resource.html#FormattingAndStyling # noqa
Check for unicode escapes and unescaped quotes and apostrophes,
if string's not quoted.
"""
# first, try to decode unicode escapes
try:
self.unicode_escape(val)
- except UnicodeDecodeError, e:
+ except UnicodeDecodeError as e:
yield ('error', e.args[2], e.args[4], 'android')
# check for unescaped single or double quotes.
# first, see if the complete string is single or double quoted,
# that changes the rules
m = self.quoted.match(val)
if m:
q = m.group('q')
offset = 0
--- a/compare_locales/commands.py
+++ b/compare_locales/commands.py
@@ -1,14 +1,16 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'Commands exposed to commandlines'
+from __future__ import absolute_import
+from __future__ import print_function
import logging
from argparse import ArgumentParser
import os
from compare_locales import version
from compare_locales.paths import EnumerateApp, TOMLParser, ConfigNotFound
from compare_locales.compare import compareProjects, Observer
@@ -148,25 +150,25 @@ data in a json useful for Exhibit
unified_observer = None
if unified:
unified_observer = Observer(quiet=quiet)
observers = compareProjects(
configs,
quiet=quiet,
stat_observer=unified_observer,
merge_stage=merge, clobber_merge=clobber)
- except (OSError, IOError), exc:
- print "FAIL: " + str(exc)
+ except (OSError, IOError) as exc:
+ print("FAIL: " + str(exc))
self.parser.exit(2)
if unified:
observers = [unified_observer]
rv = 0
for observer in observers:
- print observer.serialize(type=data)
+ print(observer.serialize(type=data))
# summary is a dict of lang-summary dicts
# find out if any of our results has errors, return 1 if so
if rv > 0:
continue # we already have errors
for loc, summary in observer.summary.items():
if summary.get('errors', 0) > 0:
rv = 1
# no need to check further summaries, but
--- a/compare_locales/compare.py
+++ b/compare_locales/compare.py
@@ -1,19 +1,24 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'Mozilla l10n compare locales tool'
+from __future__ import absolute_import
+from __future__ import print_function
import codecs
import os
import shutil
import re
from collections import defaultdict
+import six
+from six.moves import map
+from six.moves import zip
from json import dumps
from compare_locales import parser
from compare_locales import paths, mozpath
from compare_locales.checks import getChecker
@@ -34,17 +39,17 @@ class Tree(object):
parts = leaf.split('/')
return self.__get(parts)
def __get(self, parts):
common = None
old = None
new = tuple(parts)
t = self
- for k, v in self.branches.iteritems():
+ for k, v in six.iteritems(self.branches):
for i, part in enumerate(zip(k, parts)):
if part[0] != part[1]:
i -= 1
break
if i < 0:
continue
i += 1
common = tuple(k[:i])
@@ -71,18 +76,17 @@ class Tree(object):
indent = ' '
def getContent(self, depth=0):
'''
Returns iterator of (depth, flag, key_or_value) tuples.
If flag is 'value', key_or_value is a value object, otherwise
(flag is 'key') it's a key string.
'''
- keys = self.branches.keys()
- keys.sort()
+ keys = sorted(self.branches.keys())
if self.value is not None:
yield (depth, 'value', self.value)
for key in keys:
yield (depth, 'key', key)
for child in self.branches[key].getContent(depth + 1):
yield child
def toJSON(self):
@@ -96,17 +100,17 @@ class Tree(object):
for key in self.branches.keys())
def getStrRows(self):
def tostr(t):
if t[1] == 'key':
return self.indent * t[0] + '/'.join(t[2])
return self.indent * (t[0] + 1) + str(t[2])
- return map(tostr, self.getContent())
+ return [tostr(c) for c in self.getContent()]
def __str__(self):
return '\n'.join(self.getStrRows())
class AddRemove(object):
def __init__(self):
self.left = self.right = None
@@ -166,29 +170,29 @@ class Observer(object):
state = dict(summary=self._dictify(self.summary), details=self.details)
if self.file_stats is not None:
state['file_stats'] = self._dictify(self.file_stats)
return state
def __setstate__(self, state):
self.summary = defaultdict(lambda: defaultdict(int))
if 'summary' in state:
- for loc, stats in state['summary'].iteritems():
+ for loc, stats in six.iteritems(state['summary']):
self.summary[loc].update(stats)
self.file_stats = None
if 'file_stats' in state:
self.file_stats = defaultdict(lambda: defaultdict(dict))
- for k, d in state['file_stats'].iteritems():
+ for k, d in six.iteritems(state['file_stats']):
self.file_stats[k].update(d)
self.details = state['details']
self.filter = None
def _dictify(self, d):
plaindict = {}
- for k, v in d.iteritems():
+ for k, v in six.iteritems(d):
plaindict[k] = dict(v)
return plaindict
def toJSON(self):
# Don't export file stats, even if we collected them.
# Those are not part of the data we use toJSON for.
return {
'summary': self._dictify(self.summary),
@@ -199,17 +203,17 @@ class Observer(object):
# in multi-project scenarios, this file might not be ours,
# check that.
# Pass in a dummy entity key '' to avoid getting in to
# generic file filters. If we have stats for those,
# we want to aggregate the counts
if (self.filter is not None and
self.filter(file, entity='') == 'ignore'):
return
- for category, value in stats.iteritems():
+ for category, value in six.iteritems(stats):
self.summary[file.locale][category] += value
if self.file_stats is None:
return
if 'missingInFiles' in stats:
# keep track of how many strings are in a missing file
# we got the {'missingFile': 'error'} from the notify pass
self.details[file].append({'count': stats['missingInFiles']})
# missingInFiles should just be "missing" in file stats
@@ -236,17 +240,17 @@ class Observer(object):
return rv
if category in ('error', 'warning') and self.quiet < 3:
self.details[file].append({category: data})
self.summary[file.locale][category + 's'] += 1
return rv
def toExhibit(self):
items = []
- for locale in sorted(self.summary.iterkeys()):
+ for locale in sorted(six.iterkeys(self.summary), key=lambda k: (k is not None, k)):
summary = self.summary[locale]
if locale is not None:
item = {'id': 'xxx/' + locale,
'label': locale,
'locale': locale}
else:
item = {'id': 'xxx',
'label': 'xxx',
@@ -314,30 +318,31 @@ class Observer(object):
o += [indent + '-' + item['obsoleteEntity']]
elif 'missingFile' in item:
o.append(indent + '// add and localize this file')
elif 'obsoleteFile' in item:
o.append(indent + '// remove this file')
return '\n'.join(o)
out = []
- for locale, summary in sorted(self.summary.iteritems()):
+ for locale, summary in sorted(six.iteritems(self.summary), key=lambda t: (t[0] is not None, t[0])):
if locale is not None:
out.append(locale + ':')
- out += [k + ': ' + str(v) for k, v in sorted(summary.iteritems())]
+ out += [
+ k + ': ' + str(v) for k, v in sorted(six.iteritems(summary))]
total = sum([summary[k]
for k in ['changed', 'unchanged', 'report', 'missing',
'missingInFiles']
if k in summary])
rate = 0
if total:
rate = (('changed' in summary and summary['changed'] * 100) or
0) / total
out.append('%d%% of entries changed' % rate)
- return '\n'.join(map(tostr, self.details.getContent()) + out)
+ return '\n'.join([tostr(c) for c in self.details.getContent()] + out)
def __str__(self):
return 'observer'
class ContentComparer:
keyRE = re.compile('[kK]ey')
nl = re.compile('\n', re.M)
@@ -362,17 +367,17 @@ class ContentComparer:
missing, skips, ctx, capabilities, encoding):
if capabilities == parser.CAN_NONE:
return
if capabilities & parser.CAN_COPY and (skips or missing):
self.create_merge_dir(merge_file)
shutil.copyfile(ref_file.fullpath, merge_file)
- print "copied reference to " + merge_file
+ print("copied reference to " + merge_file)
return
if not (capabilities & parser.CAN_SKIP):
return
# Start with None in case the merge file doesn't need to be created.
f = None
@@ -404,17 +409,17 @@ class ContentComparer:
[ref_entities[ref_map[skip.key]].all for skip in skips
if not isinstance(skip, parser.Junk)])
def ensureNewline(s):
if not s.endswith('\n'):
return s + '\n'
return s
- print "adding to " + merge_file
+ print("adding to " + merge_file)
f.write(''.join(map(ensureNewline, trailing)))
if f is not None:
f.close()
def notify(self, category, file, data):
"""Check observer for the found data, and if it's
not to ignore, notify stat_observers.
@@ -448,25 +453,25 @@ class ContentComparer:
def compare(self, ref_file, l10n, merge_file, extra_tests=None):
try:
p = parser.getParser(ref_file.file)
except UserWarning:
# no comparison, XXX report?
return
try:
p.readContents(ref_file.getContents())
- except Exception, e:
+ except Exception as e:
self.notify('error', ref_file, str(e))
return
ref_entities, ref_map = p.parse()
try:
p.readContents(l10n.getContents())
l10n_entities, l10n_map = p.parse()
l10n_ctx = p.ctx
- except Exception, e:
+ except Exception as e:
self.notify('error', l10n, str(e))
return
ar = AddRemove()
ar.set_left(e.key for e in ref_entities)
ar.set_right(e.key for e in l10n_entities)
report = missing = obsolete = changed = unchanged = keys = 0
missing_w = changed_w = unchanged_w = 0 # word stats
@@ -568,17 +573,17 @@ class ContentComparer:
f = orig
try:
p = parser.getParser(f.file)
except UserWarning:
return
try:
p.readContents(f.getContents())
entities, map = p.parse()
- except Exception, ex:
+ except Exception as ex:
self.notify('error', f, str(ex))
return
# strip parse errors
entities = [e for e in entities if not isinstance(e, parser.Junk)]
self.updateStats(missing, {'missingInFiles': len(entities)})
missing_w = 0
for e in entities:
missing_w += e.count_words()
@@ -623,17 +628,17 @@ def compareProjects(
if merge_stage is not None:
if clobber_merge:
mergematchers = set(_m.get('merge') for _m in files.matchers)
mergematchers.discard(None)
for matcher in mergematchers:
clobberdir = matcher.prefix
if os.path.exists(clobberdir):
shutil.rmtree(clobberdir)
- print "clobbered " + clobberdir
+ print("clobbered " + clobberdir)
for l10npath, refpath, mergepath, extra_tests in files:
# module and file path are needed for legacy filter.py support
module = None
fpath = mozpath.relpath(l10npath, root)
for _m in files.matchers:
if _m['l10n'].match(l10npath):
if _m['module']:
# legacy ini support, set module, and resolve
--- a/compare_locales/merge.py
+++ b/compare_locales/merge.py
@@ -1,16 +1,17 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'Merge resources across channels.'
from collections import OrderedDict, defaultdict
from codecs import encode
+import six
from compare_locales import parser as cl
from compare_locales.compare import AddRemove
class MergeNotSupportedError(ValueError):
pass
@@ -50,17 +51,17 @@ def merge_channels(name, *resources):
# (because a comment's key is its content). In merge_two we'll try to
# de-duplicate comments by looking at the entity they belong to. Set
# up the back-reference from the comment to its entity here.
if isinstance(entity, cl.Entity) and entity.pre_comment:
comments[entity.pre_comment] = entity.key
return (entity.key, entity)
- entities = reduce(
+ entities = six.moves.reduce(
lambda x, y: merge_two(comments, x, y),
map(parse_resource, resources))
return encode(serialize_legacy_resource(entities), parser.encoding)
def merge_two(comments, newer, older):
diff = AddRemove()
@@ -102,14 +103,14 @@ def merge_two(comments, newer, older):
# Prefer the longer whitespace.
if len(entity.all) > len(prev_entity.all):
acc[-1] = (entity, entity)
return acc
acc.append(cur)
return acc
- pruned = reduce(prune, contents, [])
+ pruned = six.moves.reduce(prune, contents, [])
return OrderedDict(pruned)
def serialize_legacy_resource(entities):
return "".join((entity.all for entity in entities.values()))
--- a/compare_locales/parser.py
+++ b/compare_locales/parser.py
@@ -1,27 +1,29 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import re
import bisect
import codecs
from collections import Counter
import logging
try:
from html import unescape as html_unescape
except ImportError:
from HTMLParser import HTMLParser
html_parser = HTMLParser()
html_unescape = html_parser.unescape
from fluent.syntax import FluentParser as FTLParser
from fluent.syntax import ast as ftl
+from six import unichr
__constructors = []
# The allowed capabilities for the Parsers. They define the exact strategy
# used by ContentComparer.merge.
# Don't perform any merging
@@ -240,17 +242,17 @@ class Parser(object):
self.encoding = 'utf-8'
self.ctx = None
self.last_comment = None
def readFile(self, file):
with open(file, 'rU') as f:
try:
self.readContents(f.read())
- except UnicodeDecodeError, e:
+ except UnicodeDecodeError as e:
(logging.getLogger('locales')
.error("Can't read file: " + file + '; ' + str(e)))
def readContents(self, contents):
'''Read contents and create parsing context.
contents are in native encoding, but with normalized line endings.
'''
--- a/compare_locales/paths.py
+++ b/compare_locales/paths.py
@@ -1,21 +1,23 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import os
import re
-from ConfigParser import ConfigParser, NoSectionError, NoOptionError
+from six.moves.configparser import ConfigParser, NoSectionError, NoOptionError
from collections import defaultdict
import errno
import itertools
import logging
from compare_locales import util, mozpath
import pytoml as toml
+import six
class Matcher(object):
'''Path pattern matcher
Supports path matching similar to mozpath.match(), but does
not match trailing file paths without trailing wildcards.
Also gets a prefix, which is the path before the first wildcard,
which is good for filesystem iterations, and allows to replace
@@ -31,17 +33,17 @@ class Matcher(object):
p = re.sub(r'(^|\\\/)\\\*\\\*\\\/', r'\1(.+/)?', p)
p = re.sub(r'(^|\\\/)\\\*\\\*$', r'(\1.+)?', p)
p = p.replace(r'\*', '([^/]*)') + '$'
r = re.escape(pattern)
r = re.sub(r'(^|\\\/)\\\*\\\*\\\/', r'\\\\0', r)
r = re.sub(r'(^|\\\/)\\\*\\\*$', r'\\\\0', r)
r = r.replace(r'\*', r'\\0')
backref = itertools.count(1)
- r = re.sub(r'\\0', lambda m: '\\%s' % backref.next(), r)
+ r = re.sub(r'\\0', lambda m: '\\%s' % next(backref), r)
r = re.sub(r'\\(.)', r'\1', r)
self.prefix = prefix
self.regex = re.compile(p)
self.placable = r
def match(self, path):
'''
True if the given path matches the file pattern.
@@ -240,22 +242,22 @@ class ProjectConfig(object):
assert 'path' in rule
if isinstance(rule['path'], list):
for path in rule['path']:
_rule = rule.copy()
_rule['path'] = self.lazy_expand(path)
for __rule in self._compile_rule(_rule):
yield __rule
return
- if isinstance(rule['path'], basestring):
+ if isinstance(rule['path'], six.string_types):
rule['path'] = self.lazy_expand(rule['path'])
if 'key' not in rule:
yield rule
return
- if not isinstance(rule['key'], basestring):
+ if not isinstance(rule['key'], six.string_types):
for key in rule['key']:
_rule = rule.copy()
_rule['key'] = key
for __rule in self._compile_rule(_rule):
yield __rule
return
rule = rule.copy()
key = rule['key']
@@ -461,17 +463,17 @@ class TOMLParser(object):
if 'reference' in data:
paths['reference'] = self.resolvepath(data['reference'])
self.pc.add_paths(paths)
def processFilters(self):
assert self.data is not None
for data in self.data.get('filters', []):
paths = data['path']
- if isinstance(paths, basestring):
+ if isinstance(paths, six.string_types):
paths = [paths]
# expand if path isn't relative to a variable
paths = [
self.resolvepath(path) if not path.startswith('{')
else path
for path in paths
]
rule = {
@@ -550,17 +552,17 @@ class L10nConfigParser(object):
'''Get the test functions from this ConfigParser and all children.
Only works with synchronous loads, used by compare-locales, which
is local anyway.
'''
filter_path = mozpath.join(mozpath.dirname(self.inipath), 'filter.py')
try:
local = {}
- execfile(filter_path, {}, local)
+ exec(compile(open(filter_path).read(), filter_path, 'exec'), {}, local)
if 'test' in local and callable(local['test']):
filters = [local['test']]
else:
filters = []
except BaseException: # we really want to handle EVERYTHING here
filters = []
for c in self.children:
--- a/compare_locales/tests/__init__.py
+++ b/compare_locales/tests/__init__.py
@@ -1,20 +1,23 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''Mixins for parser tests.
'''
-from itertools import izip_longest
+from __future__ import absolute_import
+
from pkg_resources import resource_string
import re
from compare_locales import parser
+import six
+from six.moves import zip_longest
class ParserTestMixin():
'''Utility methods used by the parser tests.
'''
filename = None
def setUp(self):
@@ -34,17 +37,19 @@ class ParserTestMixin():
def _test(self, content, refs):
'''Helper to test the parser.
Compares the result of parsing content with the given list
of reference keys and values.
'''
self.parser.readContents(content)
entities = list(self.parser.walk())
- for entity, ref in izip_longest(entities, refs):
- self.assertTrue(entity, 'excess reference entity ' + unicode(ref))
- self.assertTrue(ref, 'excess parsed entity ' + unicode(entity))
+ for entity, ref in zip_longest(entities, refs):
+ self.assertTrue(entity,
+ 'excess reference entity ' + six.text_type(ref))
+ self.assertTrue(ref,
+ 'excess parsed entity ' + six.text_type(entity))
if isinstance(entity, parser.Entity):
self.assertEqual(entity.key, ref[0])
self.assertEqual(entity.val, ref[1])
else:
self.assertIsInstance(entity, ref[0])
self.assertIn(ref[1], entity.all)
--- a/compare_locales/tests/test_apps.py
+++ b/compare_locales/tests/test_apps.py
@@ -1,8 +1,9 @@
+from __future__ import absolute_import
import unittest
import os
import tempfile
import shutil
from compare_locales import mozpath
from compare_locales.paths import EnumerateApp, ProjectFiles
--- a/compare_locales/tests/test_checks.py
+++ b/compare_locales/tests/test_checks.py
@@ -1,18 +1,20 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
from compare_locales.checks import getChecker
from compare_locales.parser import getParser, Parser, DTDEntity
from compare_locales.paths import File
+from six.moves import range
class BaseHelper(unittest.TestCase):
file = None
refContent = None
def setUp(self):
p = getParser(self.file.file)
@@ -281,17 +283,17 @@ class TestAndroid(unittest.TestCase):
self.assertEqual(tuple(checker.check(ref, l10n)),
())
# dtd warning
l10n = self.getDTDEntity("plain localized string &ref;")
self.assertEqual(tuple(checker.check(ref, l10n)),
(('warning', (0, 0),
'Referencing unknown entity `ref`', 'xmlparse'),))
# no report on stray ampersand or quote, if not completely quoted
- for i in xrange(3):
+ for i in range(3):
# make sure we're catching unescaped apostrophes,
# try 0..5 backticks
l10n = self.getDTDEntity("\\"*(2*i) + "'")
self.assertEqual(tuple(checker.check(ref, l10n)),
(('error', 2*i, self.apos_msg, 'android'),))
l10n = self.getDTDEntity("\\"*(2*i + 1) + "'")
self.assertEqual(tuple(checker.check(ref, l10n)),
())
--- a/compare_locales/tests/test_compare.py
+++ b/compare_locales/tests/test_compare.py
@@ -1,16 +1,17 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
from compare_locales import compare, paths
-from cPickle import loads, dumps
+from six.moves.cPickle import loads, dumps
class TestTree(unittest.TestCase):
'''Test the Tree utility class
Tree value classes need to be in-place editable
'''
--- a/compare_locales/tests/test_defines.py
+++ b/compare_locales/tests/test_defines.py
@@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
from compare_locales.tests import ParserTestMixin
from compare_locales.parser import (
Comment,
DefinesInstruction,
Junk,
Whitespace,
--- a/compare_locales/tests/test_dtd.py
+++ b/compare_locales/tests/test_dtd.py
@@ -1,15 +1,16 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''Tests for the DTD parser.
'''
+from __future__ import absolute_import
import unittest
import re
from compare_locales import parser
from compare_locales.parser import (
Comment,
Junk,
Whitespace,
--- a/compare_locales/tests/test_ftl.py
+++ b/compare_locales/tests/test_ftl.py
@@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
from compare_locales import parser
from compare_locales.tests import ParserTestMixin
class TestFluentParser(ParserTestMixin, unittest.TestCase):
maxDiff = None
--- a/compare_locales/tests/test_ini.py
+++ b/compare_locales/tests/test_ini.py
@@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
from compare_locales.tests import ParserTestMixin
from compare_locales.parser import (
Comment,
IniSection,
Junk,
Whitespace,
--- a/compare_locales/tests/test_merge.py
+++ b/compare_locales/tests/test_merge.py
@@ -1,12 +1,13 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
import os
from tempfile import mkdtemp
import shutil
from compare_locales.parser import getParser
from compare_locales.paths import File
from compare_locales.compare import ContentComparer, Observer
@@ -89,17 +90,17 @@ eff = effVal""")
{'missingEntity': u'eff'}]
}
})
mergefile = mozpath.join(self.tmp, "merge", "l10n.properties")
self.assertTrue(os.path.isfile(mergefile))
p = getParser(mergefile)
p.readFile(mergefile)
[m, n] = p.parse()
- self.assertEqual(map(lambda e: e.key, m), ["bar", "foo", "eff"])
+ self.assertEqual([e.key for e in m], ["bar", "foo", "eff"])
def testError(self):
self.assertTrue(os.path.isdir(self.tmp))
self.reference("""foo = fooVal
bar = %d barVal
eff = effVal""")
self.localized("""\
bar = %S lBar
@@ -260,17 +261,17 @@ class TestDTD(unittest.TestCase, Content
{'missingEntity': u'eff'}]
}
})
mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
self.assertTrue(os.path.isfile(mergefile))
p = getParser(mergefile)
p.readFile(mergefile)
[m, n] = p.parse()
- self.assertEqual(map(lambda e: e.key, m), ["bar", "foo", "eff"])
+ self.assertEqual([e.key for e in m], ["bar", "foo", "eff"])
def testJunk(self):
self.assertTrue(os.path.isdir(self.tmp))
self.reference("""<!ENTITY foo 'fooVal'>
<!ENTITY bar 'barVal'>
<!ENTITY eff 'effVal'>""")
self.localized("""<!ENTITY foo 'fooVal'>
<!ENTY bar 'gimmick'>
@@ -299,17 +300,17 @@ class TestDTD(unittest.TestCase, Content
{'missingEntity': u'bar'}]
}
})
mergefile = mozpath.join(self.tmp, "merge", "l10n.dtd")
self.assertTrue(os.path.isfile(mergefile))
p = getParser(mergefile)
p.readFile(mergefile)
[m, n] = p.parse()
- self.assertEqual(map(lambda e: e.key, m), ["foo", "eff", "bar"])
+ self.assertEqual([e.key for e in m], ["foo", "eff", "bar"])
def test_reference_junk(self):
self.assertTrue(os.path.isdir(self.tmp))
self.reference("""<!ENTITY foo 'fooVal'>
<!ENT bar 'bad val'>
<!ENTITY eff 'effVal'>""")
self.localized("""<!ENTITY foo 'fooVal'>
<!ENTITY eff 'effVal'>
--- a/compare_locales/tests/test_merge_comments.py
+++ b/compare_locales/tests/test_merge_comments.py
@@ -13,50 +13,50 @@ class TestMergeComments(unittest.TestCas
channels = (b"""
foo = Foo 1
# Bar Comment 1
bar = Bar 1
""", b"""
foo = Foo 2
bar = Bar 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
foo = Foo 1
# Bar Comment 1
bar = Bar 1
""")
def test_comment_still_in_last(self):
channels = (b"""
foo = Foo 1
bar = Bar 1
""", b"""
foo = Foo 2
# Bar Comment 2
bar = Bar 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
foo = Foo 1
# Bar Comment 2
bar = Bar 1
""")
def test_comment_changed(self):
channels = (b"""
foo = Foo 1
# Bar Comment 1
bar = Bar 1
""", b"""
foo = Foo 2
# Bar Comment 2
bar = Bar 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
foo = Foo 1
# Bar Comment 1
bar = Bar 1
""")
class TestMergeStandaloneComments(unittest.TestCase):
@@ -67,17 +67,17 @@ class TestMergeStandaloneComments(unitte
# Standalone Comment 1
# Foo Comment 1
foo = Foo 1
""", b"""
# Foo Comment 2
foo = Foo 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
# Standalone Comment 1
# Foo Comment 1
foo = Foo 1
""")
def test_comment_still_in_last(self):
@@ -85,17 +85,17 @@ foo = Foo 1
# Foo Comment 1
foo = Foo 1
""", b"""
# Standalone Comment 2
# Foo Comment 2
foo = Foo 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
# Standalone Comment 2
# Foo Comment 1
foo = Foo 1
""")
def test_comments_in_both(self):
@@ -105,17 +105,17 @@ foo = Foo 1
# Foo Comment 1
foo = Foo 1
""", b"""
# Standalone Comment 2
# Foo Comment 2
foo = Foo 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
# Standalone Comment 2
# Standalone Comment 1
# Foo Comment 1
foo = Foo 1
""")
@@ -127,17 +127,17 @@ foo = Foo 1
# Foo Comment 1
foo = Foo 1
""", b"""
# Standalone Comment
# Foo Comment 2
foo = Foo 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
# Standalone Comment
# Foo Comment 1
foo = Foo 1
""")
def test_standalone_which_is_attached_in_first(self):
@@ -148,17 +148,17 @@ foo = Foo 1
# Bar Comment 1
bar = Bar 1
""", b"""
# Ambiguous Comment
# Bar Comment 2
bar = Bar 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
# Ambiguous Comment
foo = Foo 1
# Bar Comment 1
bar = Bar 1
""")
@@ -171,16 +171,16 @@ bar = Bar 1
bar = Bar 1
""", b"""
# Ambiguous Comment
foo = Foo 1
# Bar Comment 2
bar = Bar 2
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""
# Ambiguous Comment
foo = Foo 1
# Bar Comment 1
bar = Bar 1
""")
--- a/compare_locales/tests/test_merge_dtd.py
+++ b/compare_locales/tests/test_merge_dtd.py
@@ -58,17 +58,17 @@ class TestMergeDTD(unittest.TestCase):
<!ENTITY mainWindow.title "&brandFullName;">
<!-- LOCALIZATION NOTE (mainWindow.titlemodifier) : DONT_TRANSLATE -->
<!ENTITY mainWindow.titlemodifier "&brandFullName;">
<!-- LOCALIZATION NOTE (mainWindow.privatebrowsing): This will be appended ...
inside the ... -->
<!ENTITY mainWindow.privatebrowsing "(Private Browsing)">
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""\
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<!-- LOCALIZATION NOTE : FILE This file contains the browser main menu ... -->
<!-- LOCALIZATION NOTE : FILE Do not translate commandkeys -->
@@ -111,17 +111,17 @@ class TestMergeDTD(unittest.TestCase):
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
<!ENTITY maintitle "Registered Service Workers">
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
<!ENTITY warning_not_enabled "Service Workers are not enabled.">
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
<!ENTITY warning_no_serviceworkers "No Service Workers registered.">
""")
- self.assertMultiLineEqual(
+ self.assertEqual(
merge_channels(self.name, *channels), b"""\
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
<!ENTITY title "About Service Workers">
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
--- a/compare_locales/tests/test_mozpath.py
+++ b/compare_locales/tests/test_mozpath.py
@@ -1,12 +1,13 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
from compare_locales.mozpath import (
relpath,
join,
normpath,
dirname,
commonprefix,
basename,
split,
--- a/compare_locales/tests/test_parser.py
+++ b/compare_locales/tests/test_parser.py
@@ -1,12 +1,13 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import textwrap
import unittest
from compare_locales import parser
class TestParserContext(unittest.TestCase):
def test_linecol(self):
--- a/compare_locales/tests/test_paths.py
+++ b/compare_locales/tests/test_paths.py
@@ -1,13 +1,14 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
from compare_locales.paths import ProjectConfig, File, ProjectFiles, Matcher
from compare_locales import mozpath
class TestMatcher(unittest.TestCase):
--- a/compare_locales/tests/test_properties.py
+++ b/compare_locales/tests/test_properties.py
@@ -1,15 +1,17 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
+from six.moves import zip
from compare_locales.tests import ParserTestMixin
from compare_locales.parser import (
Comment,
Junk,
Whitespace,
)
--- a/compare_locales/tests/test_util.py
+++ b/compare_locales/tests/test_util.py
@@ -1,12 +1,13 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
import unittest
from compare_locales import util
class ParseLocalesTest(unittest.TestCase):
def test_empty(self):
self.assertEquals(util.parseLocales(''), [])
--- a/setup.py
+++ b/setup.py
@@ -3,16 +3,17 @@
Localization of XUL applications in general and Mozilla applications in
particular are done by a number of different file formats. Independent
of the format, the Mozilla architecture does not provide fallback strings
at runtime. This library and the calling scripts provide a way to check
a given localization for completeness. For more information see
https://developer.mozilla.org/en/docs/Compare-locales
"""
+from __future__ import absolute_import
DOCSTRINGS = __doc__.split("\n")
from setuptools import setup
import sys
import os.path
sys.path.insert(0, os.path.dirname(__file__))
@@ -42,10 +43,11 @@ setup(name="compare-locales",
'compare-locales = compare_locales.commands:CompareLocales.call']},
packages=['compare_locales', 'compare_locales.tests'],
package_data={
'compare_locales.tests': ['data/*.properties', 'data/*.dtd']
},
install_requires=[
'fluent >=0.6.4, <0.7',
'pytoml',
+ 'six',
],
test_suite='compare_locales.tests')