Bug 1313640 - Add support for allowing duplicate files in multilocale builds. r?gps
author Chris AtLee <catlee@mozilla.com>
Fri, 28 Oct 2016 09:25:33 -0400
changeset 431292 ef239af03b26caf75587db054a23ab7117ee9511
parent 430701 944cb0fd05526894fcd90fbe7d1e625ee53cd73d
child 535389 3a9143da6e509e628534c88267edecb2a75cffcc
push id 34028
push user catlee@mozilla.com
push date Sat, 29 Oct 2016 04:15:03 +0000
reviewers gps
bugs 1313640
milestone 52.0a1
Bug 1313640 - Add support for allowing duplicate files in multilocale builds. r?gps MozReview-Commit-ID: CTmfSRs4JaS
toolkit/mozapps/installer/find-dupes.py
--- a/toolkit/mozapps/installer/find-dupes.py
+++ b/toolkit/mozapps/installer/find-dupes.py
@@ -1,14 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 import sys
 import hashlib
+import re
 from mozpack.packager.unpack import UnpackFinder
 from mozpack.files import DeflatedFile
 from collections import OrderedDict
 
 '''
 Find files duplicated in a given packaged directory, independently of its
 package format.
 '''
@@ -230,32 +231,73 @@ ALLOWED_DUPES = set([
     'modules/commonjs/sdk/ui/state/events.js',
     'plugin-container.app/Contents/PkgInfo',
     'res/table-remove-column-active.gif',
     'res/table-remove-column-hover.gif',
     'res/table-remove-column.gif',
     'res/table-remove-row-active.gif',
     'res/table-remove-row-hover.gif',
     'res/table-remove-row.gif',
+    # For android multilocale
+    'chrome/en-US/locale/branding/brand.dtd',
+    'chrome/en-US/locale/branding/brand.properties',
+    'chrome/en-US/locale/en-US/browser/aboutHealthReport.dtd',
+    'chrome/en-US/locale/en-US/browser/aboutHome.dtd',
+    'chrome/en-US/locale/en-US/browser/checkbox.dtd',
+    'chrome/en-US/locale/en-US/browser/devicePrompt.properties',
+    'chrome/en-US/locale/en-US/browser/overrides/aboutAbout.dtd',
+    'chrome/en-US/locale/en-US/browser/overrides/global.dtd',
+    'chrome/en-US/locale/en-US/browser/overrides/global/mozilla.dtd',
+    'chrome/en-US/locale/en-US/browser/overrides/intl.css',
+    'chrome/en-US/locale/en-US/browser/region.properties',
+    'chrome/en-US/locale/en-US/browser/searchplugins/amazondotcom.xml',
+    'chrome/en-US/locale/en-US/browser/searchplugins/bing.xml',
+    'chrome/en-US/locale/en-US/browser/searchplugins/duckduckgo.xml',
+    'chrome/en-US/locale/en-US/browser/searchplugins/google-nocodes.xml',
+    'chrome/en-US/locale/en-US/browser/searchplugins/google.xml',
+    'chrome/en-US/locale/en-US/browser/searchplugins/qwant.xml',
+    'chrome/en-US/locale/en-US/browser/searchplugins/twitter.xml',
 ])
 
 
 def normalize_osx_path(p):
     '''
     Strips the first 3 elements of an OSX app path
 
     >>> normalize_osx_path('Nightly.app/foo/bar/baz')
     'baz'
     '''
     bits = p.split('/')
     if len(bits) > 3 and bits[0].endswith('.app'):
         return '/'.join(bits[3:])
     return p
 
 
+def normalize_l10n_path(p):
+    '''
+    Normalizes localized paths to en-US
+
+    >>> normalize_l10n_path('chrome/es-ES/locale/branding/brand.properties')
+    'chrome/en-US/locale/branding/brand.properties'
+    >>> normalize_l10n_path('chrome/fr/locale/fr/browser/aboutHome.dtd')
+    'chrome/en-US/locale/en-US/browser/aboutHome.dtd'
+    '''
+    p = re.sub(r'chrome/(\S+)/locale/\1',
+               'chrome/en-US/locale/en-US',
+               p)
+    p = re.sub(r'chrome/(\S+)/locale',
+               'chrome/en-US/locale',
+               p)
+    return p
+
+
+def normalize_path(p):
+    return normalize_osx_path(normalize_l10n_path(p))
+
+
 def find_dupes(source):
     md5s = OrderedDict()
     for p, f in UnpackFinder(source):
         content = f.open().read()
         m = hashlib.md5(content).digest()
         if m not in md5s:
             if isinstance(f, DeflatedFile):
                 compressed = f.file.compressed_size
@@ -273,17 +315,17 @@ def find_dupes(source):
             print 'Duplicates %d bytes%s%s:' % (size,
                   ' (%d compressed)' % compressed if compressed != size else '',
                   ' (%d times)' % (len(paths) - 1) if len(paths) > 2 else '')
             print ''.join('  %s\n' % p for p in paths)
             total += (len(paths) - 1) * size
             total_compressed += (len(paths) - 1) * compressed
             num_dupes += 1
 
-            unexpected_dupes.extend([p for p in paths if normalize_osx_path(p) not in ALLOWED_DUPES])
+            unexpected_dupes.extend([p for p in paths if normalize_path(p) not in ALLOWED_DUPES])
 
     if num_dupes:
         print "WARNING: Found %d duplicated files taking %d bytes (%s)" % \
               (num_dupes, total,
                '%d compressed' % total_compressed if total_compressed != total
                                                   else 'uncompressed')
 
     if unexpected_dupes:
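
For reference, a minimal standalone sketch of the normalization this patch introduces, reproducing the two substitutions from normalize_l10n_path and the bundle-prefix stripping from normalize_osx_path; the sample paths are taken from the doctests above, so localized duplicates collapse onto the en-US entries in ALLOWED_DUPES:

import re


def normalize_l10n_path(p):
    # Rewrite chrome/<locale>/locale/<locale>/... and chrome/<locale>/locale/...
    # to their en-US equivalents, mirroring the patch above.
    p = re.sub(r'chrome/(\S+)/locale/\1', 'chrome/en-US/locale/en-US', p)
    p = re.sub(r'chrome/(\S+)/locale', 'chrome/en-US/locale', p)
    return p


def normalize_osx_path(p):
    # Strip the first three path components of a *.app bundle path,
    # as find-dupes.py already did before this patch.
    bits = p.split('/')
    if len(bits) > 3 and bits[0].endswith('.app'):
        return '/'.join(bits[3:])
    return p


def normalize_path(p):
    return normalize_osx_path(normalize_l10n_path(p))


# Paths from the doctests above:
assert normalize_path('chrome/fr/locale/fr/browser/aboutHome.dtd') == \
    'chrome/en-US/locale/en-US/browser/aboutHome.dtd'
assert normalize_path('chrome/es-ES/locale/branding/brand.properties') == \
    'chrome/en-US/locale/branding/brand.properties'
assert normalize_path('Nightly.app/foo/bar/baz') == 'baz'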