--- a/tools/docs/mach_commands.py
+++ b/tools/docs/mach_commands.py
@@ -171,9 +171,8 @@ class Documentation(MachCommandBase):
if project == 'main':
s3_upload(files)
def die(msg, exit_code=1):
msg = '%s: %s' % (sys.argv[0], msg)
print(msg, file=sys.stderr)
return exit_code
-
--- a/tools/docs/moztreedocs/__init__.py
+++ b/tools/docs/moztreedocs/__init__.py
@@ -53,17 +53,17 @@ class SphinxManager(object):
if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
self.add_python_package_dir(os.path.join(reldir, value))
def add_tree(self, source_dir, dest_dir):
"""Add a directory from where docs should be sourced."""
if dest_dir in self._trees:
raise Exception('%s has already been registered as a destination.'
- % dest_dir)
+ % dest_dir)
self._trees[dest_dir] = source_dir
def add_python_package_dir(self, source_dir):
"""Add a directory containing Python packages.
Added directories will have Python API docs generated automatically.
"""
--- a/tools/jprof/split-profile.py
+++ b/tools/jprof/split-profile.py
@@ -62,35 +62,38 @@ jprof = sys.argv[1]
splitfile = sys.argv[2]
passthrough = sys.argv[3:]
for f in [jprof, splitfile]:
if not os.path.isfile(f):
sys.stderr.write("could not find file: {0}\n".format(f))
sys.exit(1)
+
def read_splits(splitfile):
"""
Read splitfile (each line of which contains a name, a space, and
then a function name to split on), and return a list of pairs
representing exactly that. (Note that the name cannot contain
spaces, but the function name can, and often does.)
"""
def line_to_split(line):
line = line.strip("\r\n")
idx = line.index(" ")
- return (line[0:idx], line[idx+1:])
+ return (line[0:idx], line[idx + 1:])
io = open(splitfile, "r")
result = [line_to_split(line) for line in io]
io.close()
return result
+
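As a quick illustration of the format the docstring above describes, here is a minimal, self-contained sketch (split names and function names invented) of what read_splits returns:

    def _demo_line_to_split(line):
        # same logic as line_to_split above
        line = line.strip("\r\n")
        idx = line.index(" ")
        return (line[0:idx], line[idx + 1:])

    demo = ["layout nsBlockFrame::Reflow(nsPresContext *)\n",
            "js js::RunScript\n"]
    assert [_demo_line_to_split(l) for l in demo] == [
        ("layout", "nsBlockFrame::Reflow(nsPresContext *)"),
        ("js", "js::RunScript")]
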
splits = read_splits(splitfile)
+
def generate_profile(options, destfile):
"""
Run jprof to generate one split of the profile.
"""
args = [jprof] + options + passthrough
print "Generating {0}".format(destfile)
destio = open(destfile, "w")
# jprof expects the "jprof-map" file to be in its current working directory
@@ -100,19 +103,21 @@ def generate_profile(options, destfile):
cwd = os.path.dirname(option)
if cwd is None:
raise StandardError("no jprof-log option given")
process = subprocess.Popen(args, stdout=destio, cwd=cwd)
process.wait()
destio.close()
if process.returncode != 0:
os.remove(destfile)
- sys.stderr.write("Error {0} from command:\n {1}\n".format(process.returncode, " ".join(args)))
+ sys.stderr.write("Error {0} from command:\n {1}\n".format(
+ process.returncode, " ".join(args)))
sys.exit(process.returncode)
+
def output_filename(number, splitname):
"""
Return the filename (absolute path) we should use to output the
profile segment with the given number and splitname. Splitname
should be None for the complete profile and the remainder.
"""
def pad_count(i):
result = str(i)
@@ -122,16 +127,17 @@ def output_filename(number, splitname):
name = pad_count(number)
if splitname is not None:
name += "-" + splitname
return os.path.join(os.path.dirname(splitfile),
"jprof-{0}.html".format(name))
+
# generate the complete profile
generate_profile([], output_filename(0, None))
# generate the listed splits
count = 1
excludes = []
for (splitname, splitfunction) in splits:
generate_profile(excludes + ["-i" + splitfunction],
--- a/tools/power/mach_commands.py
+++ b/tools/power/mach_commands.py
@@ -23,26 +23,27 @@ def is_osx_10_10_or_greater(cls):
return release and StrictVersion(release) >= StrictVersion('10.10')
@CommandProvider
class MachCommands(MachCommandBase):
'''
Get system power consumption and related measurements.
'''
+
def __init__(self, context):
MachCommandBase.__init__(self, context)
@Command('power', category='misc',
- conditions=[is_osx_10_10_or_greater],
- description='Get system power consumption and related measurements for '
- 'all running browsers. Available only on Mac OS X 10.10 and above. '
- 'Requires root access.')
+ conditions=[is_osx_10_10_or_greater],
+ description='Get system power consumption and related measurements for '
+ 'all running browsers. Available only on Mac OS X 10.10 and above. '
+ 'Requires root access.')
@CommandArgument('-i', '--interval', type=int, default=30000,
- help='The sample period, measured in milliseconds. Defaults to 30000.')
+ help='The sample period, measured in milliseconds. Defaults to 30000.')
def power(self, interval):
import os
import re
import subprocess
rapl = os.path.join(self.topobjdir, 'dist', 'bin', 'rapl')
interval = str(interval)
--- a/tools/rb/find_leakers.py
+++ b/tools/rb/find_leakers.py
@@ -7,30 +7,32 @@
# This script processes a `refcount' log and finds out whether any object leaked.
# It simply goes through the log, finds `AddRef' or `Ctor' lines, and then
# checks whether they `Release' or `Dtor'. If not, it reports them as leaks.
# Please see README file in the same directory.
import sys
+
def print_output(allocation, obj_to_class):
'''Formats and prints output.'''
items = []
for obj, count, in allocation.iteritems():
# Adding items to a list, so we can sort them.
items.append((obj, count))
# Sorting by count.
items.sort(key=lambda item: item[1])
for obj, count, in items:
print "{obj} ({count}) @ {class_name}".format(obj=obj,
count=count,
class_name=obj_to_class[obj])
+
def process_log(log_lines):
'''Go through the log lines, and print out the result.
@param log_lines: List of strings.
'''
allocation = {}
class_count = {}
obj_to_class = {}
@@ -44,26 +46,26 @@ def process_log(log_lines):
ignore,
operation,
count,) = log_line.strip('\r\n').split(' ')[:5]
# for AddRef/Release `count' is the refcount,
# for Ctor/Dtor it's the size.
if ((operation == 'AddRef' and count == '1') or
- operation == 'Ctor'):
+ operation == 'Ctor'):
# Examples:
# <nsStringBuffer> 0x01AFD3B8 1 AddRef 1
# <PStreamNotifyParent> 0x08880BD0 8 Ctor (20)
class_count[class_name] = class_count.setdefault(class_name, 0) + 1
allocation[obj] = class_count[class_name]
obj_to_class[obj] = class_name
elif ((operation == 'Release' and count == '0') or
- operation == 'Dtor'):
+ operation == 'Dtor'):
# Examples:
# <nsStringBuffer> 0x01AFD3B8 1 Release 0
# <PStreamNotifyParent> 0x08880BD0 8 Dtor (20)
if obj not in allocation:
print "An object was released that wasn't allocated!",
print obj, "@", class_name
else:
allocation.pop(obj)
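For reference, a sketch of a fabricated refcount-log fragment and the report the logic above would produce for it (class names and addresses invented):

    log = [
        "<nsFoo> 0x01AFD3B8 1 AddRef 1\n",
        "<nsFoo> 0x01AFD3B8 1 Release 0\n",
        "<nsBar> 0x08880BD0 8 Ctor (20)\n",
    ]
    # nsFoo is both AddRef'd and Released, so only the nsBar object is
    # left in `allocation' and gets reported:
    #   0x08880BD0 (1) @ <nsBar>
    process_log(log)
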
@@ -76,25 +78,26 @@ def process_log(log_lines):
def print_usage():
print
print "Usage: find-leakers.py [log-file]"
print
print "If `log-file' is provided, it will read that as the input log."
print "Otherwise, it will read stdin as the input log."
print
+
def main():
'''Main method of the script.'''
if len(sys.argv) == 1:
# Reading log from stdin.
process_log(sys.stdin.readlines())
elif len(sys.argv) == 2:
# Reading log from file.
with open(sys.argv[1], 'r') as log_file:
log_lines = log_file.readlines()
process_log(log_lines)
else:
print 'ERROR: Invalid number of arguments'
print_usage()
+
if __name__ == '__main__':
main()
-
--- a/tools/rb/fix_linux_stack.py
+++ b/tools/rb/fix_linux_stack.py
@@ -11,17 +11,20 @@
import subprocess
import sys
import re
import os
import pty
import termios
from StringIO import StringIO
-objdump_section_re = re.compile("^ [0-9a-f]* ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}).*")
+objdump_section_re = re.compile(
+ "^ [0-9a-f]* ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}) ([0-9a-f ]{8}).*")
+
+
def elf_section(file, section):
"""
Return the requested ELF section of the file as a str, representing
a sequence of bytes.
"""
# We can read the .gnu_debuglink section using either of:
# objdump -s --section=.gnu_debuglink $file
# readelf -x .gnu_debuglink $file
@@ -39,21 +42,22 @@ def elf_section(file, section):
# Turn hexadecimal dump into the bytes it represents
for line in StringIO(objdump_stdout).readlines():
m = objdump_section_re.match(line)
if m:
for gnum in [0, 1, 2, 3]:
word = m.groups()[gnum]
if word != " ":
for idx in [0, 2, 4, 6]:
- result += chr(int(word[idx:idx+2], 16))
+ result += chr(int(word[idx:idx + 2], 16))
return result
+
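To make the regex concrete, a sketch of how it picks apart one fabricated line of `objdump -s` output (offset, four hex words, then an ASCII column; the values are invented):

    sample = " 0000 04000000 14000000 03000000 474e5500  ............GNU."
    m = objdump_section_re.match(sample)
    assert m.groups() == ("04000000", "14000000", "03000000", "474e5500")
    # each pair of hex digits in a word becomes one byte of the section
    word = m.groups()[0]
    assert "".join(chr(int(word[idx:idx + 2], 16))
                   for idx in [0, 2, 4, 6]) == "\x04\x00\x00\x00"
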
# FIXME: Hard-coded to gdb defaults (works on Fedora and Ubuntu).
-global_debug_dir = '/usr/lib/debug';
+global_debug_dir = '/usr/lib/debug'
endian_re = re.compile("\s*Data:\s+.*(little|big) endian.*$")
# Table of 256 values, per documentation of .gnu_debuglink sections.
gnu_debuglink_crc32_table = [
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
@@ -103,30 +107,32 @@ gnu_debuglink_crc32_table = [
0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
0x2d02ef8d
]
+
def gnu_debuglink_crc32(stream):
# Note that python treats bitwise operators as though integers have
# an infinite number of bits (so negative integers 1-pad out to
# infinity).
crc = 0xffffffff
while True:
# Choose to read in 4096 byte chunks.
bytes = stream.read(4096)
if len(bytes) == 0:
break
for byte in bytes:
crc = gnu_debuglink_crc32_table[(crc ^ ord(byte)) & 0xff] ^ (crc >> 8)
return ~crc & 0xffffffff
+
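A small sanity check for the routine above: the value for the empty stream follows directly from the 0xffffffff seed and the final inversion, and the function is deterministic:

    from StringIO import StringIO
    assert gnu_debuglink_crc32(StringIO("")) == 0
    assert (gnu_debuglink_crc32(StringIO("GNU")) ==
            gnu_debuglink_crc32(StringIO("GNU")))
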
def separate_debug_file_for(file):
"""
Finds a separate file with the debug sections for a binary. Such
files are commonly installed by debug packages on linux distros.
Rules for finding them are documented in:
https://sourceware.org/gdb/current/onlinedocs/gdb/Separate-Debug-Files.html
"""
def have_debug_file(debugfile):
@@ -148,17 +154,17 @@ def separate_debug_file_for(file):
def word32(s):
if type(s) != str or len(s) != 4:
raise StandardError("expected 4 byte string input")
s = list(s)
if endian == "big":
s.reverse()
return sum(map(lambda idx: ord(s[idx]) * (256 ** idx), range(0, 4)))
- buildid = elf_section(file, ".note.gnu.build-id");
+ buildid = elf_section(file, ".note.gnu.build-id")
if buildid is not None:
# The build ID is an ELF note section, so it begins with a
# name size (4), a description size (size of contents), a
# type (3), and the name "GNU\0".
note_header = buildid[0:16]
buildid = buildid[16:]
if word32(note_header[0:4]) != 4 or \
word32(note_header[4:8]) != len(buildid) or \
@@ -166,17 +172,17 @@ def separate_debug_file_for(file):
note_header[12:16] != "GNU\0":
sys.stderr.write("malformed .note.gnu.build-id in " + file + "\n")
else:
buildid = "".join(map(lambda ch: "%02X" % ord(ch), buildid)).lower()
f = os.path.join(global_debug_dir, ".build-id", buildid[0:2], buildid[2:] + ".debug")
if have_debug_file(f):
return f
- debuglink = elf_section(file, ".gnu_debuglink");
+ debuglink = elf_section(file, ".gnu_debuglink")
if debuglink is not None:
# The debuglink section contains a string, ending with a
# null-terminator and then zero to three bytes of padding to fill the
# current 32-bit unit. (This padding is usually null bytes, but
# I've seen null-null-H, on Ubuntu x86_64.) This is followed by
# a 4-byte CRC.
debuglink_name = debuglink[:-4]
null_idx = debuglink_name.find("\0")
@@ -197,19 +203,21 @@ def separate_debug_file_for(file):
if have_debug_file(f):
fio = open(f, mode="r")
file_crc = gnu_debuglink_crc32(fio)
fio.close()
if file_crc == debuglink_crc:
return f
return None
+
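A sketch of the candidate paths the function above probes, per the gdb rules it links to (the build ID, debuglink name, and directories here are invented):

    demo_buildid = "abcdef0123456789"      # hypothetical lowercase hex ID
    demo_debuglink = "libxul.so.debug"     # hypothetical .gnu_debuglink payload
    demo_bindir = "/usr/lib"
    candidates = [
        # via .note.gnu.build-id:
        os.path.join(global_debug_dir, ".build-id",
                     demo_buildid[0:2], demo_buildid[2:] + ".debug"),
        # via .gnu_debuglink:
        os.path.join(demo_bindir, demo_debuglink),
        os.path.join(demo_bindir, ".debug", demo_debuglink),
        os.path.join(global_debug_dir, demo_bindir.lstrip("/"), demo_debuglink),
    ]
    # -> ['/usr/lib/debug/.build-id/ab/cdef0123456789.debug', ...]
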
elf_type_re = re.compile("^\s*Type:\s+(\S+)")
elf_text_section_re = re.compile("^\s*\[\s*\d+\]\s+\.text\s+\w+\s+(\w+)\s+(\w+)\s+")
+
def address_adjustment_for(file):
"""
Return the address adjustment to use for a file.
addr2line wants offsets relative to the base address for shared
libraries, but it wants addresses including the base address offset
for executables. This returns the appropriate address adjustment to
add to an offset within file. See bug 230336.
@@ -231,25 +239,26 @@ def address_adjustment_for(file):
adjustment = 0
readelf = subprocess.Popen(['readelf', '-S', file],
stdout=subprocess.PIPE)
for line in readelf.stdout.readlines():
m = elf_text_section_re.match(line)
if m:
# Subtract the .text section's offset within the
# file from its base address.
- adjustment = int(m.groups()[0], 16) - int(m.groups()[1], 16);
+ adjustment = int(m.groups()[0], 16) - int(m.groups()[1], 16)
break
readelf.terminate()
return adjustment
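Reduced to its essentials, the rule this function appears to implement; a sketch only, assuming shared libraries (ELF type DYN) need no adjustment while executables get the .text address/file-offset delta:

    def _demo_adjustment(elf_type, text_addr, text_offset):
        if elf_type == "DYN":   # shared library: offsets already relative
            return 0
        return text_addr - text_offset  # executable: add the load base

    assert _demo_adjustment("DYN", 0x400400, 0x400) == 0
    assert _demo_adjustment("EXEC", 0x400400, 0x400) == 0x400000
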
devnull = open(os.devnull)
file_stuff = {}
+
def addressToSymbol(file, address):
if not file in file_stuff:
debug_file = separate_debug_file_for(file) or file
# Start an addr2line process for this file. Note that addr2line
# sometimes prints error messages, which we want to suppress.
args = ['/usr/bin/addr2line', '-C', '-f', '-e', debug_file]
addr2line = subprocess.Popen(args, stdin=subprocess.PIPE,
@@ -262,24 +271,26 @@ def addressToSymbol(file, address):
(addr2line, address_adjustment, cache) = file_stuff[file]
if address in cache:
return cache[address]
# For each line of input, addr2line produces two lines of output.
addr2line.stdin.write(hex(int(address, 16) + address_adjustment) + '\n')
addr2line.stdin.flush()
- result = (addr2line.stdout.readline().rstrip("\r\n"), \
+ result = (addr2line.stdout.readline().rstrip("\r\n"),
addr2line.stdout.readline().rstrip("\r\n"))
cache[address] = result
return result
+
# Matches lines produced by NS_FormatCodeAddress().
line_re = re.compile("^(.*#\d+: )(.+)\[(.+) \+(0x[0-9A-Fa-f]+)\](.*)$")
+
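A fabricated NS_FormatCodeAddress() frame and the pieces line_re extracts from it:

    frame = "#01: NS_InvokeByIndex[/usr/lib/libxul.so +0x12f3b0]"
    before, fn, file_, address, after = line_re.match(frame).groups()
    assert fn == "NS_InvokeByIndex"
    assert (file_, address) == ("/usr/lib/libxul.so", "0x12f3b0")
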
def fixSymbols(line):
result = line_re.match(line)
if result is not None:
(before, fn, file, address, after) = result.groups()
if os.path.exists(file) and os.path.isfile(file):
(name, fileline) = addressToSymbol(file, address)
@@ -292,11 +303,12 @@ def fixSymbols(line):
nl = '\n' if line[-1] == '\n' else ''
return "%s%s (%s)%s%s" % (before, name, fileline, after, nl)
else:
sys.stderr.write("Warning: File \"" + file + "\" does not exist.\n")
return line
else:
return line
+
if __name__ == "__main__":
for line in sys.stdin:
sys.stdout.write(fixSymbols(line))
--- a/tools/rb/fix_macosx_stack.py
+++ b/tools/rb/fix_macosx_stack.py
@@ -10,47 +10,55 @@
import subprocess
import sys
import re
import os
import pty
import termios
+
class unbufferedLineConverter:
"""
Wrap a child process that responds to each line of input with one line of
output. Uses pty to trick the child into providing unbuffered output.
"""
- def __init__(self, command, args = []):
+
+ def __init__(self, command, args=[]):
pid, fd = pty.fork()
if pid == 0:
# We're the child. Transfer control to command.
os.execvp(command, [command] + args)
else:
# Disable echoing.
attr = termios.tcgetattr(fd)
attr[3] = attr[3] & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, attr)
# Set up a file()-like interface to the child process
self.r = os.fdopen(fd, "r", 1)
self.w = os.fdopen(os.dup(fd), "w", 1)
+
def convert(self, line):
self.w.write(line + "\n")
return self.r.readline().rstrip("\r\n")
+
@staticmethod
def test():
assert unbufferedLineConverter("rev").convert("123") == "321"
assert unbufferedLineConverter("cut", ["-c3"]).convert("abcde") == "c"
print "Pass"
+
def separate_debug_file_for(file):
return None
+
address_adjustments = {}
+
+
def address_adjustment(file):
if not file in address_adjustments:
result = None
otool = subprocess.Popen(["otool", "-l", file], stdout=subprocess.PIPE)
while True:
line = otool.stdout.readline()
if line == "":
break
@@ -64,43 +72,52 @@ def address_adjustment(file):
if result is None:
raise StandardError("unexpected otool output")
address_adjustments[file] = result
return address_adjustments[file]
+
atoses = {}
+
+
def addressToSymbol(file, address):
converter = None
if not file in atoses:
debug_file = separate_debug_file_for(file) or file
- converter = unbufferedLineConverter('/usr/bin/xcrun', ['atos', '-arch', 'x86_64', '-o', debug_file])
+ converter = unbufferedLineConverter(
+ '/usr/bin/xcrun', ['atos', '-arch', 'x86_64', '-o', debug_file])
atoses[file] = converter
else:
converter = atoses[file]
return converter.convert("0x%X" % address)
+
cxxfilt_proc = None
+
+
def cxxfilt(sym):
if cxxfilt_proc is None:
# --no-strip-underscores because atos already stripped the underscore
globals()["cxxfilt_proc"] = subprocess.Popen(['c++filt',
'--no-strip-underscores',
'--format', 'gnu-v3'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
cxxfilt_proc.stdin.write(sym + "\n")
return cxxfilt_proc.stdout.readline().rstrip("\n")
+
# Matches lines produced by NS_FormatCodeAddress().
line_re = re.compile("^(.*#\d+: )(.+)\[(.+) \+(0x[0-9A-Fa-f]+)\](.*)$")
atos_name_re = re.compile("^(.+) \(in ([^)]+)\) \((.+)\)$")
+
def fixSymbols(line):
result = line_re.match(line)
if result is not None:
(before, fn, file, address, after) = result.groups()
address = int(address, 16)
if os.path.exists(file) and os.path.isfile(file):
address += address_adjustment(file)
@@ -123,11 +140,12 @@ def fixSymbols(line):
nl = '\n' if line[-1] == '\n' else ''
return before + info + after + nl
else:
sys.stderr.write("Warning: File \"" + file + "\" does not exist.\n")
return line
else:
return line
+
if __name__ == "__main__":
for line in sys.stdin:
sys.stdout.write(fixSymbols(line))
--- a/tools/rb/fix_stack_using_bpsyms.py
+++ b/tools/rb/fix_stack_using_bpsyms.py
@@ -13,151 +13,163 @@ from __future__ import with_statement
import sys
import os
import re
import subprocess
import bisect
here = os.path.dirname(__file__)
+
def prettyFileName(name):
- if name.startswith("../") or name.startswith("..\\"):
- # dom_quickstubs.cpp and many .h files show up with relative paths that are useless
- # and/or don't correspond to the layout of the source tree.
- return os.path.basename(name) + ":"
- elif name.startswith("hg:"):
- bits = name.split(":")
- if len(bits) == 4:
- (junk, repo, path, rev) = bits
- # We could construct an hgweb URL with /file/ or /annotate/, like this:
- # return "http://%s/annotate/%s/%s#l" % (repo, rev, path)
- return path + ":"
- return name + ":"
+ if name.startswith("../") or name.startswith("..\\"):
+ # dom_quickstubs.cpp and many .h files show up with relative paths that are useless
+ # and/or don't correspond to the layout of the source tree.
+ return os.path.basename(name) + ":"
+ elif name.startswith("hg:"):
+ bits = name.split(":")
+ if len(bits) == 4:
+ (junk, repo, path, rev) = bits
+ # We could construct an hgweb URL with /file/ or /annotate/, like this:
+ # return "http://%s/annotate/%s/%s#l" % (repo, rev, path)
+ return path + ":"
+ return name + ":"
+
class SymbolFile:
- def __init__(self, fn):
- addrs = [] # list of addresses, which will be sorted once we're done initializing
- funcs = {} # hash: address --> (function name + possible file/line)
- files = {} # hash: filenum (string) --> prettified filename ready to have a line number appended
- with open(fn) as f:
- for line in f:
- line = line.rstrip()
- # https://chromium.googlesource.com/breakpad/breakpad/+/master/docs/symbol_files.md
- if line.startswith("FUNC "):
- # FUNC <address> <size> <stack_param_size> <name>
- bits = line.split(None, 4)
- if len(bits) < 5:
- bits.append('unnamed_function')
- (junk, rva, size, ss, name) = bits
- rva = int(rva,16)
- funcs[rva] = name
- addrs.append(rva)
- lastFuncName = name
- elif line.startswith("PUBLIC "):
- # PUBLIC <address> <stack_param_size> <name>
- (junk, rva, ss, name) = line.split(None, 3)
- rva = int(rva,16)
- funcs[rva] = name
- addrs.append(rva)
- elif line.startswith("FILE "):
- # FILE <number> <name>
- (junk, filenum, name) = line.split(None, 2)
- files[filenum] = prettyFileName(name)
- elif line[0] in "0123456789abcdef":
- # This is one of the "line records" corresponding to the last FUNC record
- # <address> <size> <line> <filenum>
- (rva, size, line, filenum) = line.split(None)
- rva = int(rva,16)
- file = files[filenum]
- name = lastFuncName + " [" + file + line + "]"
- funcs[rva] = name
- addrs.append(rva)
- # skip everything else
- #print "Loaded %d functions from symbol file %s" % (len(funcs), os.path.basename(fn))
- self.addrs = sorted(addrs)
- self.funcs = funcs
+ def __init__(self, fn):
+ addrs = [] # list of addresses, which will be sorted once we're done initializing
+ funcs = {} # hash: address --> (function name + possible file/line)
+ # hash: filenum (string) --> prettified filename ready to have a line number appended
+ files = {}
+ with open(fn) as f:
+ for line in f:
+ line = line.rstrip()
+ # https://chromium.googlesource.com/breakpad/breakpad/+/master/docs/symbol_files.md
+ if line.startswith("FUNC "):
+ # FUNC <address> <size> <stack_param_size> <name>
+ bits = line.split(None, 4)
+ if len(bits) < 5:
+ bits.append('unnamed_function')
+ (junk, rva, size, ss, name) = bits
+ rva = int(rva, 16)
+ funcs[rva] = name
+ addrs.append(rva)
+ lastFuncName = name
+ elif line.startswith("PUBLIC "):
+ # PUBLIC <address> <stack_param_size> <name>
+ (junk, rva, ss, name) = line.split(None, 3)
+ rva = int(rva, 16)
+ funcs[rva] = name
+ addrs.append(rva)
+ elif line.startswith("FILE "):
+ # FILE <number> <name>
+ (junk, filenum, name) = line.split(None, 2)
+ files[filenum] = prettyFileName(name)
+ elif line[0] in "0123456789abcdef":
+ # This is one of the "line records" corresponding to the last FUNC record
+ # <address> <size> <line> <filenum>
+ (rva, size, line, filenum) = line.split(None)
+ rva = int(rva, 16)
+ file = files[filenum]
+ name = lastFuncName + " [" + file + line + "]"
+ funcs[rva] = name
+ addrs.append(rva)
+ # skip everything else
+ # print "Loaded %d functions from symbol file %s" % (len(funcs), os.path.basename(fn))
+ self.addrs = sorted(addrs)
+ self.funcs = funcs
- def addrToSymbol(self, address):
- i = bisect.bisect(self.addrs, address) - 1
- if i > 0:
- #offset = address - self.addrs[i]
- return self.funcs[self.addrs[i]]
- else:
- return ""
+ def addrToSymbol(self, address):
+ i = bisect.bisect(self.addrs, address) - 1
+ if i > 0:
+ #offset = address - self.addrs[i]
+ return self.funcs[self.addrs[i]]
+ else:
+ return ""
+
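A sketch of the breakpad records SymbolFile consumes, using an invented module; the expected lookups follow from the FUNC and line-record handling above:

    import tempfile
    _sym = ("FILE 0 hg:hg.mozilla.org/mozilla-central:dom/base/nsDocument.cpp:rev\n"
            "FUNC 1000 60 0 nsDocument::Reset()\n"
            "1000 30 100 0\n"
            "PUBLIC 2000 0 _purecall\n")
    fd, path = tempfile.mkstemp(suffix=".sym")
    os.close(fd)
    open(path, "w").write(_sym)
    s = SymbolFile(path)
    assert s.addrToSymbol(0x1010) == \
        "nsDocument::Reset() [dom/base/nsDocument.cpp:100]"
    assert s.addrToSymbol(0x2008) == "_purecall"
    os.remove(path)
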
def findIdForPath(path):
- """Finds the breakpad id for the object file at the given path."""
- # We should always be packaged with a "fileid" executable.
- fileid_exe = os.path.join(here, 'fileid')
- if not os.path.isfile(fileid_exe):
- fileid_exe = fileid_exe + '.exe'
+ """Finds the breakpad id for the object file at the given path."""
+ # We should always be packaged with a "fileid" executable.
+ fileid_exe = os.path.join(here, 'fileid')
if not os.path.isfile(fileid_exe):
- raise Exception("Could not find fileid executable in %s" % here)
+ fileid_exe = fileid_exe + '.exe'
+ if not os.path.isfile(fileid_exe):
+ raise Exception("Could not find fileid executable in %s" % here)
- if not os.path.isfile(path):
- for suffix in ('.exe', '.dll'):
- if os.path.isfile(path + suffix):
- path = path + suffix
- try:
- return subprocess.check_output([fileid_exe, path]).rstrip()
- except subprocess.CalledProcessError as e:
- raise Exception("Error getting fileid for %s: %s" %
- (path, e.output))
+ if not os.path.isfile(path):
+ for suffix in ('.exe', '.dll'):
+ if os.path.isfile(path + suffix):
+ path = path + suffix
+ try:
+ return subprocess.check_output([fileid_exe, path]).rstrip()
+ except subprocess.CalledProcessError as e:
+ raise Exception("Error getting fileid for %s: %s" %
+ (path, e.output))
+
def guessSymbolFile(full_path, symbolsDir):
- """Guess a symbol file based on an object file's basename, ignoring the path and UUID."""
- fn = os.path.basename(full_path)
- d1 = os.path.join(symbolsDir, fn)
- root, _ = os.path.splitext(fn)
- if os.path.exists(os.path.join(symbolsDir, root) + '.pdb'):
- d1 = os.path.join(symbolsDir, root) + '.pdb'
- fn = root
- if not os.path.exists(d1):
- return None
- uuids = os.listdir(d1)
- if len(uuids) == 0:
- raise Exception("Missing symbol file for " + fn)
- if len(uuids) > 1:
- uuid = findIdForPath(full_path)
- else:
- uuid = uuids[0]
- return os.path.join(d1, uuid, fn + ".sym")
+ """Guess a symbol file based on an object file's basename, ignoring the path and UUID."""
+ fn = os.path.basename(full_path)
+ d1 = os.path.join(symbolsDir, fn)
+ root, _ = os.path.splitext(fn)
+ if os.path.exists(os.path.join(symbolsDir, root) + '.pdb'):
+ d1 = os.path.join(symbolsDir, root) + '.pdb'
+ fn = root
+ if not os.path.exists(d1):
+ return None
+ uuids = os.listdir(d1)
+ if len(uuids) == 0:
+ raise Exception("Missing symbol file for " + fn)
+ if len(uuids) > 1:
+ uuid = findIdForPath(full_path)
+ else:
+ uuid = uuids[0]
+ return os.path.join(d1, uuid, fn + ".sym")
+
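For orientation, the store layout guessSymbolFile expects (paths invented): Mac/Linux binaries live at <symbolsDir>/<binary>/<uuid>/<binary>.sym, while Windows binaries are looked up under a .pdb directory with the extension stripped from the .sym name:

    fn = os.path.basename("/builds/app/xul.dll")   # hypothetical binary
    root = os.path.splitext(fn)[0]                 # "xul"
    # with a symbol store at "symbols", the lookup directory would be
    # symbols/xul.pdb and the symbol file <uuid>/xul.sym inside it:
    assert os.path.join("symbols", root) + ".pdb" == \
        os.path.join("symbols", "xul.pdb")
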
parsedSymbolFiles = {}
+
+
def getSymbolFile(file, symbolsDir):
- p = None
- if not file in parsedSymbolFiles:
- symfile = guessSymbolFile(file, symbolsDir)
- if symfile:
- p = SymbolFile(symfile)
+ p = None
+ if not file in parsedSymbolFiles:
+ symfile = guessSymbolFile(file, symbolsDir)
+ if symfile:
+ p = SymbolFile(symfile)
+ else:
+ p = None
+ parsedSymbolFiles[file] = p
else:
- p = None
- parsedSymbolFiles[file] = p
- else:
- p = parsedSymbolFiles[file]
- return p
+ p = parsedSymbolFiles[file]
+ return p
+
def addressToSymbol(file, address, symbolsDir):
- p = getSymbolFile(file, symbolsDir)
- if p:
- return p.addrToSymbol(address)
- else:
- return ""
+ p = getSymbolFile(file, symbolsDir)
+ if p:
+ return p.addrToSymbol(address)
+ else:
+ return ""
+
# Matches lines produced by NS_FormatCodeAddress().
line_re = re.compile("^(.*#\d+: )(.+)\[(.+) \+(0x[0-9A-Fa-f]+)\](.*)$")
+
def fixSymbols(line, symbolsDir):
- result = line_re.match(line)
- if result is not None:
- (before, fn, file, address, after) = result.groups()
- address = int(address, 16)
- symbol = addressToSymbol(file, address, symbolsDir)
- if not symbol:
- symbol = "%s + 0x%x" % (os.path.basename(file), address)
- return before + symbol + after + "\n"
- else:
- return line
+ result = line_re.match(line)
+ if result is not None:
+ (before, fn, file, address, after) = result.groups()
+ address = int(address, 16)
+ symbol = addressToSymbol(file, address, symbolsDir)
+ if not symbol:
+ symbol = "%s + 0x%x" % (os.path.basename(file), address)
+ return before + symbol + after + "\n"
+ else:
+ return line
+
if __name__ == "__main__":
- symbolsDir = sys.argv[1]
- for line in iter(sys.stdin.readline, ''):
- print fixSymbols(line, symbolsDir),
+ symbolsDir = sys.argv[1]
+ for line in iter(sys.stdin.readline, ''):
+ print fixSymbols(line, symbolsDir),
--- a/tools/update-packaging/make_incremental_updates.py
+++ b/tools/update-packaging/make_incremental_updates.py
@@ -11,117 +11,121 @@ import re
import sys
import getopt
import time
import datetime
import string
import tempfile
import io
+
class PatchInfo:
""" Represents the meta-data associated with a patch
work_dir = working dir where files are stored for this patch
archive_files = list of files to include in this patch
manifestv2 = set of manifest version 2 patch instructions
manifestv3 = set of manifest version 3 patch instructions
file_exclusion_list =
files to exclude from this patch. names without slashes will be
excluded anywhere in the directory hiearchy. names with slashes
will only be excluded at that exact path
"""
+
def __init__(self, work_dir, file_exclusion_list, path_exclusion_list):
- self.work_dir=work_dir
- self.archive_files=[]
- self.manifestv2=[]
- self.manifestv3=[]
- self.file_exclusion_list=file_exclusion_list
- self.path_exclusion_list=path_exclusion_list
+ self.work_dir = work_dir
+ self.archive_files = []
+ self.manifestv2 = []
+ self.manifestv3 = []
+ self.file_exclusion_list = file_exclusion_list
+ self.path_exclusion_list = path_exclusion_list
def append_add_instruction(self, filename):
""" Appends an add instruction for this patch.
if filename starts with distribution/extensions/.*/ this will add an
add-if instruction that will add the file if the parent directory
of the file exists. This was ported from
mozilla/tools/update-packaging/common.sh's make_add_instruction.
"""
m = re.match("((?:|.*/)distribution/extensions/.*)/", filename)
if m:
# Directory immediately following extensions is used for the test
testdir = m.group(1)
- print(' add-if "'+testdir+'" "'+filename+'"')
- self.manifestv2.append('add-if "'+testdir+'" "'+filename+'"')
- self.manifestv3.append('add-if "'+testdir+'" "'+filename+'"')
+ print(' add-if "' + testdir + '" "' + filename + '"')
+ self.manifestv2.append('add-if "' + testdir + '" "' + filename + '"')
+ self.manifestv3.append('add-if "' + testdir + '" "' + filename + '"')
else:
- print(' add "'+filename+'"')
- self.manifestv2.append('add "'+filename+'"')
- self.manifestv3.append('add "'+filename+'"')
+ print(' add "' + filename + '"')
+ self.manifestv2.append('add "' + filename + '"')
+ self.manifestv3.append('add "' + filename + '"')
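The regex above keys the add-if handling off the extension's directory; a quick check against an invented extension path:

    m = re.match("((?:|.*/)distribution/extensions/.*)/",
                 "distribution/extensions/my-ext@example.com/install.rdf")
    assert m.group(1) == "distribution/extensions/my-ext@example.com"
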
def append_add_if_not_instruction(self, filename):
""" Appends an add-if-not instruction to the version 3 manifest for this patch.
This was ported from mozilla/tools/update-packaging/common.sh's
make_add_if_not_instruction.
"""
- print(' add-if-not "'+filename+'" "'+filename+'"')
- self.manifestv3.append('add-if-not "'+filename+'" "'+filename+'"')
+ print(' add-if-not "' + filename + '" "' + filename + '"')
+ self.manifestv3.append('add-if-not "' + filename + '" "' + filename + '"')
def append_patch_instruction(self, filename, patchname):
""" Appends a patch instruction for this patch.
filename = file to patch
patchname = patchfile to apply to file
if filename starts with distribution/extensions/.*/ this will add a
patch-if instruction that will patch the file if the parent
directory of the file exists. This was ported from
mozilla/tools/update-packaging/common.sh's make_patch_instruction.
"""
m = re.match("((?:|.*/)distribution/extensions/.*)/", filename)
if m:
testdir = m.group(1)
- print(' patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"')
- self.manifestv2.append('patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"')
- self.manifestv3.append('patch-if "'+testdir+'" "'+patchname+'" "'+filename+'"')
+ print(' patch-if "' + testdir + '" "' + patchname + '" "' + filename + '"')
+ self.manifestv2.append('patch-if "' + testdir + '" "' +
+ patchname + '" "' + filename + '"')
+ self.manifestv3.append('patch-if "' + testdir + '" "' +
+ patchname + '" "' + filename + '"')
else:
- print(' patch "'+patchname+'" "'+filename+'"')
- self.manifestv2.append('patch "'+patchname+'" "'+filename+'"')
- self.manifestv3.append('patch "'+patchname+'" "'+filename+'"')
+ print(' patch "' + patchname + '" "' + filename + '"')
+ self.manifestv2.append('patch "' + patchname + '" "' + filename + '"')
+ self.manifestv3.append('patch "' + patchname + '" "' + filename + '"')
def append_remove_instruction(self, filename):
""" Appends a remove instruction for this patch.
This was ported from
mozilla/tools/update-packaging/common.sh/make_remove_instruction
"""
if filename.endswith("/"):
- print(' rmdir "'+filename+'"')
- self.manifestv2.append('rmdir "'+filename+'"')
- self.manifestv3.append('rmdir "'+filename+'"')
+ print(' rmdir "' + filename + '"')
+ self.manifestv2.append('rmdir "' + filename + '"')
+ self.manifestv3.append('rmdir "' + filename + '"')
elif filename.endswith("/*"):
filename = filename[:-1]
- print(' rmrfdir "'+filename+'"')
- self.manifestv2.append('rmrfdir "'+filename+'"')
- self.manifestv3.append('rmrfdir "'+filename+'"')
+ print(' rmrfdir "' + filename + '"')
+ self.manifestv2.append('rmrfdir "' + filename + '"')
+ self.manifestv3.append('rmrfdir "' + filename + '"')
else:
- print(' remove "'+filename+'"')
- self.manifestv2.append('remove "'+filename+'"')
- self.manifestv3.append('remove "'+filename+'"')
+ print(' remove "' + filename + '"')
+ self.manifestv2.append('remove "' + filename + '"')
+ self.manifestv3.append('remove "' + filename + '"')
def create_manifest_files(self):
""" Create the v2 manifest file in the root of the work_dir """
- manifest_file_path = os.path.join(self.work_dir,"updatev2.manifest")
+ manifest_file_path = os.path.join(self.work_dir, "updatev2.manifest")
manifest_file = open(manifest_file_path, "wb")
manifest_file.writelines(io.BytesIO(b"type \"partial\"\n"))
manifest_file.writelines(io.BytesIO('\n'.join(self.manifestv2).encode('ascii')))
manifest_file.writelines(io.BytesIO(b"\n"))
manifest_file.close()
xz_file(manifest_file_path)
self.archive_files.append('"updatev2.manifest"')
""" Create the v3 manifest file in the root of the work_dir """
- manifest_file_path = os.path.join(self.work_dir,"updatev3.manifest")
+ manifest_file_path = os.path.join(self.work_dir, "updatev3.manifest")
manifest_file = open(manifest_file_path, "wb")
manifest_file.writelines(io.BytesIO(b"type \"partial\"\n"))
manifest_file.writelines(io.BytesIO('\n'.join(self.manifestv3).encode('ascii')))
manifest_file.writelines(io.BytesIO(b"\n"))
manifest_file.close()
xz_file(manifest_file_path)
self.archive_files.append('"updatev3.manifest"')
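Tying the appenders together, a sketch of the body create_manifest_files would write for a hypothetical patch with one conditionally-added, one patched, and one removed file:

    demo_manifestv3 = [
        'add-if-not "channel-prefs.js" "channel-prefs.js"',
        'patch "browser/omni.ja.patch" "browser/omni.ja"',
        'remove "old-file.txt"',
    ]
    demo_body = "type \"partial\"\n" + "\n".join(demo_manifestv3) + "\n"
    assert demo_body.startswith('type "partial"\n')
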
@@ -131,187 +135,197 @@ class PatchInfo:
and directory in that path. Excludes any filenames in the file_exclusion_list
"""
mar_entry_hash = {}
filename_set = set()
dirname_set = set()
for root, dirs, files in os.walk(root_path):
for name in files:
# filename is the relative path from root directory
- partial_path = root[len(root_path)+1:]
+ partial_path = root[len(root_path) + 1:]
if name not in self.file_exclusion_list:
filename = os.path.join(partial_path, name)
- if "/"+filename not in self.path_exclusion_list:
- mar_entry_hash[filename]=MarFileEntry(root_path, filename)
+ if "/" + filename not in self.path_exclusion_list:
+ mar_entry_hash[filename] = MarFileEntry(root_path, filename)
filename_set.add(filename)
for name in dirs:
# dirname is the relative path from root directory
- partial_path = root[len(root_path)+1:]
+ partial_path = root[len(root_path) + 1:]
if name not in self.file_exclusion_list:
dirname = os.path.join(partial_path, name)
- if "/"+dirname not in self.path_exclusion_list:
- dirname = dirname+"/"
- mar_entry_hash[dirname]=MarFileEntry(root_path, dirname)
+ if "/" + dirname not in self.path_exclusion_list:
+ dirname = dirname + "/"
+ mar_entry_hash[dirname] = MarFileEntry(root_path, dirname)
dirname_set.add(dirname)
return mar_entry_hash, filename_set, dirname_set
class MarFileEntry:
"""Represents a file inside a Mozilla Archive Format (MAR)
abs_path = abspath to the file
name = relative path within the mar. e.g.
foo.mar/dir/bar.txt extracted into /tmp/foo:
abs_path=/tmp/foo/dir/bar.txt
name = dir/bar.txt
"""
+
def __init__(self, root, name):
"""root = path to the top of the mar
name = relative path within the mar"""
- self.name=name.replace("\\", "/")
- self.abs_path=os.path.join(root,name)
- self.sha_cache=None
+ self.name = name.replace("\\", "/")
+ self.abs_path = os.path.join(root, name)
+ self.sha_cache = None
def __str__(self):
- return 'Name: %s FullPath: %s' %(self.name,self.abs_path)
+ return 'Name: %s FullPath: %s' % (self.name, self.abs_path)
def calc_file_sha_digest(self, filename):
""" Returns sha digest of given filename"""
file_content = open(filename, 'rb').read()
return hashlib.sha1(file_content).digest()
def sha(self):
""" Returns sha digest of file represented by this _marfile_entry"""
if not self.sha_cache:
- self.sha_cache=self.calc_file_sha_digest(self.abs_path)
+ self.sha_cache = self.calc_file_sha_digest(self.abs_path)
return self.sha_cache
+
def exec_shell_cmd(cmd):
"""Execs shell cmd and raises an exception if the cmd fails"""
if (os.system(cmd)):
- raise Exception("cmd failed "+cmd)
+ raise Exception("cmd failed " + cmd)
def copy_file(src_file_abs_path, dst_file_abs_path):
""" Copies src to dst creating any parent dirs required in dst first """
- dst_file_dir=os.path.dirname(dst_file_abs_path)
+ dst_file_dir = os.path.dirname(dst_file_abs_path)
if not os.path.exists(dst_file_dir):
- os.makedirs(dst_file_dir)
+ os.makedirs(dst_file_dir)
# Copy the file over
shutil.copy2(src_file_abs_path, dst_file_abs_path)
+
def xz_file(filename):
""" XZ compresses the file in place. The original file is replaced with the xz compressed version of itself.
Assumes the path is absolute"""
- exec_shell_cmd('xz --compress --x86 --lzma2 --format=xz --check=crc64 "' + filename+'"')
- os.rename(filename+".xz",filename)
+ exec_shell_cmd('xz --compress --x86 --lzma2 --format=xz --check=crc64 "' + filename + '"')
+ os.rename(filename + ".xz", filename)
+
def xzunzip_file(filename):
""" xz decompresses the file in place. The original file is replaced with an xz decompressed version of itself.
It doesn't matter whether the filename ends in .xz or not"""
if not filename.endswith(".xz"):
- os.rename(filename, filename+".xz")
- filename=filename+".xz"
- exec_shell_cmd('xz -d "' + filename+'"')
+ os.rename(filename, filename + ".xz")
+ filename = filename + ".xz"
+ exec_shell_cmd('xz -d "' + filename + '"')
def extract_mar(filename, work_dir):
""" Extracts the marfile into the work_dir.
Assumes work_dir already exists, otherwise this will throw OSError"""
- print("Extracting "+filename+" to "+work_dir)
+ print("Extracting " + filename + " to " + work_dir)
saved_path = os.getcwd()
try:
os.chdir(work_dir)
- exec_shell_cmd("mar -x "+filename)
+ exec_shell_cmd("mar -x " + filename)
finally:
os.chdir(saved_path)
+
def create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info):
""" Creates the partial patch file and manifest entry for the pair of files passed in
"""
- if not (from_marfile_entry.sha(),to_marfile_entry.sha()) in shas:
- print('diffing "'+from_marfile_entry.name+'\"')
- #bunzip to/from
+ if not (from_marfile_entry.sha(), to_marfile_entry.sha()) in shas:
+ print('diffing "' + from_marfile_entry.name + '\"')
+ # xz-decompress to/from
xzunzip_file(from_marfile_entry.abs_path)
xzunzip_file(to_marfile_entry.abs_path)
# The patch file will be created in the working directory with the
# name of the file in the mar + .patch
- patch_file_abs_path = os.path.join(patch_info.work_dir,from_marfile_entry.name+".patch")
- patch_file_dir=os.path.dirname(patch_file_abs_path)
+ patch_file_abs_path = os.path.join(patch_info.work_dir, from_marfile_entry.name + ".patch")
+ patch_file_dir = os.path.dirname(patch_file_abs_path)
if not os.path.exists(patch_file_dir):
os.makedirs(patch_file_dir)
# Create xz compressed patch file
- exec_shell_cmd("mbsdiff "+from_marfile_entry.abs_path+" "+to_marfile_entry.abs_path+" "+patch_file_abs_path)
+ exec_shell_cmd("mbsdiff " + from_marfile_entry.abs_path + " " +
+ to_marfile_entry.abs_path + " " + patch_file_abs_path)
xz_file(patch_file_abs_path)
# Create xz compressed full file
- full_file_abs_path = os.path.join(patch_info.work_dir, to_marfile_entry.name)
+ full_file_abs_path = os.path.join(patch_info.work_dir, to_marfile_entry.name)
shutil.copy2(to_marfile_entry.abs_path, full_file_abs_path)
xz_file(full_file_abs_path)
if os.path.getsize(patch_file_abs_path) < os.path.getsize(full_file_abs_path):
# Patch is smaller than file. Remove the file and add patch to manifest
os.remove(full_file_abs_path)
- file_in_manifest_name = from_marfile_entry.name+".patch"
+ file_in_manifest_name = from_marfile_entry.name + ".patch"
file_in_manifest_abspath = patch_file_abs_path
patch_info.append_patch_instruction(to_marfile_entry.name, file_in_manifest_name)
else:
# File is smaller than patch. Remove the patch and add file to manifest
os.remove(patch_file_abs_path)
file_in_manifest_name = from_marfile_entry.name
file_in_manifest_abspath = full_file_abs_path
patch_info.append_add_instruction(file_in_manifest_name)
- shas[from_marfile_entry.sha(),to_marfile_entry.sha()] = (file_in_manifest_name,file_in_manifest_abspath)
- patch_info.archive_files.append('"'+file_in_manifest_name+'"')
+ shas[from_marfile_entry.sha(), to_marfile_entry.sha()] = (
+ file_in_manifest_name, file_in_manifest_abspath)
+ patch_info.archive_files.append('"' + file_in_manifest_name + '"')
else:
- filename, src_file_abs_path = shas[from_marfile_entry.sha(),to_marfile_entry.sha()]
+ filename, src_file_abs_path = shas[from_marfile_entry.sha(), to_marfile_entry.sha()]
# We've already calculated the patch for this pair of files.
if (filename.endswith(".patch")):
# print "skipping diff: "+from_marfile_entry.name
# Patch was smaller than file - add patch instruction to manifest
- file_in_manifest_name = to_marfile_entry.name+'.patch';
+ file_in_manifest_name = to_marfile_entry.name + '.patch'
patch_info.append_patch_instruction(to_marfile_entry.name, file_in_manifest_name)
else:
# File was smaller than patch - add file to manifest
file_in_manifest_name = to_marfile_entry.name
patch_info.append_add_instruction(file_in_manifest_name)
# Copy the pre-calculated file into our new patch work area
copy_file(src_file_abs_path, os.path.join(patch_info.work_dir, file_in_manifest_name))
- patch_info.archive_files.append('"'+file_in_manifest_name+'"')
+ patch_info.archive_files.append('"' + file_in_manifest_name + '"')
+
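The size decision above, reduced to a sketch (the real code also xz-compresses both candidates first and caches the outcome keyed by the sha pair):

    def _demo_pick(patch_size, full_size):
        # ship whichever artifact is smaller
        if patch_size < full_size:
            return "patch"  # manifest gets a patch/patch-if instruction
        return "add"        # manifest gets an add instruction

    assert _demo_pick(10, 100) == "patch"
    assert _demo_pick(100, 10) == "add"
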
def create_add_patch_for_file(to_marfile_entry, patch_info):
""" Copy the file to the working dir, add the add instruction, and add it to the list of archive files """
copy_file(to_marfile_entry.abs_path, os.path.join(patch_info.work_dir, to_marfile_entry.name))
patch_info.append_add_instruction(to_marfile_entry.name)
- patch_info.archive_files.append('"'+to_marfile_entry.name+'"')
+ patch_info.archive_files.append('"' + to_marfile_entry.name + '"')
+
def create_add_if_not_patch_for_file(to_marfile_entry, patch_info):
""" Copy the file to the working dir, add the add-if-not instruction, and add it to the list of archive files """
copy_file(to_marfile_entry.abs_path, os.path.join(patch_info.work_dir, to_marfile_entry.name))
patch_info.append_add_if_not_instruction(to_marfile_entry.name)
- patch_info.archive_files.append('"'+to_marfile_entry.name+'"')
+ patch_info.archive_files.append('"' + to_marfile_entry.name + '"')
+
def process_explicit_remove_files(dir_path, patch_info):
""" Looks for a 'removed-files' file in the dir_path. If found, adds the
removed files listed in that file to the patch_info"""
# Windows and linux have this file at the root of the dir
list_file_path = os.path.join(dir_path, "removed-files")
if not os.path.exists(list_file_path):
list_file_path = os.path.join(dir_path, "Contents/Resources/removed-files")
if (os.path.exists(list_file_path)):
fd, tmppath = tempfile.mkstemp('', 'tmp', os.getcwd())
os.close(fd)
- exec_shell_cmd('xz -k -d --stdout "' + list_file_path + '" > "'+tmppath+'"')
+ exec_shell_cmd('xz -k -d --stdout "' + list_file_path + '" > "' + tmppath + '"')
list_file = open(tmppath)
lines = []
for line in list_file:
lines.append(line.strip())
list_file.close()
os.remove(tmppath)
@@ -319,74 +333,76 @@ def process_explicit_remove_files(dir_pa
for line in lines:
# Exclude any blank and comment lines.
if line and not line.startswith("#"):
# Python on windows uses \ for path separators and the update
# manifests expects / for path separators on all platforms.
line = line.replace("\\", "/")
patch_info.append_remove_instruction(line)
+
def create_partial_patch(from_dir_path, to_dir_path, patch_filename, shas, patch_info, forced_updates, add_if_not_list):
""" Builds a partial patch by comparing the files in from_dir_path to those of to_dir_path"""
# Canonicalize the paths for safety
from_dir_path = os.path.abspath(from_dir_path)
to_dir_path = os.path.abspath(to_dir_path)
# Create a hashtable of the from and to directories
- from_dir_hash,from_file_set,from_dir_set = patch_info.build_marfile_entry_hash(from_dir_path)
- to_dir_hash,to_file_set,to_dir_set = patch_info.build_marfile_entry_hash(to_dir_path)
+ from_dir_hash, from_file_set, from_dir_set = patch_info.build_marfile_entry_hash(from_dir_path)
+ to_dir_hash, to_file_set, to_dir_set = patch_info.build_marfile_entry_hash(to_dir_path)
# Create a list of the forced updates
forced_list = forced_updates.strip().split('|')
# Require that the precomplete file is included in the complete update
if "precomplete" in to_file_set:
forced_list.append("precomplete")
elif "Contents/Resources/precomplete" in to_file_set:
forced_list.append("Contents/Resources/precomplete")
# The check with \ file separators allows tests for Mac to run on Windows
elif "Contents\Resources\precomplete" in to_file_set:
forced_list.append("Contents\Resources\precomplete")
else:
- raise Exception("missing precomplete file in: "+to_dir_path)
+ raise Exception("missing precomplete file in: " + to_dir_path)
if "removed-files" in to_file_set:
forced_list.append("removed-files")
elif "Contents/Resources/removed-files" in to_file_set:
forced_list.append("Contents/Resources/removed-files")
# The check with \ file separators allows tests for Mac to run on Windows
elif "Contents\Resources\\removed-files" in to_file_set:
forced_list.append("Contents\Resources\\removed-files")
else:
- raise Exception("missing removed-files file in: "+to_dir_path)
+ raise Exception("missing removed-files file in: " + to_dir_path)
if "chrome.manifest" in to_file_set:
forced_list.append("chrome.manifest")
elif "Contents/Resources/chrome.manifest" in to_file_set:
forced_list.append("Contents/Resources/chrome.manifest")
# The check with \ file separators allows tests for Mac to run on Windows
elif "Contents\Resources\\chrome.manifest" in to_file_set:
forced_list.append("Contents\Resources\\chrome.manifest")
else:
- raise Exception("missing chrome.manifest file in: "+to_dir_path)
+ raise Exception("missing chrome.manifest file in: " + to_dir_path)
# Files which exist in both sets need to be patched
patch_filenames = list(from_file_set.intersection(to_file_set))
patch_filenames.sort(reverse=True)
for filename in patch_filenames:
from_marfile_entry = from_dir_hash[filename]
to_marfile_entry = to_dir_hash[filename]
if os.path.basename(filename) in add_if_not_list:
# This filename is in the add if not list, explicitly add-if-not
create_add_if_not_patch_for_file(to_dir_hash[filename], patch_info)
elif filename in forced_list:
- print('Forcing "'+filename+'"')
+ print('Forcing "' + filename + '"')
# This filename is in the forced list, explicitly add
create_add_patch_for_file(to_dir_hash[filename], patch_info)
else:
- if from_marfile_entry.sha() != to_marfile_entry.sha():
- # Not the same - calculate a patch
- create_partial_patch_for_file(from_marfile_entry, to_marfile_entry, shas, patch_info)
+ if from_marfile_entry.sha() != to_marfile_entry.sha():
+ # Not the same - calculate a patch
+ create_partial_patch_for_file(
+ from_marfile_entry, to_marfile_entry, shas, patch_info)
# files in to_dir not in from_dir need to be added
add_filenames = list(to_file_set - from_file_set)
add_filenames.sort(reverse=True)
for filename in add_filenames:
if os.path.basename(filename) in add_if_not_list:
create_add_if_not_patch_for_file(to_dir_hash[filename], patch_info)
else:
@@ -405,55 +421,59 @@ def create_partial_patch(from_dir_path,
remove_dirnames.sort(reverse=True)
for dirname in remove_dirnames:
patch_info.append_remove_instruction(from_dir_hash[dirname].name)
# Construct the Manifest files
patch_info.create_manifest_files()
# And construct the mar
- mar_cmd = 'mar -C '+patch_info.work_dir+' -c output.mar '+' '.join(patch_info.archive_files)
+ mar_cmd = 'mar -C ' + patch_info.work_dir + \
+ ' -c output.mar ' + ' '.join(patch_info.archive_files)
exec_shell_cmd(mar_cmd)
# Copy mar to final destination
patch_file_dir = os.path.split(patch_filename)[0]
if not os.path.exists(patch_file_dir):
os.makedirs(patch_file_dir)
- shutil.copy2(os.path.join(patch_info.work_dir,"output.mar"), patch_filename)
+ shutil.copy2(os.path.join(patch_info.work_dir, "output.mar"), patch_filename)
return patch_filename
+
def usage():
print("-h for help")
print("-f for patchlist_file")
+
def get_buildid(work_dir):
""" extracts buildid from MAR
"""
ini = '%s/application.ini' % work_dir
if not os.path.exists(ini):
ini = '%s/Contents/Resources/application.ini' % work_dir
if not os.path.exists(ini):
print('WARNING: application.ini not found, cannot find build ID')
return ''
fd, tmppath = tempfile.mkstemp('', 'tmp', os.getcwd())
os.close(fd)
- exec_shell_cmd('xz -k -d --stdout "' + ini + '" > "'+tmppath+'"')
+ exec_shell_cmd('xz -k -d --stdout "' + ini + '" > "' + tmppath + '"')
file = open(tmppath)
for line in file:
if line.find('BuildID') == 0:
file.close()
os.remove(tmppath)
return line.strip().split('=')[1]
print('WARNING: cannot find build ID in application.ini')
file.close()
os.remove(tmppath)
return ''
+
def decode_filename(filepath):
""" Breaks filename/dir structure into component parts based on regex
for example: firefox-3.0b3pre.en-US.linux-i686.complete.mar
Or linux-i686/en-US/firefox-3.0b3.complete.mar
Returns dict with keys product, version, locale, platform, type
"""
try:
m = re.search(
@@ -464,108 +484,113 @@ def decode_filename(filepath):
try:
m = re.search(
'(?P<platform>.+?)\/(?P<locale>.+?)\/(?P<product>\w+)-(?P<version>\w+\.\w+)\.(?P<type>\w+).mar',
filepath)
return m.groupdict()
except:
raise Exception("could not parse filepath %s: %s" % (filepath, exc))
+
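Applying the second pattern above to the docstring's directory-style example (all values come from the docstring):

    m = re.search(
        '(?P<platform>.+?)\/(?P<locale>.+?)\/(?P<product>\w+)-(?P<version>\w+\.\w+)\.(?P<type>\w+).mar',
        'linux-i686/en-US/firefox-3.0b3.complete.mar')
    assert m.groupdict() == {'platform': 'linux-i686', 'locale': 'en-US',
                             'product': 'firefox', 'version': '3.0b3',
                             'type': 'complete'}
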
def create_partial_patches(patches):
""" Given the patches, generates a set of partial patches"""
shas = {}
work_dir_root = None
metadata = []
try:
work_dir_root = tempfile.mkdtemp('-fastmode', 'tmp', os.getcwd())
print("Building patches using work dir: %s" % (work_dir_root))
# Iterate through every patch set in the patch file
patch_num = 1
for patch in patches:
startTime = time.time()
- from_filename,to_filename,patch_filename,forced_updates = patch.split(",")
- from_filename,to_filename,patch_filename = os.path.abspath(from_filename),os.path.abspath(to_filename),os.path.abspath(patch_filename)
+ from_filename, to_filename, patch_filename, forced_updates = patch.split(",")
+ from_filename, to_filename, patch_filename = os.path.abspath(
+ from_filename), os.path.abspath(to_filename), os.path.abspath(patch_filename)
# Each patch iteration uses its own work dir
- work_dir = os.path.join(work_dir_root,str(patch_num))
+ work_dir = os.path.join(work_dir_root, str(patch_num))
os.mkdir(work_dir)
# Extract from mar into from dir
- work_dir_from = os.path.join(work_dir,"from");
+ work_dir_from = os.path.join(work_dir, "from")
os.mkdir(work_dir_from)
- extract_mar(from_filename,work_dir_from)
+ extract_mar(from_filename, work_dir_from)
from_decoded = decode_filename(from_filename)
from_buildid = get_buildid(work_dir_from)
from_shasum = hashlib.sha1(open(from_filename, "rb").read()).hexdigest()
from_size = str(os.path.getsize(from_filename))
# Extract to mar into to dir
- work_dir_to = os.path.join(work_dir,"to")
+ work_dir_to = os.path.join(work_dir, "to")
os.mkdir(work_dir_to)
extract_mar(to_filename, work_dir_to)
to_decoded = decode_filename(to_filename)
to_buildid = get_buildid(work_dir_to)
to_shasum = hashlib.sha1(open(to_filename, 'rb').read()).hexdigest()
to_size = str(os.path.getsize(to_filename))
mar_extract_time = time.time()
- partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename, shas, PatchInfo(work_dir, ['update.manifest','updatev2.manifest','updatev3.manifest'],[]),forced_updates,['channel-prefs.js','update-settings.ini'])
+ partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename, shas, PatchInfo(work_dir, [
+ 'update.manifest', 'updatev2.manifest', 'updatev3.manifest'], []), forced_updates, ['channel-prefs.js', 'update-settings.ini'])
partial_buildid = to_buildid
partial_shasum = hashlib.sha1(open(partial_filename, "rb").read()).hexdigest()
partial_size = str(os.path.getsize(partial_filename))
metadata.append({
- 'to_filename': os.path.basename(to_filename),
- 'from_filename': os.path.basename(from_filename),
- 'partial_filename': os.path.basename(partial_filename),
- 'to_buildid':to_buildid,
- 'from_buildid':from_buildid,
- 'to_sha1sum':to_shasum,
- 'from_sha1sum':from_shasum,
- 'partial_sha1sum':partial_shasum,
- 'to_size':to_size,
- 'from_size':from_size,
- 'partial_size':partial_size,
- 'to_version':to_decoded['version'],
- 'from_version':from_decoded['version'],
- 'locale':from_decoded['locale'],
- 'platform':from_decoded['platform'],
+ 'to_filename': os.path.basename(to_filename),
+ 'from_filename': os.path.basename(from_filename),
+ 'partial_filename': os.path.basename(partial_filename),
+ 'to_buildid': to_buildid,
+ 'from_buildid': from_buildid,
+ 'to_sha1sum': to_shasum,
+ 'from_sha1sum': from_shasum,
+ 'partial_sha1sum': partial_shasum,
+ 'to_size': to_size,
+ 'from_size': from_size,
+ 'partial_size': partial_size,
+ 'to_version': to_decoded['version'],
+ 'from_version': from_decoded['version'],
+ 'locale': from_decoded['locale'],
+ 'platform': from_decoded['platform'],
})
- print("done with patch %s/%s time (%.2fs/%.2fs/%.2fs) (mar/patch/total)" % (str(patch_num),str(len(patches)),mar_extract_time-startTime,time.time()-mar_extract_time,time.time()-startTime))
+ print("done with patch %s/%s time (%.2fs/%.2fs/%.2fs) (mar/patch/total)" % (str(patch_num),
+ str(len(patches)), mar_extract_time - startTime, time.time() - mar_extract_time, time.time() - startTime))
patch_num += 1
return metadata
finally:
# If we fail or get a ctrl-c during run be sure to clean up temp dir
if (work_dir_root and os.path.exists(work_dir_root)):
shutil.rmtree(work_dir_root)
+
def main(argv):
patchlist_file = None
try:
- opts, args = getopt.getopt(argv, "hf:", ["help", "patchlist_file="])
- for opt, arg in opts:
+ opts, args = getopt.getopt(argv, "hf:", ["help", "patchlist_file="])
+ for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt in ("-f", "--patchlist_file"):
patchlist_file = arg
except getopt.GetoptError:
- usage()
- sys.exit(2)
+ usage()
+ sys.exit(2)
if not patchlist_file:
usage()
sys.exit(2)
patches = []
f = open(patchlist_file, 'r')
for line in f.readlines():
patches.append(line)
f.close()
create_partial_patches(patches)
+
if __name__ == "__main__":
main(sys.argv[1:])
-
--- a/tools/update-packaging/test_make_incremental_updates.py
+++ b/tools/update-packaging/test_make_incremental_updates.py
@@ -3,55 +3,62 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import make_incremental_updates as mkup
from make_incremental_updates import PatchInfo, MarFileEntry
+
class TestPatchInfo(unittest.TestCase):
def setUp(self):
self.work_dir = 'work_dir'
- self.file_exclusion_list = ['update.manifest','updatev2.manifest','updatev3.manifest']
+ self.file_exclusion_list = ['update.manifest', 'updatev2.manifest', 'updatev3.manifest']
self.path_exclusion_list = ['/readme.txt']
- self.patch_info = PatchInfo(self.work_dir, self.file_exclusion_list, self.path_exclusion_list)
+ self.patch_info = PatchInfo(
+ self.work_dir, self.file_exclusion_list, self.path_exclusion_list)
def testPatchInfo(self):
self.assertEquals(self.work_dir, self.patch_info.work_dir)
self.assertEquals([], self.patch_info.archive_files)
self.assertEquals([], self.patch_info.manifestv2)
self.assertEquals([], self.patch_info.manifestv3)
self.assertEquals(self.file_exclusion_list, self.patch_info.file_exclusion_list)
self.assertEquals(self.path_exclusion_list, self.patch_info.path_exclusion_list)
def test_append_add_instruction(self):
self.patch_info.append_add_instruction('file.test')
self.assertEquals(['add "file.test"'], self.patch_info.manifestv2)
self.assertEquals(['add "file.test"'], self.patch_info.manifestv3)
def test_append_add_if_instruction(self):
self.patch_info.append_add_instruction('distribution/extensions/extension/file.test')
- self.assertEquals(['add-if "distribution/extensions/extension" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv2)
- self.assertEquals(['add-if "distribution/extensions/extension" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv3)
+ self.assertEquals(
+ ['add-if "distribution/extensions/extension" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv2)
+ self.assertEquals(
+ ['add-if "distribution/extensions/extension" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv3)
def test_append_add_if_not_instruction(self):
self.patch_info.append_add_if_not_instruction('file.test')
self.assertEquals([], self.patch_info.manifestv2)
self.assertEquals(['add-if-not "file.test" "file.test"'], self.patch_info.manifestv3)
def test_append_patch_instruction(self):
self.patch_info.append_patch_instruction('file.test', 'patchname')
self.assertEquals(['patch "patchname" "file.test"'], self.patch_info.manifestv2)
self.assertEquals(['patch "patchname" "file.test"'], self.patch_info.manifestv3)
def test_append_patch_if_instruction(self):
- self.patch_info.append_patch_instruction('distribution/extensions/extension/file.test', 'patchname')
- self.assertEquals(['patch-if "distribution/extensions/extension" "patchname" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv2)
- self.assertEquals(['patch-if "distribution/extensions/extension" "patchname" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv3)
+ self.patch_info.append_patch_instruction(
+ 'distribution/extensions/extension/file.test', 'patchname')
+ self.assertEquals(
+ ['patch-if "distribution/extensions/extension" "patchname" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv2)
+ self.assertEquals(
+ ['patch-if "distribution/extensions/extension" "patchname" "distribution/extensions/extension/file.test"'], self.patch_info.manifestv3)
def test_append_remove_instruction(self):
self.patch_info.append_remove_instruction('file.test')
self.assertEquals(['remove "file.test"'], self.patch_info.manifestv2)
self.assertEquals(['remove "file.test"'], self.patch_info.manifestv3)
def test_append_rmdir_instruction(self):
self.patch_info.append_remove_instruction('dirtest/')
@@ -64,17 +71,19 @@ class TestPatchInfo(unittest.TestCase):
self.assertEquals(['rmrfdir "dirtest/"'], self.patch_info.manifestv3)
""" FIXME touches the filesystem, need refactoring
def test_create_manifest_file(self):
self.patch_info.create_manifest_file()
"""
def test_build_marfile_entry_hash(self):
- self.assertEquals(({}, set([]), set([])), self.patch_info.build_marfile_entry_hash('root_path'))
+ self.assertEquals(({}, set([]), set([])),
+ self.patch_info.build_marfile_entry_hash('root_path'))
+
""" FIXME touches the filesystem, need refactoring
class TestMarFileEntry(unittest.TestCase):
def setUp(self):
root_path = '.'
self.filename = 'file.test'
f = open(self.filename, 'w')
f.write('Test data\n')
@@ -91,20 +100,22 @@ class TestMarFileEntry(unittest.TestCase
def test_sha(self):
f = open('test.sha', 'r')
goodSha = f.read()
f.close()
sha = self.mar_file_entry.sha()
self.assertEquals(goodSha, sha)
"""
+
class TestMakeIncrementalUpdates(unittest.TestCase):
def setUp(self):
work_dir = '.'
- self.patch_info = PatchInfo(work_dir, ['update.manifest','updatev2.manifest','updatev3.manifest'],['/readme.txt'])
+ self.patch_info = PatchInfo(
+ work_dir, ['update.manifest', 'updatev2.manifest', 'updatev3.manifest'], ['/readme.txt'])
root_path = '/'
filename = 'test.file'
self.mar_file_entry = MarFileEntry(root_path, filename)
""" FIXME makes direct shell calls, need refactoring
def test_exec_shell_cmd(self):
mkup.exec_shell_cmd('echo test')
@@ -138,17 +149,19 @@ class TestMakeIncrementalUpdates(unittes
"""
""" FIXME touches the filesystem, need refactoring
def test_get_buildid(self):
mkup.get_buildid('work_dir', 'platform')
"""
def test_decode_filename(self):
- expected = {'locale': 'lang', 'platform': 'platform', 'product': 'product', 'version': '1.0', 'type': 'complete'}
+ expected = {'locale': 'lang', 'platform': 'platform',
+ 'product': 'product', 'version': '1.0', 'type': 'complete'}
self.assertEquals(expected, mkup.decode_filename('product-1.0.lang.platform.complete.mar'))
self.assertEquals(expected, mkup.decode_filename('platform/lang/product-1.0.complete.mar'))
with self.assertRaises(Exception) as cm:
mkup.decode_filename('fail')
self.assertTrue(cm.exception.args[0].startswith('could not parse filepath fail:'))
+
if __name__ == '__main__':
unittest.main()