Bug 1436369 Migrate HSTS and HPKP update scripts to docker image r=keeler draft
authorSimon Fraser <sfraser@mozilla.com>
Wed, 21 Feb 2018 11:04:24 +0000
changeset 757893 199e637679035ee654b71da1e78c11ecc5820a91
parent 757892 01fe9c3ea96083c2511cc0ee170de90a5574f787
child 757894 cd765bc3753aabea7f2f229f189bc758773af7c2
child 757896 bd738f539b2fb82219ae10c9f1aa1374847a59ba
push id99868
push usersfraser@mozilla.com
push dateWed, 21 Feb 2018 13:42:36 +0000
reviewerskeeler
bugs1436369
milestone60.0a1
Bug 1436369 Migrate HSTS and HPKP update scripts to docker image r=keeler MozReview-Commit-ID: 6N7PqYsAXup
taskcluster/docker/periodic_updates/scripts/genHPKPStaticPins.js
taskcluster/docker/periodic_updates/scripts/getHSTSPreloadList.js
taskcluster/docker/periodic_updates/scripts/periodic_file_updates.sh
copy from security/manager/tools/genHPKPStaticPins.js
copy to taskcluster/docker/periodic_updates/scripts/genHPKPStaticPins.js
--- a/security/manager/tools/genHPKPStaticPins.js
+++ b/taskcluster/docker/periodic_updates/scripts/genHPKPStaticPins.js
@@ -3,24 +3,22 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 // How to run this file:
 // 1. [obtain firefox source code]
 // 2. [build/obtain firefox binaries]
 // 3. run `[path to]/run-mozilla.sh [path to]/xpcshell \
 //                                  [path to]/genHPKPStaticpins.js \
 //                                  [absolute path to]/PreloadedHPKPins.json \
-//                                  [an unused argument - see bug 1205406] \
 //                                  [absolute path to]/StaticHPKPins.h
 "use strict";
 
-if (arguments.length != 3) {
+if (arguments.length != 2) {
   throw new Error("Usage: genHPKPStaticPins.js " +
                   "<absolute path to PreloadedHPKPins.json> " +
-                  "<an unused argument - see bug 1205406> " +
                   "<absolute path to StaticHPKPins.h>");
 }
 
 var { NetUtil } = ChromeUtils.import("resource://gre/modules/NetUtil.jsm", {});
 var { FileUtils } = ChromeUtils.import("resource://gre/modules/FileUtils.jsm", {});
 var { Services } = ChromeUtils.import("resource://gre/modules/Services.jsm", {});
 
 Cu.importGlobalProperties(["XMLHttpRequest"]);
@@ -61,21 +59,19 @@ const PINSETDEF = "/* Pinsets are each a
   "  // See bug 1338873 about making these fields const.\n" +
   "  size_t size;\n" +
   "  const char* const* data;\n" +
   "};\n\n";
 
 // Command-line arguments
 var gStaticPins = parseJson(arguments[0]);
 
-// arguments[1] is ignored for now. See bug 1205406.
-
 // Open the output file.
 var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
-file.initWithPath(arguments[2]);
+file.initWithPath(arguments[1]);
 var gFileOutputStream = FileUtils.openSafeFileOutputStream(file);
 
 function writeString(string) {
   gFileOutputStream.write(string, string.length);
 }
 
 function readFileToString(filename) {
   let file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
copy from security/manager/tools/getHSTSPreloadList.js
copy to taskcluster/docker/periodic_updates/scripts/getHSTSPreloadList.js
--- a/security/manager/tools/getHSTSPreloadList.js
+++ b/taskcluster/docker/periodic_updates/scripts/getHSTSPreloadList.js
@@ -1,74 +1,75 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+/* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 "use strict";
 
 // How to run this file:
 // 1. [obtain firefox source code]
 // 2. [build/obtain firefox binaries]
-// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell \
-//                                  [path to]/getHSTSPreloadlist.js \
-//                                  [absolute path to]/nsSTSPreloadlist.inc'
+// 3. run `[path to]/run-mozilla.sh [path to]/xpcshell [path to]/getHSTSPreloadlist.js [absolute path to]/nsSTSPreloadlist.inc'
 // Note: Running this file outputs a new nsSTSPreloadlist.inc in the current
 //       working directory.
 
-var gSSService = Cc["@mozilla.org/ssservice;1"]
-                   .getService(Ci.nsISiteSecurityService);
+/*
+var Cc = Components.classes;
+var Ci = Components.interfaces;
+var Cu = Components.utils;
+var Cr = Components.results;
+*/
+var gSSService = Cc["@mozilla.org/ssservice;1"].getService(Ci.nsISiteSecurityService);
 
 ChromeUtils.import("resource://gre/modules/Services.jsm");
 ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
 ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
 
 Cu.importGlobalProperties(["XMLHttpRequest"]);
 
 const SOURCE = "https://chromium.googlesource.com/chromium/src/net/+/master/http/transport_security_state_static.json?format=TEXT";
 const OUTPUT = "nsSTSPreloadList.inc";
 const ERROR_OUTPUT = "nsSTSPreloadList.errors";
 const MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 18;
-const MAX_CONCURRENT_REQUESTS = 5;
-const MAX_RETRIES = 3;
+const MAX_CONCURRENT_REQUESTS = 500;
+const MAX_RETRIES = 1;
 const REQUEST_TIMEOUT = 30 * 1000;
 const ERROR_NONE = "no error";
 const ERROR_CONNECTING_TO_HOST = "could not connect to host";
 const ERROR_NO_HSTS_HEADER = "did not receive HSTS header";
 const ERROR_MAX_AGE_TOO_LOW = "max-age too low: ";
-const HEADER = "/* This Source Code Form is subject to the terms of the Mozilla Public\n" +
-" * License, v. 2.0. If a copy of the MPL was not distributed with this\n" +
-" * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n" +
-"\n" +
-"/*****************************************************************************/\n" +
-"/* This is an automatically generated file. If you're not                    */\n" +
-"/* nsSiteSecurityService.cpp, you shouldn't be #including it.     */\n" +
-"/*****************************************************************************/\n" +
-"\n" +
-"#include <stdint.h>\n";
+const HEADER = `/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/*****************************************************************************/
+/* This is an automatically generated file. If you're not                    */
+/* nsSiteSecurityService.cpp, you shouldn't be #including it.                */
+/*****************************************************************************/
+
+#include <stdint.h>
+`;
+
 const GPERF_DELIM = "%%\n";
 
 function download() {
   let req = new XMLHttpRequest();
   req.open("GET", SOURCE, false); // doing the request synchronously
   try {
     req.send();
   } catch (e) {
     throw new Error(`ERROR: problem downloading '${SOURCE}': ${e}`);
   }
 
   if (req.status != 200) {
-    throw new Error("ERROR: problem downloading '" + SOURCE + "': status " +
-                    req.status);
+    throw new Error("ERROR: problem downloading '" + SOURCE + "': status " + req.status);
   }
 
   let resultDecoded;
   try {
     resultDecoded = atob(req.responseText);
   } catch (e) {
-    throw new Error("ERROR: could not decode data as base64 from '" + SOURCE +
-                    "': " + e);
+    throw new Error("ERROR: could not decode data as base64 from '" + SOURCE + "': " + e);
   }
 
   // we have to filter out '//' comments, while not mangling the json
   let result = resultDecoded.replace(/^(\s*)?\/\/[^\n]*\n/mg, "");
   let data = null;
   try {
     data = JSON.parse(result);
   } catch (e) {
@@ -76,18 +77,17 @@ function download() {
   }
   return data;
 }
 
 function getHosts(rawdata) {
   let hosts = [];
 
   if (!rawdata || !rawdata.entries) {
-    throw new Error("ERROR: source data not formatted correctly: 'entries' " +
-                    "not found");
+    throw new Error("ERROR: source data not formatted correctly: 'entries' not found");
   }
 
   for (let entry of rawdata.entries) {
     if (entry.mode && entry.mode == "force-https") {
       if (entry.name) {
         // We trim the entry name here to avoid malformed URI exceptions when we
         // later try to connect to the domain.
         entry.name = entry.name.trim();
@@ -100,49 +100,50 @@ function getHosts(rawdata) {
       }
     }
   }
 
   return hosts;
 }
 
 function processStsHeader(host, header, status, securityInfo) {
-  let maxAge = { value: 0 };
-  let includeSubdomains = { value: false };
+  let maxAge = {
+    value: 0
+  };
+  let includeSubdomains = {
+    value: false
+  };
   let error = ERROR_NONE;
   if (header != null && securityInfo != null) {
     try {
       let uri = Services.io.newURI("https://" + host.name);
-      let sslStatus = securityInfo.QueryInterface(Ci.nsISSLStatusProvider)
-                                  .SSLStatus;
-      gSSService.processHeader(Ci.nsISiteSecurityService.HEADER_HSTS,
-                               uri, header, sslStatus, 0,
-                               Ci.nsISiteSecurityService.SOURCE_PRELOAD_LIST,
-                               {}, maxAge, includeSubdomains);
+      let sslStatus = securityInfo.QueryInterface(Ci.nsISSLStatusProvider).SSLStatus;
+      gSSService.processHeader(Ci.nsISiteSecurityService.HEADER_HSTS, uri, header, sslStatus, 0, Ci.nsISiteSecurityService.SOURCE_PRELOAD_LIST, {}, maxAge, includeSubdomains);
     } catch (e) {
-      dump("ERROR: could not process header '" + header + "' from " +
-           host.name + ": " + e + "\n");
+      dump("ERROR: could not process header '" + header + "' from " + host.name + ": " + e + "\n");
       error = e;
     }
   } else if (status == 0) {
     error = ERROR_CONNECTING_TO_HOST;
   } else {
     error = ERROR_NO_HSTS_HEADER;
   }
 
   if (error == ERROR_NONE && maxAge.value < MINIMUM_REQUIRED_MAX_AGE) {
     error = ERROR_MAX_AGE_TOO_LOW;
   }
 
-  return { name: host.name,
-           maxAge: maxAge.value,
-           includeSubdomains: includeSubdomains.value,
-           error,
-           retries: host.retries - 1,
-           forceInclude: host.forceInclude };
+  return {
+    name: host.name,
+    maxAge: maxAge.value,
+    includeSubdomains: includeSubdomains.value,
+    error,
+    retries: host.retries - 1,
+    forceInclude: host.forceInclude
+  };
 }
 
 // RedirectAndAuthStopper prevents redirects and HTTP authentication
 function RedirectAndAuthStopper() {}
 
 RedirectAndAuthStopper.prototype = {
   // nsIChannelEventSink
   asyncOnChannelRedirect(oldChannel, newChannel, flags, callback) {
@@ -157,56 +158,46 @@ RedirectAndAuthStopper.prototype = {
   asyncPromptAuth(channel, callback, context, level, authInfo) {
     throw new Error(Cr.NS_ERROR_NOT_IMPLEMENTED);
   },
 
   getInterface(iid) {
     return this.QueryInterface(iid);
   },
 
-  QueryInterface: XPCOMUtils.generateQI([Ci.nsIChannelEventSink,
-                                         Ci.nsIAuthPrompt2])
+  QueryInterface: XPCOMUtils.generateQI([Ci.nsIChannelEventSink, Ci.nsIAuthPrompt2])
 };
 
-function getHSTSStatus(host, resultList) {
-  let req = new XMLHttpRequest();
-  let inResultList = false;
+function fetchstatus(host) {
+  let xhr = new XMLHttpRequest();
   let uri = "https://" + host.name + "/";
-  req.open("GET", uri, true);
-  req.setRequestHeader("X-Automated-Tool",
-                       "https://hg.mozilla.org/mozilla-central/file/tip/security/manager/tools/getHSTSPreloadList.js");
-  req.timeout = REQUEST_TIMEOUT;
 
-  let errorhandler = (evt) => {
-    dump(`ERROR: error making request to ${host.name} (type=${evt.type})\n`);
-    if (!inResultList) {
-      inResultList = true;
-      resultList.push(processStsHeader(host, null, req.status,
-                                       req.channel.securityInfo));
-    }
-  };
-  req.onerror = errorhandler;
-  req.ontimeout = errorhandler;
-  req.onabort = errorhandler;
-
-  req.onload = function(event) {
-    if (!inResultList) {
-      inResultList = true;
-      var header = req.getResponseHeader("strict-transport-security");
-      resultList.push(processStsHeader(host, header, req.status,
-                                       req.channel.securityInfo));
-    }
-  };
+  xhr.open("head", uri, true);
+  xhr.setRequestHeader("X-Automated-Tool", "https://hg.mozilla.org/mozilla-central/file/tip/security/manager/tools/getHSTSPreloadList.js");
+  xhr.timeout = REQUEST_TIMEOUT;
 
   try {
-    req.channel.notificationCallbacks = new RedirectAndAuthStopper();
-    req.send();
+    xhr.channel.notificationCallbacks = new RedirectAndAuthStopper();
+    xhr.send();
   } catch (e) {
     dump("ERROR: exception making request to " + host.name + ": " + e + "\n");
+    return processStsHeader(host, null, xhr.status, xhr.channel.securityInfo);
   }
+
+  let header = xhr.getResponseHeader("strict-transport-security");
+  return processStsHeader(host, header, xhr.status, xhr.channel.securityInfo);
+}
+
+async function getHSTSStatus(host) {
+  return new Promise((resolve, reject) => {
+    do {
+      host = fetchstatus(host);
+    } while (shouldRetry(host));
+    resolve(host);
+  });
 }
 
 function compareHSTSStatus(a, b) {
   if (a.name > b.name) {
     return 1;
   }
   if (a.name < b.name) {
     return -1;
@@ -225,146 +216,88 @@ function getExpirationTimeString() {
   let now = new Date();
   let nowMillis = now.getTime();
   // MINIMUM_REQUIRED_MAX_AGE is in seconds, so convert to milliseconds
   let expirationMillis = nowMillis + (MINIMUM_REQUIRED_MAX_AGE * 1000);
   let expirationMicros = expirationMillis * 1000;
   return "const PRTime gPreloadListExpirationTime = INT64_C(" + expirationMicros + ");\n";
 }
 
-function errorToString(status) {
-  return (status.error == ERROR_MAX_AGE_TOO_LOW
-          ? status.error + status.maxAge
-          : status.error);
+function shouldRetry(response) {
+  return (response.error != ERROR_NO_HSTS_HEADER && response.error != ERROR_MAX_AGE_TOO_LOW && response.error != ERROR_NONE && response.retries > 0);
 }
 
-function output(sortedStatuses, currentList) {
-  try {
-    let file = FileUtils.getFile("CurWorkD", [OUTPUT]);
-    let errorFile = FileUtils.getFile("CurWorkD", [ERROR_OUTPUT]);
-    let fos = FileUtils.openSafeFileOutputStream(file);
-    let eos = FileUtils.openSafeFileOutputStream(errorFile);
-    writeTo(HEADER, fos);
-    writeTo(getExpirationTimeString(), fos);
-
-    for (let status of sortedStatuses) {
-      // If we've encountered an error for this entry (other than the site not
-      // sending an HSTS header), be safe and don't remove it from the list
-      // (given that it was already on the list).
-      if (!status.forceInclude &&
-          status.error != ERROR_NONE &&
-          status.error != ERROR_NO_HSTS_HEADER &&
-          status.error != ERROR_MAX_AGE_TOO_LOW &&
-          status.name in currentList) {
-        dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n");
-        writeTo(status.name + ": " + errorToString(status) + "\n", eos);
-        status.maxAge = MINIMUM_REQUIRED_MAX_AGE;
-        status.includeSubdomains = currentList[status.name];
-      }
-    }
 
-    // Filter out entries we aren't including.
-    var includedStatuses = sortedStatuses.filter(function (status) {
-      if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) {
-        dump("INFO: " + status.name + " NOT ON the preload list\n");
-        writeTo(status.name + ": " + errorToString(status) + "\n", eos);
-        return false;
-      }
+// Copied from browser/components/migration/MigrationUtils.jsm
+function spinResolve(promise) {
+  if (!(promise instanceof Promise)) {
+    return promise;
+  }
+  let done = false;
+  let result = null;
+  let error = null;
+  promise.catch(e => {
+    error = e;
+  }).then(r => {
+    result = r;
+    done = true;
+  });
 
-      dump("INFO: " + status.name + " ON the preload list (includeSubdomains: "
-           + status.includeSubdomains + "\n");
-      if (status.forceInclude && status.error != ERROR_NONE) {
-        writeTo(status.name + ": " + errorToString(status) + " (error "
-                + "ignored - included regardless)\n", eos);
-      }
-      return true;
-    });
-
-    writeTo(GPERF_DELIM, fos);
-
-    for (let status of includedStatuses) {
-      let includeSubdomains = (status.includeSubdomains ? 1 : 0);
-      writeTo(status.name + ", " + includeSubdomains + "\n", fos);
-    }
-
-    writeTo(GPERF_DELIM, fos);
-    FileUtils.closeSafeFileOutputStream(fos);
-    FileUtils.closeSafeFileOutputStream(eos);
-  } catch (e) {
-    dump("ERROR: problem writing output to '" + OUTPUT + "': " + e + "\n");
+  Services.tm.spinEventLoopUntil(() => done);
+  if (error) {
+    throw error;
+  } else {
+    return result;
   }
 }
 
-function shouldRetry(response) {
-  return (response.error != ERROR_NO_HSTS_HEADER &&
-          response.error != ERROR_MAX_AGE_TOO_LOW &&
-          response.error != ERROR_NONE && response.retries > 0);
-}
+async function probeHSTSStatuses(inHosts) {
+  let promises = [];
+
+  dump("Examining " + inHosts.length + " hosts.\n");
 
-function probeHSTSStatuses(inHosts) {
-  let outStatuses = [];
-  let expectedOutputLength = inHosts.length;
-  let tmpOutput = [];
-  for (let i = 0; i < MAX_CONCURRENT_REQUESTS && inHosts.length > 0; i++) {
+  // Debug/testing on a small number of hosts
+  // while (inHosts.length > 40000) {
+
+  while (inHosts.length > 0) {
     let host = inHosts.shift();
-    dump("spinning off request to '" + host.name + "' (remaining retries: " +
-         host.retries + ")\n");
-    getHSTSStatus(host, tmpOutput);
+    promises.push(getHSTSStatus(host));
   }
 
-  while (outStatuses.length != expectedOutputLength) {
-    waitForAResponse(tmpOutput);
-    let response = tmpOutput.shift();
-    dump("request to '" + response.name + "' finished\n");
-    if (shouldRetry(response)) {
-      inHosts.push(response);
-    } else {
-      outStatuses.push(response);
-    }
+  dump("Waiting for " + promises.length + " responses.\n");
 
-    if (inHosts.length > 0) {
-      let host = inHosts.shift();
-      dump("spinning off request to '" + host.name + "' (remaining retries: " +
-           host.retries + ")\n");
-      getHSTSStatus(host, tmpOutput);
-    }
-  }
-
-  return outStatuses;
-}
-
-// Since all events are processed on the main thread, and since event
-// handlers are not preemptible, there shouldn't be any concurrency issues.
-function waitForAResponse(outputList) {
-  // From <https://developer.mozilla.org/en/XPConnect/xpcshell/HOWTO>
-  Services.tm.spinEventLoopUntil(() => outputList.length != 0);
+  let result = await Promise.all(promises);
+  dump("HSTS Probe received " + result.length + " statuses.\n");
+  return result;
 }
 
 function readCurrentList(filename) {
   var currentHosts = {};
   var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile);
   file.initWithPath(filename);
-  var fis = Cc["@mozilla.org/network/file-input-stream;1"]
-              .createInstance(Ci.nsILineInputStream);
+  var fis = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(Ci.nsILineInputStream);
   fis.init(file, -1, -1, Ci.nsIFileInputStream.CLOSE_ON_EOF);
   var line = {};
 
   // While we generate entries matching the latest version format,
   // we still need to be able to read entries in the previous version formats
   // for bootstrapping a latest version preload list from a previous version
   // preload list. Hence these regexes.
   const entryRegexes = [
-    /([^,]+), (0|1)/,                         // v3
-    / {2}\/\* "([^"]*)", (true|false) \*\//,  // v2
-    / {2}{ "([^"]*)", (true|false) },/,       // v1
+    /([^,]+), (0|1)/, // v3
+    / {2}\/\* "([^"]*)", (true|false) \*\//, // v2
+    / {2}{ "([^"]*)", (true|false) },/, // v1
   ];
 
   while (fis.readLine(line)) {
     let match;
-    entryRegexes.find((r) => { match = r.exec(line.value); return match; });
+    entryRegexes.find((r) => {
+      match = r.exec(line.value);
+      return match;
+    });
     if (match) {
       currentHosts[match[1]] = (match[2] == "1" || match[2] == "true");
     }
   }
   return currentHosts;
 }
 
 function combineLists(newHosts, currentHosts) {
@@ -372,101 +305,174 @@ function combineLists(newHosts, currentH
     let found = false;
     for (let newHost of newHosts) {
       if (newHost.name == currentHost) {
         found = true;
         break;
       }
     }
     if (!found) {
-      newHosts.push({ name: currentHost, retries: MAX_RETRIES });
+      newHosts.push({name: currentHost, retries: MAX_RETRIES});
     }
   }
 }
 
 const TEST_ENTRIES = [
-  { name: "includesubdomains.preloaded.test", includeSubdomains: true },
-  { name: "includesubdomains2.preloaded.test", includeSubdomains: true },
-  { name: "noincludesubdomains.preloaded.test", includeSubdomains: false },
+  {
+    name: "includesubdomains.preloaded.test",
+    includeSubdomains: true
+  }, {
+    name: "includesubdomains2.preloaded.test",
+    includeSubdomains: true
+  }, {
+    name: "noincludesubdomains.preloaded.test",
+    includeSubdomains: false
+  }
 ];
 
 function deleteTestHosts(currentHosts) {
   for (let testEntry of TEST_ENTRIES) {
     delete currentHosts[testEntry.name];
   }
 }
 
 function getTestHosts() {
   let hosts = [];
   for (let testEntry of TEST_ENTRIES) {
     hosts.push({
-      name: testEntry.name,
-      maxAge: MINIMUM_REQUIRED_MAX_AGE,
-      includeSubdomains: testEntry.includeSubdomains,
-      error: ERROR_NONE,
+      name: testEntry.name, maxAge: MINIMUM_REQUIRED_MAX_AGE, includeSubdomains: testEntry.includeSubdomains, error: ERROR_NONE,
       // This deliberately doesn't have a value for `retries` (because we should
       // never attempt to connect to this host).
-      forceInclude: true,
+      forceInclude: true
     });
   }
   return hosts;
 }
 
-function insertHosts(inoutHostList, inAddedHosts) {
+async function insertHosts(inoutHostList, inAddedHosts) {
   for (let host of inAddedHosts) {
     inoutHostList.push(host);
   }
 }
 
 function filterForcedInclusions(inHosts, outNotForced, outForced) {
   // Apply our filters (based on policy today) to determine which entries
   // will be included without being checked (forced); the others will be
   // checked using active probing.
   for (let host of inHosts) {
-    if (host.policy == "google" || host.policy == "public-suffix" ||
-        host.policy == "public-suffix-requested") {
+    if (host.policy == "google" || host.policy == "public-suffix" || host.policy == "public-suffix-requested") {
       host.forceInclude = true;
       host.error = ERROR_NONE;
       outForced.push(host);
     } else {
       outNotForced.push(host);
     }
   }
 }
 
-// ****************************************************************************
-// This is where the action happens:
-if (arguments.length != 1) {
-  throw new Error("Usage: getHSTSPreloadList.js " +
-                  "<absolute path to current nsSTSPreloadList.inc>");
+function output(statuses) {
+  dump("INFO: Writing output to " + OUTPUT + "\n");
+  try {
+    ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
+
+    let file = FileUtils.getFile("CurWorkD", [OUTPUT]);
+    let fos = FileUtils.openSafeFileOutputStream(file);
+    writeTo(HEADER, fos);
+    writeTo(getExpirationTimeString(), fos);
+
+    writeTo(GPERF_DELIM, fos);
+
+    for (let status of statuses) {
+      let includeSubdomains = (
+        status.includeSubdomains
+        ? 1
+        : 0);
+      writeTo(status.name + ", " + includeSubdomains + "\n", fos);
+    }
+
+    writeTo(GPERF_DELIM, fos);
+    FileUtils.closeSafeFileOutputStream(fos);
+    dump("finished writing output file\n");
+  } catch (e) {
+    dump("ERROR: problem writing output to '" + OUTPUT + "': " + e + "\n");
+  }
+}
+
+function errorToString(status) {
+  return (
+    status.error == ERROR_MAX_AGE_TOO_LOW
+    ? status.error + status.maxAge
+    : status.error);
 }
-// get the current preload list
-let currentHosts = readCurrentList(arguments[0]);
-// delete any hosts we use in tests so we don't actually connect to them
-deleteTestHosts(currentHosts);
-// disable the current preload list so it won't interfere with requests we make
-Services.prefs.setBoolPref("network.stricttransportsecurity.preloadlist", false);
-// download and parse the raw json file from the Chromium source
-let rawdata = download();
-// get just the hosts with mode: "force-https"
-let hosts = getHosts(rawdata);
-// add hosts in the current list to the new list (avoiding duplicates)
-combineLists(hosts, currentHosts);
-// Don't contact hosts that are forced to be included anyway
-let hostsToContact = [];
-let forcedHosts = [];
-filterForcedInclusions(hosts, hostsToContact, forcedHosts);
-// Initialize the final status list
-let hstsStatuses = [];
-// Add the hosts we use in tests
-insertHosts(hstsStatuses, getTestHosts());
-// Add in the hosts that are forced
-insertHosts(hstsStatuses, forcedHosts);
-// Probe the HSTS status of each host and add them into hstsStatuses
-let probedStatuses = probeHSTSStatuses(hostsToContact);
-insertHosts(hstsStatuses, probedStatuses);
-// Sort the hosts alphabetically
-hstsStatuses.sort(compareHSTSStatus);
-// Write the results to a file (this is where we filter out hosts that we
-// either couldn't connect to, didn't receive an HSTS header from, couldn't
-// parse the header, or had a header with too short a max-age)
-output(hstsStatuses, currentHosts);
-// ****************************************************************************
+
+async function main(args) {
+  if (args.length != 1) {
+    throw new Error("Usage: getHSTSPreloadList.js <absolute path to current nsSTSPreloadList.inc>");
+  }
+
+  // get the current preload list
+  let currentHosts = readCurrentList(args[0]);
+  // delete any hosts we use in tests so we don't actually connect to them
+  deleteTestHosts(currentHosts);
+  // disable the current preload list so it won't interfere with requests we make
+  Services.prefs.setBoolPref("network.stricttransportsecurity.preloadlist", false);
+  // download and parse the raw json file from the Chromium source
+  let rawdata = download();
+  // get just the hosts with mode: "force-https"
+  let hosts = getHosts(rawdata);
+  // add hosts in the current list to the new list (avoiding duplicates)
+  combineLists(hosts, currentHosts);
+
+  // Don't contact hosts that are forced to be included anyway
+  let hostsToContact = [];
+  let forcedHosts = [];
+  filterForcedInclusions(hosts, hostsToContact, forcedHosts);
+
+  // Initialize the final status list
+  let hstsStatuses = [];
+  // Add the hosts we use in tests
+  dump("Adding test hosts\n");
+  insertHosts(hstsStatuses, getTestHosts());
+  // Add in the hosts that are forced
+  dump("Adding forced hosts\n");
+  insertHosts(hstsStatuses, forcedHosts);
+
+  let total = await probeHSTSStatuses(hostsToContact).then(function(probedStatuses) {
+    return hstsStatuses.concat(probedStatuses);
+  }).then(function(statuses) {
+    return statuses.sort(compareHSTSStatus);
+  }).then(function(statuses) {
+    for (let status of statuses) {
+      // If we've encountered an error for this entry (other than the site not
+      // sending an HSTS header), be safe and don't remove it from the list
+      // (given that it was already on the list).
+      if (!status.forceInclude && status.error != ERROR_NONE && status.error != ERROR_NO_HSTS_HEADER && status.error != ERROR_MAX_AGE_TOO_LOW && status.name in currentHosts) {
+        // dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n");
+        status.maxAge = MINIMUM_REQUIRED_MAX_AGE;
+        status.includeSubdomains = currentHosts[status.name];
+      }
+    }
+    return statuses;
+  }).then(function(statuses) {
+    // Filter out entries we aren't including.
+    var includedStatuses = statuses.filter(function(status) {
+      if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) {
+        // dump("INFO: " + status.name + " NOT ON the preload list\n");
+        return false;
+      }
+
+      // dump("INFO: " + status.name + " ON the preload list (includeSubdomains: " + status.includeSubdomains + ")\n");
+      if (status.forceInclude && status.error != ERROR_NONE) {
+        dump(status.name + ": " + errorToString(status) + " (error ignored - included regardless)\n");
+      }
+      return true;
+    });
+    return includedStatuses;
+  });
+
+  // Write the output file
+  output(total);
+
+  dump("HSTS probing all done\n");
+}
+
+// arguments is a global within xpcshell
+spinResolve(main(arguments));
new file mode 100755
--- /dev/null
+++ b/taskcluster/docker/periodic_updates/scripts/periodic_file_updates.sh
@@ -0,0 +1,575 @@
+#!/bin/bash
+
+set -ex
+
+# Print usage/help text on stdout.
+# Keep this in sync with the option parser in the "Main" section below.
+function usage {
+  cat <<EOF
+
+Usage: $(basename "$0") -h # Displays this usage/help text
+Usage: $(basename "$0") -x # lists exit codes
+Usage: $(basename "$0") [-p product]
+           [-r existing_repo_dir]
+           [-n] # Dry run: report available updates without committing
+           [-c] # Commit even on a CLOSED TREE (annotates the commit message)
+           [-d] # Append (DONTBUILD) to the commit message
+           [-a] # Append an approval annotation (a=...) to the commit message
+           [--pinset] # Also fetch the xpcshell artifacts for pinset work
+           # Use mozilla-central builds to check HSTS & HPKP
+           [--use-mozilla-central]
+           # Use archive.m.o instead of the taskcluster index to get xpcshell
+           [--use-ftp-builds]
+           # One (or more) of the following actions must be specified.
+           --hsts | --hpkp | --blocklist
+           -b branch
+
+EOF
+}
+
+PRODUCT="firefox"
+BRANCH=""
+PLATFORM_EXT="tar.bz2"
+UNPACK_CMD="tar jxf"
+CLOSED_TREE=false
+DONTBUILD=false
+APPROVAL=false
+HG_SSH_USER='ffxbld'
+REPODIR=''
+APP_DIR=''
+APP_ID=''
+APP_NAME=''
+HGHOST="hg.mozilla.org"
+STAGEHOST="archive.mozilla.org"
+WGET="wget -nv"
+UNZIP="unzip -q"
+DIFF="$(which diff) -u"
+BASEDIR="${HOME}"
+TOOLSDIR="${HOME}/tools"
+HGTOOL="${TOOLSDIR}/buildfarm/utils/hgtool.py"
+
+SCRIPTDIR="$(realpath "$(dirname "$0")")"
+HG="$(which hg)"
+DATADIR="${BASEDIR}/data"
+mkdir -p "${DATADIR}"
+
+VERSION=''
+MCVERSION=''
+USE_MC=false
+USE_TC=true
+JQ="$(which jq)"
+
+DO_HSTS=false
+HSTS_PRELOAD_SCRIPT="${SCRIPTDIR}/getHSTSPreloadList.js"
+HSTS_PRELOAD_ERRORS="nsSTSPreloadList.errors"
+HSTS_PRELOAD_INC="${DATADIR}/nsSTSPreloadList.inc"
+HSTS_UPDATED=false
+
+DO_HPKP=false
+HPKP_PRELOAD_SCRIPT="${SCRIPTDIR}/genHPKPStaticPins.js"
+HPKP_PRELOAD_ERRORS="StaticHPKPins.errors"
+HPKP_PRELOAD_JSON="${DATADIR}/PreloadedHPKPins.json"
+HPKP_PRELOAD_INC="StaticHPKPins.h"
+HPKP_PRELOAD_INPUT="${DATADIR}/${HPKP_PRELOAD_INC}"
+HPKP_PRELOAD_OUTPUT="${DATADIR}/${HPKP_PRELOAD_INC}.out"
+HPKP_UPDATED=false
+
+DO_BLOCKLIST=false
+BLOCKLIST_URL_AMO=''
+BLOCKLIST_URL_HG=''
+BLOCKLIST_LOCAL_AMO="blocklist_amo.xml"
+BLOCKLIST_LOCAL_HG="blocklist_hg.xml"
+BLOCKLIST_UPDATED=false
+
+ARTIFACTS_DIR="${ARTIFACTS_DIR:-'.'}"
+# Defaults
+HSTS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HSTS_DIFF_ARTIFACT:-"nsSTSPreloadList.diff"}"
+HPKP_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${HPKP_DIFF_ARTIFACT:-"StaticHPKPins.h.diff"}"
+BLOCKLIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${BLOCKLIST_DIFF_ARTIFACT:-"blocklist.diff"}"
+
+
+# Get the current in-tree version for a code branch.
+# $1: base URL of the hg repository to query.
+# Prints the parsed version on stdout; exits 21 if the version is empty.
+function get_version {
+  VERSION_REPO=$1
+  VERSION_FILE='version.txt'
+
+  # TODO bypass temporary file
+
+  cd "${BASEDIR}"
+  VERSION_URL_HG="${VERSION_REPO}/raw-file/default/${APP_DIR}/config/version.txt"
+  rm -f "${VERSION_FILE}"
+  ${WGET} -O "${VERSION_FILE}" "${VERSION_URL_HG}"
+  # Read back via the variable instead of the hard-coded name `version.txt`
+  # so the download target and the file we parse cannot drift apart.
+  PARSED_VERSION=$(cat "${VERSION_FILE}")
+  if [ "${PARSED_VERSION}" == "" ]; then
+    echo "ERROR: Unable to parse version from $VERSION_FILE" >&2
+    exit 21
+  fi
+  rm -f "${VERSION_FILE}"
+  echo "${PARSED_VERSION}"
+}
+
+# Cleanup common artifacts.
+function preflight_cleanup {
+  cd "${BASEDIR}"
+  rm -rf "${PRODUCT}" tests "${BROWSER_ARCHIVE}" "${TESTS_ARCHIVE}"
+}
+
+# Fetch the browser and tests archives for this branch from archive.m.o.
+# Sets ARTIFACT_DIR, BROWSER_ARCHIVE_URL and TESTS_ARCHIVE_URL as globals.
+function download_shared_artifacts_from_ftp {
+  cd "${BASEDIR}"
+
+  # Download everything we need to run js with xpcshell
+  echo "INFO: Downloading all the necessary pieces from ${STAGEHOST}..."
+  if [ "${USE_MC}" == "true" ]; then
+    ARTIFACT_DIR="nightly/latest-mozilla-central"
+  else
+    ARTIFACT_DIR="nightly/latest-${REPODIR}"
+  fi
+
+  BROWSER_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${BROWSER_ARCHIVE}"
+  TESTS_ARCHIVE_URL="https://${STAGEHOST}/pub/mozilla.org/${PRODUCT}/${ARTIFACT_DIR}/${TESTS_ARCHIVE}"
+
+  for url in "${BROWSER_ARCHIVE_URL}" "${TESTS_ARCHIVE_URL}"; do
+    echo "INFO: ${WGET} ${url}"
+    ${WGET} "${url}"
+  done
+}
+
+function download_shared_artifacts_from_tc {
+  cd "${BASEDIR}"
+  TASKID_FILE="taskId.json"
+
+  # Download everything we need to run js with xpcshell
+  echo "INFO: Downloading all the necessary pieces from the taskcluster index..."
+  TASKID_URL="https://index.taskcluster.net/v1/task/gecko.v2.${REPODIR}.latest.${PRODUCT}.linux64-opt"
+  if [ "${USE_MC}" == "true" ]; then
+    TASKID_URL="https://index.taskcluster.net/v1/task/gecko.v2.mozilla-central.latest.${PRODUCT}.linux64-opt"
+  fi
+  ${WGET} -O ${TASKID_FILE} ${TASKID_URL}
+  INDEX_TASK_ID="$($JQ -r '.taskId' ${TASKID_FILE})"
+  if [ -z "${INDEX_TASK_ID}" ]; then
+    echo "Failed to look up taskId at ${TASKID_URL}"
+    exit 22
+  else
+    echo "INFO: Got taskId of $INDEX_TASK_ID"
+  fi
+
+  TASKSTATUS_FILE="taskstatus.json"
+  STATUS_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/status"
+  ${WGET} -O "${TASKSTATUS_FILE}" "${STATUS_URL}"
+  LAST_RUN_INDEX=$(($(jq '.status.runs | length' ${TASKSTATUS_FILE}) - 1))
+  echo "INFO: Examining run number ${LAST_RUN_INDEX}"
+
+  BROWSER_ARCHIVE_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}"
+  echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
+  ${WGET} "${BROWSER_ARCHIVE_URL}"
+
+  TESTS_ARCHIVE_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}"
+  echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
+  ${WGET} "${TESTS_ARCHIVE_URL}"
+}
+
+# Verify both downloaded archives exist, unpack them, and copy xpcshell
+# from the tests bundle into the unpacked browser directory.
+# Exits 31/32 if the browser/tests archive respectively is missing.
+function unpack_artifacts {
+  cd "${BASEDIR}"
+  if ! [ -f "${BROWSER_ARCHIVE}" ]; then
+    echo "Downloaded file '${BROWSER_ARCHIVE}' not found in directory '$(pwd)'." >&2
+    exit 31
+  fi
+  if ! [ -f "${TESTS_ARCHIVE}" ]; then
+    echo "Downloaded file '${TESTS_ARCHIVE}' not found in directory '$(pwd)'." >&2
+    exit 32
+  fi
+  # Unpack the browser and move xpcshell in place for updating the preload list.
+  echo "INFO: Unpacking resources..."
+  ${UNPACK_CMD} "${BROWSER_ARCHIVE}"
+  mkdir -p tests
+  cd tests
+  ${UNZIP} "../${TESTS_ARCHIVE}"
+  cd "${BASEDIR}"
+  cp tests/bin/xpcshell "${PRODUCT}"
+}
+
+# Downloads the current in-tree HSTS (HTTP Strict Transport Security) file,
+# regenerates it with the xpcshell preload script, and diffs old vs. new.
+# Returns 0 (an update is needed) exactly when the diff is non-empty.
+function compare_hsts_files {
+  cd "${BASEDIR}"
+
+  HSTS_PRELOAD_INC_HG="${HGREPO}/raw-file/default/security/manager/ssl/$(basename "${HSTS_PRELOAD_INC}")"
+
+  echo "INFO: Downloading existing include file..."
+  rm -rf "${HSTS_PRELOAD_ERRORS}" "${HSTS_PRELOAD_INC}"
+  echo "INFO: ${WGET} ${HSTS_PRELOAD_INC_HG}"
+  ${WGET} -O "${HSTS_PRELOAD_INC}" "${HSTS_PRELOAD_INC_HG}"
+
+  if ! [ -f "${HSTS_PRELOAD_INC}" ]; then
+    echo "Downloaded file '${HSTS_PRELOAD_INC}' not found in directory '$(pwd)' - this should have been downloaded above from ${HSTS_PRELOAD_INC_HG}." >&2
+    exit 41
+  fi
+
+  # Run the script to get an updated preload list.
+  echo "INFO: Generating new HSTS preload list..."
+  cd "${BASEDIR}/${PRODUCT}"
+  LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HSTS_PRELOAD_SCRIPT}" "${HSTS_PRELOAD_INC}"
+
+  # The created files should be non-empty.
+  echo "INFO: Checking whether new HSTS preload list is valid..."
+  if ! [ -s "${HSTS_PRELOAD_INC}" ]; then
+    echo "New HSTS preload list ${HSTS_PRELOAD_INC} is empty. That's less good." >&2
+    exit 42
+  fi
+  cd "${BASEDIR}"
+
+  # Check for differences; the test's own status is the function's result.
+  echo "INFO: diffing old/new HSTS preload lists into ${HSTS_DIFF_ARTIFACT}"
+  ${DIFF} "${BASEDIR}/${PRODUCT}/$(basename "${HSTS_PRELOAD_INC}")" "${HSTS_PRELOAD_INC}" | tee "${HSTS_DIFF_ARTIFACT}"
+  [ -s "${HSTS_DIFF_ARTIFACT}" ]
+}
+
+# Downloads the current in-tree HPKP (HTTP public key pinning) files,
+# regenerates the static pins header with xpcshell, and diffs old vs. new.
+# Returns 0 (an update is needed) exactly when the diff is non-empty.
+function compare_hpkp_files {
+  cd "${BASEDIR}"
+  HPKP_PRELOAD_JSON_HG="${HGREPO}/raw-file/default/security/manager/tools/$(basename "${HPKP_PRELOAD_JSON}")"
+
+  HPKP_PRELOAD_OUTPUT_HG="${HGREPO}/raw-file/default/security/manager/ssl/${HPKP_PRELOAD_INC}"
+
+  rm -f "${HPKP_PRELOAD_OUTPUT}"
+  ${WGET} -O "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT_HG}"
+  ${WGET} -O "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_JSON_HG}"
+
+  # Run the script to get an updated preload list.
+  echo "INFO: Generating new HPKP preload list..."
+  cd "${BASEDIR}/${PRODUCT}"
+  LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:. ./xpcshell "${HPKP_PRELOAD_SCRIPT}" "${HPKP_PRELOAD_JSON}" "${HPKP_PRELOAD_OUTPUT}" > "${HPKP_PRELOAD_ERRORS}"
+
+  # The created files should be non-empty.
+  echo "INFO: Checking whether new HPKP preload list is valid..."
+
+  if ! [ -s "${HPKP_PRELOAD_OUTPUT}" ]; then
+    echo "${HPKP_PRELOAD_OUTPUT} is empty. That's less good." >&2
+    exit 52
+  fi
+  cd "${BASEDIR}"
+
+  # The test's own status is the function's result.
+  echo "INFO: diffing old/new HPKP preload lists..."
+  ${DIFF} "${HPKP_PRELOAD_INPUT}" "${HPKP_PRELOAD_OUTPUT}" | tee "${HPKP_DIFF_ARTIFACT}"
+  [ -s "${HPKP_DIFF_ARTIFACT}" ]
+}
+
+function is_valid_xml {
+  xmlfile=$1
+  XMLLINT=$(which xmllint 2>/dev/null | head -n1)
+
+  if [ ! -x "${XMLLINT}" ]; then
+    echo "ERROR: xmllint not found in PATH"
+    exit 60
+  fi
+  ${XMLLINT} --nonet --noout "${xmlfile}"
+}
+
+# Downloads the current in-tree blocklist file.
+# Downloads the current blocklist file from AMO.
+# Compares the AMO blocklist with the in-tree blocklist to determine whether we need to update.
+function compare_blocklist_files {
+  BLOCKLIST_URL_AMO="https://blocklist.addons.mozilla.org/blocklist/3/${APP_ID}/${VERSION}/${APP_NAME}/20090105024647/blocklist-sync/en-US/nightly/blocklist-sync/default/default/"
+  BLOCKLIST_URL_HG="${HGREPO}/raw-file/default/${APP_DIR}/app/blocklist.xml"
+
+  cd "${BASEDIR}"
+  rm -f ${BLOCKLIST_LOCAL_AMO}
+  echo "INFO: ${WGET} -O ${BLOCKLIST_LOCAL_AMO} ${BLOCKLIST_URL_AMO}"
+  ${WGET} -O "${BLOCKLIST_LOCAL_AMO}" "${BLOCKLIST_URL_AMO}"
+
+  rm -f ${BLOCKLIST_LOCAL_HG}
+  echo "INFO: ${WGET} -O ${BLOCKLIST_LOCAL_HG} ${BLOCKLIST_URL_HG}"
+  ${WGET} -O "${BLOCKLIST_LOCAL_HG}" "${BLOCKLIST_URL_HG}"
+
+  # The downloaded files should be non-empty and have a valid xml header
+  # if they were retrieved properly, and some random HTML garbage if not.
+  # set -x catches these
+  is_valid_xml ${BLOCKLIST_LOCAL_AMO}
+  is_valid_xml ${BLOCKLIST_LOCAL_HG}
+
+  echo "INFO: diffing in-tree blocklist against the blocklist from AMO..."
+  ${DIFF} ${BLOCKLIST_LOCAL_HG} ${BLOCKLIST_LOCAL_AMO} | tee "${BLOCKLIST_DIFF_ARTIFACT}"
+  if [ -s "${BLOCKLIST_DIFF_ARTIFACT}" ]
+  then
+    return 0
+  fi
+  return 1
+}
+
+# Check out a fresh copy of the buildfarm tools repository into TOOLSDIR.
+function clone_build_tools {
+  rm -fr "${TOOLSDIR}"
+  ${HG} clone https://hg.mozilla.org/build/tools "${TOOLSDIR}"
+}
+
+# Clones an hg repo, using hgtool preferentially.
+function clone_repo {
+  cd "${BASEDIR}"
+  if [ ! -d "${REPODIR}" ]; then
+    CLONE_CMD=""
+    if [ -f "${HGTOOL}" ]; then
+      # Need to pass the default branch here to avoid pollution from buildprops.json
+      # when hgtool.py is run in production.
+      CLONE_CMD="${HGTOOL} --branch default"
+    else
+      echo "INFO: hgtool.py not found. Falling back to vanilla hg."
+      CLONE_CMD="${HG} clone"
+    fi
+    CLONE_CMD="${CLONE_CMD} ${HGREPO} ${REPODIR}"
+    ${CLONE_CMD}
+  fi
+
+  ${HG} -R ${REPODIR} pull
+  ${HG} -R ${REPODIR} update -C default
+}
+
+# Copies new HSTS files in place, and commits them.
+function commit_hsts_files {
+  cd "${BASEDIR}"
+
+  cp -f "${BASEDIR}/${PRODUCT}/$(basename "${HSTS_PRELOAD_INC}")" "${REPODIR}/security/manager/ssl/"
+
+  COMMIT_MESSAGE="No bug, Automated HSTS preload list update"
+  if [ -n "${TASK_ID}" ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} from task ${TASK_ID}"
+  fi
+  if [ ${DONTBUILD} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - (DONTBUILD)"
+  fi
+  if [ ${CLOSED_TREE} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - CLOSED TREE"
+  fi
+  if [ ${APPROVAL} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - a=hsts-update"
+  fi
+  echo "INFO: committing HSTS changes"
+  ${HG} -R ${REPODIR} commit -u "${HG_SSH_USER}" -m "${COMMIT_MESSAGE}"
+}
+
+# Copies new HPKP files in place, and commits them.
+function commit_hpkp_files {
+  cd "${BASEDIR}"
+
+  cp -f "${HPKP_PRELOAD_OUTPUT}" "${REPODIR}/security/manager/ssl/${HPKP_PRELOAD_INC}"
+
+  COMMIT_MESSAGE="No bug, Automated HPKP preload list update"
+  if [ -n "${TASK_ID}" ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} from task ${TASK_ID}"
+  fi
+  if [ ${DONTBUILD} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - (DONTBUILD)"
+  fi
+  if [ ${CLOSED_TREE} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - CLOSED TREE"
+  fi
+  if [ ${APPROVAL} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - a=hpkp-update"
+  fi
+  echo "INFO: committing HPKP changes"
+  ${HG} -R ${REPODIR} commit -u "${HG_SSH_USER}" -m "${COMMIT_MESSAGE}"
+}
+
+# Copies new blocklist file in place, and commits it.
+function commit_blocklist_files {
+  cd "${BASEDIR}"
+  cp -f ${BLOCKLIST_LOCAL_AMO} ${REPODIR}/${APP_DIR}/app/blocklist.xml
+  COMMIT_MESSAGE="No bug, Automated blocklist update"
+  if [ -n "${TASK_ID}" ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} from task ${TASK_ID}"
+  fi
+  if [ ${DONTBUILD} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - (DONTBUILD)"
+  fi
+  if [ ${CLOSED_TREE} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - CLOSED TREE"
+  fi
+  if [ ${APPROVAL} == true ]; then
+    COMMIT_MESSAGE="${COMMIT_MESSAGE} - a=blocklist-update"
+  fi
+  echo "INFO: committing blocklist changes"
+  ${HG} -R ${REPODIR} commit -u "${HG_SSH_USER}" -m "${COMMIT_MESSAGE}"
+}
+
+# Push all pending commits to Phabricator
+function push_repo {
+  cd "${REPODIR}"
+  if [ ! -r "${HOME}/.arcrc" ]
+  then
+    return 1
+  fi
+  if ! ARC=$(which arc)
+  then
+    return 1
+  fi
+  if [ -z "${REVIEWERS}" ]
+  then
+    return 1
+  fi
+  $ARC diff --verbatim --reviewers "${REVIEWERS}"
+}
+
+
+
+# Main
+
+# Parse our command-line options. Unrecognized dash-options print usage
+# and exit 11; the first non-option argument ends parsing.
+while [ $# -gt 0 ]; do
+  arg="$1"
+  case "${arg}" in
+    # Options taking a value consume it with an extra shift.
+    -p) PRODUCT="$2"; shift ;;
+    -b) BRANCH="$2"; shift ;;
+    -r) REPODIR="$2"; shift ;;
+    # Boolean flags.
+    -n) DRY_RUN=true ;;
+    -c) CLOSED_TREE=true ;;
+    -d) DONTBUILD=true ;;
+    -a) APPROVAL=true ;;
+    --pinset) DO_PRELOAD_PINSET=true ;;
+    --hsts) DO_HSTS=true ;;
+    --hpkp) DO_HPKP=true ;;
+    --blocklist) DO_BLOCKLIST=true ;;
+    --use-mozilla-central) USE_MC=true ;;
+    --use-ftp-builds) USE_TC=false ;;
+    -h) usage; exit 0 ;;
+    -*) usage
+      exit 11 ;;
+    *)  break ;; # terminate while loop
+  esac
+  shift
+done
+
+# Must supply a code branch to work with.
+if [ -z "${BRANCH}" ]; then
+  echo "Error: You must specify a branch with -b branchname." >&2
+  usage
+  exit 12
+fi
+
+# Must choose at least one update action.
+if [ "$DO_HSTS" == "false" ] && [ "$DO_HPKP" == "false" ] && [ "$DO_BLOCKLIST" == "false" ]
+then
+  echo "Error: you must specify at least one action from: --hsts, --hpkp, --blocklist" >&2
+  usage
+  exit 13
+fi
+
+# Per-product constants (APP_ID is the URL-escaped application GUID).
+case "${PRODUCT}" in
+  firefox)
+    APP_DIR="browser"
+    APP_ID="%7Bec8030f7-c20a-464f-9b0e-13a3a9e97384%7D"
+    APP_NAME="Firefox"
+    ;;
+  thunderbird)
+    APP_DIR="mail"
+    APP_ID="%7B3550f703-e582-4d05-9a08-453d09bdfdc6%7D"
+    APP_NAME="Thunderbird"
+    ;;
+  *)
+    echo "Error: Invalid product specified"
+    usage
+    exit 14
+    ;;
+esac
+
+# Default the checkout directory to the branch's basename.
+if [ -z "${REPODIR}" ]; then
+  REPODIR="$(basename "${BRANCH}")"
+fi
+
+HGREPO="https://${HGHOST}/${BRANCH}"
+MCREPO="https://${HGHOST}/mozilla-central"
+
+# Remove once 52esr is off support
+# NOTE: order matters in this section. MAJOR_VERSION and the archive names
+# are first derived from the branch version, then overwritten from the
+# mozilla-central version when --use-mozilla-central was given, and the
+# archive names are finally replaced wholesale for versions >= 53.
+VERSION=$(get_version "${HGREPO}")
+MAJOR_VERSION="${VERSION%.*}"
+echo "INFO: parsed version is ${VERSION}"
+if [ "${USE_MC}" == "true" ]; then
+  MCVERSION=$(get_version "${MCREPO}")
+  echo "INFO: parsed mozilla-central version is ${MCVERSION}"
+  MAJOR_VERSION="${MCVERSION%.*}"
+fi
+
+# Versioned archive names are only needed for builds older than 53.
+BROWSER_ARCHIVE="${PRODUCT}-${VERSION}.en-US.${PLATFORM}.${PLATFORM_EXT}"
+TESTS_ARCHIVE="${PRODUCT}-${VERSION}.en-US.${PLATFORM}.common.tests.zip"
+if [ "${USE_MC}" == "true" ]; then
+    BROWSER_ARCHIVE="${PRODUCT}-${MCVERSION}.en-US.${PLATFORM}.${PLATFORM_EXT}"
+    TESTS_ARCHIVE="${PRODUCT}-${MCVERSION}.en-US.${PLATFORM}.common.tests.zip"
+fi
+# Simple name builds on >=53.0.0
+if [ "${MAJOR_VERSION}" -ge 53 ] ; then
+    BROWSER_ARCHIVE="target.${PLATFORM_EXT}"
+    TESTS_ARCHIVE="target.common.tests.zip"
+fi
+# End 'remove once 52esr is off support'
+
+# Main pipeline: clean, download/unpack the build artifacts if needed,
+# compare each requested list, then commit and submit whatever changed.
+preflight_cleanup
+if [ "${DO_HSTS}" == "true" ] || [ "${DO_HPKP}" == "true" ] || [ "${DO_PRELOAD_PINSET}" == "true" ]
+then
+  if [ "${USE_TC}" == "true" ]; then
+    download_shared_artifacts_from_tc
+  else
+    download_shared_artifacts_from_ftp
+  fi
+  unpack_artifacts
+fi
+
+# Each compare_* function returns 0 exactly when an update is needed.
+if [ "${DO_HSTS}" == "true" ]; then
+  if compare_hsts_files
+  then
+    HSTS_UPDATED=true
+  fi
+fi
+if [ "${DO_HPKP}" == "true" ]; then
+  if compare_hpkp_files
+  then
+    HPKP_UPDATED=true
+  fi
+fi
+if [ "${DO_BLOCKLIST}" == "true" ]; then
+  if compare_blocklist_files
+  then
+    BLOCKLIST_UPDATED=true
+  fi
+fi
+
+if [ "${HSTS_UPDATED}" == "false" ] && [ "${HPKP_UPDATED}" == "false" ] && [ "${BLOCKLIST_UPDATED}" == "false" ]; then
+  echo "INFO: no updates required. Exiting."
+  exit 0
+else
+  if [ "${DRY_RUN}" == "true" ]; then
+    echo "INFO: Updates are available, not updating hg in dry-run mode."
+    exit 2
+  fi
+fi
+
+# Currently less reliable than regular 'hg'
+# clone_build_tools
+
+clone_repo
+
+MUST_PUSH=false
+if [ "${HSTS_UPDATED}" == "true" ]
+then
+  commit_hsts_files
+  MUST_PUSH=true
+fi
+
+if [ "${HPKP_UPDATED}" == "true" ]
+then
+  commit_hpkp_files
+  MUST_PUSH=true
+fi
+
+if [ "${BLOCKLIST_UPDATED}" == "true" ]
+then
+  commit_blocklist_files
+  MUST_PUSH=true
+fi
+
+# Bug fix: MUST_PUSH holds the string "true"/"false", so testing it with
+# [ -n "${MUST_PUSH}" ] was always true and push_repo ran unconditionally.
+# Compare against "true" like every other flag in this script.
+if [ "${MUST_PUSH}" == "true" ]
+then
+  push_repo
+fi
+
+echo "All done"