Bug 1210296 part 2 - Update tests. r?markh,tcsc,kitcambridge draft
author: Edouard Oger <eoger@fastmail.com>
Mon, 05 Jun 2017 18:49:43 -0400
changeset 607144 2121f9ada3417bb110c302258ba0d9f766a2416e
parent 607143 c03be2c8b9c0485f50b817ee91a9650ba942ea1c
child 607145 09cc5e18142a510957fdff0b6ff34f910daf5fe2
push id: 67903
push user: bmo:eoger@fastmail.com
push date: Tue, 11 Jul 2017 22:05:40 +0000
reviewers: markh, tcsc, kitcambridge
bugs: 1210296
milestone: 56.0a1
Bug 1210296 part 2 - Update tests. r?markh,tcsc,kitcambridge MozReview-Commit-ID: 41dnyvdlNJv
services/sync/modules-testing/fakeservices.js
services/sync/modules-testing/rotaryengine.js
services/sync/tests/unit/head_errorhandler_common.js
services/sync/tests/unit/head_helpers.js
services/sync/tests/unit/test_addons_engine.js
services/sync/tests/unit/test_addons_reconciler.js
services/sync/tests/unit/test_addons_store.js
services/sync/tests/unit/test_addons_tracker.js
services/sync/tests/unit/test_bookmark_batch_fail.js
services/sync/tests/unit/test_bookmark_decline_undecline.js
services/sync/tests/unit/test_bookmark_duping.js
services/sync/tests/unit/test_bookmark_engine.js
services/sync/tests/unit/test_bookmark_invalid.js
services/sync/tests/unit/test_bookmark_livemarks.js
services/sync/tests/unit/test_bookmark_order.js
services/sync/tests/unit/test_bookmark_places_query_rewriting.js
services/sync/tests/unit/test_bookmark_repair.js
services/sync/tests/unit/test_bookmark_repair_requestor.js
services/sync/tests/unit/test_bookmark_repair_responder.js
services/sync/tests/unit/test_bookmark_smart_bookmarks.js
services/sync/tests/unit/test_bookmark_store.js
services/sync/tests/unit/test_bookmark_tracker.js
services/sync/tests/unit/test_clients_engine.js
services/sync/tests/unit/test_clients_escape.js
services/sync/tests/unit/test_collections_recovery.js
services/sync/tests/unit/test_corrupt_keys.js
services/sync/tests/unit/test_declined.js
services/sync/tests/unit/test_doctor.js
services/sync/tests/unit/test_engine.js
services/sync/tests/unit/test_engine_abort.js
services/sync/tests/unit/test_engine_changes_during_sync.js
services/sync/tests/unit/test_enginemanager.js
services/sync/tests/unit/test_errorhandler_1.js
services/sync/tests/unit/test_errorhandler_2.js
services/sync/tests/unit/test_errorhandler_eol.js
services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
services/sync/tests/unit/test_extension_storage_engine.js
services/sync/tests/unit/test_extension_storage_tracker.js
services/sync/tests/unit/test_form_validator.js
services/sync/tests/unit/test_forms_store.js
services/sync/tests/unit/test_forms_tracker.js
services/sync/tests/unit/test_fxa_node_reassignment.js
services/sync/tests/unit/test_history_store.js
services/sync/tests/unit/test_history_tracker.js
services/sync/tests/unit/test_hmac_error.js
services/sync/tests/unit/test_interval_triggers.js
services/sync/tests/unit/test_node_reassignment.js
services/sync/tests/unit/test_password_engine.js
services/sync/tests/unit/test_password_store.js
services/sync/tests/unit/test_password_tracker.js
services/sync/tests/unit/test_password_validator.js
services/sync/tests/unit/test_places_guid_downgrade.js
services/sync/tests/unit/test_postqueue.js
services/sync/tests/unit/test_prefs_store.js
services/sync/tests/unit/test_prefs_tracker.js
services/sync/tests/unit/test_resource_ua.js
services/sync/tests/unit/test_score_triggers.js
services/sync/tests/unit/test_service_detect_upgrade.js
services/sync/tests/unit/test_service_login.js
services/sync/tests/unit/test_service_startOver.js
services/sync/tests/unit/test_service_startup.js
services/sync/tests/unit/test_service_sync_401.js
services/sync/tests/unit/test_service_sync_locked.js
services/sync/tests/unit/test_service_sync_remoteSetup.js
services/sync/tests/unit/test_service_sync_specified.js
services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
services/sync/tests/unit/test_service_verifyLogin.js
services/sync/tests/unit/test_service_wipeClient.js
services/sync/tests/unit/test_service_wipeServer.js
services/sync/tests/unit/test_syncedtabs.js
services/sync/tests/unit/test_syncengine.js
services/sync/tests/unit/test_syncengine_sync.js
services/sync/tests/unit/test_syncscheduler.js
services/sync/tests/unit/test_tab_engine.js
services/sync/tests/unit/test_tab_store.js
services/sync/tests/unit/test_tab_tracker.js
services/sync/tests/unit/test_telemetry.js
services/sync/tests/unit/test_utils_catch.js
services/sync/tests/unit/test_utils_json.js
services/sync/tests/unit/test_utils_lock.js
services/sync/tests/unit/test_utils_notify.js
services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm
services/sync/tps/extensions/tps/resource/modules/addons.jsm
services/sync/tps/extensions/tps/resource/tps.jsm
--- a/services/sync/modules-testing/fakeservices.js
+++ b/services/sync/modules-testing/fakeservices.js
@@ -30,35 +30,28 @@ this.FakeFilesystemService = function Fa
   // always throw exceptions when the real ones do. Anyway...)
   for (let name of ["jsonSave", "jsonLoad", "jsonMove", "jsonRemove"]) {
     let origName = "_real_" + name;
     if (!Utils[origName]) {
       Utils[origName] = Utils[name];
     }
   }
 
-  Utils.jsonSave = function jsonSave(filePath, that, obj, callback) {
+  Utils.jsonSave = async function jsonSave(filePath, that, obj) {
     let json = typeof obj == "function" ? obj.call(that) : obj;
     self.fakeContents["weave/" + filePath + ".json"] = JSON.stringify(json);
-    if (callback) {
-      callback.call(that);
-    }
-    return Promise.resolve();
   };
 
-  Utils.jsonLoad = function jsonLoad(filePath, that, cb) {
+  Utils.jsonLoad = async function jsonLoad(filePath, that) {
     let obj;
     let json = self.fakeContents["weave/" + filePath + ".json"];
     if (json) {
       obj = JSON.parse(json);
     }
-    if (cb) {
-      cb.call(that, obj);
-    }
-    return Promise.resolve(obj);
+    return obj;
   };
 
   Utils.jsonMove = function jsonMove(aFrom, aTo, that) {
     const fromPath = "weave/" + aFrom + ".json";
     self.fakeContents["weave/" + aTo + ".json"] = self.fakeContents[fromPath];
     delete self.fakeContents[fromPath];
     return Promise.resolve();
   };
--- a/services/sync/modules-testing/rotaryengine.js
+++ b/services/sync/modules-testing/rotaryengine.js
@@ -34,61 +34,61 @@ Utils.deferGetSet(RotaryRecord, "clearte
 
 this.RotaryStore = function RotaryStore(name, engine) {
   Store.call(this, name, engine);
   this.items = {};
 }
 RotaryStore.prototype = {
   __proto__: Store.prototype,
 
-  create: function create(record) {
+  async create(record) {
     this.items[record.id] = record.denomination;
   },
 
-  remove: function remove(record) {
+  async remove(record) {
     delete this.items[record.id];
   },
 
-  update: function update(record) {
+  async update(record) {
     this.items[record.id] = record.denomination;
   },
 
-  itemExists: function itemExists(id) {
+  async itemExists(id) {
     return (id in this.items);
   },
 
-  createRecord: function createRecord(id, collection) {
+  async createRecord(id, collection) {
     let record = new RotaryRecord(collection, id);
 
     if (!(id in this.items)) {
       record.deleted = true;
       return record;
     }
 
     record.denomination = this.items[id] || "Data for new record: " + id;
     return record;
   },
 
-  changeItemID: function changeItemID(oldID, newID) {
+  async changeItemID(oldID, newID) {
     if (oldID in this.items) {
       this.items[newID] = this.items[oldID];
     }
 
     delete this.items[oldID];
   },
 
-  getAllIDs: function getAllIDs() {
+  async getAllIDs() {
     let ids = {};
     for (let id in this.items) {
       ids[id] = true;
     }
     return ids;
   },
 
-  wipe: function wipe() {
+  async wipe() {
     this.items = {};
   }
 };
 
 this.RotaryTracker = function RotaryTracker(name, engine) {
   Tracker.call(this, name, engine);
 }
 RotaryTracker.prototype = {
@@ -104,18 +104,18 @@ this.RotaryEngine = function RotaryEngin
   this.previousFailed = [];
 }
 RotaryEngine.prototype = {
   __proto__: SyncEngine.prototype,
   _storeObj: RotaryStore,
   _trackerObj: RotaryTracker,
   _recordObj: RotaryRecord,
 
-  _findDupe: function _findDupe(item) {
-    // This is a semaphore used for testing proper reconciling on dupe
+  async _findDupe(item) {
+    // This is a Special Value® used for testing proper reconciling on dupe
     // detection.
     if (item.id == "DUPE_INCOMING") {
       return "DUPE_LOCAL";
     }
 
     for (let [id, value] of Object.entries(this._store.items)) {
       if (item.denomination == value) {
         return id;
--- a/services/sync/tests/unit/head_errorhandler_common.js
+++ b/services/sync/tests/unit/head_errorhandler_common.js
@@ -20,17 +20,17 @@ const EHTestsCommon = {
 
   service_unavailable(request, response) {
     let body = "Service Unavailable";
     response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
     response.setHeader("Retry-After", "42");
     response.bodyOutputStream.write(body, body.length);
   },
 
-  sync_httpd_setup() {
+  async sync_httpd_setup() {
     let global = new ServerWBO("global", {
       syncID: Service.syncID,
       storageVersion: STORAGE_VERSION,
       engines: {clients: {version: Service.clientsEngine.version,
                           syncID: Service.clientsEngine.syncID},
                 catapult: {version: Service.engineManager.get("catapult").version,
                            syncID: Service.engineManager.get("catapult").syncID}}
     });
@@ -78,17 +78,17 @@ const EHTestsCommon = {
 
   CatapultEngine: (function() {
     function CatapultEngine() {
       SyncEngine.call(this, "Catapult", Service);
     }
     CatapultEngine.prototype = {
       __proto__: SyncEngine.prototype,
       exception: null, // tests fill this in
-      _sync: function _sync() {
+      async _sync() {
         if (this.exception) {
           throw this.exception;
         }
       }
     };
 
     return CatapultEngine;
   }()),
--- a/services/sync/tests/unit/head_helpers.js
+++ b/services/sync/tests/unit/head_helpers.js
@@ -13,16 +13,23 @@
 Cu.import("resource://services-common/async.js");
 Cu.import("resource://testing-common/services/common/utils.js");
 Cu.import("resource://testing-common/PlacesTestUtils.jsm");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 Cu.import("resource://gre/modules/PlacesUtils.jsm");
 Cu.import("resource://gre/modules/ObjectUtils.jsm");
 
+add_task(async function head_setup() {
+  // If a test imports Service, make sure it is initialized first.
+  if (this.Service) {
+    await this.Service.promiseInitialized;
+  }
+});
+
 // ================================================
 // Load mocking/stubbing library, sinon
 // docs: http://sinonjs.org/releases/v2.3.2/
 Cu.import("resource://gre/modules/Timer.jsm");
 const {Loader} = Cu.import("resource://gre/modules/commonjs/toolkit/loader.js", {});
 const loader = new Loader.Loader({
   paths: {
     "": "resource://testing-common/",
@@ -379,17 +386,17 @@ function wait_for_pings(expectedPings) {
         resolve(pings);
       }
     };
   });
 }
 
 async function wait_for_ping(callback, allowErrorPings, getFullPing = false) {
   let pingsPromise = wait_for_pings(1);
-  callback();
+  await callback();
   let [record] = await pingsPromise;
   if (allowErrorPings) {
     assert_valid_ping(record);
   } else {
     assert_success_ping(record);
   }
   if (getFullPing) {
     return record;
@@ -402,37 +409,37 @@ async function wait_for_ping(callback, a
 function sync_and_validate_telem(allowErrorPings, getFullPing = false) {
   return wait_for_ping(() => Service.sync(), allowErrorPings, getFullPing);
 }
 
 // Used for the (many) cases where we do a 'partial' sync, where only a single
 // engine is actually synced, but we still want to ensure we're generating a
 // valid ping. Returns a promise that resolves to the ping, or rejects with the
 // thrown error after calling an optional callback.
-function sync_engine_and_validate_telem(engine, allowErrorPings, onError) {
-  return new Promise((resolve, reject) => {
-    let telem = get_sync_test_telemetry();
-    let caughtError = null;
-    // Clear out status, so failures from previous syncs won't show up in the
-    // telemetry ping.
-    let ns = {};
-    Cu.import("resource://services-sync/status.js", ns);
-    ns.Status._engines = {};
-    ns.Status.partial = false;
-    // Ideally we'd clear these out like we do with engines, (probably via
-    // Status.resetSync()), but this causes *numerous* tests to fail, so we just
-    // assume that if no failureReason or engine failures are set, and the
-    // status properties are the same as they were initially, that it's just
-    // a leftover.
-    // This is only an issue since we're triggering the sync of just one engine,
-    // without doing any other parts of the sync.
-    let initialServiceStatus = ns.Status._service;
-    let initialSyncStatus = ns.Status._sync;
+async function sync_engine_and_validate_telem(engine, allowErrorPings, onError) {
+  let telem = get_sync_test_telemetry();
+  let caughtError = null;
+  // Clear out status, so failures from previous syncs won't show up in the
+  // telemetry ping.
+  let ns = {};
+  Cu.import("resource://services-sync/status.js", ns);
+  ns.Status._engines = {};
+  ns.Status.partial = false;
+  // Ideally we'd clear these out like we do with engines, (probably via
+  // Status.resetSync()), but this causes *numerous* tests to fail, so we just
+  // assume that if no failureReason or engine failures are set, and the
+  // status properties are the same as they were initially, that it's just
+  // a leftover.
+  // This is only an issue since we're triggering the sync of just one engine,
+  // without doing any other parts of the sync.
+  let initialServiceStatus = ns.Status._service;
+  let initialSyncStatus = ns.Status._sync;
 
-    let oldSubmit = telem.submit;
+  let oldSubmit = telem.submit;
+  let submitPromise = new Promise((resolve, reject) => {
     telem.submit = function(ping) {
       telem.submit = oldSubmit;
       ping.syncs.forEach(record => {
         if (record && record.status) {
           // did we see anything to lead us to believe that something bad actually happened
           let realProblem = record.failureReason || record.engines.some(e => {
             if (e.failureReason || e.status) {
               return true;
@@ -467,37 +474,38 @@ function sync_engine_and_validate_telem(
         if (onError) {
           onError(ping.syncs[0], ping);
         }
         reject(caughtError);
       } else {
         resolve(ping.syncs[0]);
       }
     }
-    // neuter the scheduler as it interacts badly with some of the tests - the
-    // engine being synced usually isn't the registered engine, so we see
-    // scored incremented and not removed, which schedules unexpected syncs.
-    let oldObserve = Service.scheduler.observe;
-    Service.scheduler.observe = () => {};
+  });
+  // neuter the scheduler as it interacts badly with some of the tests - the
+  // engine being synced usually isn't the registered engine, so we see
+  // scored incremented and not removed, which schedules unexpected syncs.
+  let oldObserve = Service.scheduler.observe;
+  Service.scheduler.observe = () => {};
+  try {
+    Svc.Obs.notify("weave:service:sync:start");
     try {
-      Svc.Obs.notify("weave:service:sync:start");
-      try {
-        engine.sync();
-      } catch (e) {
-        caughtError = e;
-      }
-      if (caughtError) {
-        Svc.Obs.notify("weave:service:sync:error", caughtError);
-      } else {
-        Svc.Obs.notify("weave:service:sync:finish");
-      }
-    } finally {
-      Service.scheduler.observe = oldObserve;
+      await engine.sync();
+    } catch (e) {
+      caughtError = e;
     }
-  });
+    if (caughtError) {
+      Svc.Obs.notify("weave:service:sync:error", caughtError);
+    } else {
+      Svc.Obs.notify("weave:service:sync:finish");
+    }
+  } finally {
+    Service.scheduler.observe = oldObserve;
+  }
+  return submitPromise;
 }
 
 // Returns a promise that resolves once the specified observer notification
 // has fired.
 function promiseOneObserver(topic, callback) {
   return new Promise((resolve, reject) => {
     let observer = function(subject, data) {
       Svc.Obs.remove(topic, observer);
@@ -505,37 +513,31 @@ function promiseOneObserver(topic, callb
     }
     Svc.Obs.add(topic, observer)
   });
 }
 
 function promiseStopServer(server) {
   return new Promise(resolve => server.stop(resolve));
 }
-
-function promiseNextTick() {
-  return new Promise(resolve => {
-    Utils.nextTick(resolve);
-  });
-}
 // Avoid an issue where `client.name2` containing unicode characters causes
 // a number of tests to fail, due to them assuming that we do not need to utf-8
 // encode or decode data sent through the mocked server (see bug 1268912).
 // We stash away the original implementation so test_utils_misc.js can test it.
 Utils._orig_getDefaultDeviceName = Utils.getDefaultDeviceName;
 Utils.getDefaultDeviceName = function() {
   return "Test device name";
 };
 
-function registerRotaryEngine() {
+async function registerRotaryEngine() {
   let {RotaryEngine} =
     Cu.import("resource://testing-common/services/sync/rotaryengine.js", {});
   Service.engineManager.clear();
 
-  Service.engineManager.register(RotaryEngine);
+  await Service.engineManager.register(RotaryEngine);
   let engine = Service.engineManager.get("rotary");
   engine.enabled = true;
 
   return { engine, tracker: engine._tracker };
 }
 
 // Set the validation prefs to attempt validation every time to avoid non-determinism.
 function enableValidationPrefs() {
--- a/services/sync/tests/unit/test_addons_engine.js
+++ b/services/sync/tests/unit/test_addons_engine.js
@@ -8,120 +8,138 @@ Cu.import("resource://gre/modules/Prefer
 Cu.import("resource://gre/modules/Services.jsm");
 Cu.import("resource://services-common/async.js");
 Cu.import("resource://services-sync/addonsreconciler.js");
 Cu.import("resource://services-sync/engines/addons.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-var prefs = new Preferences();
+const prefs = new Preferences();
 prefs.set("extensions.getAddons.get.url",
           "http://localhost:8888/search/guid:%IDS%");
 prefs.set("extensions.install.requireSecureOrigin", false);
 
-loadAddonTestFunctions();
-startupManager();
-
-var engineManager = Service.engineManager;
+let engine;
+let reconciler;
+let tracker;
 
-engineManager.register(AddonsEngine);
-var engine = engineManager.get("addons");
-var reconciler = engine._reconciler;
-var tracker = engine._tracker;
-
-function advance_test() {
+async function resetReconciler() {
   reconciler._addons = {};
   reconciler._changes = [];
 
-  let cb = Async.makeSpinningCallback();
-  reconciler.saveState(null, cb);
-  cb.wait();
+  await reconciler.saveState();
 
   tracker.clearChangedIDs();
-  run_next_test();
 }
 
+add_task(async function setup() {
+  initTestLogging("Trace");
+  Log.repository.getLogger("Sync.Engine.Addons").level =
+    Log.Level.Trace;
+  Log.repository.getLogger("Sync.Store.Addons").level = Log.Level.Trace;
+  Log.repository.getLogger("Sync.Tracker.Addons").level =
+    Log.Level.Trace;
+  Log.repository.getLogger("Sync.AddonsRepository").level =
+    Log.Level.Trace;
+
+  loadAddonTestFunctions();
+  startupManager();
+
+  await Service.engineManager.register(AddonsEngine);
+  engine = Service.engineManager.get("addons");
+  reconciler = engine._reconciler;
+  tracker = engine._tracker;
+
+  reconciler.startListening();
+
+  // Don't flush to disk in the middle of an event listener!
+  // This causes test hangs on WinXP.
+  reconciler._shouldPersist = false;
+
+  await resetReconciler();
+});
+
 // This is a basic sanity test for the unit test itself. If this breaks, the
 // add-ons API likely changed upstream.
-add_test(function test_addon_install() {
+add_task(async function test_addon_install() {
   _("Ensure basic add-on APIs work as expected.");
 
   let install = getAddonInstall("test_bootstrap1_1");
   do_check_neq(install, null);
   do_check_eq(install.type, "extension");
   do_check_eq(install.name, "Test Bootstrap 1");
 
-  advance_test();
+  await resetReconciler();
 });
 
-add_test(function test_find_dupe() {
+add_task(async function test_find_dupe() {
   _("Ensure the _findDupe() implementation is sane.");
 
   // This gets invoked at the top of sync, which is bypassed by this
   // test, so we do it manually.
-  engine._refreshReconcilerState();
+  await engine._refreshReconcilerState();
 
   let addon = installAddon("test_bootstrap1_1");
 
   let record = {
     id:            Utils.makeGUID(),
     addonID:       addon.id,
     enabled:       true,
     applicationID: Services.appinfo.ID,
     source:        "amo"
   };
 
-  let dupe = engine._findDupe(record);
+  let dupe = await engine._findDupe(record);
   do_check_eq(addon.syncGUID, dupe);
 
   record.id = addon.syncGUID;
-  dupe = engine._findDupe(record);
+  dupe = await engine._findDupe(record);
   do_check_eq(null, dupe);
 
   uninstallAddon(addon);
-  advance_test();
+  await resetReconciler();
 });
 
-add_test(function test_get_changed_ids() {
+add_task(async function test_get_changed_ids() {
   _("Ensure getChangedIDs() has the appropriate behavior.");
 
   _("Ensure getChangedIDs() returns an empty object by default.");
-  let changes = engine.getChangedIDs();
+  let changes = await engine.getChangedIDs();
   do_check_eq("object", typeof(changes));
   do_check_eq(0, Object.keys(changes).length);
 
   _("Ensure tracker changes are populated.");
   let now = new Date();
   let changeTime = now.getTime() / 1000;
   let guid1 = Utils.makeGUID();
   tracker.addChangedID(guid1, changeTime);
 
-  changes = engine.getChangedIDs();
+  changes = await engine.getChangedIDs();
   do_check_eq("object", typeof(changes));
   do_check_eq(1, Object.keys(changes).length);
   do_check_true(guid1 in changes);
   do_check_eq(changeTime, changes[guid1]);
 
   tracker.clearChangedIDs();
 
   _("Ensure reconciler changes are populated.");
   let addon = installAddon("test_bootstrap1_1");
   tracker.clearChangedIDs(); // Just in case.
-  changes = engine.getChangedIDs();
+  changes = await engine.getChangedIDs();
   do_check_eq("object", typeof(changes));
   do_check_eq(1, Object.keys(changes).length);
   do_check_true(addon.syncGUID in changes);
   _("Change time: " + changeTime + ", addon change: " + changes[addon.syncGUID]);
   do_check_true(changes[addon.syncGUID] >= changeTime);
 
   let oldTime = changes[addon.syncGUID];
   let guid2 = addon.syncGUID;
   uninstallAddon(addon);
-  changes = engine.getChangedIDs();
+  changes = await engine.getChangedIDs();
   do_check_eq(1, Object.keys(changes).length);
   do_check_true(guid2 in changes);
   do_check_true(changes[guid2] > oldTime);
 
   _("Ensure non-syncable add-ons aren't picked up by reconciler changes.");
   reconciler._addons  = {};
   reconciler._changes = [];
   let record = {
@@ -132,21 +150,21 @@ add_test(function test_get_changed_ids()
     modified:       new Date(),
     type:           "UNSUPPORTED",
     scope:          0,
     foreignInstall: false
   };
   reconciler.addons["DUMMY"] = record;
   reconciler._addChange(record.modified, CHANGE_INSTALLED, record);
 
-  changes = engine.getChangedIDs();
+  changes = await engine.getChangedIDs();
   _(JSON.stringify(changes));
   do_check_eq(0, Object.keys(changes).length);
 
-  advance_test();
+  await resetReconciler();
 });
 
 add_task(async function test_disabled_install_semantics() {
   _("Ensure that syncing a disabled add-on preserves proper state.");
 
   // This is essentially a test for bug 712542, which snuck into the original
   // add-on sync drop. It ensures that when an add-on is installed that the
   // disabled state and incoming syncGUID is preserved, even on the next sync.
@@ -189,63 +207,44 @@ add_task(async function test_disabled_in
     enabled:       false,
     deleted:       false,
     source:        "amo",
   });
   let wbo = new ServerWBO(id, record, now - 2);
   server.insertWBO(USER, "addons", wbo);
 
   _("Performing sync of add-ons engine.");
-  engine._sync();
+  await engine._sync();
 
   // At this point the non-restartless extension should be staged for install.
 
   // Don't need this server any more.
   await promiseStopServer(amoServer);
 
   // We ensure the reconciler has recorded the proper ID and enabled state.
   let addon = reconciler.getAddonStateFromSyncGUID(id);
   do_check_neq(null, addon);
   do_check_eq(false, addon.enabled);
 
   // We fake an app restart and perform another sync, just to make sure things
   // are sane.
   restartManager();
 
-  engine._sync();
+  await engine._sync();
 
   // The client should not upload a new record. The old record should be
   // retained and unmodified.
   let collection = server.getCollection(USER, "addons");
   do_check_eq(1, collection.count());
 
   let payload = collection.payloads()[0];
   do_check_neq(null, collection.wbo(id));
   do_check_eq(ADDON_ID, payload.addonID);
   do_check_false(payload.enabled);
 
-  promiseStopServer(server);
+  await promiseStopServer(server);
 });
 
 add_test(function cleanup() {
   // There's an xpcom-shutdown hook for this, but let's give this a shot.
   reconciler.stopListening();
   run_next_test();
 });
-
-function run_test() {
-  initTestLogging("Trace");
-  Log.repository.getLogger("Sync.Engine.Addons").level =
-    Log.Level.Trace;
-  Log.repository.getLogger("Sync.Store.Addons").level = Log.Level.Trace;
-  Log.repository.getLogger("Sync.Tracker.Addons").level =
-    Log.Level.Trace;
-  Log.repository.getLogger("Sync.AddonsRepository").level =
-    Log.Level.Trace;
-
-  reconciler.startListening();
-
-  // Don't flush to disk in the middle of an event listener!
-  // This causes test hangs on WinXP.
-  reconciler._shouldPersist = false;
-
-  advance_test();
-}
--- a/services/sync/tests/unit/test_addons_reconciler.js
+++ b/services/sync/tests/unit/test_addons_reconciler.js
@@ -7,63 +7,58 @@ Cu.import("resource://gre/modules/AddonM
 Cu.import("resource://services-sync/addonsreconciler.js");
 Cu.import("resource://services-sync/engines/addons.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
 loadAddonTestFunctions();
 startupManager();
 
-function run_test() {
+add_task(async function run_test() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.AddonsReconciler").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.AddonsReconciler").level =
     Log.Level.Trace;
 
   Svc.Prefs.set("engine.addons", true);
-  Service.engineManager.register(AddonsEngine);
+  await Service.engineManager.register(AddonsEngine);
+});
 
-  run_next_test();
-}
-
-add_test(function test_defaults() {
+add_task(async function test_defaults() {
   _("Ensure new objects have reasonable defaults.");
 
   let reconciler = new AddonsReconciler();
+  await reconciler.ensureStateLoaded();
 
   do_check_false(reconciler._listening);
   do_check_eq("object", typeof(reconciler.addons));
   do_check_eq(0, Object.keys(reconciler.addons).length);
   do_check_eq(0, reconciler._changes.length);
   do_check_eq(0, reconciler._listeners.length);
-
-  run_next_test();
 });
 
-add_test(function test_load_state_empty_file() {
+add_task(async function test_load_state_empty_file() {
   _("Ensure loading from a missing file results in defaults being set.");
 
   let reconciler = new AddonsReconciler();
-
-  reconciler.loadState(null, function(error, loaded) {
-    do_check_eq(null, error);
-    do_check_false(loaded);
+  await reconciler.ensureStateLoaded();
 
-    do_check_eq("object", typeof(reconciler.addons));
-    do_check_eq(0, Object.keys(reconciler.addons).length);
-    do_check_eq(0, reconciler._changes.length);
+  let loaded = await reconciler.loadState();
+  do_check_false(loaded);
 
-    run_next_test();
-  });
+  do_check_eq("object", typeof(reconciler.addons));
+  do_check_eq(0, Object.keys(reconciler.addons).length);
+  do_check_eq(0, reconciler._changes.length);
 });
 
-add_test(function test_install_detection() {
+add_task(async function test_install_detection() {
   _("Ensure that add-on installation results in appropriate side-effects.");
 
   let reconciler = new AddonsReconciler();
+  await reconciler.ensureStateLoaded();
   reconciler.startListening();
 
   let before = new Date();
   let addon = installAddon("test_bootstrap1_1");
   let after = new Date();
 
   do_check_eq(1, Object.keys(reconciler.addons).length);
   do_check_true(addon.id in reconciler.addons);
@@ -86,24 +81,23 @@ add_test(function test_install_detection
 
   do_check_eq(1, reconciler._changes.length);
   let change = reconciler._changes[0];
   do_check_true(change[0] >= before && change[1] <= after);
   do_check_eq(CHANGE_INSTALLED, change[1]);
   do_check_eq(addon.id, change[2]);
 
   uninstallAddon(addon);
-
-  run_next_test();
 });
 
-add_test(function test_uninstall_detection() {
+add_task(async function test_uninstall_detection() {
   _("Ensure that add-on uninstallation results in appropriate side-effects.");
 
   let reconciler = new AddonsReconciler();
+  await reconciler.ensureStateLoaded();
   reconciler.startListening();
 
   reconciler._addons = {};
   reconciler._changes = [];
 
   let addon = installAddon("test_bootstrap1_1");
   let id = addon.id;
 
@@ -115,53 +109,46 @@ add_test(function test_uninstall_detecti
 
   let record = reconciler.addons[id];
   do_check_false(record.installed);
 
   do_check_eq(1, reconciler._changes.length);
   let change = reconciler._changes[0];
   do_check_eq(CHANGE_UNINSTALLED, change[1]);
   do_check_eq(id, change[2]);
-
-  run_next_test();
 });
 
-add_test(function test_load_state_future_version() {
+add_task(async function test_load_state_future_version() {
   _("Ensure loading a file from a future version results in no data loaded.");
 
   const FILENAME = "TEST_LOAD_STATE_FUTURE_VERSION";
 
   let reconciler = new AddonsReconciler();
+  await reconciler.ensureStateLoaded();
 
   // First we populate our new file.
   let state = {version: 100, addons: {foo: {}}, changes: [[1, 1, "foo"]]};
-  let cb = Async.makeSyncCallback();
 
   // jsonSave() expects an object with ._log, so we give it a reconciler
   // instance.
-  Utils.jsonSave(FILENAME, reconciler, state, cb);
-  Async.waitForSyncCallback(cb);
-
-  reconciler.loadState(FILENAME, function(error, loaded) {
-    do_check_eq(null, error);
-    do_check_false(loaded);
+  await Utils.jsonSave(FILENAME, reconciler, state);
 
-    do_check_eq("object", typeof(reconciler.addons));
-    do_check_eq(1, Object.keys(reconciler.addons).length);
-    do_check_eq(1, reconciler._changes.length);
+  let loaded = await reconciler.loadState(FILENAME);
+  do_check_false(loaded);
 
-    run_next_test();
-  });
+  do_check_eq("object", typeof(reconciler.addons));
+  do_check_eq(0, Object.keys(reconciler.addons).length);
+  do_check_eq(0, reconciler._changes.length);
 });
 
-add_test(function test_prune_changes_before_date() {
+add_task(async function test_prune_changes_before_date() {
   _("Ensure that old changes are pruned properly.");
 
   let reconciler = new AddonsReconciler();
-  reconciler._ensureStateLoaded();
+  await reconciler.ensureStateLoaded();
   reconciler._changes = [];
 
   let now = new Date();
   const HOUR_MS = 1000 * 60 * 60;
 
   _("Ensure pruning an empty changes array works.");
   reconciler.pruneChangesBeforeDate(now);
   do_check_eq(0, reconciler._changes.length);
@@ -184,11 +171,9 @@ add_test(function test_prune_changes_bef
   do_check_neq(undefined, reconciler._changes[0]);
   do_check_eq(young, reconciler._changes[0][0]);
   do_check_eq("bar", reconciler._changes[0][2]);
 
   _("Ensure pruning all changes works.");
   reconciler._changes.push([old, CHANGE_INSTALLED, "foo"]);
   reconciler.pruneChangesBeforeDate(now);
   do_check_eq(0, reconciler._changes.length);
-
-  run_next_test();
 });
--- a/services/sync/tests/unit/test_addons_store.js
+++ b/services/sync/tests/unit/test_addons_store.js
@@ -9,17 +9,17 @@ Cu.import("resource://services-sync/addo
 Cu.import("resource://services-sync/engines/addons.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://gre/modules/FileUtils.jsm");
 
 const HTTP_PORT = 8888;
 
-var prefs = new Preferences();
+const prefs = new Preferences();
 
 prefs.set("extensions.getAddons.get.url", "http://localhost:8888/search/guid:%IDS%");
 prefs.set("extensions.install.requireSecureOrigin", false);
 
 const SYSTEM_ADDON_ID = "system1@tests.mozilla.org";
 let systemAddonFile;
 
 // The system add-on must be installed before AddonManager is started.
@@ -34,21 +34,20 @@ function loadSystemAddon() {
   // used by system add-ons.
   registerDirectory("XREAppFeat", distroDir);
 }
 
 loadAddonTestFunctions();
 loadSystemAddon();
 startupManager();
 
-Service.engineManager.register(AddonsEngine);
-var engine     = Service.engineManager.get("addons");
-var tracker    = engine._tracker;
-var store      = engine._store;
-var reconciler = engine._reconciler;
+let engine;
+let tracker;
+let store;
+let reconciler;
 
 /**
  * Create a AddonsRec for this application with the fields specified.
  *
  * @param  id       Sync GUID of record
  * @param  addonId  ID of add-on
  * @param  enabled  Boolean whether record is enabled
  * @param  deleted  Boolean whether record was deleted
@@ -98,174 +97,167 @@ function createAndStartHTTPServer(port) 
 // it missed.
 function checkReconcilerUpToDate(addon) {
   let stateBefore = Object.assign({}, store.reconciler.addons[addon.id]);
   store.reconciler.rectifyStateFromAddon(addon);
   let stateAfter = store.reconciler.addons[addon.id];
   deepEqual(stateBefore, stateAfter);
 }
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Addons").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.Tracker.Addons").level = Log.Level.Trace;
-  Log.repository.getLogger("Sync.AddonsRepository").level =
-    Log.Level.Trace;
+  Log.repository.getLogger("Sync.AddonsRepository").level = Log.Level.Trace;
+
+  await Service.engineManager.register(AddonsEngine);
+  engine     = Service.engineManager.get("addons");
+  tracker    = engine._tracker;
+  store      = engine._store;
+  reconciler = engine._reconciler;
 
   reconciler.startListening();
 
   // Don't flush to disk in the middle of an event listener!
   // This causes test hangs on WinXP.
   reconciler._shouldPersist = false;
+});
 
-  run_next_test();
-}
-
-add_test(function test_remove() {
+add_task(async function test_remove() {
   _("Ensure removing add-ons from deleted records works.");
 
   let addon = installAddon("test_bootstrap1_1");
   let record = createRecordForThisApp(addon.syncGUID, addon.id, true, true);
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   let newAddon = getAddonFromAddonManagerByID(addon.id);
   do_check_eq(null, newAddon);
-
-  run_next_test();
 });
 
-add_test(function test_apply_enabled() {
+add_task(async function test_apply_enabled() {
   _("Ensures that changes to the userEnabled flag apply.");
 
   let addon = installAddon("test_bootstrap1_1");
   do_check_true(addon.isActive);
   do_check_false(addon.userDisabled);
 
   _("Ensure application of a disable record works as expected.");
   let records = [];
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, false, false));
-  let failed = store.applyIncomingBatch(records);
+  let failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_true(addon.userDisabled);
   checkReconcilerUpToDate(addon);
   records = [];
 
   _("Ensure enable record works as expected.");
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, false));
-  failed = store.applyIncomingBatch(records);
+  failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(addon.userDisabled);
   checkReconcilerUpToDate(addon);
   records = [];
 
   _("Ensure enabled state updates don't apply if the ignore pref is set.");
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, false, false));
   Svc.Prefs.set("addons.ignoreUserEnabledChanges", true);
-  failed = store.applyIncomingBatch(records);
+  failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(addon.userDisabled);
   records = [];
 
   uninstallAddon(addon);
   Svc.Prefs.reset("addons.ignoreUserEnabledChanges");
-  run_next_test();
 });
 
-add_test(function test_apply_enabled_appDisabled() {
+add_task(async function test_apply_enabled_appDisabled() {
   _("Ensures that changes to the userEnabled flag apply when the addon is appDisabled.");
 
   let addon = installAddon("test_install3"); // this addon is appDisabled by default.
   do_check_true(addon.appDisabled);
   do_check_false(addon.isActive);
   do_check_false(addon.userDisabled);
 
   _("Ensure application of a disable record works as expected.");
   store.reconciler.pruneChangesBeforeDate(Date.now() + 10);
   store.reconciler._changes = [];
   let records = [];
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, false, false));
-  let failed = store.applyIncomingBatch(records);
+  let failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_true(addon.userDisabled);
   checkReconcilerUpToDate(addon);
   records = [];
 
   _("Ensure enable record works as expected.");
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, false));
-  failed = store.applyIncomingBatch(records);
+  failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(addon.userDisabled);
   checkReconcilerUpToDate(addon);
   records = [];
 
   uninstallAddon(addon);
-  run_next_test();
 });
 
-add_test(function test_ignore_different_appid() {
+add_task(async function test_ignore_different_appid() {
   _("Ensure that incoming records with a different application ID are ignored.");
 
   // We test by creating a record that should result in an update.
   let addon = installAddon("test_bootstrap1_1");
   do_check_false(addon.userDisabled);
 
   let record = createRecordForThisApp(addon.syncGUID, addon.id, false, false);
   record.applicationID = "FAKE_ID";
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   let newAddon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(newAddon.userDisabled);
 
   uninstallAddon(addon);
-
-  run_next_test();
 });
 
-add_test(function test_ignore_unknown_source() {
+add_task(async function test_ignore_unknown_source() {
   _("Ensure incoming records with unknown source are ignored.");
 
   let addon = installAddon("test_bootstrap1_1");
 
   let record = createRecordForThisApp(addon.syncGUID, addon.id, false, false);
   record.source = "DUMMY_SOURCE";
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   let newAddon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(newAddon.userDisabled);
 
   uninstallAddon(addon);
-
-  run_next_test();
 });
 
-add_test(function test_apply_uninstall() {
+add_task(async function test_apply_uninstall() {
   _("Ensures that uninstalling an add-on from a record works.");
 
   let addon = installAddon("test_bootstrap1_1");
 
   let records = [];
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, true));
-  let failed = store.applyIncomingBatch(records);
+  let failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
 
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_eq(null, addon);
-
-  run_next_test();
 });
 
 add_test(function test_addon_syncability() {
   _("Ensure isAddonSyncable functions properly.");
 
   Svc.Prefs.set("addons.trustedSourceHostnames",
                 "addons.mozilla.org,other.example.com");
 
@@ -375,17 +367,17 @@ add_test(function test_ignore_hotfixes()
   uninstallAddon(addon);
 
   extensionPrefs.reset("hotfix.id");
 
   run_next_test();
 });
 
 
-add_test(function test_get_all_ids() {
+add_task(async function test_get_all_ids() {
   _("Ensures that getAllIDs() returns an appropriate set.");
 
   _("Installing two addons.");
   // XXX - this test seems broken - at this point, before we've installed the
   // addons below, store.getAllIDs() returns all addons installed by previous
   // tests, even though those tests uninstalled the addon.
   // So if any tests above ever add a new addon ID, they are going to need to
   // be added here too.
@@ -394,202 +386,196 @@ add_test(function test_get_all_ids() {
   let addon2 = installAddon("test_bootstrap1_1");
   let addon3 = installAddon("test_install3");
 
   _("Ensure they're syncable.");
   do_check_true(store.isAddonSyncable(addon1));
   do_check_true(store.isAddonSyncable(addon2));
   do_check_true(store.isAddonSyncable(addon3));
 
-  let ids = store.getAllIDs();
+  let ids = await store.getAllIDs();
 
   do_check_eq("object", typeof(ids));
   do_check_eq(3, Object.keys(ids).length);
   do_check_true(addon1.syncGUID in ids);
   do_check_true(addon2.syncGUID in ids);
   do_check_true(addon3.syncGUID in ids);
 
   addon1.install.cancel();
   uninstallAddon(addon2);
   uninstallAddon(addon3);
-
-  run_next_test();
 });
 
-add_test(function test_change_item_id() {
+add_task(async function test_change_item_id() {
   _("Ensures that changeItemID() works properly.");
 
   let addon = installAddon("test_bootstrap1_1");
 
   let oldID = addon.syncGUID;
   let newID = Utils.makeGUID();
 
-  store.changeItemID(oldID, newID);
+  await store.changeItemID(oldID, newID);
 
   let newAddon = getAddonFromAddonManagerByID(addon.id);
   do_check_neq(null, newAddon);
   do_check_eq(newID, newAddon.syncGUID);
 
   uninstallAddon(newAddon);
-
-  run_next_test();
 });
 
-add_test(function test_create() {
+add_task(async function test_create() {
   _("Ensure creating/installing an add-on from a record works.");
 
   let server = createAndStartHTTPServer(HTTP_PORT);
 
   let addon = installAddon("test_bootstrap1_1");
   let id = addon.id;
   uninstallAddon(addon);
 
   let guid = Utils.makeGUID();
   let record = createRecordForThisApp(guid, id, true, false);
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   let newAddon = getAddonFromAddonManagerByID(id);
   do_check_neq(null, newAddon);
   do_check_eq(guid, newAddon.syncGUID);
   do_check_false(newAddon.userDisabled);
 
   uninstallAddon(newAddon);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
-add_test(function test_create_missing_search() {
+add_task(async function test_create_missing_search() {
   _("Ensures that failed add-on searches are handled gracefully.");
 
   let server = createAndStartHTTPServer(HTTP_PORT);
 
   // The handler for this ID is not installed, so a search should 404.
   const id = "missing@tests.mozilla.org";
   let guid = Utils.makeGUID();
   let record = createRecordForThisApp(guid, id, true, false);
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(1, failed.length);
   do_check_eq(guid, failed[0]);
 
   let addon = getAddonFromAddonManagerByID(id);
   do_check_eq(null, addon);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
-add_test(function test_create_bad_install() {
+add_task(async function test_create_bad_install() {
   _("Ensures that add-ons without a valid install are handled gracefully.");
 
   let server = createAndStartHTTPServer(HTTP_PORT);
 
   // The handler returns a search result but the XPI will 404.
   const id = "missing-xpi@tests.mozilla.org";
   let guid = Utils.makeGUID();
   let record = createRecordForThisApp(guid, id, true, false);
 
-  /* let failed = */ store.applyIncomingBatch([record]);
+  /* let failed = */ await store.applyIncomingBatch([record]);
   // This addon had no source URI so was skipped - but it's not treated as
   // failure.
   // XXX - this test isn't testing what we thought it was. Previously the addon
   // was not being installed due to requireSecureURL checking *before* we'd
   // attempted to get the XPI.
   // With requireSecureURL disabled we do see a download failure, but the addon
   // *does* get added to |failed|.
   // FTR: onDownloadFailed() is called with ERROR_NETWORK_FAILURE, so it's going
   // to be tricky to distinguish a 404 from other transient network errors
   // where we do want the addon to end up in |failed|.
   // This is being tracked in bug 1284778.
   // do_check_eq(0, failed.length);
 
   let addon = getAddonFromAddonManagerByID(id);
   do_check_eq(null, addon);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
-add_test(function test_ignore_system() {
+add_task(async function test_ignore_system() {
   _("Ensure we ignore system addons");
   // Our system addon should not appear in getAllIDs
-  engine._refreshReconcilerState();
+  await engine._refreshReconcilerState();
   let num = 0;
-  for (let guid in store.getAllIDs()) {
+  let ids = await store.getAllIDs();
+  for (let guid in ids) {
     num += 1;
     let addon = reconciler.getAddonStateFromSyncGUID(guid);
     do_check_neq(addon.id, SYSTEM_ADDON_ID);
   }
-  do_check_true(num > 1, "should have seen at least one.")
-  run_next_test();
+  do_check_true(num > 1, "should have seen at least one.");
 });
 
-add_test(function test_incoming_system() {
+add_task(async function test_incoming_system() {
   _("Ensure we handle incoming records that refer to a system addon");
   // eg, loop initially had a normal addon but it was then "promoted" to be a
   // system addon but wanted to keep the same ID. The server record exists due
   // to this.
 
   // before we start, ensure the system addon isn't disabled.
   do_check_false(getAddonFromAddonManagerByID(SYSTEM_ADDON_ID).userDisabled);
 
   // Now simulate an incoming record with the same ID as the system addon,
   // but flagged as disabled - it should not be applied.
   let server = createAndStartHTTPServer(HTTP_PORT);
   // We make the incoming record flag the system addon as disabled - it should
   // be ignored.
   let guid = Utils.makeGUID();
   let record = createRecordForThisApp(guid, SYSTEM_ADDON_ID, false, false);
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   // The system addon should still not be userDisabled.
   do_check_false(getAddonFromAddonManagerByID(SYSTEM_ADDON_ID).userDisabled);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
-add_test(function test_wipe() {
+add_task(async function test_wipe() {
   _("Ensures that wiping causes add-ons to be uninstalled.");
 
   let addon1 = installAddon("test_bootstrap1_1");
 
-  store.wipe();
+  await store.wipe();
 
   let addon = getAddonFromAddonManagerByID(addon1.id);
   do_check_eq(null, addon);
-
-  run_next_test();
 });
 
-add_test(function test_wipe_and_install() {
+add_task(async function test_wipe_and_install() {
   _("Ensure wipe followed by install works.");
 
   // This tests the reset sync flow where remote data is replaced by local. The
   // receiving client will see a wipe followed by a record which should undo
   // the wipe.
   let installed = installAddon("test_bootstrap1_1");
 
   let record = createRecordForThisApp(installed.syncGUID, installed.id, true,
                                       false);
 
-  store.wipe();
+  await store.wipe();
 
   let deleted = getAddonFromAddonManagerByID(installed.id);
   do_check_null(deleted);
 
   // Re-applying the record can require re-fetching the XPI.
   let server = createAndStartHTTPServer(HTTP_PORT);
 
-  store.applyIncoming(record);
+  await store.applyIncoming(record);
 
   let fetched = getAddonFromAddonManagerByID(record.addonID);
   do_check_true(!!fetched);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
 add_test(function cleanup() {
   // There's an xpcom-shutdown hook for this, but let's give this a shot.
   reconciler.stopListening();
   run_next_test();
 });
--- a/services/sync/tests/unit/test_addons_tracker.js
+++ b/services/sync/tests/unit/test_addons_tracker.js
@@ -8,113 +8,113 @@ Cu.import("resource://services-sync/engi
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
 loadAddonTestFunctions();
 startupManager();
 Svc.Prefs.set("engine.addons", true);
 
-Service.engineManager.register(AddonsEngine);
-var engine     = Service.engineManager.get("addons");
-var reconciler = engine._reconciler;
-var store      = engine._store;
-var tracker    = engine._tracker;
-
-// Don't write out by default.
-tracker.persistChangedIDs = false;
+let engine;
+let reconciler;
+let store;
+let tracker;
 
 const addon1ID = "addon1@tests.mozilla.org";
 
-function cleanup_and_advance() {
+async function cleanup() {
   Svc.Obs.notify("weave:engine:stop-tracking");
   tracker.stopTracking();
 
   tracker.resetScore();
   tracker.clearChangedIDs();
 
   reconciler._addons = {};
   reconciler._changes = [];
-  let cb = Async.makeSpinningCallback();
-  reconciler.saveState(null, cb);
-  cb.wait();
-
-  run_next_test();
+  await reconciler.saveState();
 }
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Addons").level = Log.Level.Trace;
-  Log.repository.getLogger("Sync.AddonsReconciler").level =
-    Log.Level.Trace;
+  Log.repository.getLogger("Sync.AddonsReconciler").level = Log.Level.Trace;
+
+  await Service.engineManager.register(AddonsEngine);
+  engine     = Service.engineManager.get("addons");
+  reconciler = engine._reconciler;
+  store      = engine._store;
+  tracker    = engine._tracker;
 
-  cleanup_and_advance();
-}
+  // Don't write out by default.
+  tracker.persistChangedIDs = false;
 
-add_test(function test_empty() {
+  await cleanup();
+});
+
+add_task(async function test_empty() {
   _("Verify the tracker is empty to start with.");
 
   do_check_eq(0, Object.keys(tracker.changedIDs).length);
   do_check_eq(0, tracker.score);
 
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_not_tracking() {
+add_task(async function test_not_tracking() {
   _("Ensures the tracker doesn't do anything when it isn't tracking.");
 
   let addon = installAddon("test_bootstrap1_1");
   uninstallAddon(addon);
 
   do_check_eq(0, Object.keys(tracker.changedIDs).length);
   do_check_eq(0, tracker.score);
 
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_track_install() {
+add_task(async function test_track_install() {
   _("Ensure that installing an add-on notifies tracker.");
 
   reconciler.startListening();
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   do_check_eq(0, tracker.score);
   let addon = installAddon("test_bootstrap1_1");
   let changed = tracker.changedIDs;
 
   do_check_eq(1, Object.keys(changed).length);
   do_check_true(addon.syncGUID in changed);
   do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
 
   uninstallAddon(addon);
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_track_uninstall() {
+add_task(async function test_track_uninstall() {
   _("Ensure that uninstalling an add-on notifies tracker.");
 
   reconciler.startListening();
 
   let addon = installAddon("test_bootstrap1_1");
   let guid = addon.syncGUID;
   do_check_eq(0, tracker.score);
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   uninstallAddon(addon);
   let changed = tracker.changedIDs;
   do_check_eq(1, Object.keys(changed).length);
   do_check_true(guid in changed);
   do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
 
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_track_user_disable() {
+add_task(async function test_track_user_disable() {
   _("Ensure that tracker sees disabling of add-on");
 
   reconciler.startListening();
 
   let addon = installAddon("test_bootstrap1_1");
   do_check_false(addon.userDisabled);
   do_check_false(addon.appDisabled);
   do_check_true(addon.isActive);
@@ -144,34 +144,34 @@ add_test(function test_track_user_disabl
   Async.waitForSyncCallback(cb);
 
   let changed = tracker.changedIDs;
   do_check_eq(1, Object.keys(changed).length);
   do_check_true(addon.syncGUID in changed);
   do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
 
   uninstallAddon(addon);
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_track_enable() {
+add_task(async function test_track_enable() {
   _("Ensure that enabling a disabled add-on notifies tracker.");
 
   reconciler.startListening();
 
   let addon = installAddon("test_bootstrap1_1");
   addon.userDisabled = true;
-  store._sleep(0);
+  await Async.promiseYield();
 
   do_check_eq(0, tracker.score);
 
   Svc.Obs.notify("weave:engine:start-tracking");
   addon.userDisabled = false;
-  store._sleep(0);
+  await Async.promiseYield();
 
   let changed = tracker.changedIDs;
   do_check_eq(1, Object.keys(changed).length);
   do_check_true(addon.syncGUID in changed);
   do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
 
   uninstallAddon(addon);
-  cleanup_and_advance();
+  await cleanup();
 });
--- a/services/sync/tests/unit/test_bookmark_batch_fail.js
+++ b/services/sync/tests/unit/test_bookmark_batch_fail.js
@@ -1,22 +1,23 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 _("Making sure a failing sync reports a useful error");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 
-function run_test() {
+add_task(async function run_test() {
   let engine = new BookmarksEngine(Service);
-  engine._syncStartup = function() {
+  await engine.initialize();
+  engine._syncStartup = async function() {
     throw "FAIL!";
   };
 
   try {
     _("Try calling the sync that should throw right away");
-    engine._sync();
+    await engine._sync();
     do_throw("Should have failed sync!");
   } catch (ex) {
     _("Making sure what we threw ended up as the exception:", ex);
     do_check_eq(ex, "FAIL!");
   }
-}
+});
--- a/services/sync/tests/unit/test_bookmark_decline_undecline.js
+++ b/services/sync/tests/unit/test_bookmark_decline_undecline.js
@@ -6,19 +6,20 @@ Cu.import("resource://gre/modules/Bookma
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-initTestLogging("Trace");
-
-Service.engineManager.register(BookmarksEngine);
+add_task(async function setup() {
+  initTestLogging("Trace");
+  await Service.engineManager.register(BookmarksEngine);
+});
 
 // A stored reference to the collection won't be valid after disabling.
 function getBookmarkWBO(server, guid) {
   let coll = server.user("foo").collection("bookmarks");
   if (!coll) {
     return null;
   }
   return coll.wbo(guid);
@@ -33,25 +34,25 @@ add_task(async function test_decline_und
     let { guid: bzGuid } = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.menuGuid,
       url: "https://bugzilla.mozilla.org",
       index: PlacesUtils.bookmarks.DEFAULT_INDEX,
       title: "bugzilla",
     });
 
     ok(!getBookmarkWBO(server, bzGuid), "Shouldn't have been uploaded yet");
-    Service.sync();
+    await Service.sync();
     ok(getBookmarkWBO(server, bzGuid), "Should be present on server");
 
     engine.enabled = false;
-    Service.sync();
+    await Service.sync();
     ok(!getBookmarkWBO(server, bzGuid), "Shouldn't be present on server anymore");
 
     engine.enabled = true;
-    Service.sync();
+    await Service.sync();
     ok(getBookmarkWBO(server, bzGuid), "Should be present on server again");
 
   } finally {
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
--- a/services/sync/tests/unit/test_bookmark_duping.js
+++ b/services/sync/tests/unit/test_bookmark_duping.js
@@ -6,50 +6,51 @@ Cu.import("resource://services-common/as
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://services-sync/bookmark_validator.js");
 
-
-initTestLogging("Trace");
-
 const bms = PlacesUtils.bookmarks;
-
-Service.engineManager.register(BookmarksEngine);
+let engine;
+let store;
 
-const engine = new BookmarksEngine(Service);
-const store = engine._store;
-store._log.level = Log.Level.Trace;
-engine._log.level = Log.Level.Trace;
+add_task(async function setup() {
+  initTestLogging("Trace");
+  await Service.engineManager.register(BookmarksEngine);
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+  store._log.level = Log.Level.Trace;
+  engine._log.level = Log.Level.Trace;
+});
 
-async function setup() {
+async function sharedSetup() {
  let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");   // We skip usual startup...
 
   return { server, collection };
 }
 
 async function cleanup(server) {
   Svc.Obs.notify("weave:engine:stop-tracking");
   let promiseStartOver = promiseOneObserver("weave:service:start-over:finish");
-  Service.startOver();
+  await Service.startOver();
   await promiseStartOver;
   await promiseStopServer(server);
   await bms.eraseEverything();
 }
 
 async function syncIdToId(syncId) {
-  let guid = await PlacesSyncUtils.bookmarks.syncIdToGuid(syncId);
+  let guid = PlacesSyncUtils.bookmarks.syncIdToGuid(syncId);
   return PlacesUtils.promiseItemId(guid);
 }
 
 async function getFolderChildrenIDs(folderId) {
   let folderSyncId = PlacesSyncUtils.bookmarks.guidToSyncId(await PlacesUtils.promiseItemGuid(folderId));
   let syncIds = await PlacesSyncUtils.bookmarks.fetchChildSyncIds(folderSyncId);
   return Promise.all(syncIds.map(async (syncId) => await syncIdToId(syncId)));
 }
@@ -116,24 +117,24 @@ async function validate(collection, expe
     do_print("Local bookmark tree:\n" + JSON.stringify(tree, undefined, 2));
     ok(false);
   }
 }
 
 add_task(async function test_dupe_bookmark() {
   _("Ensure that a bookmark we consider a dupe is handled correctly.");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {id: localId, guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, its parent (folder1) plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 6);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     // Now create a new incoming record that looks alot like a dupe.
     let newGUID = Utils.makeGUID();
     let to_apply = {
@@ -160,17 +161,17 @@ add_task(async function test_dupe_bookma
         equal(source, PlacesUtils.bookmarks.SOURCE_SYNC);
         onItemChangedObserved = true;
       }
     };
     PlacesUtils.bookmarks.addObserver(obs, false);
 
     _("Syncing so new dupe record is processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // We should have logically deleted the dupe record.
     equal(collection.count(), 7);
     ok(getServerRecord(collection, bmk1_guid).deleted);
     // and physically removed from the local store.
     await promiseNoLocalItem(bmk1_guid);
     // Parent should still only have 1 item.
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
@@ -187,28 +188,28 @@ add_task(async function test_dupe_bookma
   } finally {
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_bookmark() {
   _("Ensure that a bookmark we consider a dupe from a different parent is handled correctly");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // Another parent folder *with the same name*
     let {id: folder2_id, guid: folder2_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
 
     do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
     equal((await getFolderChildrenIDs(folder2_id)).length, 0);
 
     // Now create a new incoming record that looks alot like a dupe of the
     // item in folder1_guid, but with a record that points to folder2_guid.
@@ -221,17 +222,17 @@ add_task(async function test_dupe_repare
       parentName: "Folder 1",
       parentid: folder2_guid,
     };
 
     collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 500);
 
     _("Syncing so new dupe record is processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // We should have logically deleted the dupe record.
     equal(collection.count(), 8);
     ok(getServerRecord(collection, bmk1_guid).deleted);
     // and physically removed from the local store.
     await promiseNoLocalItem(bmk1_guid);
     // The original folder no longer has the item
     equal((await getFolderChildrenIDs(folder1_id)).length, 0);
@@ -253,28 +254,28 @@ add_task(async function test_dupe_repare
   } finally {
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_locally_changed_bookmark() {
   _("Ensure that a bookmark with local changes we consider a dupe from a different parent is handled correctly");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // Another parent folder *with the same name*
     let {id: folder2_id, guid: folder2_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
 
     do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
     equal((await getFolderChildrenIDs(folder2_id)).length, 0);
 
     // Now create a new incoming record that looks alot like a dupe of the
     // item in folder1_guid, but with a record that points to folder2_guid.
@@ -298,17 +299,17 @@ add_task(async function test_dupe_repare
     await PlacesTestUtils.setBookmarkSyncFields({
       guid: bmk1_guid,
       syncChangeCounter: 1,
       lastModified: Date.now() + (deltaSeconds + 10) * 1000,
     });
 
     _("Syncing so new dupe record is processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // We should have logically deleted the dupe record.
     equal(collection.count(), 8);
     ok(getServerRecord(collection, bmk1_guid).deleted);
     // and physically removed from the local store.
     await promiseNoLocalItem(bmk1_guid);
     // The original folder still longer has the item
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
@@ -331,28 +332,28 @@ add_task(async function test_dupe_repare
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_to_earlier_appearing_parent_bookmark() {
   _("Ensure that a bookmark we consider a dupe from a different parent that " +
     "appears in the same sync before the dupe item");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // One more folder we'll use later.
     let {guid: folder2_guid} = await createFolder(bms.toolbarFolder, "A second folder");
 
     do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     let newGUID = Utils.makeGUID();
     let newParentGUID = Utils.makeGUID();
 
@@ -389,47 +390,47 @@ add_task(async function test_dupe_repare
       parentName: "Folder 1",
       parentid: newParentGUID,
       tags: [],
     }), Date.now() / 1000 + 500);
 
 
     _("Syncing so new records are processed.");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // Everything should be parented correctly.
     equal((await getFolderChildrenIDs(folder1_id)).length, 0);
-    let newParentID = store.idForGUID(newParentGUID);
-    let newID = store.idForGUID(newGUID);
+    let newParentID = await store.idForGUID(newParentGUID);
+    let newID = await store.idForGUID(newGUID);
     deepEqual(await getFolderChildrenIDs(newParentID), [newID]);
 
     // Make sure the validator thinks everything is hunky-dory.
     await validate(collection);
   } finally {
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_to_later_appearing_parent_bookmark() {
   _("Ensure that a bookmark we consider a dupe from a different parent that " +
     "doesn't exist locally as we process the child, but does appear in the same sync");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // One more folder we'll use later.
     let {guid: folder2_guid} = await createFolder(bms.toolbarFolder, "A second folder");
 
     do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     // Now create a new incoming record that looks alot like a dupe of the
     // item in folder1_guid, but with a record that points to a parent with the
     // same name, but a non-existing local ID.
@@ -465,48 +466,48 @@ add_task(async function test_dupe_repare
       parentName: "Bookmarks Toolbar",
       parentid: "toolbar",
       children: [newParentGUID],
       tags: [],
     }), Date.now() / 1000 + 500);
 
     _("Syncing so out-of-order records are processed.");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // The intended parent did end up existing, so it should be parented
     // correctly after de-duplication.
     equal((await getFolderChildrenIDs(folder1_id)).length, 0);
-    let newParentID = store.idForGUID(newParentGUID);
-    let newID = store.idForGUID(newGUID);
+    let newParentID = await store.idForGUID(newParentGUID);
+    let newID = await store.idForGUID(newGUID);
     deepEqual(await getFolderChildrenIDs(newParentID), [newID]);
 
     // Make sure the validator thinks everything is hunky-dory.
     await validate(collection);
   } finally {
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_to_future_arriving_parent_bookmark() {
   _("Ensure that a bookmark we consider a dupe from a different parent that " +
     "doesn't exist locally and doesn't appear in this Sync is handled correctly");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // One more folder we'll use later.
     let {guid: folder2_guid} = await createFolder(bms.toolbarFolder, "A second folder");
 
     do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     // Now create a new incoming record that looks alot like a dupe of the
     // item in folder1_guid, but with a record that points to a parent with the
     // same name, but a non-existing local ID.
@@ -521,34 +522,34 @@ add_task(async function test_dupe_repare
       parentName: "Folder 1",
       parentid: newParentGUID,
       tags: [],
       dateAdded: Date.now() - 10000
     }), Date.now() / 1000 + 500);
 
     _("Syncing so new dupe record is processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // We should have logically deleted the dupe record.
     equal(collection.count(), 8);
     ok(getServerRecord(collection, bmk1_guid).deleted);
     // and physically removed from the local store.
     await promiseNoLocalItem(bmk1_guid);
     // The intended parent doesn't exist, so it remains in the original folder
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     // The record for folder1 on the server should reference the new GUID.
     let serverRecord1 = getServerRecord(collection, folder1_guid);
     ok(!serverRecord1.children.includes(bmk1_guid));
     ok(serverRecord1.children.includes(newGUID));
 
     // As the incoming parent is missing the item should have been annotated
     // with that missing parent.
-    equal(PlacesUtils.annotations.getItemAnnotation(store.idForGUID(newGUID), "sync/parent"),
+    equal(PlacesUtils.annotations.getItemAnnotation((await store.idForGUID(newGUID)), "sync/parent"),
           newParentGUID);
 
     // Check the validator. Sadly, this is known to cause a mismatch between
     // the server and client views of the tree.
     let expected = [
       // We haven't fixed the incoming record that referenced the missing parent.
       { name: "orphans", count: 1 },
     ];
@@ -577,22 +578,22 @@ add_task(async function test_dupe_repare
       children: [newParentGUID],
       tags: [],
       dateAdded: Date.now() - 11000,
     }), Date.now() / 1000 + 500);
 
 
     _("Syncing so missing parent appears");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // The intended parent now does exist, so it should have been reparented.
     equal((await getFolderChildrenIDs(folder1_id)).length, 0);
-    let newParentID = store.idForGUID(newParentGUID);
-    let newID = store.idForGUID(newGUID);
+    let newParentID = await store.idForGUID(newParentGUID);
+    let newID = await store.idForGUID(newGUID);
     deepEqual(await getFolderChildrenIDs(newParentID), [newID]);
 
     // validation now has different errors :(
     expected = [
       // The validator reports multipleParents because:
       // * The incoming record newParentGUID still (and correctly) references
       //   newGUID as a child.
       // * Our original Folder1 was updated to include newGUID when it
@@ -609,23 +610,23 @@ add_task(async function test_dupe_repare
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_empty_folder() {
   _("Ensure that an empty folder we consider a dupe is handled correctly.");
   // Empty folders aren't particularly interesting in practice (as that seems
   // an edge-case) but duping folders with items is broken - bug 1293163.
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The folder we will end up duping away.
     let {guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
 
-    engine.sync();
+    await engine.sync();
 
     // We've added 1 folder, "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 5);
 
     // Now create new incoming records that looks alot like a dupe of "Folder 1".
     let newFolderGUID = Utils.makeGUID();
     collection.insert(newFolderGUID, encryptPayload({
       id: newFolderGUID,
@@ -633,17 +634,17 @@ add_task(async function test_dupe_empty_
       title: "Folder 1",
       parentName: "Bookmarks Toolbar",
       parentid: "toolbar",
       children: [],
     }), Date.now() / 1000 + 500);
 
     _("Syncing so new dupe records are processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     await validate(collection);
 
     // Collection now has one additional record - the logically deleted dupe.
     equal(collection.count(), 6);
     // original folder should be logically deleted.
     ok(getServerRecord(collection, folder1_guid).deleted);
     await promiseNoLocalItem(folder1_guid);
--- a/services/sync/tests/unit/test_bookmark_engine.js
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -34,26 +34,28 @@ async function fetchAllSyncIds() {
   }
   return syncIds;
 }
 
 add_task(async function test_delete_invalid_roots_from_server() {
   _("Ensure that we delete the Places and Reading List roots from the server.");
 
   let engine  = new BookmarksEngine(Service);
+  await engine.initialize();
   let store   = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
-    collection.insert("places", encryptPayload(store.createRecord("places").cleartext));
+    let placesRecord = await store.createRecord("places");
+    collection.insert("places", encryptPayload(placesRecord.cleartext));
 
     let listBmk = new Bookmark("bookmarks", Utils.makeGUID());
     listBmk.bmkUri = "https://example.com";
     listBmk.title = "Example reading list entry";
     listBmk.parentName = "Reading List";
     listBmk.parentid = "readinglist";
     collection.insert(listBmk.id, encryptPayload(listBmk.cleartext));
 
@@ -71,24 +73,24 @@ add_task(async function test_delete_inva
     newBmk.parentid = "toolbar";
     collection.insert(newBmk.id, encryptPayload(newBmk.cleartext));
 
     deepEqual(collection.keys().sort(), ["places", "readinglist", listBmk.id, newBmk.id].sort(),
       "Should store Places root, reading list items, and new bookmark on server");
 
     await sync_engine_and_validate_telem(engine, false);
 
-    ok(!store.itemExists("readinglist"), "Should not apply Reading List root");
-    ok(!store.itemExists(listBmk.id), "Should not apply items in Reading List");
-    ok(store.itemExists(newBmk.id), "Should apply new bookmark");
+    ok(!(await store.itemExists("readinglist")), "Should not apply Reading List root");
+    ok(!(await store.itemExists(listBmk.id)), "Should not apply items in Reading List");
+    ok((await store.itemExists(newBmk.id)), "Should apply new bookmark");
 
     deepEqual(collection.keys().sort(), ["menu", "mobile", "toolbar", "unfiled", newBmk.id].sort(),
       "Should remove Places root and reading list items from server; upload local roots");
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await promiseStopServer(server);
     Svc.Obs.notify("weave:engine:stop-tracking");
   }
 });
 
 add_task(async function bad_record_allIDs() {
@@ -119,39 +121,41 @@ add_task(async function bad_record_allID
   await PlacesSyncUtils.bookmarks.reset();
   await promiseStopServer(server);
 });
 
 add_task(async function test_processIncoming_error_orderChildren() {
   _("Ensure that _orderChildren() is called even when _processIncoming() throws an error.");
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   try {
 
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
 
     let fxuri = Utils.makeURI("http://getfirefox.com/");
     let tburi = Utils.makeURI("http://getthunderbird.com/");
 
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
     let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
 
     // Create a server record for folder1 where we flip the order of
     // the children.
-    let folder1_payload = store.createRecord(folder1_guid).cleartext;
+    let folder1_record = await store.createRecord(folder1_guid);
+    let folder1_payload = folder1_record.cleartext;
     folder1_payload.children.reverse();
     collection.insert(folder1_guid, encryptPayload(folder1_payload));
 
     // Create a bogus record that when synced down will provoke a
     // network error which in turn provokes an exception in _processIncoming.
     const BOGUS_GUID = "zzzzzzzzzzzz";
     let bogus_record = collection.insert(BOGUS_GUID, "I'm a bogus record!");
     bogus_record.get = function get() {
@@ -167,58 +171,60 @@ add_task(async function test_processInco
     try {
       await sync_engine_and_validate_telem(engine, true)
     } catch (ex) {
       error = ex;
     }
     ok(!!error);
 
     // Verify that the bookmark order has been applied.
-    let new_children = store.createRecord(folder1_guid).children;
+    folder1_record = await store.createRecord(folder1_guid);
+    let new_children = folder1_record.children;
     do_check_eq(new_children.length, 2);
     do_check_eq(new_children[0], folder1_payload.children[0]);
     do_check_eq(new_children[1], folder1_payload.children[1]);
 
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 1);
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 0);
 
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_restorePromptsReupload() {
   _("Ensure that restoring from a backup will reupload all records.");
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");   // We skip usual startup...
 
   try {
 
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
     _("Folder 1: " + folder1_id + ", " + folder1_guid);
 
     let fxuri = Utils.makeURI("http://getfirefox.com/");
     let tburi = Utils.makeURI("http://getthunderbird.com/");
 
     _("Create a single record.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
     _("Get Firefox!: " + bmk1_id + ", " + bmk1_guid);
 
 
     let dirSvc = Cc["@mozilla.org/file/directory_service;1"]
       .getService(Ci.nsIProperties);
 
     let backupFile = dirSvc.get("TmpD", Ci.nsILocalFile);
 
@@ -226,17 +232,17 @@ add_task(async function test_restoreProm
     backupFile.append("t_b_e_" + Date.now() + ".json");
 
     _("Backing up to file " + backupFile.path);
     await BookmarkJSONUtils.exportToFile(backupFile.path);
 
     _("Create a different record and sync.");
     let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
-    let bmk2_guid = store.GUIDForId(bmk2_id);
+    let bmk2_guid = await store.GUIDForId(bmk2_id);
     _("Get Thunderbird!: " + bmk2_id + ", " + bmk2_guid);
 
     PlacesUtils.bookmarks.removeItem(bmk1_id);
 
     let error;
     try {
       await sync_engine_and_validate_telem(engine, false);
     } catch (ex) {
@@ -259,17 +265,17 @@ add_task(async function test_restoreProm
     _("Ensure we have the bookmarks we expect locally.");
     let guids = await fetchAllSyncIds();
     _("GUIDs: " + JSON.stringify([...guids]));
     let found = false;
     let count = 0;
     let newFX;
     for (let guid of guids) {
       count++;
-      let id = store.idForGUID(guid, true);
+      let id = await store.idForGUID(guid, true);
       // Only one bookmark, so _all_ should be Firefox!
       if (PlacesUtils.bookmarks.getItemType(id) == PlacesUtils.bookmarks.TYPE_BOOKMARK) {
         let uri = PlacesUtils.bookmarks.getBookmarkURI(id);
         _("Found URI " + uri.spec + " for GUID " + guid);
         do_check_eq(uri.spec, fxuri.spec);
         newFX = guid;   // Save the new GUID after restore.
         found = true;   // Only runs if the above check passes.
       }
@@ -308,17 +314,17 @@ add_task(async function test_restoreProm
     do_check_eq(bookmarkWBOs[0].bmkUri, fxuri.spec);
     do_check_eq(bookmarkWBOs[0].title, "Get Firefox!");
 
     _("Our old friend Folder 1 is still in play.");
     do_check_eq(folderWBOs.length, 1);
     do_check_eq(folderWBOs[0].title, "Folder 1");
 
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
 function FakeRecord(constructor, r) {
@@ -357,112 +363,115 @@ add_task(async function test_mismatched_
       ["HCRq40Rnxhrd", "YeyWCV1RVsYw", "GCceVZMhvMbP", "sYi2hevdArlF",
        "vjbZlPlSyGY8", "UtjUhVyrpeG6", "rVq8WMG2wfZI", "Lx0tcy43ZKhZ",
        "oT74WwV8_j4P", "IztsItWVSo3-"],
     "parentid": "toolbar"
   };
   newRecord.cleartext = newRecord;
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
-  _("GUID: " + store.GUIDForId(6, true));
+  _("GUID: " + (await store.GUIDForId(6, true)));
 
   try {
     let bms = PlacesUtils.bookmarks;
     let oldR = new FakeRecord(BookmarkFolder, oldRecord);
     let newR = new FakeRecord(Livemark, newRecord);
     oldR.parentid = PlacesUtils.bookmarks.toolbarGuid;
     newR.parentid = PlacesUtils.bookmarks.toolbarGuid;
 
-    store.applyIncoming(oldR);
+    await store.applyIncoming(oldR);
     _("Applied old. It's a folder.");
-    let oldID = store.idForGUID(oldR.id);
+    let oldID = await store.idForGUID(oldR.id);
     _("Old ID: " + oldID);
     do_check_eq(bms.getItemType(oldID), bms.TYPE_FOLDER);
     do_check_false(PlacesUtils.annotations
                               .itemHasAnnotation(oldID, PlacesUtils.LMANNO_FEEDURI));
 
-    store.applyIncoming(newR);
-    let newID = store.idForGUID(newR.id);
+    await store.applyIncoming(newR);
+    let newID = await store.idForGUID(newR.id);
     _("New ID: " + newID);
 
     _("Applied new. It's a livemark.");
     do_check_eq(bms.getItemType(newID), bms.TYPE_FOLDER);
     do_check_true(PlacesUtils.annotations
                              .itemHasAnnotation(newID, PlacesUtils.LMANNO_FEEDURI));
 
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_bookmark_guidMap_fail() {
   _("Ensure that failures building the GUID map cause early death.");
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store = engine._store;
 
   let server = serverForFoo(engine);
   let coll   = server.user("foo").collection("bookmarks");
   await SyncTestingInfrastructure(server);
 
   // Add one item to the server.
   let itemID = PlacesUtils.bookmarks.createFolder(
     PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
-  let itemGUID    = store.GUIDForId(itemID);
-  let itemPayload = store.createRecord(itemGUID).cleartext;
+  let itemGUID = await store.GUIDForId(itemID);
+  let itemRecord = await store.createRecord(itemGUID);
+  let itemPayload = itemRecord.cleartext;
   coll.insert(itemGUID, encryptPayload(itemPayload));
 
   engine.lastSync = 1;   // So we don't back up.
 
   // Make building the GUID map fail.
 
   let pbt = PlacesUtils.promiseBookmarksTree;
   PlacesUtils.promiseBookmarksTree = function() { return Promise.reject("Nooo"); };
 
-  // Ensure that we throw when accessing _guidMap.
-  engine._syncStartup();
+  // Ensure that we throw when calling getGuidMap().
+  await engine._syncStartup();
   _("No error.");
-  do_check_false(engine._guidMapFailed);
 
   _("We get an error if building _guidMap fails in use.");
   let err;
   try {
-    _(engine._guidMap);
+    _(await engine.getGuidMap());
   } catch (ex) {
     err = ex;
   }
   do_check_eq(err.code, Engine.prototype.eEngineAbortApplyIncoming);
   do_check_eq(err.cause, "Nooo");
 
   _("We get an error and abort during processIncoming.");
   err = undefined;
   try {
-    engine._processIncoming();
+    await engine._processIncoming();
   } catch (ex) {
     err = ex;
   }
   do_check_eq(err, "Nooo");
 
   PlacesUtils.promiseBookmarksTree = pbt;
   await PlacesSyncUtils.bookmarks.reset();
   await promiseStopServer(server);
 });
 
 add_task(async function test_bookmark_tag_but_no_uri() {
   _("Ensure that a bookmark record with tags, but no URI, doesn't throw an exception.");
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store = engine._store;
 
   // We're simply checking that no exception is thrown, so
   // no actual checks in this test.
 
   await PlacesSyncUtils.bookmarks.insert({
     kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK,
     syncId: Utils.makeGUID(),
@@ -489,42 +498,43 @@ add_task(async function test_bookmark_ta
     parentid:    "toolbar",
     id:          Utils.makeGUID(),
     description: "",
     tags:        ["foo"],
     title:       "Taggy tag",
     type:        "folder"
   });
 
-  store.create(record);
+  await store.create(record);
   record.tags = ["bar"];
-  store.update(record);
+  await store.update(record);
 });
 
 add_task(async function test_misreconciled_root() {
   _("Ensure that we don't reconcile an arbitrary record with a root.");
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   // Log real hard for this test.
   store._log.trace = store._log.debug;
   engine._log.trace = engine._log.debug;
 
-  engine._syncStartup();
+  await engine._syncStartup();
 
   // Let's find out where the toolbar is right now.
-  let toolbarBefore = store.createRecord("toolbar", "bookmarks");
-  let toolbarIDBefore = store.idForGUID("toolbar");
+  let toolbarBefore = await store.createRecord("toolbar", "bookmarks");
+  let toolbarIDBefore = await store.idForGUID("toolbar");
   do_check_neq(-1, toolbarIDBefore);
 
   let parentGUIDBefore = toolbarBefore.parentid;
-  let parentIDBefore = store.idForGUID(parentGUIDBefore);
+  let parentIDBefore = await store.idForGUID(parentGUIDBefore);
   do_check_neq(-1, parentIDBefore);
   do_check_eq("string", typeof(parentGUIDBefore));
 
   _("Current parent: " + parentGUIDBefore + " (" + parentIDBefore + ").");
 
   let to_apply = {
     id: "zzzzzzzzzzzz",
     type: "folder",
@@ -538,32 +548,34 @@ add_task(async function test_misreconcil
  let rec = new FakeRecord(BookmarkFolder, to_apply);

  _("Applying record.");
-  store.applyIncoming(rec);
+  await store.applyIncoming(rec);
 
   // Ensure that afterwards, toolbar is still there.
   // As of 2012-12-05, this only passes because Places doesn't use "toolbar" as
   // the real GUID, instead using a generated one. Sync does the translation.
-  let toolbarAfter = store.createRecord("toolbar", "bookmarks");
+  let toolbarAfter = await store.createRecord("toolbar", "bookmarks");
   let parentGUIDAfter = toolbarAfter.parentid;
-  let parentIDAfter = store.idForGUID(parentGUIDAfter);
-  do_check_eq(store.GUIDForId(toolbarIDBefore), "toolbar");
+  let parentIDAfter = await store.idForGUID(parentGUIDAfter);
+  do_check_eq((await store.GUIDForId(toolbarIDBefore)), "toolbar");
   do_check_eq(parentGUIDBefore, parentGUIDAfter);
   do_check_eq(parentIDBefore, parentIDAfter);
 
   await store.wipe();
   await PlacesSyncUtils.bookmarks.reset();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_dateAdded() {
   await Service.recordManager.clearCache();
   await PlacesSyncUtils.bookmarks.reset();
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   // TODO: Avoid random orange (bug 1374599), this is only necessary
   // intermittently - reset the last sync date so that we'll get all bookmarks.
@@ -629,18 +640,18 @@ add_task(async function test_sync_dateAd
     item6.bmkUri = "https://example.com/6";
     item6.title = "asdf6";
     item6.parentName = "Bookmarks Toolbar";
     item6.parentid = "toolbar";
     const item6LastModified = (now - oneYearMS) / 1000;
     collection.insert(item6GUID, encryptPayload(item6.cleartext), item6LastModified);
 
     let origBuildWeakReuploadMap = engine.buildWeakReuploadMap;
-    engine.buildWeakReuploadMap = set => {
-      let fullMap = origBuildWeakReuploadMap.call(engine, set);
+    engine.buildWeakReuploadMap = async (set) => {
+      let fullMap = await origBuildWeakReuploadMap.call(engine, set);
       fullMap.delete(item6GUID);
       return fullMap;
     };
 
     await sync_engine_and_validate_telem(engine, false);
 
     let record1 = await store.createRecord(item1GUID);
     let record2 = await store.createRecord(item2GUID);
@@ -705,17 +716,17 @@ add_task(async function test_sync_dateAd
 
     let newerRecord2 = await store.createRecord(item2GUID);
     equal(newerRecord2.dateAdded, newRecord2.dateAdded,
       "dateAdded update should be ignored for later date if we know an earlier one ");
 
 
 
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
 function run_test() {
--- a/services/sync/tests/unit/test_bookmark_invalid.js
+++ b/services/sync/tests/unit/test_bookmark_invalid.js
@@ -1,19 +1,25 @@
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-Service.engineManager.register(BookmarksEngine);
+let engine;
+let store;
+let tracker;
 
-var engine = Service.engineManager.get("bookmarks");
-var store = engine._store;
-var tracker = engine._tracker;
+add_task(async function setup() {
+  initTestLogging("Trace");
+  await Service.engineManager.register(BookmarksEngine);
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+  tracker = engine._tracker;
+});
 
 add_task(async function test_ignore_invalid_uri() {
   _("Ensure that we don't die with invalid bookmarks.");
 
   // First create a valid bookmark.
   let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
                                                   Services.io.newURI("http://example.com/"),
                                                   PlacesUtils.bookmarks.DEFAULT_INDEX,
@@ -25,17 +31,17 @@ add_task(async function test_ignore_inva
       `UPDATE moz_places SET url = :url, url_hash = hash(:url)
        WHERE id = (SELECT b.fk FROM moz_bookmarks b
        WHERE b.id = :id LIMIT 1)`,
       { id: bmid, url: "<invalid url>" });
   });
 
   // Ensure that this doesn't throw even though the DB is now in a bad state (a
   // bookmark has an illegal url).
-  engine._buildGUIDMap();
+  await engine._buildGUIDMap();
 });
 
 add_task(async function test_ignore_missing_uri() {
   _("Ensure that we don't die with a bookmark referencing an invalid bookmark id.");
 
   // First create a valid bookmark.
   let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
                                                   Services.io.newURI("http://example.com/"),
@@ -47,15 +53,10 @@ add_task(async function test_ignore_miss
     await db.execute(
       `UPDATE moz_bookmarks SET fk = 999999
        WHERE id = :id`
       , { id: bmid });
   });
 
   // Ensure that this doesn't throw even though the DB is now in a bad state (a
   // bookmark has an illegal url).
-  engine._buildGUIDMap();
+  await engine._buildGUIDMap();
 });
-
-function run_test() {
-  initTestLogging("Trace");
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_bookmark_livemarks.js
+++ b/services/sync/tests/unit/test_bookmark_livemarks.js
@@ -6,21 +6,21 @@ Cu.import("resource://services-sync/reco
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://testing-common/services/common/utils.js");
 
 const DESCRIPTION_ANNO = "bookmarkProperties/description";
 
-var engine = Service.engineManager.get("bookmarks");
-var store = engine._store;
+let engine;
+let store;
 
 // Record borrowed from Bug 631361.
-var record631361 = {
+const record631361 = {
   id: "M5bwUKK8hPyF",
   index: 150,
   modified: 1296768176.49,
   payload:
   {"id": "M5bwUKK8hPyF",
    "type": "livemark",
    "siteUri": "http://www.bbc.co.uk/go/rss/int/news/-/news/",
    "feedUri": "http://fxfeeds.mozilla.com/en-US/firefox/headlines.xml",
@@ -47,87 +47,79 @@ var record631361 = {
       "E3H04Wn2RfSi", "eaSIMI6kSrcz", "rtkRxFoG5Vqi", "dectkUglV0Dz",
       "B4vUE0BE15No", "qgQFW5AQrgB0", "SxAXvwOhu8Zi", "0S6cRPOg-5Z2",
       "zcZZBGeLnaWW", "B0at8hkQqVZQ", "sgPtgGulbP66", "lwtwGHSCPYaQ",
       "mNTdpgoRZMbW", "-L8Vci6CbkJY", "bVzudKSQERc1", "Gxl9lb4DXsmL",
       "3Qr13GucOtEh"]},
   collection: "bookmarks"
 };
 
-// Clean up after other tests. Only necessary in XULRunner.
-store.wipe();
-
 function makeLivemark(p, mintGUID) {
   let b = new Livemark("bookmarks", p.id);
   // Copy here, because tests mutate the contents.
   b.cleartext = TestingUtils.deepCopy(p);
 
   if (mintGUID)
     b.id = Utils.makeGUID();
 
   return b;
 }
 
-
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.Store.Bookmarks").level  = Log.Level.Trace;
 
-  run_next_test();
-}
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+});
 
-add_test(function test_livemark_descriptions() {
+add_task(async function test_livemark_descriptions() {
   let record = record631361.payload;
 
-  function doRecord(r) {
+  async function doRecord(r) {
     store._childrenToOrder = {};
-    store.applyIncoming(r);
-    store._orderChildren();
+    await store.applyIncoming(r);
+    await store._orderChildren();
     delete store._childrenToOrder;
   }
 
   // Attempt to provoke an error by messing around with the description.
   record.description = null;
-  doRecord(makeLivemark(record));
+  await doRecord(makeLivemark(record));
   record.description = "";
-  doRecord(makeLivemark(record));
+  await doRecord(makeLivemark(record));
 
   // Attempt to provoke an error by adding a bad description anno.
-  let id = store.idForGUID(record.id);
+  let id = await store.idForGUID(record.id);
   PlacesUtils.annotations.setItemAnnotation(id, DESCRIPTION_ANNO, "", 0,
                                             PlacesUtils.annotations.EXPIRE_NEVER);
-
-  run_next_test();
 });
 
-add_test(function test_livemark_invalid() {
+add_task(async function test_livemark_invalid() {
   _("Livemarks considered invalid by nsLivemarkService are skipped.");
 
   _("Parent is unknown. Will be set to unfiled.");
   let lateParentRec = makeLivemark(record631361.payload, true);
   let parentGUID = Utils.makeGUID();
   lateParentRec.parentid = parentGUID;
-  do_check_eq(-1, store.idForGUID(parentGUID));
+  do_check_eq(-1, (await store.idForGUID(parentGUID)));
 
-  store.create(lateParentRec);
-  let recID = store.idForGUID(lateParentRec.id, true);
+  await store.create(lateParentRec);
+  let recID = await store.idForGUID(lateParentRec.id, true);
   do_check_true(recID > 0);
   do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(recID),
               PlacesUtils.bookmarks.unfiledBookmarksFolder);
 
   _("No feed URI, which is invalid. Will be skipped.");
   let noFeedURIRec = makeLivemark(record631361.payload, true);
   delete noFeedURIRec.cleartext.feedUri;
-  store.create(noFeedURIRec);
+  await store.create(noFeedURIRec);
   // No exception, but no creation occurs.
-  do_check_eq(-1, store.idForGUID(noFeedURIRec.id, true));
+  do_check_eq(-1, (await store.idForGUID(noFeedURIRec.id, true)));
 
   _("Parent is a Livemark. Will be skipped.");
   let lmParentRec = makeLivemark(record631361.payload, true);
-  lmParentRec.parentid = store.GUIDForId(recID);
-  store.create(lmParentRec);
+  lmParentRec.parentid = await store.GUIDForId(recID);
+  await store.create(lmParentRec);
   // No exception, but no creation occurs.
-  do_check_eq(-1, store.idForGUID(lmParentRec.id, true));
-
-  // Clear event loop.
-  Utils.nextTick(run_next_test);
+  do_check_eq(-1, (await store.idForGUID(lmParentRec.id, true)));
 });
--- a/services/sync/tests/unit/test_bookmark_order.js
+++ b/services/sync/tests/unit/test_bookmark_order.js
@@ -4,23 +4,19 @@
 _("Making sure after processing incoming bookmarks, they show up in the right order");
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/main.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-function run_test() {
-  Svc.Prefs.set("log.logger.engine.bookmarks", "Trace");
-  initTestLogging("Trace");
-  Log.repository.getLogger("Sqlite").level = Log.Level.Info;
-
-  run_next_test();
-}
+Svc.Prefs.set("log.logger.engine.bookmarks", "Trace");
+initTestLogging("Trace");
+Log.repository.getLogger("Sqlite").level = Log.Level.Info;
 
 function serverForFoo(engine) {
   generateNewKeys(Service.collectionKeys);
 
   let clientsEngine = Service.clientsEngine;
   return serverForUsers({"foo": "password"}, {
     meta: {
       global: {
@@ -185,18 +181,18 @@ add_task(async function test_local_order
         guid: guids.mdn,
         index: 5,
       }],
     }, {
       guid: guids.tb,
       index: 2,
     }], "Should use local order as base if remote is older");
   } finally {
-    engine.wipeClient();
-    Service.startOver();
+    await engine.wipeClient();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_remote_order_newer() {
   let engine = Service.engineManager.get("bookmarks");
 
   let server = serverForFoo(engine);
@@ -230,28 +226,28 @@ add_task(async function test_remote_orde
         guid: guids.customize,
         index: 5,
       }],
     }, {
       guid: guids.fx,
       index: 2,
     }], "Should use remote order as base if local is older");
   } finally {
-    engine.wipeClient();
-    Service.startOver();
+    await engine.wipeClient();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_bookmark_order() {
   let engine = new BookmarksEngine(Service);
   let store = engine._store;
 
   _("Starting with a clean slate of no bookmarks");
-  store.wipe();
+  await store.wipe();
   await assertBookmarksTreeMatches("", [{
     guid: PlacesUtils.bookmarks.menuGuid,
     index: 0,
   }, {
     guid: PlacesUtils.bookmarks.toolbarGuid,
     index: 1,
   }, {
     // Index 2 is the tags root. (Root indices depend on the order of the
@@ -279,17 +275,17 @@ add_task(async function test_bookmark_or
     bmFolder.title = name;
     bmFolder.parentid = parent || "unfiled";
     bmFolder.children = children;
     return bmFolder;
   }
 
   async function apply(record) {
     store._childrenToOrder = {};
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
     await store._orderChildren();
     delete store._childrenToOrder;
   }
   let id10 = "10_aaaaaaaaa";
   _("basic add first bookmark");
   await apply(bookmark(id10, ""));
   await assertBookmarksTreeMatches("", [{
     guid: PlacesUtils.bookmarks.menuGuid,
--- a/services/sync/tests/unit/test_bookmark_places_query_rewriting.js
+++ b/services/sync/tests/unit/test_bookmark_places_query_rewriting.js
@@ -1,59 +1,59 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 _("Rewrite place: URIs.");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-var engine = new BookmarksEngine(Service);
-var store = engine._store;
+let engine = new BookmarksEngine(Service);
+let store = engine._store;
 
 function makeTagRecord(id, uri) {
   let tagRecord = new BookmarkQuery("bookmarks", id);
   tagRecord.queryId = "MagicTags";
   tagRecord.parentName = "Bookmarks Toolbar";
   tagRecord.bmkUri = uri;
   tagRecord.title = "tagtag";
   tagRecord.folderName = "bar";
   tagRecord.parentid = PlacesUtils.bookmarks.toolbarGuid;
   return tagRecord;
 }
 
-function run_test() {
+add_task(async function run_test() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.Store.Bookmarks").level = Log.Level.Trace;
 
   let uri = "place:folder=499&type=7&queryType=1";
   let tagRecord = makeTagRecord("abcdefabcdef", uri);
 
   _("Type: " + tagRecord.type);
   _("Folder name: " + tagRecord.folderName);
-  store.applyIncoming(tagRecord);
+  await store.applyIncoming(tagRecord);
 
   let tags = PlacesUtils.getFolderContents(PlacesUtils.tagsFolderId).root;
   let tagID;
   try {
     for (let i = 0; i < tags.childCount; ++i) {
       let child = tags.getChild(i);
       if (child.title == "bar") {
         tagID = child.itemId;
       }
     }
   } finally {
     tags.containerOpen = false;
   }
 
   _("Tag ID: " + tagID);
-  let insertedRecord = store.createRecord("abcdefabcdef", "bookmarks");
+  let insertedRecord = await store.createRecord("abcdefabcdef", "bookmarks");
   do_check_eq(insertedRecord.bmkUri, uri.replace("499", tagID));
 
   _("... but not if the type is wrong.");
   let wrongTypeURI = "place:folder=499&type=2&queryType=1";
   let wrongTypeRecord = makeTagRecord("fedcbafedcba", wrongTypeURI);
-  store.applyIncoming(wrongTypeRecord);
+  await store.applyIncoming(wrongTypeRecord);
 
-  insertedRecord = store.createRecord("fedcbafedcba", "bookmarks");
+  insertedRecord = await store.createRecord("fedcbafedcba", "bookmarks");
   do_check_eq(insertedRecord.bmkUri, wrongTypeURI);
-}
+});
--- a/services/sync/tests/unit/test_bookmark_repair.js
+++ b/services/sync/tests/unit/test_bookmark_repair.js
@@ -8,44 +8,49 @@ Cu.import("resource://gre/modules/osfile
 Cu.import("resource://services-sync/bookmark_repair.js");
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/doctor.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/engines/clients.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
+initTestLogging("Trace");
+Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
+Log.repository.getLogger("Sync.Engine.Clients").level = Log.Level.Trace;
+Log.repository.getLogger("Sqlite").level = Log.Level.Info; // less noisy
+
 const LAST_BOOKMARK_SYNC_PREFS = [
   "bookmarks.lastSync",
   "bookmarks.lastSyncLocal",
 ];
 
 const BOOKMARK_REPAIR_STATE_PREFS = [
   "client.GUID",
   "doctor.lastRepairAdvance",
   ...LAST_BOOKMARK_SYNC_PREFS,
   ...Object.values(BookmarkRepairRequestor.PREF).map(name =>
     `repairs.bookmarks.${name}`
   ),
 ];
 
-initTestLogging("Trace");
-Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace
-Log.repository.getLogger("Sync.Engine.Clients").level = Log.Level.Trace
-Log.repository.getLogger("Sqlite").level = Log.Level.Info; // less noisy
+let clientsEngine;
+let bookmarksEngine;
+let recordedEvents = [];
 
-let clientsEngine = Service.clientsEngine;
-let bookmarksEngine = Service.engineManager.get("bookmarks");
-
-generateNewKeys(Service.collectionKeys);
+add_task(async function setup() {
+  clientsEngine = Service.clientsEngine;
+  bookmarksEngine = Service.engineManager.get("bookmarks");
 
-var recordedEvents = [];
-Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
-  recordedEvents.push({ object, method, value, extra });
-};
+  generateNewKeys(Service.collectionKeys);
+
+  Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
+    recordedEvents.push({ object, method, value, extra });
+  };
+});
 
 function checkRecordedEvents(expected, message) {
   deepEqual(recordedEvents, expected, message);
   // and clear the list so future checks are easier to write.
   recordedEvents = [];
 }
 
 // Backs up and resets all preferences to their default values. Returns a
@@ -71,18 +76,18 @@ async function promiseValidationDone(exp
   let summary = validationResult.problems.getSummary();
   let actual = summary.filter(({name, count}) => count);
   actual.sort((a, b) => String(a.name).localeCompare(b.name));
   expected.sort((a, b) => String(a.name).localeCompare(b.name));
   deepEqual(actual, expected);
 }
 
 async function cleanup(server) {
-  bookmarksEngine._store.wipe();
-  clientsEngine._store.wipe();
+  await bookmarksEngine._store.wipe();
+  await clientsEngine._store.wipe();
   Svc.Prefs.resetBranch("");
   Service.recordManager.clearCache();
   await promiseStopServer(server);
 }
 
 add_task(async function test_bookmark_repair_integration() {
   enableValidationPrefs();
 
@@ -93,17 +98,17 @@ add_task(async function test_bookmark_re
 
   let user = server.user("foo");
 
   let initialID = Service.clientsEngine.localID;
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
-    Service.sync();
+    await Service.sync();
 
     _("Create remote client record");
     server.insertWBO("foo", "clients", new ServerWBO(remoteID, encryptPayload({
       id: remoteID,
       name: "Remote client",
       type: "desktop",
       commands: [],
       version: "54",
@@ -119,42 +124,42 @@ add_task(async function test_bookmark_re
     let bookmarkInfo = await PlacesUtils.bookmarks.insert({
       parentGuid: folderInfo.guid,
       url: "http://getfirefox.com/",
       title: "Get Firefox!",
     });
 
     _(`Upload ${folderInfo.guid} and ${bookmarkInfo.guid} to server`);
     let validationPromise = promiseValidationDone([]);
-    Service.sync();
+    await Service.sync();
     equal(clientsEngine.stats.numClients, 2, "Clients collection should have 2 records");
     await validationPromise;
     checkRecordedEvents([], "Should not start repair after first sync");
 
     _("Back up last sync timestamps for remote client");
     let restoreRemoteLastBookmarkSync = backupPrefs(LAST_BOOKMARK_SYNC_PREFS);
 
     _(`Delete ${bookmarkInfo.guid} locally and on server`);
     // Now we will reach into the server and hard-delete the bookmark
     user.collection("bookmarks").remove(bookmarkInfo.guid);
     // And delete the bookmark, but cheat by telling places that Sync did
     // it, so we don't end up with a tombstone.
     await PlacesUtils.bookmarks.remove(bookmarkInfo.guid, {
       source: PlacesUtils.bookmarks.SOURCE_SYNC,
     });
-    deepEqual(bookmarksEngine.pullNewChanges(), {},
+    deepEqual((await bookmarksEngine.pullNewChanges()), {},
       `Should not upload tombstone for ${bookmarkInfo.guid}`);
 
     // sync again - we should have a few problems...
     _("Sync again to trigger repair");
     validationPromise = promiseValidationDone([
       {"name": "missingChildren", "count": 1},
       {"name": "structuralDifferences", "count": 1},
     ]);
-    Service.sync();
+    await Service.sync();
     await validationPromise;
     let flowID = Svc.Prefs.get("repairs.bookmarks.flowID");
     checkRecordedEvents([{
       object: "repair",
       method: "started",
       value: undefined,
       extra: {
         flowID,
@@ -175,42 +180,43 @@ add_task(async function test_bookmark_re
       extra: {
         deviceID: Service.identity.hashedDeviceID(remoteID),
         flowID,
         numIDs: "1",
       },
     }], "Should record telemetry events for repair request");
 
     // We should have started a repair with our second client.
-    equal(clientsEngine.getClientCommands(remoteID).length, 1,
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 1,
       "Should queue repair request for remote client after repair");
     _("Sync to send outgoing repair request");
-    Service.sync();
-    equal(clientsEngine.getClientCommands(remoteID).length, 0,
+    await Service.sync();
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 0,
       "Should send repair request to remote client after next sync");
     checkRecordedEvents([],
       "Should not record repair telemetry after sending repair request");
 
     _("Back up repair state to restore later");
     let restoreInitialRepairState = backupPrefs(BOOKMARK_REPAIR_STATE_PREFS);
 
     // so now let's take over the role of that other client!
     _("Create new clients engine pretending to be remote client");
     let remoteClientsEngine = Service.clientsEngine = new ClientEngine(Service);
+    await remoteClientsEngine.initialize();
     remoteClientsEngine.localID = remoteID;
 
     _("Restore missing bookmark");
     // Pretend Sync wrote the bookmark, so that we upload it as part of the
     // repair instead of the sync.
     bookmarkInfo.source = PlacesUtils.bookmarks.SOURCE_SYNC;
     await PlacesUtils.bookmarks.insert(bookmarkInfo);
     restoreRemoteLastBookmarkSync();
 
     _("Sync as remote client");
-    Service.sync();
+    await Service.sync();
     checkRecordedEvents([{
       object: "processcommand",
       method: "repairRequest",
       value: undefined,
       extra: {
         flowID,
       },
     }, {
@@ -235,24 +241,24 @@ add_task(async function test_bookmark_re
       value: undefined,
       extra: {
         flowID,
         numIDs: "1",
       }
     }], "Should record telemetry events for repair response");
 
     // We should queue the repair response for the initial client.
-    equal(remoteClientsEngine.getClientCommands(initialID).length, 1,
+    equal((await remoteClientsEngine.getClientCommands(initialID)).length, 1,
       "Should queue repair response for initial client after repair");
     ok(user.collection("bookmarks").wbo(bookmarkInfo.guid),
       "Should upload missing bookmark");
 
     _("Sync to upload bookmark and send outgoing repair response");
-    Service.sync();
-    equal(remoteClientsEngine.getClientCommands(initialID).length, 0,
+    await Service.sync();
+    equal((await remoteClientsEngine.getClientCommands(initialID)).length, 0,
       "Should send repair response to initial client after next sync");
     checkRecordedEvents([],
       "Should not record repair telemetry after sending repair response");
     ok(!Services.prefs.prefHasUserValue("services.sync.repairs.bookmarks.state"),
       "Remote client should not be repairing");
 
     _("Pretend to be initial client again");
     Service.clientsEngine = clientsEngine;
@@ -262,17 +268,17 @@ add_task(async function test_bookmark_re
       source: PlacesUtils.bookmarks.SOURCE_SYNC,
     });
     restoreInitialRepairState();
     ok(Services.prefs.prefHasUserValue("services.sync.repairs.bookmarks.state"),
       "Initial client should still be repairing");
 
     _("Sync as initial client");
     let revalidationPromise = promiseValidationDone([]);
-    Service.sync();
+    await Service.sync();
     let restoredBookmarkInfo = await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid);
     ok(restoredBookmarkInfo, "Missing bookmark should be downloaded to initial client");
     checkRecordedEvents([{
       object: "processcommand",
       method: "repairResponse",
       value: undefined,
       extra: {
         flowID,
@@ -296,32 +302,33 @@ add_task(async function test_bookmark_re
       },
     }]);
     await revalidationPromise;
     ok(!Services.prefs.prefHasUserValue("services.sync.repairs.bookmarks.state"),
       "Should clear repair pref after successfully completing repair");
   } finally {
     await cleanup(server);
     clientsEngine = Service.clientsEngine = new ClientEngine(Service);
+    await clientsEngine.initialize();
   }
 });
 
 add_task(async function test_repair_client_missing() {
   enableValidationPrefs();
 
   _("Ensure that a record missing from the client only will get re-downloaded from the server");
 
   let server = serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
-    Service.sync();
+    await Service.sync();
 
     _("Create remote client record");
     server.insertWBO("foo", "clients", new ServerWBO(remoteID, encryptPayload({
       id: remoteID,
       name: "Remote client",
       type: "desktop",
       commands: [],
       version: "54",
@@ -331,45 +338,45 @@ add_task(async function test_repair_clie
     let bookmarkInfo = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.toolbarGuid,
       url: "http://getfirefox.com/",
       title: "Get Firefox!",
     });
 
     let validationPromise = promiseValidationDone([]);
     _("Syncing.");
-    Service.sync();
+    await Service.sync();
     // should have 2 clients
     equal(clientsEngine.stats.numClients, 2)
     await validationPromise;
 
     // Delete the bookmark localy, but cheat by telling places that Sync did
     // it, so Sync still thinks we have it.
     await PlacesUtils.bookmarks.remove(bookmarkInfo.guid, {
       source: PlacesUtils.bookmarks.SOURCE_SYNC,
     });
     // sanity check we aren't going to sync this removal.
-    do_check_empty(bookmarksEngine.pullNewChanges());
+    do_check_empty((await bookmarksEngine.pullNewChanges()));
     // sanity check that the bookmark is not there anymore
     do_check_false(await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid));
 
     // sync again - we should have a few problems...
     _("Syncing again.");
     validationPromise = promiseValidationDone([
       {"name": "clientMissing", "count": 1},
       {"name": "structuralDifferences", "count": 1},
     ]);
-    Service.sync();
+    await Service.sync();
     await validationPromise;
 
     // We shouldn't have started a repair with our second client.
-    equal(clientsEngine.getClientCommands(remoteID).length, 0);
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 0);
 
     // Trigger a sync (will request the missing item)
-    Service.sync();
+    await Service.sync();
 
     // And we got our bookmark back
     do_check_true(await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid));
   } finally {
     await cleanup(server);
   }
 });
 
@@ -382,17 +389,17 @@ add_task(async function test_repair_serv
   await SyncTestingInfrastructure(server);
 
   let user = server.user("foo");
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
-    Service.sync();
+    await Service.sync();
 
     _("Create remote client record");
     server.insertWBO("foo", "clients", new ServerWBO(remoteID, encryptPayload({
       id: remoteID,
       name: "Remote client",
       type: "desktop",
       commands: [],
       version: "54",
@@ -402,38 +409,38 @@ add_task(async function test_repair_serv
     let bookmarkInfo = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.toolbarGuid,
       url: "http://getfirefox.com/",
       title: "Get Firefox!",
     });
 
     let validationPromise = promiseValidationDone([]);
     _("Syncing.");
-    Service.sync();
+    await Service.sync();
     // should have 2 clients
     equal(clientsEngine.stats.numClients, 2)
     await validationPromise;
 
     // Now we will reach into the server and hard-delete the bookmark
     user.collection("bookmarks").wbo(bookmarkInfo.guid).delete();
 
     // sync again - we should have a few problems...
     _("Syncing again.");
     validationPromise = promiseValidationDone([
       {"name": "serverMissing", "count": 1},
       {"name": "missingChildren", "count": 1},
     ]);
-    Service.sync();
+    await Service.sync();
     await validationPromise;
 
     // We shouldn't have started a repair with our second client.
-    equal(clientsEngine.getClientCommands(remoteID).length, 0);
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 0);
 
     // Trigger a sync (will upload the missing item)
-    Service.sync();
+    await Service.sync();
 
     // And the server got our bookmark back
     do_check_true(user.collection("bookmarks").wbo(bookmarkInfo.guid));
   } finally {
     await cleanup(server);
   }
 });
 
@@ -444,17 +451,17 @@ add_task(async function test_repair_serv
 
   let server = serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
-    Service.sync();
+    await Service.sync();
 
     _("Create remote client record");
     server.insertWBO("foo", "clients", new ServerWBO(remoteID, encryptPayload({
       id: remoteID,
       name: "Remote client",
       type: "desktop",
       commands: [],
       version: "54",
@@ -464,17 +471,17 @@ add_task(async function test_repair_serv
     let bookmarkInfo = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.toolbarGuid,
       url: "http://getfirefox.com/",
       title: "Get Firefox!",
     });
 
     let validationPromise = promiseValidationDone([]);
     _("Syncing.");
-    Service.sync();
+    await Service.sync();
     // should have 2 clients
     equal(clientsEngine.stats.numClients, 2)
     await validationPromise;
 
     // Now we will reach into the server and create a tombstone for that bookmark
     server.insertWBO("foo", "bookmarks", new ServerWBO(bookmarkInfo.guid, encryptPayload({
       id: bookmarkInfo.guid,
       deleted: true,
@@ -482,23 +489,23 @@ add_task(async function test_repair_serv
 
     // sync again - we should have a few problems...
     _("Syncing again.");
     validationPromise = promiseValidationDone([
       {"name": "serverDeleted", "count": 1},
       {"name": "deletedChildren", "count": 1},
       {"name": "orphans", "count": 1}
     ]);
-    Service.sync();
+    await Service.sync();
     await validationPromise;
 
     // We shouldn't have started a repair with our second client.
-    equal(clientsEngine.getClientCommands(remoteID).length, 0);
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 0);
 
     // Trigger a sync (will upload the missing item)
-    Service.sync();
+    await Service.sync();
 
     // And the client deleted our bookmark
     do_check_true(!(await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid)));
   } finally {
     await cleanup(server);
   }
 });
--- a/services/sync/tests/unit/test_bookmark_repair_requestor.js
+++ b/services/sync/tests/unit/test_bookmark_repair_requestor.js
@@ -23,23 +23,23 @@ class MockClientsEngine {
   get remoteClients() {
     return Object.values(this._clientList);
   }
 
   remoteClient(id) {
     return this._clientList[id];
   }
 
-  sendCommand(command, args, clientID) {
+  async sendCommand(command, args, clientID) {
     let cc = this._sentCommands[clientID] || [];
     cc.push({ command, args });
     this._sentCommands[clientID] = cc;
   }
 
-  getClientCommands(clientID) {
+  async getClientCommands(clientID) {
     return this._sentCommands[clientID] || [];
   }
 }
 
 class MockIdentity {
   hashedDeviceID(did) {
     return did; // don't hash it to make testing easier.
   }
@@ -93,17 +93,17 @@ add_task(async function test_requestor_n
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
 
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // there are no clients, so we should end up in "finished" (which we need to
   // check via telemetry)
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
     },
@@ -124,36 +124,36 @@ add_task(async function test_requestor_o
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   // asking it to continue stays in that state until we timeout or the command
   // is removed.
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
 
   // now pretend that client synced.
   mockService.clientsEngine._sentCommands = {};
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
   checkState(BookmarkRepairRequestor.STATE.SENT_SECOND_REQUEST);
   // the command should be outgoing again.
   checkOutgoingCommand(mockService, "client-a");
 
   // pretend that client synced again without writing a command.
   mockService.clientsEngine._sentCommands = {};
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
   // There are no more clients, so we've given up.
 
   checkRepairFinished();
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
@@ -185,27 +185,27 @@ add_task(async function test_requestor_o
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
 
   // pretend we are now in the future.
   let theFuture = Date.now() + 300000000;
   requestor._now = () => theFuture;
 
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
 
   // We should be finished as we gave up in disgust.
   checkRepairFinished();
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
@@ -237,17 +237,17 @@ add_task(async function test_requestor_l
   let validationInfo = {
     problems: {
       missingChildren: [
         { parent: "x", child: "a" },
       ],
       orphans: [],
     }
   }
-  requestor.startRepairs(validationInfo, Utils.makeGUID());
+  await requestor.startRepairs(validationInfo, Utils.makeGUID());
   // the repair command should be outgoing to the most-recent client.
   checkOutgoingCommand(mockService, "client-late");
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   // and this test is done - reset the repair.
   requestor.prefs.resetBranch();
 });
 
 add_task(async function test_requestor_client_vanishes() {
@@ -262,40 +262,40 @@ add_task(async function test_requestor_c
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
 
   mockService.clientsEngine._sentCommands = {};
   // Now let's pretend the client vanished.
   delete mockService.clientsEngine._clientList["client-a"];
 
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
   // We should have moved on to client-b.
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   checkOutgoingCommand(mockService, "client-b");
 
   // Now let's pretend client B wrote all missing IDs.
   let response = {
     collection: "bookmarks",
     request: "upload",
     flowID: requestor._flowID,
     clientID: "client-b",
     ids: ["a", "b", "c"],
   }
-  requestor.continueRepairs(response);
+  await requestor.continueRepairs(response);
 
   // We should be finished as we got all our IDs.
   checkRepairFinished();
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
@@ -340,45 +340,45 @@ add_task(async function test_requestor_s
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
 
   mockService.clientsEngine._sentCommands = {};
   // Now let's pretend the client wrote a response.
   let response = {
     collection: "bookmarks",
     request: "upload",
     clientID: "client-a",
     flowID: requestor._flowID,
     ids: ["a", "b"],
   }
-  requestor.continueRepairs(response);
+  await requestor.continueRepairs(response);
   // We should have moved on to client 2.
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   checkOutgoingCommand(mockService, "client-b");
 
   // Now let's pretend client B write the missing ID.
   response = {
     collection: "bookmarks",
     request: "upload",
     clientID: "client-b",
     flowID: requestor._flowID,
     ids: ["c"],
   }
-  requestor.continueRepairs(response);
+  await requestor.continueRepairs(response);
 
   // We should be finished as we got all our IDs.
   checkRepairFinished();
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
@@ -436,17 +436,17 @@ add_task(async function test_requestor_a
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
 
-  ok(!requestor.startRepairs(validationInfo, flowID),
+  ok(!(await requestor.startRepairs(validationInfo, flowID)),
      "Shouldn't start repairs");
   equal(mockService._recordedEvents.length, 1);
   equal(mockService._recordedEvents[0].method, "aborted");
 });
 
 add_task(async function test_requestor_already_repairing_continue() {
   let clientB = makeClientRecord("client-b")
   let mockService = new MockService({
@@ -460,17 +460,17 @@ add_task(async function test_requestor_a
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   mockService.clientsEngine._sentCommands = {};
 
   // Now let's pretend the client wrote a response (it doesn't matter what's in here)
   let response = {
@@ -483,17 +483,17 @@ add_task(async function test_requestor_a
 
   // and another client also started a request
   clientB.commands = [{
     args: [{ collection: "bookmarks", flowID: "asdf" }],
     command: "repairRequest",
   }];
 
 
-  requestor.continueRepairs(response);
+  await requestor.continueRepairs(response);
 
   // We should have aborted now
   checkRepairFinished();
   const expected = [
     { method: "started",
       object: "repair",
       value: undefined,
       extra: { flowID, numIDs: "3" },
--- a/services/sync/tests/unit/test_bookmark_repair_responder.js
+++ b/services/sync/tests/unit/test_bookmark_repair_responder.js
@@ -11,58 +11,61 @@ Cu.import("resource://services-sync/serv
 Cu.import("resource://services-sync/bookmark_repair.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 initTestLogging("Trace");
 Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
 // sqlite logging generates lots of noise and typically isn't helpful here.
 Log.repository.getLogger("Sqlite").level = Log.Level.Error;
 
+// Disable validation so that we don't try to automatically repair the server
+// when we sync.
+Svc.Prefs.set("engine.bookmarks.validation.enabled", false);
+
 // stub telemetry so we can easily check the right things are recorded.
 var recordedEvents = [];
-Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
-  recordedEvents.push({ object, method, value, extra });
-};
 
 function checkRecordedEvents(expected) {
   deepEqual(recordedEvents, expected);
   // and clear the list so future checks are easier to write.
   recordedEvents = [];
 }
 
 function getServerBookmarks(server) {
   return server.user("foo").collection("bookmarks");
 }
 
-async function setup() {
-  let bookmarksEngine = Service.engineManager.get("bookmarks");
-
+async function makeServer() {
   let server = serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
-
-  // Disable validation so that we don't try to automatically repair the server
-  // when we sync.
-  Svc.Prefs.set("engine.bookmarks.validation.enabled", false);
-
   return server;
 }
 
 async function cleanup(server) {
   await promiseStopServer(server);
   await PlacesSyncUtils.bookmarks.wipe();
-  Svc.Prefs.reset("engine.bookmarks.validation.enabled");
   // clear keys so when each test finds a different server it accepts its keys.
   Service.collectionKeys.clear();
 }
 
+let bookmarksEngine;
+
+add_task(async function setup() {
+  bookmarksEngine = Service.engineManager.get("bookmarks");
+
+  Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
+    recordedEvents.push({ object, method, value, extra });
+  };
+});
+
 add_task(async function test_responder_error() {
-  let server = await setup();
+  let server = await makeServer();
 
   // sync so the collection is created.
-  Service.sync();
+  await Service.sync();
 
   let request = {
     request: "upload",
     ids: [Utils.makeGUID()],
     flowID: Utils.makeGUID(),
   }
   let responder = new BookmarkRepairResponder();
   // mock the responder to simulate an error.
@@ -81,20 +84,20 @@ add_task(async function test_responder_e
       }
     },
   ]);
 
   await cleanup(server);
 });
 
 add_task(async function test_responder_no_items() {
-  let server = await setup();
+  let server = await makeServer();
 
   // sync so the collection is created.
-  Service.sync();
+  await Service.sync();
 
   let request = {
     request: "upload",
     ids: [Utils.makeGUID()],
     flowID: Utils.makeGUID(),
   }
   let responder = new BookmarkRepairResponder();
   await responder.repair(request, null);
@@ -107,26 +110,26 @@ add_task(async function test_responder_n
     },
   ]);
 
   await cleanup(server);
 });
 
 // One item requested and we have it locally, but it's not yet on the server.
 add_task(async function test_responder_upload() {
-  let server = await setup();
+  let server = await makeServer();
 
   // Pretend we've already synced this bookmark, so that we can ensure it's
   // uploaded in response to our repair request.
   let bm = await PlacesUtils.bookmarks.insert({ parentGuid: PlacesUtils.bookmarks.unfiledGuid,
                                                 title: "Get Firefox",
                                                 url: "http://getfirefox.com/",
                                                 source: PlacesUtils.bookmarks.SOURCES.SYNC });
 
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
   ], "Should only upload roots on first sync");
 
   let request = {
@@ -140,17 +143,17 @@ add_task(async function test_responder_u
   checkRecordedEvents([
     { object: "repairResponse",
       method: "uploading",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "1"},
     },
   ]);
 
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
     bm.guid,
   ].sort(), "Should upload requested bookmark on second sync");
 
@@ -163,24 +166,24 @@ add_task(async function test_responder_u
   ]);
 
   await cleanup(server);
 });
 
 // One item requested and we have it locally and it's already on the server.
 // As it was explicitly requested, we should upload it.
 add_task(async function test_responder_item_exists_locally() {
-  let server = await setup();
+  let server = await makeServer();
 
   let bm = await PlacesUtils.bookmarks.insert({ parentGuid: PlacesUtils.bookmarks.unfiledGuid,
                                                 title: "Get Firefox",
                                                 url: "http://getfirefox.com/" });
   // first sync to get the item on the server.
   _("Syncing to get item on the server");
-  Service.sync();
+  await Service.sync();
 
   // issue a repair request for it.
   let request = {
     request: "upload",
     ids: [bm.guid],
     flowID: Utils.makeGUID(),
   }
   let responder = new BookmarkRepairResponder();
@@ -191,57 +194,57 @@ add_task(async function test_responder_i
     { object: "repairResponse",
       method: "uploading",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "1"},
     },
   ]);
 
   _("Syncing to do the upload.");
-  Service.sync();
+  await Service.sync();
 
   checkRecordedEvents([
     { object: "repairResponse",
       method: "finished",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "1"},
     },
   ]);
   await cleanup(server);
 });
 
 add_task(async function test_responder_tombstone() {
-  let server = await setup();
+  let server = await makeServer();
 
   // TODO: Request an item for which we have a tombstone locally. Decide if
   // we want to store tombstones permanently for this. In the integration
   // test, we can also try requesting a deleted child or ancestor.
 
   // For now, we'll handle this identically to `test_responder_missing_items`.
   // Bug 1343103 is a follow-up to better handle this.
   await cleanup(server);
 });
 
 add_task(async function test_responder_missing_items() {
-  let server = await setup();
+  let server = await makeServer();
 
   let fxBmk = await PlacesUtils.bookmarks.insert({
     parentGuid: PlacesUtils.bookmarks.unfiledGuid,
     title: "Get Firefox",
     url: "http://getfirefox.com/",
   });
   let tbBmk = await PlacesUtils.bookmarks.insert({
     parentGuid: PlacesUtils.bookmarks.unfiledGuid,
     title: "Get Thunderbird",
     url: "http://getthunderbird.com/",
     // Pretend we've already synced Thunderbird.
     source: PlacesUtils.bookmarks.SOURCES.SYNC,
   });
 
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
     fxBmk.guid,
   ].sort(), "Should upload roots and Firefox on first sync");
 
@@ -258,17 +261,17 @@ add_task(async function test_responder_m
     { object: "repairResponse",
       method: "uploading",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "2"},
     },
   ]);
 
   _("Sync after requesting IDs");
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
     fxBmk.guid,
     tbBmk.guid,
   ].sort(), "Second sync should upload Thunderbird; skip nonexistent");
@@ -280,19 +283,19 @@ add_task(async function test_responder_m
       extra: {flowID: request.flowID, numIDs: "2"},
     },
   ]);
 
   await cleanup(server);
 });
 
 add_task(async function test_non_syncable() {
-  let server = await setup();
+  let server = await makeServer();
 
-  Service.sync(); // to create the collections on the server.
+  await Service.sync(); // to create the collections on the server.
 
   // Creates the left pane queries as a side effect.
   let leftPaneId = PlacesUIUtils.leftPaneFolderId;
   _(`Left pane root ID: ${leftPaneId}`);
   await PlacesTestUtils.promiseAsyncUpdates();
 
   // A child folder of the left pane root, containing queries for the menu,
   // toolbar, and unfiled queries.
@@ -325,17 +328,17 @@ add_task(async function test_non_syncabl
       method: "uploading",
       value: undefined,
       // Tombstones for the 2 items we requested and for bookmarksMenu
       extra: {flowID: request.flowID, numIDs: "3"},
     },
   ]);
 
   _("Sync to upload tombstones for items");
-  Service.sync();
+  await Service.sync();
 
   let toolbarQueryId = PlacesUIUtils.leftPaneQueries.BookmarksToolbar;
   let menuQueryId = PlacesUIUtils.leftPaneQueries.BookmarksMenu;
   let queryGuids = [
     allBookmarksGuid,
     await PlacesUtils.promiseItemGuid(toolbarQueryId),
     await PlacesUtils.promiseItemGuid(menuQueryId),
     unfiledQueryGuid,
@@ -371,17 +374,17 @@ add_task(async function test_non_syncabl
       extra: {flowID: request.flowID, numIDs: "3"},
     },
   ]);
 
   await cleanup(server);
 });
 
 add_task(async function test_folder_descendants() {
-  let server = await setup();
+  let server = await makeServer();
 
   let parentFolder = await PlacesUtils.bookmarks.insert({
     type: PlacesUtils.bookmarks.TYPE_FOLDER,
     parentGuid: PlacesUtils.bookmarks.menuGuid,
     title: "Parent folder",
   });
   let childFolder = await PlacesUtils.bookmarks.insert({
     type: PlacesUtils.bookmarks.TYPE_FOLDER,
@@ -399,17 +402,17 @@ add_task(async function test_folder_desc
   // the repair because we explicitly request its ID.
   let childSiblingBmk = await PlacesUtils.bookmarks.insert({
     parentGuid: parentFolder.guid,
     title: "Get Thunderbird",
     url: "http://getthunderbird.com",
   });
 
   _("Initial sync to upload roots and parent folder");
-  Service.sync();
+  await Service.sync();
 
   let initialSyncIds = [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
     parentFolder.guid,
     existingChildBmk.guid,
@@ -441,17 +444,17 @@ add_task(async function test_folder_desc
     kind: "bookmark",
     syncId: Utils.makeGUID(),
     parentSyncId: childFolder.guid,
     title: "Mozilla",
     url: "https://mozilla.org",
   });
 
   _("Sync again; server contents shouldn't change");
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), initialSyncIds,
     "Second sync should not upload missing bookmarks");
 
   // This assumes the parent record on the server is correct, and the server
   // is just missing the children. This isn't a correct assumption if the
   // parent's `children` array is wrong, or if the parent and children disagree.
   _("Request missing bookmarks");
   let request = {
@@ -474,17 +477,17 @@ add_task(async function test_folder_desc
     { object: "repairResponse",
       method: "uploading",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "5"},
     },
   ]);
 
   _("Sync after requesting repair; should upload missing records");
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     ...initialSyncIds,
     childBmk.syncId,
     grandChildBmk.syncId,
     grandChildSiblingBmk.syncId,
   ].sort(), "Third sync should upload requested items");
 
   checkRecordedEvents([
@@ -495,17 +498,17 @@ add_task(async function test_folder_desc
     },
   ]);
 
   await cleanup(server);
 });
 
 // Error handling.
 add_task(async function test_aborts_unknown_request() {
-  let server = await setup();
+  let server = await makeServer();
 
   let request = {
     request: "not-upload",
     ids: [],
     flowID: Utils.makeGUID(),
   }
   let responder = new BookmarkRepairResponder();
   await responder.repair(request, null);
@@ -516,8 +519,12 @@ add_task(async function test_aborts_unkn
       value: undefined,
       extra: { flowID: request.flowID,
                reason: "Don't understand request type 'not-upload'",
              },
     },
   ]);
   await cleanup(server);
 });
+
+add_task(async function teardown() {
+  Svc.Prefs.reset("engine.bookmarks.validation.enabled");
+});
--- a/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
+++ b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
@@ -4,27 +4,18 @@
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 const SMART_BOOKMARKS_ANNO = "Places/SmartBookmark";
-var IOService = Cc["@mozilla.org/network/io-service;1"]
+const IOService = Cc["@mozilla.org/network/io-service;1"]
                 .getService(Ci.nsIIOService);
-("http://www.mozilla.com", null, null);
-
-
-Service.engineManager.register(BookmarksEngine);
-var engine = Service.engineManager.get("bookmarks");
-var store = engine._store;
-
-// Clean up after other tests. Only necessary in XULRunner.
-store.wipe();
 
 function newSmartBookmark(parent, uri, position, title, queryID) {
   let id = PlacesUtils.bookmarks.insertBookmark(parent, uri, position, title);
   PlacesUtils.annotations.setItemAnnotation(id, SMART_BOOKMARKS_ANNO,
                                             queryID, 0,
                                             PlacesUtils.annotations.EXPIRE_NEVER);
   return id;
 }
@@ -40,16 +31,25 @@ function smartBookmarkCount() {
 function clearBookmarks() {
   _("Cleaning up existing items.");
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.bookmarksMenuFolder);
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.tagsFolder);
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.toolbarFolder);
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.unfiledBookmarksFolder);
 }
 
+let engine;
+let store;
+
+add_task(async function setup() {
+  await Service.engineManager.register(BookmarksEngine);
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+});
+
 // Verify that Places smart bookmarks have their annotation uploaded and
 // handled locally.
 add_task(async function test_annotation_uploaded() {
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let startCount = smartBookmarkCount();
 
@@ -74,22 +74,22 @@ add_task(async function test_annotation_
   _("New item ID: " + mostVisitedID);
   do_check_true(!!mostVisitedID);
 
   let annoValue = PlacesUtils.annotations.getItemAnnotation(mostVisitedID,
                                               SMART_BOOKMARKS_ANNO);
   _("Anno: " + annoValue);
   do_check_eq("MostVisited", annoValue);
 
-  let guid = store.GUIDForId(mostVisitedID);
+  let guid = await store.GUIDForId(mostVisitedID);
   _("GUID: " + guid);
   do_check_true(!!guid);
 
   _("Create record object and verify that it's sane.");
-  let record = store.createRecord(guid);
+  let record = await store.createRecord(guid);
   do_check_true(record instanceof Bookmark);
   do_check_true(record instanceof BookmarkQuery);
 
   do_check_eq(record.bmkUri, uri.spec);
 
   _("Make sure the new record carries with it the annotation.");
   do_check_eq("MostVisited", record.queryId);
 
@@ -121,46 +121,46 @@ add_task(async function test_annotation_
 
     // "Clear" by changing attributes: if we delete it, apparently it sticks
     // around as a deleted record...
     PlacesUtils.bookmarks.setItemTitle(mostVisitedID, "Not Most Visited");
     PlacesUtils.bookmarks.changeBookmarkURI(
       mostVisitedID, Utils.makeURI("http://something/else"));
     PlacesUtils.annotations.removeItemAnnotation(mostVisitedID,
                                                  SMART_BOOKMARKS_ANNO);
-    store.wipe();
-    engine.resetClient();
+    await store.wipe();
+    await engine.resetClient();
     do_check_eq(smartBookmarkCount(), startCount);
 
     _("Sync. Verify that the downloaded record carries the annotation.");
     await sync_engine_and_validate_telem(engine, false);
 
     _("Verify that the Places DB now has an annotated bookmark.");
     _("Our count has increased again.");
     do_check_eq(smartBookmarkCount(), startCount + 1);
 
     _("Find by GUID and verify that it's annotated.");
-    let newID = store.idForGUID(serverGUID);
+    let newID = await store.idForGUID(serverGUID);
     let newAnnoValue = PlacesUtils.annotations.getItemAnnotation(
       newID, SMART_BOOKMARKS_ANNO);
     do_check_eq(newAnnoValue, "MostVisited");
     do_check_eq(PlacesUtils.bookmarks.getBookmarkURI(newID).spec, uri.spec);
 
     _("Test updating.");
-    let newRecord = store.createRecord(serverGUID);
+    let newRecord = await store.createRecord(serverGUID);
     do_check_eq(newRecord.queryId, newAnnoValue);
     newRecord.queryId = "LeastVisited";
-    store.update(newRecord);
+    await store.update(newRecord);
     do_check_eq("LeastVisited", PlacesUtils.annotations.getItemAnnotation(
       newID, SMART_BOOKMARKS_ANNO));
 
 
   } finally {
     // Clean up.
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_smart_bookmarks_duped() {
   let server = serverForFoo(engine);
@@ -168,50 +168,50 @@ add_task(async function test_smart_bookm
 
   let parent = PlacesUtils.toolbarFolderId;
   let uri =
     Utils.makeURI("place:sort=" +
                   Ci.nsINavHistoryQueryOptions.SORT_BY_VISITCOUNT_DESCENDING +
                   "&maxResults=10");
   let title = "Most Visited";
   let mostVisitedID = newSmartBookmark(parent, uri, -1, title, "MostVisited");
-  let mostVisitedGUID = store.GUIDForId(mostVisitedID);
+  let mostVisitedGUID = await store.GUIDForId(mostVisitedID);
 
-  let record = store.createRecord(mostVisitedGUID);
+  let record = await store.createRecord(mostVisitedGUID);
 
   _("Prepare sync.");
   try {
-    engine._syncStartup();
+    await engine._syncStartup();
 
     _("Verify that mapDupe uses the anno, discovering a dupe regardless of URI.");
-    do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+    do_check_eq(mostVisitedGUID, (await engine._mapDupe(record)));
 
     record.bmkUri = "http://foo/";
-    do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+    do_check_eq(mostVisitedGUID, (await engine._mapDupe(record)));
     do_check_neq(PlacesUtils.bookmarks.getBookmarkURI(mostVisitedID).spec,
                  record.bmkUri);
 
     _("Verify that different annos don't dupe.");
     let other = new BookmarkQuery("bookmarks", "abcdefabcdef");
     other.queryId = "LeastVisited";
     other.parentName = "Bookmarks Toolbar";
     other.bmkUri = "place:foo";
     other.title = "";
-    do_check_eq(undefined, engine._findDupe(other));
+    do_check_eq(undefined, (await engine._findDupe(other)));
 
     _("Handle records without a queryId entry.");
     record.bmkUri = uri;
     delete record.queryId;
-    do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+    do_check_eq(mostVisitedGUID, (await engine._mapDupe(record)));
 
-    engine._syncFinish();
+    await engine._syncFinish();
 
   } finally {
     // Clean up.
-    store.wipe();
+    await store.wipe();
     await promiseStopServer(server);
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
   }
 });
 
 function run_test() {
   initTestLogging("Trace");
--- a/services/sync/tests/unit/test_bookmark_store.js
+++ b/services/sync/tests/unit/test_bookmark_store.js
@@ -3,475 +3,472 @@
 
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
 const PARENT_ANNO = "sync/parent";
 
-Service.engineManager.register(BookmarksEngine);
+let engine;
+let store;
+let tracker;
 
-var engine = Service.engineManager.get("bookmarks");
-var store = engine._store;
-var tracker = engine._tracker;
+const fxuri = Utils.makeURI("http://getfirefox.com/");
+const tburi = Utils.makeURI("http://getthunderbird.com/");
 
-// Don't write some persistence files asynchronously.
-tracker.persistChangedIDs = false;
+add_task(async function setup() {
+  await Service.engineManager.register(BookmarksEngine);
 
-var fxuri = Utils.makeURI("http://getfirefox.com/");
-var tburi = Utils.makeURI("http://getthunderbird.com/");
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+  tracker = engine._tracker;
+
+  // Don't write some persistence files asynchronously.
+  tracker.persistChangedIDs = false;
+});
 
 add_task(async function test_ignore_specials() {
   _("Ensure that we can't delete bookmark roots.");
 
   // Belt...
   let record = new BookmarkFolder("bookmarks", "toolbar", "folder");
   record.deleted = true;
-  do_check_neq(null, store.idForGUID("toolbar"));
+  do_check_neq(null, (await store.idForGUID("toolbar")));
 
-  store.applyIncoming(record);
+  await store.applyIncoming(record);
   await store.deletePending();
 
   // Ensure that the toolbar exists.
-  do_check_neq(null, store.idForGUID("toolbar"));
+  do_check_neq(null, (await store.idForGUID("toolbar")));
 
   // This will fail painfully in getItemType if the deletion worked.
-  engine._buildGUIDMap();
+  await engine._buildGUIDMap();
 
   // Braces...
-  store.remove(record);
+  await store.remove(record);
   await store.deletePending();
-  do_check_neq(null, store.idForGUID("toolbar"));
-  engine._buildGUIDMap();
+  do_check_neq(null, (await store.idForGUID("toolbar")));
+  await engine._buildGUIDMap();
 
-  store.wipe();
+  await store.wipe();
 });
 
-add_test(function test_bookmark_create() {
+add_task(async function test_bookmark_create() {
   try {
     _("Ensure the record isn't present yet.");
     let ids = PlacesUtils.bookmarks.getBookmarkIdsForURI(fxuri, {});
     do_check_eq(ids.length, 0);
 
     _("Let's create a new record.");
     let fxrecord = new Bookmark("bookmarks", "get-firefox1");
     fxrecord.bmkUri        = fxuri.spec;
     fxrecord.description   = "Firefox is awesome.";
     fxrecord.title         = "Get Firefox!";
     fxrecord.tags          = ["firefox", "awesome", "browser"];
     fxrecord.keyword       = "awesome";
     fxrecord.loadInSidebar = false;
     fxrecord.parentName    = "Bookmarks Toolbar";
     fxrecord.parentid      = "toolbar";
-    store.applyIncoming(fxrecord);
+    await store.applyIncoming(fxrecord);
 
     _("Verify it has been created correctly.");
-    let id = store.idForGUID(fxrecord.id);
-    do_check_eq(store.GUIDForId(id), fxrecord.id);
+    let id = await store.idForGUID(fxrecord.id);
+    do_check_eq((await store.GUIDForId(id)), fxrecord.id);
     do_check_eq(PlacesUtils.bookmarks.getItemType(id),
                 PlacesUtils.bookmarks.TYPE_BOOKMARK);
     do_check_true(PlacesUtils.bookmarks.getBookmarkURI(id).equals(fxuri));
     do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), fxrecord.title);
     do_check_eq(PlacesUtils.annotations.getItemAnnotation(id, "bookmarkProperties/description"),
                 fxrecord.description);
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
                 PlacesUtils.bookmarks.toolbarFolder);
     do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(id), fxrecord.keyword);
 
     _("Have the store create a new record object. Verify that it has the same data.");
-    let newrecord = store.createRecord(fxrecord.id);
+    let newrecord = await store.createRecord(fxrecord.id);
     do_check_true(newrecord instanceof Bookmark);
     for (let property of ["type", "bmkUri", "description", "title",
                           "keyword", "parentName", "parentid"]) {
       do_check_eq(newrecord[property], fxrecord[property]);
     }
     do_check_true(Utils.deepEquals(newrecord.tags.sort(),
                                    fxrecord.tags.sort()));
 
     _("The calculated sort index is based on frecency data.");
     do_check_true(newrecord.sortindex >= 150);
 
     _("Create a record with some values missing.");
     let tbrecord = new Bookmark("bookmarks", "thunderbird1");
     tbrecord.bmkUri        = tburi.spec;
     tbrecord.parentName    = "Bookmarks Toolbar";
     tbrecord.parentid      = "toolbar";
-    store.applyIncoming(tbrecord);
+    await store.applyIncoming(tbrecord);
 
     _("Verify it has been created correctly.");
-    id = store.idForGUID(tbrecord.id);
-    do_check_eq(store.GUIDForId(id), tbrecord.id);
+    id = await store.idForGUID(tbrecord.id);
+    do_check_eq((await store.GUIDForId(id)), tbrecord.id);
     do_check_eq(PlacesUtils.bookmarks.getItemType(id),
                 PlacesUtils.bookmarks.TYPE_BOOKMARK);
     do_check_true(PlacesUtils.bookmarks.getBookmarkURI(id).equals(tburi));
     do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), "");
     let error;
     try {
       PlacesUtils.annotations.getItemAnnotation(id, "bookmarkProperties/description");
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.result, Cr.NS_ERROR_NOT_AVAILABLE);
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
                 PlacesUtils.bookmarks.toolbarFolder);
     do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(id), null);
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_bookmark_update() {
+add_task(async function test_bookmark_update() {
   try {
     _("Create a bookmark whose values we'll change.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
     PlacesUtils.annotations.setItemAnnotation(
       bmk1_id, "bookmarkProperties/description", "Firefox is awesome.", 0,
       PlacesUtils.annotations.EXPIRE_NEVER);
     PlacesUtils.bookmarks.setKeywordForBookmark(bmk1_id, "firefox");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
 
     _("Update the record with some null values.");
-    let record = store.createRecord(bmk1_guid);
+    let record = await store.createRecord(bmk1_guid);
     record.title = null;
     record.description = null;
     record.keyword = null;
     record.tags = null;
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
 
     _("Verify that the values have been cleared.");
     do_check_throws(function() {
       PlacesUtils.annotations.getItemAnnotation(
         bmk1_id, "bookmarkProperties/description");
     }, Cr.NS_ERROR_NOT_AVAILABLE);
     do_check_eq(PlacesUtils.bookmarks.getItemTitle(bmk1_id), "");
     do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(bmk1_id), null);
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_bookmark_createRecord() {
+add_task(async function test_bookmark_createRecord() {
   try {
     _("Create a bookmark without a description or title.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, null);
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
 
     _("Verify that the record is created accordingly.");
-    let record = store.createRecord(bmk1_guid);
+    let record = await store.createRecord(bmk1_guid);
     do_check_eq(record.title, "");
     do_check_eq(record.description, null);
     do_check_eq(record.keyword, null);
 
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_folder_create() {
+add_task(async function test_folder_create() {
   try {
     _("Create a folder.");
     let folder = new BookmarkFolder("bookmarks", "testfolder-1");
     folder.parentName = "Bookmarks Toolbar";
     folder.parentid   = "toolbar";
     folder.title      = "Test Folder";
-    store.applyIncoming(folder);
+    await store.applyIncoming(folder);
 
     _("Verify it has been created correctly.");
-    let id = store.idForGUID(folder.id);
+    let id = await store.idForGUID(folder.id);
     do_check_eq(PlacesUtils.bookmarks.getItemType(id),
                 PlacesUtils.bookmarks.TYPE_FOLDER);
     do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), folder.title);
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
                 PlacesUtils.bookmarks.toolbarFolder);
 
     _("Have the store create a new record object. Verify that it has the same data.");
-    let newrecord = store.createRecord(folder.id);
+    let newrecord = await store.createRecord(folder.id);
     do_check_true(newrecord instanceof BookmarkFolder);
     for (let property of ["title", "parentName", "parentid"])
       do_check_eq(newrecord[property], folder[property]);
 
     _("Folders have high sort index to ensure they're synced first.");
     do_check_eq(newrecord.sortindex, 1000000);
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_folder_createRecord() {
+add_task(async function test_folder_createRecord() {
   try {
     _("Create a folder.");
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
 
     _("Create two bookmarks in that folder without assigning them GUIDs.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
     let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
 
     _("Create a record for the folder and verify basic properties.");
-    let record = store.createRecord(folder1_guid);
+    let record = await store.createRecord(folder1_guid);
     do_check_true(record instanceof BookmarkFolder);
     do_check_eq(record.title, "Folder1");
     do_check_eq(record.parentid, "toolbar");
     do_check_eq(record.parentName, "Bookmarks Toolbar");
 
     _("Verify the folder's children. Ensures that the bookmarks were given GUIDs.");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
-    let bmk2_guid = store.GUIDForId(bmk2_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
+    let bmk2_guid = await store.GUIDForId(bmk2_id);
     do_check_eq(record.children.length, 2);
     do_check_eq(record.children[0], bmk1_guid);
     do_check_eq(record.children[1], bmk2_guid);
 
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
 add_task(async function test_deleted() {
   try {
     _("Create a bookmark that will be deleted.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
 
     _("Delete the bookmark through the store.");
     let record = new PlacesItem("bookmarks", bmk1_guid);
     record.deleted = true;
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
     await store.deletePending();
     _("Ensure it has been deleted.");
     let error;
     try {
       PlacesUtils.bookmarks.getBookmarkURI(bmk1_id);
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.result, Cr.NS_ERROR_ILLEGAL_VALUE);
 
-    let newrec = store.createRecord(bmk1_guid);
+    let newrec = await store.createRecord(bmk1_guid);
     do_check_eq(newrec.deleted, true);
 
   } finally {
     _("Clean up.");
-    store.wipe();
+    await store.wipe();
   }
 });
 
-add_test(function test_move_folder() {
+add_task(async function test_move_folder() {
   try {
     _("Create two folders and a bookmark in one of them.");
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
     let folder2_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder2", 0);
-    let folder2_guid = store.GUIDForId(folder2_id);
+    let folder2_guid = await store.GUIDForId(folder2_id);
     let bmk_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk_guid = store.GUIDForId(bmk_id);
+    let bmk_guid = await store.GUIDForId(bmk_id);
 
     _("Get a record, reparent it and apply it to the store.");
-    let record = store.createRecord(bmk_guid);
+    let record = await store.createRecord(bmk_guid);
     do_check_eq(record.parentid, folder1_guid);
     record.parentid = folder2_guid;
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
 
     _("Verify the new parent.");
     let new_folder_id = PlacesUtils.bookmarks.getFolderIdForItem(bmk_id);
-    do_check_eq(store.GUIDForId(new_folder_id), folder2_guid);
+    do_check_eq((await store.GUIDForId(new_folder_id)), folder2_guid);
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
 add_task(async function test_move_order() {
   // Make sure the tracker is turned on.
   Svc.Obs.notify("weave:engine:start-tracking");
   try {
     _("Create two bookmarks");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
     let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, tburi,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
-    let bmk2_guid = store.GUIDForId(bmk2_id);
+    let bmk2_guid = await store.GUIDForId(bmk2_id);
 
     _("Verify order.");
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 0);
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 1);
-    let toolbar = store.createRecord("toolbar");
+    let toolbar = await store.createRecord("toolbar");
     do_check_eq(toolbar.children.length, 2);
     do_check_eq(toolbar.children[0], bmk1_guid);
     do_check_eq(toolbar.children[1], bmk2_guid);
 
     _("Move bookmarks around.");
     store._childrenToOrder = {};
     toolbar.children = [bmk2_guid, bmk1_guid];
-    store.applyIncoming(toolbar);
+    await store.applyIncoming(toolbar);
     // Bookmarks engine does this at the end of _processIncoming
     tracker.ignoreAll = true;
     await store._orderChildren();
     tracker.ignoreAll = false;
     delete store._childrenToOrder;
 
     _("Verify new order.");
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 0);
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 1);
 
   } finally {
     Svc.Obs.notify("weave:engine:stop-tracking");
     _("Clean up.");
-    store.wipe();
+    await store.wipe();
   }
 });
 
-add_test(function test_orphan() {
+add_task(async function test_orphan() {
   try {
 
     _("Add a new bookmark locally.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(bmk1_id),
                 PlacesUtils.bookmarks.toolbarFolder);
     let error;
     try {
       PlacesUtils.annotations.getItemAnnotation(bmk1_id, PARENT_ANNO);
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.result, Cr.NS_ERROR_NOT_AVAILABLE);
 
     _("Apply a server record that is the same but refers to non-existent folder.");
-    let record = store.createRecord(bmk1_guid);
+    let record = await store.createRecord(bmk1_guid);
     record.parentid = "non-existent";
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
 
     _("Verify that bookmark has been flagged as orphan, has not moved.");
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(bmk1_id),
                 PlacesUtils.bookmarks.toolbarFolder);
     do_check_eq(PlacesUtils.annotations.getItemAnnotation(bmk1_id, PARENT_ANNO),
                 "non-existent");
 
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_reparentOrphans() {
+add_task(async function test_reparentOrphans() {
   try {
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
 
     _("Create a bogus orphan record and write the record back to the store to trigger _reparentOrphans.");
     PlacesUtils.annotations.setItemAnnotation(
       folder1_id, PARENT_ANNO, folder1_guid, 0,
       PlacesUtils.annotations.EXPIRE_NEVER);
-    let record = store.createRecord(folder1_guid);
+    let record = await store.createRecord(folder1_guid);
     record.title = "New title for Folder 1";
     store._childrenToOrder = {};
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
 
     _("Verify that is has been marked as an orphan even though it couldn't be moved into itself.");
     do_check_eq(PlacesUtils.annotations.getItemAnnotation(folder1_id, PARENT_ANNO),
                 folder1_guid);
 
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
 // Tests Bug 806460, in which query records arrive with empty folder
 // names and missing bookmark URIs.
-add_test(function test_empty_query_doesnt_die() {
+add_task(async function test_empty_query_doesnt_die() {
   let record = new BookmarkQuery("bookmarks", "8xoDGqKrXf1P");
   record.folderName    = "";
   record.queryId       = "";
   record.parentName    = "Toolbar";
   record.parentid      = "toolbar";
 
   // These should not throw.
-  store.applyIncoming(record);
+  await store.applyIncoming(record);
 
   delete record.folderName;
-  store.applyIncoming(record);
+  await store.applyIncoming(record);
 
-  run_next_test();
 });
 
 function assertDeleted(id) {
   let error;
   try {
     PlacesUtils.bookmarks.getItemType(id);
   } catch (e) {
     error = e;
   }
   equal(error.result, Cr.NS_ERROR_ILLEGAL_VALUE)
 }
 
 add_task(async function test_delete_buffering() {
-  store.wipe();
+  await store.wipe();
   await PlacesTestUtils.markBookmarksAsSynced();
 
   try {
     _("Create a folder with two bookmarks.");
     let folder = new BookmarkFolder("bookmarks", "testfolder-1");
     folder.parentName = "Bookmarks Toolbar";
     folder.parentid = "toolbar";
     folder.title = "Test Folder";
-    store.applyIncoming(folder);
+    await store.applyIncoming(folder);
 
 
     let fxRecord = new Bookmark("bookmarks", "get-firefox1");
     fxRecord.bmkUri        = fxuri.spec;
     fxRecord.title         = "Get Firefox!";
     fxRecord.parentName    = "Test Folder";
     fxRecord.parentid      = "testfolder-1";
 
     let tbRecord = new Bookmark("bookmarks", "get-tndrbrd1");
     tbRecord.bmkUri        = tburi.spec;
     tbRecord.title         = "Get Thunderbird!";
     tbRecord.parentName    = "Test Folder";
     tbRecord.parentid      = "testfolder-1";
 
-    store.applyIncoming(fxRecord);
-    store.applyIncoming(tbRecord);
+    await store.applyIncoming(fxRecord);
+    await store.applyIncoming(tbRecord);
 
-    let folderId = store.idForGUID(folder.id);
-    let fxRecordId = store.idForGUID(fxRecord.id);
-    let tbRecordId = store.idForGUID(tbRecord.id);
+    let folderId = await store.idForGUID(folder.id);
+    let fxRecordId = await store.idForGUID(fxRecord.id);
+    let tbRecordId = await store.idForGUID(tbRecord.id);
 
     _("Check everything was created correctly.");
 
     equal(PlacesUtils.bookmarks.getItemType(fxRecordId),
           PlacesUtils.bookmarks.TYPE_BOOKMARK);
     equal(PlacesUtils.bookmarks.getItemType(tbRecordId),
           PlacesUtils.bookmarks.TYPE_BOOKMARK);
     equal(PlacesUtils.bookmarks.getItemType(folderId),
@@ -485,18 +482,18 @@ add_task(async function test_delete_buff
     _("Delete the folder and one bookmark.");
 
     let deleteFolder = new PlacesItem("bookmarks", "testfolder-1");
     deleteFolder.deleted = true;
 
     let deleteFxRecord = new PlacesItem("bookmarks", "get-firefox1");
     deleteFxRecord.deleted = true;
 
-    store.applyIncoming(deleteFolder);
-    store.applyIncoming(deleteFxRecord);
+    await store.applyIncoming(deleteFolder);
+    await store.applyIncoming(deleteFxRecord);
 
     _("Check that we haven't deleted them yet, but that the deletions are queued");
     // these will throw if we've deleted them
     equal(PlacesUtils.bookmarks.getItemType(fxRecordId),
            PlacesUtils.bookmarks.TYPE_BOOKMARK);
 
     equal(PlacesUtils.bookmarks.getItemType(folderId),
            PlacesUtils.bookmarks.TYPE_FOLDER);
@@ -518,17 +515,17 @@ add_task(async function test_delete_buff
     ok(!store._itemsToDelete.has(folder.id));
     ok(!store._itemsToDelete.has(fxRecord.id));
 
     equal(PlacesUtils.bookmarks.getFolderIdForItem(tbRecordId),
           PlacesUtils.bookmarks.toolbarFolder);
 
   } finally {
     _("Clean up.");
-    store.wipe();
+    await store.wipe();
   }
 });
 
 
 function run_test() {
   initTestLogging("Trace");
   run_next_test();
 }
--- a/services/sync/tests/unit/test_bookmark_tracker.js
+++ b/services/sync/tests/unit/test_bookmark_tracker.js
@@ -11,43 +11,47 @@ Cu.import("resource://services-sync/cons
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://gre/modules/osfile.jsm");
 Cu.import("resource://testing-common/PlacesTestUtils.jsm");
 Cu.import("resource:///modules/PlacesUIUtils.jsm");
 
-Service.engineManager.register(BookmarksEngine);
-var engine = Service.engineManager.get("bookmarks");
-var store  = engine._store;
-var tracker = engine._tracker;
-
-store.wipe();
-tracker.persistChangedIDs = false;
+let engine;
+let store;
+let tracker;
 
 const DAY_IN_MS = 24 * 60 * 60 * 1000;
 
+add_task(async function setup() {
+  await Service.engineManager.register(BookmarksEngine);
+  engine = Service.engineManager.get("bookmarks");
+  store  = engine._store;
+  tracker = engine._tracker;
+  tracker.persistChangedIDs = false;
+});
+
 // Test helpers.
 async function verifyTrackerEmpty() {
   await PlacesTestUtils.promiseAsyncUpdates();
   let changes = await tracker.promiseChangedIDs();
   deepEqual(changes, {});
   equal(tracker.score, 0);
 }
 
 async function resetTracker() {
   await PlacesTestUtils.markBookmarksAsSynced();
   tracker.resetScore();
 }
 
 async function cleanup() {
   engine.lastSync = 0;
   engine._needWeakReupload.clear()
-  store.wipe();
+  await store.wipe();
   await resetTracker();
   await stopTracking();
 }
 
 // startTracking is a signal that the test wants to notice things that happen
 // after this is called (ie, things already tracked should be discarded.)
 async function startTracking() {
   Svc.Obs.notify("weave:engine:start-tracking");
@@ -441,40 +445,40 @@ add_task(async function test_onItemAdded
 
   try {
     await startTracking();
 
     _("Insert a folder using the sync API");
     let syncFolderID = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder, "Sync Folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let syncFolderGUID = engine._store.GUIDForId(syncFolderID);
+    let syncFolderGUID = await engine._store.GUIDForId(syncFolderID);
     await verifyTrackedItems(["menu", syncFolderGUID]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
 
     await resetTracker();
     await startTracking();
 
     _("Insert a bookmark using the sync API");
     let syncBmkID = PlacesUtils.bookmarks.insertBookmark(syncFolderID,
       Utils.makeURI("https://example.org/sync"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Sync Bookmark");
-    let syncBmkGUID = engine._store.GUIDForId(syncBmkID);
+    let syncBmkGUID = await engine._store.GUIDForId(syncBmkID);
     await verifyTrackedItems([syncFolderGUID, syncBmkGUID]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
 
     await resetTracker();
     await startTracking();
 
     _("Insert a separator using the sync API");
     let syncSepID = PlacesUtils.bookmarks.insertSeparator(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       PlacesUtils.bookmarks.getItemIndex(syncFolderID));
-    let syncSepGUID = engine._store.GUIDForId(syncSepID);
+    let syncSepGUID = await engine._store.GUIDForId(syncSepID);
     await verifyTrackedItems(["menu", syncSepGUID]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
   } finally {
     _("Clean up.");
     await cleanup();
   }
 });
 
@@ -565,17 +569,17 @@ add_task(async function test_onItemChang
     await stopTracking();
 
     _("Insert a bookmark");
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     await startTracking();
 
     _("Reset the bookmark's added date");
     // Convert to microseconds for PRTime.
     let dateAdded = (Date.now() - DAY_IN_MS) * 1000;
     PlacesUtils.bookmarks.setItemDateAdded(fx_id, dateAdded);
@@ -601,17 +605,17 @@ add_task(async function test_onItemChang
     await stopTracking();
 
     _("Insert a bookmark");
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     _("Set a tracked annotation to make sure we only notify once");
     PlacesUtils.annotations.setItemAnnotation(
       fx_id, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
       PlacesUtils.annotations.EXPIRE_NEVER);
 
     await startTracking();
@@ -632,26 +636,26 @@ add_task(async function test_onItemTagge
 
   try {
     await stopTracking();
 
     _("Create a folder");
     let folder = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folderGUID = engine._store.GUIDForId(folder);
+    let folderGUID = await engine._store.GUIDForId(folder);
     _("Folder ID: " + folder);
     _("Folder GUID: " + folderGUID);
 
     _("Track changes to tags");
     let uri = Utils.makeURI("http://getfirefox.com");
     let b = PlacesUtils.bookmarks.insertBookmark(
       folder, uri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bGUID = engine._store.GUIDForId(b);
+    let bGUID = await engine._store.GUIDForId(b);
     _("New item is " + b);
     _("GUID: " + bGUID);
 
     await startTracking();
 
     _("Tag the item");
     PlacesUtils.tagging.tagURI(uri, ["foo"]);
 
@@ -670,22 +674,22 @@ add_task(async function test_onItemUntag
   try {
     await stopTracking();
 
     _("Insert tagged bookmarks");
     let uri = Utils.makeURI("http://getfirefox.com");
     let fx1ID = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder, uri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let fx1GUID = engine._store.GUIDForId(fx1ID);
+    let fx1GUID = await engine._store.GUIDForId(fx1ID);
     // Different parent and title; same URL.
     let fx2ID = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, uri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Download Firefox");
-    let fx2GUID = engine._store.GUIDForId(fx2ID);
+    let fx2GUID = await engine._store.GUIDForId(fx2ID);
     PlacesUtils.tagging.tagURI(uri, ["foo"]);
 
     await startTracking();
 
     _("Remove the tag");
     PlacesUtils.tagging.untagURI(uri, ["foo"]);
 
     await verifyTrackedItems([fx1GUID, fx2GUID]);
@@ -805,17 +809,17 @@ add_task(async function test_onItemKeywo
     let folder = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
     _("Track changes to keywords");
     let uri = Utils.makeURI("http://getfirefox.com");
     let b = PlacesUtils.bookmarks.insertBookmark(
       folder, uri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bGUID = engine._store.GUIDForId(b);
+    let bGUID = await engine._store.GUIDForId(b);
     _("New item is " + b);
     _("GUID: " + bGUID);
 
     await startTracking();
 
     _("Give the item a keyword");
     PlacesUtils.bookmarks.setKeywordForBookmark(b, "the_keyword");
 
@@ -910,17 +914,17 @@ add_task(async function test_onItemPostD
     await stopTracking();
 
     _("Insert a bookmark");
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     await startTracking();
 
     // PlacesUtils.setPostDataForBookmark is deprecated, but still used by
     // PlacesTransactions.NewBookmark.
     _("Post data for the bookmark should be ignored");
     await PlacesUtils.setPostDataForBookmark(fx_id, "postData");
@@ -939,17 +943,17 @@ add_task(async function test_onItemAnnoC
     await stopTracking();
     let folder = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
     _("Track changes to annos.");
     let b = PlacesUtils.bookmarks.insertBookmark(
       folder, Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bGUID = engine._store.GUIDForId(b);
+    let bGUID = await engine._store.GUIDForId(b);
     _("New item is " + b);
     _("GUID: " + bGUID);
 
     await startTracking();
     PlacesUtils.annotations.setItemAnnotation(
       b, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
       PlacesUtils.annotations.EXPIRE_NEVER);
     // bookmark should be tracked, folder should not.
@@ -973,34 +977,34 @@ add_task(async function test_onItemAdded
   try {
     await startTracking();
 
     _("Create a new root");
     let rootID = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.placesRoot,
       "New root",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let rootGUID = engine._store.GUIDForId(rootID);
+    let rootGUID = await engine._store.GUIDForId(rootID);
     _(`New root GUID: ${rootGUID}`);
 
     _("Insert a bookmark underneath the new root");
     let untrackedBmkID = PlacesUtils.bookmarks.insertBookmark(
       rootID,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let untrackedBmkGUID = engine._store.GUIDForId(untrackedBmkID);
+    let untrackedBmkGUID = await engine._store.GUIDForId(untrackedBmkID);
     _(`New untracked bookmark GUID: ${untrackedBmkGUID}`);
 
     _("Insert a bookmark underneath the Places root");
     let rootBmkID = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.placesRoot,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let rootBmkGUID = engine._store.GUIDForId(rootBmkID);
+    let rootBmkGUID = await engine._store.GUIDForId(rootBmkID);
     _(`New Places root bookmark GUID: ${rootBmkGUID}`);
 
     _("New root and bookmark should be ignored");
     await verifyTrackedItems([]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
   } finally {
     _("Clean up.");
     await cleanup();
@@ -1013,17 +1017,17 @@ add_task(async function test_onItemDelet
   try {
     await stopTracking();
 
     _("Insert a bookmark underneath the Places root");
     let rootBmkID = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.placesRoot,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let rootBmkGUID = engine._store.GUIDForId(rootBmkID);
+    let rootBmkGUID = await engine._store.GUIDForId(rootBmkID);
     _(`New Places root bookmark GUID: ${rootBmkGUID}`);
 
     await startTracking();
 
     PlacesUtils.bookmarks.removeItem(rootBmkID);
 
     await verifyTrackedItems([]);
     // We'll still increment the counter for the removed item.
@@ -1166,24 +1170,24 @@ add_task(async function test_onItemMoved
   _("Items moved via the synchronous API should be tracked");
 
   try {
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _("Firefox GUID: " + fx_guid);
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
     _("Thunderbird GUID: " + tb_guid);
 
     await startTracking();
 
     // Moving within the folder will just track the folder.
     PlacesUtils.bookmarks.moveItem(
       tb_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0);
     await verifyTrackedItems(["menu"]);
@@ -1301,42 +1305,42 @@ add_task(async function test_onItemMoved
 
   try {
     await stopTracking();
 
     let folder_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       "Test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder_guid = engine._store.GUIDForId(folder_id);
+    let folder_guid = await engine._store.GUIDForId(folder_id);
     _(`Folder GUID: ${folder_guid}`);
 
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       folder_id,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Thunderbird");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
     _(`Thunderbird GUID: ${tb_guid}`);
 
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       folder_id,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Firefox");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     let moz_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("https://mozilla.org"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Mozilla"
     );
-    let moz_guid = engine._store.GUIDForId(moz_id);
+    let moz_guid = await engine._store.GUIDForId(moz_id);
     _(`Mozilla GUID: ${moz_guid}`);
 
     await startTracking();
 
     // PlacesSortFolderByNameTransaction exercises
     // PlacesUtils.bookmarks.setItemIndex.
     let txn = new PlacesSortFolderByNameTransaction(folder_id);
 
@@ -1364,31 +1368,31 @@ add_task(async function test_onItemDelet
   try {
     await stopTracking();
 
     _("Create a folder with two children");
     let folder_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       "Test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder_guid = engine._store.GUIDForId(folder_id);
+    let folder_guid = await engine._store.GUIDForId(folder_id);
     _(`Folder GUID: ${folder_guid}`);
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       folder_id,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       folder_id,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
     _(`Thunderbird GUID: ${tb_guid}`);
 
     await startTracking();
 
     let txn = PlacesUtils.bookmarks.getRemoveFolderTransaction(folder_id);
     // We haven't executed the transaction yet.
     await verifyTrackerEmpty();
 
@@ -1422,24 +1426,24 @@ add_task(async function test_treeMoved()
   _("Moving an entire tree of bookmarks should track the parents");
 
   try {
     // Create a couple of parent folders.
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       "First test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder1_guid = engine._store.GUIDForId(folder1_id);
+    let folder1_guid = await engine._store.GUIDForId(folder1_id);
 
     // A second folder in the first.
     let folder2_id = PlacesUtils.bookmarks.createFolder(
       folder1_id,
       "Second test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder2_guid = engine._store.GUIDForId(folder2_id);
+    let folder2_guid = await engine._store.GUIDForId(folder2_id);
 
     // Create a couple of bookmarks in the second folder.
     PlacesUtils.bookmarks.insertBookmark(
       folder2_id,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
     PlacesUtils.bookmarks.insertBookmark(
@@ -1471,17 +1475,17 @@ add_task(async function test_onItemDelet
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
 
     await startTracking();
 
     // Delete the last item - the item and parent should be tracked.
     PlacesUtils.bookmarks.removeItem(tb_id);
 
     await verifyTrackedItems(["menu", tb_guid]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
@@ -1611,34 +1615,34 @@ add_task(async function test_onItemDelet
   _("Removing a folder's children should track the folder and its children");
 
   try {
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.mobileFolderId,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.mobileFolderId,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
     _(`Thunderbird GUID: ${tb_guid}`);
 
     let moz_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("https://mozilla.org"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Mozilla"
     );
-    let moz_guid = engine._store.GUIDForId(moz_id);
+    let moz_guid = await engine._store.GUIDForId(moz_id);
     _(`Mozilla GUID: ${moz_guid}`);
 
     await startTracking();
 
     _(`Mobile root ID: ${PlacesUtils.mobileFolderId}`);
     PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.mobileFolderId);
 
     await verifyTrackedItems(["mobile", fx_guid, tb_guid]);
@@ -1653,38 +1657,38 @@ add_task(async function test_onItemDelet
   _("Deleting a tree of bookmarks should track all items");
 
   try {
     // Create a couple of parent folders.
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       "First test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder1_guid = engine._store.GUIDForId(folder1_id);
+    let folder1_guid = await engine._store.GUIDForId(folder1_id);
 
     // A second folder in the first.
     let folder2_id = PlacesUtils.bookmarks.createFolder(
       folder1_id,
       "Second test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder2_guid = engine._store.GUIDForId(folder2_id);
+    let folder2_guid = await engine._store.GUIDForId(folder2_id);
 
     // Create a couple of bookmarks in the second folder.
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       folder2_id,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       folder2_id,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
 
     await startTracking();
 
     // Delete folder2 - everything we created should be tracked.
     PlacesUtils.bookmarks.removeItem(folder2_id);
 
     await verifyTrackedItems([fx_guid, tb_guid, folder1_guid, folder2_guid]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -7,17 +7,17 @@ Cu.import("resource://services-sync/engi
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 const MORE_THAN_CLIENTS_TTL_REFRESH = 691200; // 8 days
 const LESS_THAN_CLIENTS_TTL_REFRESH = 86400;  // 1 day
 
-var engine = Service.clientsEngine;
+let engine;
 
 /**
  * Unpack the record with this ID, and verify that it has the same version that
  * we should be putting into records.
  */
 function check_record_version(user, id) {
     let payload = JSON.parse(user.collection("clients").wbo(id).payload);
 
@@ -43,20 +43,24 @@ function compareCommands(actual, expecte
   let tweakedActual = JSON.parse(JSON.stringify(actual));
   tweakedActual.map(elt => delete elt.flowID);
   deepEqual(tweakedActual, expected, description);
   // each item must have a unique flowID.
   let allIDs = new Set(actual.map(elt => elt.flowID).filter(fid => !!fid));
   equal(allIDs.size, actual.length, "all items have unique IDs");
 }
 
-function cleanup() {
+add_task(async function setup() {
+  engine = Service.clientsEngine;
+});
+
+async function cleanup() {
   Svc.Prefs.resetBranch("");
   engine._tracker.clearChangedIDs();
-  engine._resetClient();
+  await engine._resetClient();
   // We don't finalize storage at cleanup, since we use the same clients engine
   // instance across all tests.
 }
 
 add_task(async function test_bad_hmac() {
   _("Ensure that Clients engine deletes corrupt records.");
   let deletedCollections = [];
   let deletedItems       = [];
@@ -90,120 +94,120 @@ add_task(async function test_bad_hmac() 
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
     ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
   }
 
   try {
     await configureIdentity({username: "foo"}, server);
-    Service.login();
+    await Service.login();
 
     generateNewKeys(Service.collectionKeys);
 
     _("First sync, client record is uploaded");
     equal(engine.lastRecordUpload, 0);
     check_clients_count(0);
-    engine._sync();
+    await engine._sync();
     check_clients_count(1);
     ok(engine.lastRecordUpload > 0);
 
     // Our uploaded record has a version.
     check_record_version(user, engine.localID);
 
     // Initial setup can wipe the server, so clean up.
     deletedCollections = [];
     deletedItems       = [];
 
     _("Change our keys and our client ID, reupload keys.");
     let oldLocalID  = engine.localID;     // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
-    engine.resetClient();
+    await engine.resetClient();
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
     ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     _("Sync.");
-    engine._sync();
+    await engine._sync();
 
     _("Old record " + oldLocalID + " was deleted, new one uploaded.");
     check_clients_count(1);
     check_client_deleted(oldLocalID);
 
     _("Now change our keys but don't upload them. " +
       "That means we get an HMAC error but redownload keys.");
     Service.lastHMACEvent = 0;
     engine.localID = Utils.makeGUID();
-    engine.resetClient();
+    await engine.resetClient();
     generateNewKeys(Service.collectionKeys);
     deletedCollections = [];
     deletedItems       = [];
     check_clients_count(1);
-    engine._sync();
+    await engine._sync();
 
     _("Old record was not deleted, new one uploaded.");
     equal(deletedCollections.length, 0);
     equal(deletedItems.length, 0);
     check_clients_count(2);
 
     _("Now try the scenario where our keys are wrong *and* there's a bad record.");
     // Clean up and start fresh.
     user.collection("clients")._wbos = {};
     Service.lastHMACEvent = 0;
     engine.localID = Utils.makeGUID();
-    engine.resetClient();
+    await engine.resetClient();
     deletedCollections = [];
     deletedItems       = [];
     check_clients_count(0);
 
     await uploadNewKeys();
 
     // Sync once to upload a record.
-    engine._sync();
+    await engine._sync();
     check_clients_count(1);
 
     // Generate and upload new keys, so the old client record is wrong.
     await uploadNewKeys();
 
     // Create a new client record and new keys. Now our keys are wrong, as well
     // as the object on the server. We'll download the new keys and also delete
     // the bad client record.
     oldLocalID  = engine.localID;         // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
-    engine.resetClient();
+    await engine.resetClient();
     generateNewKeys(Service.collectionKeys);
     let oldKey = Service.collectionKeys.keyForCollection();
 
     equal(deletedCollections.length, 0);
     equal(deletedItems.length, 0);
-    engine._sync();
+    await engine._sync();
     equal(deletedItems.length, 1);
     check_client_deleted(oldLocalID);
     check_clients_count(1);
     let newKey = Service.collectionKeys.keyForCollection();
     ok(!oldKey.equals(newKey));
 
   } finally {
-    cleanup();
+    await cleanup();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_properties() {
   _("Test lastRecordUpload property");
   try {
     equal(Svc.Prefs.get("clients.lastRecordUpload"), undefined);
     equal(engine.lastRecordUpload, 0);
 
     let now = Date.now();
     engine.lastRecordUpload = now / 1000;
     equal(engine.lastRecordUpload, Math.floor(now / 1000));
   } finally {
-    cleanup();
+    await cleanup();
   }
 });
 
 add_task(async function test_full_sync() {
   _("Ensure that Clients engine fetches all records for each sync.");
 
   let now = Date.now() / 1000;
   let server = serverForFoo(engine);
@@ -232,38 +236,40 @@ add_task(async function test_full_sync()
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     let store = engine._store;
 
     _("First sync. 2 records downloaded; our record uploaded.");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
     ok(engine.lastRecordUpload > 0);
     deepEqual(user.collection("clients").keys().sort(),
               [activeID, deletedID, engine.localID].sort(),
               "Our record should be uploaded on first sync");
-    deepEqual(Object.keys(store.getAllIDs()).sort(),
+    let ids = await store.getAllIDs();
+    deepEqual(Object.keys(ids).sort(),
               [activeID, deletedID, engine.localID].sort(),
               "Other clients should be downloaded on first sync");
 
     _("Delete a record, then sync again");
     let collection = server.getCollection("foo", "clients");
     collection.remove(deletedID);
     // Simulate a timestamp update in info/collections.
     engine.lastModified = now;
-    engine._sync();
+    await engine._sync();
 
     _("Record should be updated");
-    deepEqual(Object.keys(store.getAllIDs()).sort(),
+    ids = await store.getAllIDs();
+    deepEqual(Object.keys(ids).sort(),
               [activeID, engine.localID].sort(),
               "Deleted client should be removed on next sync");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -281,41 +287,41 @@ add_task(async function test_sync() {
     return user.collection("clients").wbo(engine.localID);
   }
 
   try {
 
     _("First sync. Client record is uploaded.");
     equal(clientWBO(), undefined);
     equal(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
     ok(!!clientWBO().payload);
     ok(engine.lastRecordUpload > 0);
 
     _("Let's time travel more than a week back, new record should've been uploaded.");
     engine.lastRecordUpload -= MORE_THAN_CLIENTS_TTL_REFRESH;
     let lastweek = engine.lastRecordUpload;
     clientWBO().payload = undefined;
-    engine._sync();
+    await engine._sync();
     ok(!!clientWBO().payload);
     ok(engine.lastRecordUpload > lastweek);
 
     _("Remove client record.");
     await engine.removeClientData();
     equal(clientWBO().payload, undefined);
 
     _("Time travel one day back, no record uploaded.");
     engine.lastRecordUpload -= LESS_THAN_CLIENTS_TTL_REFRESH;
     let yesterday = engine.lastRecordUpload;
-    engine._sync();
+    await engine._sync();
     equal(clientWBO().payload, undefined);
     equal(engine.lastRecordUpload, yesterday);
 
   } finally {
-    cleanup();
+    await cleanup();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_client_name_change() {
   _("Ensure client name change incurs a client record update.");
 
   let tracker = engine._tracker;
@@ -339,17 +345,17 @@ add_task(async function test_client_name
   notEqual(initialName, engine.localName);
   equal(Object.keys(tracker.changedIDs).length, 1);
   ok(engine.localID in tracker.changedIDs);
   ok(tracker.score > initialScore);
   ok(tracker.score >= SCORE_INCREMENT_XLARGE);
 
   Svc.Obs.notify("weave:engine:stop-tracking");
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_last_modified() {
   _("Ensure that remote records have a sane serverLastModified attribute.");
 
   let now = Date.now() / 1000;
   let server = serverForFoo(engine);
   let user   = server.user("foo");
@@ -366,73 +372,73 @@ add_task(async function test_last_modifi
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     let collection = user.collection("clients");
 
     _("Sync to download the record");
-    engine._sync();
+    await engine._sync();
 
     equal(engine._store._remoteClients[activeID].serverLastModified, now - 10,
           "last modified in the local record is correctly the server last-modified");
 
     _("Modify the record and re-upload it");
     // set a new name to make sure we really did upload.
     engine._store._remoteClients[activeID].name = "New name";
     engine._modified.set(activeID, 0);
-    engine._uploadOutgoing();
+    await engine._uploadOutgoing();
 
     _("Local record should have updated timestamp");
     ok(engine._store._remoteClients[activeID].serverLastModified >= now);
 
     _("Record on the server should have new name but not serverLastModified");
     let payload = JSON.parse(JSON.parse(collection.payload(activeID)).ciphertext);
     equal(payload.name, "New name");
     equal(payload.serverLastModified, undefined);
 
   } finally {
-    cleanup();
+    await cleanup();
     server.deleteCollections("foo");
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_send_command() {
   _("Verifies _sendCommandToClient puts commands in the outbound queue.");
 
   let store = engine._store;
   let tracker = engine._tracker;
   let remoteId = Utils.makeGUID();
   let rec = new ClientsRec("clients", remoteId);
 
-  store.create(rec);
-  store.createRecord(remoteId, "clients");
+  await store.create(rec);
+  await store.createRecord(remoteId, "clients");
 
   let action = "testCommand";
   let args = ["foo", "bar"];
   let extra = { flowID: "flowy" }
 
-  engine._sendCommandToClient(action, args, remoteId, extra);
+  await engine._sendCommandToClient(action, args, remoteId, extra);
 
   let newRecord = store._remoteClients[remoteId];
-  let clientCommands = engine._readCommands()[remoteId];
+  let clientCommands = (await engine._readCommands())[remoteId];
   notEqual(newRecord, undefined);
   equal(clientCommands.length, 1);
 
   let command = clientCommands[0];
   equal(command.command, action);
   equal(command.args.length, 2);
   deepEqual(command.args, args);
   ok(command.flowID);
 
   notEqual(tracker.changedIDs[remoteId], undefined);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_command_validation() {
   _("Verifies that command validation works properly.");
 
   let store = engine._store;
 
   let testCommands = [
@@ -448,25 +454,25 @@ add_task(async function test_command_val
     ["logout",      ["foo"],  false],
     ["__UNKNOWN__", [],       false]
   ];
 
   for (let [action, args, expectedResult] of testCommands) {
     let remoteId = Utils.makeGUID();
     let rec = new ClientsRec("clients", remoteId);
 
-    store.create(rec);
-    store.createRecord(remoteId, "clients");
+    await store.create(rec);
+    await store.createRecord(remoteId, "clients");
 
-    engine.sendCommand(action, args, remoteId);
+    await engine.sendCommand(action, args, remoteId);
 
     let newRecord = store._remoteClients[remoteId];
     notEqual(newRecord, undefined);
 
-    let clientCommands = engine._readCommands()[remoteId];
+    let clientCommands = (await engine._readCommands())[remoteId];
 
     if (expectedResult) {
       _("Ensuring command is sent: " + action);
       equal(clientCommands.length, 1);
 
       let command = clientCommands[0];
       equal(command.command, action);
       deepEqual(command.args, args);
@@ -478,68 +484,68 @@ add_task(async function test_command_val
       equal(clientCommands, undefined);
 
       if (store._tracker) {
         equal(engine._tracker[remoteId], undefined);
       }
     }
 
   }
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_command_duplication() {
   _("Ensures duplicate commands are detected and not added");
 
   let store = engine._store;
   let remoteId = Utils.makeGUID();
   let rec = new ClientsRec("clients", remoteId);
-  store.create(rec);
-  store.createRecord(remoteId, "clients");
+  await store.create(rec);
+  await store.createRecord(remoteId, "clients");
 
   let action = "resetAll";
   let args = [];
 
-  engine.sendCommand(action, args, remoteId);
-  engine.sendCommand(action, args, remoteId);
+  await engine.sendCommand(action, args, remoteId);
+  await engine.sendCommand(action, args, remoteId);
 
-  let clientCommands = engine._readCommands()[remoteId];
+  let clientCommands = (await engine._readCommands())[remoteId];
   equal(clientCommands.length, 1);
 
   _("Check variant args length");
-  engine._saveCommands({});
+  await engine._saveCommands({});
 
   action = "resetEngine";
-  engine.sendCommand(action, [{ x: "foo" }], remoteId);
-  engine.sendCommand(action, [{ x: "bar" }], remoteId);
+  await engine.sendCommand(action, [{ x: "foo" }], remoteId);
+  await engine.sendCommand(action, [{ x: "bar" }], remoteId);
 
   _("Make sure we spot a real dupe argument.");
-  engine.sendCommand(action, [{ x: "bar" }], remoteId);
+  await engine.sendCommand(action, [{ x: "bar" }], remoteId);
 
-  clientCommands = engine._readCommands()[remoteId];
+  clientCommands = (await engine._readCommands())[remoteId];
   equal(clientCommands.length, 2);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_command_invalid_client() {
   _("Ensures invalid client IDs are caught");
 
   let id = Utils.makeGUID();
   let error;
 
   try {
-    engine.sendCommand("wipeAll", [], id);
+    await engine.sendCommand("wipeAll", [], id);
   } catch (ex) {
     error = ex;
   }
 
   equal(error.message.indexOf("Unknown remote client ID: "), 0);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_process_incoming_commands() {
   _("Ensures local commands are executed");
 
   engine.localCommands = [{ command: "logout", args: [] }];
 
   let ev = "weave:service:logout:finish";
@@ -550,21 +556,21 @@ add_task(async function test_process_inc
 
       resolve();
     };
 
     Svc.Obs.add(ev, handler);
   });
 
   // logout command causes processIncomingCommands to return explicit false.
-  ok(!engine.processIncomingCommands());
+  ok(!(await engine.processIncomingCommands()));
 
   await logoutPromise;
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_filter_duplicate_names() {
   _("Ensure that we exclude clients with identical names that haven't synced in a week.");
 
   let now = Date.now() / 1000;
   let server = serverForFoo(engine);
   let user   = server.user("foo");
@@ -605,26 +611,27 @@ add_task(async function test_filter_dupl
     protocols: ["1.5"],
   }), now - 604820));
 
   try {
     let store = engine._store;
 
     _("First sync");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
     ok(engine.lastRecordUpload > 0);
     deepEqual(user.collection("clients").keys().sort(),
               [recentID, dupeID, oldID, engine.localID].sort(),
               "Our record should be uploaded on first sync");
 
-    deepEqual(Object.keys(store.getAllIDs()).sort(),
+    let ids = await store.getAllIDs();
+    deepEqual(Object.keys(ids).sort(),
               [recentID, dupeID, oldID, engine.localID].sort(),
               "Duplicate ID should remain in getAllIDs");
-    ok(engine._store.itemExists(dupeID), "Dupe ID should be considered as existing for Sync methods.");
+    ok((await engine._store.itemExists(dupeID)), "Dupe ID should be considered as existing for Sync methods.");
     ok(!engine.remoteClientExists(dupeID), "Dupe ID should not be considered as existing for external methods.");
 
     // dupe desktop should not appear in .deviceTypes.
     equal(engine.deviceTypes.get("desktop"), 2);
     equal(engine.deviceTypes.get("mobile"), 1);
 
     // dupe desktop should not appear in stats
     deepEqual(engine.stats, {
@@ -639,26 +646,26 @@ add_task(async function test_filter_dupl
 
     // Check that a subsequent Sync doesn't report anything as being processed.
     let counts;
     Svc.Obs.add("weave:engine:sync:applied", function observe(subject, data) {
       Svc.Obs.remove("weave:engine:sync:applied", observe);
       counts = subject;
     });
 
-    engine._sync();
+    await engine._sync();
     equal(counts.applied, 0); // We didn't report applying any records.
     equal(counts.reconciled, 4); // We reported reconcilliation for all records
     equal(counts.succeeded, 0);
     equal(counts.failed, 0);
     equal(counts.newFailed, 0);
 
     _("Broadcast logout to all clients");
-    engine.sendCommand("logout", []);
-    engine._sync();
+    await engine.sendCommand("logout", []);
+    await engine._sync();
 
     let collection = server.getCollection("foo", "clients");
     let recentPayload = JSON.parse(JSON.parse(collection.payload(recentID)).ciphertext);
     compareCommands(recentPayload.commands, [{ command: "logout", args: [] }],
                     "Should send commands to the recent client");
 
     let oldPayload = JSON.parse(JSON.parse(collection.payload(oldID)).ciphertext);
     compareCommands(oldPayload.commands, [{ command: "logout", args: [] }],
@@ -674,19 +681,20 @@ add_task(async function test_filter_dupl
       name: engine.localName,
       type: "desktop",
       commands: [],
       version: "48",
       protocols: ["1.5"],
     }), now - 10));
 
     _("Second sync.");
-    engine._sync();
+    await engine._sync();
 
-    deepEqual(Object.keys(store.getAllIDs()).sort(),
+    ids = await store.getAllIDs();
+    deepEqual(Object.keys(ids).sort(),
               [recentID, oldID, dupeID, engine.localID].sort(),
               "Stale client synced, so it should no longer be marked as a dupe");
 
     ok(engine.remoteClientExists(dupeID), "Dupe ID should appear as it synced.");
 
     // Recently synced dupe desktop should appear in .deviceTypes.
     equal(engine.deviceTypes.get("desktop"), 3);
 
@@ -696,30 +704,30 @@ add_task(async function test_filter_dupl
       names: [engine.localName, "My Phone", engine.localName, "My old desktop"],
       numClients: 4,
     });
 
     ok(engine.remoteClientExists(dupeID), "recently synced dupe ID should now exist");
     equal(engine.remoteClients.length, 3, "recently synced dupe should now be in remoteClients");
 
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
 
 add_task(async function test_command_sync() {
   _("Ensure that commands are synced across clients.");
 
-  engine._store.wipe();
+  await engine._store.wipe();
   generateNewKeys(Service.collectionKeys);
 
   let server   = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let user     = server.user("foo");
   let remoteId = Utils.makeGUID();
 
@@ -734,63 +742,63 @@ add_task(async function test_command_syn
     type: "desktop",
     commands: [],
     version: "48",
     protocols: ["1.5"],
   }), Date.now() / 1000));
 
   try {
     _("Syncing.");
-    engine._sync();
+    await engine._sync();
 
     _("Checking remote record was downloaded.");
     let clientRecord = engine._store._remoteClients[remoteId];
     notEqual(clientRecord, undefined);
     equal(clientRecord.commands.length, 0);
 
     _("Send a command to the remote client.");
-    engine.sendCommand("wipeAll", []);
-    let clientCommands = engine._readCommands()[remoteId];
+    await engine.sendCommand("wipeAll", []);
+    let clientCommands = (await engine._readCommands())[remoteId];
     equal(clientCommands.length, 1);
-    engine._sync();
+    await engine._sync();
 
     _("Checking record was uploaded.");
     notEqual(clientWBO(engine.localID).payload, undefined);
     ok(engine.lastRecordUpload > 0);
 
     notEqual(clientWBO(remoteId).payload, undefined);
 
     Svc.Prefs.set("client.GUID", remoteId);
     engine._resetClient();
     equal(engine.localID, remoteId);
     _("Performing sync on resetted client.");
-    engine._sync();
+    await engine._sync();
     notEqual(engine.localCommands, undefined);
     equal(engine.localCommands.length, 1);
 
     let command = engine.localCommands[0];
     equal(command.command, "wipeAll");
     equal(command.args.length, 0);
 
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       let collection = server.getCollection("foo", "clients");
       collection.remove(remoteId);
     } finally {
       await promiseStopServer(server);
     }
   }
 });
 
 add_task(async function test_clients_not_in_fxa_list() {
   _("Ensure that clients not in the FxA devices list are marked as stale.");
 
-  engine._store.wipe();
+  await engine._store.wipe();
   generateNewKeys(Service.collectionKeys);
 
   let server   = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let remoteId = Utils.makeGUID();
   let remoteId2 = Utils.makeGUID();
 
@@ -818,24 +826,24 @@ add_task(async function test_clients_not
   engine.fxAccounts = {
     notifyDevices() { return Promise.resolve(true); },
     getDeviceId() { return fxAccounts.getDeviceId(); },
     getDeviceList() { return Promise.resolve([{ id: remoteId }]); }
   };
 
   try {
     _("Syncing.");
-    engine._sync();
+    await engine._sync();
 
     ok(!engine._store._remoteClients[remoteId].stale);
     ok(engine._store._remoteClients[remoteId2].stale);
 
   } finally {
     engine.fxAccounts = fxAccounts;
-    cleanup();
+    await cleanup();
 
     try {
       let collection = server.getCollection("foo", "clients");
       collection.remove(remoteId);
     } finally {
       await promiseStopServer(server);
     }
   }
@@ -845,30 +853,30 @@ add_task(async function test_send_uri_to
   _("Ensure sendURIToClientForDisplay() sends command properly.");
 
   let tracker = engine._tracker;
   let store = engine._store;
 
   let remoteId = Utils.makeGUID();
   let rec = new ClientsRec("clients", remoteId);
   rec.name = "remote";
-  store.create(rec);
-  store.createRecord(remoteId, "clients");
+  await store.create(rec);
+  await store.createRecord(remoteId, "clients");
 
   tracker.clearChangedIDs();
   let initialScore = tracker.score;
 
   let uri = "http://www.mozilla.org/";
   let title = "Title of the Page";
-  engine.sendURIToClientForDisplay(uri, remoteId, title);
+  await engine.sendURIToClientForDisplay(uri, remoteId, title);
 
   let newRecord = store._remoteClients[remoteId];
 
   notEqual(newRecord, undefined);
-  let clientCommands = engine._readCommands()[remoteId];
+  let clientCommands = (await engine._readCommands())[remoteId];
   equal(clientCommands.length, 1);
 
   let command = clientCommands[0];
   equal(command.command, "displayURI");
   equal(command.args.length, 3);
   equal(command.args[0], uri);
   equal(command.args[1], engine.localID);
   equal(command.args[2], title);
@@ -876,24 +884,24 @@ add_task(async function test_send_uri_to
   ok(tracker.score > initialScore);
   ok(tracker.score - initialScore >= SCORE_INCREMENT_XLARGE);
 
   _("Ensure unknown client IDs result in exception.");
   let unknownId = Utils.makeGUID();
   let error;
 
   try {
-    engine.sendURIToClientForDisplay(uri, unknownId);
+    await engine.sendURIToClientForDisplay(uri, unknownId);
   } catch (ex) {
     error = ex;
   }
 
   equal(error.message.indexOf("Unknown remote client ID: "), 0);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_receive_display_uri() {
   _("Ensure processing of received 'displayURI' commands works.");
 
   // We don't set up WBOs and perform syncing because other tests verify
   // the command API works as advertised. This saves us a little work.
 
@@ -917,33 +925,33 @@ add_task(async function test_receive_dis
       Svc.Obs.remove(ev, handler);
 
       resolve({ subject, data });
     };
 
     Svc.Obs.add(ev, handler);
   });
 
-  ok(engine.processIncomingCommands());
+  ok((await engine.processIncomingCommands()));
 
   let { subject, data } = await promiseDisplayURI;
 
   equal(subject[0].uri, uri);
   equal(subject[0].clientId, remoteId);
   equal(subject[0].title, title);
   equal(data, null);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_optional_client_fields() {
   _("Ensure that we produce records with the fields added in Bug 1097222.");
 
   const SUPPORTED_PROTOCOL_VERSIONS = ["1.5"];
-  let local = engine._store.createRecord(engine.localID, "clients");
+  let local = await engine._store.createRecord(engine.localID, "clients");
   equal(local.name, engine.localName);
   equal(local.type, engine.localType);
   equal(local.version, Services.appinfo.version);
   deepEqual(local.protocols, SUPPORTED_PROTOCOL_VERSIONS);
 
   // Optional fields.
   // Make sure they're what they ought to be...
   equal(local.os, Services.appinfo.OS);
@@ -952,17 +960,17 @@ add_task(async function test_optional_cl
   // ... and also that they're non-empty.
   ok(!!local.os);
   ok(!!local.appPackage);
   ok(!!local.application);
 
   // We don't currently populate device or formfactor.
   // See Bug 1100722, Bug 1100723.
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_merge_commands() {
   _("Verifies local commands for remote clients are merged with the server's");
 
   let now = Date.now() / 1000;
   let server = serverForFoo(engine);
 
@@ -995,37 +1003,37 @@ add_task(async function test_merge_comma
     }],
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     _("First sync. 2 records downloaded.");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
 
     _("Broadcast logout to all clients");
-    engine.sendCommand("logout", []);
-    engine._sync();
+    await engine.sendCommand("logout", []);
+    await engine._sync();
 
     let collection = server.getCollection("foo", "clients");
     let desktopPayload = JSON.parse(JSON.parse(collection.payload(desktopID)).ciphertext);
     compareCommands(desktopPayload.commands, [{
       command: "displayURI",
       args: ["https://example.com", engine.localID, "Yak Herders Anonymous"],
     }, {
       command: "logout",
       args: [],
     }], "Should send the logout command to the desktop client");
 
     let mobilePayload = JSON.parse(JSON.parse(collection.payload(mobileID)).ciphertext);
     compareCommands(mobilePayload.commands, [{ command: "logout", args: [] }],
                     "Should not send a duplicate logout to the mobile client");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -1047,44 +1055,44 @@ add_task(async function test_duplicate_r
     commands: [],
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     _("First sync. 1 record downloaded.");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
 
     _("Send tab to client");
-    engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"]);
-    engine._sync();
+    await engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"]);
+    await engine._sync();
 
     _("Simulate the desktop client consuming the command and syncing to the server");
     server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
       id: desktopID,
       name: "Desktop client",
       type: "desktop",
       commands: [],
       version: "48",
       protocols: ["1.5"],
     }), now - 10));
 
     _("Send another tab to the desktop client");
-    engine.sendCommand("displayURI", ["https://foobar.com", engine.localID, "Foo bar!"], desktopID);
-    engine._sync();
+    await engine.sendCommand("displayURI", ["https://foobar.com", engine.localID, "Foo bar!"], desktopID);
+    await engine._sync();
 
     let collection = server.getCollection("foo", "clients");
     let desktopPayload = JSON.parse(JSON.parse(collection.payload(desktopID)).ciphertext);
     compareCommands(desktopPayload.commands, [{
       command: "displayURI",
       args: ["https://foobar.com", engine.localID, "Foo bar!"],
     }], "Should only send the second command to the desktop client");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -1119,24 +1127,24 @@ add_task(async function test_upload_afte
     commands: [],
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     _("First sync. 2 records downloaded.");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
 
     _("Send tab to client");
-    engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"], deviceBID);
+    await engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"], deviceBID);
 
     const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing;
-    SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten([], [deviceBID]);
-    engine._sync();
+    SyncEngine.prototype._uploadOutgoing = async () => engine._onRecordsWritten([], [deviceBID]);
+    await engine._sync();
 
     let collection = server.getCollection("foo", "clients");
     let deviceBPayload = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
     compareCommands(deviceBPayload.commands, [{
       command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
     }], "Should be the same because the upload failed");
 
     _("Simulate the client B consuming the command and syncing to the server");
@@ -1147,26 +1155,27 @@ add_task(async function test_upload_afte
       commands: [],
       version: "48",
       protocols: ["1.5"],
     }), now - 10));
 
     // Simulate reboot
     SyncEngine.prototype._uploadOutgoing = oldUploadOutgoing;
     engine = Service.clientsEngine = new ClientEngine(Service);
+    await engine.initialize();
 
-    engine._sync();
+    await engine._sync();
 
     deviceBPayload = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
     compareCommands(deviceBPayload.commands, [{
       command: "displayURI",
       args: ["https://example.com", engine.localID, "Yak Herders Anonymous"],
     }], "Should only had written our outgoing command");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -1217,22 +1226,22 @@ add_task(async function test_keep_cleare
   }), now - 10));
 
   try {
     _("First sync. Download remote and our record.");
     strictEqual(engine.lastRecordUpload, 0);
 
     let collection = server.getCollection("foo", "clients");
     const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing;
-    SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten([], [deviceBID]);
+    SyncEngine.prototype._uploadOutgoing = async () => engine._onRecordsWritten([], [deviceBID]);
     let commandsProcessed = 0;
     engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length };
 
-    engine._sync();
-    engine.processIncomingCommands(); // Not called by the engine.sync(), gotta call it ourselves
+    await engine._sync();
+    await engine.processIncomingCommands(); // Not called by the engine.sync(), gotta call it ourselves
     equal(commandsProcessed, 2, "We processed 2 commands");
 
     let localRemoteRecord = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
     compareCommands(localRemoteRecord.commands, [{
       command: "displayURI", args: ["https://deviceblink.com", deviceBID, "Device B link"]
     },
     {
       command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
@@ -1260,30 +1269,32 @@ add_task(async function test_keep_cleare
       }],
       version: "48",
       protocols: ["1.5"],
     }), now - 10));
 
     // Simulate reboot
     SyncEngine.prototype._uploadOutgoing = oldUploadOutgoing;
     engine = Service.clientsEngine = new ClientEngine(Service);
+    await engine.initialize();
 
     commandsProcessed = 0;
     engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length };
-    engine._sync();
-    engine.processIncomingCommands();
+    await engine._sync();
+    await engine.processIncomingCommands();
     equal(commandsProcessed, 1, "We processed one command (the other were cleared)");
 
     localRemoteRecord = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
     deepEqual(localRemoteRecord.commands, [], "Should be empty");
   } finally {
-    cleanup();
+    await cleanup();
 
     // Reset service (remove mocks)
     engine = Service.clientsEngine = new ClientEngine(Service);
+    await engine.initialize();
     engine._resetClient();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
@@ -1315,34 +1326,34 @@ add_task(async function test_deleted_com
     type: "desktop",
     commands: [],
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     _("First sync. 2 records downloaded.");
-    engine._sync();
+    await engine._sync();
 
     _("Delete a record on the server.");
     let collection = server.getCollection("foo", "clients");
     collection.remove(deletedID);
 
     _("Broadcast a command to all clients");
-    engine.sendCommand("logout", []);
-    engine._sync();
+    await engine.sendCommand("logout", []);
+    await engine._sync();
 
     deepEqual(collection.keys().sort(), [activeID, engine.localID].sort(),
       "Should not reupload deleted clients");
 
     let activePayload = JSON.parse(JSON.parse(collection.payload(activeID)).ciphertext);
     compareCommands(activePayload.commands, [{ command: "logout", args: [] }],
       "Should send the command to the active client");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -1355,62 +1366,62 @@ add_task(async function test_send_uri_ac
 
   await SyncTestingInfrastructure(server);
   generateNewKeys(Service.collectionKeys);
 
   try {
     let fakeSenderID = Utils.makeGUID();
 
     _("Initial sync for empty clients collection");
-    engine._sync();
+    await engine._sync();
     let collection = server.getCollection("foo", "clients");
     let ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
     ok(ourPayload, "Should upload our client record");
 
     _("Send a URL to the device on the server");
     ourPayload.commands = [{
       command: "displayURI",
       args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
       flowID: Utils.makeGUID(),
     }];
     server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload(ourPayload), now));
 
     _("Sync again");
-    engine._sync();
+    await engine._sync();
     compareCommands(engine.localCommands, [{
       command: "displayURI",
       args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
     }], "Should receive incoming URI");
-    ok(engine.processIncomingCommands(), "Should process incoming commands");
-    const clearedCommands = engine._readCommands()[engine.localID];
+    ok((await engine.processIncomingCommands()), "Should process incoming commands");
+    const clearedCommands = (await engine._readCommands())[engine.localID];
     compareCommands(clearedCommands, [{
       command: "displayURI",
       args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
     }], "Should mark the commands as cleared after processing");
 
     _("Check that the command was removed on the server");
-    engine._sync();
+    await engine._sync();
     ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
     ok(ourPayload, "Should upload the synced client record");
     deepEqual(ourPayload.commands, [], "Should not reupload cleared commands");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
 
 add_task(async function test_command_sync() {
   _("Notify other clients when writing their record.");
 
-  engine._store.wipe();
+  await engine._store.wipe();
   generateNewKeys(Service.collectionKeys);
 
   let server    = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.getCollection("foo", "clients");
   let remoteId   = Utils.makeGUID();
   let remoteId2  = Utils.makeGUID();
@@ -1432,33 +1443,33 @@ add_task(async function test_command_syn
     type: "mobile",
     commands: [],
     version: "48",
     protocols: ["1.5"]
   }), Date.now() / 1000));
 
   try {
     equal(collection.count(), 2, "2 remote records written");
-    engine._sync();
+    await engine._sync();
     equal(collection.count(), 3, "3 remote records written (+1 for the synced local record)");
 
-    engine.sendCommand("wipeAll", []);
+    await engine.sendCommand("wipeAll", []);
     engine._tracker.addChangedID(engine.localID);
     const getClientFxaDeviceId = sinon.stub(engine, "getClientFxaDeviceId", (id) => "fxa-" + id);
     const engineMock = sinon.mock(engine);
     let _notifyCollectionChanged = engineMock.expects("_notifyCollectionChanged")
                                              .withArgs(["fxa-" + remoteId, "fxa-" + remoteId2]);
     _("Syncing.");
-    engine._sync();
+    await engine._sync();
     _notifyCollectionChanged.verify();
 
     engineMock.restore();
     getClientFxaDeviceId.restore();
   } finally {
-    cleanup();
+    await cleanup();
     engine._tracker.clearChangedIDs();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
@@ -1495,57 +1506,57 @@ add_task(async function ensureSameFlowID
       id: remoteId2,
       name: "Remote client 2",
       type: "mobile",
       commands: [],
       version: "48",
       protocols: ["1.5"]
     }), Date.now() / 1000));
 
-    engine._sync();
-    engine.sendCommand("wipeAll", []);
-    engine._sync();
+    await engine._sync();
+    await engine.sendCommand("wipeAll", []);
+    await engine._sync();
     equal(events.length, 2);
     // we don't know what the flowID is, but do know it should be the same.
     equal(events[0].extra.flowID, events[1].extra.flowID);
     // Wipe remote clients to ensure deduping doesn't prevent us from adding the command.
     for (let client of Object.values(engine._store._remoteClients)) {
       client.commands = [];
     }
     // check it's correctly used when we specify a flow ID
     events.length = 0;
     let flowID = Utils.makeGUID();
-    engine.sendCommand("wipeAll", [], null, { flowID });
-    engine._sync();
+    await engine.sendCommand("wipeAll", [], null, { flowID });
+    await engine._sync();
     equal(events.length, 2);
     equal(events[0].extra.flowID, flowID);
     equal(events[1].extra.flowID, flowID);
 
     // Wipe remote clients to ensure deduping doesn't prevent us from adding the command.
     for (let client of Object.values(engine._store._remoteClients)) {
       client.commands = [];
     }
 
     // and that it works when something else is in "extra"
     events.length = 0;
-    engine.sendCommand("wipeAll", [], null, { reason: "testing" });
-    engine._sync();
+    await engine.sendCommand("wipeAll", [], null, { reason: "testing" });
+    await engine._sync();
     equal(events.length, 2);
     equal(events[0].extra.flowID, events[1].extra.flowID);
     equal(events[0].extra.reason, "testing");
     equal(events[1].extra.reason, "testing");
     // Wipe remote clients to ensure deduping doesn't prevent us from adding the command.
     for (let client of Object.values(engine._store._remoteClients)) {
       client.commands = [];
     }
 
     // and when both are specified.
     events.length = 0;
-    engine.sendCommand("wipeAll", [], null, { reason: "testing", flowID });
-    engine._sync();
+    await engine.sendCommand("wipeAll", [], null, { reason: "testing", flowID });
+    await engine._sync();
     equal(events.length, 2);
     equal(events[0].extra.flowID, flowID);
     equal(events[1].extra.flowID, flowID);
     equal(events[0].extra.reason, "testing");
     equal(events[1].extra.reason, "testing");
     // Wipe remote clients to ensure deduping doesn't prevent us from adding the command.
     for (let client of Object.values(engine._store._remoteClients)) {
       client.commands = [];
@@ -1587,27 +1598,27 @@ add_task(async function test_duplicate_c
       id: remoteId2,
       name: "Remote client 2",
       type: "mobile",
       commands: [],
       version: "48",
       protocols: ["1.5"]
     }), Date.now() / 1000));
 
-    engine._sync();
+    await engine._sync();
     // Make sure deduping works before syncing
-    engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
-    engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
+    await engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
+    await engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
     equal(events.length, 1);
-    engine._sync();
+    await engine._sync();
     // And after syncing.
-    engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
+    await engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
     equal(events.length, 1);
     // Ensure we aren't deduping commands to different clients
-    engine.sendURIToClientForDisplay("https://example.com", remoteId2, "Example");
+    await engine.sendURIToClientForDisplay("https://example.com", remoteId2, "Example");
     equal(events.length, 2);
   } finally {
     Service.recordTelemetryEvent = origRecordTelemetryEvent;
     cleanup();
     await promiseStopServer(server);
   }
 });
 
@@ -1629,21 +1640,21 @@ add_task(async function test_other_clien
       calls++;
       return Promise.resolve(true);
     }
   };
 
   try {
     engine.lastRecordUpload = 0;
     _("First sync, should notify other clients");
-    engine._sync();
+    await engine._sync();
     equal(calls, 1);
 
     _("Second sync, should not notify other clients");
-    engine._sync();
+    await engine._sync();
     equal(calls, 1);
   } finally {
     engine.fxAccounts = fxAccounts;
     cleanup();
     await promiseStopServer(server);
   }
 });
 
@@ -1684,21 +1695,21 @@ add_task(async function device_disconnec
 
 add_task(async function process_incoming_refreshes_known_stale_clients() {
   const stubProcessIncoming = sinon.stub(SyncEngine.prototype, "_processIncoming");
   const stubRefresh = sinon.stub(engine, "_refreshKnownStaleClients", () => {
     engine._knownStaleFxADeviceIds = ["one", "two"];
   });
 
   engine._knownStaleFxADeviceIds = null;
-  engine._processIncoming();
+  await engine._processIncoming();
   ok(stubRefresh.calledOnce, "Should refresh the known stale clients");
   stubRefresh.reset();
 
-  engine._processIncoming();
+  await engine._processIncoming();
   ok(stubRefresh.notCalled, "Should not refresh the known stale clients since it's already populated");
 
   stubProcessIncoming.restore();
   stubRefresh.restore();
 });
 
 function run_test() {
   initTestLogging("Trace");
--- a/services/sync/tests/unit/test_clients_escape.js
+++ b/services/sync/tests/unit/test_clients_escape.js
@@ -16,17 +16,17 @@ add_task(async function test_clients_esc
   let engine = Service.clientsEngine;
 
   try {
     _("Test that serializing client records results in uploadable ascii");
     engine.localID = "ascii";
     engine.localName = "wéävê";
 
     _("Make sure we have the expected record");
-    let record = engine._createRecord("ascii");
+    let record = await engine._createRecord("ascii");
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
 
     _("Encrypting record...");
     record.encrypt(keyBundle);
     _("Encrypted.");
 
     let serialized = JSON.stringify(record);
@@ -43,15 +43,15 @@ add_task(async function test_clients_esc
     do_check_eq(checkCount, serialized.length);
 
     _("Making sure the record still looks like it did before");
     record.decrypt(keyBundle);
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
 
     _("Sanity check that creating the record also gives the same");
-    record = engine._createRecord("ascii");
+    record = await engine._createRecord("ascii");
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
   } finally {
     Svc.Prefs.resetBranch("");
   }
 });
--- a/services/sync/tests/unit/test_collections_recovery.js
+++ b/services/sync/tests/unit/test_collections_recovery.js
@@ -1,16 +1,18 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 // Verify that we wipe the server if we have to regenerate keys.
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
+initTestLogging("Trace");
+
 add_task(async function test_missing_crypto_collection() {
   enableValidationPrefs();
 
   let johnHelper = track_collections_helper();
   let johnU      = johnHelper.with_updated_collection;
   let johnColls  = johnHelper.collections;
 
   let empty = false;
@@ -41,29 +43,29 @@ add_task(async function test_missing_cry
       johnU(coll, new ServerCollection({}, true).handler());
   }
   let server = httpd_setup(handlers);
   await configureIdentity({username: "johndoe"}, server);
 
   try {
     let fresh = 0;
     let orig  = Service._freshStart;
-    Service._freshStart = function() {
+    Service._freshStart = async function() {
       _("Called _freshStart.");
-      orig.call(Service);
+      await orig.call(Service);
       fresh++;
     };
 
     _("Startup, no meta/global: freshStart called once.");
     await sync_and_validate_telem();
     do_check_eq(fresh, 1);
     fresh = 0;
 
     _("Regular sync: no need to freshStart.");
-    Service.sync();
+    await Service.sync();
     do_check_eq(fresh, 0);
 
     _("Simulate a bad info/collections.");
     delete johnColls.crypto;
     await sync_and_validate_telem();
     do_check_eq(fresh, 1);
     fresh = 0;
 
@@ -71,13 +73,8 @@ add_task(async function test_missing_cry
     await sync_and_validate_telem();
     do_check_eq(fresh, 0);
 
   } finally {
     Svc.Prefs.resetBranch("");
     await promiseStopServer(server);
   }
 });
-
-function run_test() {
-  initTestLogging("Trace");
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_corrupt_keys.js
+++ b/services/sync/tests/unit/test_corrupt_keys.js
@@ -13,17 +13,17 @@ Cu.import("resource://services-sync/stat
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 add_task(async function test_locally_changed_keys() {
   enableValidationPrefs();
 
   let hmacErrorCount = 0;
   function counting(f) {
-    return function() {
+    return async function() {
       hmacErrorCount++;
       return f.call(this);
     };
   }
 
   Service.handleHMACEvent = counting(Service.handleHMACEvent);
 
   let server  = new SyncServer();
@@ -37,17 +37,17 @@ add_task(async function test_locally_cha
 
   try {
     Svc.Prefs.set("registerEngines", "Tab");
 
     await configureIdentity({ username: "johndoe" }, server);
     // We aren't doing a .login yet, so fudge the cluster URL.
     Service.clusterURL = Service.identity._token.endpoint;
 
-    Service.engineManager.register(HistoryEngine);
+    await Service.engineManager.register(HistoryEngine);
     Service.engineManager.unregister("addons");
 
     function corrupt_local_keys() {
       Service.collectionKeys._default.keyPair = [Weave.Crypto.generateRandomKey(),
                                                  Weave.Crypto.generateRandomKey()];
     }
 
     _("Setting meta.");
@@ -62,17 +62,17 @@ add_task(async function test_locally_cha
 
     // Upload keys.
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
     do_check_true((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     // Check that login works.
-    do_check_true(Service.login());
+    do_check_true((await Service.login()));
     do_check_true(Service.isLoggedIn);
 
     // Sync should upload records.
     await sync_and_validate_telem();
 
     // Tabs exist.
     _("Tabs modified: " + johndoe.modified("tabs"));
     do_check_true(johndoe.modified("tabs") > 0);
--- a/services/sync/tests/unit/test_declined.js
+++ b/services/sync/tests/unit/test_declined.js
@@ -2,20 +2,16 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/stages/declined.js");
 Cu.import("resource://services-sync/stages/enginesync.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-common/observers.js");
 
-function run_test() {
-  run_next_test();
-}
-
 function PetrolEngine() {}
 PetrolEngine.prototype.name = "petrol";
 
 function DieselEngine() {}
 DieselEngine.prototype.name = "diesel";
 
 function DummyEngine() {}
 DummyEngine.prototype.name = "dummy";
@@ -41,34 +37,34 @@ function getEngineManager() {
  *
  * Push it into the EngineSynchronizer to set enabled; verify that those are
  * correct.
  *
  * Then push it into DeclinedEngines to set declined; verify that none are
  * declined, and a notification is sent for our locally disabled-but-not-
  * declined engines.
  */
-add_test(function testOldMeta() {
+add_task(async function testOldMeta() {
   let meta = {
     payload: {
       engines: {
         "petrol": 1,
         "diesel": 2,
         "nonlocal": 3,             // Enabled but not supported.
       },
     },
   };
 
   _("Record: " + JSON.stringify(meta));
 
   let manager = getEngineManager();
 
   // Update enabled from meta/global.
   let engineSync = new EngineSynchronizer(Service);
-  engineSync._updateEnabledFromMeta(meta, 3, manager);
+  await engineSync._updateEnabledFromMeta(meta, 3, manager);
 
   Assert.ok(manager._engines["petrol"].enabled, "'petrol' locally enabled.");
   Assert.ok(manager._engines["diesel"].enabled, "'diesel' locally enabled.");
   Assert.ok(!("nonlocal" in manager._engines), "We don't know anything about the 'nonlocal' engine.");
   Assert.ok(!manager._engines["actual"].enabled, "'actual' not locally enabled.");
   Assert.ok(!manager.isDeclined("actual"), "'actual' not declined, though.");
 
   let declinedEngines = new DeclinedEngines(Service);
@@ -91,17 +87,17 @@ add_test(function testOldMeta() {
 
 /**
  * 'Fetch' a meta/global that declines an engine we don't
  * recognize. Ensure that we track that declined engine along
  * with any we locally declined, and that the meta/global
  * record is marked as changed and includes all declined
  * engines.
  */
-add_test(function testDeclinedMeta() {
+add_task(async function testDeclinedMeta() {
   let meta = {
     payload: {
       engines: {
         "petrol": 1,
         "diesel": 2,
         "nonlocal": 3,             // Enabled but not supported.
       },
       declined: ["nonexistent"],   // Declined and not supported.
@@ -137,17 +133,15 @@ add_test(function testDeclinedMeta() {
     Assert.ok(0 <= declined.indexOf("nonexistent"), "'nonexistent' was declined on the server.");
 
     Assert.ok(0 <= declined.indexOf("localdecline"), "'localdecline' was declined locally.");
 
     // The meta/global is modified, too.
     Assert.ok(0 <= meta.payload.declined.indexOf("nonexistent"), "meta/global's declined contains 'nonexistent'.");
     Assert.ok(0 <= meta.payload.declined.indexOf("localdecline"), "meta/global's declined contains 'localdecline'.");
     Assert.strictEqual(true, meta.changed, "meta/global was changed.");
-
-    run_next_test();
   }
 
   Observers.add("weave:engines:notdeclined", onNotDeclined);
 
   declinedEngines.updateDeclined(meta, manager);
 });
 
--- a/services/sync/tests/unit/test_doctor.js
+++ b/services/sync/tests/unit/test_doctor.js
@@ -72,17 +72,17 @@ add_task(async function test_repairs_sta
   }
   let engine = {
     name: "test-engine",
     getValidator() {
       return validator;
     }
   }
   let requestor = {
-    startRepairs(validationInfo, flowID) {
+    async startRepairs(validationInfo, flowID) {
       ok(flowID, "got a flow ID");
       equal(validationInfo, problems);
       repairStarted = true;
       return true;
     },
     tryServerOnlyRepairs() {
       return false;
     }
@@ -106,17 +106,17 @@ add_task(async function test_repairs_sta
   await doctor.consult([engine]);
   await promiseValidationDone;
   ok(repairStarted);
 });
 
 add_task(async function test_repairs_advanced_daily() {
   let repairCalls = 0;
   let requestor = {
-    continueRepairs() {
+    async continueRepairs() {
       repairCalls++;
     },
     tryServerOnlyRepairs() {
       return false;
     }
   }
   // start now at just after REPAIR_ADVANCE_PERIOD so we do a a first one.
   let now = REPAIR_ADVANCE_PERIOD + 1;
@@ -160,17 +160,17 @@ add_task(async function test_repairs_ski
   }
   let engine = {
     name: "test-engine",
     getValidator() {
       return validator;
     }
   }
   let requestor = {
-    startRepairs(validationInfo, flowID) {
+    async startRepairs(validationInfo, flowID) {
+      ok(false, "Never should start repairs");
     },
     tryServerOnlyRepairs() {
       return false;
     }
   }
   let doctor = mockDoctor({
     _getEnginesToValidate(recentlySyncedEngines) {
--- a/services/sync/tests/unit/test_engine.js
+++ b/services/sync/tests/unit/test_engine.js
@@ -9,17 +9,17 @@ Cu.import("resource://services-sync/util
 
 function SteamStore(engine) {
   Store.call(this, "Steam", engine);
   this.wasWiped = false;
 }
 SteamStore.prototype = {
   __proto__: Store.prototype,
 
-  wipe() {
+  async wipe() {
     this.wasWiped = true;
   }
 };
 
 function SteamTracker(name, engine) {
   Tracker.call(this, name || "Steam", engine);
 }
 SteamTracker.prototype = {
@@ -32,21 +32,21 @@ function SteamEngine(name, service) {
   this.wasReset = false;
   this.wasSynced = false;
 }
 SteamEngine.prototype = {
   __proto__: Engine.prototype,
   _storeObj: SteamStore,
   _trackerObj: SteamTracker,
 
-  _resetClient() {
+  async _resetClient() {
     this.wasReset = true;
   },
 
-  _sync() {
+  async _sync() {
     this.wasSynced = true;
   }
 };
 
 var engineObserver = {
   topics: [],
 
   observe(subject, topic, data) {
@@ -100,17 +100,17 @@ add_task(async function test_score() {
   do_check_eq(engine.score, 5);
 });
 
 add_task(async function test_resetClient() {
   _("Engine.resetClient calls _resetClient");
   let engine = new SteamEngine("Steam", Service);
   do_check_false(engine.wasReset);
 
-  engine.resetClient();
+  await engine.resetClient();
   do_check_true(engine.wasReset);
   do_check_eq(engineObserver.topics[0], "weave:engine:reset-client:start");
   do_check_eq(engineObserver.topics[1], "weave:engine:reset-client:finish");
 
   await cleanup(engine);
 });
 
 add_task(async function test_invalidChangedIDs() {
@@ -135,17 +135,17 @@ add_task(async function test_invalidChan
 add_task(async function test_wipeClient() {
   _("Engine.wipeClient calls resetClient, wipes store, clears changed IDs");
   let engine = new SteamEngine("Steam", Service);
   do_check_false(engine.wasReset);
   do_check_false(engine._store.wasWiped);
   do_check_true(engine._tracker.addChangedID("a-changed-id"));
   do_check_true("a-changed-id" in engine._tracker.changedIDs);
 
-  engine.wipeClient();
+  await engine.wipeClient();
   do_check_true(engine.wasReset);
   do_check_true(engine._store.wasWiped);
   do_check_eq(JSON.stringify(engine._tracker.changedIDs), "{}");
   do_check_eq(engineObserver.topics[0], "weave:engine:wipe-client:start");
   do_check_eq(engineObserver.topics[1], "weave:engine:reset-client:start");
   do_check_eq(engineObserver.topics[2], "weave:engine:reset-client:finish");
   do_check_eq(engineObserver.topics[3], "weave:engine:wipe-client:finish");
 
@@ -168,24 +168,24 @@ add_task(async function test_enabled() {
 });
 
 add_task(async function test_sync() {
   let engine = new SteamEngine("Steam", Service);
   try {
     _("Engine.sync doesn't call _sync if it's not enabled");
     do_check_false(engine.enabled);
     do_check_false(engine.wasSynced);
-    engine.sync();
+    await engine.sync();
 
     do_check_false(engine.wasSynced);
 
     _("Engine.sync calls _sync if it's enabled");
     engine.enabled = true;
 
-    engine.sync();
+    await engine.sync();
     do_check_true(engine.wasSynced);
     do_check_eq(engineObserver.topics[0], "weave:engine:sync:start");
     do_check_eq(engineObserver.topics[1], "weave:engine:sync:finish");
   } finally {
     await cleanup(engine);
   }
 });
 
--- a/services/sync/tests/unit/test_engine_abort.js
+++ b/services/sync/tests/unit/test_engine_abort.js
@@ -25,39 +25,39 @@ add_task(async function test_processInco
   generateNewKeys(Service.collectionKeys);
 
   _("Create some server data.");
   let meta_global = Service.recordManager.set(engine.metaURL,
                                               new WBORecord(engine.metaURL));
   meta_global.payload.engines = {rotary: {version: engine.version,
                                           syncID: engine.syncID}};
   _("Fake applyIncoming to abort.");
-  engine._store.applyIncoming = function(record) {
+  engine._store.applyIncoming = async function(record) {
     let ex = {code: Engine.prototype.eEngineAbortApplyIncoming,
               cause: "Nooo"};
     _("Throwing: " + JSON.stringify(ex));
     throw ex;
   };
 
   _("Trying _processIncoming. It will throw after aborting.");
   let err;
   try {
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
   } catch (ex) {
     err = ex;
   }
 
   do_check_eq(err, "Nooo");
   err = undefined;
 
   _("Trying engine.sync(). It will abort without error.");
   try {
     // This will quietly fail.
-    engine.sync();
+    await engine.sync();
   } catch (ex) {
     err = ex;
   }
 
   do_check_eq(err, undefined);
 
   await promiseStopServer(server);
   Svc.Prefs.resetBranch("");
--- a/services/sync/tests/unit/test_engine_changes_during_sync.js
+++ b/services/sync/tests/unit/test_engine_changes_during_sync.js
@@ -1,11 +1,10 @@
 Cu.import("resource://gre/modules/FormHistory.jsm");
 Cu.import("resource://gre/modules/Log.jsm");
-Cu.import("resource://services-common/async.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/engines/history.js");
 Cu.import("resource://services-sync/engines/forms.js");
 Cu.import("resource://services-sync/engines/passwords.js");
 Cu.import("resource://services-sync/engines/prefs.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
@@ -25,17 +24,17 @@ Log.repository.getLogger("Sqlite").level
 async function assertChildGuids(folderGuid, expectedChildGuids, message) {
   let tree = await PlacesUtils.promiseBookmarksTree(folderGuid);
   let childGuids = tree.children.map(child => child.guid);
   deepEqual(childGuids, expectedChildGuids, message);
 }
 
 async function cleanup(engine, server) {
   Svc.Obs.notify("weave:engine:stop-tracking");
-  engine._store.wipe();
+  await engine._store.wipe();
   Svc.Prefs.resetBranch("");
   Service.recordManager.clearCache();
   await promiseStopServer(server);
 }
 
 add_task(async function test_history_change_during_sync() {
   _("Ensure that we don't bump the score when applying history records.");
 
@@ -44,24 +43,24 @@ add_task(async function test_history_cha
   let engine = Service.engineManager.get("history");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("history");
 
   // Override `applyIncomingBatch` to insert a record while we're applying
   // changes. The tracker should ignore this change.
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Inserting local history visit");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
-      Async.promiseSpinningly(addVisit("during_sync"));
+      await addVisit("during_sync");
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     let remoteRec = new HistoryRec("history", "UrOOuzE5QM-e");
@@ -100,26 +99,26 @@ add_task(async function test_passwords_c
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("passwords");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("passwords");
 
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Inserting local password");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
       let login = new LoginInfo("https://example.com", "", null, "username",
         "password", "", "");
       Services.logins.addLogin(login);
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     let remoteRec = new LoginRec("passwords", "{765e3d6e-071d-d640-a83d-81a7eb62d3ed}");
@@ -161,25 +160,25 @@ add_task(async function test_prefs_chang
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("prefs");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("prefs");
 
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Updating local pref value");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
       // Change the value of a synced pref.
       Services.prefs.setCharPref(TEST_PREF, "hello");
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     // All synced prefs are stored in a single record, so we'll only ever
@@ -222,32 +221,32 @@ add_task(async function test_forms_chang
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("forms");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("forms");
 
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Inserting local form history entry");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
-      Async.promiseSpinningly(new Promise(resolve => {
+      await new Promise(resolve => {
         FormHistory.update([{
           op: "add",
           fieldname: "favoriteDrink",
           value: "cocoa",
         }], {
           handleCompletion: resolve,
         });
-      }));
+      });
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     // Add an existing remote form history entry. We shouldn't bump the score when
@@ -300,28 +299,28 @@ add_task(async function test_bookmark_ch
   let engine = Service.engineManager.get("bookmarks");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("bookmarks");
 
   let bmk3; // New child of Folder 1, created locally during sync.
 
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Inserting bookmark into local store");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
-      bmk3 = Async.promiseSpinningly(PlacesUtils.bookmarks.insert({
+      bmk3 = await PlacesUtils.bookmarks.insert({
         parentGuid: folder1.guid,
         url: "https://mozilla.org/",
         title: "Mozilla",
-      }));
+      });
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   // New bookmarks that should be uploaded during the first sync.
   let folder1 = await PlacesUtils.bookmarks.insert({
     type: PlacesUtils.bookmarks.TYPE_FOLDER,
     parentGuid: PlacesUtils.bookmarks.toolbarGuid,
@@ -394,17 +393,17 @@ add_task(async function test_bookmark_ch
       collection.insert(bmk4_guid, encryptPayload(remoteTaggedBmk.cleartext));
     }
 
     await assertChildGuids(folder1.guid, [tbBmk.guid],
       "Folder should have 1 child before first sync");
 
     let pingsPromise = wait_for_pings(2);
 
-    let changes = engine.pullNewChanges();
+    let changes = await engine.pullNewChanges();
     deepEqual(Object.keys(changes).sort(), [
       folder1.guid,
       tbBmk.guid,
       "menu",
       "mobile",
       "toolbar",
       "unfiled",
     ].sort(), "Should track bookmark and folder created before first sync");
@@ -436,17 +435,17 @@ add_task(async function test_bookmark_ch
       "Folder 1 should have 3 children after first sync");
     await assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
       "Folder 2 should have 2 children after first sync");
     let taggedURIs = PlacesUtils.tagging.getURIsForTag("taggy");
     equal(taggedURIs.length, 1, "Should have 1 tagged URI");
     equal(taggedURIs[0].spec, "https://example.org/",
       "Synced tagged bookmark should appear in tagged URI list");
 
-    changes = engine.pullNewChanges();
+    changes = await engine.pullNewChanges();
     deepEqual(changes, {},
       "Should have already uploaded changes in follow-up sync");
 
     // First ping won't include validation data, since we've changed bookmarks
     // and `canValidate` will indicate it can't proceed.
     let engineData = pings.map(p =>
       p.syncs[0].engines.find(e => e.name == "bookmarks")
     );
--- a/services/sync/tests/unit/test_enginemanager.js
+++ b/services/sync/tests/unit/test_enginemanager.js
@@ -1,117 +1,111 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 
-function run_test() {
-  run_next_test();
-}
-
 function PetrolEngine() {}
 PetrolEngine.prototype.name = "petrol";
 PetrolEngine.prototype.finalize = async function() {};
 
 function DieselEngine() {}
 DieselEngine.prototype.name = "diesel";
 DieselEngine.prototype.finalize = async function() {};
 
 function DummyEngine() {}
 DummyEngine.prototype.name = "dummy";
 DummyEngine.prototype.finalize = async function() {};
 
 function ActualEngine() {}
 ActualEngine.prototype = {__proto__: Engine.prototype,
                           name: "actual"};
 
-add_test(function test_basics() {
+add_task(async function test_basics() {
   _("We start out with a clean slate");
 
   let manager = new EngineManager(Service);
 
-  let engines = manager.getAll();
+  let engines = await manager.getAll();
   do_check_eq(engines.length, 0);
-  do_check_eq(manager.get("dummy"), undefined);
+  do_check_eq((await manager.get("dummy")), undefined);
 
   _("Register an engine");
-  manager.register(DummyEngine);
-  let dummy = manager.get("dummy");
+  await manager.register(DummyEngine);
+  let dummy = await manager.get("dummy");
   do_check_true(dummy instanceof DummyEngine);
 
-  engines = manager.getAll();
+  engines = await manager.getAll();
   do_check_eq(engines.length, 1);
   do_check_eq(engines[0], dummy);
 
   _("Register an already registered engine is ignored");
-  manager.register(DummyEngine);
-  do_check_eq(manager.get("dummy"), dummy);
+  await manager.register(DummyEngine);
+  do_check_eq((await manager.get("dummy")), dummy);
 
   _("Register multiple engines in one go");
-  manager.register([PetrolEngine, DieselEngine]);
-  let petrol = manager.get("petrol");
-  let diesel = manager.get("diesel");
+  await manager.register([PetrolEngine, DieselEngine]);
+  let petrol = await manager.get("petrol");
+  let diesel = await manager.get("diesel");
   do_check_true(petrol instanceof PetrolEngine);
   do_check_true(diesel instanceof DieselEngine);
 
-  engines = manager.getAll();
+  engines = await manager.getAll();
   do_check_eq(engines.length, 3);
   do_check_neq(engines.indexOf(petrol), -1);
   do_check_neq(engines.indexOf(diesel), -1);
 
   _("Retrieve multiple engines in one go");
-  engines = manager.get(["dummy", "diesel"]);
+  engines = await manager.get(["dummy", "diesel"]);
   do_check_eq(engines.length, 2);
   do_check_neq(engines.indexOf(dummy), -1);
   do_check_neq(engines.indexOf(diesel), -1);
 
   _("getEnabled() only returns enabled engines");
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
   do_check_eq(engines.length, 0);
 
   petrol.enabled = true;
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
   do_check_eq(engines.length, 1);
   do_check_eq(engines[0], petrol);
 
   dummy.enabled = true;
   diesel.enabled = true;
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
   do_check_eq(engines.length, 3);
 
   _("getEnabled() returns enabled engines in sorted order");
   petrol.syncPriority = 1;
   dummy.syncPriority = 2;
   diesel.syncPriority = 3;
 
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
 
   do_check_array_eq(engines, [petrol, dummy, diesel]);
 
   _("Changing the priorities should change the order in getEnabled()");
 
   dummy.syncPriority = 4;
 
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
 
   do_check_array_eq(engines, [petrol, diesel, dummy]);
 
   _("Unregister an engine by name");
   manager.unregister("dummy");
-  do_check_eq(manager.get("dummy"), undefined);
-  engines = manager.getAll();
+  do_check_eq((await manager.get("dummy")), undefined);
+  engines = await manager.getAll();
   do_check_eq(engines.length, 2);
   do_check_eq(engines.indexOf(dummy), -1);
 
   _("Unregister an engine by value");
   // manager.unregister() checks for instanceof Engine, so let's make one:
-  manager.register(ActualEngine);
-  let actual = manager.get("actual");
+  await manager.register(ActualEngine);
+  let actual = await manager.get("actual");
   do_check_true(actual instanceof ActualEngine);
   do_check_true(actual instanceof Engine);
 
   manager.unregister(actual);
-  do_check_eq(manager.get("actual"), undefined);
-
-  run_next_test();
+  do_check_eq((await manager.get("actual")), undefined);
 });
 
--- a/services/sync/tests/unit/test_errorhandler_1.js
+++ b/services/sync/tests/unit/test_errorhandler_1.js
@@ -28,96 +28,93 @@ var fakeServerUrl = "http://localhost:" 
 const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
 
 const PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 2) * 1000;
 
 const NON_PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get("errorhandler.networkFailureReportTimeout") / 2) * 1000;
 
-Service.engineManager.clear();
-
 function setLastSync(lastSyncValue) {
   Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
 }
 
-var engineManager = Service.engineManager;
-engineManager.register(EHTestsCommon.CatapultEngine);
-
 // This relies on Service/ErrorHandler being a singleton. Fixing this will take
 // a lot of work.
-var errorHandler = Service.errorHandler;
+let errorHandler = Service.errorHandler;
+let engine;
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
 
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
 
-  run_next_test();
-}
+  Service.engineManager.clear();
+  await Service.engineManager.register(EHTestsCommon.CatapultEngine);
+  engine = await Service.engineManager.get("catapult");
+});
 
-
-function clean() {
-  Service.startOver();
+async function clean() {
+  await Service.startOver();
   Status.resetSync();
   Status.resetBackoff();
   errorHandler.didReportProlongedError = false;
 }
 
 add_task(async function test_401_logout() {
   enableValidationPrefs();
 
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   await sync_and_validate_telem();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  let deferred = PromiseUtils.defer();
-  Svc.Obs.add("weave:service:sync:error", onSyncError);
-  function onSyncError() {
-    _("Got weave:service:sync:error in first sync.");
-    Svc.Obs.remove("weave:service:sync:error", onSyncError);
-
-    // Wait for the automatic next sync.
-    function onLoginError() {
-      _("Got weave:service:login:error in second sync.");
-      Svc.Obs.remove("weave:service:login:error", onLoginError);
+  let promiseErrors = new Promise(res => {
+    Svc.Obs.add("weave:service:sync:error", onSyncError);
+    function onSyncError() {
+      _("Got weave:service:sync:error in first sync.");
+      Svc.Obs.remove("weave:service:sync:error", onSyncError);
 
-      do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
-      do_check_false(Service.isLoggedIn);
-
-      // Clean up.
-      Utils.nextTick(function() {
-        Service.startOver();
-        server.stop(deferred.resolve);
-      });
+      // Wait for the automatic next sync.
+      Svc.Obs.add("weave:service:login:error", onLoginError);
+      function onLoginError() {
+        _("Got weave:service:login:error in second sync.");
+        Svc.Obs.remove("weave:service:login:error", onLoginError);
+        res();
+      }
     }
-    Svc.Obs.add("weave:service:login:error", onLoginError);
-  }
+  });
 
   // Make sync fail due to login rejected.
   await configureIdentity({username: "janedoe"}, server);
   Service._updateCachedURLs();
 
   _("Starting first sync.");
   let ping = await sync_and_validate_telem(true);
   deepEqual(ping.failureReason, { name: "httperror", code: 401 });
   _("First sync done.");
-  await deferred.promise;
+
+  await promiseErrors;
+  do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+  do_check_false(Service.isLoggedIn);
+
+  // Clean up.
+  await Service.startOver();
+  await promiseStopServer(server);
 });
 
 add_task(async function test_credentials_changed_logout() {
   enableValidationPrefs();
 
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   await sync_and_validate_telem();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
@@ -128,17 +125,17 @@ add_task(async function test_credentials
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
 
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   do_check_false(Service.isLoggedIn);
 
   // Clean up.
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 add_task(function test_no_lastSync_pref() {
   // Test reported error.
   Status.resetSync();
   errorHandler.dontIgnoreErrors = true;
   Status.sync = CREDENTIALS_CHANGED;
@@ -321,34 +318,34 @@ add_task(function test_shouldReportError
   errorHandler.dontIgnoreErrors = true;
   Status.login = SERVER_MAINTENANCE;
   do_check_true(errorHandler.shouldReportError());
   do_check_false(errorHandler.didReportProlongedError);
 });
 
 add_task(async function test_shouldReportError_master_password() {
   _("Test error ignored due to locked master password");
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // Monkey patch Service.verifyLogin to imitate
   // master password being locked.
   Service._verifyLogin = Service.verifyLogin;
-  Service.verifyLogin = function() {
+  Service.verifyLogin = async function() {
     Status.login = MASTER_PASSWORD_LOCKED;
     return false;
   };
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   do_check_false(errorHandler.shouldReportError());
 
   // Clean up.
   Service.verifyLogin = Service._verifyLogin;
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 // Test that even if we don't have a cluster URL, a login failure due to
 // authentication errors is always reported.
 add_task(function test_shouldReportLoginFailureWithNoCluster() {
   // Ensure no clusterURL - any error not specific to login should not be reported.
   Service.clusterURL = "";
@@ -364,41 +361,41 @@ add_task(function test_shouldReportLogin
   do_check_false(errorHandler.shouldReportError());
 });
 
 add_task(async function test_login_syncAndReportErrors_non_network_error() {
   enableValidationPrefs();
 
   // Test non-network errors are reported
   // when calling syncAndReportErrors
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
   Service.identity.resetSyncKeyBundle();
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_syncAndReportErrors_non_network_error() {
   enableValidationPrefs();
 
   // Test non-network errors are reported
   // when calling syncAndReportErrors
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
@@ -407,51 +404,51 @@ add_task(async function test_sync_syncAn
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
   await promiseObserved;
 
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   // If we clean this tick, telemetry won't get the right error
-  await promiseNextTick();
-  clean();
+  await Async.promiseYield();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_login_syncAndReportErrors_prolonged_non_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, non-network errors are
   // reported when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
   Service.identity.resetSyncKeyBundle();
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_syncAndReportErrors_prolonged_non_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, non-network errors are
   // reported when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
@@ -460,18 +457,18 @@ add_task(async function test_sync_syncAn
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
   await promiseObserved;
 
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   // If we clean this tick, telemetry won't get the right error
-  await promiseNextTick();
-  clean();
+  await Async.promiseYield();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_login_syncAndReportErrors_network_error() {
   enableValidationPrefs();
 
   // Test network errors are reported when calling syncAndReportErrors.
   await configureIdentity({username: "broken.wipe"});
@@ -480,37 +477,35 @@ add_task(async function test_login_syncA
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
 
   do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
 
-  clean();
+  await clean();
 });
 
 
-add_test(function test_sync_syncAndReportErrors_network_error() {
+add_task(async function test_sync_syncAndReportErrors_network_error() {
   enableValidationPrefs();
 
   // Test network errors are reported when calling syncAndReportErrors.
   Services.io.offline = true;
 
-  Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
-    Svc.Obs.remove("weave:ui:sync:error", onSyncError);
-    do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
-
-    Services.io.offline = false;
-    clean();
-    run_next_test();
-  });
+  let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
+  await promiseUISyncError;
+  do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+
+  Services.io.offline = false;
+  await clean();
 });
 
 add_task(async function test_login_syncAndReportErrors_prolonged_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, network errors are reported
   // when calling syncAndReportErrors.
   await configureIdentity({username: "johndoe"});
@@ -519,68 +514,66 @@ add_task(async function test_login_syncA
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
 
-  clean();
+  await clean();
 });
 
-add_test(function test_sync_syncAndReportErrors_prolonged_network_error() {
+add_task(async function test_sync_syncAndReportErrors_prolonged_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, network errors are reported
   // when calling syncAndReportErrors.
   Services.io.offline = true;
 
-  Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
-    Svc.Obs.remove("weave:ui:sync:error", onSyncError);
-    do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
-
-    Services.io.offline = false;
-    clean();
-    run_next_test();
-  });
+  let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
+  await promiseUISyncError;
+  do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+
+  Services.io.offline = false;
+  await clean();
 });
 
 add_task(async function test_login_prolonged_non_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, non-network errors are reported
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
   Service.identity.resetSyncKeyBundle();
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_prolonged_non_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, non-network errors are reported
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
@@ -589,152 +582,147 @@ add_task(async function test_sync_prolon
   equal(ping.status.sync, PROLONGED_SYNC_FAILURE);
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
   await promiseObserved;
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_login_prolonged_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, network errors are reported
   await configureIdentity({username: "johndoe"});
   Service.clusterURL = fakeServerUrl;
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
 });
 
-add_test(function test_sync_prolonged_network_error() {
+add_task(async function test_sync_prolonged_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, network errors are reported
   Services.io.offline = true;
 
-  Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
-    Svc.Obs.remove("weave:ui:sync:error", onSyncError);
-    do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
-    do_check_true(errorHandler.didReportProlongedError);
-
-    Services.io.offline = false;
-    clean();
-    run_next_test();
-  });
+  let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
+  await promiseUISyncError;
+  do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+  do_check_true(errorHandler.didReportProlongedError);
+
+  Services.io.offline = false;
+  await clean();
 });
 
 add_task(async function test_login_non_network_error() {
   enableValidationPrefs();
 
   // Test non-network errors are reported
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
   Service.identity.resetSyncKeyBundle();
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_non_network_error() {
   enableValidationPrefs();
 
   // Test non-network errors are reported
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_login_network_error() {
   enableValidationPrefs();
 
   await configureIdentity({username: "johndoe"});
   Service.clusterURL = fakeServerUrl;
 
   let promiseObserved = promiseOneObserver("weave:ui:clear-error");
   // Test network errors are not reported.
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
   do_check_false(errorHandler.didReportProlongedError);
 
   Services.io.offline = false;
-  clean();
+  await clean();
 });
 
-add_test(function test_sync_network_error() {
+add_task(async function test_sync_network_error() {
   enableValidationPrefs();
 
   // Test network errors are not reported.
   Services.io.offline = true;
 
-  Svc.Obs.add("weave:ui:sync:finish", function onUIUpdate() {
-    Svc.Obs.remove("weave:ui:sync:finish", onUIUpdate);
-    do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
-    do_check_false(errorHandler.didReportProlongedError);
-
-    Services.io.offline = false;
-    clean();
-    run_next_test();
-  });
+  let promiseSyncFinished = promiseOneObserver("weave:ui:sync:finish");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
+  await promiseSyncFinished;
+  do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+  do_check_false(errorHandler.didReportProlongedError);
+
+  Services.io.offline = false;
+  await clean();
 });
 
 add_task(async function test_sync_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test server maintenance errors are not reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   const BACKOFF = 42;
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
   function onSyncError() {
     do_throw("Shouldn't get here!");
   }
   Svc.Obs.add("weave:ui:sync:error", onSyncError);
@@ -748,25 +736,25 @@ add_task(async function test_sync_server
   equal(ping.status.sync, SERVER_MAINTENANCE);
   deepEqual(ping.engines.find(e => e.failureReason).failureReason, { name: "httperror", code: 503 })
 
   await promiseObserved;
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   do_check_eq(Status.sync, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_info_collections_login_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test info/collections server maintenance errors are not reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.info"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -778,35 +766,35 @@ add_task(async function test_info_collec
   Svc.Obs.add("weave:ui:login:error", onUIUpdate);
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   let promiseObserved = promiseOneObserver("weave:ui:clear-error")
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
   Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_meta_global_login_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test meta/global server maintenance errors are not reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.meta"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -818,21 +806,21 @@ add_task(async function test_meta_global
   Svc.Obs.add("weave:ui:login:error", onUIUpdate);
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   let promiseObserved = promiseOneObserver("weave:ui:clear-error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
   Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
--- a/services/sync/tests/unit/test_errorhandler_2.js
+++ b/services/sync/tests/unit/test_errorhandler_2.js
@@ -27,53 +27,50 @@ var fakeServerUrl = "http://localhost:" 
 const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
 
 const PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 2) * 1000;
 
 const NON_PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get("errorhandler.networkFailureReportTimeout") / 2) * 1000;
 
-Service.engineManager.clear();
-
 function setLastSync(lastSyncValue) {
   Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
 }
 
-var engineManager = Service.engineManager;
-engineManager.register(EHTestsCommon.CatapultEngine);
-
 // This relies on Service/ErrorHandler being a singleton. Fixing this will take
 // a lot of work.
 var errorHandler = Service.errorHandler;
+let engine;
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
 
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
 
-  run_next_test();
-}
+  Service.engineManager.clear();
+  await Service.engineManager.register(EHTestsCommon.CatapultEngine);
+  engine = Service.engineManager.get("catapult");
+});
 
-
-function clean() {
-  Service.startOver();
+async function clean() {
+  await Service.startOver();
   Status.resetSync();
   Status.resetBackoff();
   errorHandler.didReportProlongedError = false;
 }
 
 add_task(async function test_crypto_keys_login_server_maintenance_error() {
   enableValidationPrefs();
 
   Status.resetSync();
   // Test crypto/keys server maintenance errors are not reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.keys"}, server);
 
   // Force re-download of keys
   Service.collectionKeys.clear();
 
   let backoffInterval;
@@ -88,39 +85,38 @@ add_task(async function test_crypto_keys
   Svc.Obs.add("weave:ui:login:error", onUIUpdate);
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   let promiseObserved = promiseOneObserver("weave:ui:clear-error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
   Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   const BACKOFF = 42;
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   do_check_eq(Status.service, STATUS_OK);
 
@@ -131,92 +127,92 @@ add_task(async function test_sync_prolon
             { name: "httperror", code: 503 });
   await promiseObserved;
 
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
   await promiseStopServer(server);
-  clean();
+  await clean();
 });
 
 add_task(async function test_info_collections_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test info/collections prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.info"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_meta_global_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test meta/global prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.meta"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_download_crypto_keys_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.keys"}, server);
   // Force re-download of keys
   Service.collectionKeys.clear();
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@@ -225,109 +221,108 @@ add_task(async function test_download_cr
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_upload_crypto_keys_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.keys"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeServer_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test that we report prolonged server maintenance errors that occur whilst
   // wiping the server.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.wipe"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeRemote_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test that we report prolonged server maintenance errors that occur whilst
   // wiping all remote devices.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   server.registerPathHandler("/1.1/broken.wipe/storage/catapult", EHTestsCommon.service_unavailable);
   await configureIdentity({username: "broken.wipe"}, server);
   EHTestsCommon.generateAndUploadKeys();
 
-  let engine = engineManager.get("catapult");
   engine.exception = null;
   engine.enabled = true;
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
@@ -345,55 +340,54 @@ add_task(async function test_wipeRemote_
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
   do_check_true(errorHandler.didReportProlongedError);
   await promiseStopServer(server);
-  clean();
+  await clean();
 });
 
 add_task(async function test_sync_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   const BACKOFF = 42;
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
 
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   do_check_eq(Status.sync, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_info_collections_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test info/collections server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.info"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -409,26 +403,26 @@ add_task(async function test_info_collec
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_meta_global_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test meta/global server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.meta"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -444,26 +438,26 @@ add_task(async function test_meta_global
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_download_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.keys"}, server);
   // Force re-download of keys
   Service.collectionKeys.clear();
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@@ -481,26 +475,26 @@ add_task(async function test_download_cr
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_upload_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.keys"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -516,26 +510,26 @@ add_task(async function test_upload_cryp
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeServer_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.wipe"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -551,31 +545,30 @@ add_task(async function test_wipeServer_
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeRemote_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test that we report prolonged server maintenance errors that occur whilst
   // wiping all remote devices.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   await configureIdentity({username: "broken.wipe"}, server);
   EHTestsCommon.generateAndUploadKeys();
 
-  let engine = engineManager.get("catapult");
   engine.exception = null;
   engine.enabled = true;
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
@@ -592,30 +585,29 @@ add_task(async function test_wipeRemote_
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, SERVER_MAINTENANCE);
   do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test prolonged server maintenance errors are
   // reported when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   const BACKOFF = 42;
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   do_check_eq(Status.service, STATUS_OK);
 
@@ -624,26 +616,26 @@ add_task(async function test_sync_syncAn
   await promiseObserved;
 
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   do_check_eq(Status.sync, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_info_collections_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test info/collections server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.info"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -661,26 +653,26 @@ add_task(async function test_info_collec
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_meta_global_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test meta/global server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.meta"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -698,26 +690,26 @@ add_task(async function test_meta_global
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_download_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.keys"}, server);
   // Force re-download of keys
   Service.collectionKeys.clear();
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@@ -737,26 +729,26 @@ add_task(async function test_download_cr
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_upload_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.keys"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -774,26 +766,26 @@ add_task(async function test_upload_cryp
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeServer_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.wipe"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -811,163 +803,152 @@ add_task(async function test_wipeServer_
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_engine_generic_fail() {
   enableValidationPrefs();
 
-  let server = EHTestsCommon.sync_httpd_setup();
-
-let engine = engineManager.get("catapult");
+  let server = await EHTestsCommon.sync_httpd_setup();
   engine.enabled = true;
-  engine.sync = function sync() {
+  engine.sync = async function sync() {
     Svc.Obs.notify("weave:engine:sync:error", ENGINE_UNKNOWN_FAIL, "catapult");
   };
 
   let log = Log.repository.getLogger("Sync.ErrorHandler");
   Svc.Prefs.set("log.appender.file.logOnError", true);
 
   do_check_eq(Status.engines["catapult"], undefined);
 
-  let deferred = PromiseUtils.defer();
-  // Don't wait for reset-file-log until the sync is underway.
-  // This avoids us catching a delayed notification from an earlier test.
-  Svc.Obs.add("weave:engine:sync:finish", function onEngineFinish() {
-    Svc.Obs.remove("weave:engine:sync:finish", onEngineFinish);
-
-    log.info("Adding reset-file-log observer.");
-    Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
-      Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+  let promiseObserved = new Promise(res => {
+    Svc.Obs.add("weave:engine:sync:finish", function onEngineFinish() {
+      Svc.Obs.remove("weave:engine:sync:finish", onEngineFinish);
 
-      // Put these checks here, not after sync(), so that we aren't racing the
-      // log handler... which resets everything just a few lines below!
-      _("Status.engines: " + JSON.stringify(Status.engines));
-      do_check_eq(Status.engines["catapult"], ENGINE_UNKNOWN_FAIL);
-      do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
-
-      // Test Error log was written on SYNC_FAILED_PARTIAL.
-      let entries = logsdir.directoryEntries;
-      do_check_true(entries.hasMoreElements());
-      let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
-      do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
-
-      clean();
-
-      let syncErrors = sumHistogram("WEAVE_ENGINE_SYNC_ERRORS", { key: "catapult" });
-      do_check_true(syncErrors, 1);
-
-      server.stop(() => {
-        clean();
-        deferred.resolve();
+      log.info("Adding reset-file-log observer.");
+      Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+        Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+        res();
       });
     });
   });
 
   do_check_true(await EHTestsCommon.setUp(server));
   let ping = await sync_and_validate_telem(true);
   deepEqual(ping.status.service, SYNC_FAILED_PARTIAL);
   deepEqual(ping.engines.find(e => e.status).status, ENGINE_UNKNOWN_FAIL);
 
-  await deferred.promise;
+  await promiseObserved;
+
+  _("Status.engines: " + JSON.stringify(Status.engines));
+  do_check_eq(Status.engines["catapult"], ENGINE_UNKNOWN_FAIL);
+  do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+
+  // Test Error log was written on SYNC_FAILED_PARTIAL.
+  let entries = logsdir.directoryEntries;
+  do_check_true(entries.hasMoreElements());
+  let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+  do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+
+  await clean();
+
+  let syncErrors = sumHistogram("WEAVE_ENGINE_SYNC_ERRORS", { key: "catapult" });
+  do_check_true(syncErrors, 1);
+
+  await clean();
+  await promiseStopServer(server);
 });
 
-add_test(function test_logs_on_sync_error_despite_shouldReportError() {
+add_task(async function test_logs_on_sync_error_despite_shouldReportError() {
   enableValidationPrefs();
 
   _("Ensure that an error is still logged when weave:service:sync:error " +
     "is notified, despite shouldReportError returning false.");
 
   let log = Log.repository.getLogger("Sync.ErrorHandler");
   Svc.Prefs.set("log.appender.file.logOnError", true);
   log.info("TESTING");
 
   // Ensure that we report no error.
   Status.login = MASTER_PASSWORD_LOCKED;
   do_check_false(errorHandler.shouldReportError());
 
-  Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
-    Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+  let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
+  Svc.Obs.notify("weave:service:sync:error", {});
+  await promiseObserved;
 
-    // Test that error log was written.
-    let entries = logsdir.directoryEntries;
-    do_check_true(entries.hasMoreElements());
-    let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
-    do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+  // Test that error log was written.
+  let entries = logsdir.directoryEntries;
+  do_check_true(entries.hasMoreElements());
+  let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+  do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
 
-    clean();
-    run_next_test();
-  });
-  Svc.Obs.notify("weave:service:sync:error", {});
+  await clean();
 });
 
-add_test(function test_logs_on_login_error_despite_shouldReportError() {
+add_task(async function test_logs_on_login_error_despite_shouldReportError() {
   enableValidationPrefs();
 
   _("Ensure that an error is still logged when weave:service:login:error " +
     "is notified, despite shouldReportError returning false.");
 
   let log = Log.repository.getLogger("Sync.ErrorHandler");
   Svc.Prefs.set("log.appender.file.logOnError", true);
   log.info("TESTING");
 
   // Ensure that we report no error.
   Status.login = MASTER_PASSWORD_LOCKED;
   do_check_false(errorHandler.shouldReportError());
 
-  Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
-    Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+  let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
+  Svc.Obs.notify("weave:service:login:error", {});
+  await promiseObserved;
 
-    // Test that error log was written.
-    let entries = logsdir.directoryEntries;
-    do_check_true(entries.hasMoreElements());
-    let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
-    do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+  // Test that error log was written.
+  let entries = logsdir.directoryEntries;
+  do_check_true(entries.hasMoreElements());
+  let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+  do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
 
-    clean();
-    run_next_test();
-  });
-  Svc.Obs.notify("weave:service:login:error", {});
+  await clean();
 });
 
 // This test should be the last one since it monkeypatches the engine object
 // and we should only have one engine object throughout the file (bug 629664).
 add_task(async function test_engine_applyFailed() {
   enableValidationPrefs();
 
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   delete engine.exception;
-  engine.sync = function sync() {
+  engine.sync = async function sync() {
     Svc.Obs.notify("weave:engine:sync:applied", {newFailed: 1}, "catapult");
   };
 
   Svc.Prefs.set("log.appender.file.logOnError", true);
 
   let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
 
   do_check_eq(Status.engines["catapult"], undefined);
   do_check_true(await EHTestsCommon.setUp(server));
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_eq(Status.engines["catapult"], ENGINE_APPLY_FAIL);
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
 
   // Test Error log was written on SYNC_FAILED_PARTIAL.
   let entries = logsdir.directoryEntries;
   do_check_true(entries.hasMoreElements());
   let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
   do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
--- a/services/sync/tests/unit/test_errorhandler_eol.js
+++ b/services/sync/tests/unit/test_errorhandler_eol.js
@@ -67,17 +67,17 @@ function do_check_hard_eol(eh, start) {
 add_task(async function test_200_hard() {
   let eh = Service.errorHandler;
   let start = Date.now();
   let server = sync_httpd_setup(handler200("hard-eol"));
   await setUp(server);
 
   let promiseObserved = promiseOneObserver("weave:eol");
 
-  Service._fetchInfo();
+  await Service._fetchInfo();
   Service.scheduler.adjustSyncInterval();   // As if we failed or succeeded in syncing.
 
   let { subject } = await promiseObserved;
   do_check_eq("hard-eol", subject.code);
   do_check_hard_eol(eh, start);
   do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
   eh.clearServerAlerts();
   await promiseStopServer(server);
@@ -87,17 +87,17 @@ add_task(async function test_513_hard() 
   let eh = Service.errorHandler;
   let start = Date.now();
   let server = sync_httpd_setup(handler513);
   await setUp(server);
 
   let promiseObserved = promiseOneObserver("weave:eol");
 
   try {
-    Service._fetchInfo();
+    await Service._fetchInfo();
     Service.scheduler.adjustSyncInterval();   // As if we failed or succeeded in syncing.
   } catch (ex) {
     // Because fetchInfo will fail on a 513.
   }
   let { subject } = await promiseObserved;
   do_check_eq("hard-eol", subject.code);
   do_check_hard_eol(eh, start);
   do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
@@ -109,17 +109,17 @@ add_task(async function test_513_hard() 
 add_task(async function test_200_soft() {
   let eh = Service.errorHandler;
   let start = Date.now();
   let server = sync_httpd_setup(handler200("soft-eol"));
   await setUp(server);
 
   let promiseObserved = promiseOneObserver("weave:eol");
 
-  Service._fetchInfo();
+  await Service._fetchInfo();
   Service.scheduler.adjustSyncInterval();   // As if we failed or succeeded in syncing.
   let { subject } = await promiseObserved;
   do_check_eq("soft-eol", subject.code);
   do_check_soft_eol(eh, start);
   do_check_eq(Service.scheduler.singleDeviceInterval, Service.scheduler.syncInterval);
   eh.clearServerAlerts();
 
   await promiseStopServer(server);
--- a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
+++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
@@ -17,22 +17,22 @@ var engineManager = Service.engineManage
 engineManager.clear();
 
 function CatapultEngine() {
   SyncEngine.call(this, "Catapult", Service);
 }
 CatapultEngine.prototype = {
   __proto__: SyncEngine.prototype,
   exception: null, // tests fill this in
-  _sync: function _sync() {
+  async _sync() {
     throw this.exception;
   }
 };
 
-function sync_httpd_setup() {
+async function sync_httpd_setup() {
   let collectionsHelper = track_collections_helper();
   let upd = collectionsHelper.with_updated_collection;
 
   let catapultEngine = engineManager.get("catapult");
   let engines        = {catapult: {version: catapultEngine.version,
                                    syncID:  catapultEngine.syncID}};
 
   // Track these using the collections helper, which keeps modified times
@@ -60,51 +60,55 @@ async function setUp(server) {
 async function generateAndUploadKeys(server) {
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
   let res = Service.resource(server.baseURI + "/1.1/johndoe/storage/crypto/keys");
   return (await serverKeys.upload(res)).success;
 }
 
+add_task(async function run_test() {
+  validate_all_future_pings();
+  await engineManager.register(CatapultEngine);
+});
 
 add_task(async function test_backoff500() {
   enableValidationPrefs();
 
   _("Test: HTTP 500 sets backoff status.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 500};
 
   try {
     do_check_false(Status.enforceBackoff);
 
     // Forcibly create and upload keys here -- otherwise we don't get to the 500!
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
     do_check_true(Status.enforceBackoff);
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetBackoff();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
 
 add_task(async function test_backoff503() {
   enableValidationPrefs();
 
   _("Test: HTTP 503 with Retry-After header leads to backoff notification and sets backoff status.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   const BACKOFF = 42;
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
@@ -113,168 +117,162 @@ add_task(async function test_backoff503(
     backoffInterval = subject;
   });
 
   try {
     do_check_false(Status.enforceBackoff);
 
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
 
     do_check_true(Status.enforceBackoff);
     do_check_eq(backoffInterval, BACKOFF);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
     do_check_eq(Status.sync, SERVER_MAINTENANCE);
   } finally {
     Status.resetBackoff();
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
 
 add_task(async function test_overQuota() {
   enableValidationPrefs();
 
   _("Test: HTTP 400 with body error code 14 means over quota.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 400,
                       toString() {
                         return "14";
                       }};
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
 
     do_check_eq(Status.sync, OVER_QUOTA);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
 
 add_task(async function test_service_networkError() {
   enableValidationPrefs();
 
   _("Test: Connection refused error from Service.sync() leads to the right status code.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
   await promiseStopServer(server);
   // Provoke connection refused.
   Service.clusterURL = "http://localhost:12345/";
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     Service._loggedIn = true;
-    Service.sync();
+    await Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
 });
 
 add_task(async function test_service_offline() {
   enableValidationPrefs();
 
   _("Test: Wanting to sync in offline mode leads to the right status code but does not increment the ignorable error count.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   await promiseStopServer(server);
   Services.io.offline = true;
   Services.prefs.setBoolPref("network.dns.offline-localhost", false);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     Service._loggedIn = true;
-    Service.sync();
+    await Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   Services.io.offline = false;
   Services.prefs.clearUserPref("network.dns.offline-localhost");
 });
 
 add_task(async function test_engine_networkError() {
   enableValidationPrefs();
 
   _("Test: Network related exceptions from engine.sync() lead to the right status code.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = Components.Exception("NS_ERROR_UNKNOWN_HOST",
                                           Cr.NS_ERROR_UNKNOWN_HOST);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
 
 add_task(async function test_resource_timeout() {
   enableValidationPrefs();
 
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   // Resource throws this when it encounters a timeout.
   engine.exception = Components.Exception("Aborting due to channel inactivity.",
                                           Cr.NS_ERROR_NET_TIMEOUT);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
-
-function run_test() {
-  validate_all_future_pings();
-  engineManager.register(CatapultEngine);
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_extension_storage_engine.js
+++ b/services/sync/tests/unit/test_extension_storage_engine.js
@@ -6,36 +6,40 @@
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/extension-storage.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
 /* globals extensionStorageSync */
 
-Service.engineManager.register(ExtensionStorageEngine);
-const engine = Service.engineManager.get("extension-storage");
-do_get_profile();   // so we can use FxAccounts
-loadWebExtensionTestFunctions();
+let engine;
 
 function mock(options) {
   let calls = [];
   let ret = function() {
     calls.push(arguments);
     return options.returns;
   }
   Object.setPrototypeOf(ret, {
     __proto__: Function.prototype,
     get calls() {
       return calls;
     }
   });
   return ret;
 }
 
+add_task(async function setup() {
+  await Service.engineManager.register(ExtensionStorageEngine);
+  engine = Service.engineManager.get("extension-storage");
+  do_get_profile();   // so we can use FxAccounts
+  loadWebExtensionTestFunctions();
+});
+
 add_task(async function test_calling_sync_calls__sync() {
   let oldSync = ExtensionStorageEngine.prototype._sync;
   let syncMock = ExtensionStorageEngine.prototype._sync = mock({returns: true});
   try {
     // I wanted to call the main sync entry point for the entire
     // package, but that fails because it tries to sync ClientEngine
     // first, which fails.
     await engine.sync();
--- a/services/sync/tests/unit/test_extension_storage_tracker.js
+++ b/services/sync/tests/unit/test_extension_storage_tracker.js
@@ -6,20 +6,24 @@
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/extension-storage.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
 /* globals extensionStorageSync */
 
-Service.engineManager.register(ExtensionStorageEngine);
-const engine = Service.engineManager.get("extension-storage");
-do_get_profile();   // so we can use FxAccounts
-loadWebExtensionTestFunctions();
+let engine;
+
+add_task(async function setup() {
+  await Service.engineManager.register(ExtensionStorageEngine);
+  engine = Service.engineManager.get("extension-storage");
+  do_get_profile();   // so we can use FxAccounts
+  loadWebExtensionTestFunctions();
+});
 
 add_task(async function test_changing_extension_storage_changes_score() {
   const tracker = engine._tracker;
   const extension = {id: "my-extension-id"};
   Svc.Obs.notify("weave:engine:start-tracking");
   await withSyncContext(async function(context) {
     await extensionStorageSync.set(extension, {"a": "b"}, context);
   });
--- a/services/sync/tests/unit/test_form_validator.js
+++ b/services/sync/tests/unit/test_form_validator.js
@@ -49,45 +49,41 @@ function getDummyServerAndClient() {
         name: "foo3",
         fieldname: "foo3",
         value: "bar3",
       }
     ]
   };
 }
 
-add_test(function test_valid() {
+add_task(async function test_valid() {
   let { server, client } = getDummyServerAndClient();
   let validator = new FormValidator();
   let { problemData, clientRecords, records, deletedRecords } =
-      validator.compareClientWithServer(client, server);
+      await validator.compareClientWithServer(client, server);
   equal(clientRecords.length, 3);
   equal(records.length, 3)
   equal(deletedRecords.length, 0);
   deepEqual(problemData, validator.emptyProblemData());
-
-  run_next_test();
 });
 
 
-add_test(function test_formValidatorIgnoresMissingClients() {
+add_task(async function test_formValidatorIgnoresMissingClients() {
   // Since history form records are not deleted from the server, the
   // |FormValidator| shouldn't set the |missingClient| flag in |problemData|.
   let { server, client } = getDummyServerAndClient();
   client.pop();
 
   let validator = new FormValidator();
   let { problemData, clientRecords, records, deletedRecords } =
-      validator.compareClientWithServer(client, server);
+      await validator.compareClientWithServer(client, server);
 
   equal(clientRecords.length, 2);
   equal(records.length, 3);
   equal(deletedRecords.length, 0);
 
   let expected = validator.emptyProblemData();
   deepEqual(problemData, expected);
-
-  run_next_test();
 });
 
 function run_test() {
   run_next_test();
 }
--- a/services/sync/tests/unit/test_forms_store.js
+++ b/services/sync/tests/unit/test_forms_store.js
@@ -2,149 +2,150 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 _("Make sure the form store follows the Store api and correctly accesses the backend form storage");
 Cu.import("resource://services-sync/engines/forms.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://gre/modules/Services.jsm");
 
-function run_test() {
+add_task(async function run_test() {
   let engine = new FormEngine(Service);
+  await engine.initialize();
   let store = engine._store;
 
-  function applyEnsureNoFailures(records) {
-    do_check_eq(store.applyIncomingBatch(records).length, 0);
+  async function applyEnsureNoFailures(records) {
+    do_check_eq((await store.applyIncomingBatch(records)).length, 0);
   }
 
   _("Remove any existing entries");
-  store.wipe();
-  if (store.getAllIDs().length) {
+  await store.wipe();
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Add a form entry");
-  applyEnsureNoFailures([{
+  await applyEnsureNoFailures([{
     id: Utils.makeGUID(),
     name: "name!!",
     value: "value??"
   }]);
 
   _("Should have 1 entry now");
   let id = "";
-  for (let _id in store.getAllIDs()) {
+  for (let _id in (await store.getAllIDs())) {
     if (id == "")
       id = _id;
     else
       do_throw("Should have only gotten one!");
   }
-  do_check_true(store.itemExists(id));
+  do_check_true((store.itemExists(id)));
 
   _("Should be able to find this entry as a dupe");
-  do_check_eq(engine._findDupe({name: "name!!", value: "value??"}), id);
+  do_check_eq((await engine._findDupe({name: "name!!", value: "value??"})), id);
 
-  let rec = store.createRecord(id);
+  let rec = await store.createRecord(id);
   _("Got record for id", id, rec);
   do_check_eq(rec.name, "name!!");
   do_check_eq(rec.value, "value??");
 
   _("Create a non-existent id for delete");
-  do_check_true(store.createRecord("deleted!!").deleted);
+  do_check_true((await store.createRecord("deleted!!")).deleted);
 
   _("Try updating.. doesn't do anything yet");
-  store.update({});
+  await store.update({});
 
   _("Remove all entries");
-  store.wipe();
-  if (store.getAllIDs().length) {
+  await store.wipe();
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Add another entry");
-  applyEnsureNoFailures([{
+  await applyEnsureNoFailures([{
     id: Utils.makeGUID(),
     name: "another",
     value: "entry"
   }]);
   id = "";
-  for (let _id in store.getAllIDs()) {
+  for (let _id in (await store.getAllIDs())) {
     if (id == "")
       id = _id;
     else
       do_throw("Should have only gotten one!");
   }
 
   _("Change the id of the new entry to something else");
-  store.changeItemID(id, "newid");
+  await store.changeItemID(id, "newid");
 
   _("Make sure it's there");
-  do_check_true(store.itemExists("newid"));
+  do_check_true((store.itemExists("newid")));
 
   _("Remove the entry");
-  store.remove({
+  await store.remove({
     id: "newid"
   });
-  if (store.getAllIDs().length) {
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Removing the entry again shouldn't matter");
-  store.remove({
+  await store.remove({
     id: "newid"
   });
-  if (store.getAllIDs().length) {
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Add another entry to delete using applyIncomingBatch");
   let toDelete = {
     id: Utils.makeGUID(),
     name: "todelete",
     value: "entry"
   };
-  applyEnsureNoFailures([toDelete]);
+  await applyEnsureNoFailures([toDelete]);
   id = "";
-  for (let _id in store.getAllIDs()) {
+  for (let _id in (await store.getAllIDs())) {
     if (id == "")
       id = _id;
     else
       do_throw("Should have only gotten one!");
   }
-  do_check_true(store.itemExists(id));
+  do_check_true((store.itemExists(id)));
   // mark entry as deleted
   toDelete.id = id;
   toDelete.deleted = true;
-  applyEnsureNoFailures([toDelete]);
-  if (store.getAllIDs().length) {
+  await applyEnsureNoFailures([toDelete]);
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Add an entry to wipe");
-  applyEnsureNoFailures([{
+  await applyEnsureNoFailures([{
     id: Utils.makeGUID(),
     name: "towipe",
     value: "entry"
   }]);
 
-  store.wipe();
+  await store.wipe();
 
-  if (store.getAllIDs().length) {
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Ensure we work if formfill is disabled.");
   Services.prefs.setBoolPref("browser.formfill.enable", false);
   try {
     // a search
-    if (store.getAllIDs().length) {
+    if ((await store.getAllIDs()).length) {
       do_throw("Shouldn't get any ids!");
     }
     // an update.
-    applyEnsureNoFailures([{
+    await applyEnsureNoFailures([{
       id: Utils.makeGUID(),
       name: "some",
       value: "entry"
     }]);
   } finally {
     Services.prefs.clearUserPref("browser.formfill.enable");
-    store.wipe();
+    await store.wipe();
   }
-}
+});
--- a/services/sync/tests/unit/test_forms_tracker.js
+++ b/services/sync/tests/unit/test_forms_tracker.js
@@ -1,72 +1,73 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines/forms.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-function run_test() {
+add_task(async function run_test() {
   _("Verify we've got an empty tracker to work with.");
   let engine = new FormEngine(Service);
+  await engine.initialize();
   let tracker = engine._tracker;
   // Don't do asynchronous writes.
   tracker.persistChangedIDs = false;
 
   do_check_empty(tracker.changedIDs);
   Log.repository.rootLogger.addAppender(new Log.DumpAppender());
 
-  function addEntry(name, value) {
-    engine._store.create({name, value});
+  async function addEntry(name, value) {
+    await engine._store.create({name, value});
   }
-  function removeEntry(name, value) {
-    let guid = engine._findDupe({name, value});
-    engine._store.remove({id: guid});
+  async function removeEntry(name, value) {
+    let guid = await engine._findDupe({name, value});
+    await engine._store.remove({id: guid});
   }
 
   try {
     _("Create an entry. Won't show because we haven't started tracking yet");
-    addEntry("name", "John Doe");
+    await addEntry("name", "John Doe");
     do_check_empty(tracker.changedIDs);
 
     _("Tell the tracker to start tracking changes.");
     Svc.Obs.notify("weave:engine:start-tracking");
-    removeEntry("name", "John Doe");
-    addEntry("email", "john@doe.com");
+    await removeEntry("name", "John Doe");
+    await addEntry("email", "john@doe.com");
     do_check_attribute_count(tracker.changedIDs, 2);
 
     _("Notifying twice won't do any harm.");
     Svc.Obs.notify("weave:engine:start-tracking");
-    addEntry("address", "Memory Lane");
+    await addEntry("address", "Memory Lane");
     do_check_attribute_count(tracker.changedIDs, 3);
 
 
     _("Check that ignoreAll is respected");
     tracker.clearChangedIDs();
     tracker.score = 0;
     tracker.ignoreAll = true;
-    addEntry("username", "johndoe123");
-    addEntry("favoritecolor", "green");
-    removeEntry("name", "John Doe");
+    await addEntry("username", "johndoe123");
+    await addEntry("favoritecolor", "green");
+    await removeEntry("name", "John Doe");
     tracker.ignoreAll = false;
     do_check_empty(tracker.changedIDs);
     equal(tracker.score, 0);
 
     _("Let's stop tracking again.");
     tracker.clearChangedIDs();
     Svc.Obs.notify("weave:engine:stop-tracking");
-    removeEntry("address", "Memory Lane");
+    await removeEntry("address", "Memory Lane");
     do_check_empty(tracker.changedIDs);
 
     _("Notifying twice won't do any harm.");
     Svc.Obs.notify("weave:engine:stop-tracking");
-    removeEntry("email", "john@doe.com");
+    await removeEntry("email", "john@doe.com");
     do_check_empty(tracker.changedIDs);
 
 
 
   } finally {
     _("Clean up.");
-    engine._store.wipe();
+    await engine._store.wipe();
   }
-}
+});
--- a/services/sync/tests/unit/test_fxa_node_reassignment.js
+++ b/services/sync/tests/unit/test_fxa_node_reassignment.js
@@ -13,42 +13,40 @@ Cu.import("resource://services-sync/cons
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/status.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/rotaryengine.js");
 Cu.import("resource://services-sync/browserid_identity.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://gre/modules/PromiseUtils.jsm");
 
-// Disables all built-in engines. Important for avoiding errors thrown by the
-// add-ons engine.
-Service.engineManager.clear();
-
-function run_test() {
+add_task(async function setup() {
   Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level  = Log.Level.Trace;
   Log.repository.getLogger("Sync.Resource").level      = Log.Level.Trace;
   Log.repository.getLogger("Sync.RESTRequest").level   = Log.Level.Trace;
   Log.repository.getLogger("Sync.Service").level       = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
   initTestLogging();
 
+  // Disables all built-in engines. Important for avoiding errors thrown by the
+  // add-ons engine.
+  Service.engineManager.clear();
+
   // Setup the FxA identity manager and cluster manager.
   Status.__authManager = Service.identity = new BrowserIDManager();
   Service._clusterManager = Service.identity.createClusterManager(Service);
 
   // None of the failures in this file should result in a UI error.
   function onUIError() {
     do_throw("Errors should not be presented in the UI.");
   }
   Svc.Obs.add("weave:ui:login:error", onUIError);
   Svc.Obs.add("weave:ui:sync:error", onUIError);
-
-  run_next_test();
-}
+});
 
 
 // API-compatible with SyncServer handler. Bind `handler` to something to use
 // as a ServerCollection handler.
 function handleReassign(handler, req, resp) {
   resp.setStatusLine(req.httpVersion, 401, "Node reassignment");
   resp.setHeader("Content-Type", "application/json");
   let reassignBody = JSON.stringify({error: "401inator in place"});
@@ -117,17 +115,17 @@ function getReassigned() {
  * to ensure that a node request was made.
  * Runs `between` between the two. This can be used to undo deliberate failure
  * setup, detach observers, etc.
  */
 async function syncAndExpectNodeReassignment(server, firstNotification, between,
                                              secondNotification, url) {
   _("Starting syncAndExpectNodeReassignment\n");
   let deferred = PromiseUtils.defer();
-  function onwards() {
+  async function onwards() {
     let numTokenRequestsBefore;
     function onFirstSync() {
       _("First sync completed.");
       Svc.Obs.remove(firstNotification, onFirstSync);
       Svc.Obs.add(secondNotification, onSecondSync);
 
       do_check_eq(Service.clusterURL, "");
 
@@ -142,23 +140,24 @@ async function syncAndExpectNodeReassign
       Svc.Obs.remove(secondNotification, onSecondSync);
       Service.scheduler.clearSyncTriggers();
 
       // Make absolutely sure that any event listeners are done with their work
       // before we proceed.
       waitForZeroTimer(function() {
         _("Second sync nextTick.");
         do_check_eq(numTokenRequests, numTokenRequestsBefore + 1, "fetched a new token");
-        Service.startOver();
-        server.stop(deferred.resolve);
+        Service.startOver().then(() => {
+          server.stop(deferred.resolve);
+        });
       });
     }
 
     Svc.Obs.add(firstNotification, onFirstSync);
-    Service.sync();
+    await Service.sync();
   }
 
   // Make sure that we really do get a 401 (but we can only do that if we are
   // already logged in, as the login process is what sets up the URLs)
   if (Service.isLoggedIn) {
     _("Making request to " + url + " which should 401");
     let request = new RESTRequest(url);
     request.get(function() {
@@ -191,47 +190,47 @@ add_task(async function test_single_toke
   // we got from the token (and as above, we are also checking we don't grab
   // a new token). If the test actually attempts to connect to this URL
   // it will crash.
   Service.clusterURL = "http://example.com/";
 
   let server = await prepareServer(afterTokenFetch);
 
   do_check_false(Service.isLoggedIn, "not already logged in");
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
   do_check_eq(numTokenFetches, 0, "didn't fetch a new token");
   // A bit hacky, but given we know how prepareServer works we can deduce
   // that clusterURL we expect.
   let expectedClusterURL = server.baseURI + "1.1/johndoe/";
   do_check_eq(Service.clusterURL, expectedClusterURL);
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 add_task(async function test_momentary_401_engine() {
   enableValidationPrefs();
 
   _("Test a failure for engine URLs that's resolved by reassignment.");
   let server = await prepareServer();
   let john   = server.user("johndoe");
 
   _("Enabling the Rotary engine.");
-  let { engine, tracker } = registerRotaryEngine();
+  let { engine, tracker } = await registerRotaryEngine();
 
   // We need the server to be correctly set up prior to experimenting. Do this
   // through a sync.
   let global = {syncID: Service.syncID,
                 storageVersion: STORAGE_VERSION,
                 rotary: {version: engine.version,
                          syncID:  engine.syncID}}
   john.createCollection("meta").insert("global", global);
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   _("Setting up Rotary collection to 401.");
   let rotary = john.createCollection("rotary");
   let oldHandler = rotary.collectionHandler;
   rotary.collectionHandler = handleReassign.bind(this, undefined);
 
   // We want to verify that the clusterURL pref has been cleared after a 401
   // inside a sync. Flag the Rotary engine to need syncing.
@@ -265,17 +264,17 @@ add_task(async function test_momentary_4
 // This test ends up being a failing info fetch *after we're already logged in*.
 add_task(async function test_momentary_401_info_collections_loggedin() {
   enableValidationPrefs();
 
   _("Test a failure for info/collections after login that's resolved by reassignment.");
   let server = await prepareServer();
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   _("Arrange for info/collections to return a 401.");
   let oldHandler = server.toplevelHandlers.info;
   server.toplevelHandlers.info = handleReassign;
 
   function undo() {
     _("Undoing test changes.");
     server.toplevelHandlers.info = oldHandler;
@@ -312,35 +311,35 @@ add_task(async function test_momentary_4
 
   // Return a 401 for the next /info request - it will be reset immediately
   // after a new token is fetched.
   oldHandler = server.toplevelHandlers.info
   server.toplevelHandlers.info = handleReassign;
 
   do_check_false(Service.isLoggedIn, "not already logged in");
 
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
   // sync was successful - check we grabbed a new token.
   do_check_true(sawTokenFetch, "a new token was fetched by this test.")
   // and we are done.
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 // This test ends up being a failing meta/global fetch *after we're already logged in*.
 add_task(async function test_momentary_401_storage_loggedin() {
   enableValidationPrefs();
 
   _("Test a failure for any storage URL after login that's resolved by" +
     "reassignment.");
   let server = await prepareServer();
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   _("Arrange for meta/global to return a 401.");
   let oldHandler = server.toplevelHandlers.storage;
   server.toplevelHandlers.storage = handleReassign;
 
   function undo() {
     _("Undoing test changes.");
     server.toplevelHandlers.storage = oldHandler;
--- a/services/sync/tests/unit/test_history_store.js
+++ b/services/sync/tests/unit/test_history_store.js
@@ -28,153 +28,149 @@ function queryPlaces(uri, options) {
 function queryHistoryVisits(uri) {
   let options = PlacesUtils.history.getNewQueryOptions();
   options.queryType = Ci.nsINavHistoryQueryOptions.QUERY_TYPE_HISTORY;
   options.resultType = Ci.nsINavHistoryQueryOptions.RESULTS_AS_VISIT;
   options.sortingMode = Ci.nsINavHistoryQueryOptions.SORT_BY_DATE_ASCENDING;
   return queryPlaces(uri, options);
 }
 
-function onNextVisit(callback) {
-  PlacesUtils.history.addObserver({
-    onBeginUpdateBatch: function onBeginUpdateBatch() {},
-    onEndUpdateBatch: function onEndUpdateBatch() {},
-    onPageChanged: function onPageChanged() {},
-    onTitleChanged: function onTitleChanged() {
-    },
-    onVisit: function onVisit() {
-      PlacesUtils.history.removeObserver(this);
-      Utils.nextTick(callback);
-    },
-    onDeleteVisits: function onDeleteVisits() {},
-    onPageExpired: function onPageExpired() {},
-    onDeleteURI: function onDeleteURI() {},
-    onClearHistory: function onClearHistory() {},
-    QueryInterface: XPCOMUtils.generateQI([
-      Ci.nsINavHistoryObserver,
-      Ci.nsINavHistoryObserver_MOZILLA_1_9_1_ADDITIONS,
-      Ci.nsISupportsWeakReference
-    ])
-  }, true);
+function promiseOnVisitObserved() {
+  return new Promise(res => {
+    PlacesUtils.history.addObserver({
+      onBeginUpdateBatch: function onBeginUpdateBatch() {},
+      onEndUpdateBatch: function onEndUpdateBatch() {},
+      onPageChanged: function onPageChanged() {},
+      onTitleChanged: function onTitleChanged() {
+      },
+      onVisit: function onVisit() {
+        PlacesUtils.history.removeObserver(this);
+        res();
+      },
+      onDeleteVisits: function onDeleteVisits() {},
+      onPageExpired: function onPageExpired() {},
+      onDeleteURI: function onDeleteURI() {},
+      onClearHistory: function onClearHistory() {},
+      QueryInterface: XPCOMUtils.generateQI([
+        Ci.nsINavHistoryObserver,
+        Ci.nsINavHistoryObserver_MOZILLA_1_9_1_ADDITIONS,
+        Ci.nsISupportsWeakReference
+      ])
+    }, true);
+  });
 }
 
-// Ensure exceptions from inside callbacks leads to test failures while
-// we still clean up properly.
-function ensureThrows(func) {
-  return function() {
-    try {
-      func.apply(this, arguments);
-    } catch (ex) {
-      PlacesTestUtils.clearHistory();
-      do_throw(ex);
-    }
-  };
-}
-
-var store = new HistoryEngine(Service)._store;
-function applyEnsureNoFailures(records) {
-  do_check_eq(store.applyIncomingBatch(records).length, 0);
+var engine = new HistoryEngine(Service);
+Async.promiseSpinningly(engine.initialize());
+var store = engine._store;
+async function applyEnsureNoFailures(records) {
+  do_check_eq((await store.applyIncomingBatch(records)).length, 0);
 }
 
 var fxuri, fxguid, tburi, tbguid;
 
 function run_test() {
   initTestLogging("Trace");
   run_next_test();
 }
 
-add_test(function test_store() {
+add_task(async function test_store() {
   _("Verify that we've got an empty store to work with.");
-  do_check_empty(store.getAllIDs());
+  do_check_empty((await store.getAllIDs()));
 
   _("Let's create an entry in the database.");
   fxuri = Utils.makeURI("http://getfirefox.com/");
 
-  PlacesTestUtils.addVisits({ uri: fxuri, title: "Get Firefox!",
-                              visitDate: TIMESTAMP1 })
-                 .then(() => {
-    _("Verify that the entry exists.");
-    let ids = Object.keys(store.getAllIDs());
-    do_check_eq(ids.length, 1);
-    fxguid = ids[0];
-    do_check_true(store.itemExists(fxguid));
+  await PlacesTestUtils.addVisits({ uri: fxuri, title: "Get Firefox!",
+                                  visitDate: TIMESTAMP1 });
+  _("Verify that the entry exists.");
+  let ids = Object.keys((await store.getAllIDs()));
+  do_check_eq(ids.length, 1);
+  fxguid = ids[0];
+  do_check_true((await store.itemExists(fxguid)));
 
-    _("If we query a non-existent record, it's marked as deleted.");
-    let record = store.createRecord("non-existent");
-    do_check_true(record.deleted);
+  _("If we query a non-existent record, it's marked as deleted.");
+  let record = await store.createRecord("non-existent");
+  do_check_true(record.deleted);
+
+  _("Verify createRecord() returns a complete record.");
+  record = await store.createRecord(fxguid);
+  do_check_eq(record.histUri, fxuri.spec);
+  do_check_eq(record.title, "Get Firefox!");
+  do_check_eq(record.visits.length, 1);
+  do_check_eq(record.visits[0].date, TIMESTAMP1);
+  do_check_eq(record.visits[0].type, Ci.nsINavHistoryService.TRANSITION_LINK);
 
-    _("Verify createRecord() returns a complete record.");
-    record = store.createRecord(fxguid);
-    do_check_eq(record.histUri, fxuri.spec);
-    do_check_eq(record.title, "Get Firefox!");
-    do_check_eq(record.visits.length, 1);
-    do_check_eq(record.visits[0].date, TIMESTAMP1);
-    do_check_eq(record.visits[0].type, Ci.nsINavHistoryService.TRANSITION_LINK);
-
-    _("Let's modify the record and have the store update the database.");
-    let secondvisit = {date: TIMESTAMP2,
-                       type: Ci.nsINavHistoryService.TRANSITION_TYPED};
-    onNextVisit(ensureThrows(function() {
-      let queryres = queryHistoryVisits(fxuri);
-      do_check_eq(queryres.length, 2);
-      do_check_eq(queryres[0].time, TIMESTAMP1);
-      do_check_eq(queryres[0].title, "Hol Dir Firefox!");
-      do_check_eq(queryres[1].time, TIMESTAMP2);
-      do_check_eq(queryres[1].title, "Hol Dir Firefox!");
-      run_next_test();
-    }));
-    applyEnsureNoFailures([
-      {id: fxguid,
-       histUri: record.histUri,
-       title: "Hol Dir Firefox!",
-       visits: [record.visits[0], secondvisit]}
-    ]);
-  });
+  _("Let's modify the record and have the store update the database.");
+  let secondvisit = {date: TIMESTAMP2,
+                     type: Ci.nsINavHistoryService.TRANSITION_TYPED};
+  let onVisitObserved = promiseOnVisitObserved();
+  await applyEnsureNoFailures([
+    {id: fxguid,
+     histUri: record.histUri,
+     title: "Hol Dir Firefox!",
+     visits: [record.visits[0], secondvisit]}
+  ]);
+  await onVisitObserved;
+  try {
+    let queryres = queryHistoryVisits(fxuri);
+    do_check_eq(queryres.length, 2);
+    do_check_eq(queryres[0].time, TIMESTAMP1);
+    do_check_eq(queryres[0].title, "Hol Dir Firefox!");
+    do_check_eq(queryres[1].time, TIMESTAMP2);
+    do_check_eq(queryres[1].title, "Hol Dir Firefox!");
+  } catch (ex) {
+    PlacesTestUtils.clearHistory();
+    do_throw(ex);
+  }
 });
 
-add_test(function test_store_create() {
+add_task(async function test_store_create() {
   _("Create a brand new record through the store.");
   tbguid = Utils.makeGUID();
   tburi = Utils.makeURI("http://getthunderbird.com");
-  onNextVisit(ensureThrows(function() {
-    do_check_attribute_count(store.getAllIDs(), 2);
-    let queryres = queryHistoryVisits(tburi);
-    do_check_eq(queryres.length, 1);
-    do_check_eq(queryres[0].time, TIMESTAMP3);
-    do_check_eq(queryres[0].title, "The bird is the word!");
-    run_next_test();
-  }));
-  applyEnsureNoFailures([
+  let onVisitObserved = promiseOnVisitObserved();
+  await applyEnsureNoFailures([
     {id: tbguid,
      histUri: tburi.spec,
      title: "The bird is the word!",
      visits: [{date: TIMESTAMP3,
                type: Ci.nsINavHistoryService.TRANSITION_TYPED}]}
   ]);
+  await onVisitObserved;
+  try {
+    do_check_attribute_count(Async.promiseSpinningly(store.getAllIDs()), 2);
+    let queryres = queryHistoryVisits(tburi);
+    do_check_eq(queryres.length, 1);
+    do_check_eq(queryres[0].time, TIMESTAMP3);
+    do_check_eq(queryres[0].title, "The bird is the word!");
+  } catch (ex) {
+    PlacesTestUtils.clearHistory();
+    do_throw(ex);
+  }
 });
 
-add_test(function test_null_title() {
+add_task(async function test_null_title() {
   _("Make sure we handle a null title gracefully (it can happen in some cases, e.g. for resource:// URLs)");
   let resguid = Utils.makeGUID();
   let resuri = Utils.makeURI("unknown://title");
-  applyEnsureNoFailures([
+  await applyEnsureNoFailures([
     {id: resguid,
      histUri: resuri.spec,
      title: null,
      visits: [{date: TIMESTAMP3,
                type: Ci.nsINavHistoryService.TRANSITION_TYPED}]}
   ]);
-  do_check_attribute_count(store.getAllIDs(), 3);
+  do_check_attribute_count((await store.getAllIDs()), 3);
   let queryres = queryHistoryVisits(resuri);
   do_check_eq(queryres.length, 1);
   do_check_eq(queryres[0].time, TIMESTAMP3);
-  run_next_test();
 });
 
-add_test(function test_invalid_records() {
+add_task(async function test_invalid_records() {
   _("Make sure we handle invalid URLs in places databases gracefully.");
   let connection = PlacesUtils.history
                               .QueryInterface(Ci.nsPIPlacesDatabase)
                               .DBConnection;
   let stmt = connection.createAsyncStatement(
     "INSERT INTO moz_places "
   + "(url, url_hash, title, rev_host, visit_count, last_visit_date) "
   + "VALUES ('invalid-uri', hash('invalid-uri'), 'Invalid URI', '.', 1, " + TIMESTAMP3 + ")"
@@ -185,45 +181,45 @@ add_test(function test_invalid_records()
   stmt = connection.createAsyncStatement(
     "INSERT INTO moz_historyvisits "
   + "(place_id, visit_date, visit_type, session) "
   + "VALUES ((SELECT id FROM moz_places WHERE url_hash = hash('invalid-uri') AND url = 'invalid-uri'), "
   + TIMESTAMP3 + ", " + Ci.nsINavHistoryService.TRANSITION_TYPED + ", 1)"
   );
   Async.querySpinningly(stmt);
   stmt.finalize();
-  do_check_attribute_count(store.getAllIDs(), 4);
+  do_check_attribute_count((await store.getAllIDs()), 4);
 
   _("Make sure we report records with invalid URIs.");
   let invalid_uri_guid = Utils.makeGUID();
-  let failed = store.applyIncomingBatch([{
+  let failed = await store.applyIncomingBatch([{
     id: invalid_uri_guid,
     histUri: ":::::::::::::::",
     title: "Doesn't have a valid URI",
     visits: [{date: TIMESTAMP3,
               type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
   ]);
   do_check_eq(failed.length, 1);
   do_check_eq(failed[0], invalid_uri_guid);
 
   _("Make sure we handle records with invalid GUIDs gracefully (ignore).");
-  applyEnsureNoFailures([
+  await applyEnsureNoFailures([
     {id: "invalid",
      histUri: "http://invalid.guid/",
      title: "Doesn't have a valid GUID",
      visits: [{date: TIMESTAMP3,
                type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
   ]);
 
   _("Make sure we handle records with invalid visit codes or visit dates, gracefully ignoring those visits.");
   let no_date_visit_guid = Utils.makeGUID();
   let no_type_visit_guid = Utils.makeGUID();
   let invalid_type_visit_guid = Utils.makeGUID();
   let non_integer_visit_guid = Utils.makeGUID();
-  failed = store.applyIncomingBatch([
+  failed = await store.applyIncomingBatch([
     {id: no_date_visit_guid,
      histUri: "http://no.date.visit/",
      title: "Visit has no date",
      visits: [{type: Ci.nsINavHistoryService.TRANSITION_EMBED}]},
     {id: no_type_visit_guid,
      histUri: "http://no.type.visit/",
      title: "Visit has no type",
      visits: [{date: TIMESTAMP3}]},
@@ -236,49 +232,46 @@ add_test(function test_invalid_records()
      histUri: "http://non.integer.visit/",
      title: "Visit has non-integer date",
      visits: [{date: 1234.567,
                type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
   ]);
   do_check_eq(failed.length, 0);
 
   _("Make sure we handle records with javascript: URLs gracefully.");
-  applyEnsureNoFailures([
+  await applyEnsureNoFailures([
     {id: Utils.makeGUID(),
      histUri: "javascript:''",
      title: "javascript:''",
      visits: [{date: TIMESTAMP3,
                type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
   ]);
 
   _("Make sure we handle records without any visits gracefully.");
-  applyEnsureNoFailures([
+  await applyEnsureNoFailures([
     {id: Utils.makeGUID(),
      histUri: "http://getfirebug.com",
      title: "Get Firebug!",
      visits: []}
   ]);
-
-  run_next_test();
 });
 
-add_test(function test_remove() {
+add_task(async function test_remove() {
   _("Remove an existent record and a non-existent from the store.");
-  applyEnsureNoFailures([{id: fxguid, deleted: true},
+  await applyEnsureNoFailures([{id: fxguid, deleted: true},
                          {id: Utils.makeGUID(), deleted: true}]);
-  do_check_false(store.itemExists(fxguid));
+  do_check_false((await store.itemExists(fxguid)));
   let queryres = queryHistoryVisits(fxuri);
   do_check_eq(queryres.length, 0);
 
   _("Make sure wipe works.");
-  store.wipe();
-  do_check_empty(store.getAllIDs());
+  await store.wipe();
+  do_check_empty((await store.getAllIDs()));
   queryres = queryHistoryVisits(fxuri);
   do_check_eq(queryres.length, 0);
   queryres = queryHistoryVisits(tburi);
   do_check_eq(queryres.length, 0);
-  run_next_test();
 });
 
 add_test(function cleanup() {
   _("Clean up.");
   PlacesTestUtils.clearHistory().then(run_next_test);
 });
--- a/services/sync/tests/unit/test_history_tracker.js
+++ b/services/sync/tests/unit/test_history_tracker.js
@@ -4,43 +4,45 @@
 Cu.import("resource://gre/modules/PlacesDBUtils.jsm");
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines/history.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-Service.engineManager.clear();
-Service.engineManager.register(HistoryEngine);
-var engine = Service.engineManager.get("history");
-var tracker = engine._tracker;
+let engine;
+let tracker;
 
-// Don't write out by default.
-tracker.persistChangedIDs = false;
-
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Tracker.History").level = Log.Level.Trace;
-  run_next_test();
-}
+
+  Service.engineManager.clear();
+  await Service.engineManager.register(HistoryEngine);
+  engine = Service.engineManager.get("history");
+  tracker = engine._tracker;
+
+  // Don't write out by default.
+  tracker.persistChangedIDs = false;
+});
 
 async function verifyTrackerEmpty() {
-  let changes = engine.pullNewChanges();
+  let changes = await engine.pullNewChanges();
   do_check_empty(changes);
   equal(tracker.score, 0);
 }
 
 async function verifyTrackedCount(expected) {
-  let changes = engine.pullNewChanges();
+  let changes = await engine.pullNewChanges();
   do_check_attribute_count(changes, expected);
 }
 
 async function verifyTrackedItems(tracked) {
-  let changes = engine.pullNewChanges();
+  let changes = await engine.pullNewChanges();
   let trackedIDs = new Set(Object.keys(changes));
   for (let guid of tracked) {
     ok(guid in changes, `${guid} should be tracked`);
     ok(changes[guid] > 0, `${guid} should have a modified time`);
     trackedIDs.delete(guid);
   }
   equal(trackedIDs.size, 0, `Unhandled tracked IDs: ${
     JSON.stringify(Array.from(trackedIDs))}`);
--- a/services/sync/tests/unit/test_hmac_error.js
+++ b/services/sync/tests/unit/test_hmac_error.js
@@ -6,29 +6,29 @@ Cu.import("resource://services-sync/serv
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/rotaryengine.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 // Track HMAC error counts.
 var hmacErrorCount = 0;
 (function() {
   let hHE = Service.handleHMACEvent;
-  Service.handleHMACEvent = function() {
+  Service.handleHMACEvent = async function() {
     hmacErrorCount++;
     return hHE.call(Service);
   };
 })();
 
-function shared_setup() {
+async function shared_setup() {
   enableValidationPrefs();
 
   hmacErrorCount = 0;
 
   // Make sure RotaryEngine is the only one we sync.
-  let { engine, tracker } = registerRotaryEngine();
+  let { engine, tracker } = await registerRotaryEngine();
   engine.lastSync = 123; // Needs to be non-zero so that tracker is queried.
   engine._store.items = {flying: "LNER Class A3 4472",
                          scotsman: "Flying Scotsman"};
   tracker.addChangedID("scotsman", 0);
   do_check_eq(1, Service.engineManager.getEnabled().length);
 
   let engines = {rotary:  {version: engine.version,
                            syncID:  engine.syncID},
@@ -41,17 +41,17 @@ function shared_setup() {
   let rotaryColl  = new ServerCollection({}, true);
   let clientsColl = new ServerCollection({}, true);
 
   return [engine, rotaryColl, clientsColl, keysWBO, global, tracker];
 }
 
 add_task(async function hmac_error_during_404() {
   _("Attempt to replicate the HMAC error setup.");
-  let [engine, rotaryColl, clientsColl, keysWBO, global, tracker] = shared_setup();
+  let [engine, rotaryColl, clientsColl, keysWBO, global, tracker] = await shared_setup();
 
   // Hand out 404s for crypto/keys.
   let keysHandler    = keysWBO.handler();
   let key404Counter  = 0;
   let keys404Handler = function(request, response) {
     if (key404Counter > 0) {
       let body = "Not Found";
       response.setStatusLine(request.httpVersion, 404, body);
@@ -70,17 +70,17 @@ add_task(async function hmac_error_durin
     "/1.1/foo/storage/crypto/keys": upd("crypto", keys404Handler),
     "/1.1/foo/storage/clients": upd("clients", clientsColl.handler()),
     "/1.1/foo/storage/rotary": upd("rotary", rotaryColl.handler())
   };
 
   let server = sync_httpd_setup(handlers);
   // Do not instantiate SyncTestingInfrastructure; we need real crypto.
   await configureIdentity({ username: "foo" }, server);
-  Service.login();
+  await Service.login();
 
   try {
     _("Syncing.");
     await sync_and_validate_telem();
 
     _("Partially resetting client, as if after a restart, and forcing redownload.");
     Service.collectionKeys.clear();
     engine.lastSync = 0;        // So that we redownload records.
@@ -97,17 +97,17 @@ add_task(async function hmac_error_durin
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function hmac_error_during_node_reassignment() {
   _("Attempt to replicate an HMAC error during node reassignment.");
-  let [engine, rotaryColl, clientsColl, keysWBO, global, tracker] = shared_setup();
+  let [engine, rotaryColl, clientsColl, keysWBO, global, tracker] = await shared_setup();
 
   let collectionsHelper = track_collections_helper();
   let upd = collectionsHelper.with_updated_collection;
 
   // We'll provide a 401 mid-way through the sync. This function
   // simulates shifting to a node which has no data.
   function on401() {
     _("Deleting server data...");
@@ -175,17 +175,17 @@ add_task(async function hmac_error_durin
 
   Svc.Obs.add("weave:service:sync:finish", obs);
   Svc.Obs.add("weave:service:sync:error", obs);
 
   // This kicks off the actual test. Split into a function here to allow this
   // source file to broadly follow actual execution order.
   function onwards() {
     _("== Invoking first sync.");
-    Service.sync();
+    Async.promiseSpinningly(Service.sync());
     _("We should not simultaneously have data but no keys on the server.");
     let hasData = rotaryColl.wbo("flying") ||
                   rotaryColl.wbo("scotsman");
     let hasKeys = keysWBO.modified;
 
     _("We correctly handle 401s by aborting the sync and starting again.");
     do_check_true(!hasData == !hasKeys);
 
@@ -222,17 +222,17 @@ add_task(async function hmac_error_durin
 
             tracker.clearChangedIDs();
             Service.engineManager.unregister(engine);
             Svc.Prefs.resetBranch("");
             Service.recordManager.clearCache();
             server.stop(resolve);
           };
 
-          Service.sync();
+          Async.promiseSpinningly(Service.sync());
         },
         this);
       };
     };
 
     onwards();
   });
 });
--- a/services/sync/tests/unit/test_interval_triggers.js
+++ b/services/sync/tests/unit/test_interval_triggers.js
@@ -5,23 +5,18 @@ Cu.import("resource://services-sync/cons
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/clients.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 Svc.Prefs.set("registerEngines", "");
 Cu.import("resource://services-sync/service.js");
 
-var scheduler = Service.scheduler;
-var clientsEngine = Service.clientsEngine;
-
-// Don't remove stale clients when syncing. This is a test-only workaround
-// that lets us add clients directly to the store, without losing them on
-// the next sync.
-clientsEngine._removeRemoteClient = id => {};
+let scheduler;
+let clientsEngine;
 
 function sync_httpd_setup() {
   let global = new ServerWBO("global", {
     syncID: Service.syncID,
     storageVersion: STORAGE_VERSION,
     engines: {clients: {version: clientsEngine.version,
                         syncID: clientsEngine.syncID}}
   });
@@ -43,24 +38,30 @@ function sync_httpd_setup() {
 async function setUp(server) {
   await configureIdentity({username: "johndoe"}, server);
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
   serverKeys.upload(Service.resource(Service.cryptoKeysURL));
 }
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
 
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
 
-  run_next_test();
-}
+  scheduler = Service.scheduler;
+  clientsEngine = Service.clientsEngine;
+
+  // Don't remove stale clients when syncing. This is a test-only workaround
+  // that lets us add clients directly to the store, without losing them on
+  // the next sync.
+  clientsEngine._removeRemoteClient = async (id) => {};
+});
 
 add_task(async function test_successful_sync_adjustSyncInterval() {
   enableValidationPrefs();
 
   _("Test successful sync calling adjustSyncInterval");
   let syncSuccesses = 0;
   function onSyncFinish() {
     _("Sync success.");
@@ -75,91 +76,91 @@ add_task(async function test_successful_
   do_check_false(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
   do_check_false(scheduler.hasIncomingItems);
 
   _("Test as long as numClients <= 1 our sync interval is SINGLE_USER.");
   // idle == true && numClients <= 1 && hasIncomingItems == false
   scheduler.idle = true;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncSuccesses, 1);
   do_check_true(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   // idle == false && numClients <= 1 && hasIncomingItems == false
   scheduler.idle = false;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncSuccesses, 2);
   do_check_false(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   // idle == false && numClients <= 1 && hasIncomingItems == true
   scheduler.hasIncomingItems = true;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncSuccesses, 3);
   do_check_false(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_true(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   // idle == true && numClients <= 1 && hasIncomingItems == true
   scheduler.idle = true;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncSuccesses, 4);
   do_check_true(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_true(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   _("Test as long as idle && numClients > 1 our sync interval is idleInterval.");
   // idle == true && numClients > 1 && hasIncomingItems == true
-  Service.clientsEngine._store.create({ id: "foo", cleartext: { name: "bar", type: "mobile" } });
-  Service.sync();
+  await Service.clientsEngine._store.create({ id: "foo", cleartext: { name: "bar", type: "mobile" } });
+  await Service.sync();
   do_check_eq(syncSuccesses, 5);
   do_check_true(scheduler.idle);
   do_check_true(scheduler.numClients > 1);
   do_check_true(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
 
   // idle == true && numClients > 1 && hasIncomingItems == false
   scheduler.hasIncomingItems = false;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncSuccesses, 6);
   do_check_true(scheduler.idle);
   do_check_true(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
 
   _("Test non-idle, numClients > 1, no incoming items => activeInterval.");
   // idle == false && numClients > 1 && hasIncomingItems == false
   scheduler.idle = false;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncSuccesses, 7);
   do_check_false(scheduler.idle);
   do_check_true(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
 
   _("Test non-idle, numClients > 1, incoming items => immediateInterval.");
   // idle == false && numClients > 1 && hasIncomingItems == true
   scheduler.hasIncomingItems = true;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncSuccesses, 8);
   do_check_false(scheduler.idle);
   do_check_true(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems); // gets reset to false
   do_check_eq(scheduler.syncInterval, scheduler.immediateInterval);
 
   Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 add_task(async function test_unsuccessful_sync_adjustSyncInterval() {
   enableValidationPrefs();
 
   _("Test unsuccessful sync calling adjustSyncInterval");
 
@@ -181,92 +182,92 @@ add_task(async function test_unsuccessfu
   do_check_false(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
   do_check_false(scheduler.hasIncomingItems);
 
   _("Test as long as numClients <= 1 our sync interval is SINGLE_USER.");
   // idle == true && numClients <= 1 && hasIncomingItems == false
   scheduler.idle = true;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncFailures, 1);
   do_check_true(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   // idle == false && numClients <= 1 && hasIncomingItems == false
   scheduler.idle = false;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncFailures, 2);
   do_check_false(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   // idle == false && numClients <= 1 && hasIncomingItems == true
   scheduler.hasIncomingItems = true;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncFailures, 3);
   do_check_false(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_true(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   // idle == true && numClients <= 1 && hasIncomingItems == true
   scheduler.idle = true;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncFailures, 4);
   do_check_true(scheduler.idle);
   do_check_false(scheduler.numClients > 1);
   do_check_true(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   _("Test as long as idle && numClients > 1 our sync interval is idleInterval.");
   // idle == true && numClients > 1 && hasIncomingItems == true
   Svc.Prefs.set("clients.devices.mobile", 2);
   scheduler.updateClientMode();
 
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncFailures, 5);
   do_check_true(scheduler.idle);
   do_check_true(scheduler.numClients > 1);
   do_check_true(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
 
   // idle == true && numClients > 1 && hasIncomingItems == false
   scheduler.hasIncomingItems = false;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncFailures, 6);
   do_check_true(scheduler.idle);
   do_check_true(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
 
   _("Test non-idle, numClients > 1, no incoming items => activeInterval.");
   // idle == false && numClients > 1 && hasIncomingItems == false
   scheduler.idle = false;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncFailures, 7);
   do_check_false(scheduler.idle);
   do_check_true(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
 
   _("Test non-idle, numClients > 1, incoming items => immediateInterval.");
   // idle == false && numClients > 1 && hasIncomingItems == true
   scheduler.hasIncomingItems = true;
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncFailures, 8);
   do_check_false(scheduler.idle);
   do_check_true(scheduler.numClients > 1);
   do_check_false(scheduler.hasIncomingItems); // gets reset to false
   do_check_eq(scheduler.syncInterval, scheduler.immediateInterval);
 
-  Service.startOver();
+  await Service.startOver();
   Svc.Obs.remove("weave:service:sync:error", onSyncError);
   await promiseStopServer(server);
 });
 
 add_task(async function test_back_triggers_sync() {
   enableValidationPrefs();
 
   let server = sync_httpd_setup();
@@ -286,19 +287,19 @@ add_task(async function test_back_trigge
   scheduler.idle = true;
   scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
   do_check_false(scheduler.idle);
   await promiseDone;
 
   Service.recordManager.clearCache();
   Svc.Prefs.resetBranch("");
   scheduler.setDefaults();
-  clientsEngine.resetClient();
+  await clientsEngine.resetClient();
 
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 add_task(async function test_adjust_interval_on_sync_error() {
   enableValidationPrefs();
 
   let server = sync_httpd_setup();
   await setUp(server);
@@ -314,24 +315,24 @@ add_task(async function test_adjust_inte
   // Force a sync fail.
   Svc.Prefs.set("firstSync", "notReady");
 
   do_check_eq(syncFailures, 0);
   do_check_false(scheduler.numClients > 1);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   Svc.Prefs.set("clients.devices.mobile", 2);
-  Service.sync();
+  await Service.sync();
 
   do_check_eq(syncFailures, 1);
   do_check_true(scheduler.numClients > 1);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
 
   Svc.Obs.remove("weave:service:sync:error", onSyncError);
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 add_task(async function test_bug671378_scenario() {
   enableValidationPrefs();
 
   // Test scenario similar to bug 671378. This bug appeared when a score
   // update occurred that wasn't large enough to trigger a sync so
@@ -344,17 +345,17 @@ add_task(async function test_bug671378_s
   let syncSuccesses = 0;
   function onSyncFinish() {
     _("Sync success.");
     syncSuccesses++;
   }
   Svc.Obs.add("weave:service:sync:finish", onSyncFinish);
 
   // After first sync call, syncInterval & syncTimer are singleDeviceInterval.
-  Service.sync();
+  await Service.sync();
   do_check_eq(syncSuccesses, 1);
   do_check_false(scheduler.numClients > 1);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
   do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
 
   let promiseDone = new Promise(resolve => {
     // Wrap scheduleNextSync so we are notified when it is finished.
     scheduler._scheduleNextSync = scheduler.scheduleNextSync;
@@ -365,18 +366,19 @@ add_task(async function test_bug671378_s
       // syncInterval and syncTimer values.
       if (syncSuccesses == 2) {
         do_check_neq(scheduler.nextSync, 0);
         do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
         do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
 
         scheduler.scheduleNextSync = scheduler._scheduleNextSync;
         Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
-        Service.startOver();
-        server.stop(resolve);
+        Service.startOver().then(() => {
+          server.stop(resolve);
+        });
       }
     };
   });
 
   // Set nextSync != 0
   // syncInterval still hasn't been set by call to updateClientMode.
   // Explicitly trying to invoke scheduleNextSync during a sync
   // (to immitate a score update that isn't big enough to trigger a sync).
@@ -388,51 +390,50 @@ add_task(async function test_bug671378_s
 
       scheduler.scheduleNextSync();
       do_check_neq(scheduler.nextSync, 0);
       do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
       do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
     });
   });
 
-  Service.clientsEngine._store.create({ id: "foo", cleartext: { name: "bar", type: "mobile" } });
-  Service.sync();
+  await Service.clientsEngine._store.create({ id: "foo", cleartext: { name: "bar", type: "mobile" } });
+  await Service.sync();
   await promiseDone;
 });
 
-add_test(function test_adjust_timer_larger_syncInterval() {
+add_task(async function test_adjust_timer_larger_syncInterval() {
+  _("Test syncInterval > current timeout period && nextSync != 0, syncInterval is NOT used.");
   Svc.Prefs.set("clients.devices.mobile", 2);
   scheduler.updateClientMode();
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
 
   scheduler.scheduleNextSync();
 
   // Ensure we have a small interval.
   do_check_neq(scheduler.nextSync, 0);
   do_check_eq(scheduler.syncTimer.delay, scheduler.activeInterval);
 
   // Make interval large again
-  clientsEngine._wipeClient();
+  await clientsEngine._wipeClient();
   Svc.Prefs.reset("clients.devices.mobile");
   scheduler.updateClientMode();
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 
   scheduler.scheduleNextSync();
 
   // Ensure timer delay remains as the small interval.
   do_check_neq(scheduler.nextSync, 0);
   do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
 
   // SyncSchedule.
-  Service.startOver();
-  run_next_test();
+  await Service.startOver();
 });
 
-add_test(function test_adjust_timer_smaller_syncInterval() {
+add_task(async function test_adjust_timer_smaller_syncInterval() {
+  _("Test current timeout > syncInterval period && nextSync != 0, syncInterval is used.");
   scheduler.scheduleNextSync();
 
   // Ensure we have a large interval.
   do_check_neq(scheduler.nextSync, 0);
   do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
 
   // Make interval smaller
@@ -442,11 +443,10 @@ add_test(function test_adjust_timer_smal
 
   scheduler.scheduleNextSync();
 
   // Ensure smaller timer delay is used.
   do_check_neq(scheduler.nextSync, 0);
   do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
 
   // SyncSchedule.
-  Service.startOver();
-  run_next_test();
+  await Service.startOver();
 });
--- a/services/sync/tests/unit/test_node_reassignment.js
+++ b/services/sync/tests/unit/test_node_reassignment.js
@@ -9,35 +9,34 @@ Cu.import("resource://services-common/re
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/status.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/rotaryengine.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://gre/modules/PromiseUtils.jsm");
 
-function run_test() {
+
+add_task(async function setup() {
   Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level  = Log.Level.Trace;
   Log.repository.getLogger("Sync.Resource").level      = Log.Level.Trace;
   Log.repository.getLogger("Sync.RESTRequest").level   = Log.Level.Trace;
   Log.repository.getLogger("Sync.Service").level       = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
   initTestLogging();
   validate_all_future_pings();
 
   // None of the failures in this file should result in a UI error.
   function onUIError() {
     do_throw("Errors should not be presented in the UI.");
   }
   Svc.Obs.add("weave:ui:login:error", onUIError);
   Svc.Obs.add("weave:ui:sync:error", onUIError);
-
-  run_next_test();
-}
+});
 
 /**
  * Emulate the following Zeus config:
  * $draining = data.get($prefix . $host . " draining");
  * if ($draining == "drain.") {
  *   log.warn($log_host_db_status . " migrating=1 (node-reassignment)" .
  *            $log_suffix);
  *   http.sendResponse("401 Node reassignment", $content_type,
@@ -90,75 +89,77 @@ async function syncAndExpectNodeReassign
       getTokenCount++;
       cb(null, {
         endpoint: server.baseURI + "1.1/johndoe/"
       });
     },
   };
   Service.identity._tokenServerClient = mockTSC;
 
-  function onwards() {
-    function onFirstSync() {
-      _("First sync completed.");
-      Svc.Obs.remove(firstNotification, onFirstSync);
-      Svc.Obs.add(secondNotification, onSecondSync);
+  // Make sure that it works!
+  await new Promise(res => {
+    let request = new RESTRequest(url);
+    request.get(function() {
+      do_check_eq(request.response.status, 401);
+      res();
+    });
+  });
 
-      do_check_eq(Service.clusterURL, "");
+  function onFirstSync() {
+    _("First sync completed.");
+    Svc.Obs.remove(firstNotification, onFirstSync);
+    Svc.Obs.add(secondNotification, onSecondSync);
 
-      // Allow for tests to clean up error conditions.
-      between();
-    }
-    function onSecondSync() {
-      _("Second sync completed.");
-      Svc.Obs.remove(secondNotification, onSecondSync);
-      Service.scheduler.clearSyncTriggers();
+    do_check_eq(Service.clusterURL, "");
 
-      // Make absolutely sure that any event listeners are done with their work
-      // before we proceed.
-      waitForZeroTimer(function() {
-        _("Second sync nextTick.");
-        do_check_eq(getTokenCount, 1);
-        Service.startOver();
+    // Allow for tests to clean up error conditions.
+    between();
+  }
+  function onSecondSync() {
+    _("Second sync completed.");
+    Svc.Obs.remove(secondNotification, onSecondSync);
+    Service.scheduler.clearSyncTriggers();
+
+    // Make absolutely sure that any event listeners are done with their work
+    // before we proceed.
+    waitForZeroTimer(function() {
+      _("Second sync nextTick.");
+      do_check_eq(getTokenCount, 1);
+      Service.startOver().then(() => {
         server.stop(deferred.resolve);
       });
-    }
-
-    Svc.Obs.add(firstNotification, onFirstSync);
-    Service.sync();
+    });
   }
 
-  // Make sure that it works!
-  let request = new RESTRequest(url);
-  request.get(function() {
-    do_check_eq(request.response.status, 401);
-    Utils.nextTick(onwards);
-  });
+  Svc.Obs.add(firstNotification, onFirstSync);
+  await Service.sync();
+
   await deferred.promise;
 }
 
 add_task(async function test_momentary_401_engine() {
   enableValidationPrefs();
 
   _("Test a failure for engine URLs that's resolved by reassignment.");
   let server = await prepareServer();
   let john   = server.user("johndoe");
 
   _("Enabling the Rotary engine.");
-  let { engine, tracker } = registerRotaryEngine();
+  let { engine, tracker } = await registerRotaryEngine();
 
   // We need the server to be correctly set up prior to experimenting. Do this
   // through a sync.
   let global = {syncID: Service.syncID,
                 storageVersion: STORAGE_VERSION,
                 rotary: {version: engine.version,
                          syncID:  engine.syncID}}
   john.createCollection("meta").insert("global", global);
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   _("Setting up Rotary collection to 401.");
   let rotary = john.createCollection("rotary");
   let oldHandler = rotary.collectionHandler;
   rotary.collectionHandler = handleReassign.bind(this, undefined);
 
   // We want to verify that the clusterURL pref has been cleared after a 401
   // inside a sync. Flag the Rotary engine to need syncing.
@@ -192,17 +193,17 @@ add_task(async function test_momentary_4
 // This test ends up being a failing fetch *after we're already logged in*.
 add_task(async function test_momentary_401_info_collections() {
   enableValidationPrefs();
 
   _("Test a failure for info/collections that's resolved by reassignment.");
   let server = await prepareServer();
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   // Return a 401 for info requests, particularly info/collections.
   let oldHandler = server.toplevelHandlers.info;
   server.toplevelHandlers.info = handleReassign;
 
   function undo() {
     _("Undoing test changes.");
     server.toplevelHandlers.info = oldHandler;
@@ -218,17 +219,17 @@ add_task(async function test_momentary_4
 add_task(async function test_momentary_401_storage_loggedin() {
   enableValidationPrefs();
 
   _("Test a failure for any storage URL, not just engine parts. " +
     "Resolved by reassignment.");
   let server = await prepareServer();
 
   _("Performing initial sync to ensure we are logged in.")
-  Service.sync();
+  await Service.sync();
 
   // Return a 401 for all storage requests.
   let oldHandler = server.toplevelHandlers.storage;
   server.toplevelHandlers.storage = handleReassign;
 
   function undo() {
     _("Undoing test changes.");
     server.toplevelHandlers.storage = oldHandler;
@@ -350,38 +351,39 @@ add_task(async function test_loop_avoida
     Service.scheduler.clearSyncTriggers();
 
     // Make absolutely sure that any event listeners are done with their work
     // before we proceed.
     waitForZeroTimer(function() {
       _("Third sync nextTick.");
       do_check_false(getReassigned());
       do_check_eq(getTokenCount, 2);
-      Service.startOver();
-      server.stop(deferred.resolve);
+      Service.startOver().then(() => {
+        server.stop(deferred.resolve);
+      });
     });
   }
 
   Svc.Obs.add(firstNotification, onFirstSync);
 
   now = Date.now();
-  Service.sync();
+  await Service.sync();
   await deferred.promise;
 });
 
 add_task(async function test_loop_avoidance_engine() {
   enableValidationPrefs();
 
   _("Test that a repeated 401 in an engine doesn't result in a sync loop " +
     "if node reassignment cannot resolve the failure.");
   let server = await prepareServer();
   let john   = server.user("johndoe");
 
   _("Enabling the Rotary engine.");
-  let { engine, tracker } = registerRotaryEngine();
+  let { engine, tracker } = await registerRotaryEngine();
   let deferred = PromiseUtils.defer();
 
   let getTokenCount = 0;
   let mockTSC = { // TokenServerClient
     getTokenFromBrowserIDAssertion(uri, assertion, cb) {
       getTokenCount++;
       cb(null, {
         endpoint: server.baseURI + "1.1/johndoe/"
@@ -394,17 +396,17 @@ add_task(async function test_loop_avoida
   // through a sync.
   let global = {syncID: Service.syncID,
                 storageVersion: STORAGE_VERSION,
                 rotary: {version: engine.version,
                          syncID:  engine.syncID}}
   john.createCollection("meta").insert("global", global);
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   _("Setting up Rotary collection to 401.");
   let rotary = john.createCollection("rotary");
   let oldHandler = rotary.collectionHandler;
   rotary.collectionHandler = handleReassign.bind(this, undefined);
 
   // Flag the Rotary engine to need syncing.
   john.collection("rotary").timestamp += 1000;
@@ -415,22 +417,16 @@ add_task(async function test_loop_avoida
     do_check_true(getReassigned());
   }
 
   function beforeSuccessfulSync() {
     _("Undoing test changes.");
     rotary.collectionHandler = oldHandler;
   }
 
-  function afterSuccessfulSync() {
-    Svc.Obs.remove("weave:service:login:start", onLoginStart);
-    Service.startOver();
-    server.stop(deferred.resolve);
-  }
-
   let firstNotification  = "weave:service:sync:finish";
   let secondNotification = "weave:service:sync:finish";
   let thirdNotification  = "weave:service:sync:finish";
 
   // Track the time. We want to make sure the duration between the first and
   // second sync is small, and then that the duration between second and third
   // is set to be large.
   let now;
@@ -489,21 +485,24 @@ add_task(async function test_loop_avoida
     Service.scheduler.clearSyncTriggers();
 
     // Make absolutely sure that any event listeners are done with their work
     // before we proceed.
     waitForZeroTimer(function() {
       _("Third sync nextTick.");
       do_check_false(getReassigned());
       do_check_eq(getTokenCount, 2);
-      afterSuccessfulSync();
+      Svc.Obs.remove("weave:service:login:start", onLoginStart);
+      Service.startOver().then(() => {
+        server.stop(deferred.resolve);
+      });
     });
   }
 
   Svc.Obs.add(firstNotification, onFirstSync);
 
   now = Date.now();
-  Service.sync();
+  await Service.sync();
   await deferred.promise;
 
   tracker.clearChangedIDs();
   Service.engineManager.unregister(engine);
 });
--- a/services/sync/tests/unit/test_password_engine.js
+++ b/services/sync/tests/unit/test_password_engine.js
@@ -11,17 +11,17 @@ const PropertyBag = Components.Construct
 
 function run_test() {
   Service.engineManager.unregister("addons"); // To silence errors.
   run_next_test();
 }
 
 async function cleanup(engine, server) {
   Svc.Obs.notify("weave:engine:stop-tracking");
-  engine.wipeClient();
+  await engine.wipeClient();
   Svc.Prefs.resetBranch("");
   Service.recordManager.clearCache();
   await promiseStopServer(server);
 }
 
 add_task(async function test_ignored_fields() {
   _("Only changes to syncable fields should be tracked");
 
--- a/services/sync/tests/unit/test_password_store.js
+++ b/services/sync/tests/unit/test_password_store.js
@@ -1,57 +1,57 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/engines/passwords.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
 
-function checkRecord(name, record, expectedCount, timeCreated,
+async function checkRecord(name, record, expectedCount, timeCreated,
                      expectedTimeCreated, timePasswordChanged,
                      expectedTimePasswordChanged, recordIsUpdated) {
   let engine = Service.engineManager.get("passwords");
   let store = engine._store;
 
   let count = {};
   let logins = Services.logins.findLogins(count, record.hostname,
                                           record.formSubmitURL, null);
 
   _("Record" + name + ":" + JSON.stringify(logins));
   _("Count" + name + ":" + count.value);
 
   do_check_eq(count.value, expectedCount);
 
   if (expectedCount > 0) {
-    do_check_true(!!store.getAllIDs()[record.id]);
+    do_check_true(!!(await store.getAllIDs())[record.id]);
     let stored_record = logins[0].QueryInterface(Ci.nsILoginMetaInfo);
 
     if (timeCreated !== undefined) {
       do_check_eq(stored_record.timeCreated, expectedTimeCreated);
     }
 
     if (timePasswordChanged !== undefined) {
       if (recordIsUpdated) {
         do_check_true(stored_record.timePasswordChanged >= expectedTimePasswordChanged);
       } else {
         do_check_eq(stored_record.timePasswordChanged, expectedTimePasswordChanged);
       }
       return stored_record.timePasswordChanged;
     }
   } else {
-    do_check_true(!store.getAllIDs()[record.id]);
+    do_check_true(!(await store.getAllIDs())[record.id]);
   }
   return undefined;
 }
 
 
-function changePassword(name, hostname, password, expectedCount, timeCreated,
-                        expectedTimeCreated, timePasswordChanged,
-                        expectedTimePasswordChanged, insert, recordIsUpdated) {
+async function changePassword(name, hostname, password, expectedCount, timeCreated,
+                              expectedTimeCreated, timePasswordChanged,
+                              expectedTimePasswordChanged, insert, recordIsUpdated) {
 
   const BOGUS_GUID = "zzzzzz" + hostname;
 
   let record = {id: BOGUS_GUID,
                   hostname,
                   formSubmitURL: hostname,
                   username: "john",
                   password,
@@ -66,88 +66,88 @@ function changePassword(name, hostname, 
     record.timePasswordChanged = timePasswordChanged;
   }
 
 
   let engine = Service.engineManager.get("passwords");
   let store = engine._store;
 
   if (insert) {
-    do_check_eq(store.applyIncomingBatch([record]).length, 0);
+    do_check_eq((await store.applyIncomingBatch([record])).length, 0);
   }
 
   return checkRecord(name, record, expectedCount, timeCreated,
                      expectedTimeCreated, timePasswordChanged,
                      expectedTimePasswordChanged, recordIsUpdated);
 
 }
 
 
-function test_apply_records_with_times(hostname, timeCreated, timePasswordChanged) {
+async function test_apply_records_with_times(hostname, timeCreated, timePasswordChanged) {
   // The following record is going to be inserted in the store and it needs
   // to be found there. Then its timestamps are going to be compared to
   // the expected values.
-  changePassword(" ", hostname, "password", 1, timeCreated, timeCreated,
+  await changePassword(" ", hostname, "password", 1, timeCreated, timeCreated,
                  timePasswordChanged, timePasswordChanged, true);
 }
 
 
-function test_apply_multiple_records_with_times() {
+async function test_apply_multiple_records_with_times() {
   // The following records are going to be inserted in the store and they need
   // to be found there. Then their timestamps are going to be compared to
   // the expected values.
-  changePassword("A", "http://foo.a.com", "password", 1, undefined, undefined,
+  await changePassword("A", "http://foo.a.com", "password", 1, undefined, undefined,
                  undefined, undefined, true);
-  changePassword("B", "http://foo.b.com", "password", 1, 1000, 1000, undefined,
+  await changePassword("B", "http://foo.b.com", "password", 1, 1000, 1000, undefined,
                  undefined, true);
-  changePassword("C", "http://foo.c.com", "password", 1, undefined, undefined,
+  await changePassword("C", "http://foo.c.com", "password", 1, undefined, undefined,
                  1000, 1000, true);
-  changePassword("D", "http://foo.d.com", "password", 1, 1000, 1000, 1000,
+  await changePassword("D", "http://foo.d.com", "password", 1, 1000, 1000, 1000,
                  1000, true);
 
   // The following records are not going to be inserted in the store and they
   // are not going to be found there.
-  changePassword("NotInStoreA", "http://foo.aaaa.com", "password", 0,
+  await changePassword("NotInStoreA", "http://foo.aaaa.com", "password", 0,
                  undefined, undefined, undefined, undefined, false);
-  changePassword("NotInStoreB", "http://foo.bbbb.com", "password", 0, 1000,
+  await changePassword("NotInStoreB", "http://foo.bbbb.com", "password", 0, 1000,
                  1000, undefined, undefined, false);
-  changePassword("NotInStoreC", "http://foo.cccc.com", "password", 0,
+  await changePassword("NotInStoreC", "http://foo.cccc.com", "password", 0,
                  undefined, undefined, 1000, 1000, false);
-  changePassword("NotInStoreD", "http://foo.dddd.com", "password", 0, 1000,
+  await changePassword("NotInStoreD", "http://foo.dddd.com", "password", 0, 1000,
                  1000, 1000, 1000, false);
 }
 
 
-function test_apply_same_record_with_different_times() {
+async function test_apply_same_record_with_different_times() {
   // The following record is going to be inserted multiple times in the store
   // and it needs to be found there. Then its timestamps are going to be
   // compared to the expected values.
 
   /* eslint-disable no-unused-vars */
   /* The eslint linter thinks that timePasswordChanged is unused, even though
      it is passed as an argument to changePassword. */
   var timePasswordChanged = 100;
-  timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 100,
+  timePasswordChanged = await changePassword("A", "http://a.tn", "password", 1, 100,
                                        100, 100, timePasswordChanged, true);
-  timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 100,
+  timePasswordChanged = await changePassword("A", "http://a.tn", "password", 1, 100,
                                        100, 800, timePasswordChanged, true,
                                        true);
-  timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 500,
+  timePasswordChanged = await changePassword("A", "http://a.tn", "password", 1, 500,
                                        100, 800, timePasswordChanged, true,
                                        true);
-  timePasswordChanged = changePassword("A", "http://a.tn", "password2", 1, 500,
+  timePasswordChanged = await changePassword("A", "http://a.tn", "password2", 1, 500,
                                        100, 1536213005222, timePasswordChanged,
                                        true, true);
-  timePasswordChanged = changePassword("A", "http://a.tn", "password2", 1, 500,
+  timePasswordChanged = await changePassword("A", "http://a.tn", "password2", 1, 500,
                                        100, 800, timePasswordChanged, true, true);
+  /* eslint-enable no-unused-vars */
 }
 
 
-function run_test() {
+add_task(async function run_test() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Passwords").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.Store.Passwords").level = Log.Level.Trace;
 
   const BOGUS_GUID_A = "zzzzzzzzzzzz";
   const BOGUS_GUID_B = "yyyyyyyyyyyy";
   let recordA = {id: BOGUS_GUID_A,
                   hostname: "http://foo.bar.com",
@@ -164,17 +164,17 @@ function run_test() {
                   password: "smith",
                   usernameField: "username",
                   passwordField: "password"};
 
   let engine = Service.engineManager.get("passwords");
   let store = engine._store;
 
   try {
-    do_check_eq(store.applyIncomingBatch([recordA, recordB]).length, 0);
+    do_check_eq((await store.applyIncomingBatch([recordA, recordB])).length, 0);
 
     // Only the good record makes it to Services.logins.
     let badCount = {};
     let goodCount = {};
     let badLogins = Services.logins.findLogins(badCount, recordA.hostname,
                                                recordA.formSubmitURL,
                                                recordA.httpRealm);
     let goodLogins = Services.logins.findLogins(goodCount, recordB.hostname,
@@ -182,24 +182,24 @@ function run_test() {
 
     _("Bad: " + JSON.stringify(badLogins));
     _("Good: " + JSON.stringify(goodLogins));
     _("Count: " + badCount.value + ", " + goodCount.value);
 
     do_check_eq(goodCount.value, 1);
     do_check_eq(badCount.value, 0);
 
-    do_check_true(!!store.getAllIDs()[BOGUS_GUID_B]);
-    do_check_true(!store.getAllIDs()[BOGUS_GUID_A]);
+    do_check_true(!!(await store.getAllIDs())[BOGUS_GUID_B]);
+    do_check_true(!(await store.getAllIDs())[BOGUS_GUID_A]);
 
-    test_apply_records_with_times("http://afoo.baz.com", undefined, undefined);
-    test_apply_records_with_times("http://bfoo.baz.com", 1000, undefined);
-    test_apply_records_with_times("http://cfoo.baz.com", undefined, 2000);
-    test_apply_records_with_times("http://dfoo.baz.com", 1000, 2000);
+    await test_apply_records_with_times("http://afoo.baz.com", undefined, undefined);
+    await test_apply_records_with_times("http://bfoo.baz.com", 1000, undefined);
+    await test_apply_records_with_times("http://cfoo.baz.com", undefined, 2000);
+    await test_apply_records_with_times("http://dfoo.baz.com", 1000, 2000);
 
-    test_apply_multiple_records_with_times();
+    await test_apply_multiple_records_with_times();
 
-    test_apply_same_record_with_different_times();
+    await test_apply_same_record_with_different_times();
 
   } finally {
-    store.wipe();
+    await store.wipe();
   }
-}
+});
--- a/services/sync/tests/unit/test_password_tracker.js
+++ b/services/sync/tests/unit/test_password_tracker.js
@@ -2,30 +2,32 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines/passwords.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-Service.engineManager.register(PasswordEngine);
-var engine = Service.engineManager.get("passwords");
-var store  = engine._store;
-var tracker = engine._tracker;
+let engine;
+let store;
+let tracker;
 
-// Don't do asynchronous writes.
-tracker.persistChangedIDs = false;
-
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
-  run_next_test();
-}
+  await Service.engineManager.register(PasswordEngine);
+  engine = Service.engineManager.get("passwords");
+  store  = engine._store;
+  tracker = engine._tracker;
 
-add_test(function test_tracking() {
+  // Don't do asynchronous writes.
+  tracker.persistChangedIDs = false;
+});
+
+add_task(async function test_tracking() {
   let recordNum = 0;
 
   _("Verify we've got an empty tracker to work with.");
   do_check_empty(tracker.changedIDs);
 
   function createPassword() {
     _("RECORD NUM: " + recordNum);
     let record = {id: "GUID" + recordNum,
@@ -69,33 +71,31 @@ add_test(function test_tracking() {
     _("Notifying twice won't do any harm.");
     Svc.Obs.notify("weave:engine:stop-tracking");
     createPassword();
     do_check_empty(tracker.changedIDs);
     do_check_eq(tracker.score, 0);
 
   } finally {
     _("Clean up.");
-    store.wipe();
+    await store.wipe();
     tracker.clearChangedIDs();
     tracker.resetScore();
     Svc.Obs.notify("weave:engine:stop-tracking");
-    run_next_test();
   }
 });
 
-add_test(function test_onWipe() {
+add_task(async function test_onWipe() {
   _("Verify we've got an empty tracker to work with.");
   do_check_empty(tracker.changedIDs);
   do_check_eq(tracker.score, 0);
 
   try {
     _("A store wipe should increment the score");
     Svc.Obs.notify("weave:engine:start-tracking");
-    store.wipe();
+    await store.wipe();
 
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
   } finally {
     tracker.resetScore();
     Svc.Obs.notify("weave:engine:stop-tracking");
-    run_next_test();
   }
 });
--- a/services/sync/tests/unit/test_password_validator.js
+++ b/services/sync/tests/unit/test_password_validator.js
@@ -75,84 +75,78 @@ function getDummyServerAndClient() {
         usernameField: "user",
         httpRealm: null,
       }
     ]
   };
 }
 
 
-add_test(function test_valid() {
+add_task(async function test_valid() {
   let { server, client } = getDummyServerAndClient();
   let validator = new PasswordValidator();
   let { problemData, clientRecords, records, deletedRecords } =
-      validator.compareClientWithServer(client, server);
+      await validator.compareClientWithServer(client, server);
   equal(clientRecords.length, 3);
   equal(records.length, 3)
   equal(deletedRecords.length, 0);
   deepEqual(problemData, validator.emptyProblemData());
-
-  run_next_test();
 });
 
-add_test(function test_missing() {
+add_task(async function test_missing() {
   let validator = new PasswordValidator();
   {
     let { server, client } = getDummyServerAndClient();
 
     client.pop();
 
     let { problemData, clientRecords, records, deletedRecords } =
-        validator.compareClientWithServer(client, server);
+        await validator.compareClientWithServer(client, server);
 
     equal(clientRecords.length, 2);
     equal(records.length, 3)
     equal(deletedRecords.length, 0);
 
     let expected = validator.emptyProblemData();
     expected.clientMissing.push("33333");
     deepEqual(problemData, expected);
   }
   {
     let { server, client } = getDummyServerAndClient();
 
     server.pop();
 
     let { problemData, clientRecords, records, deletedRecords } =
-        validator.compareClientWithServer(client, server);
+        await validator.compareClientWithServer(client, server);
 
     equal(clientRecords.length, 3);
     equal(records.length, 2)
     equal(deletedRecords.length, 0);
 
     let expected = validator.emptyProblemData();
     expected.serverMissing.push("33333");
     deepEqual(problemData, expected);
   }
-
-  run_next_test();
 });
 
 
-add_test(function test_deleted() {
+add_task(async function test_deleted() {
   let { server, client } = getDummyServerAndClient();
   let deletionRecord = { id: "444444", guid: "444444", deleted: true };
 
   server.push(deletionRecord);
   let validator = new PasswordValidator();
 
   let { problemData, clientRecords, records, deletedRecords } =
-      validator.compareClientWithServer(client, server);
+      await validator.compareClientWithServer(client, server);
 
   equal(clientRecords.length, 3);
   equal(records.length, 4);
   deepEqual(deletedRecords, [deletionRecord]);
 
   let expected = validator.emptyProblemData();
   deepEqual(problemData, expected);
-
-  run_next_test();
 });
 
 
 function run_test() {
   run_next_test();
 }
--- a/services/sync/tests/unit/test_places_guid_downgrade.js
+++ b/services/sync/tests/unit/test_places_guid_downgrade.js
@@ -81,18 +81,19 @@ add_test(function test_initial_state() {
   // Check our schema version to make sure it is actually at 10.
   do_check_eq(db.schemaVersion, 10);
 
   db.close();
 
   run_next_test();
 });
 
-add_test(function test_history_guids() {
+add_task(async function test_history_guids() {
   let engine = new HistoryEngine(Service);
+  await engine.initialize();
   let store = engine._store;
 
   let places = [
     {
       uri: fxuri,
       title: "Get Firefox!",
       visits: [{
         visitDate: Date.now() * 1000,
@@ -150,33 +151,33 @@ add_test(function test_history_guids() {
     result = Async.querySpinningly(stmt, ["guid"]);
     do_check_eq(result.length, 0);
     stmt.finalize();
 
     run_next_test();
   }
 });
 
-add_test(function test_bookmark_guids() {
+add_task(async function test_bookmark_guids() {
   let engine = new BookmarksEngine(Service);
   let store = engine._store;
 
   let fxid = PlacesUtils.bookmarks.insertBookmark(
     PlacesUtils.bookmarks.toolbarFolder,
     fxuri,
     PlacesUtils.bookmarks.DEFAULT_INDEX,
     "Get Firefox!");
   let tbid = PlacesUtils.bookmarks.insertBookmark(
     PlacesUtils.bookmarks.toolbarFolder,
     tburi,
     PlacesUtils.bookmarks.DEFAULT_INDEX,
     "Get Thunderbird!");
 
-  let fxguid = store.GUIDForId(fxid);
-  let tbguid = store.GUIDForId(tbid);
+  let fxguid = await store.GUIDForId(fxid);
+  let tbguid = await store.GUIDForId(tbid);
 
   _("Bookmarks: Verify GUIDs are added to the guid column.");
   let connection = PlacesUtils.history
                               .QueryInterface(Ci.nsPIPlacesDatabase)
                               .DBConnection;
   let stmt = connection.createAsyncStatement(
     "SELECT id FROM moz_bookmarks WHERE guid = :guid");
 
@@ -198,17 +199,15 @@ add_test(function test_bookmark_guids() 
   stmt.params.guid = fxguid;
   result = Async.querySpinningly(stmt, ["guid"]);
   do_check_eq(result.length, 0);
 
   stmt.params.guid = tbguid;
   result = Async.querySpinningly(stmt, ["guid"]);
   do_check_eq(result.length, 0);
   stmt.finalize();
-
-  run_next_test();
 });
 
 function run_test() {
   setPlacesDatabase("places_v10_from_v11.sqlite");
 
   run_next_test();
 }
--- a/services/sync/tests/unit/test_postqueue.js
+++ b/services/sync/tests/unit/test_postqueue.js
@@ -25,159 +25,151 @@ function makePostQueue(config, lastModTi
     return Promise.resolve(responseGenerator.next().value);
   }
 
   let done = () => {}
   let pq = new PostQueue(poster, lastModTime, config, getTestLogger(), done);
   return { pq, stats };
 }
 
-add_test(function test_simple() {
+add_task(async function test_simple() {
   let config = {
     max_post_bytes: 1000,
     max_post_records: 100,
     max_batch_bytes: Infinity,
     max_batch_records: Infinity,
   }
 
   const time = 11111111;
 
   function* responseGenerator() {
     yield { success: true, status: 200, headers: { "x-weave-timestamp": time + 100, "x-last-modified": time + 100 } };
   }
 
   let { pq, stats } = makePostQueue(config, time, responseGenerator());
-  pq.enqueue(makeRecord(10));
-  pq.flush(true);
+  await pq.enqueue(makeRecord(10));
+  await pq.flush(true);
 
   deepEqual(stats.posts, [{
     nbytes: 12, // expect our 10 byte record plus "[]" to wrap it.
     commit: true, // we don't know if we have batch semantics, so committed.
     headers: [["x-if-unmodified-since", time]],
     batch: "true"}]);
-
-  run_next_test();
 });
 
 // Test we do the right thing when we need to make multiple posts when there
 // are no batch semantics
-add_test(function test_max_post_bytes_no_batch() {
+add_task(async function test_max_post_bytes_no_batch() {
   let config = {
     max_post_bytes: 50,
     max_post_records: 4,
     max_batch_bytes: Infinity,
     max_batch_records: Infinity,
   }
 
   const time = 11111111;
   function* responseGenerator() {
     yield { success: true, status: 200, headers: { "x-weave-timestamp": time + 100, "x-last-modified": time + 100 } };
     yield { success: true, status: 200, headers: { "x-weave-timestamp": time + 200, "x-last-modified": time + 200 } };
   }
 
   let { pq, stats } = makePostQueue(config, time, responseGenerator());
-  pq.enqueue(makeRecord(20)); // total size now 22 bytes - "[" + record + "]"
-  pq.enqueue(makeRecord(20)); // total size now 43 bytes - "[" + record + "," + record + "]"
-  pq.enqueue(makeRecord(20)); // this will exceed our byte limit, so will be in the 2nd POST.
-  pq.flush(true);
+  await pq.enqueue(makeRecord(20)); // total size now 22 bytes - "[" + record + "]"
+  await pq.enqueue(makeRecord(20)); // total size now 43 bytes - "[" + record + "," + record + "]"
+  await pq.enqueue(makeRecord(20)); // this will exceed our byte limit, so will be in the 2nd POST.
+  await pq.flush(true);
 
   deepEqual(stats.posts, [
     {
       nbytes: 43, // 43 for the first post
       commit: false,
       headers: [["x-if-unmodified-since", time]],
       batch: "true",
     }, {
       nbytes: 22,
       commit: false, // we know we aren't in a batch, so never commit.
       headers: [["x-if-unmodified-since", time + 100]],
       batch: null,
     }
   ]);
   equal(pq.lastModified, time + 200);
-
-  run_next_test();
 });
 
 // Similar to the above, but we've hit max_records instead of max_bytes.
-add_test(function test_max_post_records_no_batch() {
+add_task(async function test_max_post_records_no_batch() {
   let config = {
     max_post_bytes: 100,
     max_post_records: 2,
     max_batch_bytes: Infinity,
     max_batch_records: Infinity,
   }
 
   const time = 11111111;
 
   function* responseGenerator() {
     yield { success: true, status: 200, headers: { "x-weave-timestamp": time + 100, "x-last-modified": time + 100 } };
     yield { success: true, status: 200, headers: { "x-weave-timestamp": time + 200, "x-last-modified": time + 200 } };
   }
 
   let { pq, stats } = makePostQueue(config, time, responseGenerator());
-  pq.enqueue(makeRecord(20)); // total size now 22 bytes - "[" + record + "]"
-  pq.enqueue(makeRecord(20)); // total size now 43 bytes - "[" + record + "," + record + "]"
-  pq.enqueue(makeRecord(20)); // this will exceed our records limit, so will be in the 2nd POST.
-  pq.flush(true);
+  await pq.enqueue(makeRecord(20)); // total size now 22 bytes - "[" + record + "]"
+  await pq.enqueue(makeRecord(20)); // total size now 43 bytes - "[" + record + "," + record + "]"
+  await pq.enqueue(makeRecord(20)); // this will exceed our records limit, so will be in the 2nd POST.
+  await pq.flush(true);
 
   deepEqual(stats.posts, [
     {
       nbytes: 43, // 43 for the first post
       commit: false,
       batch: "true",
       headers: [["x-if-unmodified-since", time]],
     }, {
       nbytes: 22,
       commit: false, // we know we aren't in a batch, so never commit.
       batch: null,
       headers: [["x-if-unmodified-since", time + 100]],
     }
   ]);
   equal(pq.lastModified, time + 200);
-
-  run_next_test();
 });
 
 // Batch tests.
 
 // Test making a single post when batch semantics are in place.
-add_test(function test_single_batch() {
+add_task(async function test_single_batch() {
   let config = {
     max_post_bytes: 1000,
     max_post_records: 100,
     max_batch_bytes: 2000,
     max_batch_records: 200,
   }
   const time = 11111111;
   function* responseGenerator() {
     yield { success: true, status: 202, obj: { batch: 1234 },
             headers: { "x-last-modified": time, "x-weave-timestamp": time + 100 },
     };
   }
 
   let { pq, stats } = makePostQueue(config, time, responseGenerator());
-  ok(pq.enqueue(makeRecord(10)).enqueued);
-  pq.flush(true);
+  ok((await pq.enqueue(makeRecord(10))).enqueued);
+  await pq.flush(true);
 
   deepEqual(stats.posts, [
     {
       nbytes: 12, // expect our 10 byte record plus "[]" to wrap it.
       commit: true, // we don't know if we have batch semantics, so committed.
       batch: "true",
       headers: [["x-if-unmodified-since", time]],
     }
   ]);
-
-  run_next_test();
 });
 
 // Test we do the right thing when we need to make multiple posts when there
 // are batch semantics in place.
-add_test(function test_max_post_bytes_batch() {
+add_task(async function test_max_post_bytes_batch() {
   let config = {
     max_post_bytes: 50,
     max_post_records: 4,
     max_batch_bytes: 5000,
     max_batch_records: 100,
   }
 
   const time = 11111111;
@@ -186,42 +178,40 @@ add_test(function test_max_post_bytes_ba
             headers: { "x-last-modified": time, "x-weave-timestamp": time + 100 },
     };
     yield { success: true, status: 202, obj: { batch: 1234 },
             headers: { "x-last-modified": time + 200, "x-weave-timestamp": time + 200 },
    };
   }
 
   let { pq, stats } = makePostQueue(config, time, responseGenerator());
-  ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]"
-  ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
-  ok(pq.enqueue(makeRecord(20)).enqueued); // this will exceed our byte limit, so will be in the 2nd POST.
-  pq.flush(true);
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // total size now 22 bytes - "[" + record + "]"
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // this will exceed our byte limit, so will be in the 2nd POST.
+  await pq.flush(true);
 
   deepEqual(stats.posts, [
     {
       nbytes: 43, // 43 for the first post
       commit: false,
       batch: "true",
       headers: [["x-if-unmodified-since", time]],
     }, {
       nbytes: 22,
       commit: true,
       batch: 1234,
       headers: [["x-if-unmodified-since", time]],
     }
   ]);
 
   equal(pq.lastModified, time + 200);
-
-  run_next_test();
 });
 
 // Test we do the right thing when the batch bytes limit is exceeded.
-add_test(function test_max_post_bytes_batch() {
+add_task(async function test_max_post_bytes_batch() {
   let config = {
     max_post_bytes: 50,
     max_post_records: 20,
     max_batch_bytes: 70,
     max_batch_records: 100,
   }
 
   const time0 = 11111111;
@@ -237,26 +227,26 @@ add_test(function test_max_post_bytes_ba
             headers: { "x-last-modified": time1, "x-weave-timestamp": time1 + 100 },
     };
     yield { success: true, status: 202, obj: { batch: 5678 },
             headers: { "x-last-modified": time1 + 200, "x-weave-timestamp": time1 + 200 },
     };
   }
 
   let { pq, stats } = makePostQueue(config, time0, responseGenerator());
-  ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]"
-  ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // total size now 22 bytes - "[" + record + "]"
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
   // this will exceed our POST byte limit, so will be in the 2nd POST - but still in the first batch.
-  ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes for 2nd post, 55 bytes in the batch.
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // 22 bytes for 2nd post, 55 bytes in the batch.
   // this will exceed our batch byte limit, so will be in a new batch.
-  ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes in 3rd post/2nd batch
-  ok(pq.enqueue(makeRecord(20)).enqueued); // 43 bytes in 3rd post/2nd batch
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // 22 bytes in 3rd post/2nd batch
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // 43 bytes in 3rd post/2nd batch
   // This will exceed POST byte limit, so will be in the 4th post, part of the 2nd batch.
-  ok(pq.enqueue(makeRecord(20)).enqueued); // 22 bytes for 4th post/2nd batch
-  pq.flush(true);
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // 22 bytes for 4th post/2nd batch
+  await pq.flush(true);
 
   deepEqual(stats.posts, [
     {
       nbytes: 43, // 43 for the first post
       commit: false,
       batch: "true",
       headers: [["x-if-unmodified-since", time0]],
     }, {
@@ -276,23 +266,21 @@ add_test(function test_max_post_bytes_ba
       nbytes: 22,
       commit: true,
       batch: 5678,
       headers: [["x-if-unmodified-since", time1]],
     },
   ]);
 
   equal(pq.lastModified, time1 + 200);
-
-  run_next_test();
 });
 
 // Test we split up the posts when we exceed the record limit when batch semantics
 // are in place.
-add_test(function test_max_post_bytes_batch() {
+add_task(async function test_max_post_bytes_batch() {
   let config = {
     max_post_bytes: 1000,
     max_post_records: 2,
     max_batch_bytes: 5000,
     max_batch_records: 100,
   }
 
   const time = 11111111;
@@ -301,42 +289,40 @@ add_test(function test_max_post_bytes_ba
             headers: { "x-last-modified": time, "x-weave-timestamp": time + 100 },
     };
     yield { success: true, status: 202, obj: { batch: 1234 },
             headers: { "x-last-modified": time + 200, "x-weave-timestamp": time + 200 },
    };
   }
 
   let { pq, stats } = makePostQueue(config, time, responseGenerator());
-  ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 22 bytes - "[" + record + "]"
-  ok(pq.enqueue(makeRecord(20)).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
-  ok(pq.enqueue(makeRecord(20)).enqueued); // will exceed record limit, so will be in 2nd post.
-  pq.flush(true);
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // total size now 22 bytes - "[" + record + "]"
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // total size now 43 bytes - "[" + record + "," + record + "]"
+  ok((await pq.enqueue(makeRecord(20))).enqueued); // will exceed record limit, so will be in 2nd post.
+  await pq.flush(true);
 
   deepEqual(stats.posts, [
     {
       nbytes: 43, // 43 for the first post
       commit: false,
       batch: "true",
       headers: [["x-if-unmodified-since", time]],
     }, {
       nbytes: 22,
       commit: true,
       batch: 1234,
       headers: [["x-if-unmodified-since", time]],
     }
   ]);
 
   equal(pq.lastModified, time + 200);
-
-  run_next_test();
 });
 
 // Test that a single huge record fails to enqueue
-add_test(function test_huge_record() {
+add_task(async function test_huge_record() {
   let config = {
     max_post_bytes: 50,
     max_post_records: 100,
     max_batch_bytes: 5000,
     max_batch_records: 100,
   }
 
   const time = 11111111;
@@ -345,50 +331,48 @@ add_test(function test_huge_record() {
             headers: { "x-last-modified": time, "x-weave-timestamp": time + 100 },
     };
     yield { success: true, status: 202, obj: { batch: 1234 },
             headers: { "x-last-modified": time + 200, "x-weave-timestamp": time + 200 },
    };
   }
 
   let { pq, stats } = makePostQueue(config, time, responseGenerator());
-  ok(pq.enqueue(makeRecord(20)).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
 
-  let { enqueued, error } = pq.enqueue(makeRecord(1000));
+  let { enqueued, error } = await pq.enqueue(makeRecord(1000));
   ok(!enqueued);
   notEqual(error, undefined);
 
   // make sure that we keep working, skipping the bad record entirely
   // (handling the error the queue reported is left up to caller)
-  ok(pq.enqueue(makeRecord(20)).enqueued);
-  ok(pq.enqueue(makeRecord(20)).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
 
-  pq.flush(true);
+  await pq.flush(true);
 
   deepEqual(stats.posts, [
     {
       nbytes: 43, // 43 for the first post
       commit: false,
       batch: "true",
       headers: [["x-if-unmodified-since", time]],
     }, {
       nbytes: 22,
       commit: true,
       batch: 1234,
       headers: [["x-if-unmodified-since", time]],
     }
   ]);
 
   equal(pq.lastModified, time + 200);
-
-  run_next_test();
 });
 
 // Test we do the right thing when the batch record limit is exceeded.
-add_test(function test_max_records_batch() {
+add_task(async function test_max_records_batch() {
   let config = {
     max_post_bytes: 1000,
     max_post_records: 3,
     max_batch_bytes: 10000,
     max_batch_records: 5,
   }
 
   const time0 = 11111111;
@@ -405,30 +389,30 @@ add_test(function test_max_records_batch
     };
     yield { success: true, status: 202, obj: { batch: 5678 },
             headers: { "x-last-modified": time1 + 200, "x-weave-timestamp": time1 + 200 },
     };
   }
 
   let { pq, stats } = makePostQueue(config, time0, responseGenerator());
 
-  ok(pq.enqueue(makeRecord(20)).enqueued);
-  ok(pq.enqueue(makeRecord(20)).enqueued);
-  ok(pq.enqueue(makeRecord(20)).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
 
-  ok(pq.enqueue(makeRecord(20)).enqueued);
-  ok(pq.enqueue(makeRecord(20)).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
 
-  ok(pq.enqueue(makeRecord(20)).enqueued);
-  ok(pq.enqueue(makeRecord(20)).enqueued);
-  ok(pq.enqueue(makeRecord(20)).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
 
-  ok(pq.enqueue(makeRecord(20)).enqueued);
+  ok((await pq.enqueue(makeRecord(20))).enqueued);
 
-  pq.flush(true);
+  await pq.flush(true);
 
   deepEqual(stats.posts, [
     { // 3 records
       nbytes: 64,
       commit: false,
       batch: "true",
       headers: [["x-if-unmodified-since", time0]],
     }, { // 2 records -- end batch1
@@ -445,11 +429,9 @@ add_test(function test_max_records_batch
       nbytes: 22,
       commit: true,
       batch: 5678,
       headers: [["x-if-unmodified-since", time1]],
     },
   ]);
 
   equal(pq.lastModified, time1 + 200);
-
-  run_next_test();
 });
--- a/services/sync/tests/unit/test_prefs_store.js
+++ b/services/sync/tests/unit/test_prefs_store.js
@@ -17,41 +17,42 @@ startupManager();
 function makePersona(id) {
   return {
     id: id || Math.random().toString(),
     name: Math.random().toString(),
     headerURL: "http://localhost:1234/a"
   };
 }
 
-function run_test() {
+add_task(async function run_test() {
   _("Test fixtures.");
   // read our custom prefs file before doing anything.
   Services.prefs.readUserPrefsFromFile(do_get_file("prefs_test_prefs_store.js"));
 
-  let store = Service.engineManager.get("prefs")._store;
+  let engine = Service.engineManager.get("prefs");
+  let store = engine._store;
   let prefs = new Preferences();
   try {
 
     _("The GUID corresponds to XUL App ID.");
-    let allIDs = store.getAllIDs();
+    let allIDs = await store.getAllIDs();
     let ids = Object.keys(allIDs);
     do_check_eq(ids.length, 1);
     do_check_eq(ids[0], PREFS_GUID);
     do_check_true(allIDs[PREFS_GUID], true);
 
-    do_check_true(store.itemExists(PREFS_GUID));
-    do_check_false(store.itemExists("random-gibberish"));
+    do_check_true((await store.itemExists(PREFS_GUID)));
+    do_check_false((await store.itemExists("random-gibberish")));
 
     _("Unknown prefs record is created as deleted.");
-    let record = store.createRecord("random-gibberish", "prefs");
+    let record = await store.createRecord("random-gibberish", "prefs");
     do_check_true(record.deleted);
 
     _("Prefs record contains only prefs that should be synced.");
-    record = store.createRecord(PREFS_GUID, "prefs");
+    record = await store.createRecord(PREFS_GUID, "prefs");
     do_check_eq(record.value["testing.int"], 123);
     do_check_eq(record.value["testing.string"], "ohai");
     do_check_eq(record.value["testing.bool"], true);
     // non-existing prefs get null as the value
     do_check_eq(record.value["testing.nonexistent"], null);
     // as do prefs that have a default value.
     do_check_eq(record.value["testing.default"], null);
     do_check_false("testing.turned.off" in record.value);
@@ -73,17 +74,17 @@ function run_test() {
     record.value = {
       "testing.int": 42,
       "testing.string": "im in ur prefs",
       "testing.bool": false,
       "testing.deleteme": null,
       "testing.somepref": "im a new pref from other device",
       "services.sync.prefs.sync.testing.somepref": true
     };
-    store.update(record);
+    await store.update(record);
     do_check_eq(prefs.get("testing.int"), 42);
     do_check_eq(prefs.get("testing.string"), "im in ur prefs");
     do_check_eq(prefs.get("testing.bool"), false);
     do_check_eq(prefs.get("testing.deleteme"), undefined);
     do_check_eq(prefs.get("testing.dont.change"), "Please don't change me.");
     do_check_eq(prefs.get("testing.somepref"), "im a new pref from other device");
     do_check_eq(Svc.Prefs.get("prefs.sync.testing.somepref"), true);
 
@@ -96,65 +97,65 @@ function run_test() {
 
     let persona1 = makePersona();
     let persona2 = makePersona();
     let usedThemes = JSON.stringify([persona1, persona2]);
     record.value = {
       "lightweightThemes.selectedThemeID": persona1.id,
       "lightweightThemes.usedThemes": usedThemes
     };
-    store.update(record);
+    await store.update(record);
     do_check_eq(prefs.get("lightweightThemes.selectedThemeID"), persona1.id);
     do_check_true(Utils.deepEquals(LightweightThemeManager.currentTheme,
                   persona1));
 
     _("Disable persona");
     record.value = {
       "lightweightThemes.selectedThemeID": null,
       "lightweightThemes.usedThemes": usedThemes
     };
-    store.update(record);
+    await store.update(record);
     do_check_false(!!prefs.get("lightweightThemes.selectedThemeID"));
     do_check_eq(LightweightThemeManager.currentTheme, null);
 
     _("Only the current app's preferences are applied.");
     record = new PrefRec("prefs", "some-fake-app");
     record.value = {
       "testing.int": 98
     };
-    store.update(record);
+    await store.update(record);
     do_check_eq(prefs.get("testing.int"), 42);
 
     _("The light-weight theme preference is handled correctly.");
     let lastThemeID = undefined;
     let orig_updateLightWeightTheme = store._updateLightWeightTheme;
     store._updateLightWeightTheme = function(themeID) {
       lastThemeID = themeID;
     }
     try {
       record = new PrefRec("prefs", PREFS_GUID);
       record.value = {
         "testing.int": 42,
       };
-      store.update(record);
+      await store.update(record);
       do_check_true(lastThemeID === undefined,
                     "should not have tried to change the theme with an unrelated pref.");
       Services.prefs.setCharPref("lightweightThemes.selectedThemeID", "foo");
       record.value = {
         "lightweightThemes.selectedThemeID": "foo",
       };
-      store.update(record);
+      await store.update(record);
       do_check_true(lastThemeID === undefined,
                     "should not have tried to change the theme when the incoming pref matches current value.");
 
       record.value = {
         "lightweightThemes.selectedThemeID": "bar",
       };
-      store.update(record);
+      await store.update(record);
       do_check_eq(lastThemeID, "bar",
                   "should have tried to change the theme when the incoming pref was different.");
     } finally {
       store._updateLightWeightTheme = orig_updateLightWeightTheme;
     }
   } finally {
     prefs.resetBranch("");
   }
-}
+});
--- a/services/sync/tests/unit/test_prefs_tracker.js
+++ b/services/sync/tests/unit/test_prefs_tracker.js
@@ -3,17 +3,17 @@
 
 Cu.import("resource://gre/modules/Preferences.jsm");
 Cu.import("resource://services-common/utils.js");
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines/prefs.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-function run_test() {
+add_task(async function run_test() {
   let engine = Service.engineManager.get("prefs");
   let tracker = engine._tracker;
 
   // Don't write out by default.
   tracker.persistChangedIDs = false;
 
   let prefs = new Preferences();
 
@@ -23,26 +23,26 @@ function run_test() {
     do_check_eq(Svc.Prefs.get("engine.prefs.modified"), undefined);
     do_check_false(tracker.modified);
 
     tracker.modified = true;
     do_check_eq(Svc.Prefs.get("engine.prefs.modified"), true);
     do_check_true(tracker.modified);
 
     _("Engine's getChangedID() just returns the one GUID we have.");
-    let changedIDs = engine.getChangedIDs();
+    let changedIDs = await engine.getChangedIDs();
     let ids = Object.keys(changedIDs);
     do_check_eq(ids.length, 1);
     do_check_eq(ids[0], CommonUtils.encodeBase64URL(Services.appinfo.ID));
 
     Svc.Prefs.set("engine.prefs.modified", false);
     do_check_false(tracker.modified);
 
     _("No modified state, so no changed IDs.");
-    do_check_empty(engine.getChangedIDs());
+    do_check_empty((await engine.getChangedIDs()));
 
     _("Initial score is 0");
     do_check_eq(tracker.score, 0);
 
     _("Test fixtures.");
     Svc.Prefs.set("prefs.sync.testing.int", true);
 
     _("Test fixtures haven't upped the tracker score yet because it hasn't started tracking yet.");
@@ -80,9 +80,9 @@ function run_test() {
     prefs.set("testing.other", "blergh");
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
     do_check_eq(tracker.modified, false);
 
   } finally {
     Svc.Obs.notify("weave:engine:stop-tracking");
     prefs.resetBranch("");
   }
-}
+});
--- a/services/sync/tests/unit/test_resource_ua.js
+++ b/services/sync/tests/unit/test_resource_ua.js
@@ -40,26 +40,25 @@ add_task(async function setup() {
 
   // Note this string is missing the trailing ".destkop" as the test
   // adjusts the "client.type" pref where that portion comes from.
   expectedUA = Services.appinfo.name + "/" + Services.appinfo.version +
                " (" + httpProtocolHandler.oscpu + ")" +
                " FxSync/" + WEAVE_VERSION + "." +
                Services.appinfo.appBuildID;
 
-})
+});
 
-add_test(function test_fetchInfo() {
+add_task(async function test_fetchInfo() {
   _("Testing _fetchInfo.");
-  Service.login();
-  Service._fetchInfo();
+  await Service.login();
+  await Service._fetchInfo();
   _("User-Agent: " + ua);
   do_check_eq(ua, expectedUA + ".desktop");
   ua = "";
-  run_next_test();
 });
 
 add_task(async function test_desktop_post() {
   _("Testing direct Resource POST.");
   let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
   await r.post("foo=bar");
   _("User-Agent: " + ua);
   do_check_eq(ua, expectedUA + ".desktop");
--- a/services/sync/tests/unit/test_score_triggers.js
+++ b/services/sync/tests/unit/test_score_triggers.js
@@ -33,32 +33,32 @@ function sync_httpd_setup() {
   let cl = new ServerCollection();
   handlers["/1.1/johndoe/storage/clients"] =
     upd("clients", cl.handler());
 
   return httpd_setup(handlers);
 }
 
 async function setUp(server) {
-  let engineInfo = registerRotaryEngine();
+  let engineInfo = await registerRotaryEngine();
   await SyncTestingInfrastructure(server, "johndoe", "ilovejane");
   return engineInfo;
 }
 
 function run_test() {
   initTestLogging("Trace");
 
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
 
   run_next_test();
 }
 
-add_test(function test_tracker_score_updated() {
+add_task(async function test_tracker_score_updated() {
   enableValidationPrefs();
-  let { engine, tracker } = registerRotaryEngine();
+  let { engine, tracker } = await registerRotaryEngine();
 
   let scoreUpdated = 0;
 
   function onScoreUpdated() {
     scoreUpdated++;
   }
 
   Svc.Obs.add("weave:engine:score:updated", onScoreUpdated());
@@ -70,35 +70,34 @@ add_test(function test_tracker_score_upd
     do_check_eq(engine.score, SCORE_INCREMENT_SMALL);
 
     do_check_eq(scoreUpdated, 1);
   } finally {
     Svc.Obs.remove("weave:engine:score:updated", onScoreUpdated);
     tracker.resetScore();
     tracker.clearChangedIDs();
     Service.engineManager.unregister(engine);
-    run_next_test();
   }
 });
 
 add_task(async function test_sync_triggered() {
   let server = sync_httpd_setup();
   let { engine, tracker } = await setUp(server);
 
-  Service.login();
+  await Service.login();
 
   Service.scheduler.syncThreshold = MULTI_DEVICE_THRESHOLD;
 
 
   do_check_eq(Status.login, LOGIN_SUCCEEDED);
   tracker.score += SCORE_INCREMENT_XLARGE;
 
   await promiseOneObserver("weave:service:sync:finish");
 
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 
   tracker.clearChangedIDs();
   Service.engineManager.unregister(engine);
 });
 
 add_task(async function test_clients_engine_sync_triggered() {
   enableValidationPrefs();
@@ -106,26 +105,26 @@ add_task(async function test_clients_eng
   _("Ensure that client engine score changes trigger a sync.");
 
   // The clients engine is not registered like other engines. Therefore,
   // it needs special treatment throughout the code. Here, we verify the
   // global score tracker gives it that treatment. See bug 676042 for more.
 
   let server = sync_httpd_setup();
   let { engine, tracker } = await setUp(server);
-  Service.login();
+  await Service.login();
 
   Service.scheduler.syncThreshold = MULTI_DEVICE_THRESHOLD;
   do_check_eq(Status.login, LOGIN_SUCCEEDED);
   Service.clientsEngine._tracker.score += SCORE_INCREMENT_XLARGE;
 
   await promiseOneObserver("weave:service:sync:finish");
   _("Sync due to clients engine change completed.");
 
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 
   tracker.clearChangedIDs();
   Service.engineManager.unregister(engine);
 });
 
 add_task(async function test_incorrect_credentials_sync_not_triggered() {
   enableValidationPrefs();
@@ -143,20 +142,20 @@ add_task(async function test_incorrect_c
   // Faking incorrect credentials to prevent score update.
   Status.login = LOGIN_FAILED_LOGIN_REJECTED;
   tracker.score += SCORE_INCREMENT_XLARGE;
 
   // First wait >100ms (nsITimers can take up to that much time to fire, so
   // we can account for the timer in delayedAutoconnect) and then one event
   // loop tick (to account for a possible call to weave:service:sync:start).
   await promiseNamedTimer(150, {}, "timer");
-  await promiseNextTick();
+  await Async.promiseYield();
 
   Svc.Obs.remove("weave:service:sync:start", onSyncStart);
 
   do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED);
 
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 
   tracker.clearChangedIDs();
   Service.engineManager.unregister(engine);
 });
--- a/services/sync/tests/unit/test_service_detect_upgrade.js
+++ b/services/sync/tests/unit/test_service_detect_upgrade.js
@@ -6,18 +6,16 @@ Cu.import("resource://services-sync/cons
 Cu.import("resource://services-sync/keys.js");
 Cu.import("resource://services-sync/engines/tabs.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-Service.engineManager.register(TabEngine);
-
 add_task(async function v4_upgrade() {
   enableValidationPrefs();
 
   let clients = new ServerCollection();
   let meta_global = new ServerWBO("global");
 
   // Tracking info/collections.
   let collectionsHelper = track_collections_helper();
@@ -46,55 +44,55 @@ add_task(async function v4_upgrade() {
   try {
 
     Service.status.resetSync();
 
     _("Logging in.");
 
     await configureIdentity({ "username": "johndoe" }, server);
 
-    Service.login();
+    await Service.login();
     do_check_true(Service.isLoggedIn);
-    Service.verifyAndFetchSymmetricKeys();
-    do_check_true(Service._remoteSetup());
+    await Service.verifyAndFetchSymmetricKeys();
+    do_check_true((await Service._remoteSetup()));
 
-    function test_out_of_date() {
+    async function test_out_of_date() {
       _("Old meta/global: " + JSON.stringify(meta_global));
       meta_global.payload = JSON.stringify({"syncID": "foooooooooooooooooooooooooo",
                                             "storageVersion": STORAGE_VERSION + 1});
       collections.meta = Date.now() / 1000;
       _("New meta/global: " + JSON.stringify(meta_global));
       Service.recordManager.set(Service.metaURL, meta_global);
       try {
-        Service.sync();
+        await Service.sync();
       } catch (ex) {
       }
       do_check_eq(Service.status.sync, VERSION_OUT_OF_DATE);
     }
 
     // See what happens when we bump the storage version.
     _("Syncing after server has been upgraded.");
-    test_out_of_date();
+    await test_out_of_date();
 
     // Same should happen after a wipe.
     _("Syncing after server has been upgraded and wiped.");
-    Service.wipeServer();
-    test_out_of_date();
+    await Service.wipeServer();
+    await test_out_of_date();
 
     // Now's a great time to test what happens when keys get replaced.
     _("Syncing afresh...");
     Service.logout();
     Service.collectionKeys.clear();
     meta_global.payload = JSON.stringify({"syncID": "foooooooooooooobbbbbbbbbbbb",
                                           "storageVersion": STORAGE_VERSION});
     collections.meta = Date.now() / 1000;
     Service.recordManager.set(Service.metaURL, meta_global);
-    Service.login();
+    await Service.login();
     do_check_true(Service.isLoggedIn);
-    Service.sync();
+    await Service.sync();
     do_check_true(Service.isLoggedIn);
 
     let serverDecrypted;
     let serverKeys;
     let serverResp;
 
 
     async function retrieve_server_default() {
@@ -145,29 +143,29 @@ add_task(async function v4_upgrade() {
     _("Indeed, they're what we set them to...");
     do_check_eq("KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
                 (await retrieve_server_default())[0]);
 
     _("Sync. Should download changed keys automatically.");
     let oldClientsModified = collections.clients;
     let oldTabsModified = collections.tabs;
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
     _("New key should have forced upload of data.");
     _("Tabs: " + oldTabsModified + " < " + collections.tabs);
     _("Clients: " + oldClientsModified + " < " + collections.clients);
     do_check_true(collections.clients > oldClientsModified);
     do_check_true(collections.tabs > oldTabsModified);
 
     _("... and keys will now match.");
     await retrieve_and_compare_default(true);
 
     // Clean up.
-    Service.startOver();
+    await Service.startOver();
 
   } finally {
     Svc.Prefs.resetBranch("");
     await promiseStopServer(server);
   }
 });
 
 add_task(async function v5_upgrade() {
@@ -229,26 +227,26 @@ add_task(async function v5_upgrade() {
     _("Generating new keys.");
     generateNewKeys(Service.collectionKeys);
 
     // Now sync and see what happens. It should be a version fail, not a crypto
     // fail.
 
     _("Logging in.");
     try {
-      Service.login();
+      await Service.login();
     } catch (e) {
       _("Exception: " + e);
     }
     _("Status: " + Service.status);
     do_check_false(Service.isLoggedIn);
     do_check_eq(VERSION_OUT_OF_DATE, Service.status.sync);
 
     // Clean up.
-    Service.startOver();
+    await Service.startOver();
 
   } finally {
     Svc.Prefs.resetBranch("");
     await promiseStopServer(server);
   }
 });
 
 function run_test() {
--- a/services/sync/tests/unit/test_service_login.js
+++ b/services/sync/tests/unit/test_service_login.js
@@ -3,45 +3,40 @@
 
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/policies.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
+Log.repository.rootLogger.addAppender(new Log.DumpAppender());
+
 function login_handling(handler) {
   return function(request, response) {
     if (has_hawk_header(request)) {
       handler(request, response);
     } else {
       let body = "Unauthorized";
       response.setStatusLine(request.httpVersion, 401, "Unauthorized");
       response.setHeader("Content-Type", "text/plain");
       response.bodyOutputStream.write(body, body.length);
     }
   };
 }
 
-function run_test() {
-  Log.repository.rootLogger.addAppender(new Log.DumpAppender());
-
-  run_next_test();
-}
-
-add_test(function test_offline() {
+add_task(async function test_offline() {
   try {
     _("The right bits are set when we're offline.");
     Services.io.offline = true;
-    do_check_false(!!Service.login());
+    do_check_false(!!(await Service.login()));
     do_check_eq(Service.status.login, LOGIN_FAILED_NETWORK_ERROR);
     Services.io.offline = false;
   } finally {
     Svc.Prefs.resetBranch("");
-    run_next_test();
   }
 });
 
 function setup() {
   let janeHelper = track_collections_helper();
   let janeU      = janeHelper.with_updated_collection;
   let johnHelper = track_collections_helper();
   let johnU      = johnHelper.with_updated_collection;
@@ -68,35 +63,35 @@ add_task(async function test_login_logou
   let server = setup();
 
   try {
     _("Force the initial state.");
     Service.status.service = STATUS_OK;
     do_check_eq(Service.status.service, STATUS_OK);
 
     _("Try logging in. It won't work because we're not configured yet.");
-    Service.login();
+    await Service.login();
     do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
     do_check_eq(Service.status.login, LOGIN_FAILED_NO_USERNAME);
     do_check_false(Service.isLoggedIn);
 
     _("Try again with a configured account");
     await configureIdentity({ username: "johndoe" }, server);
-    Service.login();
+    await Service.login();
     do_check_eq(Service.status.service, STATUS_OK);
     do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
     do_check_true(Service.isLoggedIn);
 
     _("Profile refresh edge case: FxA configured but prefs reset");
-    Service.startOver();
+    await Service.startOver();
     let config = makeIdentityConfig({ username: "johndoe" }, server);
     config.fxaccount.token.endpoint = server.baseURI + "/1.1/" + config.username + "/";
     configureFxAccountIdentity(Service.identity, config);
 
-    Service.login();
+    await Service.login();
     do_check_eq(Service.status.service, STATUS_OK);
     do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
     do_check_true(Service.isLoggedIn);
 
     _("Logout.");
     Service.logout();
     do_check_false(Service.isLoggedIn);
 
@@ -115,24 +110,24 @@ add_task(async function test_login_on_sy
 
   let server = setup();
   await configureIdentity({ username: "johndoe" }, server);
 
   try {
     _("Sync calls login.");
     let oldLogin = Service.login;
     let loginCalled = false;
-    Service.login = function() {
+    Service.login = async function() {
       loginCalled = true;
       Service.status.login = LOGIN_SUCCEEDED;
       this._loggedIn = false;           // So that sync aborts.
       return true;
     };
 
-    Service.sync();
+    await Service.sync();
 
     do_check_true(loginCalled);
     Service.login = oldLogin;
 
     // Stub mpLocked.
     let mpLocked = true;
     Utils.mpLocked = () => mpLocked;
 
@@ -177,28 +172,28 @@ add_task(async function test_login_on_sy
     // Testing exception handling if master password dialog is canceled.
     // Do this by monkeypatching.
     Service.identity.unlockAndVerifyAuthState = () => Promise.resolve(MASTER_PASSWORD_LOCKED);
 
     let cSTCalled = false;
     let lockedSyncCalled = false;
 
     Service.scheduler.clearSyncTriggers = function() { cSTCalled = true; };
-    Service._lockedSync = function() { lockedSyncCalled = true; };
+    Service._lockedSync = async function() { lockedSyncCalled = true; };
 
     _("If master password is canceled, login fails and we report lockage.");
-    do_check_false(!!Service.login());
+    do_check_false(!!(await Service.login()));
     do_check_eq(Service.status.login, MASTER_PASSWORD_LOCKED);
     do_check_eq(Service.status.service, LOGIN_FAILED);
     _("Locked? " + Utils.mpLocked());
     _("checkSync reports the correct term.");
     do_check_eq(Service._checkSync(), kSyncMasterPasswordLocked);
 
     _("Sync doesn't proceed and clears triggers if MP is still locked.");
-    Service.sync();
+    await Service.sync();
 
     do_check_true(cSTCalled);
     do_check_false(lockedSyncCalled);
 
     // N.B., a bunch of methods are stubbed at this point. Be careful putting
     // new tests after this point!
 
   } finally {
--- a/services/sync/tests/unit/test_service_startOver.js
+++ b/services/sync/tests/unit/test_service_startOver.js
@@ -2,36 +2,34 @@
  * http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
+initTestLogging("Trace");
+
 function BlaEngine() {
   SyncEngine.call(this, "Bla", Service);
 }
 BlaEngine.prototype = {
   __proto__: SyncEngine.prototype,
 
   removed: false,
   async removeClientData() {
     this.removed = true;
   }
 
 };
 
-Service.engineManager.register(BlaEngine);
-
-
-function run_test() {
-  initTestLogging("Trace");
-  run_next_test();
-}
+add_task(async function setup() {
+  await Service.engineManager.register(BlaEngine);
+});
 
 add_task(async function test_resetLocalData() {
   await configureIdentity();
   Service.status.enforceBackoff = true;
   Service.status.backoffInterval = 42;
   Service.status.minimumNextSync = 23;
 
   // Verify set up.
@@ -41,57 +39,53 @@ add_task(async function test_resetLocalD
   let observerCalled = false;
   Svc.Obs.add("weave:service:start-over", function onStartOver() {
     Svc.Obs.remove("weave:service:start-over", onStartOver);
     observerCalled = true;
 
     do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
   });
 
-  Service.startOver();
+  await Service.startOver();
   do_check_true(observerCalled);
 
   // Verify the site was nuked from orbit.
   do_check_eq(Svc.Prefs.get("username"), undefined);
 
   do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
   do_check_false(Service.status.enforceBackoff);
   do_check_eq(Service.status.backoffInterval, 0);
   do_check_eq(Service.status.minimumNextSync, 0);
 });
 
-add_test(function test_removeClientData() {
+add_task(async function test_removeClientData() {
   let engine = Service.engineManager.get("bla");
 
   // No cluster URL = no removal.
   do_check_false(engine.removed);
-  Service.startOver();
+  await Service.startOver();
   do_check_false(engine.removed);
 
   Service.clusterURL = "https://localhost/";
 
   do_check_false(engine.removed);
-  Service.startOver();
+  await Service.startOver();
   do_check_true(engine.removed);
-
-  run_next_test();
 });
 
-add_test(function test_reset_SyncScheduler() {
+add_task(async function test_reset_SyncScheduler() {
   // Some non-default values for SyncScheduler's attributes.
   Service.scheduler.idle = true;
   Service.scheduler.hasIncomingItems = true;
   Svc.Prefs.set("clients.devices.desktop", 42);
   Service.scheduler.nextSync = Date.now();
   Service.scheduler.syncThreshold = MULTI_DEVICE_THRESHOLD;
   Service.scheduler.syncInterval = Service.scheduler.activeInterval;
 
-  Service.startOver();
+  await Service.startOver();
 
   do_check_false(Service.scheduler.idle);
   do_check_false(Service.scheduler.hasIncomingItems);
   do_check_eq(Service.scheduler.numClients, 0);
   do_check_eq(Service.scheduler.nextSync, 0);
   do_check_eq(Service.scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
   do_check_eq(Service.scheduler.syncInterval, Service.scheduler.singleDeviceInterval);
-
-  run_next_test();
 });
--- a/services/sync/tests/unit/test_service_startup.js
+++ b/services/sync/tests/unit/test_service_startup.js
@@ -23,27 +23,29 @@ function run_test() {
   Service.identity.username = "johndoe";
   do_check_true(xps.enabled);
 
   Cu.import("resource://services-sync/service.js");
 
   _("Service is enabled.");
   do_check_eq(Service.enabled, true);
 
-  _("Engines are registered.");
-  let engines = Service.engineManager.getAll();
-  do_check_true(Utils.deepEquals(engines.map(engine => engine.name),
-                                 ["tabs", "bookmarks", "forms", "history"]));
-
   _("Observers are notified of startup");
   do_test_pending();
 
   do_check_false(Service.status.ready);
   do_check_false(xps.ready);
-  Observers.add("weave:service:ready", function(subject, data) {
-    do_check_true(Service.status.ready);
-    do_check_true(xps.ready);
+
+  Async.promiseSpinningly(promiseOneObserver("weave:service:ready"));
+
+  do_check_true(Service.status.ready);
+  do_check_true(xps.ready);
 
-    // Clean up.
-    Svc.Prefs.resetBranch("");
-    do_test_finished();
-  });
+  _("Engines are registered.");
+  let engines = Service.engineManager.getAll();
+  do_check_true(Utils.deepEquals(engines.map(engine => engine.name),
+                                 ["tabs", "bookmarks", "forms", "history"]));
+
+  // Clean up.
+  Svc.Prefs.resetBranch("");
+
+  do_test_finished();
 }
--- a/services/sync/tests/unit/test_service_sync_401.js
+++ b/services/sync/tests/unit/test_service_sync_401.js
@@ -42,41 +42,41 @@ add_task(async function run_test() {
     Svc.Prefs.set("lastPing", Math.floor(Date.now() / 1000));
 
     let threw = false;
     Svc.Obs.add("weave:service:sync:error", function(subject, data) {
       threw = true;
     });
 
     _("Initial state: We're successfully logged in.");
-    Service.login();
+    await Service.login();
     do_check_true(Service.isLoggedIn);
     do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
 
     _("Simulate having changed the password somewhere else.");
     Service.identity._token.id = "somethingelse";
     Service.identity.unlockAndVerifyAuthState = () => Promise.resolve(LOGIN_FAILED_LOGIN_REJECTED);
 
     _("Let's try to sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Verify that sync() threw an exception.");
     do_check_true(threw);
 
     _("We're no longer logged in.");
     do_check_false(Service.isLoggedIn);
 
     _("Sync status won't have changed yet, because we haven't tried again.");
 
     _("globalScore is reset upon starting a sync.");
     do_check_eq(Service.scheduler.globalScore, 0);
 
     _("Our next sync will fail appropriately.");
     try {
-      Service.sync();
+      await Service.sync();
     } catch (ex) {
     }
     do_check_eq(Service.status.login, LOGIN_FAILED_LOGIN_REJECTED);
 
   } finally {
     Svc.Prefs.resetBranch("");
     await promiseStopServer(server);
   }
--- a/services/sync/tests/unit/test_service_sync_locked.js
+++ b/services/sync/tests/unit/test_service_sync_locked.js
@@ -1,15 +1,15 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-function run_test() {
+add_task(async function run_test() {
   validate_all_future_pings();
   let debug = [];
   let info  = [];
 
   function augmentLogger(old) {
     let d = old.debug;
     let i = old.info;
     // For the purposes of this test we don't need to do full formatting
@@ -23,15 +23,15 @@ function run_test() {
 
   augmentLogger(Service._log);
 
   // Avoid daily ping
   Svc.Prefs.set("lastPing", Math.floor(Date.now() / 1000));
 
   _("Check that sync will log appropriately if already in 'progress'.");
   Service._locked = true;
-  Service.sync();
+  await Service.sync();
   Service._locked = false;
 
   do_check_true(debug[debug.length - 2].startsWith("Exception calling WrappedLock: Could not acquire lock. Label: \"service.js: login\"."));
   do_check_eq(info[info.length - 1], "Cannot start sync: already syncing?");
-}
+});
 
--- a/services/sync/tests/unit/test_service_sync_remoteSetup.js
+++ b/services/sync/tests/unit/test_service_sync_remoteSetup.js
@@ -72,127 +72,127 @@ add_task(async function run_test() {
       restore() { server.registerPathHandler(path, handlers[path]); }
     }
   }
 
   let server = httpd_setup(handlers);
 
   try {
     _("Checking Status.sync with no credentials.");
-    Service.verifyAndFetchSymmetricKeys();
+    await Service.verifyAndFetchSymmetricKeys();
     do_check_eq(Service.status.sync, CREDENTIALS_CHANGED);
     do_check_eq(Service.status.login, LOGIN_FAILED_NO_PASSPHRASE);
 
     await configureIdentity({ username: "johndoe" }, server);
 
-    Service.login();
+    await Service.login();
     _("Checking that remoteSetup returns true when credentials have changed.");
     (await Service.recordManager.get(Service.metaURL)).payload.syncID = "foobar";
-    do_check_true(Service._remoteSetup());
+    do_check_true((await Service._remoteSetup()));
 
     let returnStatusCode = (method, code) => (oldMethod) => (req, res) => {
       if (req.method === method) {
         res.setStatusLine(req.httpVersion, code, "");
       } else {
         oldMethod(req, res);
       }
     };
 
     let mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 401));
     Service.recordManager.del(Service.metaURL);
     _("Checking that remoteSetup returns false on 401 on first get /meta/global.");
-    do_check_false(Service._remoteSetup());
+    do_check_false((await Service._remoteSetup()));
     mock.restore();
 
-    Service.login();
+    await Service.login();
     mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 503));
     Service.recordManager.del(Service.metaURL);
     _("Checking that remoteSetup returns false on 503 on first get /meta/global.");
-    do_check_false(Service._remoteSetup());
+    do_check_false((await Service._remoteSetup()));
     do_check_eq(Service.status.sync, METARECORD_DOWNLOAD_FAIL);
     mock.restore();
 
-    Service.login();
+    await Service.login();
     mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 404));
     Service.recordManager.del(Service.metaURL);
     _("Checking that remoteSetup recovers on 404 on first get /meta/global.");
-    do_check_true(Service._remoteSetup());
+    do_check_true((await Service._remoteSetup()));
     mock.restore();
 
-    let makeOutdatedMeta = () => {
+    let makeOutdatedMeta = async () => {
       Service.metaModified = 0;
-      let infoResponse = Service._fetchInfo();
+      let infoResponse = await Service._fetchInfo();
       return {
         status: infoResponse.status,
         obj: {
           crypto: infoResponse.obj.crypto,
           clients: infoResponse.obj.clients,
           meta: 1
         }
       };
     }
 
     _("Checking that remoteSetup recovers on 404 on get /meta/global after clear cached one.");
     mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 404));
     Service.recordManager.set(Service.metaURL, { isNew: false });
-    do_check_true(Service._remoteSetup(makeOutdatedMeta()));
+    do_check_true((await Service._remoteSetup((await makeOutdatedMeta()))));
     mock.restore();
 
     _("Checking that remoteSetup returns false on 503 on get /meta/global after clear cached one.");
     mock = mockHandler(GLOBAL_PATH, returnStatusCode("GET", 503));
     Service.status.sync = "";
     Service.recordManager.set(Service.metaURL, { isNew: false });
-    do_check_false(Service._remoteSetup(makeOutdatedMeta()));
+    do_check_false((await Service._remoteSetup((await makeOutdatedMeta()))));
     do_check_eq(Service.status.sync, "");
     mock.restore();
 
     metaColl.delete({});
 
     _("Do an initial sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Checking that remoteSetup returns true.");
-    do_check_true(Service._remoteSetup());
+    do_check_true((await Service._remoteSetup()));
 
     _("Verify that the meta record was uploaded.");
     do_check_eq(meta_global.data.syncID, Service.syncID);
     do_check_eq(meta_global.data.storageVersion, STORAGE_VERSION);
     do_check_eq(meta_global.data.engines.clients.version, Service.clientsEngine.version);
     do_check_eq(meta_global.data.engines.clients.syncID, Service.clientsEngine.syncID);
 
     _("Set the collection info hash so that sync() will remember the modified times for future runs.");
     collections.meta = Service.clientsEngine.lastSync;
     collections.clients = Service.clientsEngine.lastSync;
-    Service.sync();
+    await Service.sync();
 
     _("Sync again and verify that meta/global wasn't downloaded again");
     meta_global.wasCalled = false;
-    Service.sync();
+    await Service.sync();
     do_check_false(meta_global.wasCalled);
 
     _("Fake modified records. This will cause a redownload, but not reupload since it hasn't changed.");
     collections.meta += 42;
     meta_global.wasCalled = false;
 
     let metaModified = meta_global.modified;
 
-    Service.sync();
+    await Service.sync();
     do_check_true(meta_global.wasCalled);
     do_check_eq(metaModified, meta_global.modified);
 
     // Try to screw up HMAC calculation.
     // Re-encrypt keys with a new random keybundle, and upload them to the
     // server, just as might happen with a second client.
     _("Attempting to screw up HMAC by re-encrypting keys.");
     let keys = Service.collectionKeys.asWBO();
     let b = new BulkKeyBundle("hmacerror");
     b.generateRandom();
     collections.crypto = keys.modified = 100 + (Date.now() / 1000);  // Future modification time.
     keys.encrypt(b);
     keys.upload(Service.resource(Service.cryptoKeysURL));
 
-    do_check_false(Service.verifyAndFetchSymmetricKeys());
+    do_check_false((await Service.verifyAndFetchSymmetricKeys()));
     do_check_eq(Service.status.login, LOGIN_FAILED_INVALID_PASSPHRASE);
   } finally {
     Svc.Prefs.resetBranch("");
     server.stop(do_test_finished);
   }
 });
--- a/services/sync/tests/unit/test_service_sync_specified.js
+++ b/services/sync/tests/unit/test_service_sync_specified.js
@@ -4,42 +4,37 @@
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/clients.js");
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-initTestLogging();
-Service.engineManager.clear();
-
 let syncedEngines = []
 
 function SteamEngine() {
   SyncEngine.call(this, "Steam", Service);
 }
 SteamEngine.prototype = {
   __proto__: SyncEngine.prototype,
-  _sync: function _sync() {
+  async _sync() {
     syncedEngines.push(this.name);
   }
 };
-Service.engineManager.register(SteamEngine);
 
 function StirlingEngine() {
   SyncEngine.call(this, "Stirling", Service);
 }
 StirlingEngine.prototype = {
   __proto__: SteamEngine.prototype,
-  _sync: function _sync() {
+  async _sync() {
     syncedEngines.push(this.name);
   }
 };
-Service.engineManager.register(StirlingEngine);
 
 // Tracking info/collections.
 var collectionsHelper = track_collections_helper();
 var upd = collectionsHelper.with_updated_collection;
 
 function sync_httpd_setup(handlers) {
 
   handlers["/1.1/johndoe/info/collections"] = collectionsHelper.handler;
@@ -69,101 +64,105 @@ async function setUp() {
 
   let server = sync_httpd_setup({
     "/1.1/johndoe/storage/meta/global": new ServerWBO("global", {}).handler(),
   });
   await SyncTestingInfrastructure(server, "johndoe", "ilovejane");
   return server;
 }
 
-function run_test() {
+add_task(async function setup() {
+  initTestLogging();
+  Service.engineManager.clear();
+
   initTestLogging("Trace");
   validate_all_future_pings();
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
 
-  run_next_test();
-}
+  await Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(StirlingEngine);
+});
 
 add_task(async function test_noEngines() {
   enableValidationPrefs();
 
   _("Test: An empty array of engines to sync does nothing.");
   let server = await setUp();
 
   try {
     _("Sync with no engines specified.");
-    Service.sync([]);
+    await Service.sync([]);
     deepEqual(syncedEngines, [], "no engines were synced");
 
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_oneEngine() {
   enableValidationPrefs();
 
   _("Test: Only one engine is synced.");
   let server = await setUp();
 
   try {
 
     _("Sync with 1 engine specified.");
-    Service.sync(["steam"]);
+    await Service.sync(["steam"]);
     deepEqual(syncedEngines, ["steam"])
 
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_bothEnginesSpecified() {
   enableValidationPrefs();
 
   _("Test: All engines are synced when specified in the correct order (1).");
   let server = await setUp();
 
   try {
     _("Sync with both engines specified.");
-    Service.sync(["steam", "stirling"]);
+    await Service.sync(["steam", "stirling"]);
     deepEqual(syncedEngines, ["steam", "stirling"])
 
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_bothEnginesSpecified() {
   enableValidationPrefs();
 
   _("Test: All engines are synced when specified in the correct order (2).");
   let server = await setUp();
 
   try {
     _("Sync with both engines specified.");
-    Service.sync(["stirling", "steam"]);
+    await Service.sync(["stirling", "steam"]);
     deepEqual(syncedEngines, ["stirling", "steam"])
 
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_bothEnginesDefault() {
   enableValidationPrefs();
 
   _("Test: All engines are synced when nothing is specified.");
   let server = await setUp();
 
   try {
-    Service.sync();
+    await Service.sync();
     deepEqual(syncedEngines, ["steam", "stirling"])
 
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
--- a/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
+++ b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
@@ -4,53 +4,48 @@
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/clients.js");
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-initTestLogging();
-Service.engineManager.clear();
-
 function QuietStore() {
   Store.call("Quiet");
 }
 QuietStore.prototype = {
-  getAllIDs: function getAllIDs() {
+  async getAllIDs() {
     return [];
   }
 }
 
 function SteamEngine() {
   SyncEngine.call(this, "Steam", Service);
 }
 SteamEngine.prototype = {
   __proto__: SyncEngine.prototype,
   // We're not interested in engine sync but what the service does.
   _storeObj: QuietStore,
 
-  _sync: function _sync() {
-    this._syncStartup();
+  _sync: async function _sync() {
+    await this._syncStartup();
   }
 };
-Service.engineManager.register(SteamEngine);
 
 function StirlingEngine() {
   SyncEngine.call(this, "Stirling", Service);
 }
 StirlingEngine.prototype = {
   __proto__: SteamEngine.prototype,
   // This engine's enabled state is the same as the SteamEngine's.
   get prefName() {
     return "steam";
   }
 };
-Service.engineManager.register(StirlingEngine);
 
 // Tracking info/collections.
 var collectionsHelper = track_collections_helper();
 var upd = collectionsHelper.with_updated_collection;
 
 function sync_httpd_setup(handlers) {
 
   handlers["/1.1/johndoe/info/collections"] = collectionsHelper.handler;
@@ -75,25 +70,28 @@ async function setUp(server) {
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
   return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
 }
 
 const PAYLOAD = 42;
 
+add_task(async function setup() {
+  initTestLogging();
+  Service.engineManager.clear();
 
-function run_test() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
   validate_all_future_pings();
 
-  run_next_test();
-}
+  await Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(StirlingEngine);
+});
 
 add_task(async function test_newAccount() {
   enableValidationPrefs();
 
   _("Test: New account does not disable locally enabled engines.");
   let engine = Service.engineManager.get("steam");
   let server = sync_httpd_setup({
     "/1.1/johndoe/storage/meta/global": new ServerWBO("global", {}).handler(),
@@ -103,22 +101,22 @@ add_task(async function test_newAccount(
 
   try {
     _("Engine is enabled from the beginning.");
     Service._ignorePrefObserver = true;
     engine.enabled = true;
     Service._ignorePrefObserver = false;
 
     _("Sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Engine continues to be enabled.");
     do_check_true(engine.enabled);
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_enabledLocally() {
   enableValidationPrefs();
 
   _("Test: Engine is disabled on remote clients and enabled locally");
@@ -133,25 +131,25 @@ add_task(async function test_enabledLoca
   });
   await setUp(server);
 
   try {
     _("Enable engine locally.");
     engine.enabled = true;
 
     _("Sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Meta record now contains the new engine.");
     do_check_true(!!metaWBO.data.engines.steam);
 
     _("Engine continues to be enabled.");
     do_check_true(engine.enabled);
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_disabledLocally() {
   enableValidationPrefs();
 
   _("Test: Engine is enabled on remote clients and disabled locally");
@@ -174,28 +172,28 @@ add_task(async function test_disabledLoc
   try {
     _("Disable engine locally.");
     Service._ignorePrefObserver = true;
     engine.enabled = true;
     Service._ignorePrefObserver = false;
     engine.enabled = false;
 
     _("Sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Meta record no longer contains engine.");
     do_check_false(!!metaWBO.data.engines.steam);
 
     _("Server records are wiped.");
     do_check_eq(steamCollection.payload, undefined);
 
     _("Engine continues to be disabled.");
     do_check_false(engine.enabled);
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_disabledLocally_wipe503() {
   enableValidationPrefs();
 
   _("Test: Engine is enabled on remote clients and disabled locally");
@@ -229,17 +227,17 @@ add_task(async function test_disabledLoc
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   _("Sync.");
   Service.errorHandler.syncAndReportErrors();
   await promiseObserved;
   do_check_eq(Service.status.sync, SERVER_MAINTENANCE);
 
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 add_task(async function test_enabledRemotely() {
   enableValidationPrefs();
 
   _("Test: Engine is disabled locally and enabled on a remote client");
   Service.syncID = "abcdefghij";
@@ -266,25 +264,25 @@ add_task(async function test_enabledRemo
     let wbo = Service.collectionKeys.generateNewKeysWBO();
     wbo.encrypt(Service.identity.syncKeyBundle);
     do_check_eq(200, (await wbo.upload(Service.resource(Service.cryptoKeysURL))).status);
 
     _("Engine is disabled.");
     do_check_false(engine.enabled);
 
     _("Sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Engine is enabled.");
     do_check_true(engine.enabled);
 
     _("Meta record still present.");
     do_check_eq(metaWBO.data.engines.steam.syncID, engine.syncID);
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_disabledRemotelyTwoClients() {
   enableValidationPrefs();
 
   _("Test: Engine is enabled locally and disabled on a remote client... with two clients.");
@@ -304,33 +302,33 @@ add_task(async function test_disabledRem
 
   try {
     _("Enable engine locally.");
     Service._ignorePrefObserver = true;
     engine.enabled = true;
     Service._ignorePrefObserver = false;
 
     _("Sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Disable engine by deleting from meta/global.");
     let d = metaWBO.data;
     delete d.engines["steam"];
     metaWBO.payload = JSON.stringify(d);
     metaWBO.modified = Date.now() / 1000;
 
     _("Add a second client and verify that the local pref is changed.");
     Service.clientsEngine._store._remoteClients["foobar"] = {name: "foobar", type: "desktop"};
-    Service.sync();
+    await Service.sync();
 
     _("Engine is disabled.");
     do_check_false(engine.enabled);
 
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_disabledRemotely() {
   enableValidationPrefs();
 
   _("Test: Engine is enabled locally and disabled on a remote client");
@@ -347,23 +345,23 @@ add_task(async function test_disabledRem
 
   try {
     _("Enable engine locally.");
     Service._ignorePrefObserver = true;
     engine.enabled = true;
     Service._ignorePrefObserver = false;
 
     _("Sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Engine is not disabled: only one client.");
     do_check_true(engine.enabled);
 
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_dependentEnginesEnabledLocally() {
   enableValidationPrefs();
 
   _("Test: Engine is disabled on remote clients and enabled locally");
@@ -380,27 +378,27 @@ add_task(async function test_dependentEn
   });
   await setUp(server);
 
   try {
     _("Enable engine locally. Doing it on one is enough.");
     steamEngine.enabled = true;
 
     _("Sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Meta record now contains the new engines.");
     do_check_true(!!metaWBO.data.engines.steam);
     do_check_true(!!metaWBO.data.engines.stirling);
 
     _("Engines continue to be enabled.");
     do_check_true(steamEngine.enabled);
     do_check_true(stirlingEngine.enabled);
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_dependentEnginesDisabledLocally() {
   enableValidationPrefs();
 
   _("Test: Two dependent engines are enabled on remote clients and disabled locally");
@@ -431,26 +429,26 @@ add_task(async function test_dependentEn
     Service._ignorePrefObserver = true;
     steamEngine.enabled = true;
     do_check_true(stirlingEngine.enabled);
     Service._ignorePrefObserver = false;
     steamEngine.enabled = false;
     do_check_false(stirlingEngine.enabled);
 
     _("Sync.");
-    Service.sync();
+    await Service.sync();
 
     _("Meta record no longer contains engines.");
     do_check_false(!!metaWBO.data.engines.steam);
     do_check_false(!!metaWBO.data.engines.stirling);
 
     _("Server records are wiped.");
     do_check_eq(steamCollection.payload, undefined);
     do_check_eq(stirlingCollection.payload, undefined);
 
     _("Engines continue to be disabled.");
     do_check_false(steamEngine.enabled);
     do_check_false(stirlingEngine.enabled);
   } finally {
-    Service.startOver();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
--- a/services/sync/tests/unit/test_service_verifyLogin.js
+++ b/services/sync/tests/unit/test_service_verifyLogin.js
@@ -50,55 +50,55 @@ add_task(async function test_verifyLogin
 
   try {
     _("Force the initial state.");
     Service.status.service = STATUS_OK;
     do_check_eq(Service.status.service, STATUS_OK);
 
     _("Credentials won't check out because we're not configured yet.");
     Service.status.resetSync();
-    do_check_false(Service.verifyLogin());
+    do_check_false((await Service.verifyLogin()));
     do_check_eq(Service.status.service, CLIENT_NOT_CONFIGURED);
     do_check_eq(Service.status.login, LOGIN_FAILED_NO_USERNAME);
 
     _("Success if syncBundleKey is set.");
     Service.status.resetSync();
     await configureIdentity({ username: "johndoe" }, server);
-    do_check_true(Service.verifyLogin());
+    do_check_true((await Service.verifyLogin()));
     do_check_eq(Service.status.service, STATUS_OK);
     do_check_eq(Service.status.login, LOGIN_SUCCEEDED);
 
     _("If verifyLogin() encounters a server error, it flips on the backoff flag and notifies observers on a 503 with Retry-After.");
     Service.status.resetSync();
     await configureIdentity({ username: "janedoe" }, server);
     Service._updateCachedURLs();
     do_check_false(Service.status.enforceBackoff);
     let backoffInterval;
     Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
       Svc.Obs.remove("weave:service:backoff:interval", observe);
       backoffInterval = subject;
     });
-    do_check_false(Service.verifyLogin());
+    do_check_false((await Service.verifyLogin()));
     do_check_true(Service.status.enforceBackoff);
     do_check_eq(backoffInterval, 42);
     do_check_eq(Service.status.service, LOGIN_FAILED);
     do_check_eq(Service.status.login, SERVER_MAINTENANCE);
 
     _("Ensure a network error when finding the cluster sets the right Status bits.");
     Service.status.resetSync();
     Service.clusterURL = "";
     Service._clusterManager._findCluster = () => "http://localhost:12345/";
-    do_check_false(Service.verifyLogin());
+    do_check_false((await Service.verifyLogin()));
     do_check_eq(Service.status.service, LOGIN_FAILED);
     do_check_eq(Service.status.login, LOGIN_FAILED_NETWORK_ERROR);
 
     _("Ensure a network error when getting the collection info sets the right Status bits.");
     Service.status.resetSync();
     Service.clusterURL = "http://localhost:12345/";
-    do_check_false(Service.verifyLogin());
+    do_check_false((await Service.verifyLogin()));
     do_check_eq(Service.status.service, LOGIN_FAILED);
     do_check_eq(Service.status.login, LOGIN_FAILED_NETWORK_ERROR);
 
   } finally {
     Svc.Prefs.resetBranch("");
     server.stop(do_test_finished);
   }
 });
--- a/services/sync/tests/unit/test_service_wipeClient.js
+++ b/services/sync/tests/unit/test_service_wipeClient.js
@@ -3,83 +3,81 @@
 
 Cu.import("resource://services-sync/browserid_identity.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-Service.engineManager.clear();
-
 function CanDecryptEngine() {
   SyncEngine.call(this, "CanDecrypt", Service);
 }
 CanDecryptEngine.prototype = {
   __proto__: SyncEngine.prototype,
 
   // Override these methods with mocks for the test
-  canDecrypt: function canDecrypt() {
+  async canDecrypt() {
     return true;
   },
 
   wasWiped: false,
-  wipeClient: function wipeClient() {
+  async wipeClient() {
     this.wasWiped = true;
   }
 };
-Service.engineManager.register(CanDecryptEngine);
 
 
 function CannotDecryptEngine() {
   SyncEngine.call(this, "CannotDecrypt", Service);
 }
 CannotDecryptEngine.prototype = {
   __proto__: SyncEngine.prototype,
 
   // Override these methods with mocks for the test
-  canDecrypt: function canDecrypt() {
+  async canDecrypt() {
     return false;
   },
 
   wasWiped: false,
-  wipeClient: function wipeClient() {
+  async wipeClient() {
     this.wasWiped = true;
   }
 };
-Service.engineManager.register(CannotDecryptEngine);
+
+let canDecryptEngine;
+let cannotDecryptEngine;
+
+add_task(async function setup() {
+  initTestLogging();
+  Service.engineManager.clear();
 
+  await Service.engineManager.register(CanDecryptEngine);
+  await Service.engineManager.register(CannotDecryptEngine);
+  canDecryptEngine = Service.engineManager.get("candecrypt");
+  cannotDecryptEngine = Service.engineManager.get("cannotdecrypt");
+});
 
-add_test(function test_withEngineList() {
+add_task(async function test_withEngineList() {
   try {
     _("Ensure initial scenario.");
-    do_check_false(Service.engineManager.get("candecrypt").wasWiped);
-    do_check_false(Service.engineManager.get("cannotdecrypt").wasWiped);
+    do_check_false(canDecryptEngine.wasWiped);
+    do_check_false(cannotDecryptEngine.wasWiped);
 
     _("Wipe local engine data.");
-    Service.wipeClient(["candecrypt", "cannotdecrypt"]);
+    await Service.wipeClient(["candecrypt", "cannotdecrypt"]);
 
     _("Ensure only the engine that can decrypt was wiped.");
-    do_check_true(Service.engineManager.get("candecrypt").wasWiped);
-    do_check_false(Service.engineManager.get("cannotdecrypt").wasWiped);
+    do_check_true(canDecryptEngine.wasWiped);
+    do_check_false(cannotDecryptEngine.wasWiped);
   } finally {
-    Service.engineManager.get("candecrypt").wasWiped = false;
-    Service.engineManager.get("cannotdecrypt").wasWiped = false;
-    Service.startOver();
+    canDecryptEngine.wasWiped = false;
+    cannotDecryptEngine.wasWiped = false;
+    await Service.startOver();
   }
-
-  run_next_test();
 });
 
-add_test(function test_startOver_clears_keys() {
+add_task(async function test_startOver_clears_keys() {
   generateNewKeys(Service.collectionKeys);
   do_check_true(!!Service.collectionKeys.keyForCollection());
-  Service.startOver();
+  await Service.startOver();
   do_check_false(!!Service.collectionKeys.keyForCollection());
-
-  run_next_test();
 });
-
-function run_test() {
-  initTestLogging();
-
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_service_wipeServer.js
+++ b/services/sync/tests/unit/test_service_wipeServer.js
@@ -59,17 +59,17 @@ add_task(async function test_wipeServer_
     await setUpTestFixtures(server);
     await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
 
     _("Confirm initial environment.");
     do_check_false(steam_coll.deleted);
     do_check_false(diesel_coll.deleted);
 
     _("wipeServer() will happily ignore the non-existent collection and use the timestamp of the last DELETE that was successful.");
-    let timestamp = Service.wipeServer(["steam", "diesel", "petrol"]);
+    let timestamp = await Service.wipeServer(["steam", "diesel", "petrol"]);
     do_check_eq(timestamp, diesel_coll.timestamp);
 
     _("wipeServer stopped deleting after encountering an error with the 'petrol' collection, thus only 'steam' has been deleted.");
     do_check_true(steam_coll.deleted);
     do_check_true(diesel_coll.deleted);
 
   } finally {
     await promiseStopServer(server);
@@ -95,17 +95,17 @@ add_task(async function test_wipeServer_
 
     _("Confirm initial environment.");
     do_check_false(steam_coll.deleted);
     do_check_false(diesel_coll.deleted);
 
     _("wipeServer() will happily ignore the non-existent collection, delete the 'steam' collection and abort after an receiving an error on the 'petrol' collection.");
     let error;
     try {
-      Service.wipeServer(["non-existent", "steam", "petrol", "diesel"]);
+      await Service.wipeServer(["non-existent", "steam", "petrol", "diesel"]);
       do_throw("Should have thrown!");
     } catch (ex) {
       error = ex;
     }
     _("wipeServer() threw this exception: " + error);
     do_check_eq(error.status, 503);
 
     _("wipeServer stopped deleting after encountering an error with the 'petrol' collection, thus only 'steam' has been deleted.");
@@ -135,17 +135,17 @@ add_task(async function test_wipeServer_
 
   let server = httpd_setup({
     "/1.1/johndoe/storage": storageHandler
   });
   await setUpTestFixtures(server);
 
   _("Try deletion.");
   await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
-  let returnedTimestamp = Service.wipeServer();
+  let returnedTimestamp = await Service.wipeServer();
   do_check_true(deleted);
   do_check_eq(returnedTimestamp, serverTimestamp);
 
   await promiseStopServer(server);
   Svc.Prefs.resetBranch("");
 });
 
 add_task(async function test_wipeServer_all_404() {
@@ -167,17 +167,17 @@ add_task(async function test_wipeServer_
 
   let server = httpd_setup({
     "/1.1/johndoe/storage": storageHandler
   });
   await setUpTestFixtures(server);
 
   _("Try deletion.");
   await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
-  let returnedTimestamp = Service.wipeServer();
+  let returnedTimestamp = await Service.wipeServer();
   do_check_true(deleted);
   do_check_eq(returnedTimestamp, serverTimestamp);
 
   await promiseStopServer(server);
   Svc.Prefs.resetBranch("");
 });
 
 add_task(async function test_wipeServer_all_503() {
@@ -196,17 +196,17 @@ add_task(async function test_wipeServer_
     "/1.1/johndoe/storage": storageHandler
   });
   await setUpTestFixtures(server);
 
   _("Try deletion.");
   let error;
   try {
     await SyncTestingInfrastructure(server, "johndoe", "irrelevant");
-    Service.wipeServer();
+    await Service.wipeServer();
     do_throw("Should have thrown!");
   } catch (ex) {
     error = ex;
   }
   do_check_eq(error.status, 503);
 
   await promiseStopServer(server);
   Svc.Prefs.resetBranch("");
@@ -216,17 +216,17 @@ add_task(async function test_wipeServer_
   _("Service.wipeServer() throws if it encounters a network problem.");
   let server = httpd_setup({});
   await setUpTestFixtures(server);
 
   Service.clusterURL = "http://localhost:4352/";
 
   _("Try deletion.");
   try {
-    Service.wipeServer();
+    await Service.wipeServer();
     do_throw("Should have thrown!");
   } catch (ex) {
     do_check_eq(ex.result, Cr.NS_ERROR_CONNECTION_REFUSED);
   }
 
   Svc.Prefs.resetBranch("");
   await promiseStopServer(server);
 });
--- a/services/sync/tests/unit/test_syncedtabs.js
+++ b/services/sync/tests/unit/test_syncedtabs.js
@@ -26,16 +26,18 @@ MockTabsEngine.prototype = {
     return this.clients;
   },
 
   getOpenURLs() {
     return new Set();
   },
 }
 
+let tabsEngine;
+
 // A clients engine that doesn't need to be a constructor.
 let MockClientsEngine = {
   clientSettings: null, // Set in `configureClients`.
 
   isMobile(guid) {
     if (!guid.endsWith("desktop") && !guid.endsWith("mobile")) {
       throw new Error("this module expected guids to end with 'desktop' or 'mobile'");
     }
@@ -43,46 +45,47 @@ let MockClientsEngine = {
   },
   remoteClientExists(id) {
     return this.clientSettings[id] !== false;
   },
   getClientName(id) {
     if (this.clientSettings[id]) {
       return this.clientSettings[id];
     }
-    let engine = Weave.Service.engineManager.get("tabs");
-    return engine.clients[id].clientName;
+    return tabsEngine.clients[id].clientName;
   },
 }
 
-// Configure Sync with our mock tabs engine and force it to become initialized.
-Weave.Service.engineManager.unregister("tabs");
-Weave.Service.engineManager.register(MockTabsEngine);
-Weave.Service.clientsEngine = MockClientsEngine;
-
-// Tell the Sync XPCOM service it is initialized.
-let weaveXPCService = Cc["@mozilla.org/weave/service;1"]
-                        .getService(Ci.nsISupports)
-                        .wrappedJSObject;
-weaveXPCService.ready = true;
-
 function configureClients(clients, clientSettings = {}) {
-  // Configure the instance Sync created.
-  let engine = Weave.Service.engineManager.get("tabs");
   // each client record is expected to have an id.
   for (let [guid, client] of Object.entries(clients)) {
     client.id = guid;
   }
-  engine.clients = clients;
+  tabsEngine.clients = clients;
   // Apply clients collection overrides.
   MockClientsEngine.clientSettings = clientSettings;
   // Send an observer that pretends the engine just finished a sync.
   Services.obs.notifyObservers(null, "weave:engine:sync:finish", "tabs");
 }
 
+add_task(async function setup() {
+  await Weave.Service.promiseInitialized;
+  // Configure Sync with our mock tabs engine and force it to become initialized.
+  Weave.Service.engineManager.unregister("tabs");
+  await Weave.Service.engineManager.register(MockTabsEngine);
+  Weave.Service.clientsEngine = MockClientsEngine;
+  tabsEngine = Weave.Service.engineManager.get("tabs");
+
+  // Tell the Sync XPCOM service it is initialized.
+  let weaveXPCService = Cc["@mozilla.org/weave/service;1"]
+                          .getService(Ci.nsISupports)
+                          .wrappedJSObject;
+  weaveXPCService.ready = true;
+});
+
 // The tests.
 add_task(async function test_noClients() {
   // no clients, can't be tabs.
   await configureClients({});
 
   let tabs = await SyncedTabs.getTabClients();
   equal(Object.keys(tabs).length, 0);
 });
--- a/services/sync/tests/unit/test_syncengine.js
+++ b/services/sync/tests/unit/test_syncengine.js
@@ -1,41 +1,46 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-function makeSteamEngine() {
-  return new SyncEngine("Steam", Service);
+async function makeSteamEngine() {
+  let engine = new SyncEngine("Steam", Service);
+  await engine.initialize();
+  return engine;
 }
 
-var server = httpd_setup({});
+let server;
 
+add_task(async function setup() {
+  server = httpd_setup({});
+});
 
 add_task(async function test_url_attributes() {
   _("SyncEngine url attributes");
   await SyncTestingInfrastructure(server);
   Service.clusterURL = "https://cluster/1.1/foo/";
-  let engine = makeSteamEngine();
+  let engine = await makeSteamEngine();
   try {
     do_check_eq(engine.storageURL, "https://cluster/1.1/foo/storage/");
     do_check_eq(engine.engineURL, "https://cluster/1.1/foo/storage/steam");
     do_check_eq(engine.metaURL, "https://cluster/1.1/foo/storage/meta/global");
   } finally {
     Svc.Prefs.resetBranch("");
   }
 });
 
 add_task(async function test_syncID() {
   _("SyncEngine.syncID corresponds to preference");
   await SyncTestingInfrastructure(server);
-  let engine = makeSteamEngine();
+  let engine = await makeSteamEngine();
   try {
     // Ensure pristine environment
     do_check_eq(Svc.Prefs.get("steam.syncID"), undefined);
 
     // Performing the first get on the attribute will generate a new GUID.
     do_check_eq(engine.syncID, "fake-guid-00");
     do_check_eq(Svc.Prefs.get("steam.syncID"), "fake-guid-00");
 
@@ -45,17 +50,17 @@ add_task(async function test_syncID() {
   } finally {
     Svc.Prefs.resetBranch("");
   }
 })
 
 add_task(async function test_lastSync() {
   _("SyncEngine.lastSync and SyncEngine.lastSyncLocal correspond to preferences");
   await SyncTestingInfrastructure(server);
-  let engine = makeSteamEngine();
+  let engine = await makeSteamEngine();
   try {
     // Ensure pristine environment
     do_check_eq(Svc.Prefs.get("steam.lastSync"), undefined);
     do_check_eq(engine.lastSync, 0);
     do_check_eq(Svc.Prefs.get("steam.lastSyncLocal"), undefined);
     do_check_eq(engine.lastSyncLocal, 0);
 
     // Floats are properly stored as floats and synced with the preference
@@ -76,116 +81,116 @@ add_task(async function test_lastSync() 
     Svc.Prefs.resetBranch("");
   }
 })
 
 add_task(async function test_toFetch() {
   _("SyncEngine.toFetch corresponds to file on disk");
   let syncTesting = await SyncTestingInfrastructure(server);
   const filename = "weave/toFetch/steam.json";
-  let engine = makeSteamEngine();
+  let engine = await makeSteamEngine();
   try {
     // Ensure pristine environment
     do_check_eq(engine.toFetch.length, 0);
 
     // Write file to disk
     let toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
     engine.toFetch = toFetch;
     do_check_eq(engine.toFetch, toFetch);
     // toFetch is written asynchronously
-    engine._store._sleep(0);
+    await Async.promiseYield();
     let fakefile = syncTesting.fakeFilesystem.fakeContents[filename];
     do_check_eq(fakefile, JSON.stringify(toFetch));
 
     // Read file from disk
     toFetch = [Utils.makeGUID(), Utils.makeGUID()];
     syncTesting.fakeFilesystem.fakeContents[filename] = JSON.stringify(toFetch);
-    engine.loadToFetch();
+    await engine.loadToFetch();
     do_check_eq(engine.toFetch.length, 2);
     do_check_eq(engine.toFetch[0], toFetch[0]);
     do_check_eq(engine.toFetch[1], toFetch[1]);
   } finally {
     Svc.Prefs.resetBranch("");
   }
 });
 
 add_task(async function test_previousFailed() {
   _("SyncEngine.previousFailed corresponds to file on disk");
   let syncTesting = await SyncTestingInfrastructure(server);
   const filename = "weave/failed/steam.json";
-  let engine = makeSteamEngine();
+  let engine = await makeSteamEngine();
   try {
     // Ensure pristine environment
     do_check_eq(engine.previousFailed.length, 0);
 
     // Write file to disk
     let previousFailed = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
     engine.previousFailed = previousFailed;
     do_check_eq(engine.previousFailed, previousFailed);
     // previousFailed is written asynchronously
-    engine._store._sleep(0);
+    await Async.promiseYield();
     let fakefile = syncTesting.fakeFilesystem.fakeContents[filename];
     do_check_eq(fakefile, JSON.stringify(previousFailed));
 
     // Read file from disk
     previousFailed = [Utils.makeGUID(), Utils.makeGUID()];
     syncTesting.fakeFilesystem.fakeContents[filename] = JSON.stringify(previousFailed);
-    engine.loadPreviousFailed();
+    await engine.loadPreviousFailed();
     do_check_eq(engine.previousFailed.length, 2);
     do_check_eq(engine.previousFailed[0], previousFailed[0]);
     do_check_eq(engine.previousFailed[1], previousFailed[1]);
   } finally {
     Svc.Prefs.resetBranch("");
   }
 });
 
 add_task(async function test_resetClient() {
   _("SyncEngine.resetClient resets lastSync and toFetch");
   await SyncTestingInfrastructure(server);
-  let engine = makeSteamEngine();
+  let engine = await makeSteamEngine();
   try {
     // Ensure pristine environment
     do_check_eq(Svc.Prefs.get("steam.lastSync"), undefined);
     do_check_eq(Svc.Prefs.get("steam.lastSyncLocal"), undefined);
     do_check_eq(engine.toFetch.length, 0);
 
     engine.lastSync = 123.45;
     engine.lastSyncLocal = 67890;
     engine.toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
     engine.previousFailed = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
 
-    engine.resetClient();
+    await engine.resetClient();
     do_check_eq(engine.lastSync, 0);
     do_check_eq(engine.lastSyncLocal, 0);
     do_check_eq(engine.toFetch.length, 0);
     do_check_eq(engine.previousFailed.length, 0);
   } finally {
     Svc.Prefs.resetBranch("");
   }
 });
 
 add_task(async function test_wipeServer() {
   _("SyncEngine.wipeServer deletes server data and resets the client.");
-  let engine = makeSteamEngine();
+  let engine = await makeSteamEngine();
 
   const PAYLOAD = 42;
   let steamCollection = new ServerWBO("steam", PAYLOAD);
   let steamServer = httpd_setup({
     "/1.1/foo/storage/steam": steamCollection.handler()
   });
   await SyncTestingInfrastructure(steamServer);
   do_test_pending();
 
   try {
     // Some data to reset.
     engine.lastSync = 123.45;
     engine.toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
 
     _("Wipe server data and reset client.");
-    engine.wipeServer();
+    await engine.wipeServer();
     do_check_eq(steamCollection.payload, undefined);
     do_check_eq(engine.lastSync, 0);
     do_check_eq(engine.toFetch.length, 0);
 
   } finally {
     steamServer.stop(do_test_finished);
     Svc.Prefs.resetBranch("");
   }
--- a/services/sync/tests/unit/test_syncengine_sync.js
+++ b/services/sync/tests/unit/test_syncengine_sync.js
@@ -105,17 +105,17 @@ add_task(async function test_syncStartup
     do_check_true(!!collection.payload("flying"));
     do_check_true(!!collection.payload("scotsman"));
 
     engine.lastSync = Date.now() / 1000;
     engine.lastSyncLocal = Date.now();
 
     // Trying to prompt a wipe -- we no longer track CryptoMeta per engine,
     // so it has nothing to check.
-    engine._syncStartup();
+    await engine._syncStartup();
 
     // The meta/global WBO has been filled with data about the engine
     let engineData = metaGlobal.payload.engines["rotary"];
     do_check_eq(engineData.version, engine.version);
     do_check_eq(engineData.syncID, engine.syncID);
 
     // Sync was reset and server data was wiped
     do_check_eq(engine.lastSync, 0);
@@ -139,17 +139,17 @@ add_task(async function test_syncStartup
 
   let engine = makeRotaryEngine();
   try {
 
     // The server has a newer version of the data and our engine can
     // handle.  That should give us an exception.
     let error;
     try {
-      engine._syncStartup();
+      await engine._syncStartup();
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.failureCode, VERSION_OUT_OF_DATE);
 
   } finally {
     await cleanAndGo(engine, server);
   }
@@ -173,17 +173,17 @@ add_task(async function test_syncStartup
   try {
 
     // Confirm initial environment
     do_check_eq(engine.syncID, "fake-guid-00");
     do_check_eq(engine._tracker.changedIDs["rekolok"], undefined);
 
     engine.lastSync = Date.now() / 1000;
     engine.lastSyncLocal = Date.now();
-    engine._syncStartup();
+    await engine._syncStartup();
 
     // The engine has assumed the server's syncID
     do_check_eq(engine.syncID, "foobar");
 
     // Sync was reset
     do_check_eq(engine.lastSync, 0);
 
   } finally {
@@ -201,17 +201,17 @@ add_task(async function test_processInco
   });
 
   await SyncTestingInfrastructure(server);
 
   let engine = makeRotaryEngine();
   try {
 
     // Merely ensure that this code path is run without any errors
-    engine._processIncoming();
+    await engine._processIncoming();
     do_check_eq(engine.lastSync, 0);
 
   } finally {
     await cleanAndGo(engine, server);
   }
 });
 
 
@@ -252,18 +252,18 @@ add_task(async function test_processInco
 
     // Confirm initial environment
     do_check_eq(engine.lastSync, 0);
     do_check_eq(engine.lastModified, null);
     do_check_eq(engine._store.items.flying, undefined);
     do_check_eq(engine._store.items.scotsman, undefined);
     do_check_eq(engine._store.items["../pathological"], undefined);
 
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     // Timestamps of last sync and last server modification are set.
     do_check_true(engine.lastSync > 0);
     do_check_true(engine.lastModified > 0);
 
     // Local records have been created from the server data.
     do_check_eq(engine._store.items.flying, "LNER Class A3 4472");
     do_check_eq(engine._store.items.scotsman, "Flying Scotsman");
@@ -346,18 +346,18 @@ add_task(async function test_processInco
     // Confirm initial environment
     do_check_eq(engine._store.items.newrecord, undefined);
     do_check_eq(engine._store.items.newerserver, "New data, but not as new as server!");
     do_check_eq(engine._store.items.olderidentical, "Older but identical");
     do_check_eq(engine._store.items.updateclient, "Got data?");
     do_check_eq(engine._store.items.nukeme, "Nuke me!");
     do_check_true(engine._tracker.changedIDs["olderidentical"] > 0);
 
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     // Timestamps of last sync and last server modification are set.
     do_check_true(engine.lastSync > 0);
     do_check_true(engine.lastModified > 0);
 
     // The new record is created.
     do_check_eq(engine._store.items.newrecord, "New stuff...");
 
@@ -398,21 +398,21 @@ add_task(async function test_processInco
   let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
   let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
   server.insertWBO(user, "rotary", wbo);
 
   record = encryptPayload({id: "DUPE_LOCAL", denomination: "local"});
   wbo = new ServerWBO("DUPE_LOCAL", record, now - 1);
   server.insertWBO(user, "rotary", wbo);
 
-  engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
-  do_check_true(engine._store.itemExists("DUPE_LOCAL"));
-  do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+  await engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
+  do_check_true((await engine._store.itemExists("DUPE_LOCAL")));
+  do_check_eq("DUPE_LOCAL", (await engine._findDupe({id: "DUPE_INCOMING"})));
 
-  engine._sync();
+  await engine._sync();
 
   do_check_attribute_count(engine._store.items, 1);
   do_check_true("DUPE_INCOMING" in engine._store.items);
 
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
   do_check_neq(undefined, collection.wbo("DUPE_INCOMING"));
 
@@ -428,19 +428,19 @@ add_task(async function test_processInco
   engine.lastSync = now;
   engine.lastModified = now + 1;
 
   let record = encryptPayload({id: "entry", denomination: "denomination"});
   let wbo = new ServerWBO("entry", record, now + 2);
   server.insertWBO(user, "rotary", wbo);
 
   engine._store.items = {entry: "denomination"};
-  do_check_true(engine._store.itemExists("entry"));
+  do_check_true((await engine._store.itemExists("entry")));
 
-  engine._sync();
+  await engine._sync();
 
   do_check_attribute_count(engine._store.items, 1);
 
   await cleanAndGo(engine, server);
 });
 
 add_task(async function test_processIncoming_reconcile_locally_deleted_dupe_new() {
   _("Ensure locally deleted duplicate record newer than incoming is handled.");
@@ -457,21 +457,21 @@ add_task(async function test_processInco
 
   let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
   let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
   server.insertWBO(user, "rotary", wbo);
 
   // Simulate a locally-deleted item.
   engine._store.items = {};
   engine._tracker.addChangedID("DUPE_LOCAL", now + 3);
-  do_check_false(engine._store.itemExists("DUPE_LOCAL"));
-  do_check_false(engine._store.itemExists("DUPE_INCOMING"));
-  do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+  do_check_false((await engine._store.itemExists("DUPE_LOCAL")));
+  do_check_false((await engine._store.itemExists("DUPE_INCOMING")));
+  do_check_eq("DUPE_LOCAL", (await engine._findDupe({id: "DUPE_INCOMING"})));
 
-  engine._sync();
+  await engine._sync();
 
   // After the sync, the server's payload for the original ID should be marked
   // as deleted.
   do_check_empty(engine._store.items);
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
   wbo = collection.wbo("DUPE_INCOMING");
   do_check_neq(null, wbo);
@@ -495,21 +495,21 @@ add_task(async function test_processInco
 
   let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
   let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
   server.insertWBO(user, "rotary", wbo);
 
   // Simulate a locally-deleted item.
   engine._store.items = {};
   engine._tracker.addChangedID("DUPE_LOCAL", now + 1);
-  do_check_false(engine._store.itemExists("DUPE_LOCAL"));
-  do_check_false(engine._store.itemExists("DUPE_INCOMING"));
-  do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+  do_check_false((await engine._store.itemExists("DUPE_LOCAL")));
+  do_check_false((await engine._store.itemExists("DUPE_INCOMING")));
+  do_check_eq("DUPE_LOCAL", (await engine._findDupe({id: "DUPE_INCOMING"})));
 
-  engine._sync();
+  await engine._sync();
 
   // Since the remote change is newer, the incoming item should exist locally.
   do_check_attribute_count(engine._store.items, 1);
   do_check_true("DUPE_INCOMING" in engine._store.items);
   do_check_eq("incoming", engine._store.items.DUPE_INCOMING);
 
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
@@ -529,22 +529,22 @@ add_task(async function test_processInco
   engine.lastSync = now;
   engine.lastModified = now + 1;
 
   // The local record is newer than the incoming one, so it should be retained.
   let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
   let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
   server.insertWBO(user, "rotary", wbo);
 
-  engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
+  await engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
   engine._tracker.addChangedID("DUPE_LOCAL", now + 3);
-  do_check_true(engine._store.itemExists("DUPE_LOCAL"));
-  do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+  do_check_true((await engine._store.itemExists("DUPE_LOCAL")));
+  do_check_eq("DUPE_LOCAL", (await engine._findDupe({id: "DUPE_INCOMING"})));
 
-  engine._sync();
+  await engine._sync();
 
   // The ID should have been changed to incoming.
   do_check_attribute_count(engine._store.items, 1);
   do_check_true("DUPE_INCOMING" in engine._store.items);
 
   // On the server, the local ID should be deleted and the incoming ID should
   // have its payload set to what was in the local record.
   let collection = server.getCollection(user, "rotary");
@@ -567,39 +567,38 @@ add_task(async function test_processInco
   let now = Date.now() / 1000 - 10;
   engine.lastSync = now;
   engine.lastModified = now + 1;
 
   let record = encryptPayload({id: "DUPE_INCOMING", denomination: "incoming"});
   let wbo = new ServerWBO("DUPE_INCOMING", record, now + 2);
   server.insertWBO(user, "rotary", wbo);
 
-  engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
+  await engine._store.create({id: "DUPE_LOCAL", denomination: "local"});
   engine._tracker.addChangedID("DUPE_LOCAL", now + 1);
-  do_check_true(engine._store.itemExists("DUPE_LOCAL"));
-  do_check_eq("DUPE_LOCAL", engine._findDupe({id: "DUPE_INCOMING"}));
+  do_check_true((await engine._store.itemExists("DUPE_LOCAL")));
+  do_check_eq("DUPE_LOCAL", (await engine._findDupe({id: "DUPE_INCOMING"})));
 
-  engine._sync();
+  await engine._sync();
 
   // The ID should have been changed to incoming.
   do_check_attribute_count(engine._store.items, 1);
   do_check_true("DUPE_INCOMING" in engine._store.items);
 
   // On the server, the local ID should be deleted and the incoming ID should
   // have its payload retained.
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
   wbo = collection.wbo("DUPE_INCOMING");
   do_check_neq(undefined, wbo);
   let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
   do_check_eq("incoming", payload.denomination);
   await cleanAndGo(engine, server);
 });
 
-
 add_task(async function test_processIncoming_resume_toFetch() {
   _("toFetch and previousFailed items left over from previous syncs are fetched on the next sync, along with new items.");
 
   const LASTSYNC = Date.now() / 1000;
 
   // Server records that will be downloaded
   let collection = new ServerCollection();
   collection.insert("flying",
@@ -641,18 +640,18 @@ add_task(async function test_processInco
                                          syncID: engine.syncID}};
   try {
 
     // Confirm initial environment
     do_check_eq(engine._store.items.flying, undefined);
     do_check_eq(engine._store.items.scotsman, undefined);
     do_check_eq(engine._store.items.rekolok, undefined);
 
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     // Local records have been created from the server data.
     do_check_eq(engine._store.items.flying, "LNER Class A3 4472");
     do_check_eq(engine._store.items.scotsman, "Flying Scotsman");
     do_check_eq(engine._store.items.rekolok, "Rekonstruktionslokomotive");
     do_check_eq(engine._store.items.failed0, "Record No. 0");
     do_check_eq(engine._store.items.failed1, "Record No. 1");
     do_check_eq(engine._store.items.failed2, "Record No. 2");
@@ -666,20 +665,20 @@ add_task(async function test_processInco
 add_task(async function test_processIncoming_applyIncomingBatchSize_smaller() {
   _("Ensure that a number of incoming items less than applyIncomingBatchSize is still applied.");
 
   // Engine that doesn't like the first and last record it's given.
   const APPLY_BATCH_SIZE = 10;
   let engine = makeRotaryEngine();
   engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
   engine._store._applyIncomingBatch = engine._store.applyIncomingBatch;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     let failed1 = records.shift();
     let failed2 = records.pop();
-    this._applyIncomingBatch(records);
+    await this._applyIncomingBatch(records);
     return [failed1.id, failed2.id];
   };
 
   // Let's create less than a batch worth of server side records.
   let collection = new ServerCollection();
   for (let i = 0; i < APPLY_BATCH_SIZE - 1; i++) {
     let id = "record-no-" + i;
     let payload = encryptPayload({id, denomination: "Record No. " + id});
@@ -696,18 +695,18 @@ add_task(async function test_processInco
                                               new WBORecord(engine.metaURL));
   meta_global.payload.engines = {rotary: {version: engine.version,
                                          syncID: engine.syncID}};
   try {
 
     // Confirm initial environment
     do_check_empty(engine._store.items);
 
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     // Records have been applied and the expected failures have failed.
     do_check_attribute_count(engine._store.items, APPLY_BATCH_SIZE - 1 - 2);
     do_check_eq(engine.toFetch.length, 0);
     do_check_eq(engine.previousFailed.length, 2);
     do_check_eq(engine.previousFailed[0], "record-no-0");
     do_check_eq(engine.previousFailed[1], "record-no-8");
 
@@ -722,20 +721,20 @@ add_task(async function test_processInco
 
   const APPLY_BATCH_SIZE = 10;
 
   // Engine that applies records in batches.
   let engine = makeRotaryEngine();
   engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
   let batchCalls = 0;
   engine._store._applyIncomingBatch = engine._store.applyIncomingBatch;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     batchCalls += 1;
     do_check_eq(records.length, APPLY_BATCH_SIZE);
-    this._applyIncomingBatch.apply(this, arguments);
+    await this._applyIncomingBatch.apply(this, arguments);
   };
 
   // Let's create three batches worth of server side records.
   let collection = new ServerCollection();
   for (let i = 0; i < APPLY_BATCH_SIZE * 3; i++) {
     let id = "record-no-" + i;
     let payload = encryptPayload({id, denomination: "Record No. " + id});
     collection.insert(id, payload);
@@ -751,18 +750,18 @@ add_task(async function test_processInco
                                               new WBORecord(engine.metaURL));
   meta_global.payload.engines = {rotary: {version: engine.version,
                                          syncID: engine.syncID}};
   try {
 
     // Confirm initial environment
     do_check_empty(engine._store.items);
 
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     // Records have been applied in 3 batches.
     do_check_eq(batchCalls, 3);
     do_check_attribute_count(engine._store.items, APPLY_BATCH_SIZE * 3);
 
   } finally {
     await cleanAndGo(engine, server);
   }
@@ -774,18 +773,18 @@ add_task(async function test_processInco
 
   const APPLY_BATCH_SIZE = 5;
   const NUMBER_OF_RECORDS = 15;
 
   // Engine that fails the first record.
   let engine = makeRotaryEngine();
   engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
   engine._store._applyIncomingBatch = engine._store.applyIncomingBatch;
-  engine._store.applyIncomingBatch = function(records) {
-    engine._store._applyIncomingBatch(records.slice(1));
+  engine._store.applyIncomingBatch = async function(records) {
+    await engine._store._applyIncomingBatch(records.slice(1));
     return [records[0].id];
   };
 
   // Create a batch of server side records.
   let collection = new ServerCollection();
   for (var i = 0; i < NUMBER_OF_RECORDS; i++) {
     let id = "record-no-" + i;
     let payload = encryptPayload({id, denomination: "Record No. " + id});
@@ -814,35 +813,35 @@ add_task(async function test_processInco
     function onApplied(count) {
       _("Called with " + JSON.stringify(counts));
       counts = count;
       called++;
     }
     Svc.Obs.add("weave:engine:sync:applied", onApplied);
 
     // Do sync.
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     // Confirm failures.
     do_check_attribute_count(engine._store.items, 12);
     do_check_eq(engine.previousFailed.length, 3);
     do_check_eq(engine.previousFailed[0], "record-no-0");
     do_check_eq(engine.previousFailed[1], "record-no-5");
     do_check_eq(engine.previousFailed[2], "record-no-10");
 
     // There are newly failed records and they are reported.
     do_check_eq(called, 1);
     do_check_eq(counts.failed, 3);
     do_check_eq(counts.applied, 15);
     do_check_eq(counts.newFailed, 3);
     do_check_eq(counts.succeeded, 12);
 
     // Sync again, 1 of the failed items are the same, the rest didn't fail.
-    engine._processIncoming();
+    await engine._processIncoming();
 
     // Confirming removed failures.
     do_check_attribute_count(engine._store.items, 14);
     do_check_eq(engine.previousFailed.length, 1);
     do_check_eq(engine.previousFailed[0], "record-no-0");
 
     do_check_eq(called, 2);
     do_check_eq(counts.failed, 1);
@@ -863,18 +862,18 @@ add_task(async function test_processInco
 
   const APPLY_BATCH_SIZE = 4;
   const NUMBER_OF_RECORDS = 14;
 
   // Engine that fails the first 2 records.
   let engine = makeRotaryEngine();
   engine.mobileGUIDFetchBatchSize = engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
   engine._store._applyIncomingBatch = engine._store.applyIncomingBatch;
-  engine._store.applyIncomingBatch = function(records) {
-    engine._store._applyIncomingBatch(records.slice(2));
+  engine._store.applyIncomingBatch = async function(records) {
+    await engine._store._applyIncomingBatch(records.slice(2));
     return [records[0].id, records[1].id];
   };
 
   // Create a batch of server side records.
   let collection = new ServerCollection();
   for (var i = 0; i < NUMBER_OF_RECORDS; i++) {
     let id = "record-no-" + i;
     let payload = encryptPayload({id, denomination: "Record No. " + i});
@@ -899,33 +898,33 @@ add_task(async function test_processInco
     do_check_empty(engine._store.items);
 
     // Initial failed items in previousFailed to be reset.
     let previousFailed = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
     engine.previousFailed = previousFailed;
     do_check_eq(engine.previousFailed, previousFailed);
 
     // Do sync.
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     // Expected result: 4 sync batches with 2 failures each => 8 failures
     do_check_attribute_count(engine._store.items, 6);
     do_check_eq(engine.previousFailed.length, 8);
     do_check_eq(engine.previousFailed[0], "record-no-0");
     do_check_eq(engine.previousFailed[1], "record-no-1");
     do_check_eq(engine.previousFailed[2], "record-no-4");
     do_check_eq(engine.previousFailed[3], "record-no-5");
     do_check_eq(engine.previousFailed[4], "record-no-8");
     do_check_eq(engine.previousFailed[5], "record-no-9");
     do_check_eq(engine.previousFailed[6], "record-no-12");
     do_check_eq(engine.previousFailed[7], "record-no-13");
 
     // Sync again with the same failed items (records 0, 1, 8, 9).
-    engine._processIncoming();
+    await engine._processIncoming();
 
     // A second sync with the same failed items should not add the same items again.
     // Items that did not fail a second time should no longer be in previousFailed.
     do_check_attribute_count(engine._store.items, 10);
     do_check_eq(engine.previousFailed.length, 4);
     do_check_eq(engine.previousFailed[0], "record-no-0");
     do_check_eq(engine.previousFailed[1], "record-no-1");
     do_check_eq(engine.previousFailed[2], "record-no-8");
@@ -967,24 +966,24 @@ add_task(async function test_processInco
                          "record-no-" + (42 + APPLY_BATCH_SIZE * 2),
                          "record-no-" + (23 + APPLY_BATCH_SIZE * 2),
                          "record-no-" + (2 + APPLY_BATCH_SIZE * 3),
                          "record-no-" + (1 + APPLY_BATCH_SIZE * 3)];
   let engine = makeRotaryEngine();
   engine.applyIncomingBatchSize = APPLY_BATCH_SIZE;
 
   engine.__reconcile = engine._reconcile;
-  engine._reconcile = function _reconcile(record) {
+  engine._reconcile = async function _reconcile(record) {
     if (BOGUS_RECORDS.indexOf(record.id) % 2 == 0) {
       throw "I don't like this record! Baaaaaah!";
     }
     return this.__reconcile.apply(this, arguments);
   };
   engine._store._applyIncoming = engine._store.applyIncoming;
-  engine._store.applyIncoming = function(record) {
+  engine._store.applyIncoming = async function(record) {
     if (BOGUS_RECORDS.indexOf(record.id) % 2 == 1) {
       throw "I don't like this record! Baaaaaah!";
     }
     return this._applyIncoming.apply(this, arguments);
   };
 
   // Keep track of requests made of a collection.
   let count = 0;
@@ -1019,18 +1018,18 @@ add_task(async function test_processInco
     let observerSubject;
     let observerData;
     Svc.Obs.add("weave:engine:sync:applied", function onApplied(subject, data) {
       Svc.Obs.remove("weave:engine:sync:applied", onApplied);
       observerSubject = subject;
       observerData = data;
     });
 
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     // Ensure that all records but the bogus 4 have been applied.
     do_check_attribute_count(engine._store.items,
                              NUMBER_OF_RECORDS - BOGUS_RECORDS.length);
 
     // Ensure that the bogus records will be fetched again on the next sync.
     do_check_eq(engine.previousFailed.length, BOGUS_RECORDS.length);
     engine.previousFailed.sort();
@@ -1042,33 +1041,32 @@ add_task(async function test_processInco
     // Ensure the observer was notified
     do_check_eq(observerData, engine.name);
     do_check_eq(observerSubject.failed, BOGUS_RECORDS.length);
     do_check_eq(observerSubject.newFailed, BOGUS_RECORDS.length);
 
     // Testing batching of failed item fetches.
     // Try to sync again. Ensure that we split the request into chunks to avoid
     // URI length limitations.
-    function batchDownload(batchSize) {
+    async function batchDownload(batchSize) {
       count = 0;
       uris  = [];
       engine.guidFetchBatchSize = batchSize;
-      engine._processIncoming();
+      await engine._processIncoming();
       _("Tried again. Requests: " + count + "; URIs: " + JSON.stringify(uris));
       return count;
     }
 
     // There are 8 bad records, so this needs 3 fetches.
     _("Test batching with ID batch size 3, normal mobile batch size.");
-    do_check_eq(batchDownload(3), 3);
+    do_check_eq((await batchDownload(3)), 3);
 
     // Now see with a more realistic limit.
     _("Test batching with sufficient ID batch size.");
-    do_check_eq(batchDownload(BOGUS_RECORDS.length), 1);
-
+    do_check_eq((await batchDownload(BOGUS_RECORDS.length)), 1);
   } finally {
     await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_decrypt_failed() {
   _("Ensure that records failing to decrypt are either replaced or refetched.");
@@ -1179,18 +1177,18 @@ add_task(async function test_uploadOutgo
 
   try {
 
     // Confirm initial environment
     do_check_eq(engine.lastSyncLocal, 0);
     do_check_eq(collection.payload("flying"), undefined);
     do_check_eq(collection.payload("scotsman"), undefined);
 
-    engine._syncStartup();
-    engine._uploadOutgoing();
+    await engine._syncStartup();
+    await engine._uploadOutgoing();
 
     // Local timestamp has been set.
     do_check_true(engine.lastSyncLocal > 0);
 
     // Ensure the marked record ('scotsman') has been uploaded and is
     // no longer marked.
     do_check_eq(collection.payload("flying"), undefined);
     do_check_true(!!collection.payload("scotsman"));
@@ -1235,36 +1233,36 @@ async function test_uploadOutgoing_max_r
                                          syncID: engine.syncID}};
 
   try {
     // Confirm initial environment
     do_check_eq(engine.lastSyncLocal, 0);
     do_check_eq(collection.payload("flying"), undefined);
     do_check_eq(collection.payload("scotsman"), undefined);
 
-    engine._syncStartup();
-    engine._uploadOutgoing();
+    await engine._syncStartup();
+    await engine._uploadOutgoing();
 
     if (!allowSkippedRecord) {
       do_throw("should not get here");
     }
 
-    engine.trackRemainingChanges();
+    await engine.trackRemainingChanges();
 
     // Check we uploaded the other record to the server
     do_check_true(collection.payload("scotsman"));
     // And that we won't try to upload the huge record next time.
     do_check_eq(engine._tracker.changedIDs["flying"], undefined);
 
   } catch (e) {
     if (allowSkippedRecord) {
       do_throw("should not get here");
     }
 
-    engine.trackRemainingChanges();
+    await engine.trackRemainingChanges();
 
     // Check that we will try to upload the huge record next time
     do_check_eq(engine._tracker.changedIDs["flying"], 1000);
   } finally {
     // Check we didn't upload the oversized record to the server
     do_check_eq(collection.payload("flying"), undefined);
     await cleanAndGo(engine, server);
   }
@@ -1388,18 +1386,18 @@ add_task(async function test_uploadOutgo
   });
 
   await SyncTestingInfrastructure(server);
   try {
 
     // Confirm initial environment.
     do_check_eq(noOfUploads, 0);
 
-    engine._syncStartup();
-    engine._uploadOutgoing();
+    await engine._syncStartup();
+    await engine._uploadOutgoing();
 
     // Ensure all records have been uploaded.
     for (i = 0; i < 234; i++) {
       do_check_true(!!collection.payload("record-no-" + i));
     }
 
     // Ensure that the uploads were performed in batches of MAX_UPLOAD_RECORDS.
     do_check_eq(noOfUploads, Math.ceil(234 / MAX_UPLOAD_RECORDS));
@@ -1419,17 +1417,17 @@ async function createRecordFailTelemetry
       "/1.1/foo/storage/rotary": collection.handler()
   });
 
   await SyncTestingInfrastructure(server);
 
   let engine = makeRotaryEngine();
   engine.allowSkippedRecord = allowSkippedRecord;
   let oldCreateRecord = engine._store.createRecord;
-  engine._store.createRecord = (id, col) => {
+  engine._store.createRecord = async (id, col) => {
     if (id != "flying") {
       throw new Error("oops");
     }
     return oldCreateRecord.call(engine._store, id, col);
   }
   engine.lastSync = 123; // needs to be non-zero so that tracker is queried
   engine._store.items = {flying: "LNER Class A3 4472",
                          scotsman: "Flying Scotsman"};
@@ -1518,20 +1516,20 @@ add_task(async function test_uploadOutgo
 
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler()
   });
 
   await SyncTestingInfrastructure(server);
 
   try {
-    engine._syncStartup();
+    await engine._syncStartup();
     let error = null;
     try {
-      engine._uploadOutgoing();
+      await engine._uploadOutgoing();
     } catch (e) {
       error = e;
     }
     ok(!!error);
   } finally {
     await cleanAndGo(engine, server);
   }
 });
@@ -1543,17 +1541,17 @@ add_task(async function test_syncFinish_
   let server = httpd_setup({});
 
   await SyncTestingInfrastructure(server);
   let engine = makeRotaryEngine();
   engine._delete = {}; // Nothing to delete
   engine._tracker.score = 100;
 
   // _syncFinish() will reset the engine's score.
-  engine._syncFinish();
+  await engine._syncFinish();
   do_check_eq(engine.score, 0);
   server.stop(run_next_test);
 });
 
 
 add_task(async function test_syncFinish_deleteByIds() {
   _("SyncEngine._syncFinish deletes server records slated for deletion (list of record IDs).");
 
@@ -1571,17 +1569,17 @@ add_task(async function test_syncFinish_
   let server = httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler()
   });
   await SyncTestingInfrastructure(server);
 
   let engine = makeRotaryEngine();
   try {
     engine._delete = {ids: ["flying", "rekolok"]};
-    engine._syncFinish();
+    await engine._syncFinish();
 
     // The 'flying' and 'rekolok' records were deleted while the
     // 'scotsman' one wasn't.
     do_check_eq(collection.payload("flying"), undefined);
     do_check_true(!!collection.payload("scotsman"));
     do_check_eq(collection.payload("rekolok"), undefined);
 
     // The deletion todo list has been reset.
@@ -1633,17 +1631,17 @@ add_task(async function test_syncFinish_
     // up and all records that are less than 200 mins old (which are
     // records 0 thru 90).
     engine._delete = {ids: [],
                       newer: now / 1000 - 60 * 200.5};
     for (i = 100; i < 234; i++) {
       engine._delete.ids.push("record-no-" + i);
     }
 
-    engine._syncFinish();
+    await engine._syncFinish();
 
     // Ensure that the appropriate server data has been wiped while
     // preserving records 90 thru 200.
     for (i = 0; i < 234; i++) {
       let id = "record-no-" + i;
       if (i <= 90 || i >= 100) {
         do_check_eq(collection.payload(id), undefined);
       } else {
@@ -1750,17 +1748,17 @@ add_task(async function test_canDecrypt_
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler()
   });
 
   await SyncTestingInfrastructure(server);
   let engine = makeRotaryEngine();
   try {
 
-    do_check_false(engine.canDecrypt());
+    do_check_false((await engine.canDecrypt()));
 
   } finally {
     await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_canDecrypt_true() {
   _("SyncEngine.canDecrypt returns true if the engine can decrypt the items on the server.");
@@ -1775,17 +1773,17 @@ add_task(async function test_canDecrypt_
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler()
   });
 
   await SyncTestingInfrastructure(server);
   let engine = makeRotaryEngine();
   try {
 
-    do_check_true(engine.canDecrypt());
+    do_check_true((await engine.canDecrypt()));
 
   } finally {
     await cleanAndGo(engine, server);
   }
 
 });
 
 add_task(async function test_syncapplied_observer() {
@@ -1822,18 +1820,18 @@ add_task(async function test_syncapplied
   }
 
   Svc.Obs.add("weave:engine:sync:applied", onApplied);
 
   try {
     Service.scheduler.hasIncomingItems = false;
 
     // Do sync.
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
 
     do_check_attribute_count(engine._store.items, 10);
 
     do_check_eq(numApplyCalls, 1);
     do_check_eq(engine_name, "rotary");
     do_check_eq(count.applied, 10);
 
     do_check_true(Service.scheduler.hasIncomingItems);
--- a/services/sync/tests/unit/test_syncscheduler.js
+++ b/services/sync/tests/unit/test_syncscheduler.js
@@ -7,38 +7,29 @@ Cu.import("resource://services-sync/engi
 Cu.import("resource://services-sync/engines/clients.js");
 Cu.import("resource://services-sync/policies.js");
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/status.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-Service.engineManager.clear();
-
 function CatapultEngine() {
   SyncEngine.call(this, "Catapult", Service);
 }
 CatapultEngine.prototype = {
   __proto__: SyncEngine.prototype,
   exception: null, // tests fill this in
-  _sync: function _sync() {
+  async _sync() {
     throw this.exception;
   }
 };
 
-Service.engineManager.register(CatapultEngine);
-
 var scheduler = new SyncScheduler(Service);
-var clientsEngine = Service.clientsEngine;
-
-// Don't remove stale clients when syncing. This is a test-only workaround
-// that lets us add clients directly to the store, without losing them on
-// the next sync.
-clientsEngine._removeRemoteClient = id => {};
+let clientsEngine;
 
 function sync_httpd_setup() {
   let global = new ServerWBO("global", {
     syncID: Service.syncID,
     storageVersion: STORAGE_VERSION,
     engines: {clients: {version: clientsEngine.version,
                         syncID: clientsEngine.syncID}}
   });
@@ -63,35 +54,42 @@ async function setUp(server) {
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
   let result = (await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success;
   return result;
 }
 
 async function cleanUpAndGo(server) {
-  await promiseNextTick();
-  clientsEngine._store.wipe();
-  Service.startOver();
+  await Async.promiseYield();
+  await clientsEngine._store.wipe();
+  await Service.startOver();
   if (server) {
     await promiseStopServer(server);
   }
 }
 
-function run_test() {
+add_task(async function setup() {
+  await Service.promiseInitialized;
+  clientsEngine = Service.clientsEngine;
+  // Don't remove stale clients when syncing. This is a test-only workaround
+  // that lets us add clients directly to the store, without losing them on
+  // the next sync.
+  clientsEngine._removeRemoteClient = async (id) => {};
+  Service.engineManager.clear();
   initTestLogging("Trace");
 
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.scheduler").level = Log.Level.Trace;
   validate_all_future_pings();
 
   scheduler.setDefaults();
 
-  run_next_test();
-}
+  await Service.engineManager.register(CatapultEngine);
+});
 
 add_test(function test_prefAttributes() {
   _("Test various attributes corresponding to preferences.");
 
   const INTERVAL = 42 * 60 * 1000;   // 42 minutes
   const THRESHOLD = 3142;
   const SCORE = 2718;
   const TIMESTAMP1 = 1275493471649;
@@ -173,17 +171,17 @@ add_task(async function test_updateClien
   scheduler.updateClientMode();
 
   do_check_eq(scheduler.syncThreshold, MULTI_DEVICE_THRESHOLD);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
   do_check_true(scheduler.numClients > 1);
   do_check_false(scheduler.idle);
 
   // Resets the number of clients to 0.
-  clientsEngine.resetClient();
+  await clientsEngine.resetClient();
   Svc.Prefs.reset("clients.devices.mobile");
   scheduler.updateClientMode();
 
   // Goes back to single user if # clients is 1.
   do_check_eq(scheduler.numClients, 1);
   do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
   do_check_false(scheduler.numClients > 1);
@@ -206,25 +204,25 @@ add_task(async function test_masterpassw
 
   let oldScheduleAtInterval = SyncScheduler.prototype.scheduleAtInterval;
   SyncScheduler.prototype.scheduleAtInterval = function(interval) {
     rescheduleInterval = true;
     do_check_eq(interval, MASTER_PASSWORD_LOCKED_RETRY_INTERVAL);
   };
 
   let oldVerifyLogin = Service.verifyLogin;
-  Service.verifyLogin = function() {
+  Service.verifyLogin = async function() {
     Status.login = MASTER_PASSWORD_LOCKED;
     return false;
   };
 
   let server = sync_httpd_setup();
   await setUp(server);
 
-  Service.sync();
+  await Service.sync();
 
   do_check_true(loginFailed);
   do_check_eq(Status.login, MASTER_PASSWORD_LOCKED);
   do_check_true(rescheduleInterval);
 
   Service.verifyLogin = oldVerifyLogin;
   SyncScheduler.prototype.scheduleAtInterval = oldScheduleAtInterval;
 
@@ -383,48 +381,48 @@ add_task(async function test_handleSyncE
   do_check_eq(scheduler._syncErrors, 0);
   do_check_false(Status.enforceBackoff);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
   do_check_eq(Status.backoffInterval, 0);
 
   // Trigger sync with an error several times & observe
   // functionality of handleSyncError()
   _("Test first error calls scheduleNextSync on default interval");
-  Service.sync();
+  await Service.sync();
   do_check_true(scheduler.nextSync <= Date.now() + scheduler.singleDeviceInterval);
   do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
   do_check_eq(scheduler._syncErrors, 1);
   do_check_false(Status.enforceBackoff);
   scheduler.syncTimer.clear();
 
   _("Test second error still calls scheduleNextSync on default interval");
-  Service.sync();
+  await Service.sync();
   do_check_true(scheduler.nextSync <= Date.now() + scheduler.singleDeviceInterval);
   do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
   do_check_eq(scheduler._syncErrors, 2);
   do_check_false(Status.enforceBackoff);
   scheduler.syncTimer.clear();
 
   _("Test third error sets Status.enforceBackoff and calls scheduleAtInterval");
-  Service.sync();
+  await Service.sync();
   let maxInterval = scheduler._syncErrors * (2 * MINIMUM_BACKOFF_INTERVAL);
   do_check_eq(Status.backoffInterval, 0);
   do_check_true(scheduler.nextSync <= (Date.now() + maxInterval));
   do_check_true(scheduler.syncTimer.delay <= maxInterval);
   do_check_eq(scheduler._syncErrors, 3);
   do_check_true(Status.enforceBackoff);
 
   // Status.enforceBackoff is false but there are still errors.
   Status.resetBackoff();
   do_check_false(Status.enforceBackoff);
   do_check_eq(scheduler._syncErrors, 3);
   scheduler.syncTimer.clear();
 
   _("Test fourth error still calls scheduleAtInterval even if enforceBackoff was reset");
-  Service.sync();
+  await Service.sync();
   maxInterval = scheduler._syncErrors * (2 * MINIMUM_BACKOFF_INTERVAL);
   do_check_true(scheduler.nextSync <= Date.now() + maxInterval);
   do_check_true(scheduler.syncTimer.delay <= maxInterval);
   do_check_eq(scheduler._syncErrors, 4);
   do_check_true(Status.enforceBackoff);
   scheduler.syncTimer.clear();
 
   _("Arrange for a successful sync to reset the scheduler error count");
@@ -442,34 +440,34 @@ add_task(async function test_client_sync
   await setUp(server);
 
   // Confirm defaults.
   do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
   do_check_false(scheduler.idle);
 
   // Trigger a change in interval & threshold by adding a client.
-  clientsEngine._store.create(
+  await clientsEngine._store.create(
     { id: "foo", cleartext: { os: "mobile", version: "0.01", type: "desktop" } }
   );
   do_check_false(scheduler.numClients > 1);
   scheduler.updateClientMode();
-  Service.sync();
+  await Service.sync();
 
   do_check_eq(scheduler.syncThreshold, MULTI_DEVICE_THRESHOLD);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
   do_check_true(scheduler.numClients > 1);
   do_check_false(scheduler.idle);
 
   // Resets the number of clients to 0.
-  clientsEngine.resetClient();
+  await clientsEngine.resetClient();
   // Also re-init the server, or we suck our "foo" client back down.
   await setUp(server);
 
-  Service.sync();
+  await Service.sync();
 
   // Goes back to single user if # clients is 1.
   do_check_eq(scheduler.numClients, 1);
   do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
   do_check_false(scheduler.numClients > 1);
   do_check_false(scheduler.idle);
 
@@ -535,17 +533,17 @@ add_task(async function test_autoconnect
 
   // A locked master password will still trigger a sync, but then we'll hit
   // MASTER_PASSWORD_LOCKED and hence MASTER_PASSWORD_LOCKED_RETRY_INTERVAL.
   let promiseObserved = promiseOneObserver("weave:service:login:error");
 
   scheduler.delayedAutoConnect(0);
   await promiseObserved;
 
-  await promiseNextTick();
+  await Async.promiseYield();
 
   do_check_eq(Status.login, MASTER_PASSWORD_LOCKED);
 
   Utils.mpLocked = origLocked;
   Utils.ensureMPUnlocked = origEnsureMPUnlocked;
   Service.identity._canFetchKeys = origCanFetchKeys;
 
   await cleanUpAndGo(server);
@@ -714,17 +712,17 @@ add_task(async function test_no_sync_nod
   // it is not overwritten on sync:finish
   let server = sync_httpd_setup();
   await setUp(server);
 
   let oldfc = Service._clusterManager._findCluster;
   Service._clusterManager._findCluster = () => null;
   Service.clusterURL = "";
   try {
-    Service.sync();
+    await Service.sync();
     do_check_eq(Status.sync, NO_SYNC_NODE_FOUND);
     do_check_eq(scheduler.syncTimer.delay, NO_SYNC_NODE_INTERVAL);
 
     await cleanUpAndGo(server);
   } finally {
     Service._clusterManager._findCluster = oldfc;
   }
 });
@@ -739,17 +737,17 @@ add_task(async function test_sync_failed
   let engine = Service.engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 500};
 
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
   do_check_true(await setUp(server));
 
-  Service.sync();
+  await Service.sync();
 
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
 
   let maxInterval = scheduler._syncErrors * (2 * MINIMUM_BACKOFF_INTERVAL);
   do_check_eq(Status.backoffInterval, 0);
   do_check_true(Status.enforceBackoff);
   do_check_eq(scheduler._syncErrors, 4);
   do_check_true(scheduler.nextSync <= (Date.now() + maxInterval));
@@ -765,25 +763,25 @@ add_task(async function test_sync_failed
   scheduler._syncErrors = MAX_ERROR_COUNT_BEFORE_BACKOFF;
   let server = sync_httpd_setup();
 
   let engine = Service.engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 400};
 
   // Have multiple devices for an active interval.
-  clientsEngine._store.create(
+  await clientsEngine._store.create(
     { id: "foo", cleartext: { os: "mobile", version: "0.01", type: "desktop" } }
   );
 
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
   do_check_true(await setUp(server));
 
-  Service.sync();
+  await Service.sync();
 
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
 
   do_check_eq(Status.backoffInterval, 0);
   do_check_false(Status.enforceBackoff);
   do_check_eq(scheduler._syncErrors, 0);
   do_check_true(scheduler.nextSync <= (Date.now() + scheduler.activeInterval));
@@ -811,37 +809,37 @@ add_task(async function test_sync_X_Weav
       response.setHeader("X-Weave-Backoff", "" + BACKOFF);
     }
     infoColl(request, response);
   }
   server.registerPathHandler(INFO_COLLECTIONS, infoCollWithBackoff);
 
   // Pretend we have two clients so that the regular sync interval is
   // sufficiently low.
-  clientsEngine._store.create(
+  await clientsEngine._store.create(
     { id: "foo", cleartext: { os: "mobile", version: "0.01", type: "desktop" } }
   );
-  let rec = clientsEngine._store.createRecord("foo", "clients");
+  let rec = await clientsEngine._store.createRecord("foo", "clients");
   rec.encrypt(Service.collectionKeys.keyForCollection("clients"));
   rec.upload(Service.resource(clientsEngine.engineURL + rec.id));
 
   // Sync once to log in and get everything set up. Let's verify our initial
   // values.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.backoffInterval, 0);
   do_check_eq(Status.minimumNextSync, 0);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
   do_check_true(scheduler.nextSync <=
                 Date.now() + scheduler.syncInterval);
   // Sanity check that we picked the right value for BACKOFF:
   do_check_true(scheduler.syncInterval < BACKOFF * 1000);
 
   // Turn on server maintenance and sync again.
   serverBackoff = true;
-  Service.sync();
+  await Service.sync();
 
   do_check_true(Status.backoffInterval >= BACKOFF * 1000);
   // Allowing 20 seconds worth of of leeway between when Status.minimumNextSync
   // was set and when this line gets executed.
   let minimumExpectedDelay = (BACKOFF - 20) * 1000;
   do_check_true(Status.minimumNextSync >= Date.now() + minimumExpectedDelay);
 
   // Verify that the next sync is actually going to wait that long.
@@ -872,38 +870,38 @@ add_task(async function test_sync_503_Re
     }
     response.setHeader("Retry-After", "" + BACKOFF);
     response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
   }
   server.registerPathHandler(INFO_COLLECTIONS, infoCollWithMaintenance);
 
   // Pretend we have two clients so that the regular sync interval is
   // sufficiently low.
-  clientsEngine._store.create(
+  await clientsEngine._store.create(
     { id: "foo", cleartext: { os: "mobile", version: "0.01", type: "desktop" } }
   );
-  let rec = clientsEngine._store.createRecord("foo", "clients");
+  let rec = await clientsEngine._store.createRecord("foo", "clients");
   rec.encrypt(Service.collectionKeys.keyForCollection("clients"));
   rec.upload(Service.resource(clientsEngine.engineURL + rec.id));
 
   // Sync once to log in and get everything set up. Let's verify our initial
   // values.
-  Service.sync();
+  await Service.sync();
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.backoffInterval, 0);
   do_check_eq(Status.minimumNextSync, 0);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
   do_check_true(scheduler.nextSync <=
                 Date.now() + scheduler.syncInterval);
   // Sanity check that we picked the right value for BACKOFF:
   do_check_true(scheduler.syncInterval < BACKOFF * 1000);
 
   // Turn on server maintenance and sync again.
   serverMaintenance = true;
-  Service.sync();
+  await Service.sync();
 
   do_check_true(Status.enforceBackoff);
   do_check_true(Status.backoffInterval >= BACKOFF * 1000);
   // Allowing 3 seconds worth of of leeway between when Status.minimumNextSync
   // was set and when this line gets executed.
   let minimumExpectedDelay = (BACKOFF - 3) * 1000;
   do_check_true(Status.minimumNextSync >= Date.now() + minimumExpectedDelay);
 
@@ -934,17 +932,17 @@ add_task(async function test_loginError_
 
   // Sanity check.
   do_check_eq(scheduler.syncTimer, null);
   do_check_eq(Status.checkSetup(), STATUS_OK);
   do_check_eq(Status.login, LOGIN_SUCCEEDED);
 
   scheduler.scheduleNextSync(0);
   await promiseObserved;
-  await promiseNextTick();
+  await Async.promiseYield();
 
   do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
 
   let expectedNextSync = Date.now() + scheduler.syncInterval;
   do_check_true(scheduler.nextSync > Date.now());
   do_check_true(scheduler.nextSync <= expectedNextSync);
   do_check_true(scheduler.syncTimer.delay > 0);
   do_check_true(scheduler.syncTimer.delay <= scheduler.syncInterval);
@@ -969,17 +967,17 @@ add_task(async function test_loginError_
   // Sanity check.
   do_check_eq(scheduler.nextSync, 0);
   do_check_eq(scheduler.syncTimer, null);
   do_check_eq(Status.checkSetup(), STATUS_OK);
   do_check_eq(Status.login, LOGIN_SUCCEEDED);
 
   scheduler.scheduleNextSync(0);
   await promiseObserved;
-  await promiseNextTick();
+  await Async.promiseYield();
 
   // For the FxA identity, a 401 on info/collections means a transient
   // error, probably due to an inability to fetch a token.
   do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
   // syncs should still be scheduled.
   do_check_true(scheduler.nextSync > Date.now());
   do_check_true(scheduler.syncTimer.delay > 0);
 
@@ -998,13 +996,13 @@ add_task(async function test_proper_inte
   Svc.Obs.notify("weave:service:sync:applied", {
     applied: 2,
     succeeded: 0,
     failed: 2,
     newFailed: 2,
     reconciled: 0
   });
 
-  await promiseNextTick();
+  await Async.promiseYield();
   scheduler.adjustSyncInterval();
   do_check_false(scheduler.hasIncomingItems);
   do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
 });
--- a/services/sync/tests/unit/test_tab_engine.js
+++ b/services/sync/tests/unit/test_tab_engine.js
@@ -3,31 +3,28 @@
 
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines/tabs.js");
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-function getMocks() {
+async function getMocks() {
   let engine = new TabEngine(Service);
+  await engine.initialize();
   let store = engine._store;
   store.getTabState = mockGetTabState;
   store.shouldSkipWindow = mockShouldSkipWindow;
   return [engine, store];
 }
 
-function run_test() {
-  run_next_test();
-}
-
-add_test(function test_getOpenURLs() {
+add_task(async function test_getOpenURLs() {
   _("Test getOpenURLs.");
-  let [engine, store] = getMocks();
+  let [engine, store] = await getMocks();
 
   let superLongURL = "http://" + (new Array(MAX_UPLOAD_BYTES).join("w")) + ".com/";
   let urls = ["http://bar.com", "http://foo.com", "http://foobar.com", superLongURL];
   function fourURLs() {
     return urls.pop();
   }
   store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, fourURLs, 1, 4);
 
@@ -40,28 +37,26 @@ add_test(function test_getOpenURLs() {
 
   _("  test matching works (false)");
   matches = openurlsset.has("http://barfoo.com");
   ok(!matches);
 
   _("  test matching works (too long)");
   matches = openurlsset.has(superLongURL);
   ok(!matches);
-
-  run_next_test();
 });
 
 add_task(async function test_tab_engine_skips_incoming_local_record() {
   _("Ensure incoming records that match local client ID are never applied.");
-  let [engine, store] = getMocks();
+  let [engine, store] = await getMocks();
   let localID = engine.service.clientsEngine.localID;
   let apply = store.applyIncoming;
   let applied = [];
 
-  store.applyIncoming = function(record) {
+  store.applyIncoming = async function(record) {
     notEqual(record.id, localID, "Only apply tab records from remote clients");
     applied.push(record);
     apply.call(store, record);
   }
 
   let collection = new ServerCollection();
 
   _("Creating remote tab record with local client ID");
@@ -84,60 +79,58 @@ add_task(async function test_tab_engine_
                                               new WBORecord(engine.metaURL));
   meta_global.payload.engines = {tabs: {version: engine.version,
                                         syncID: engine.syncID}};
 
   generateNewKeys(Service.collectionKeys);
 
   let promiseFinished = new Promise(resolve => {
     let syncFinish = engine._syncFinish;
-    engine._syncFinish = function() {
+    engine._syncFinish = async function() {
       equal(applied.length, 1, "Remote client record was applied");
       equal(applied[0].id, remoteID, "Remote client ID matches");
 
-      syncFinish.call(engine);
+      await syncFinish.call(engine);
       resolve();
     }
   });
 
   _("Start sync");
-  engine._sync();
+  await engine._sync();
   await promiseFinished;
 });
 
-add_test(function test_reconcile() {
-  let [engine, ] = getMocks();
+add_task(async function test_reconcile() {
+  let [engine, ] = await getMocks();
 
   _("Setup engine for reconciling");
-  engine._syncStartup();
+  await engine._syncStartup();
 
   _("Create an incoming remote record");
   let remoteRecord = {id: "remote id",
                       cleartext: "stuff and things!",
                       modified: 1000};
 
-  ok(engine._reconcile(remoteRecord), "Apply a recently modified remote record");
+  ok((await engine._reconcile(remoteRecord)), "Apply a recently modified remote record");
 
   remoteRecord.modified = 0;
-  ok(engine._reconcile(remoteRecord), "Apply a remote record modified long ago");
+  ok((await engine._reconcile(remoteRecord)), "Apply a remote record modified long ago");
 
   // Remote tab records are never tracked locally, so the only
   // time they're skipped is when they're marked as deleted.
   remoteRecord.deleted = true;
-  ok(!engine._reconcile(remoteRecord), "Skip a deleted remote record");
+  ok(!(await engine._reconcile(remoteRecord)), "Skip a deleted remote record");
 
   _("Create an incoming local record");
   // The locally tracked tab record always takes precedence over its
   // remote counterparts.
   let localRecord = {id: engine.service.clientsEngine.localID,
                      cleartext: "this should always be skipped",
                      modified: 2000};
 
-  ok(!engine._reconcile(localRecord), "Skip incoming local if recently modified");
+  ok(!(await engine._reconcile(localRecord)), "Skip incoming local if recently modified");
 
   localRecord.modified = 0;
-  ok(!engine._reconcile(localRecord), "Skip incoming local if modified long ago");
+  ok(!(await engine._reconcile(localRecord)), "Skip incoming local if modified long ago");
 
   localRecord.deleted = true;
-  ok(!engine._reconcile(localRecord), "Skip incoming local if deleted");
-
-  run_next_test();
+  ok(!(await engine._reconcile(localRecord)), "Skip incoming local if deleted");
 });
--- a/services/sync/tests/unit/test_tab_store.js
+++ b/services/sync/tests/unit/test_tab_store.js
@@ -1,54 +1,57 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/engines/tabs.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/common/utils.js");
 
-function getMockStore() {
+async function getMockStore() {
   let engine = new TabEngine(Service);
+  await engine.initialize();
   let store = engine._store;
   store.getTabState = mockGetTabState;
   store.shouldSkipWindow = mockShouldSkipWindow;
   return store;
 }
 
-function test_create() {
-  let store = new TabEngine(Service)._store;
+add_task(async function test_create() {
+  let engine = new TabEngine(Service);
+  await engine.initialize();
+  let store = engine._store;
 
   _("Create a first record");
   let rec = {id: "id1",
              clientName: "clientName1",
              cleartext: { "foo": "bar" },
              modified: 1000};
-  store.applyIncoming(rec);
+  await store.applyIncoming(rec);
   deepEqual(store._remoteClients["id1"], { lastModified: 1000, foo: "bar" });
 
   _("Create a second record");
   rec = {id: "id2",
          clientName: "clientName2",
          cleartext: { "foo2": "bar2" },
          modified: 2000};
-  store.applyIncoming(rec);
+  await store.applyIncoming(rec);
   deepEqual(store._remoteClients["id2"], { lastModified: 2000, foo2: "bar2" });
 
   _("Create a third record");
   rec = {id: "id3",
          clientName: "clientName3",
          cleartext: { "foo3": "bar3" },
          modified: 3000};
-  store.applyIncoming(rec);
+  await store.applyIncoming(rec);
   deepEqual(store._remoteClients["id3"], { lastModified: 3000, foo3: "bar3" });
-}
+});
 
-function test_getAllTabs() {
-  let store = getMockStore();
+add_task(async function test_getAllTabs() {
+  let store = await getMockStore();
   let tabs;
 
   let threeUrls = ["http://foo.com", "http://fuubar.com", "http://barbar.com"];
 
   store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://bar.com", 1, 1, () => 2, () => threeUrls);
 
   _("Get all tabs.");
   tabs = store.getAllTabs();
@@ -78,37 +81,31 @@ function test_getAllTabs() {
   store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://bar.com", 1, 1, () => 45, () => allURLs);
   tabs = store.getAllTabs((url) => url.startsWith("about"));
 
   _("Sliced: " + JSON.stringify(tabs));
   equal(tabs.length, 1);
   equal(tabs[0].urlHistory.length, 5);
   equal(tabs[0].urlHistory[0], "http://foo40.bar");
   equal(tabs[0].urlHistory[4], "http://foo36.bar");
-}
+});
 
-function test_createRecord() {
-  let store = getMockStore();
+add_task(async function test_createRecord() {
+  let store = await getMockStore();
   let record;
 
   store.getTabState = mockGetTabState;
   store.shouldSkipWindow = mockShouldSkipWindow;
   store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1);
 
   let numtabs = 2600; // Note: this number is connected to DEFAULT_MAX_RECORD_PAYLOAD_BYTES
 
   store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1);
-  record = store.createRecord("fake-guid");
+  record = await store.createRecord("fake-guid");
   ok(record instanceof TabSetRecord);
   equal(record.tabs.length, 1);
 
   _("create a big record");
   store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, numtabs);
-  record = store.createRecord("fake-guid");
+  record = await store.createRecord("fake-guid");
   ok(record instanceof TabSetRecord);
   equal(record.tabs.length, 2501);
-}
-
-function run_test() {
-  test_create();
-  test_getAllTabs();
-  test_createRecord();
-}
+});
--- a/services/sync/tests/unit/test_tab_tracker.js
+++ b/services/sync/tests/unit/test_tab_tracker.js
@@ -1,16 +1,21 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/engines/tabs.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-var clientsEngine = Service.clientsEngine;
+let clientsEngine;
+
+add_task(async function setup() {
+  await Service.promiseInitialized;
+  clientsEngine = Service.clientsEngine;
+});
 
 function fakeSvcWinMediator() {
   // actions on windows are captured in logs
   let logs = [];
   delete Services.wm;
   Services.wm = {
     getEnumerator() {
       return {
@@ -39,25 +44,25 @@ function fakeSvcWinMediator() {
           };
         }
       };
     }
   };
   return logs;
 }
 
-function run_test() {
+add_task(async function run_test() {
   let engine = Service.engineManager.get("tabs");
 
   _("We assume that tabs have changed at startup.");
   let tracker = engine._tracker;
   tracker.persistChangedIDs = false;
 
   do_check_true(tracker.modified);
-  do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()),
+  do_check_true(Utils.deepEquals(Object.keys((await engine.getChangedIDs())),
                                  [clientsEngine.localID]));
 
   let logs;
 
   _("Test listeners are registered on windows");
   logs = fakeSvcWinMediator();
   Svc.Obs.notify("weave:engine:start-tracking");
   do_check_eq(logs.length, 2);
@@ -93,35 +98,35 @@ function run_test() {
   for (let evttype of ["TabOpen", "TabClose", "TabSelect"]) {
     // Pretend we just synced.
     tracker.clearChangedIDs();
     do_check_false(tracker.modified);
 
     // Send a fake tab event
     tracker.onTab({type: evttype, originalTarget: evttype});
     do_check_true(tracker.modified);
-    do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()),
+    do_check_true(Utils.deepEquals(Object.keys((await engine.getChangedIDs())),
                                    [clientsEngine.localID]));
   }
 
   // Pretend we just synced.
   tracker.clearChangedIDs();
   do_check_false(tracker.modified);
 
   tracker.onTab({type: "pageshow", originalTarget: "pageshow"});
-  do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()),
+  do_check_true(Utils.deepEquals(Object.keys((await engine.getChangedIDs())),
                                  [clientsEngine.localID]));
 
   // Pretend we just synced and saw some progress listeners.
   tracker.clearChangedIDs();
   do_check_false(tracker.modified);
   tracker.onLocationChange({ isTopLevel: false }, undefined, undefined, 0);
   do_check_false(tracker.modified, "non-toplevel request didn't flag as modified");
 
   tracker.onLocationChange({ isTopLevel: true }, undefined, undefined,
                            Ci.nsIWebProgressListener.LOCATION_CHANGE_SAME_DOCUMENT);
   do_check_false(tracker.modified, "location change within the same document request didn't flag as modified");
 
   tracker.onLocationChange({ isTopLevel: true }, undefined, undefined, 0);
   do_check_true(tracker.modified, "location change for a new top-level document flagged as modified");
-  do_check_true(Utils.deepEquals(Object.keys(engine.getChangedIDs()),
+  do_check_true(Utils.deepEquals(Object.keys((await engine.getChangedIDs())),
                                  [clientsEngine.localID]));
-}
+});
--- a/services/sync/tests/unit/test_telemetry.js
+++ b/services/sync/tests/unit/test_telemetry.js
@@ -12,17 +12,16 @@ Cu.import("resource://services-sync/engi
 Cu.import("resource://services-sync/engines/clients.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://testing-common/services/sync/fxa_utils.js");
 Cu.import("resource://testing-common/services/sync/rotaryengine.js");
 Cu.import("resource://gre/modules/osfile.jsm", this);
 
 Cu.import("resource://services-sync/util.js");
 
-initTestLogging("Trace");
 
 function SteamStore(engine) {
   Store.call(this, "Steam", engine);
 }
 
 SteamStore.prototype = {
   __proto__: Store.prototype,
 };
@@ -40,17 +39,17 @@ function SteamEngine(service) {
   Engine.call(this, "steam", service);
 }
 
 SteamEngine.prototype = {
   __proto__: Engine.prototype,
   _storeObj: SteamStore,
   _trackerObj: SteamTracker,
   _errToThrow: null,
-  _sync() {
+  async _sync() {
     if (this._errToThrow) {
       throw this._errToThrow;
     }
   }
 };
 
 function BogusEngine(service) {
   Engine.call(this, "bogus", service);
@@ -61,18 +60,21 @@ BogusEngine.prototype = Object.create(St
 async function cleanAndGo(engine, server) {
   engine._tracker.clearChangedIDs();
   Svc.Prefs.resetBranch("");
   Svc.Prefs.set("log.logger.engine.rotary", "Trace");
   Service.recordManager.clearCache();
   await promiseStopServer(server);
 }
 
-// Avoid addon manager complaining about not being initialized
-Service.engineManager.unregister("addons");
+add_task(async function setup() {
+  initTestLogging("Trace");
+  // Avoid addon manager complaining about not being initialized
+  Service.engineManager.unregister("addons");
+});
 
 add_task(async function test_basic() {
   enableValidationPrefs();
 
   let helper = track_collections_helper();
   let upd = helper.with_updated_collection;
 
   let handlers = {
@@ -100,16 +102,17 @@ add_task(async function test_basic() {
   ok("locale" in ping.os, "there is an OS locale");
 
   Svc.Prefs.resetBranch("");
   await promiseStopServer(server);
 });
 
 add_task(async function test_processIncoming_error() {
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("bookmarks");
   try {
     // Create a bogus record that when synced down will provoke a
     // network error which in turn provokes an exception in _processIncoming.
     const BOGUS_GUID = "zzzzzzzzzzzz";
@@ -143,23 +146,24 @@ add_task(async function test_processInco
     equal(pingPayload.engines.length, 1);
     equal(pingPayload.engines[0].name, "bookmarks");
     deepEqual(pingPayload.engines[0].failureReason, {
       name: "othererror",
       error: "error.engine.reason.record_download_fail"
     });
 
   } finally {
-    store.wipe();
+    await store.wipe();
     await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_uploading() {
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let parent = PlacesUtils.toolbarFolderId;
   let uri = Utils.makeURI("http://getfirefox.com/");
 
   let bmk_id = PlacesUtils.bookmarks.insertBookmark(parent, uri,
@@ -171,28 +175,28 @@ add_task(async function test_uploading()
     equal(ping.engines.length, 1);
     equal(ping.engines[0].name, "bookmarks");
     ok(!!ping.engines[0].outgoing);
     greater(ping.engines[0].outgoing[0].sent, 0)
     ok(!ping.engines[0].incoming);
 
     PlacesUtils.bookmarks.setItemTitle(bmk_id, "New Title");
 
-    store.wipe();
-    engine.resetClient();
+    await store.wipe();
+    await engine.resetClient();
 
     ping = await sync_engine_and_validate_telem(engine, false);
     equal(ping.engines.length, 1);
     equal(ping.engines[0].name, "bookmarks");
     equal(ping.engines[0].outgoing.length, 1);
     ok(!!ping.engines[0].incoming);
 
   } finally {
     // Clean up.
-    store.wipe();
+    await store.wipe();
     await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_upload_failed() {
   let collection = new ServerCollection();
   collection._wbos.flying = new ServerWBO("flying");
 
@@ -328,17 +332,17 @@ add_task(async function test_sync_partia
     await cleanAndGo(engine, server);
     await engine.finalize();
   }
 });
 
 add_task(async function test_generic_engine_fail() {
   enableValidationPrefs();
 
-  Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   let e = new Error("generic failure message")
   engine._errToThrow = e;
 
   try {
@@ -354,17 +358,17 @@ add_task(async function test_generic_eng
     await cleanAndGo(engine, server);
     Service.engineManager.unregister(engine);
   }
 });
 
 add_task(async function test_engine_fail_ioerror() {
   enableValidationPrefs();
 
-  Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   // create an IOError to re-throw as part of Sync.
   try {
     // (Note that fakeservices.js has replaced Utils.jsonMove etc, but for
     // this test we need the real one so we get real exceptions from the
@@ -421,17 +425,17 @@ add_task(async function test_clean_urls(
     Service.engineManager.unregister(engine);
   }
 });
 
 
 add_task(async function test_initial_sync_engines() {
   enableValidationPrefs();
 
-  Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   // These are the only ones who actually have things to sync at startup.
   let engineNames = ["clients", "bookmarks", "prefs", "tabs"];
   let server = serverForEnginesWithKeys({"foo": "password"}, ["bookmarks", "prefs", "tabs"].map(name =>
     Service.engineManager.get(name)
   ));
   await SyncTestingInfrastructure(server);
@@ -458,17 +462,17 @@ add_task(async function test_initial_syn
     await cleanAndGo(engine, server);
     Service.engineManager.unregister(engine);
   }
 });
 
 add_task(async function test_nserror() {
   enableValidationPrefs();
 
-  Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   engine._errToThrow = Components.Exception("NS_ERROR_UNKNOWN_HOST", Cr.NS_ERROR_UNKNOWN_HOST);
   try {
     _(`test_nserror: Steam tracker contents: ${
       JSON.stringify(engine._tracker.changedIDs)}`);
@@ -513,17 +517,17 @@ add_task(async function test_discarding(
       handlers["/1.1/johndoe/storage/" + coll] = upd(coll, new ServerCollection({}, true).handler());
     }
 
     server = httpd_setup(handlers);
     await configureIdentity({ username: "johndoe" }, server);
     telem.submit = () => ok(false, "Submitted telemetry ping when we should not have");
 
     for (let i = 0; i < 5; ++i) {
-      Service.sync();
+      await Service.sync();
     }
     telem.submit = oldSubmit;
     telem.submissionInterval = -1;
     let ping = await sync_and_validate_telem(true, true); // with this we've synced 6 times
     equal(ping.syncs.length, 2);
     equal(ping.discarded, 4);
   } finally {
     telem.maxPayloadCount = 500;
@@ -533,17 +537,17 @@ add_task(async function test_discarding(
       await promiseStopServer(server);
     }
   }
 })
 
 add_task(async function test_no_foreign_engines_in_error_ping() {
   enableValidationPrefs();
 
-  Service.engineManager.register(BogusEngine);
+  await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
   let server = serverForFoo(engine);
   engine._errToThrow = new Error("Oh no!");
   await SyncTestingInfrastructure(server);
   try {
     let ping = await sync_and_validate_telem(true);
     equal(ping.status.service, SYNC_FAILED_PARTIAL);
@@ -552,17 +556,17 @@ add_task(async function test_no_foreign_
     await cleanAndGo(engine, server);
     Service.engineManager.unregister(engine);
   }
 });
 
 add_task(async function test_sql_error() {
   enableValidationPrefs();
 
-  Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   engine._sync = function() {
     // Just grab a DB connection and issue a bogus SQL statement synchronously.
     let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase).DBConnection;
     Async.querySpinningly(db.createAsyncStatement("select bar from foo"));
@@ -577,17 +581,17 @@ add_task(async function test_sql_error()
     await cleanAndGo(engine, server);
     Service.engineManager.unregister(engine);
   }
 });
 
 add_task(async function test_no_foreign_engines_in_success_ping() {
   enableValidationPrefs();
 
-  Service.engineManager.register(BogusEngine);
+  await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
   let server = serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
   try {
     let ping = await sync_and_validate_telem();
     ok(ping.engines.every(e => e.name !== "bogus"));
@@ -595,17 +599,17 @@ add_task(async function test_no_foreign_
     await cleanAndGo(engine, server);
     Service.engineManager.unregister(engine);
   }
 });
 
 add_task(async function test_events() {
   enableValidationPrefs();
 
-  Service.engineManager.register(BogusEngine);
+  await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
   let server = serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
   try {
     let serverTime = AsyncResource.serverTime;
     Service.recordTelemetryEvent("object", "method", "value", { foo: "bar" });
@@ -639,17 +643,17 @@ add_task(async function test_events() {
     await cleanAndGo(engine, server);
     Service.engineManager.unregister(engine);
   }
 });
 
 add_task(async function test_invalid_events() {
   enableValidationPrefs();
 
-  Service.engineManager.register(BogusEngine);
+  await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
   let server = serverForFoo(engine);
 
   async function checkNotRecorded(...args) {
     Service.recordTelemetryEvent.call(args);
     let ping = await wait_for_ping(() => Service.sync(), false, true);
     equal(ping.events, undefined);
@@ -685,30 +689,30 @@ add_task(async function test_invalid_eve
 });
 
 add_task(async function test_no_ping_for_self_hosters() {
   enableValidationPrefs();
 
   let telem = get_sync_test_telemetry();
   let oldSubmit = telem.submit;
 
-  Service.engineManager.register(BogusEngine);
+  await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
   let server = serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
   try {
     let submitPromise = new Promise(resolve => {
       telem.submit = function() {
         let result = oldSubmit.apply(this, arguments);
         resolve(result);
       };
     });
-    Service.sync();
+    await Service.sync();
     let pingSubmitted = await submitPromise;
     // The Sync testing infrastructure already sets up a custom token server,
     // so we don't need to do anything to simulate a self-hosted user.
     ok(!pingSubmitted, "Should not submit ping with custom token server URL");
   } finally {
     telem.submit = oldSubmit;
     await cleanAndGo(engine, server);
     Service.engineManager.unregister(engine);
--- a/services/sync/tests/unit/test_utils_catch.js
+++ b/services/sync/tests/unit/test_utils_catch.js
@@ -1,94 +1,113 @@
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://services-sync/service.js");
 
-function run_test() {
+add_task(async function run_test() {
   _("Make sure catch when copied to an object will correctly catch stuff");
   let ret, rightThis, didCall, didThrow, wasTen, wasLocked;
   let obj = {
-    catch: Utils.catch,
+    _catch: Utils.catch,
     _log: {
       debug(str) {
         didThrow = str.search(/^Exception/) == 0;
       },
       info(str) {
         wasLocked = str.indexOf("Cannot start sync: already syncing?") == 0;
       }
     },
 
     func() {
-      return this.catch(function() {
+      return this._catch(async function() {
         rightThis = this == obj;
         didCall = true;
         return 5;
       })();
     },
 
     throwy() {
-      return this.catch(function() {
+      return this._catch(async function() {
         rightThis = this == obj;
         didCall = true;
         throw 10;
       })();
     },
 
     callbacky() {
-      return this.catch(function() {
+      return this._catch(async function() {
         rightThis = this == obj;
         didCall = true;
         throw 10;
-      }, function(ex) {
+      }, async function(ex) {
         wasTen = (ex == 10)
       })();
     },
 
     lockedy() {
-      return this.catch(function() {
+      return this._catch(async function() {
         rightThis = this == obj;
         didCall = true;
         throw ("Could not acquire lock.");
       })();
-    }
+    },
+
+    lockedy_chained() {
+      return this._catch(function() {
+        rightThis = this == obj;
+        didCall = true;
+        return Promise.resolve().then( () => { throw ("Could not acquire lock.") });
+      })();
+    },
   };
 
   _("Make sure a normal call will call and return");
   rightThis = didCall = didThrow = wasLocked = false;
-  ret = obj.func();
+  ret = await obj.func();
   do_check_eq(ret, 5);
   do_check_true(rightThis);
   do_check_true(didCall);
   do_check_false(didThrow);
   do_check_eq(wasTen, undefined);
   do_check_false(wasLocked);
 
   _("Make sure catch/throw results in debug call and caller doesn't need to handle exception");
   rightThis = didCall = didThrow = wasLocked = false;
-  ret = obj.throwy();
+  ret = await obj.throwy();
   do_check_eq(ret, undefined);
   do_check_true(rightThis);
   do_check_true(didCall);
   do_check_true(didThrow);
   do_check_eq(wasTen, undefined);
   do_check_false(wasLocked);
 
   _("Test callback for exception testing.");
   rightThis = didCall = didThrow = wasLocked = false;
-  ret = obj.callbacky();
+  ret = await obj.callbacky();
   do_check_eq(ret, undefined);
   do_check_true(rightThis);
   do_check_true(didCall);
   do_check_true(didThrow);
   do_check_true(wasTen);
   do_check_false(wasLocked);
 
   _("Test the lock-aware catch that Service uses.");
-  obj.catch = Service._catch;
+  obj._catch = Service._catch;
   rightThis = didCall = didThrow = wasLocked = false;
   wasTen = undefined;
-  ret = obj.lockedy();
+  ret = await obj.lockedy();
   do_check_eq(ret, undefined);
   do_check_true(rightThis);
   do_check_true(didCall);
   do_check_true(didThrow);
   do_check_eq(wasTen, undefined);
   do_check_true(wasLocked);
-}
+
+  _("Test the lock-aware catch that Service uses with a chained promise.");
+  rightThis = didCall = didThrow = wasLocked = false;
+  wasTen = undefined;
+  ret = await obj.lockedy_chained();
+  do_check_eq(ret, undefined);
+  do_check_true(rightThis);
+  do_check_true(didCall);
+  do_check_true(didThrow);
+  do_check_eq(wasTen, undefined);
+  do_check_true(wasLocked);
+});
--- a/services/sync/tests/unit/test_utils_json.js
+++ b/services/sync/tests/unit/test_utils_json.js
@@ -1,84 +1,61 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://gre/modules/FileUtils.jsm");
 Cu.import("resource://services-sync/util.js");
 
-function run_test() {
-  initTestLogging();
-  run_next_test();
-}
+initTestLogging();
 
-add_test(function test_roundtrip() {
+add_task(async function test_roundtrip() {
   _("Do a simple write of an array to json and read");
-  Utils.jsonSave("foo", {}, ["v1", "v2"], ensureThrows(function(error) {
-    do_check_eq(error, null);
+  await Utils.jsonSave("foo", {}, ["v1", "v2"]);
 
-    Utils.jsonLoad("foo", {}, ensureThrows(function(val) {
-      let foo = val;
-      do_check_eq(typeof foo, "object");
-      do_check_eq(foo.length, 2);
-      do_check_eq(foo[0], "v1");
-      do_check_eq(foo[1], "v2");
-      run_next_test();
-    }));
-  }));
+  let foo = await Utils.jsonLoad("foo", {});
+  do_check_eq(typeof foo, "object");
+  do_check_eq(foo.length, 2);
+  do_check_eq(foo[0], "v1");
+  do_check_eq(foo[1], "v2");
+});
+
+add_task(async function test_string() {
+  _("Try saving simple strings");
+  await Utils.jsonSave("str", {}, "hi");
+
+  let str = await Utils.jsonLoad("str", {});
+  do_check_eq(typeof str, "string");
+  do_check_eq(str.length, 2);
+  do_check_eq(str[0], "h");
+  do_check_eq(str[1], "i");
 });
 
-add_test(function test_string() {
-  _("Try saving simple strings");
-  Utils.jsonSave("str", {}, "hi", ensureThrows(function(error) {
-    do_check_eq(error, null);
+add_task(async function test_number() {
+  _("Try saving a number");
+  await Utils.jsonSave("num", {}, 42);
 
-    Utils.jsonLoad("str", {}, ensureThrows(function(val) {
-      let str = val;
-      do_check_eq(typeof str, "string");
-      do_check_eq(str.length, 2);
-      do_check_eq(str[0], "h");
-      do_check_eq(str[1], "i");
-      run_next_test();
-    }));
-  }));
+  let num = await Utils.jsonLoad("num", {});
+  do_check_eq(typeof num, "number");
+  do_check_eq(num, 42);
 });
 
-add_test(function test_number() {
-  _("Try saving a number");
-  Utils.jsonSave("num", {}, 42, ensureThrows(function(error) {
-    do_check_eq(error, null);
-
-    Utils.jsonLoad("num", {}, ensureThrows(function(val) {
-      let num = val;
-      do_check_eq(typeof num, "number");
-      do_check_eq(num, 42);
-      run_next_test();
-    }));
-  }));
+add_task(async function test_nonexistent_file() {
+  _("Try loading a non-existent file.");
+  let val = await Utils.jsonLoad("non-existent", {});
+  do_check_eq(val, undefined);
 });
 
-add_test(function test_nonexistent_file() {
-  _("Try loading a non-existent file.");
-  Utils.jsonLoad("non-existent", {}, ensureThrows(function(val) {
-    do_check_eq(val, undefined);
-    run_next_test();
-  }));
+add_task(async function test_save_logging() {
+  _("Verify that writes are logged.");
+  let trace;
+  await Utils.jsonSave("log", {_log: {trace(msg) { trace = msg; }}}, "hi");
+  do_check_true(!!trace);
 });
 
-add_test(function test_save_logging() {
-  _("Verify that writes are logged.");
-  let trace;
-  Utils.jsonSave("log", {_log: {trace(msg) { trace = msg; }}},
-                       "hi", ensureThrows(function() {
-    do_check_true(!!trace);
-    run_next_test();
-  }));
-});
-
-add_test(function test_load_logging() {
+add_task(async function test_load_logging() {
   _("Verify that reads and read errors are logged.");
 
   // Write a file with some invalid JSON
   let filePath = "weave/log.json";
   let file = FileUtils.getFile("ProfD", filePath.split("/"), true);
   let fos = Cc["@mozilla.org/network/file-output-stream;1"]
               .createInstance(Ci.nsIFileOutputStream);
   let flags = FileUtils.MODE_WRONLY | FileUtils.MODE_CREATE
@@ -96,19 +73,13 @@ add_test(function test_load_logging() {
       trace(msg) {
         trace = msg;
       },
       debug(msg) {
         debug = msg;
       }
     }
   };
-  Utils.jsonLoad("log", obj, ensureThrows(function(val) {
-    do_check_true(!val);
-    do_check_true(!!trace);
-    do_check_true(!!debug);
-    run_next_test();
-  }));
+  let val = await Utils.jsonLoad("log", obj);
+  do_check_true(!val);
+  do_check_true(!!trace);
+  do_check_true(!!debug);
 });
-
-add_task(async function test_undefined_callback() {
-  await Utils.jsonSave("foo", {}, ["v1", "v2"]);
-});
--- a/services/sync/tests/unit/test_utils_lock.js
+++ b/services/sync/tests/unit/test_utils_lock.js
@@ -3,17 +3,17 @@ Cu.import("resource://services-sync/util
 
 // Utility that we only use here.
 
 function do_check_begins(thing, startsWith) {
   if (!(thing && thing.indexOf && (thing.indexOf(startsWith) == 0)))
     do_throw(thing + " doesn't begin with " + startsWith);
 }
 
-function run_test() {
+add_task(async function run_test() {
   let ret, rightThis, didCall;
   let state, lockState, lockedState, unlockState;
   let obj = {
     _lock: Utils.lock,
     lock() {
       lockState = ++state;
       if (this._locked) {
         lockedState = ++state;
@@ -24,55 +24,55 @@ function run_test() {
     },
     unlock() {
       unlockState = ++state;
       this._locked = false;
     },
 
     func() {
       return this._lock("Test utils lock",
-                        function() {
-                          rightThis = this == obj;
-                          didCall = true;
-                          return 5;
-                        })();
+                              async function() {
+                                rightThis = this == obj;
+                                didCall = true;
+                                return 5;
+                              })();
     },
 
     throwy() {
       return this._lock("Test utils lock throwy",
-                        function() {
-                          rightThis = this == obj;
-                          didCall = true;
-                          this.throwy();
-                        })();
+                              async function() {
+                                rightThis = this == obj;
+                                didCall = true;
+                                return this.throwy();
+                              })();
     }
   };
 
   _("Make sure a normal call will call and return");
   rightThis = didCall = false;
   state = 0;
-  ret = obj.func();
+  ret = await obj.func();
   do_check_eq(ret, 5);
   do_check_true(rightThis);
   do_check_true(didCall);
   do_check_eq(lockState, 1);
   do_check_eq(unlockState, 2);
   do_check_eq(state, 2);
 
   _("Make sure code that calls locked code throws");
   ret = null;
   rightThis = didCall = false;
   try {
-    ret = obj.throwy();
+    ret = await obj.throwy();
     do_throw("throwy internal call should have thrown!");
   } catch (ex) {
     // Should throw an Error, not a string.
     do_check_begins(ex, "Could not acquire lock");
   }
   do_check_eq(ret, null);
   do_check_true(rightThis);
   do_check_true(didCall);
   _("Lock should be called twice so state 3 is skipped");
   do_check_eq(lockState, 4);
   do_check_eq(lockedState, 5);
   do_check_eq(unlockState, 6);
   do_check_eq(state, 6);
-}
+});
--- a/services/sync/tests/unit/test_utils_notify.js
+++ b/services/sync/tests/unit/test_utils_notify.js
@@ -1,29 +1,29 @@
 _("Make sure notify sends out the right notifications");
 Cu.import("resource://services-sync/util.js");
 
-function run_test() {
+add_task(async function run_test() {
   let ret, rightThis, didCall;
   let obj = {
     notify: Utils.notify("foo:"),
     _log: {
       trace() {}
     },
 
     func() {
-      return this.notify("bar", "baz", function() {
+      return this.notify("bar", "baz", async function() {
         rightThis = this == obj;
         didCall = true;
         return 5;
       })();
     },
 
     throwy() {
-      return this.notify("bad", "one", function() {
+      return this.notify("bad", "one", async function() {
         rightThis = this == obj;
         didCall = true;
         throw 10;
       })();
     }
   };
 
   let state = 0;
@@ -41,17 +41,17 @@ function run_test() {
     return obj2;
   };
 
   _("Make sure a normal call will call and return with notifications");
   rightThis = didCall = false;
   let fs = makeObs("foo:bar:start");
   let ff = makeObs("foo:bar:finish");
   let fe = makeObs("foo:bar:error");
-  ret = obj.func();
+  ret = await obj.func();
   do_check_eq(ret, 5);
   do_check_true(rightThis);
   do_check_true(didCall);
 
   do_check_eq(fs.state, 1);
   do_check_eq(fs.subject, undefined);
   do_check_eq(fs.topic, "foo:bar:start");
   do_check_eq(fs.data, "baz");
@@ -68,17 +68,17 @@ function run_test() {
 
   _("Make sure a throwy call will call and throw with notifications");
   ret = null;
   rightThis = didCall = false;
   let ts = makeObs("foo:bad:start");
   let tf = makeObs("foo:bad:finish");
   let te = makeObs("foo:bad:error");
   try {
-    ret = obj.throwy();
+    ret = await obj.throwy();
     do_throw("throwy should have thrown!");
   } catch (ex) {
     do_check_eq(ex, 10);
   }
   do_check_eq(ret, null);
   do_check_true(rightThis);
   do_check_true(didCall);
 
@@ -91,9 +91,9 @@ function run_test() {
   do_check_eq(tf.subject, undefined);
   do_check_eq(tf.topic, undefined);
   do_check_eq(tf.data, undefined);
 
   do_check_eq(te.state, 4);
   do_check_eq(te.subject, 10);
   do_check_eq(te.topic, "foo:bad:error");
   do_check_eq(te.data, "one");
-}
+});
--- a/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm
+++ b/services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm
@@ -190,17 +190,17 @@ var Authentication = {
       cb(error, false);
     });
 
     try {
       cb.wait();
 
       if (Weave.Status.login !== Weave.LOGIN_SUCCEEDED) {
         Logger.logInfo("Logging into Weave.");
-        Weave.Service.login();
+        Async.promiseSpinningly(Weave.Service.login());
         Logger.AssertEqual(Weave.Status.login, Weave.LOGIN_SUCCEEDED,
                            "Weave logged in");
       }
 
       return true;
     } catch (error) {
       throw new Error("signIn() failed with: " + error.message);
     }
--- a/services/sync/tps/extensions/tps/resource/modules/addons.jsm
+++ b/services/sync/tps/extensions/tps/resource/modules/addons.jsm
@@ -49,31 +49,23 @@ function Addon(TPS, id) {
   this.id = id;
 }
 
 Addon.prototype = {
   addon: null,
 
   uninstall: function uninstall() {
     // find our addon locally
-    let cb = Async.makeSyncCallback();
-    AddonManager.getAddonByID(this.id, cb);
-    let addon = Async.waitForSyncCallback(cb);
-
+    let addon = Async.promiseSpinningly(AddonManager.getAddonByID(this.id));
     Logger.AssertTrue(!!addon, "could not find addon " + this.id + " to uninstall");
-
-    cb = Async.makeSpinningCallback();
-    AddonUtils.uninstallAddon(addon, cb);
-    cb.wait();
+    Async.promiseSpinningly(AddonUtils.uninstallAddon(addon));
   },
 
   find: function find(state) {
-    let cb = Async.makeSyncCallback();
-    AddonManager.getAddonByID(this.id, cb);
-    let addon = Async.waitForSyncCallback(cb);
+    let addon = Async.promiseSpinningly(AddonManager.getAddonByID(this.id));
 
     if (!addon) {
       Logger.logInfo("Could not find add-on with ID: " + this.id);
       return false;
     }
 
     this.addon = addon;
 
--- a/services/sync/tps/extensions/tps/resource/tps.jsm
+++ b/services/sync/tps/extensions/tps/resource/tps.jsm
@@ -690,17 +690,17 @@ var TPS = {
         clientRecordDumpStr = "<Cyclic value>";
       }
       try {
         serverRecordDumpStr = JSON.stringify(serverRecords);
       } catch (e) {
         // as above
         serverRecordDumpStr = "<Cyclic value>";
       }
-      let { problemData } = validator.compareClientWithServer(clientRecords, serverRecords);
+      let { problemData } = Async.promiseSpinningly(validator.compareClientWithServer(clientRecords, serverRecords));
       for (let { name, count } of problemData.getSummary()) {
         if (count) {
           Logger.logInfo(`Validation problem: "${name}": ${JSON.stringify(problemData[name])}`);
         }
         Logger.AssertEqual(count, 0, `Validation error for "${engineName}" of type "${name}"`);
       }
     } catch (e) {
       // Dump the client records if possible
@@ -926,17 +926,16 @@ var TPS = {
 
       // If we have restricted the active engines, unregister engines we don't
       // care about.
       if (settings.ignoreUnusedEngines && Array.isArray(this._enabledEngines)) {
         let names = {};
         for (let name of this._enabledEngines) {
           names[name] = true;
         }
-
         for (let engine of Weave.Service.engineManager.getEnabled()) {
           if (!(engine.name in names)) {
             Logger.logInfo("Unregistering unused engine: " + engine.name);
             Weave.Service.engineManager.unregister(engine);
           }
         }
       }
       Logger.logInfo("Starting phase " + this._currentPhase);
@@ -1176,26 +1175,26 @@ var TPS = {
       Weave.Svc.Prefs.reset("firstSync");
     }
 
     this.Login(false);
     ++this._syncCount;
 
     this._triggeredSync = true;
     this.StartAsyncOperation();
-    Weave.Service.sync();
+    Async.promiseSpinningly(Weave.Service.sync());
     Logger.logInfo("Sync is complete");
   },
 
   WipeServer: function TPS__WipeServer() {
     Logger.logInfo("Wiping data from server.");
 
     this.Login(false);
-    Weave.Service.login();
-    Weave.Service.wipeServer();
+    Async.promiseSpinningly(Weave.Service.login());
+    Async.promiseSpinningly(Weave.Service.wipeServer());
   },
 
   /**
    * Action which ensures changes are being tracked before returning.
    */
   EnsureTracking: function EnsureTracking() {
     this.Login(false);
     this.waitForTracking();