Bug 633062 p4 - Remove miscellaneous uses of event loop spinning in services/. r?markh
author Edouard Oger <eoger@fastmail.com>
Fri, 08 Dec 2017 14:41:02 -0500
changeset 749589 da71b1d5fb337ec94dc9d182eeda6d8617a86d9f
parent 749588 046b023350ad3b3f04fd1e09d89167c3b1d86f50
child 749590 c3238b5ac6bb2c3e362e712a8718d2ac4114bf28
push id 97449
push user bmo:eoger@fastmail.com
push date Wed, 31 Jan 2018 19:28:47 +0000
reviewers markh
bugs 633062
milestone 60.0a1
Bug 633062 p4 - Remove miscellaneous uses of event loop spinning in services/. r?markh MozReview-Commit-ID: IDGWJevEHLK
services/common/tests/unit/test_tokenauthenticatedrequest.js
services/sync/modules/browserid_identity.js
services/sync/modules/collection_validator.js
services/sync/modules/engines/addons.js
services/sync/modules/resource.js
services/sync/modules/service.js
services/sync/modules/stages/enginesync.js
services/sync/tests/unit/test_addons_store.js
services/sync/tests/unit/test_addons_tracker.js
services/sync/tests/unit/test_browserid_identity.js
services/sync/tests/unit/test_clients_engine.js
services/sync/tests/unit/test_fxa_service_cluster.js
services/sync/tests/unit/test_history_store.js
services/sync/tests/unit/test_hmac_error.js
services/sync/tests/unit/test_service_cluster.js
services/sync/tests/unit/test_telemetry.js
services/sync/tps/extensions/tps/resource/modules/addons.jsm
services/sync/tps/extensions/tps/resource/modules/history.jsm
services/sync/tps/extensions/tps/resource/tps.jsm
toolkit/modules/tests/xpcshell/test_sqlite.js
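Every hunk below applies the same mechanical conversion: code that blocked on asynchronous work with Async.makeSpinningCallback()/cb.wait() or Async.promiseSpinningly() becomes an async function that awaits a promise, wrapping callback-style APIs in new Promise(...) where needed. A minimal sketch of the pattern, assuming a hypothetical callback-style API doWork(callback) whose callback receives (error, result); doWork is illustrative and not part of this patch:

  // Before: spin the event loop until the callback fires.
  function getResultSync() {
    let cb = Async.makeSpinningCallback();
    doWork(cb);        // cb is invoked as cb(error, result)
    return cb.wait();  // blocks by spinning; throws if an error was passed
  }

  // After: wrap the callback in a promise and await it; callers await in turn.
  async function getResult() {
    return new Promise((resolve, reject) => {
      doWork((error, result) => (error ? reject(error) : resolve(result)));
    });
  }
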
--- a/services/common/tests/unit/test_tokenauthenticatedrequest.js
+++ b/services/common/tests/unit/test_tokenauthenticatedrequest.js
@@ -6,17 +6,17 @@ ChromeUtils.import("resource://services-
 ChromeUtils.import("resource://services-common/rest.js");
 ChromeUtils.import("resource://services-common/utils.js");
 
 function run_test() {
   initTestLogging("Trace");
   run_next_test();
 }
 
-add_test(function test_authenticated_request() {
+add_task(async function test_authenticated_request() {
   _("Ensure that sending a MAC authenticated GET request works as expected.");
 
   let message = "Great Success!";
 
   // TODO: We use a preset key here, but use getTokenFromBrowserIDAssertion()
   // from TokenServerClient to get a real one when possible. (Bug 745800)
   let id = "eyJleHBpcmVzIjogMTM2NTAxMDg5OC4x";
   let key = "qTZf4ZFpAMpMoeSsX3zVRjiqmNs=";
@@ -36,17 +36,15 @@ add_test(function test_authenticated_req
       response.bodyOutputStream.write(message, message.length);
     }
   });
   let uri = CommonUtils.makeURI(server.baseURI + "/foo");
   let sig = CryptoUtils.computeHTTPMACSHA1(id, key, method, uri, extra);
   auth = sig.getHeader();
 
   let req = new TokenAuthenticatedRESTRequest(uri, {id, key}, extra);
-  let cb = Async.makeSpinningCallback();
-  req.get(cb);
-  let result = cb.wait();
+  let error = await new Promise(res => req.get(res));
 
-  Assert.equal(null, result);
+  Assert.equal(null, error);
   Assert.equal(message, req.response.body);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
--- a/services/sync/modules/browserid_identity.js
+++ b/services/sync/modules/browserid_identity.js
@@ -723,29 +723,24 @@ this.BrowserIDManager.prototype = {
   getResourceAuthenticator() {
     return this._getAuthenticationHeader.bind(this);
   },
 
   /**
    * @return a Hawk HTTP Authorization Header, lightly wrapped, for the .uri
    * of a RESTRequest or AsyncResponse object.
    */
-  _getAuthenticationHeader(httpObject, method) {
-    let cb = Async.makeSpinningCallback();
-    this._ensureValidToken().then(cb, cb);
+  async _getAuthenticationHeader(httpObject, method) {
     // Note that in failure states we return null, causing the request to be
     // made without authorization headers, thereby presumably causing a 401,
     // which causes Sync to log out. If we throw, this may not happen as
     // expected.
     try {
-      cb.wait();
+      await this._ensureValidToken();
     } catch (ex) {
-      if (Async.isShutdownException(ex)) {
-        throw ex;
-      }
       this._log.error("Failed to fetch a token for authentication", ex);
       return null;
     }
     if (!this._token) {
       return null;
     }
     let credentials = {algorithm: "sha256",
                        id: this._token.id,
@@ -791,19 +786,19 @@ function BrowserIDClusterManager(service
 BrowserIDClusterManager.prototype = {
   get identity() {
     return this.service.identity;
   },
 
   /**
    * Determine the cluster for the current user and update state.
    */
-  setCluster() {
+  async setCluster() {
     // Make sure we didn't get some unexpected response for the cluster.
-    let cluster = this._findCluster();
+    let cluster = await this._findCluster();
     this._log.debug("Cluster value = " + cluster);
     if (cluster == null) {
       return false;
     }
 
     // Convert from the funky "String object with additional properties" that
     // resource.js returns to a plain-old string.
     cluster = cluster.toString();
@@ -813,18 +808,30 @@ BrowserIDClusterManager.prototype = {
     }
 
     this._log.debug("Setting cluster to " + cluster);
     this.service.clusterURL = cluster;
 
     return true;
   },
 
-  _findCluster() {
-    let endPointFromIdentityToken = () => {
+  async _findCluster() {
+    try {
+      // Ensure we are ready to authenticate and have a valid token.
+      await this.identity.whenReadyToAuthenticate.promise;
+      // We need to handle node reassignment here.  If we are being asked
+      // for a clusterURL while the service already has a clusterURL, then
+      // it's likely a 401 was received using the existing token - in which
+      // case we just discard the existing token and fetch a new one.
+      if (this.service.clusterURL) {
+        log.debug("_findCluster has a pre-existing clusterURL, so discarding the current token");
+        this.identity._token = null;
+      }
+      await this.identity._ensureValidToken();
+
       // The only reason (in theory ;) that we can end up with a null token
       // is when this._fxaService.canGetKeys() returned false.  In turn, this
       // should only happen if the master-password is locked or the credentials
       // storage is screwed, and in those cases we shouldn't have started
       // syncing so shouldn't get here anyway.
       // But better safe than sorry! To keep things clearer, throw an explicit
       // exception - the message will appear in the logs and the error will be
       // treated as transient.
@@ -835,62 +842,35 @@ BrowserIDClusterManager.prototype = {
       // For Sync 1.5 storage endpoints, we use the base endpoint verbatim.
       // However, it should end in "/" because we will extend it with
       // well known path components. So we add a "/" if it's missing.
       if (!endpoint.endsWith("/")) {
         endpoint += "/";
       }
       log.debug("_findCluster returning " + endpoint);
       return endpoint;
-    };
-
-    // Spinningly ensure we are ready to authenticate and have a valid token.
-    let promiseClusterURL = () => {
-      return this.identity.whenReadyToAuthenticate.promise.then(
-        () => {
-          // We need to handle node reassignment here.  If we are being asked
-          // for a clusterURL while the service already has a clusterURL, then
-          // it's likely a 401 was received using the existing token - in which
-          // case we just discard the existing token and fetch a new one.
-          if (this.service.clusterURL) {
-            log.debug("_findCluster has a pre-existing clusterURL, so discarding the current token");
-            this.identity._token = null;
-          }
-          return this.identity._ensureValidToken();
-        }
-      ).then(endPointFromIdentityToken
-      );
-    };
-
-    let cb = Async.makeSpinningCallback();
-    promiseClusterURL().then(function(clusterURL) {
-      cb(null, clusterURL);
-    }).catch(err => {
+    } catch (err) {
       log.info("Failed to fetch the cluster URL", err);
       // service.js's verifyLogin() method will attempt to fetch a cluster
       // URL when it sees a 401.  If it gets null, it treats it as a "real"
       // auth error and sets Status.login to LOGIN_FAILED_LOGIN_REJECTED, which
       // in turn causes a notification bar to appear informing the user they
       // need to re-authenticate.
       // On the other hand, if fetching the cluster URL fails with an exception,
       // verifyLogin() assumes it is a transient error, and thus doesn't show
       // the notification bar under the assumption the issue will resolve
       // itself.
       // Thus:
       // * On a real 401, we must return null.
       // * On any other problem we must let an exception bubble up.
       if (err instanceof AuthenticationError) {
-        // callback with no error and a null result - cb.wait() returns null.
-        cb(null, null);
-      } else {
-        // callback with an error - cb.wait() completes by raising an exception.
-        cb(err);
+        return null;
       }
-    });
-    return cb.wait();
+      throw err;
+    }
   },
 
   getUserBaseURL() {
     // Legacy Sync and FxA Sync construct the userBaseURL differently. Legacy
     // Sync appends path components onto an empty path, and in FxA Sync the
     // token server constructs this for us in an opaque manner. Since the
     // cluster manager already sets the clusterURL on Service and also has
     // access to the current identity, we added this functionality here.
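The comments inside _findCluster() above spell out its error contract: a null return means a genuine authentication failure (401), while any other problem must surface as a rejection. A hedged sketch of how a caller is expected to consume that contract (clusterManager and the status handling are illustrative, not part of this patch):

  try {
    let cluster = await clusterManager._findCluster();
    if (cluster === null) {
      // Real auth error: verifyLogin() maps this to LOGIN_FAILED_LOGIN_REJECTED,
      // which surfaces a re-authentication notification to the user.
    }
  } catch (err) {
    // Transient failure (network error, tokenserver 5xx, ...): treated as
    // recoverable, so no notification bar is shown.
  }
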
--- a/services/sync/modules/collection_validator.js
+++ b/services/sync/modules/collection_validator.js
@@ -118,17 +118,17 @@ class CollectionValidator {
   // Return whether or not a server item should be present on the client. Expected
   // to be overridden.
   clientUnderstands(item) {
     return true;
   }
 
   // Return whether or not a client item should be present on the server. Expected
   // to be overridden
-  syncedByClient(item) {
+  async syncedByClient(item) {
     return true;
   }
 
   // Compare the server item and the client item, and return a list of property
   // names that are different. Can be overridden if needed.
   getDifferences(client, server) {
     let differences = [];
     for (let prop of this.props) {
@@ -181,17 +181,17 @@ class CollectionValidator {
         }
         record.understood = this.clientUnderstands(record);
       }
     }
 
     let seenClient = new Map();
     for (let record of clientRecords) {
       let id = record[this.idProp];
-      record.shouldSync = this.syncedByClient(record);
+      record.shouldSync = await this.syncedByClient(record);
       let clientHasPossibleDupe = seenClient.has(id);
       if (clientHasPossibleDupe && record.shouldSync) {
         // Only report duplicate client IDs for syncable records.
         problems.clientDuplicates.push(id);
         continue;
       }
       seenClient.set(id, record);
       let combined = allRecords.get(id);
--- a/services/sync/modules/engines/addons.js
+++ b/services/sync/modules/engines/addons.js
@@ -178,17 +178,17 @@ AddonsEngine.prototype = {
         continue;
       }
 
       // Keep newest modified time.
       if (id in changes && changeTime < changes[id]) {
           continue;
       }
 
-      if (!this.isAddonSyncable(addons[id])) {
+      if (!(await this.isAddonSyncable(addons[id]))) {
         continue;
       }
 
       this._log.debug("Adding changed add-on from changes log: " + id);
       let addon = addons[id];
       changes[addon.guid] = changeTime.getTime() / 1000;
     }
 
@@ -234,16 +234,17 @@ AddonsEngine.prototype = {
    * This will load the reconciler's state from the file
    * system (if needed) and refresh the state of the reconciler.
    */
   async _refreshReconcilerState() {
     this._log.debug("Refreshing reconciler state");
     return this._reconciler.refreshGlobalState();
   },
 
+  // Returns a promise that resolves to a boolean.
   isAddonSyncable(addon, ignoreRepoCheck) {
     return this._store.isAddonSyncable(addon, ignoreRepoCheck);
   }
 };
 
 /**
  * This is the primary interface between Sync and the Addons Manager.
  *
@@ -286,17 +287,17 @@ AddonsStore.prototype = {
                        " for " + record.id);
         return;
       }
     }
 
     // Ignore incoming records for which an existing non-syncable addon
     // exists.
     let existingMeta = this.reconciler.addons[record.addonID];
-    if (existingMeta && !this.isAddonSyncable(existingMeta)) {
+    if (existingMeta && !(await this.isAddonSyncable(existingMeta))) {
       this._log.info("Ignoring incoming record for an existing but non-syncable addon", record.addonID);
       return;
     }
 
     await Store.prototype.applyIncoming.call(this, record);
   },
 
 
@@ -460,17 +461,17 @@ AddonsStore.prototype = {
    * This implements a core Store API.
    */
   async getAllIDs() {
     let ids = {};
 
     let addons = this.reconciler.addons;
     for (let id in addons) {
       let addon = addons[id];
-      if (this.isAddonSyncable(addon)) {
+      if ((await this.isAddonSyncable(addon))) {
         ids[addon.guid] = true;
       }
     }
 
     return ids;
   },
 
   /**
@@ -531,17 +532,17 @@ AddonsStore.prototype = {
    *
    * @param  addon
    *         Addon instance
    * @param ignoreRepoCheck
    *         Should we skip checking the Addons repository (primarially useful
    *         for testing and validation).
    * @return Boolean indicating whether it is appropriate for Sync
    */
-  isAddonSyncable: function isAddonSyncable(addon, ignoreRepoCheck = false) {
+  async isAddonSyncable(addon, ignoreRepoCheck = false) {
     // Currently, we limit syncable add-ons to those that are:
     //   1) In a well-defined set of types
     //   2) Installed in the current profile
     //   3) Not installed by a foreign entity (i.e. installed by the app)
     //      since they act like global extensions.
     //   4) Is not a hotfix.
     //   5) The addons XPIProvider doesn't veto it (i.e not being installed in
     //      the profile directory, or any other reasons it says the addon can't
@@ -584,19 +585,19 @@ AddonsStore.prototype = {
     // If the AddonRepository's cache isn't enabled (which it typically isn't
     // in tests), getCachedAddonByID always returns null - so skip the check
     // in that case. We also provide a way to specifically opt-out of the check
     // even if the cache is enabled, which is used by the validators.
     if (ignoreRepoCheck || !AddonRepository.cacheEnabled) {
       return true;
     }
 
-    let cb = Async.makeSyncCallback();
-    AddonRepository.getCachedAddonByID(addon.id, cb);
-    let result = Async.waitForSyncCallback(cb);
+    let result = await new Promise(res => {
+      AddonRepository.getCachedAddonByID(addon.id, res);
+    });
 
     if (!result) {
       this._log.debug(addon.id + " not syncable: add-on not found in add-on " +
                       "repository.");
       return false;
     }
 
     return this.isSourceURITrusted(result.sourceURI);
@@ -700,17 +701,17 @@ AddonsTracker.prototype = {
    */
   changeListener: function changeHandler(date, change, addon) {
     this._log.debug("changeListener invoked: " + change + " " + addon.id);
     // Ignore changes that occur during sync.
     if (this.ignoreAll) {
       return;
     }
 
-    if (!this.store.isAddonSyncable(addon)) {
+    if (!Async.promiseSpinningly(this.store.isAddonSyncable(addon))) {
       this._log.debug("Ignoring change because add-on isn't syncable: " +
                       addon.id);
       return;
     }
 
     if (this.addChangedID(addon.guid, date.getTime() / 1000)) {
       this.score += SCORE_INCREMENT_XLARGE;
     }
@@ -778,15 +779,17 @@ class AddonValidator extends CollectionV
     }
     return item;
   }
 
   clientUnderstands(item) {
     return item.applicationID === Services.appinfo.ID;
   }
 
-  syncedByClient(item) {
+  async syncedByClient(item) {
     return !item.original.hidden &&
            !item.original.isSystem &&
            !(item.original.pendingOperations & AddonManager.PENDING_UNINSTALL) &&
+           // No need to await the returned promise explicitly:
+           // |expr1 && expr2| evaluates to expr2 when expr1 is truthy.
            this.engine.isAddonSyncable(item.original, true);
   }
 }
--- a/services/sync/modules/resource.js
+++ b/services/sync/modules/resource.js
@@ -42,16 +42,17 @@ XPCOMUtils.defineLazyPreferenceGetter(Re
                                       "SEND_VERSION_INFO",
                                       "services.sync.sendVersionInfo",
                                       true);
 Resource.prototype = {
   _logName: "Sync.Resource",
 
   /**
    * Callback to be invoked at request time to add authentication details.
+   * If the callback returns a promise, it will be awaited.
    *
    * By default, a global authenticator is provided. If this is set, it will
    * be used instead of the global one.
    */
   authenticator: null,
 
   // Wait 5 minutes before killing a request.
   ABORT_TIMEOUT: 300000,
@@ -88,25 +89,25 @@ Resource.prototype = {
     }
     return null;
   },
 
   /**
    * @param {string} method HTTP method
    * @returns {Headers}
    */
-  _buildHeaders(method) {
+  async _buildHeaders(method) {
     const headers = new Headers(this._headers);
 
     if (Resource.SEND_VERSION_INFO) {
       headers.append("user-agent", Utils.userAgent);
     }
 
     if (this.authenticator) {
-      const result = this.authenticator(this, method);
+      const result = await this.authenticator(this, method);
       if (result && result.headers) {
         for (const [k, v] of Object.entries(result.headers)) {
           headers.append(k.toLowerCase(), v);
         }
       }
     } else {
       this._log.debug("No authenticator found.");
     }
@@ -130,18 +131,18 @@ Resource.prototype = {
   },
 
   /**
    * @param {string} method HTTP method
    * @param {string} data HTTP body
    * @param {object} signal AbortSignal instance
    * @returns {Request}
    */
-  _createRequest(method, data, signal) {
-    const headers = this._buildHeaders(method);
+  async _createRequest(method, data, signal) {
+    const headers = await this._buildHeaders(method);
     const init = {
       cache: "no-store", // No cache.
       headers,
       method,
       signal,
       mozErrors: true // Return nsresult error codes instead of a generic
                       // NetworkError when fetch rejects.
     };
@@ -159,17 +160,17 @@ Resource.prototype = {
 
   /**
    * @param {string} method HTTP method
    * @param {string} [data] HTTP body
    * @returns {Response}
    */
   async _doRequest(method, data = null) {
     const controller = new AbortController();
-    const request = this._createRequest(method, data, controller.signal);
+    const request = await this._createRequest(method, data, controller.signal);
     const responsePromise = fetch(request); // Rejects on network failure.
     let didTimeout = false;
     const timeoutId = setTimeout(() => {
       didTimeout = true;
       this._log.error(`Request timed out after ${this.ABORT_TIMEOUT}ms. Aborting.`);
       controller.abort();
     }, this.ABORT_TIMEOUT);
     let response;
--- a/services/sync/modules/service.js
+++ b/services/sync/modules/service.js
@@ -436,17 +436,20 @@ Sync11Service.prototype = {
           this.sync({why: "collection_changed", engines: []}).catch(e => {
             this._log.error(e);
           });
         }
         break;
       case "fxaccounts:device_disconnected":
         data = JSON.parse(data);
         if (!data.isLocalDevice) {
-          Async.promiseSpinningly(this.clientsEngine.updateKnownStaleClients());
+          // Refresh the known stale clients list in the background.
+          this.clientsEngine.updateKnownStaleClients().catch(e => {
+            this._log.error(e);
+          });
         }
         break;
       case "weave:service:setup-complete":
         let status = this._checkSetup();
         if (status != STATUS_DISABLED && status != CLIENT_NOT_CONFIGURED)
             Svc.Obs.notify("weave:engine:start-tracking");
         break;
       case "nsPref:changed":
@@ -646,17 +649,17 @@ Sync11Service.prototype = {
       this.status.login = unlockedState;
       return false;
     }
 
     try {
       // Make sure we have a cluster to verify against.
       // This is a little weird, if we don't get a node we pretend
       // to succeed, since that probably means we just don't have storage.
-      if (this.clusterURL == "" && !this._clusterManager.setCluster()) {
+      if (this.clusterURL == "" && !(await this._clusterManager.setCluster())) {
         this.status.sync = NO_SYNC_NODE_FOUND;
         return true;
       }
 
       // Fetch collection info on every startup.
       let test = await this.resource(this.infoURL).get();
 
       switch (test.status) {
@@ -685,17 +688,17 @@ Sync11Service.prototype = {
           return false;
 
         case 401:
           this._log.warn("401: login failed.");
           // Fall through to the 404 case.
 
         case 404:
           // Check that we're verifying with the correct cluster
-          if (allow40XRecovery && this._clusterManager.setCluster()) {
+          if (allow40XRecovery && (await this._clusterManager.setCluster())) {
             return await this.verifyLogin(false);
           }
 
           // We must have the right cluster, but the server doesn't expect us.
           // The implications of this depend on the identity being used - for
           // the legacy identity, it's an authoritatively "incorrect password",
           // (ie, LOGIN_FAILED_LOGIN_REJECTED) but for FxA it probably means
           // "transient error fetching auth token".
--- a/services/sync/modules/stages/enginesync.js
+++ b/services/sync/modules/stages/enginesync.js
@@ -46,17 +46,17 @@ EngineSynchronizer.prototype = {
 
       // this is a purposeful abort rather than a failure, so don't set
       // any status bits
       reason = "Can't sync: " + reason;
       throw new Error(reason);
     }
 
     // If we don't have a node, get one. If that fails, retry in 10 minutes.
-    if (!this.service.clusterURL && !this.service._clusterManager.setCluster()) {
+    if (!this.service.clusterURL && !(await this.service._clusterManager.setCluster())) {
       this.service.status.sync = NO_SYNC_NODE_FOUND;
       this._log.info("No cluster URL found. Cannot sync.");
       return;
     }
 
     // Ping the server with a special info request once a day.
     let infoURL = this.service.infoURL;
     let now = Math.floor(Date.now() / 1000);
--- a/services/sync/tests/unit/test_addons_store.js
+++ b/services/sync/tests/unit/test_addons_store.js
@@ -245,49 +245,49 @@ add_task(async function test_apply_unins
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, true));
   let failed = await store.applyIncomingBatch(records);
   Assert.equal(0, failed.length);
 
   addon = getAddonFromAddonManagerByID(addon.id);
   Assert.equal(null, addon);
 });
 
-add_test(function test_addon_syncability() {
+add_task(async function test_addon_syncability() {
   _("Ensure isAddonSyncable functions properly.");
 
   Svc.Prefs.set("addons.trustedSourceHostnames",
                 "addons.mozilla.org,other.example.com");
 
-  Assert.ok(!store.isAddonSyncable(null));
+  Assert.ok(!(await store.isAddonSyncable(null)));
 
   let addon = installAddon("test_bootstrap1_1");
-  Assert.ok(store.isAddonSyncable(addon));
+  Assert.ok((await store.isAddonSyncable(addon)));
 
   let dummy = {};
   const KEYS = ["id", "syncGUID", "type", "scope", "foreignInstall", "isSyncable"];
   for (let k of KEYS) {
     dummy[k] = addon[k];
   }
 
-  Assert.ok(store.isAddonSyncable(dummy));
+  Assert.ok((await store.isAddonSyncable(dummy)));
 
   dummy.type = "UNSUPPORTED";
-  Assert.ok(!store.isAddonSyncable(dummy));
+  Assert.ok(!(await store.isAddonSyncable(dummy)));
   dummy.type = addon.type;
 
   dummy.scope = 0;
-  Assert.ok(!store.isAddonSyncable(dummy));
+  Assert.ok(!(await store.isAddonSyncable(dummy)));
   dummy.scope = addon.scope;
 
   dummy.isSyncable = false;
-  Assert.ok(!store.isAddonSyncable(dummy));
+  Assert.ok(!(await store.isAddonSyncable(dummy)));
   dummy.isSyncable = addon.isSyncable;
 
   dummy.foreignInstall = true;
-  Assert.ok(!store.isAddonSyncable(dummy));
+  Assert.ok(!(await store.isAddonSyncable(dummy)));
   dummy.foreignInstall = false;
 
   uninstallAddon(addon);
 
   Assert.ok(!store.isSourceURITrusted(null));
 
   let trusted = [
     "https://addons.mozilla.org/foo",
@@ -312,18 +312,16 @@ add_test(function test_addon_syncability
   for (let uri of trusted) {
     Assert.ok(!store.isSourceURITrusted(Services.io.newURI(uri)));
   }
 
   Svc.Prefs.set("addons.trustedSourceHostnames", "addons.mozilla.org");
   Assert.ok(store.isSourceURITrusted(Services.io.newURI("https://addons.mozilla.org/foo")));
 
   Svc.Prefs.reset("addons.trustedSourceHostnames");
-
-  run_next_test();
 });
 
 add_task(async function test_get_all_ids() {
   _("Ensures that getAllIDs() returns an appropriate set.");
 
   _("Installing two addons.");
   // XXX - this test seems broken - at this point, before we've installed the
   // addons below, store.getAllIDs() returns all addons installed by previous
@@ -331,19 +329,19 @@ add_task(async function test_get_all_ids
   // So if any tests above ever add a new addon ID, they are going to need to
   // be added here too.
   // do_check_eq(0, Object.keys(store.getAllIDs()).length);
   let addon1 = installAddon("test_install1");
   let addon2 = installAddon("test_bootstrap1_1");
   let addon3 = installAddon("test_install3");
 
   _("Ensure they're syncable.");
-  Assert.ok(store.isAddonSyncable(addon1));
-  Assert.ok(store.isAddonSyncable(addon2));
-  Assert.ok(store.isAddonSyncable(addon3));
+  Assert.ok((await store.isAddonSyncable(addon1)));
+  Assert.ok((await store.isAddonSyncable(addon2)));
+  Assert.ok((await store.isAddonSyncable(addon3)));
 
   let ids = await store.getAllIDs();
 
   Assert.equal("object", typeof(ids));
   Assert.equal(3, Object.keys(ids).length);
   Assert.ok(addon1.syncGUID in ids);
   Assert.ok(addon2.syncGUID in ids);
   Assert.ok(addon3.syncGUID in ids);
--- a/services/sync/tests/unit/test_addons_tracker.js
+++ b/services/sync/tests/unit/test_addons_tracker.js
@@ -113,36 +113,36 @@ add_task(async function test_track_user_
   let addon = installAddon("test_bootstrap1_1");
   Assert.ok(!addon.userDisabled);
   Assert.ok(!addon.appDisabled);
   Assert.ok(addon.isActive);
 
   Svc.Obs.notify("weave:engine:start-tracking");
   Assert.equal(0, tracker.score);
 
-  let cb = Async.makeSyncCallback();
-
-  let listener = {
-    onDisabled(disabled) {
-      _("onDisabled");
-      if (disabled.id == addon.id) {
-        AddonManager.removeAddonListener(listener);
-        cb();
+  let disabledPromise = new Promise(res => {
+    let listener = {
+      onDisabled(disabled) {
+        _("onDisabled");
+        if (disabled.id == addon.id) {
+          AddonManager.removeAddonListener(listener);
+          res();
+        }
+      },
+      onDisabling(disabling) {
+        _("onDisabling add-on");
       }
-    },
-    onDisabling(disabling) {
-      _("onDisabling add-on");
-    }
-  };
-  AddonManager.addAddonListener(listener);
+    };
+    AddonManager.addAddonListener(listener);
+  });
 
   _("Disabling add-on");
   addon.userDisabled = true;
   _("Disabling started...");
-  Async.waitForSyncCallback(cb);
+  await disabledPromise;
 
   let changed = tracker.changedIDs;
   Assert.equal(1, Object.keys(changed).length);
   Assert.ok(addon.syncGUID in changed);
   Assert.equal(SCORE_INCREMENT_XLARGE, tracker.score);
 
   uninstallAddon(addon);
   await cleanup();
--- a/services/sync/tests/unit/test_browserid_identity.js
+++ b/services/sync/tests/unit/test_browserid_identity.js
@@ -139,35 +139,34 @@ add_task(async function test_initialiali
     configureFxAccountIdentity(globalBrowseridManager, identityConfig);
 
     await globalBrowseridManager.initializeWithCurrentIdentity();
     await globalBrowseridManager.whenReadyToAuthenticate.promise;
     Assert.equal(Status.login, LOGIN_SUCCEEDED, "login succeeded even without keys");
     Assert.equal(globalBrowseridManager._token, null, "we don't have a token");
 });
 
-add_test(function test_getResourceAuthenticator() {
+add_task(async function test_getResourceAuthenticator() {
     _("BrowserIDManager supplies a Resource Authenticator callback which returns a Hawk header.");
     configureFxAccountIdentity(globalBrowseridManager);
     let authenticator = globalBrowseridManager.getResourceAuthenticator();
     Assert.ok(!!authenticator);
     let req = {uri: CommonUtils.makeURI(
       "https://example.net/somewhere/over/the/rainbow"),
                method: "GET"};
-    let output = authenticator(req, "GET");
+    let output = await authenticator(req, "GET");
     Assert.ok("headers" in output);
     Assert.ok("authorization" in output.headers);
     Assert.ok(output.headers.authorization.startsWith("Hawk"));
     _("Expected internal state after successful call.");
     Assert.equal(globalBrowseridManager._token.uid, globalIdentityConfig.fxaccount.token.uid);
-    run_next_test();
   }
 );
 
-add_test(function test_resourceAuthenticatorSkew() {
+add_task(async function test_resourceAuthenticatorSkew() {
   _("BrowserIDManager Resource Authenticator compensates for clock skew in Hawk header.");
 
   // Clock is skewed 12 hours into the future
   // We pick a date in the past so we don't risk concealing bugs in code that
   // uses new Date() instead of our given date.
   let now = new Date("Fri Apr 09 2004 00:00:00 GMT-0700").valueOf() + 12 * HOUR_MS;
   let browseridManager = new BrowserIDManager();
   let hawkClient = new HawkClient("https://example.net/v1", "/foo");
@@ -206,31 +205,29 @@ add_test(function test_resourceAuthentic
       localtimeOffsetMsec);
 
   Assert.equal(browseridManager._fxaService.now(), now);
   Assert.equal(browseridManager._fxaService.localtimeOffsetMsec,
       localtimeOffsetMsec);
 
   let request = new Resource("https://example.net/i/like/pie/");
   let authenticator = browseridManager.getResourceAuthenticator();
-  let output = authenticator(request, "GET");
+  let output = await authenticator(request, "GET");
   dump("output" + JSON.stringify(output));
   let authHeader = output.headers.authorization;
   Assert.ok(authHeader.startsWith("Hawk"));
 
   // Skew correction is applied in the header and we're within the two-minute
   // window.
   Assert.equal(getTimestamp(authHeader), now - 12 * HOUR_MS);
   Assert.ok(
       (getTimestampDelta(authHeader, now) - 12 * HOUR_MS) < 2 * MINUTE_MS);
-
-  run_next_test();
 });
 
-add_test(function test_RESTResourceAuthenticatorSkew() {
+add_task(async function test_RESTResourceAuthenticatorSkew() {
   _("BrowserIDManager REST Resource Authenticator compensates for clock skew in Hawk header.");
 
   // Clock is skewed 12 hours into the future from our arbitary date
   let now = new Date("Fri Apr 09 2004 00:00:00 GMT-0700").valueOf() + 12 * HOUR_MS;
   let browseridManager = new BrowserIDManager();
   let hawkClient = new HawkClient("https://example.net/v1", "/foo");
 
   // mock fxa hawk client skew
@@ -250,28 +247,26 @@ add_test(function test_RESTResourceAuthe
   fxaInternal.fxAccountsClient = fxaClient;
 
   configureFxAccountIdentity(browseridManager, globalIdentityConfig, fxaInternal);
 
   Assert.equal(browseridManager._fxaService.internal.now(), now);
 
   let request = new Resource("https://example.net/i/like/pie/");
   let authenticator = browseridManager.getResourceAuthenticator();
-  let output = authenticator(request, "GET");
+  let output = await authenticator(request, "GET");
   dump("output" + JSON.stringify(output));
   let authHeader = output.headers.authorization;
   Assert.ok(authHeader.startsWith("Hawk"));
 
   // Skew correction is applied in the header and we're within the two-minute
   // window.
   Assert.equal(getTimestamp(authHeader), now - 12 * HOUR_MS);
   Assert.ok(
       (getTimestampDelta(authHeader, now) - 12 * HOUR_MS) < 2 * MINUTE_MS);
-
-  run_next_test();
 });
 
 add_task(async function test_ensureLoggedIn() {
   configureFxAccountIdentity(globalBrowseridManager);
   await globalBrowseridManager.initializeWithCurrentIdentity();
   await globalBrowseridManager.whenReadyToAuthenticate.promise;
   Assert.equal(Status.login, LOGIN_SUCCEEDED, "original initialize worked");
   await globalBrowseridManager.ensureLoggedIn();
@@ -297,40 +292,39 @@ add_task(async function test_ensureLogge
                        "LOGIN_FAILED_LOGIN_REJECTED should have caused immediate rejection");
   Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED,
                "status should remain LOGIN_FAILED_LOGIN_REJECTED");
   Status.login = LOGIN_FAILED_NETWORK_ERROR;
   await globalBrowseridManager.ensureLoggedIn();
   Assert.equal(Status.login, LOGIN_SUCCEEDED, "final ensureLoggedIn worked");
 });
 
-add_test(function test_tokenExpiration() {
+add_task(async function test_tokenExpiration() {
     _("BrowserIDManager notices token expiration:");
     let bimExp = new BrowserIDManager();
     configureFxAccountIdentity(bimExp, globalIdentityConfig);
 
     let authenticator = bimExp.getResourceAuthenticator();
     Assert.ok(!!authenticator);
     let req = {uri: CommonUtils.makeURI(
       "https://example.net/somewhere/over/the/rainbow"),
                method: "GET"};
-    authenticator(req, "GET");
+    await authenticator(req, "GET");
 
     // Mock the clock.
     _("Forcing the token to expire ...");
     Object.defineProperty(bimExp, "_now", {
       value: function customNow() {
         return (Date.now() + 3000001);
       },
       writable: true,
     });
     Assert.ok(bimExp._token.expiration < bimExp._now());
     _("... means BrowserIDManager knows to re-fetch it on the next call.");
     Assert.ok(!bimExp.hasValidToken());
-    run_next_test();
   }
 );
 
 add_task(async function test_getTokenErrors() {
   _("BrowserIDManager correctly handles various failures to get a token.");
 
   _("Arrange for a 401 - Sync should reflect an auth error.");
   initializeIdentityWithTokenServerResponse({
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -1740,46 +1740,46 @@ add_task(async function test_other_clien
     engine.fxAccounts = fxAccounts;
     cleanup();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function device_disconnected_notification_updates_known_stale_clients() {
   const spyUpdate = sinon.spy(engine, "updateKnownStaleClients");
+
+  Services.obs.notifyObservers(null, "fxaccounts:device_disconnected",
+                               JSON.stringify({ isLocalDevice: false }));
+  ok(spyUpdate.calledOnce, "updateKnownStaleClients should be called");
+  spyUpdate.reset();
+
+  Services.obs.notifyObservers(null, "fxaccounts:device_disconnected",
+                               JSON.stringify({ isLocalDevice: true }));
+  ok(spyUpdate.notCalled, "updateKnownStaleClients should not be called");
+
+  spyUpdate.restore();
+});
+
+add_task(async function update_known_stale_clients() {
   const makeFakeClient = (id) => ({ id, fxaDeviceId: `fxa-${id}` });
   const clients = [makeFakeClient("one"), makeFakeClient("two"), makeFakeClient("three")];
   const stubRemoteClients = sinon.stub(engine._store, "_remoteClients").get(() => {
     return clients;
   });
   const stubRefresh = sinon.stub(engine, "_refreshKnownStaleClients", () => {
     engine._knownStaleFxADeviceIds = ["fxa-one", "fxa-two"];
   });
 
   engine._knownStaleFxADeviceIds = null;
-  Services.obs.notifyObservers(null, "fxaccounts:device_disconnected",
-                               JSON.stringify({ isLocalDevice: false }));
-  ok(spyUpdate.calledOnce, "updateKnownStaleClients should be called");
+  await engine.updateKnownStaleClients();
   ok(clients[0].stale);
   ok(clients[1].stale);
   ok(!clients[2].stale);
-  spyUpdate.reset();
-
-  ok(engine._knownStaleFxADeviceIds);
-  Services.obs.notifyObservers(null, "fxaccounts:device_disconnected",
-                               JSON.stringify({ isLocalDevice: false }));
-  ok(spyUpdate.calledOnce, "updateKnownStaleClients should be called");
-  spyUpdate.reset();
-
-  Services.obs.notifyObservers(null, "fxaccounts:device_disconnected",
-                               JSON.stringify({ isLocalDevice: true }));
-  ok(spyUpdate.notCalled, "updateKnownStaleClients should not be called");
 
   stubRemoteClients.restore();
-  spyUpdate.restore();
   stubRefresh.restore();
 });
 
 add_task(async function process_incoming_refreshes_known_stale_clients() {
   const stubProcessIncoming = sinon.stub(SyncEngine.prototype, "_processIncoming");
   const stubRefresh = sinon.stub(engine, "_refreshKnownStaleClients", () => {
     engine._knownStaleFxADeviceIds = ["one", "two"];
   });
--- a/services/sync/tests/unit/test_fxa_service_cluster.js
+++ b/services/sync/tests/unit/test_fxa_service_cluster.js
@@ -14,32 +14,30 @@ add_task(async function test_findCluster
     headers: [],
     body: "",
   });
 
   await Service.identity.initializeWithCurrentIdentity();
   await Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
                        "should reject due to 500");
 
-  Assert.throws(function() {
-    Service._clusterManager._findCluster();
-  });
+  await Assert.rejects(Service._clusterManager._findCluster());
 
   _("_findCluster() returns null on authentication errors.");
   initializeIdentityWithTokenServerResponse({
     status: 401,
     headers: {"content-type": "application/json"},
     body: "{}",
   });
 
   await Service.identity.initializeWithCurrentIdentity();
   await Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
                        "should reject due to 401");
 
-  let cluster = Service._clusterManager._findCluster();
+  let cluster = await Service._clusterManager._findCluster();
   Assert.strictEqual(cluster, null);
 
   _("_findCluster() works with correct tokenserver response.");
   let endpoint = "http://example.com/something";
   initializeIdentityWithTokenServerResponse({
     status: 200,
     headers: {"content-type": "application/json"},
     body:
@@ -49,14 +47,14 @@ add_task(async function test_findCluster
         id: "id",
         key: "key",
         uid: "uid",
       })
   });
 
   await Service.identity.initializeWithCurrentIdentity();
   await Service.identity.whenReadyToAuthenticate.promise;
-  cluster = Service._clusterManager._findCluster();
+  cluster = await Service._clusterManager._findCluster();
   // The cluster manager ensures a trailing "/"
   Assert.strictEqual(cluster, endpoint + "/");
 
   Svc.Prefs.resetBranch("");
 });
--- a/services/sync/tests/unit/test_history_store.js
+++ b/services/sync/tests/unit/test_history_store.js
@@ -1,13 +1,12 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
-ChromeUtils.import("resource://services-common/async.js");
 ChromeUtils.import("resource://services-common/utils.js");
 ChromeUtils.import("resource://services-sync/engines/history.js");
 ChromeUtils.import("resource://services-sync/service.js");
 ChromeUtils.import("resource://services-sync/util.js");
 
 const TIMESTAMP1 = (Date.now() - 103406528) * 1000;
 const TIMESTAMP2 = (Date.now() - 6592903) * 1000;
 const TIMESTAMP3 = (Date.now() - 123894) * 1000;
@@ -38,24 +37,27 @@ function promiseOnVisitObserved() {
 }
 
 function isDateApproximately(actual, expected, skewMillis = 1000) {
   let lowerBound = expected - skewMillis;
   let upperBound = expected + skewMillis;
   return actual >= lowerBound && actual <= upperBound;
 }
 
-var engine = new HistoryEngine(Service);
-Async.promiseSpinningly(engine.initialize());
-var store = engine._store;
+let engine, store, fxuri, fxguid, tburi, tbguid;
+
 async function applyEnsureNoFailures(records) {
   Assert.equal((await store.applyIncomingBatch(records)).length, 0);
 }
 
-var fxuri, fxguid, tburi, tbguid;
+add_task(async function setup() {
+  engine = new HistoryEngine(Service);
+  await engine.initialize();
+  store = engine._store;
+});
 
 add_task(async function test_store() {
   _("Verify that we've got an empty store to work with.");
   do_check_empty((await store.getAllIDs()));
 
   _("Let's create an entry in the database.");
   fxuri = CommonUtils.makeURI("http://getfirefox.com/");
 
--- a/services/sync/tests/unit/test_hmac_error.js
+++ b/services/sync/tests/unit/test_hmac_error.js
@@ -173,32 +173,32 @@ add_task(async function hmac_error_durin
     }
   };
 
   Svc.Obs.add("weave:service:sync:finish", obs);
   Svc.Obs.add("weave:service:sync:error", obs);
 
   // This kicks off the actual test. Split into a function here to allow this
   // source file to broadly follow actual execution order.
-  function onwards() {
+  async function onwards() {
     _("== Invoking first sync.");
-    Async.promiseSpinningly(Service.sync());
+    await Service.sync();
     _("We should not simultaneously have data but no keys on the server.");
     let hasData = rotaryColl.wbo("flying") ||
                   rotaryColl.wbo("scotsman");
     let hasKeys = keysWBO.modified;
 
     _("We correctly handle 401s by aborting the sync and starting again.");
     Assert.ok(!hasData == !hasKeys);
 
     _("Be prepared for the second (automatic) sync...");
   }
 
   _("Make sure that syncing again causes recovery.");
-  await new Promise(resolve => {
+  let callbacksPromise = new Promise(resolve => {
     onSyncFinished = function() {
       _("== First sync done.");
       _("---------------------------");
       onSyncFinished = function() {
         _("== Second (automatic) sync done.");
         let hasData = rotaryColl.wbo("flying") ||
                       rotaryColl.wbo("scotsman");
         let hasKeys = keysWBO.modified;
@@ -208,33 +208,33 @@ add_task(async function hmac_error_durin
         // lock...
         CommonUtils.nextTick(function() {
           _("Now a fresh sync will get no HMAC errors.");
           _("Partially resetting client, as if after a restart, and forcing redownload.");
           Service.collectionKeys.clear();
           engine.lastSync = 0;
           hmacErrorCount = 0;
 
-          onSyncFinished = function() {
+          onSyncFinished = async function() {
             // Two rotary items, one client record... no errors.
             Assert.equal(hmacErrorCount, 0);
 
             Svc.Obs.remove("weave:service:sync:finish", obs);
             Svc.Obs.remove("weave:service:sync:error", obs);
 
             (async () => {
               tracker.clearChangedIDs();
               await Service.engineManager.unregister(engine);
               Svc.Prefs.resetBranch("");
               Service.recordManager.clearCache();
               server.stop(resolve);
             })();
           };
 
-          Async.promiseSpinningly(Service.sync());
+          Service.sync();
         },
         this);
       };
     };
-
-    onwards();
   });
+  await onwards();
+  await callbacksPromise;
 });
--- a/services/sync/tests/unit/test_service_cluster.js
+++ b/services/sync/tests/unit/test_service_cluster.js
@@ -1,73 +1,57 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 ChromeUtils.import("resource://gre/modules/PromiseUtils.jsm");
 ChromeUtils.import("resource://services-sync/service.js");
 ChromeUtils.import("resource://services-sync/util.js");
 
-function do_check_throws(func) {
-  var raised = false;
-  try {
-    func();
-  } catch (ex) {
-    raised = true;
-  }
-  Assert.ok(raised);
-}
-
-add_test(function test_findCluster() {
+add_task(async function test_findCluster() {
   syncTestLogging();
   _("Test Service._findCluster()");
   try {
-
     let whenReadyToAuthenticate = PromiseUtils.defer();
     Service.identity.whenReadyToAuthenticate = whenReadyToAuthenticate;
     whenReadyToAuthenticate.resolve(true);
 
     Service.identity._ensureValidToken = () => Promise.reject(new Error("Connection refused"));
 
     _("_findCluster() throws on network errors (e.g. connection refused).");
-    do_check_throws(function() {
-      Service._clusterManager._findCluster();
-    });
+    await Assert.rejects(Service._clusterManager._findCluster());
 
     Service.identity._ensureValidToken = () => Promise.resolve(true);
     Service.identity._token = { endpoint: "http://weave.user.node" };
 
     _("_findCluster() returns the user's cluster node");
-    let cluster = Service._clusterManager._findCluster();
+    let cluster = await Service._clusterManager._findCluster();
     Assert.equal(cluster, "http://weave.user.node/");
 
   } finally {
     Svc.Prefs.resetBranch("");
-    run_next_test();
   }
 });
 
-add_test(function test_setCluster() {
+add_task(async function test_setCluster() {
   syncTestLogging();
   _("Test Service._setCluster()");
   try {
     _("Check initial state.");
     Assert.equal(Service.clusterURL, "");
 
     Service._clusterManager._findCluster = () => "http://weave.user.node/";
 
     _("Set the cluster URL.");
-    Assert.ok(Service._clusterManager.setCluster());
+    Assert.ok((await Service._clusterManager.setCluster()));
     Assert.equal(Service.clusterURL, "http://weave.user.node/");
 
     _("Setting it again won't make a difference if it's the same one.");
-    Assert.ok(!Service._clusterManager.setCluster());
+    Assert.ok(!(await Service._clusterManager.setCluster()));
     Assert.equal(Service.clusterURL, "http://weave.user.node/");
 
     _("A 'null' response won't make a difference either.");
     Service._clusterManager._findCluster = () => null;
-    Assert.ok(!Service._clusterManager.setCluster());
+    Assert.ok(!(await Service._clusterManager.setCluster()));
     Assert.equal(Service.clusterURL, "http://weave.user.node/");
-
   } finally {
     Svc.Prefs.resetBranch("");
-    run_next_test();
   }
 });
--- a/services/sync/tests/unit/test_telemetry.js
+++ b/services/sync/tests/unit/test_telemetry.js
@@ -425,17 +425,17 @@ add_task(async function test_engine_fail
     await cleanAndGo(engine, server);
     await Service.engineManager.unregister(engine);
   }
 });
 
 add_task(async function test_clean_urls() {
   enableValidationPrefs();
 
-  Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   engine._errToThrow = new TypeError("http://www.google .com is not a valid URL.");
 
   try {
     _(`test_clean_urls: Steam tracker contents: ${
--- a/services/sync/tps/extensions/tps/resource/modules/addons.jsm
+++ b/services/sync/tps/extensions/tps/resource/modules/addons.jsm
@@ -47,25 +47,25 @@ function GetFileAsText(file) {
 function Addon(TPS, id) {
   this.TPS = TPS;
   this.id = id;
 }
 
 Addon.prototype = {
   addon: null,
 
-  uninstall: function uninstall() {
+  async uninstall() {
     // find our addon locally
-    let addon = Async.promiseSpinningly(AddonManager.getAddonByID(this.id));
+    let addon = await AddonManager.getAddonByID(this.id);
     Logger.AssertTrue(!!addon, "could not find addon " + this.id + " to uninstall");
-    Async.promiseSpinningly(AddonUtils.uninstallAddon(addon));
+    await AddonUtils.uninstallAddon(addon);
   },
 
-  find: function find(state) {
-    let addon = Async.promiseSpinningly(AddonManager.getAddonByID(this.id));
+  async find(state) {
+    let addon = await AddonManager.getAddonByID(this.id);
 
     if (!addon) {
       Logger.logInfo("Could not find add-on with ID: " + this.id);
       return false;
     }
 
     this.addon = addon;
 
@@ -93,18 +93,18 @@ Addon.prototype = {
     AddonUtils.installAddons([{id: this.id, requireSecureURI: false}], cb);
     let result = cb.wait();
 
     Logger.AssertEqual(1, result.installedIDs.length, "Exactly 1 add-on was installed.");
     Logger.AssertEqual(this.id, result.installedIDs[0],
                        "Add-on was installed successfully: " + this.id);
   },
 
-  setEnabled: function setEnabled(flag) {
-    Logger.AssertTrue(this.find(), "Add-on is available.");
+  async setEnabled(flag) {
+    Logger.AssertTrue((await this.find()), "Add-on is available.");
 
     let userDisabled;
     if (flag == STATE_ENABLED) {
       userDisabled = false;
     } else if (flag == STATE_DISABLED) {
       userDisabled = true;
     } else {
       throw new Error("Unknown flag to setEnabled: " + flag);
--- a/services/sync/tps/extensions/tps/resource/modules/history.jsm
+++ b/services/sync/tps/extensions/tps/resource/modules/history.jsm
@@ -10,17 +10,16 @@
 var EXPORTED_SYMBOLS = ["HistoryEntry", "DumpHistory"];
 
 const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
 
 ChromeUtils.import("resource://gre/modules/Services.jsm");
 ChromeUtils.import("resource://gre/modules/PlacesUtils.jsm");
 ChromeUtils.import("resource://gre/modules/PlacesSyncUtils.jsm");
 ChromeUtils.import("resource://tps/logger.jsm");
-ChromeUtils.import("resource://services-common/async.js");
 
 var DumpHistory = async function TPS_History__DumpHistory() {
   let query = PlacesUtils.history.getNewQuery();
   let options = PlacesUtils.history.getNewQueryOptions();
   let root = PlacesUtils.history.executeQuery(query, options).root;
   root.containerOpen = true;
   Logger.logInfo("\n\ndumping history\n", true);
   for (var i = 0; i < root.childCount; i++) {
--- a/services/sync/tps/extensions/tps/resource/tps.jsm
+++ b/services/sync/tps/extensions/tps/resource/tps.jsm
@@ -482,26 +482,26 @@ var TPS = {
       Logger.logInfo("executing action " + action.toUpperCase() +
                      " on addon " + JSON.stringify(entry));
       let addon = new Addon(this, entry);
       switch (action) {
         case ACTION_ADD:
           addon.install();
           break;
         case ACTION_DELETE:
-          addon.uninstall();
+          await addon.uninstall();
           break;
         case ACTION_VERIFY:
-          Logger.AssertTrue(addon.find(state), "addon " + addon.id + " not found");
+          Logger.AssertTrue((await addon.find(state)), "addon " + addon.id + " not found");
           break;
         case ACTION_VERIFY_NOT:
-          Logger.AssertFalse(addon.find(state), "addon " + addon.id + " is present, but it shouldn't be");
+          Logger.AssertFalse((await addon.find(state)), "addon " + addon.id + " is present, but it shouldn't be");
           break;
         case ACTION_SET_ENABLED:
-          Logger.AssertTrue(addon.setEnabled(state), "addon " + addon.id + " not found");
+          Logger.AssertTrue((await addon.setEnabled(state)), "addon " + addon.id + " not found");
           break;
         default:
           throw new Error("Unknown action for add-on: " + action);
       }
     }
     Logger.logPass("executing action " + action.toUpperCase() +
                    " on addons");
   },
--- a/toolkit/modules/tests/xpcshell/test_sqlite.js
+++ b/toolkit/modules/tests/xpcshell/test_sqlite.js
@@ -6,19 +6,16 @@ do_get_profile();
 
 ChromeUtils.import("resource://gre/modules/Promise.jsm");
 ChromeUtils.import("resource://gre/modules/PromiseUtils.jsm");
 ChromeUtils.import("resource://gre/modules/osfile.jsm");
 ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
 ChromeUtils.import("resource://gre/modules/Services.jsm");
 ChromeUtils.import("resource://gre/modules/Sqlite.jsm");
 ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
-
-// To spin the event loop in test.
-ChromeUtils.import("resource://services-common/async.js");
 
 function sleep(ms) {
   return new Promise(resolve => {
 
     let timer = Cc["@mozilla.org/timer;1"]
                   .createInstance(Ci.nsITimer);
 
     timer.initWithCallback({
@@ -640,49 +638,36 @@ add_task(async function test_in_progress
   Assert.equal(c._connectionData._pendingStatements.size, 0);
   await c.executeCached("INSERT INTO dirs (path) VALUES ('foo')");
   Assert.equal(c._connectionData._statementCounter, c._initialStatementCount + 1);
   Assert.equal(c._connectionData._pendingStatements.size, 0);
 
   let expectOne;
   let expectTwo;
 
-  // Please forgive me.
-  let inner = Async.makeSpinningCallback();
-  let outer = Async.makeSpinningCallback();
 
   // We want to make sure that two queries executing simultaneously
   // result in `_pendingStatements.size` reaching 2, then dropping back to 0.
   //
   // To do so, we kick off a second statement within the row handler
   // of the first, then wait for both to finish.
 
+  let inner = PromiseUtils.defer();
   await c.executeCached("SELECT * from dirs", null, function onRow() {
     // In the onRow handler, we're still an outstanding query.
     // Expect a single in-progress entry.
     expectOne = c._connectionData._pendingStatements.size;
 
     // Start another query, checking that after its statement has been created
     // there are two statements in progress.
-    let p = c.executeCached("SELECT 10, path from dirs");
+    c.executeCached("SELECT 10, path from dirs").then(inner.resolve);
     expectTwo = c._connectionData._pendingStatements.size;
-
-    // Now wait for it to be done before we return from the row handler …
-    p.then(function onInner() {
-      inner();
-    });
-  }).then(function onOuter() {
-    // … and wait for the inner to be done before we finish …
-    inner.wait();
-    outer();
   });
 
-  // … and wait for both queries to have finished before we go on and
-  // test postconditions.
-  outer.wait();
+  await inner.promise;
 
   Assert.equal(expectOne, 1);
   Assert.equal(expectTwo, 2);
   Assert.equal(c._connectionData._statementCounter, c._initialStatementCount + 3);
   Assert.equal(c._connectionData._pendingStatements.size, 0);
 
   await c.close();
 });
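
For reference, PromiseUtils.defer(), used above in place of the spinning callbacks, returns a deferred object with promise, resolve and reject properties. A minimal usage sketch in an xpcshell test (someCallbackApi is illustrative, not a real API):

  ChromeUtils.import("resource://gre/modules/PromiseUtils.jsm");

  add_task(async function example_defer() {
    let deferred = PromiseUtils.defer();                  // { promise, resolve, reject }
    someCallbackApi(result => deferred.resolve(result));  // hand the resolver to a callback-style API
    let result = await deferred.promise;                  // settles once the callback fires
    Assert.ok(result);
  });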