Bug 1355677 - Make all Sync network requests promise based. r?tcsc,kitcambridge draft
author Mark Hammond <mhammond@skippinet.com.au>
Tue, 11 Apr 2017 23:40:53 +1000
changeset 566162 37fbfcab544d8693fc0f1cfb2a978e8ac949937d
parent 564706 3f9f6d6086b2d247831d1d03d530095bebd5a6b2
child 625215 1e1c8772b9748d9b14f77a742b8279651c403c39
push id 55109
push user bmo:markh@mozilla.com
push date Fri, 21 Apr 2017 01:40:45 +0000
reviewers tcsc, kitcambridge
bugs 1355677
milestone 55.0a1
Bug 1355677 - Make all Sync network requests promise based. r?tcsc,kitcambridge MozReview-Commit-ID: JSwAS4Xd1uy
services/sync/modules/bookmark_repair.js
services/sync/modules/bookmark_validator.js
services/sync/modules/collection_validator.js
services/sync/modules/engines.js
services/sync/modules/engines/clients.js
services/sync/modules/engines/tabs.js
services/sync/modules/record.js
services/sync/modules/resource.js
services/sync/modules/service.js
services/sync/modules/stages/enginesync.js
services/sync/tests/unit/head_errorhandler_common.js
services/sync/tests/unit/test_bookmark_engine.js
services/sync/tests/unit/test_clients_engine.js
services/sync/tests/unit/test_collection_getBatched.js
services/sync/tests/unit/test_corrupt_keys.js
services/sync/tests/unit/test_errorhandler_1.js
services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
services/sync/tests/unit/test_postqueue.js
services/sync/tests/unit/test_records_wbo.js
services/sync/tests/unit/test_resource.js
services/sync/tests/unit/test_resource_async.js
services/sync/tests/unit/test_resource_header.js
services/sync/tests/unit/test_resource_ua.js
services/sync/tests/unit/test_service_detect_upgrade.js
services/sync/tests/unit/test_service_startOver.js
services/sync/tests/unit/test_service_sync_remoteSetup.js
services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
services/sync/tests/unit/test_syncengine_sync.js
services/sync/tests/unit/test_syncscheduler.js
services/sync/tests/unit/test_warn_on_truncated_response.js
services/sync/tps/extensions/tps/resource/tps.jsm
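
The overall shape of this patch: AsyncResource's get/put/post/delete now return promises instead of taking callbacks, and call sites that are still synchronous bridge the gap with Async.promiseSpinningly. The sketch below is illustrative only; the helper names are hypothetical, and it assumes the Service, Resource and Async objects used in the diffs that follow (Async comes from resource://services-common/async.js).

// Hypothetical helpers sketching the new calling pattern.
async function fetchInfoCollections(service) {
  // Resource methods now return promises, so async callers simply await them.
  let response = await service.resource(service.infoURL).get();
  if (!response.success) {
    throw response;
  }
  return response.obj;
}

function fetchInfoCollectionsSync(service) {
  // Callers that are still synchronous bridge with Async.promiseSpinningly(),
  // as service.js and engines.js do throughout this patch.
  return Async.promiseSpinningly(fetchInfoCollections(service));
}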
--- a/services/sync/modules/bookmark_repair.js
+++ b/services/sync/modules/bookmark_repair.js
@@ -634,17 +634,17 @@ class BookmarkRepairResponder extends Co
       // anything in that case, so bail now.
       return { toUpload, toDelete };
     }
 
     // which of these items exist on the server?
     let itemSource = engine.itemSource();
     itemSource.ids = repairable.map(item => item.syncId);
     log.trace(`checking the server for items`, itemSource.ids);
-    let itemsResponse = itemSource.get();
+    let itemsResponse = await itemSource.get();
     // If the response failed, don't bother trying to parse the output.
     // Throwing here means we abort the repair, which isn't ideal for transient
     // errors (eg, no network, 500 service outage etc), but we don't currently
     // have a sane/safe way to try again later (we'd need to implement a kind
     // of timeout, otherwise we might end up retrying forever and never remove
     // our request command.) Bug 1347805.
     if (!itemsResponse.success) {
       throw new Error(`request for server IDs failed: ${itemsResponse.status}`);
--- a/services/sync/modules/bookmark_validator.js
+++ b/services/sync/modules/bookmark_validator.js
@@ -808,40 +808,40 @@ class BookmarkValidator {
       }
       if (structuralDifferences.length) {
         problemData.structuralDifferences.push({ id, differences: structuralDifferences });
       }
     }
     return inspectionInfo;
   }
 
-  _getServerState(engine) {
+  async _getServerState(engine) {
// XXXXX - todo - we need to capture last-modified of the server here and
// ensure the repairer only applies with if-unmodified-since set to that date.
     let collection = engine.itemSource();
     let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
     collection.full = true;
     let items = [];
     collection.recordHandler = function(item) {
       item.decrypt(collectionKey);
       items.push(item.cleartext);
     };
-    let resp = collection.getBatched();
+    let resp = await collection.getBatched();
     if (!resp.success) {
       throw resp;
     }
     return items;
   }
 
   async validate(engine) {
     let start = Date.now();
     let clientTree = await PlacesUtils.promiseBookmarksTree("", {
       includeItemIds: true
     });
-    let serverState = this._getServerState(engine);
+    let serverState = await this._getServerState(engine);
     let serverRecordCount = serverState.length;
     let result = await this.compareServerWithClient(serverState, clientTree);
     let end = Date.now();
     let duration = end - start;
     return {
       duration,
       version: this.version,
       problems: result.problemData,
--- a/services/sync/modules/collection_validator.js
+++ b/services/sync/modules/collection_validator.js
@@ -54,26 +54,26 @@ class CollectionValidator {
     this.idProp = idProp;
   }
 
   // Should a custom ProblemData type be needed, return it here.
   emptyProblemData() {
     return new CollectionProblemData();
   }
 
-  getServerItems(engine) {
+  async getServerItems(engine) {
     let collection = engine.itemSource();
     let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
     collection.full = true;
     let items = [];
     collection.recordHandler = function(item) {
       item.decrypt(collectionKey);
       items.push(item.cleartext);
     };
-    let resp = collection.getBatched();
+    let resp = await collection.getBatched();
     if (!resp.success) {
       throw resp;
     }
     return items;
   }
 
   // Should return a promise that resolves to an array of client items.
   getClientItems() {
--- a/services/sync/modules/engines.js
+++ b/services/sync/modules/engines.js
@@ -963,17 +963,17 @@ SyncEngine.prototype = {
     tombstone.deleted = true;
     return tombstone;
   },
 
   // Any setup that needs to happen at the beginning of each sync.
   _syncStartup() {
 
     // Determine if we need to wipe on outdated versions
-    let metaGlobal = this.service.recordManager.get(this.metaURL);
+    let metaGlobal = Async.promiseSpinningly(this.service.recordManager.get(this.metaURL));
     let engines = metaGlobal.payload.engines || {};
     let engineData = engines[this.name] || {};
 
     let needsWipe = false;
 
     // Assume missing versions are 0 and wipe the server
     if ((engineData.version || 0) < this.version) {
       this._log.debug("Old engine data: " + [engineData.version, this.version]);
@@ -1228,17 +1228,17 @@ SyncEngine.prototype = {
       if (applyBatch.length == self.applyIncomingBatchSize) {
         doApplyBatch.call(self);
       }
       self._store._sleep(0);
     };
 
     // Only bother getting data from the server if there's new things
     if (this.lastModified == null || this.lastModified > this.lastSync) {
-      let resp = newitems.getBatched();
+      let resp = Async.promiseSpinningly(newitems.getBatched());
       doApplyBatchAndPersistFailed.call(this);
       if (!resp.success) {
         resp.failureCode = ENGINE_DOWNLOAD_FAIL;
         throw resp;
       }
 
       if (aborting) {
         throw aborting;
@@ -1251,17 +1251,17 @@ SyncEngine.prototype = {
 
       // Sort and limit so that on mobile we only get the last X records.
       guidColl.limit = this.downloadLimit;
       guidColl.newer = this.lastSync;
 
       // index: Orders by the sortindex descending (highest weight first).
       guidColl.sort  = "index";
 
-      let guids = guidColl.get();
+      let guids = Async.promiseSpinningly(guidColl.get());
       if (!guids.success)
         throw guids;
 
       // Figure out which guids weren't just fetched then remove any guids that
       // were already waiting and prepend the new ones
       let extra = Utils.arraySub(guids.obj, handled);
       if (extra.length > 0) {
         fetchBatch = Utils.arrayUnion(extra, fetchBatch);
@@ -1284,17 +1284,17 @@ SyncEngine.prototype = {
     while (fetchBatch.length && !aborting) {
       // Reuse the original query, but get rid of the restricting params
       // and batch remaining records.
       newitems.limit = 0;
       newitems.newer = 0;
       newitems.ids = fetchBatch.slice(0, batchSize);
 
       // Reuse the existing record handler set earlier
-      let resp = newitems.get();
+      let resp = Async.promiseSpinningly(newitems.get());
       if (!resp.success) {
         resp.failureCode = ENGINE_DOWNLOAD_FAIL;
         throw resp;
       }
 
       // This batch was successfully applied. Not using
       // doApplyBatchAndPersistFailed() here to avoid writing toFetch twice.
       fetchBatch = fetchBatch.slice(batchSize);
@@ -1755,17 +1755,17 @@ SyncEngine.prototype = {
   // Save the current snapshot so as to calculate changes at next sync
   _syncFinish() {
     this._log.trace("Finishing up sync");
     this._tracker.resetScore();
 
     let doDelete = Utils.bind2(this, function(key, val) {
       let coll = new Collection(this.engineURL, this._recordObj, this.service);
       coll[key] = val;
-      coll.delete();
+      Async.promiseSpinningly(coll.delete());
     });
 
     for (let [key, val] of Object.entries(this._delete)) {
       // Remove the key for future uses
       delete this._delete[key];
 
       // Send a simple delete for the property
       if (key != "ids" || val.length <= 100)
@@ -1821,17 +1821,17 @@ SyncEngine.prototype = {
     test.recordHandler = function recordHandler(record) {
       record.decrypt(key);
       canDecrypt = true;
     };
 
     // Any failure fetching/decrypting will just result in false
     try {
       this._log.trace("Trying to decrypt a record from the server..");
-      test.get();
+      Async.promiseSpinningly(test.get());
     } catch (ex) {
       if (Async.isShutdownException(ex)) {
         throw ex;
       }
       this._log.debug("Failed test decrypt", ex);
     }
 
     return canDecrypt;
@@ -1839,24 +1839,24 @@ SyncEngine.prototype = {
 
   _resetClient() {
     this.resetLastSync();
     this.previousFailed = [];
     this.toFetch = [];
   },
 
   wipeServer() {
-    let response = this.service.resource(this.engineURL).delete();
+    let response = Async.promiseSpinningly(this.service.resource(this.engineURL).delete());
     if (response.status != 200 && response.status != 404) {
       throw response;
     }
     this._resetClient();
   },
 
-  removeClientData() {
+  async removeClientData() {
     // Implement this method in engines that store client specific data
     // on the server.
   },
 
   /*
    * Decide on (and partially effect) an error-handling strategy.
    *
    * Asks the Service to respond to an HMAC error, which might result in keys
--- a/services/sync/modules/engines/clients.js
+++ b/services/sync/modules/engines/clients.js
@@ -504,19 +504,19 @@ ClientEngine.prototype = {
     this._store.wipe();
     const logRemoveError = err => this._log.warn("Could not delete json file", err);
     Async.promiseSpinningly(
       Utils.jsonRemove("commands", this).catch(logRemoveError)
         .then(Utils.jsonRemove("commands-syncing", this).catch(logRemoveError))
     );
   },
 
-  removeClientData: function removeClientData() {
+  async removeClientData() {
     let res = this.service.resource(this.engineURL + "/" + this.localID);
-    res.delete();
+    await res.delete();
   },
 
   // Override the default behavior to delete bad records from the server.
   handleHMACMismatch: function handleHMACMismatch(item, mayRetry) {
     this._log.debug("Handling HMAC mismatch for " + item.id);
 
     let base = SyncEngine.prototype.handleHMACMismatch.call(this, item, mayRetry);
     if (base != SyncEngine.kRecoveryStrategy.error)
--- a/services/sync/modules/engines/tabs.js
+++ b/services/sync/modules/engines/tabs.js
@@ -68,19 +68,19 @@ TabEngine.prototype = {
 
   _resetClient() {
     SyncEngine.prototype._resetClient.call(this);
     this._store.wipe();
     this._tracker.modified = true;
     this.hasSyncedThisSession = false;
   },
 
-  removeClientData() {
+  async removeClientData() {
     let url = this.engineURL + "/" + this.service.clientsEngine.localID;
-    this.service.resource(url).delete();
+    await this.service.resource(url).delete();
   },
 
   /**
    * Return a Set of open URLs.
    */
   getOpenURLs() {
     let urls = new Set();
     for (let entry of this._store.getAllTabs()) {
--- a/services/sync/modules/record.js
+++ b/services/sync/modules/record.js
@@ -37,30 +37,30 @@ WBORecord.prototype = {
   get sortindex() {
     if (this.data.sortindex)
       return this.data.sortindex;
     return 0;
   },
 
   // Get thyself from your URI, then deserialize.
   // Set thine 'response' field.
-  fetch: function fetch(resource) {
+  async fetch(resource) {
     if (!(resource instanceof Resource)) {
       throw new Error("First argument must be a Resource instance.");
     }
 
-    let r = resource.get();
+    let r = await resource.get();
     if (r.success) {
       this.deserialize(r);   // Warning! Muffles exceptions!
     }
     this.response = r;
     return this;
   },
 
-  upload: function upload(resource) {
+  upload(resource) {
     if (!(resource instanceof Resource)) {
       throw new Error("First argument must be a Resource instance.");
     }
 
     return resource.put(this);
   },
 
   // Take a base URI string, with trailing slash, and return the URI of this
@@ -218,22 +218,22 @@ this.RecordManager = function RecordMana
 
   this._log = Log.repository.getLogger(this._logName);
   this._records = {};
 }
 RecordManager.prototype = {
   _recordType: CryptoWrapper,
   _logName: "Sync.RecordManager",
 
-  import: function RecordMgr_import(url) {
+  async import(url) {
     this._log.trace("Importing record: " + (url.spec ? url.spec : url));
     try {
       // Clear out the last response with empty object if GET fails
       this.response = {};
-      this.response = this.service.resource(url).get();
+      this.response = await this.service.resource(url).get();
 
       // Don't parse and save the record on failure
       if (!this.response.success)
         return null;
 
       let record = new this._recordType(url);
       record.deserialize(this.response);
 
@@ -242,21 +242,21 @@ RecordManager.prototype = {
       if (Async.isShutdownException(ex)) {
         throw ex;
       }
       this._log.debug("Failed to import record", ex);
       return null;
     }
   },
 
-  get: function RecordMgr_get(url) {
+  get(url) {
     // Use a url string as the key to the hash
     let spec = url.spec ? url.spec : url;
     if (spec in this._records)
-      return this._records[spec];
+      return Promise.resolve(this._records[spec]);
     return this.import(url);
   },
 
   set: function RecordMgr_set(url, record) {
     let spec = url.spec ? url.spec : url;
     return this._records[spec] = record;
   },
 
@@ -698,17 +698,17 @@ Collection.prototype = {
   },
 
   // Similar to get(), but will page through the items `batchSize` at a time,
   // deferring calling the record handler until we've gotten them all.
   //
   // Returns the last response processed, and doesn't run the record handler
   // on any items if a non-success status is received while downloading the
   // records (or if a network error occurs).
-  getBatched(batchSize = DEFAULT_DOWNLOAD_BATCH_SIZE) {
+  async getBatched(batchSize = DEFAULT_DOWNLOAD_BATCH_SIZE) {
     let totalLimit = Number(this.limit) || Infinity;
     if (batchSize <= 0 || batchSize >= totalLimit) {
       // Invalid batch sizes should arguably be an error, but they're easy to handle
       return this.get();
     }
 
     if (!this.full) {
       throw new Error("getBatched is unimplemented for guid-only GETs");
@@ -728,17 +728,17 @@ Collection.prototype = {
       do {
         this._onProgress = _onProgress;
         this._onComplete = _onComplete;
         if (batchSize + recordBuffer.length > totalLimit) {
           this.limit = totalLimit - recordBuffer.length;
         }
         this._log.trace("Performing batched GET", { limit: this.limit, offset: this.offset });
         // Actually perform the request
-        resp = this.get();
+        resp = await this.get();
         if (!resp.success) {
           break;
         }
 
         // Initialize last modified, or check that something broken isn't happening.
         let lastModified = resp.headers["x-last-modified"];
         if (!lastModifiedTime) {
           lastModifiedTime = lastModified;
@@ -991,17 +991,18 @@ PostQueue.prototype = {
       this.bytesAlreadyBatched = 0;
       this.numAlreadyBatched = 0;
     } else {
       this.bytesAlreadyBatched += queued.length;
       this.numAlreadyBatched += this.numQueued;
     }
     this.queued = "";
     this.numQueued = 0;
-    let response = this.poster(queued, headers, batch, !!(finalBatchPost && this.batchID !== null));
+    let response = Async.promiseSpinningly(
+                    this.poster(queued, headers, batch, !!(finalBatchPost && this.batchID !== null)));
 
     if (!response.success) {
       this.log.trace("Server error response during a batch", response);
       // not clear what we should do here - we expect the consumer of this to
       // abort by throwing in the postCallback below.
       this.postCallback(response, !finalBatchPost);
       return;
     }
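
A detail from the record.js changes above worth calling out: RecordManager.get() keeps its synchronous cache but now always hands back a promise, so callers can await it uniformly. A minimal standalone sketch of that shape (the class and method names here are hypothetical, not part of the patch):

// Hypothetical sketch of the cache-or-fetch shape RecordManager.get() now has.
class CachedFetcher {
  constructor(fetchRemote) {
    this._records = {};              // spec -> record cache
    this._fetchRemote = fetchRemote; // async (url) => record
  }
  get(url) {
    let spec = url.spec ? url.spec : url;
    if (spec in this._records) {
      // Cache hit: wrap in a resolved promise so the return type is uniform.
      return Promise.resolve(this._records[spec]);
    }
    // Cache miss: delegate to the async fetch, which already returns a promise.
    return this._fetchRemote(url);
  }
  set(url, record) {
    let spec = url.spec ? url.spec : url;
    return this._records[spec] = record;
  }
}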
--- a/services/sync/modules/resource.js
+++ b/services/sync/modules/resource.js
@@ -128,17 +128,18 @@ AsyncResource.prototype = {
   },
 
   // ** {{{ AsyncResource._createRequest }}} **
   //
   // This method returns a new IO Channel for requests to be made
   // through. It is never called directly, only {{{_doRequest}}} uses it
   // to obtain a request channel.
   //
-  _createRequest: function Res__createRequest(method) {
+  _createRequest(method) {
+    this.method = method;
     let channel = NetUtil.newChannel({uri: this.spec, loadUsingSystemPrincipal: true})
                          .QueryInterface(Ci.nsIRequest)
                          .QueryInterface(Ci.nsIHttpChannel);
 
     channel.loadFlags |= DEFAULT_LOAD_FLAGS;
 
     // Setup a callback to handle channel notifications.
     let listener = new ChannelNotificationListener(this.headerNames);
@@ -167,65 +168,65 @@ AsyncResource.prototype = {
         this._log.trace("HTTP Header " + key + ": ***** (suppressed)");
       else
         this._log.trace("HTTP Header " + key + ": " + headers[key]);
       channel.setRequestHeader(key, headers[key], false);
     }
     return channel;
   },
 
-  _onProgress: function Res__onProgress(channel) {},
+  _onProgress(channel) {},
 
-  _doRequest: function _doRequest(action, data, callback) {
+  _doRequest(action, data) {
     this._log.trace("In _doRequest.");
-    this._callback = callback;
-    let channel = this._createRequest(action);
+    return new Promise((resolve, reject) => {
+      this._deferred = { resolve, reject };
+      let channel = this._createRequest(action);
 
-    if ("undefined" != typeof(data))
-      this._data = data;
+      if ("undefined" != typeof(data))
+        this._data = data;
 
-    // PUT and POST are treated differently because they have payload data.
-    if ("PUT" == action || "POST" == action) {
-      // Convert non-string bodies into JSON
-      if (this._data.constructor.toString() != String)
-        this._data = JSON.stringify(this._data);
-
-      this._log.debug(action + " Length: " + this._data.length);
-      this._log.trace(action + " Body: " + this._data);
+      // PUT and POST are treated differently because they have payload data.
+      if ("PUT" == action || "POST" == action) {
+        // Convert non-string bodies into JSON
+        if (this._data.constructor.toString() != String)
+          this._data = JSON.stringify(this._data);
 
-      let type = ("content-type" in this._headers) ?
-        this._headers["content-type"] : "text/plain";
+        this._log.debug(action + " Length: " + this._data.length);
+        this._log.trace(action + " Body: " + this._data);
 
-      let stream = Cc["@mozilla.org/io/string-input-stream;1"].
-        createInstance(Ci.nsIStringInputStream);
-      stream.setData(this._data, this._data.length);
+        let type = ("content-type" in this._headers) ?
+          this._headers["content-type"] : "text/plain";
 
-      channel.QueryInterface(Ci.nsIUploadChannel);
-      channel.setUploadStream(stream, type, this._data.length);
-    }
+        let stream = Cc["@mozilla.org/io/string-input-stream;1"].
+          createInstance(Ci.nsIStringInputStream);
+        stream.setData(this._data, this._data.length);
 
-    // Setup a channel listener so that the actual network operation
-    // is performed asynchronously.
-    let listener = new ChannelListener(this._onComplete, this._onProgress,
-                                       this._log, this.ABORT_TIMEOUT);
-    channel.requestMethod = action;
-    try {
+        channel.QueryInterface(Ci.nsIUploadChannel);
+        channel.setUploadStream(stream, type, this._data.length);
+      }
+
+      // Setup a channel listener so that the actual network operation
+      // is performed asynchronously.
+      let listener = new ChannelListener(this._onComplete, this._onProgress,
+                                         this._log, this.ABORT_TIMEOUT);
+      channel.requestMethod = action;
       channel.asyncOpen2(listener);
-    } catch (ex) {
-      // asyncOpen2 can throw in a bunch of cases -- e.g., a forbidden port.
-      this._log.warn("Caught an error in asyncOpen2", ex);
-      CommonUtils.nextTick(callback.bind(this, ex));
-    }
+    });
   },
 
-  _onComplete: function _onComplete(error, data, channel) {
-    this._log.trace("In _onComplete. Error is " + error + ".");
+  _onComplete(ex, data, channel) {
+    this._log.trace("In _onComplete. Error is " + ex + ".");
 
-    if (error) {
-      this._callback(error);
+    if (ex) {
+      if (!Async.isShutdownException(ex)) {
+        this._log.warn("${action} request to ${url} failed: ${ex}",
+                       { action: this.method, url: this.uri.spec, ex});
+      }
+      this._deferred.reject(ex);
       return;
     }
 
     this._data = data;
     let action = channel.requestMethod;
 
     this._log.trace("Channel: " + channel);
     this._log.trace("Action: " + action);
@@ -319,127 +320,39 @@ AsyncResource.prototype = {
         // Stringify to avoid possibly printing non-printable characters.
         this._log.debug("Parse fail: Response body starts: \"" +
                         JSON.stringify((ret + "").slice(0, 100)) +
                         "\".");
         throw ex;
       }
     }.bind(this));
 
-    this._callback(null, ret);
+    this._deferred.resolve(ret);
   },
 
-  get: function get(callback) {
-    this._doRequest("GET", undefined, callback);
+  get() {
+    return this._doRequest("GET", undefined);
   },
 
-  put: function put(data, callback) {
-    if (typeof data == "function")
-      [data, callback] = [undefined, data];
-    this._doRequest("PUT", data, callback);
+  put(data) {
+    return this._doRequest("PUT", data);
   },
 
-  post: function post(data, callback) {
-    if (typeof data == "function")
-      [data, callback] = [undefined, data];
-    this._doRequest("POST", data, callback);
+  post(data) {
+    return this._doRequest("POST", data);
   },
 
-  delete: function delete_(callback) {
-    this._doRequest("DELETE", undefined, callback);
+  delete() {
+    return this._doRequest("DELETE", undefined);
   }
 };
 
-
-/*
- * Represent a remote network resource, identified by a URI, with a
- * synchronous API.
- *
- * 'Resource' is not recommended for new code. Use the asynchronous API of
- * 'AsyncResource' instead.
- */
-this.Resource = function Resource(uri) {
-  AsyncResource.call(this, uri);
-}
-Resource.prototype = {
-
-  __proto__: AsyncResource.prototype,
-
-  _logName: "Sync.Resource",
-
-  // ** {{{ Resource._request }}} **
-  //
-  // Perform a particular HTTP request on the resource. This method
-  // is never called directly, but is used by the high-level
-  // {{{get}}}, {{{put}}}, {{{post}}} and {{delete}} methods.
-  _request: function Res__request(action, data) {
-    let cb = Async.makeSyncCallback();
-    function callback(error, ret) {
-      if (error)
-        cb.throw(error);
-      else
-        cb(ret);
-    }
-
-    // The channel listener might get a failure code
-    try {
-      this._doRequest(action, data, callback);
-      return Async.waitForSyncCallback(cb);
-    } catch (ex) {
-      if (Async.isShutdownException(ex)) {
-        throw ex;
-      }
-      this._log.warn("${action} request to ${url} failed: ${ex}",
-                     { action, url: this.uri.spec, ex });
-      // Combine the channel stack with this request stack.  Need to create
-      // a new error object for that.
-      let error = Error(ex.message);
-      error.result = ex.result;
-      let chanStack = [];
-      if (ex.stack)
-        chanStack = ex.stack.trim().split(/\n/).slice(1);
-      let requestStack = error.stack.split(/\n/).slice(1);
-
-      // Strip out the args for the last 2 frames because they're usually HUGE!
-      for (let i = 0; i <= 1; i++)
-        requestStack[i] = requestStack[i].replace(/\(".*"\)@/, "(...)@");
-
-      error.stack = chanStack.concat(requestStack).join("\n");
-      throw error;
-    }
-  },
-
-  // ** {{{ Resource.get }}} **
-  //
-  // Perform an asynchronous HTTP GET for this resource.
-  get: function Res_get() {
-    return this._request("GET");
-  },
-
-  // ** {{{ Resource.put }}} **
-  //
-  // Perform a HTTP PUT for this resource.
-  put: function Res_put(data) {
-    return this._request("PUT", data);
-  },
-
-  // ** {{{ Resource.post }}} **
-  //
-  // Perform a HTTP POST for this resource.
-  post: function Res_post(data) {
-    return this._request("POST", data);
-  },
-
-  // ** {{{ Resource.delete }}} **
-  //
-  // Perform a HTTP DELETE for this resource.
-  delete: function Res_delete() {
-    return this._request("DELETE");
-  }
-};
+// TODO: We still export both "Resource" and "AsyncResource" as the same
+// object, but we should decide on one and unify all references.
+this.Resource = AsyncResource;
 
 // = ChannelListener =
 //
 // This object implements the {{{nsIStreamListener}}} interface
 // and is called as the network operation proceeds.
 function ChannelListener(onComplete, onProgress, logger, timeout) {
   this._onComplete = onComplete;
   this._onProgress = onProgress;
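
The core conversion in resource.js is the "stashed deferred" in _doRequest(): the promise's resolve/reject are captured and later settled by _onComplete once the channel listener fires. A simplified, standalone sketch of that pattern (promisifyRequest and startChannel are hypothetical names; the real code stores the deferred on this._deferred):

// Hypothetical sketch of wrapping a callback-style channel API in a promise.
function promisifyRequest(startChannel) {
  let deferred;
  let onComplete = (error, data) => {
    if (error) {
      deferred.reject(error);   // mirrors _onComplete() rejecting on failure
      return;
    }
    deferred.resolve(data);     // mirrors _onComplete() resolving with the body
  };
  return new Promise((resolve, reject) => {
    deferred = { resolve, reject };
    startChannel(onComplete);   // kicks off the asynchronous network operation
  });
}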
--- a/services/sync/modules/service.js
+++ b/services/sync/modules/service.js
@@ -201,17 +201,17 @@ Sync11Service.prototype = {
                    "or signaling to other clients.");
 
     // Set the last handled time so that we don't act again.
     this.lastHMACEvent = now;
 
     // Fetch keys.
     let cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
     try {
-      let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+      let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;
 
       // Save out the ciphertext for when we reupload. If there's a bug in
       // CollectionKeyManager, this will prevent us from uploading junk.
       let cipherText = cryptoKeys.ciphertext;
 
       if (!cryptoResp.success) {
         this._log.warn("Failed to download keys.");
         return false;
@@ -457,17 +457,17 @@ Sync11Service.prototype = {
    * inside engine sync.
    */
   _fetchInfo(url) {
     let infoURL = url || this.infoURL;
 
     this._log.trace("In _fetchInfo: " + infoURL);
     let info;
     try {
-      info = this.resource(infoURL).get();
+      info = Async.promiseSpinningly(this.resource(infoURL).get());
     } catch (ex) {
       this.errorHandler.checkServerError(ex);
       throw ex;
     }
 
     // Always check for errors; this is also where we look for X-Weave-Alert.
     this.errorHandler.checkServerError(info);
     if (!info.success) {
@@ -510,17 +510,17 @@ Sync11Service.prototype = {
         // Don't always set to CREDENTIALS_CHANGED -- we will probably take care of this.
 
         // Fetch storage/crypto/keys.
         let cryptoKeys;
 
         if (infoCollections && (CRYPTO_COLLECTION in infoCollections)) {
           try {
             cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
-            let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+            let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;
 
             if (cryptoResp.success) {
               this.handleFetchedKeys(syncKeyBundle, cryptoKeys);
               return true;
             } else if (cryptoResp.status == 404) {
               // On failure, ask to generate new keys and upload them.
               // Fall through to the behavior below.
               this._log.warn("Got 404 for crypto/keys, but 'crypto' in info/collections. Regenerating.");
@@ -605,17 +605,17 @@ Sync11Service.prototype = {
       // This is a little weird, if we don't get a node we pretend
       // to succeed, since that probably means we just don't have storage.
       if (this.clusterURL == "" && !this._clusterManager.setCluster()) {
         this.status.sync = NO_SYNC_NODE_FOUND;
         return true;
       }
 
       // Fetch collection info on every startup.
-      let test = this.resource(this.infoURL).get();
+      let test = Async.promiseSpinningly(this.resource(this.infoURL).get());
 
       switch (test.status) {
         case 200:
           // The user is authenticated.
 
           // We have no way of verifying the passphrase right now,
           // so wait until remoteSetup to do so.
           // Just make the most trivial checks.
@@ -709,17 +709,17 @@ Sync11Service.prototype = {
                       "is stale after successful upload.");
       throw new Error("Symmetric key upload failed.");
     }
 
     // Doesn't matter if the timestamp is ahead.
 
     // Download and install them.
     let cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
-    let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
+    let cryptoResp = Async.promiseSpinningly(cryptoKeys.fetch(this.resource(this.cryptoKeysURL))).response;
     if (cryptoResp.status != 200) {
       this._log.warn("Failed to download keys.");
       throw new Error("Symmetric key download failed.");
     }
     let keysChanged = this.handleFetchedKeys(this.identity.syncKeyBundle,
                                              cryptoKeys, true);
     if (keysChanged) {
       this._log.info("Downloaded keys differed, as expected.");
@@ -731,17 +731,21 @@ Sync11Service.prototype = {
     Svc.Obs.notify("weave:engine:stop-tracking");
     this.status.resetSync();
 
     // Deletion doesn't make sense if we aren't set up yet!
     if (this.clusterURL != "") {
       // Clear client-specific data from the server, including disabled engines.
       for (let engine of [this.clientsEngine].concat(this.engineManager.getAll())) {
         try {
-          engine.removeClientData();
+          // Note the additional Promise.resolve here is to handle the fact that
+          // some 3rd party engines probably don't return a promise. We can
+          // probably nuke this once webextensions become mandatory as then
+          // no 3rd party engines will be allowed to exist.
+          Async.promiseSpinningly(Promise.resolve().then(() => engine.removeClientData()));
         } catch (ex) {
           this._log.warn(`Deleting client data for ${engine.name} failed`, ex);
         }
       }
       this._log.debug("Finished deleting client data.");
     } else {
       this._log.debug("Skipping client data removal: no cluster URL.");
     }
@@ -772,17 +776,17 @@ Sync11Service.prototype = {
 
     try {
       this.identity.finalize();
       this.status.__authManager = null;
       this.identity = Status._authManager;
       this._clusterManager = this.identity.createClusterManager(this);
       Svc.Obs.notify("weave:service:start-over:finish");
     } catch (err) {
-      this._log.error("startOver failed to re-initialize the identity manager: " + err);
+      this._log.error("startOver failed to re-initialize the identity manager", err);
       // Still send the observer notification so the current state is
       // reflected in the UI.
       Svc.Obs.notify("weave:service:start-over:finish");
     }
   },
 
   login: function login() {
     function onNotify() {
@@ -839,17 +843,17 @@ Sync11Service.prototype = {
   // supporting the api.
   _fetchServerConfiguration() {
     // This is similar to _fetchInfo, but with different error handling.
 
     let infoURL = this.userBaseURL + "info/configuration";
     this._log.debug("Fetching server configuration", infoURL);
     let configResponse;
     try {
-      configResponse = this.resource(infoURL).get();
+      configResponse = Async.promiseSpinningly(this.resource(infoURL).get());
     } catch (ex) {
       // This is probably a network or similar error.
       this._log.warn("Failed to fetch info/configuration", ex);
       this.errorHandler.checkServerError(ex);
       return false;
     }
 
     if (configResponse.status == 404) {
@@ -869,32 +873,32 @@ Sync11Service.prototype = {
   // Stuff we need to do after login, before we can really do
   // anything (e.g. key setup).
   _remoteSetup: function _remoteSetup(infoResponse) {
     if (!this._fetchServerConfiguration()) {
       return false;
     }
 
     this._log.debug("Fetching global metadata record");
-    let meta = this.recordManager.get(this.metaURL);
+    let meta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));
 
     // Checking modified time of the meta record.
     if (infoResponse &&
         (infoResponse.obj.meta != this.metaModified) &&
         (!meta || !meta.isNew)) {
 
       // Delete the cached meta record...
       this._log.debug("Clearing cached meta record. metaModified is " +
           JSON.stringify(this.metaModified) + ", setting to " +
           JSON.stringify(infoResponse.obj.meta));
 
       this.recordManager.del(this.metaURL);
 
       // ... fetch the current record from the server, and COPY THE FLAGS.
-      let newMeta = this.recordManager.get(this.metaURL);
+      let newMeta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));
 
       // If we got a 401, we do not want to create a new meta/global - we
       // should be able to get the existing meta after we get a new node.
       if (this.recordManager.response.status == 401) {
         this._log.debug("Fetching meta/global record on the server returned 401.");
         this.errorHandler.checkServerError(this.recordManager.response);
         return false;
       }
@@ -1084,17 +1088,17 @@ Sync11Service.prototype = {
       // so don't attempt to get it in that case.
       if (this.clusterURL) {
         this.identity.prefetchMigrationSentinel(this);
       }
 
       // Now let's update our declined engines (but only if we have a metaURL;
       // if Sync failed due to no node we will not have one)
       if (this.metaURL) {
-        let meta = this.recordManager.get(this.metaURL);
+        let meta = Async.promiseSpinningly(this.recordManager.get(this.metaURL));
         if (!meta) {
           this._log.warn("No meta/global; can't update declined state.");
           return;
         }
 
         let declinedEngines = new DeclinedEngines(this);
         let didChange = declinedEngines.updateDeclined(meta, this.engineManager);
         if (!didChange) {
@@ -1126,17 +1130,17 @@ Sync11Service.prototype = {
    * Upload meta/global, throwing the response on failure
    * @param {WBORecord} meta meta/global record
    * @throws the response object if the request was not a success
    */
   uploadMetaGlobal(meta) {
     this._log.debug("Uploading meta/global", meta);
     let res = this.resource(this.metaURL);
     res.setHeader("X-If-Unmodified-Since", meta.modified);
-    let response = res.put(meta);
+    let response = Async.promiseSpinningly(res.put(meta));
     if (!response.success) {
       throw response;
     }
     // From https://docs.services.mozilla.com/storage/apis-1.5.html:
     // "Successful responses will return the new last-modified time for the collection."
     meta.modified = response.obj;
     this.recordManager.set(this.metaURL, meta);
   },
@@ -1146,17 +1150,17 @@ Sync11Service.prototype = {
    * @param {WBORecord} cryptoKeys crypto/keys record
    * @param {Number} lastModified known last modified timestamp (in decimal seconds),
    *                 will be used to set the X-If-Unmodified-Since header
    */
   _uploadCryptoKeys(cryptoKeys, lastModified) {
     this._log.debug(`Uploading crypto/keys (lastModified: ${lastModified})`);
     let res = this.resource(this.cryptoKeysURL);
     res.setHeader("X-If-Unmodified-Since", lastModified);
-    return res.put(cryptoKeys);
+    return Async.promiseSpinningly(res.put(cryptoKeys));
   },
 
   _freshStart: function _freshStart() {
     this._log.info("Fresh start. Resetting client.");
     this.resetClient();
     this.collectionKeys.clear();
 
     // Wipe the server.
@@ -1189,17 +1193,17 @@ Sync11Service.prototype = {
   wipeServer: function wipeServer(collections) {
     let response;
     let histogram = Services.telemetry.getHistogramById("WEAVE_WIPE_SERVER_SUCCEEDED");
     if (!collections) {
       // Strip the trailing slash.
       let res = this.resource(this.storageURL.slice(0, -1));
       res.setHeader("X-Confirm-Delete", "1");
       try {
-        response = res.delete();
+        response = Async.promiseSpinningly(res.delete());
       } catch (ex) {
         this._log.debug("Failed to wipe server", ex);
         histogram.add(false);
         throw ex;
       }
       if (response.status != 200 && response.status != 404) {
         this._log.debug("Aborting wipeServer. Server responded with " +
                         response.status + " response for " + this.storageURL);
@@ -1209,17 +1213,17 @@ Sync11Service.prototype = {
       histogram.add(true);
       return response.headers["x-weave-timestamp"];
     }
 
     let timestamp;
     for (let name of collections) {
       let url = this.storageURL + name;
       try {
-        response = this.resource(url).delete();
+        response = Async.promiseSpinningly(this.resource(url).delete());
       } catch (ex) {
         this._log.debug("Failed to wipe '" + name + "' collection", ex);
         histogram.add(false);
         throw ex;
       }
 
       if (response.status != 200 && response.status != 404) {
         this._log.debug("Aborting wipeServer. Server responded with " +
--- a/services/sync/modules/stages/enginesync.js
+++ b/services/sync/modules/stages/enginesync.js
@@ -175,17 +175,17 @@ EngineSynchronizer.prototype = {
       if (!this.service.clusterURL) {
         this._log.debug("Aborting sync, no cluster URL: " +
                         "not uploading new meta/global.");
         this.onComplete(null);
         return;
       }
 
       // Upload meta/global if any engines changed anything.
-      let meta = this.service.recordManager.get(this.service.metaURL);
+      let meta = Async.promiseSpinningly(this.service.recordManager.get(this.service.metaURL));
       if (meta.isNew || meta.changed) {
         this._log.info("meta/global changed locally: reuploading.");
         try {
           this.service.uploadMetaGlobal(meta);
           delete meta.isNew;
           delete meta.changed;
         } catch (error) {
           this._log.error("Unable to upload meta/global. Leaving marked as new.");
@@ -337,16 +337,16 @@ EngineSynchronizer.prototype = {
     engineManager.decline(toDecline);
     engineManager.undecline(toUndecline);
 
     Svc.Prefs.resetBranch("engineStatusChanged.");
     this.service._ignorePrefObserver = false;
   },
 
   _updateEnabledEngines() {
-    let meta = this.service.recordManager.get(this.service.metaURL);
+    let meta = Async.promiseSpinningly(this.service.recordManager.get(this.service.metaURL));
     let numClients = this.service.scheduler.numClients;
     let engineManager = this.service.engineManager;
 
     this._updateEnabledFromMeta(meta, numClients, engineManager);
   },
 };
 Object.freeze(EngineSynchronizer.prototype);
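
One more pattern from the service.js hunks above: startOver() can't assume that a third-party engine's removeClientData() returns a promise, so the call is wrapped in Promise.resolve().then(...) before being spun. A small standalone sketch of that wrapper (callPossiblySync is a hypothetical name):

// Hypothetical helper showing why the extra Promise.resolve() is used.
function callPossiblySync(fn) {
  // Promise.resolve().then(...) absorbs both a returned promise and a plain
  // (or undefined) return value, and turns a thrown exception into a rejection.
  return Promise.resolve().then(() => fn());
}

// Usage mirroring the startOver() hunk in service.js:
//   Async.promiseSpinningly(callPossiblySync(() => engine.removeClientData()));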
--- a/services/sync/tests/unit/head_errorhandler_common.js
+++ b/services/sync/tests/unit/head_errorhandler_common.js
@@ -95,23 +95,24 @@ const EHTestsCommon = {
 
 
   generateCredentialsChangedFailure() {
     // Make sync fail due to changed credentials. We simply re-encrypt
     // the keys with a different Sync Key, without changing the local one.
     let newSyncKeyBundle = new SyncKeyBundle("johndoe", "23456234562345623456234562");
     let keys = Service.collectionKeys.asWBO();
     keys.encrypt(newSyncKeyBundle);
-    keys.upload(Service.resource(Service.cryptoKeysURL));
+    return keys.upload(Service.resource(Service.cryptoKeysURL));
   },
 
   async setUp(server) {
     await configureIdentity({ username: "johndoe" }, server);
     return EHTestsCommon.generateAndUploadKeys()
   },
 
-  generateAndUploadKeys() {
+  async generateAndUploadKeys() {
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
-    return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+    let response = await serverKeys.upload(Service.resource(Service.cryptoKeysURL));
+    return response.success;
   }
 };
--- a/services/sync/tests/unit/test_bookmark_engine.js
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -718,17 +718,17 @@ add_task(async function test_misreconcil
     }
   };
 
   _("Applying record.");
   engine._processIncoming({
     getBatched() {
       return this.get();
     },
-    get() {
+    async get() {
       this.recordHandler(encrypted);
       return {success: true}
     },
   });
 
   // Ensure that afterwards, toolbar is still there.
   // As of 2012-12-05, this only passes because Places doesn't use "toolbar" as
   // the real GUID, instead using a generated one. Sync does the translation.
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -87,21 +87,21 @@ add_task(async function test_bad_hmac() 
   }
 
   function check_client_deleted(id) {
     let coll = user.collection("clients");
     let wbo  = coll.wbo(id);
     return !wbo || !wbo.payload;
   }
 
-  function uploadNewKeys() {
+  async function uploadNewKeys() {
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
-    ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+    ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
   }
 
   try {
     await configureIdentity({username: "foo"}, server);
     Service.login();
 
     generateNewKeys(Service.collectionKeys);
 
@@ -121,17 +121,17 @@ add_task(async function test_bad_hmac() 
 
     _("Change our keys and our client ID, reupload keys.");
     let oldLocalID  = engine.localID;     // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
     engine.resetClient();
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
-    ok(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+    ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     _("Sync.");
     engine._sync();
 
     _("Old record " + oldLocalID + " was deleted, new one uploaded.");
     check_clients_count(1);
     check_client_deleted(oldLocalID);
 
@@ -156,24 +156,24 @@ add_task(async function test_bad_hmac() 
     user.collection("clients")._wbos = {};
     Service.lastHMACEvent = 0;
     engine.localID = Utils.makeGUID();
     engine.resetClient();
     deletedCollections = [];
     deletedItems       = [];
     check_clients_count(0);
 
-    uploadNewKeys();
+    await uploadNewKeys();
 
     // Sync once to upload a record.
     engine._sync();
     check_clients_count(1);
 
     // Generate and upload new keys, so the old client record is wrong.
-    uploadNewKeys();
+    await uploadNewKeys();
 
     // Create a new client record and new keys. Now our keys are wrong, as well
     // as the object on the server. We'll download the new keys and also delete
     // the bad client record.
     oldLocalID  = engine.localID;         // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
     engine.resetClient();
     generateNewKeys(Service.collectionKeys);
@@ -312,17 +312,17 @@ add_task(async function test_sync() {
     engine.lastRecordUpload -= MORE_THAN_CLIENTS_TTL_REFRESH;
     let lastweek = engine.lastRecordUpload;
     clientWBO().payload = undefined;
     engine._sync();
     ok(!!clientWBO().payload);
     ok(engine.lastRecordUpload > lastweek);
 
     _("Remove client record.");
-    engine.removeClientData();
+    await engine.removeClientData();
     equal(clientWBO().payload, undefined);
 
     _("Time travel one day back, no record uploaded.");
     engine.lastRecordUpload -= LESS_THAN_CLIENTS_TTL_REFRESH;
     let yesterday = engine.lastRecordUpload;
     engine._sync();
     equal(clientWBO().payload, undefined);
     equal(engine.lastRecordUpload, yesterday);
--- a/services/sync/tests/unit/test_collection_getBatched.js
+++ b/services/sync/tests/unit/test_collection_getBatched.js
@@ -21,17 +21,17 @@ function recordRange(lim, offset, total)
 function get_test_collection_info({ totalRecords, batchSize, lastModified,
                                     throwAfter = Infinity,
                                     interruptedAfter = Infinity }) {
   let coll = new Collection("http://example.com/test/", WBORecord, Service);
   coll.full = true;
   let requests = [];
   let responses = [];
   let sawRecord = false;
-  coll.get = function() {
+  coll.get = async function() {
     ok(!sawRecord); // make sure we call record handler after all requests.
     let limit = +this.limit;
     let offset = 0;
     if (this.offset) {
       equal(this.offset.slice(0, 6), "foobar");
       offset = +this.offset.slice(6);
     }
     requests.push({
@@ -71,26 +71,26 @@ function get_test_collection_info({ tota
     // ensure records are coming in in the right order
     equal(record.id, String(records.length));
     equal(record.payload, "test:" + records.length);
     records.push(record);
   };
   return { records, responses, requests, coll };
 }
 
-add_test(function test_success() {
+add_task(async function test_success() {
   const totalRecords = 11;
   const batchSize = 2;
   const lastModified = "111111";
   let { records, responses, requests, coll } = get_test_collection_info({
     totalRecords,
     batchSize,
     lastModified,
   });
-  let response = coll.getBatched(batchSize);
+  let response = await coll.getBatched(batchSize);
 
   equal(requests.length, Math.ceil(totalRecords / batchSize));
 
   // records are mostly checked in recordHandler, we just care about the length
   equal(records.length, totalRecords);
 
   // ensure we're returning the last response
   equal(responses[responses.length - 1], response);
@@ -111,85 +111,79 @@ add_test(function test_success() {
     expectedOffset += batchSize;
   }
 
   // ensure we cleaned up anything that would break further
   // use of this collection.
   ok(!coll._headers["x-if-unmodified-since"]);
   ok(!coll.offset);
   ok(!coll.limit || (coll.limit == Infinity));
-
-  run_next_test();
 });
 
-add_test(function test_total_limit() {
+add_task(async function test_total_limit() {
   _("getBatched respects the (initial) value of the limit property");
   const totalRecords = 100;
   const recordLimit = 11;
   const batchSize = 2;
   const lastModified = "111111";
   let { records, requests, coll } = get_test_collection_info({
     totalRecords,
     batchSize,
     lastModified,
   });
   coll.limit = recordLimit;
-  coll.getBatched(batchSize);
+  await coll.getBatched(batchSize);
 
   equal(requests.length, Math.ceil(recordLimit / batchSize));
   equal(records.length, recordLimit);
 
   for (let i = 0; i < requests.length; ++i) {
     let req = requests[i];
     if (i !== requests.length - 1) {
       equal(req.limit, batchSize);
     } else {
       equal(req.limit, recordLimit % batchSize);
     }
   }
 
   equal(coll._limit, recordLimit);
-
-  run_next_test();
 });
 
-add_test(function test_412() {
+add_task(async function test_412() {
   _("We shouldn't record records if we get a 412 in the middle of a batch");
   const totalRecords = 11;
   const batchSize = 2;
   const lastModified = "111111";
   let { records, responses, requests, coll } = get_test_collection_info({
     totalRecords,
     batchSize,
     lastModified,
     interruptedAfter: 3
   });
-  let response = coll.getBatched(batchSize);
+  let response = await coll.getBatched(batchSize);
 
   equal(requests.length, 3);
   equal(records.length, 0); // record handler shouldn't be called for anything
 
   // ensure we're returning the last response
   equal(responses[responses.length - 1], response);
 
   ok(!response.success);
   equal(response.status, 412);
-  run_next_test();
 });
 
-add_test(function test_get_throws() {
+add_task(async function test_get_throws() {
   _("We shouldn't record records if get() throws for some reason");
   const totalRecords = 11;
   const batchSize = 2;
   const lastModified = "111111";
   let { records, requests, coll } = get_test_collection_info({
     totalRecords,
     batchSize,
     lastModified,
     throwAfter: 3
   });
 
-  throws(() => coll.getBatched(batchSize), "Some Network Error");
+  await Assert.rejects(coll.getBatched(batchSize), "Some Network Error");
 
   equal(requests.length, 3);
   equal(records.length, 0);
-  run_next_test();
 });
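
The test changes above follow a common conversion: add_test plus an explicit run_next_test() becomes add_task(async function ...), and a throws() assertion on a synchronous call becomes await Assert.rejects() on the returned promise. A minimal illustrative example of that shape (the stub below is hypothetical, not part of the patch):

add_task(async function test_example_conversion() {
  // Hypothetical promise-returning stub standing in for a network request.
  let failingGet = async () => { throw new Error("Some Network Error"); };
  // No run_next_test() needed; add_task finishes when the async function does.
  await Assert.rejects(failingGet(), /Some Network Error/);
});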
--- a/services/sync/tests/unit/test_corrupt_keys.js
+++ b/services/sync/tests/unit/test_corrupt_keys.js
@@ -66,25 +66,25 @@ add_task(async function test_locally_cha
     }
 
     _("Setting meta.");
 
     // Bump version on the server.
     let m = new WBORecord("meta", "global");
     m.payload = {"syncID": "foooooooooooooooooooooooooo",
                  "storageVersion": STORAGE_VERSION};
-    m.upload(Service.resource(Service.metaURL));
+    await m.upload(Service.resource(Service.metaURL));
 
     _("New meta/global: " + JSON.stringify(johndoe.collection("meta").wbo("global")));
 
     // Upload keys.
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
-    do_check_true(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
+    do_check_true((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     // Check that login works.
     do_check_true(Service.login());
     do_check_true(Service.isLoggedIn);
 
     // Sync should upload records.
     await sync_and_validate_telem();
 
@@ -118,17 +118,17 @@ add_task(async function test_locally_cha
     }
 
     history.timestamp = Date.now() / 1000;
     let old_key_time = johndoe.modified("crypto");
     _("Old key time: " + old_key_time);
 
     // Check that we can decrypt one.
     let rec = new CryptoWrapper("history", "record-no--0");
-    rec.fetch(Service.resource(Service.storageURL + "history/record-no--0"));
+    await rec.fetch(Service.resource(Service.storageURL + "history/record-no--0"));
     _(JSON.stringify(rec));
     do_check_true(!!rec.decrypt(liveKeys));
 
     do_check_eq(hmacErrorCount, 0);
 
     // Fill local key cache with bad data.
     corrupt_local_keys();
     _("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
--- a/services/sync/tests/unit/test_errorhandler_1.js
+++ b/services/sync/tests/unit/test_errorhandler_1.js
@@ -115,17 +115,17 @@ add_task(async function test_credentials
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   await sync_and_validate_telem();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let ping = await sync_and_validate_telem(true);
   equal(ping.status.sync, CREDENTIALS_CHANGED);
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
 
@@ -392,17 +392,17 @@ add_task(async function test_sync_syncAn
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   let ping = await wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
   equal(ping.status.sync, CREDENTIALS_CHANGED);
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
@@ -445,17 +445,17 @@ add_task(async function test_sync_syncAn
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
   let ping = await wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
   equal(ping.status.sync, CREDENTIALS_CHANGED);
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
@@ -574,17 +574,17 @@ add_task(async function test_sync_prolon
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
 
   let ping = await sync_and_validate_telem(true);
   equal(ping.status.sync, PROLONGED_SYNC_FAILURE);
   deepEqual(ping.failureReason, {
@@ -663,17 +663,17 @@ add_task(async function test_sync_non_ne
   let server = EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  EHTestsCommon.generateCredentialsChangedFailure();
+  await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   Service.sync();
   await promiseObserved;
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   do_check_false(errorHandler.didReportProlongedError);
--- a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
+++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
@@ -52,22 +52,22 @@ function sync_httpd_setup() {
   return httpd_setup(handlers);
 }
 
 async function setUp(server) {
   await configureIdentity({username: "johndoe"}, server);
   new FakeCryptoService();
 }
 
-function generateAndUploadKeys(server) {
+async function generateAndUploadKeys(server) {
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
   let res = Service.resource(server.baseURI + "/1.1/johndoe/storage/crypto/keys");
-  return serverKeys.upload(res).success;
+  return (await serverKeys.upload(res)).success;
 }
 
 
 add_task(async function test_backoff500() {
   enableValidationPrefs();
 
   _("Test: HTTP 500 sets backoff status.");
   let server = sync_httpd_setup();
@@ -76,17 +76,17 @@ add_task(async function test_backoff500(
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 500};
 
   try {
     do_check_false(Status.enforceBackoff);
 
     // Forcibly create and upload keys here -- otherwise we don't get to the 500!
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
     do_check_true(Status.enforceBackoff);
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetBackoff();
@@ -111,17 +111,17 @@ add_task(async function test_backoff503(
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function(subject) {
     backoffInterval = subject;
   });
 
   try {
     do_check_false(Status.enforceBackoff);
 
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
 
     do_check_true(Status.enforceBackoff);
     do_check_eq(backoffInterval, BACKOFF);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
     do_check_eq(Status.sync, SERVER_MAINTENANCE);
@@ -145,17 +145,17 @@ add_task(async function test_overQuota()
   engine.exception = {status: 400,
                       toString() {
                         return "14";
                       }};
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
 
     do_check_eq(Status.sync, OVER_QUOTA);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
@@ -225,17 +225,17 @@ add_task(async function test_engine_netw
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = Components.Exception("NS_ERROR_UNKNOWN_HOST",
                                           Cr.NS_ERROR_UNKNOWN_HOST);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
@@ -254,17 +254,17 @@ add_task(async function test_resource_ti
   engine.enabled = true;
   // Resource throws this when it encounters a timeout.
   engine.exception = Components.Exception("Aborting due to channel inactivity.",
                                           Cr.NS_ERROR_NET_TIMEOUT);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
-    do_check_true(generateAndUploadKeys(server));
+    do_check_true(await generateAndUploadKeys(server));
 
     Service.login();
     Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
--- a/services/sync/tests/unit/test_postqueue.js
+++ b/services/sync/tests/unit/test_postqueue.js
@@ -17,17 +17,17 @@ function makePostQueue(config, lastModTi
     posts: [],
   }
   let poster = (data, headers, batch, commit) => {
     let thisPost = { nbytes: data.length, batch, commit };
     if (headers.length) {
       thisPost.headers = headers;
     }
     stats.posts.push(thisPost);
-    return responseGenerator.next().value;
+    return Promise.resolve(responseGenerator.next().value);
   }
 
   let done = () => {}
   let pq = new PostQueue(poster, lastModTime, config, getTestLogger(), done);
   return { pq, stats };
 }
 
 add_test(function test_simple() {
--- a/services/sync/tests/unit/test_records_wbo.js
+++ b/services/sync/tests/unit/test_records_wbo.js
@@ -2,18 +2,19 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/resource.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
+initTestLogging("Trace");
 
-function test_toJSON() {
+add_test(function test_toJSON() {
   _("Create a record, for now without a TTL.");
   let wbo = new WBORecord("coll", "a_record");
   wbo.modified = 12345;
   wbo.sortindex = 42;
   wbo.payload = {};
 
   _("Verify that the JSON representation contains the WBO properties, but not TTL.");
   let json = JSON.parse(JSON.stringify(wbo));
@@ -21,64 +22,57 @@ function test_toJSON() {
   do_check_eq(json.sortindex, 42);
   do_check_eq(json.payload, "{}");
   do_check_false("ttl" in json);
 
   _("Set a TTL, make sure it's present in the JSON representation.");
   wbo.ttl = 30 * 60;
   json = JSON.parse(JSON.stringify(wbo));
   do_check_eq(json.ttl, 30 * 60);
-}
+  run_next_test();
+});
 
 
-function test_fetch() {
+add_task(async function test_fetch() {
   let record = {id: "asdf-1234-asdf-1234",
                 modified: 2454725.98283,
                 payload: JSON.stringify({cheese: "roquefort"})};
   let record2 = {id: "record2",
                  modified: 2454725.98284,
                  payload: JSON.stringify({cheese: "gruyere"})};
   let coll = [{id: "record2",
                modified: 2454725.98284,
                payload: JSON.stringify({cheese: "gruyere"})}];
 
   _("Setting up server.");
   let server = httpd_setup({
     "/record":  httpd_handler(200, "OK", JSON.stringify(record)),
     "/record2": httpd_handler(200, "OK", JSON.stringify(record2)),
     "/coll":    httpd_handler(200, "OK", JSON.stringify(coll))
   });
-  do_test_pending();
 
   try {
     _("Fetching a WBO record");
     let rec = new WBORecord("coll", "record");
-    rec.fetch(Service.resource(server.baseURI + "/record"));
+    await rec.fetch(Service.resource(server.baseURI + "/record"));
     do_check_eq(rec.id, "asdf-1234-asdf-1234"); // NOT "record"!
 
     do_check_eq(rec.modified, 2454725.98283);
     do_check_eq(typeof(rec.payload), "object");
     do_check_eq(rec.payload.cheese, "roquefort");
 
     _("Fetching a WBO record using the record manager");
-    let rec2 = Service.recordManager.get(server.baseURI + "/record2");
+    let rec2 = await Service.recordManager.get(server.baseURI + "/record2");
     do_check_eq(rec2.id, "record2");
     do_check_eq(rec2.modified, 2454725.98284);
     do_check_eq(typeof(rec2.payload), "object");
     do_check_eq(rec2.payload.cheese, "gruyere");
     do_check_eq(Service.recordManager.response.status, 200);
 
     // Testing collection extraction.
     _("Extracting collection.");
     let rec3 = new WBORecord("tabs", "foo");   // Create through constructor.
     do_check_eq(rec3.collection, "tabs");
 
   } finally {
-    server.stop(do_test_finished);
+    await promiseStopServer(server);
   }
-}
-
-function run_test() {
-  initTestLogging("Trace");
-
-  test_toJSON();
-  test_fetch();
-}
+});
--- a/services/sync/tests/unit/test_resource.js
+++ b/services/sync/tests/unit/test_resource.js
@@ -143,21 +143,19 @@ function server_headers(metadata, respon
   for (let header of header_names) {
     headers[header] = metadata.getHeader(header);
   }
   let body = JSON.stringify(headers);
   response.setStatusLine(metadata.httpVersion, 200, "OK");
   response.bodyOutputStream.write(body, body.length);
 }
 
-function run_test() {
+add_task(async function test() {
   initTestLogging("Trace");
 
-  do_test_pending();
-
   let logger = Log.repository.getLogger("Test");
   Log.repository.rootLogger.addAppender(new Log.DumpAppender());
 
   let server = httpd_setup({
     "/open": server_open,
     "/protected": server_protected,
     "/404": server_404,
     "/upload": server_upload,
@@ -174,17 +172,17 @@ function run_test() {
   Svc.Prefs.set("network.numRetries", 1); // speed up test
 
   // This apparently has to come first in order for our PAC URL to be hit.
   // Don't put any other HTTP requests earlier in the file!
   _("Testing handling of proxy auth redirection.");
   PACSystemSettings.PACURI = server.baseURI + "/pac1";
   installFakePAC();
   let proxiedRes = new Resource(server.baseURI + "/open");
-  let content = proxiedRes.get();
+  let content = await proxiedRes.get();
   do_check_true(pacFetched);
   do_check_true(fetched);
   do_check_eq(content, "This path exists");
   pacFetched = fetched = false;
   uninstallFakePAC();
 
   _("Resource object members");
   let res = new Resource(server.baseURI + "/open");
@@ -193,17 +191,17 @@ function run_test() {
   do_check_eq(res.spec, server.baseURI + "/open");
   do_check_eq(typeof res.headers, "object");
   do_check_eq(typeof res.authenticator, "object");
   // Initially res.data is null since we haven't performed a GET or
   // PUT/POST request yet.
   do_check_eq(res.data, null);
 
   _("GET a non-password-protected resource");
-  content = res.get();
+  content = await res.get();
   do_check_eq(content, "This path exists");
   do_check_eq(content.status, 200);
   do_check_true(content.success);
   // res.data has been updated with the result from the request
   do_check_eq(res.data, content);
 
   // Observe logging messages.
   logger = res._log;
@@ -226,197 +224,197 @@ function run_test() {
   do_check_true(didThrow);
   do_check_eq(debugMessages.length, 1);
   do_check_eq(debugMessages[0],
               "Parse fail: Response body starts: \"\"This path exists\"\".");
   logger.debug = dbg;
 
   _("GET a password protected resource (test that it'll fail w/o pass, no throw)");
   let res2 = new Resource(server.baseURI + "/protected");
-  content = res2.get();
+  content = await res2.get();
   do_check_eq(content, "This path exists and is protected - failed");
   do_check_eq(content.status, 401);
   do_check_false(content.success);
 
   _("GET a password protected resource");
   let res3 = new Resource(server.baseURI + "/protected");
   let identityConfig = makeIdentityConfig();
   let browseridManager = Status._authManager;
   configureFxAccountIdentity(browseridManager, identityConfig);
   let auth = browseridManager.getResourceAuthenticator();
   res3.authenticator = auth;
   do_check_eq(res3.authenticator, auth);
-  content = res3.get();
+  content = await res3.get();
   do_check_eq(content, "This path exists and is protected");
   do_check_eq(content.status, 200);
   do_check_true(content.success);
 
   _("GET a non-existent resource (test that it'll fail, but not throw)");
   let res4 = new Resource(server.baseURI + "/404");
-  content = res4.get();
+  content = await res4.get();
   do_check_eq(content, "File not found");
   do_check_eq(content.status, 404);
   do_check_false(content.success);
 
   // Check some headers of the 404 response
   do_check_eq(content.headers.connection, "close");
   do_check_eq(content.headers.server, "httpd.js");
   do_check_eq(content.headers["content-length"], 14);
 
   _("PUT to a resource (string)");
   let res5 = new Resource(server.baseURI + "/upload");
-  content = res5.put(JSON.stringify(sample_data));
+  content = await res5.put(JSON.stringify(sample_data));
   do_check_eq(content, "Valid data upload via PUT");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("PUT to a resource (object)");
-  content = res5.put(sample_data);
+  content = await res5.put(sample_data);
   do_check_eq(content, "Valid data upload via PUT");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("PUT without data arg (uses resource.data) (string)");
   res5.data = JSON.stringify(sample_data);
-  content = res5.put();
+  content = await res5.put();
   do_check_eq(content, "Valid data upload via PUT");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("PUT without data arg (uses resource.data) (object)");
   res5.data = sample_data;
-  content = res5.put();
+  content = await res5.put();
   do_check_eq(content, "Valid data upload via PUT");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("POST to a resource (string)");
-  content = res5.post(JSON.stringify(sample_data));
+  content = await res5.post(JSON.stringify(sample_data));
   do_check_eq(content, "Valid data upload via POST");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("POST to a resource (object)");
-  content = res5.post(sample_data);
+  content = await res5.post(sample_data);
   do_check_eq(content, "Valid data upload via POST");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("POST without data arg (uses resource.data) (string)");
   res5.data = JSON.stringify(sample_data);
-  content = res5.post();
+  content = await res5.post();
   do_check_eq(content, "Valid data upload via POST");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("POST without data arg (uses resource.data) (object)");
   res5.data = sample_data;
-  content = res5.post();
+  content = await res5.post();
   do_check_eq(content, "Valid data upload via POST");
   do_check_eq(content.status, 200);
   do_check_eq(res5.data, content);
 
   _("DELETE a resource");
   let res6 = new Resource(server.baseURI + "/delete");
-  content = res6.delete();
+  content = await res6.delete();
   do_check_eq(content, "This resource has been deleted")
   do_check_eq(content.status, 200);
 
   _("JSON conversion of response body");
   let res7 = new Resource(server.baseURI + "/json");
-  content = res7.get();
+  content = await res7.get();
   do_check_eq(content, JSON.stringify(sample_data));
   do_check_eq(content.status, 200);
   do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
 
   _("X-Weave-Timestamp header updates AsyncResource.serverTime");
   // Before having received any response containing the
   // X-Weave-Timestamp header, AsyncResource.serverTime is null.
   do_check_eq(AsyncResource.serverTime, null);
   let res8 = new Resource(server.baseURI + "/timestamp");
-  content = res8.get();
+  content = await res8.get();
   do_check_eq(AsyncResource.serverTime, TIMESTAMP);
 
   _("GET: no special request headers");
   let res9 = new Resource(server.baseURI + "/headers");
-  content = res9.get();
+  content = await res9.get();
   do_check_eq(content, "{}");
 
   _("PUT: Content-Type defaults to text/plain");
-  content = res9.put("data");
+  content = await res9.put("data");
   do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
 
   _("POST: Content-Type defaults to text/plain");
-  content = res9.post("data");
+  content = await res9.post("data");
   do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
 
   _("setHeader(): setting simple header");
   res9.setHeader("X-What-Is-Weave", "awesome");
   do_check_eq(res9.headers["x-what-is-weave"], "awesome");
-  content = res9.get();
+  content = await res9.get();
   do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
 
   _("setHeader(): setting multiple headers, overwriting existing header");
   res9.setHeader("X-WHAT-is-Weave", "more awesomer");
   res9.setHeader("X-Another-Header", "hello world");
   do_check_eq(res9.headers["x-what-is-weave"], "more awesomer");
   do_check_eq(res9.headers["x-another-header"], "hello world");
-  content = res9.get();
+  content = await res9.get();
   do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
                                        "x-what-is-weave": "more awesomer"}));
 
   _("Setting headers object");
   res9.headers = {};
-  content = res9.get();
+  content = await res9.get();
   do_check_eq(content, "{}");
 
   _("PUT/POST: override default Content-Type");
   res9.setHeader("Content-Type", "application/foobar");
   do_check_eq(res9.headers["content-type"], "application/foobar");
-  content = res9.put("data");
+  content = await res9.put("data");
   do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-  content = res9.post("data");
+  content = await res9.post("data");
   do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
 
 
   _("X-Weave-Backoff header notifies observer");
   let backoffInterval;
   function onBackoff(subject, data) {
     backoffInterval = subject;
   }
   Observers.add("weave:service:backoff:interval", onBackoff);
 
   let res10 = new Resource(server.baseURI + "/backoff");
-  content = res10.get();
+  content = await res10.get();
   do_check_eq(backoffInterval, 600);
 
 
   _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
   let quotaValue;
   function onQuota(subject, data) {
     quotaValue = subject;
   }
   Observers.add("weave:service:quota:remaining", onQuota);
 
   res10 = new Resource(server.baseURI + "/quota-error");
-  content = res10.get();
+  content = await res10.get();
   do_check_eq(content.status, 400);
   do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
 
   res10 = new Resource(server.baseURI + "/quota-notice");
-  content = res10.get();
+  content = await res10.get();
   do_check_eq(content.status, 200);
   do_check_eq(quotaValue, 1048576);
 
 
   _("Error handling in _request() preserves exception information");
   let error;
   let res11 = new Resource("http://localhost:12345/does/not/exist");
   try {
-    content = res11.get();
+    content = await res11.get();
   } catch (ex) {
     error = ex;
   }
   do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
   do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
   do_check_eq(typeof error.stack, "string");
 
   _("Checking handling of errors in onProgress.");
@@ -425,24 +423,24 @@ function run_test() {
     // Provoke an XPC exception without a Javascript wrapper.
     Services.io.newURI("::::::::");
   };
   res18._onProgress = onProgress;
   let warnings = [];
   res18._log.warn = function(msg) { warnings.push(msg) };
   error = undefined;
   try {
-    content = res18.get();
+    content = await res18.get();
   } catch (ex) {
     error = ex;
   }
 
   // It throws and logs.
   do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
-  do_check_eq(error, "Error: NS_ERROR_MALFORMED_URI");
+  do_check_eq(error.message, "NS_ERROR_MALFORMED_URI");
   // Note the strings haven't been formatted yet, but that's OK for this test.
   do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
   do_check_eq(warnings.pop(),
               "Got exception calling onProgress handler during fetch of " +
               server.baseURI + "/json");
 
   // And this is what happens if JS throws an exception.
   res18 = new Resource(server.baseURI + "/json");
@@ -450,37 +448,37 @@ function run_test() {
     throw "BOO!";
   };
   res18._onProgress = onProgress;
   let oldWarn = res18._log.warn;
   warnings = [];
   res18._log.warn = function(msg) { warnings.push(msg) };
   error = undefined;
   try {
-    content = res18.get();
+    content = await res18.get();
   } catch (ex) {
     error = ex;
   }
 
   // It throws and logs.
   do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
-  do_check_eq(error, "Error: NS_ERROR_XPC_JS_THREW_STRING");
+  do_check_eq(error.message, "NS_ERROR_XPC_JS_THREW_STRING");
   do_check_eq(warnings.pop(), "${action} request to ${url} failed: ${ex}");
   do_check_eq(warnings.pop(),
               "Got exception calling onProgress handler during fetch of " +
               server.baseURI + "/json");
 
   res18._log.warn = oldWarn;
 
   _("Ensure channel timeouts are thrown appropriately.");
   let res19 = new Resource(server.baseURI + "/json");
   res19.ABORT_TIMEOUT = 0;
   error = undefined;
   try {
-    content = res19.get();
+    content = await res19.get();
   } catch (ex) {
     error = ex;
   }
   do_check_eq(error.result, Cr.NS_ERROR_NET_TIMEOUT);
 
   _("Testing URI construction.");
   let args = [];
   args.push("newer=" + 1234);
@@ -491,9 +489,9 @@ function run_test() {
 
   let uri1 = Utils.makeURI("http://foo/" + query)
                   .QueryInterface(Ci.nsIURL);
   let uri2 = Utils.makeURI("http://foo/")
                   .QueryInterface(Ci.nsIURL);
   uri2.query = query;
   do_check_eq(uri1.query, uri2.query);
-  server.stop(do_test_finished);
-}
+  await promiseStopServer(server);
+});
--- a/services/sync/tests/unit/test_resource_async.js
+++ b/services/sync/tests/unit/test_resource_async.js
@@ -158,39 +158,37 @@ function run_test() {
   Log.repository.rootLogger.addAppender(new Log.DumpAppender());
 
   Svc.Prefs.set("network.numRetries", 1); // speed up test
   run_next_test();
 }
 
 // This apparently has to come first in order for our PAC URL to be hit.
 // Don't put any other HTTP requests earlier in the file!
-add_test(function test_proxy_auth_redirect() {
+add_task(async function test_proxy_auth_redirect() {
   _("Ensure that a proxy auth redirect (which switches out our channel) " +
     "doesn't break AsyncResource.");
   let server = httpd_setup({
     "/open": server_open,
     "/pac2": server_pac
   });
 
   PACSystemSettings.PACURI = server.baseURI + "/pac2";
   installFakePAC();
   let res = new AsyncResource(server.baseURI + "/open");
-  res.get(function(error, result) {
-    do_check_true(!error);
-    do_check_true(pacFetched);
-    do_check_true(fetched);
-    do_check_eq("This path exists", result);
-    pacFetched = fetched = false;
-    uninstallFakePAC();
-    server.stop(run_next_test);
-  });
+  let result = await res.get();
+  do_check_true(pacFetched);
+  do_check_true(fetched);
+  do_check_eq("This path exists", result);
+  pacFetched = fetched = false;
+  uninstallFakePAC();
+  await promiseStopServer(server);
 });
 
-add_test(function test_new_channel() {
+add_task(async function test_new_channel() {
   _("Ensure a redirect to a new channel is handled properly.");
 
   let resourceRequested = false;
   function resourceHandler(metadata, response) {
     resourceRequested = true;
 
     let body = "Test";
     response.setHeader("Content-Type", "text/plain");
@@ -205,25 +203,23 @@ add_test(function test_new_channel() {
     response.bodyOutputStream.write(body, body.length);
   }
 
   let server = httpd_setup({"/resource": resourceHandler,
                             "/redirect": redirectHandler});
   locationURL = server.baseURI + "/resource";
 
   let request = new AsyncResource(server.baseURI + "/redirect");
-  request.get(function onRequest(error, content) {
-    do_check_null(error);
-    do_check_true(resourceRequested);
-    do_check_eq(200, content.status);
-    do_check_true("content-type" in content.headers);
-    do_check_eq("text/plain", content.headers["content-type"]);
+  let content = await request.get();
+  do_check_true(resourceRequested);
+  do_check_eq(200, content.status);
+  do_check_true("content-type" in content.headers);
+  do_check_eq("text/plain", content.headers["content-type"]);
 
-    server.stop(run_next_test);
-  });
+  await promiseStopServer(server);
 });
 
 
 var server;
 
 add_test(function setup() {
   server = httpd_setup({
     "/open": server_open,
@@ -254,438 +250,357 @@ add_test(function test_members() {
   do_check_eq(typeof res.authenticator, "object");
   // Initially res.data is null since we haven't performed a GET or
   // PUT/POST request yet.
   do_check_eq(res.data, null);
 
   run_next_test();
 });
 
-add_test(function test_get() {
+add_task(async function test_get() {
   _("GET a non-password-protected resource");
   let res = new AsyncResource(server.baseURI + "/open");
-  res.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "This path exists");
-    do_check_eq(content.status, 200);
-    do_check_true(content.success);
-    // res.data has been updated with the result from the request
-    do_check_eq(res.data, content);
-
-    // Observe logging messages.
-    let resLogger = res._log;
-    let dbg    = resLogger.debug;
-    let debugMessages = [];
-    resLogger.debug = function(msg) {
-      debugMessages.push(msg);
-      dbg.call(this, msg);
-    }
+  let content = await res.get();
+  do_check_eq(content, "This path exists");
+  do_check_eq(content.status, 200);
+  do_check_true(content.success);
+  // res.data has been updated with the result from the request
+  do_check_eq(res.data, content);
 
-    // Since we didn't receive proper JSON data, accessing content.obj
-    // will result in a SyntaxError from JSON.parse
-    let didThrow = false;
-    try {
-      content.obj;
-    } catch (ex) {
-      didThrow = true;
-    }
-    do_check_true(didThrow);
-    do_check_eq(debugMessages.length, 1);
-    do_check_eq(debugMessages[0],
-                "Parse fail: Response body starts: \"\"This path exists\"\".");
-    resLogger.debug = dbg;
+  // Observe logging messages.
+  let resLogger = res._log;
+  let dbg    = resLogger.debug;
+  let debugMessages = [];
+  resLogger.debug = function(msg) {
+    debugMessages.push(msg);
+    dbg.call(this, msg);
+  }
 
-    run_next_test();
-  });
+  // Since we didn't receive proper JSON data, accessing content.obj
+  // will result in a SyntaxError from JSON.parse
+  let didThrow = false;
+  try {
+    content.obj;
+  } catch (ex) {
+    didThrow = true;
+  }
+  do_check_true(didThrow);
+  do_check_eq(debugMessages.length, 1);
+  do_check_eq(debugMessages[0],
+              "Parse fail: Response body starts: \"\"This path exists\"\".");
+  resLogger.debug = dbg;
 });
 
 add_test(function test_basicauth() {
   _("Test that the BasicAuthenticator doesn't screw up header case.");
   let res1 = new AsyncResource(server.baseURI + "/foo");
   res1.setHeader("Authorization", "Basic foobar");
   do_check_eq(res1._headers["authorization"], "Basic foobar");
   do_check_eq(res1.headers["authorization"], "Basic foobar");
 
   run_next_test();
 });
 
-add_test(function test_get_protected_fail() {
+add_task(async function test_get_protected_fail() {
   _("GET a password protected resource (test that it'll fail w/o pass, no throw)");
   let res2 = new AsyncResource(server.baseURI + "/protected");
-  res2.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "This path exists and is protected - failed");
-    do_check_eq(content.status, 401);
-    do_check_false(content.success);
-    run_next_test();
-  });
+  let content = await res2.get();
+  do_check_eq(content, "This path exists and is protected - failed");
+  do_check_eq(content.status, 401);
+  do_check_false(content.success);
 });
 
-add_test(function test_get_protected_success() {
+add_task(async function test_get_protected_success() {
   _("GET a password protected resource");
   let identityConfig = makeIdentityConfig();
   let browseridManager = new BrowserIDManager();
   configureFxAccountIdentity(browseridManager, identityConfig);
   let auth = browseridManager.getResourceAuthenticator();
   let res3 = new AsyncResource(server.baseURI + "/protected");
   res3.authenticator = auth;
   do_check_eq(res3.authenticator, auth);
-  res3.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "This path exists and is protected");
-    do_check_eq(content.status, 200);
-    do_check_true(content.success);
-    run_next_test();
-  });
+  let content = await res3.get();
+  do_check_eq(content, "This path exists and is protected");
+  do_check_eq(content.status, 200);
+  do_check_true(content.success);
 });
 
-add_test(function test_get_404() {
+add_task(async function test_get_404() {
   _("GET a non-existent resource (test that it'll fail, but not throw)");
   let res4 = new AsyncResource(server.baseURI + "/404");
-  res4.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "File not found");
-    do_check_eq(content.status, 404);
-    do_check_false(content.success);
+  let content = await res4.get();
+  do_check_eq(content, "File not found");
+  do_check_eq(content.status, 404);
+  do_check_false(content.success);
 
-    // Check some headers of the 404 response
-    do_check_eq(content.headers.connection, "close");
-    do_check_eq(content.headers.server, "httpd.js");
-    do_check_eq(content.headers["content-length"], 14);
-
-    run_next_test();
-  });
+  // Check some headers of the 404 response
+  do_check_eq(content.headers.connection, "close");
+  do_check_eq(content.headers.server, "httpd.js");
+  do_check_eq(content.headers["content-length"], 14);
 });
 
-add_test(function test_put_string() {
+add_task(async function test_put_string() {
   _("PUT to a resource (string)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.put(JSON.stringify(sample_data), function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via PUT");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.put(JSON.stringify(sample_data));
+  do_check_eq(content, "Valid data upload via PUT");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_put_object() {
+add_task(async function test_put_object() {
   _("PUT to a resource (object)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.put(sample_data, function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via PUT");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.put(sample_data);
+  do_check_eq(content, "Valid data upload via PUT");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_put_data_string() {
+add_task(async function test_put_data_string() {
   _("PUT without data arg (uses resource.data) (string)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
   res_upload.data = JSON.stringify(sample_data);
-  res_upload.put(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via PUT");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.put();
+  do_check_eq(content, "Valid data upload via PUT");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_put_data_object() {
+add_task(async function test_put_data_object() {
   _("PUT without data arg (uses resource.data) (object)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
   res_upload.data = sample_data;
-  res_upload.put(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via PUT");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.put();
+  do_check_eq(content, "Valid data upload via PUT");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_post_string() {
+add_task(async function test_post_string() {
   _("POST to a resource (string)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.post(JSON.stringify(sample_data), function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via POST");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.post(JSON.stringify(sample_data));
+  do_check_eq(content, "Valid data upload via POST");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_post_object() {
+add_task(async function test_post_object() {
   _("POST to a resource (object)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
-  res_upload.post(sample_data, function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via POST");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.post(sample_data);
+  do_check_eq(content, "Valid data upload via POST");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_post_data_string() {
+add_task(async function test_post_data_string() {
   _("POST without data arg (uses resource.data) (string)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
   res_upload.data = JSON.stringify(sample_data);
-  res_upload.post(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via POST");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.post();
+  do_check_eq(content, "Valid data upload via POST");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_post_data_object() {
+add_task(async function test_post_data_object() {
   _("POST without data arg (uses resource.data) (object)");
   let res_upload = new AsyncResource(server.baseURI + "/upload");
   res_upload.data = sample_data;
-  res_upload.post(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "Valid data upload via POST");
-    do_check_eq(content.status, 200);
-    do_check_eq(res_upload.data, content);
-    run_next_test();
-  });
+  let content = await res_upload.post();
+  do_check_eq(content, "Valid data upload via POST");
+  do_check_eq(content.status, 200);
+  do_check_eq(res_upload.data, content);
 });
 
-add_test(function test_delete() {
+add_task(async function test_delete() {
   _("DELETE a resource");
   let res6 = new AsyncResource(server.baseURI + "/delete");
-  res6.delete(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "This resource has been deleted");
-    do_check_eq(content.status, 200);
-    run_next_test();
-  });
+  let content = await res6.delete();
+  do_check_eq(content, "This resource has been deleted");
+  do_check_eq(content.status, 200);
 });
 
-add_test(function test_json_body() {
+add_task(async function test_json_body() {
   _("JSON conversion of response body");
   let res7 = new AsyncResource(server.baseURI + "/json");
-  res7.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify(sample_data));
-    do_check_eq(content.status, 200);
-    do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
-    run_next_test();
-  });
+  let content = await res7.get();
+  do_check_eq(content, JSON.stringify(sample_data));
+  do_check_eq(content.status, 200);
+  do_check_eq(JSON.stringify(content.obj), JSON.stringify(sample_data));
 });
 
-add_test(function test_weave_timestamp() {
+add_task(async function test_weave_timestamp() {
   _("X-Weave-Timestamp header updates AsyncResource.serverTime");
   // Before having received any response containing the
   // X-Weave-Timestamp header, AsyncResource.serverTime is null.
   do_check_eq(AsyncResource.serverTime, null);
   let res8 = new AsyncResource(server.baseURI + "/timestamp");
-  res8.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(AsyncResource.serverTime, TIMESTAMP);
-    run_next_test();
-  });
+  await res8.get();
+  do_check_eq(AsyncResource.serverTime, TIMESTAMP);
 });
 
-add_test(function test_get_no_headers() {
+add_task(async function test_get_no_headers() {
   _("GET: no special request headers");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "{}");
-    run_next_test();
-  });
+  let content = await res_headers.get();
+  do_check_eq(content, "{}");
 });
 
-add_test(function test_put_default_content_type() {
+add_task(async function test_put_default_content_type() {
   _("PUT: Content-Type defaults to text/plain");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.put("data", function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
-    run_next_test();
-  });
+  let content = await res_headers.put("data");
+  do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
 });
 
-add_test(function test_post_default_content_type() {
+add_task(async function test_post_default_content_type() {
   _("POST: Content-Type defaults to text/plain");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
-  res_headers.post("data", function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
-    run_next_test();
-  });
+  let content = await res_headers.post("data");
+  do_check_eq(content, JSON.stringify({"content-type": "text/plain"}));
 });
 
-add_test(function test_setHeader() {
+add_task(async function test_setHeader() {
   _("setHeader(): setting simple header");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.setHeader("X-What-Is-Weave", "awesome");
   do_check_eq(res_headers.headers["x-what-is-weave"], "awesome");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
-    run_next_test();
-  });
+  let content = await res_headers.get();
+  do_check_eq(content, JSON.stringify({"x-what-is-weave": "awesome"}));
 });
 
-add_test(function test_setHeader_overwrite() {
+add_task(async function test_setHeader_overwrite() {
   _("setHeader(): setting multiple headers, overwriting existing header");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.setHeader("X-WHAT-is-Weave", "more awesomer");
   res_headers.setHeader("X-Another-Header", "hello world");
   do_check_eq(res_headers.headers["x-what-is-weave"], "more awesomer");
   do_check_eq(res_headers.headers["x-another-header"], "hello world");
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
-                                         "x-what-is-weave": "more awesomer"}));
-
-    run_next_test();
-  });
+  let content = await res_headers.get();
+  do_check_eq(content, JSON.stringify({"x-another-header": "hello world",
+                                       "x-what-is-weave": "more awesomer"}));
 });
 
-add_test(function test_headers_object() {
+add_task(async function test_headers_object() {
   _("Setting headers object");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.headers = {};
-  res_headers.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, "{}");
-    run_next_test();
-  });
+  let content = await res_headers.get();
+  do_check_eq(content, "{}");
 });
 
-add_test(function test_put_override_content_type() {
+add_task(async function test_put_override_content_type() {
   _("PUT: override default Content-Type");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.setHeader("Content-Type", "application/foobar");
   do_check_eq(res_headers.headers["content-type"], "application/foobar");
-  res_headers.put("data", function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-    run_next_test();
-  });
+  let content = await res_headers.put("data");
+  do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
 });
 
-add_test(function test_post_override_content_type() {
+add_task(async function test_post_override_content_type() {
   _("POST: override default Content-Type");
   let res_headers = new AsyncResource(server.baseURI + "/headers");
   res_headers.setHeader("Content-Type", "application/foobar");
-  res_headers.post("data", function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
-    run_next_test();
-  });
+  let content = await res_headers.post("data");
+  do_check_eq(content, JSON.stringify({"content-type": "application/foobar"}));
 });
 
-add_test(function test_weave_backoff() {
+add_task(async function test_weave_backoff() {
   _("X-Weave-Backoff header notifies observer");
   let backoffInterval;
   function onBackoff(subject, data) {
     backoffInterval = subject;
   }
   Observers.add("weave:service:backoff:interval", onBackoff);
 
   let res10 = new AsyncResource(server.baseURI + "/backoff");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(backoffInterval, 600);
-    run_next_test();
-  });
+  await res10.get();
+  do_check_eq(backoffInterval, 600);
+});
+
+add_task(async function test_quota_error() {
+  _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
+  let res10 = new AsyncResource(server.baseURI + "/quota-error");
+  let content = await res10.get();
+  do_check_eq(content.status, 400);
+  do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
 });
 
-add_test(function test_quota_error() {
-  _("X-Weave-Quota-Remaining header notifies observer on successful requests.");
-  let res10 = new AsyncResource(server.baseURI + "/quota-error");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content.status, 400);
-    do_check_eq(quotaValue, undefined); // HTTP 400, so no observer notification.
-    run_next_test();
+add_task(async function test_quota_notice() {
+  let res10 = new AsyncResource(server.baseURI + "/quota-notice");
+  let content = await res10.get();
+  do_check_eq(content.status, 200);
+  do_check_eq(quotaValue, 1048576);
+});
+
+add_task(async function test_preserve_exceptions() {
+  _("Error handling in ChannelListener etc. preserves exception information");
+  let res11 = new AsyncResource("http://localhost:12345/does/not/exist");
+  await Assert.rejects(res11.get(), error => {
+    do_check_neq(error, null);
+    do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
+    do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
+    return true;
   });
 });
 
-add_test(function test_quota_notice() {
-  let res10 = new AsyncResource(server.baseURI + "/quota-notice");
-  res10.get(function(error, content) {
-    do_check_eq(error, null);
-    do_check_eq(content.status, 200);
-    do_check_eq(quotaValue, 1048576);
-    run_next_test();
-  });
-});
-
-add_test(function test_preserve_exceptions() {
-  _("Error handling in ChannelListener etc. preserves exception information");
-  let res11 = new AsyncResource("http://localhost:12345/does/not/exist");
-  res11.get(function(error, content) {
-    do_check_neq(error, null);
-    do_check_eq(error.result, Cr.NS_ERROR_CONNECTION_REFUSED);
-    do_check_eq(error.message, "NS_ERROR_CONNECTION_REFUSED");
-    run_next_test();
-  });
-});
-
-add_test(function test_xpc_exception_handling() {
+add_task(async function test_xpc_exception_handling() {
   _("Exception handling inside fetches.");
   let res14 = new AsyncResource(server.baseURI + "/json");
   res14._onProgress = function(rec) {
     // Provoke an XPC exception without a Javascript wrapper.
     Services.io.newURI("::::::::");
   };
   let warnings = [];
   res14._log.warn = function(msg) { warnings.push(msg); };
 
-  res14.get(function(error, content) {
+  await Assert.rejects(res14.get(), error => {
     do_check_eq(error.result, Cr.NS_ERROR_MALFORMED_URI);
     do_check_eq(error.message, "NS_ERROR_MALFORMED_URI");
-    do_check_eq(content, null);
-    do_check_eq(warnings.pop(),
-                "Got exception calling onProgress handler during fetch of " +
-                server.baseURI + "/json");
-
-    run_next_test();
+    return true;
   });
+  do_check_eq(warnings.pop(),
+              "${action} request to ${url} failed: ${ex}");
+  do_check_eq(warnings.pop(),
+              "Got exception calling onProgress handler during fetch of " +
+              server.baseURI + "/json");
 });
 
-add_test(function test_js_exception_handling() {
+add_task(async function test_js_exception_handling() {
   _("JS exception handling inside fetches.");
   let res15 = new AsyncResource(server.baseURI + "/json");
   res15._onProgress = function(rec) {
     throw "BOO!";
   };
   let warnings = [];
   res15._log.warn = function(msg) { warnings.push(msg); };
 
-  res15.get(function(error, content) {
+  await Assert.rejects(res15.get(), error => {
     do_check_eq(error.result, Cr.NS_ERROR_XPC_JS_THREW_STRING);
     do_check_eq(error.message, "NS_ERROR_XPC_JS_THREW_STRING");
-    do_check_eq(content, null);
-    do_check_eq(warnings.pop(),
-                "Got exception calling onProgress handler during fetch of " +
-                server.baseURI + "/json");
-
-    run_next_test();
+    return true;
   });
+  do_check_eq(warnings.pop(),
+              "${action} request to ${url} failed: ${ex}");
+  do_check_eq(warnings.pop(),
+              "Got exception calling onProgress handler during fetch of " +
+              server.baseURI + "/json");
 });
 
-add_test(function test_timeout() {
+add_task(async function test_timeout() {
   _("Ensure channel timeouts are thrown appropriately.");
   let res19 = new AsyncResource(server.baseURI + "/json");
   res19.ABORT_TIMEOUT = 0;
-  res19.get(function(error, content) {
+  await Assert.rejects(res19.get(), error => {
     do_check_eq(error.result, Cr.NS_ERROR_NET_TIMEOUT);
-    run_next_test();
+    return true;
   });
 });
 
 add_test(function test_uri_construction() {
   _("Testing URI construction.");
   let args = [];
   args.push("newer=" + 1234);
   args.push("limit=" + 1234);
@@ -698,30 +613,15 @@ add_test(function test_uri_construction(
   let uri2 = Utils.makeURI("http://foo/")
                   .QueryInterface(Ci.nsIURL);
   uri2.query = query;
   do_check_eq(uri1.query, uri2.query);
 
   run_next_test();
 });
 
-add_test(function test_not_sending_cookie() {
-  let cookieSer = Cc["@mozilla.org/cookieService;1"]
-                    .getService(Ci.nsICookieService);
-  let uri = CommonUtils.makeURI(server.baseURI);
-  cookieSer.setCookieString(uri, null, "test=test; path=/;", null);
-
-  let res = new AsyncResource(server.baseURI + "/test");
-  res.get(function(error) {
-    do_check_null(error);
-    do_check_true(this.response.success);
-    do_check_eq("COOKIE!", this.response.body);
-    server.stop(run_next_test);
-  });
-});
-
 /**
  * End of tests that rely on a single HTTP server.
  * All tests after this point must begin and end their own.
  */
 add_test(function eliminate_server() {
   server.stop(run_next_test);
 });
--- a/services/sync/tests/unit/test_resource_header.js
+++ b/services/sync/tests/unit/test_resource_header.js
@@ -41,25 +41,25 @@ function triggerRedirect() {
                          "}";
 
   let prefsService = Cc["@mozilla.org/preferences-service;1"].getService(Ci.nsIPrefService);
   let prefs = prefsService.getBranch("network.proxy.");
   prefs.setIntPref("type", 2);
   prefs.setCharPref("autoconfig_url", "data:text/plain," + PROXY_FUNCTION);
 }
 
-add_test(function test_headers_copied() {
+add_task(async function test_headers_copied() {
   triggerRedirect();
 
   _("Issuing request.");
   let resource = new Resource(TEST_URL);
   resource.setHeader("Authorization", "Basic foobar");
   resource.setHeader("X-Foo", "foofoo");
 
-  let result = resource.get(TEST_URL);
+  let result = await resource.get();
   _("Result: " + result);
 
   do_check_eq(result, BODY);
   do_check_eq(auth, "Basic foobar");
   do_check_eq(foo, "foofoo");
 
-  httpServer.stop(run_next_test);
+  await promiseStopServer(httpServer);
 });
--- a/services/sync/tests/unit/test_resource_ua.js
+++ b/services/sync/tests/unit/test_resource_ua.js
@@ -52,47 +52,41 @@ add_test(function test_fetchInfo() {
   Service.login();
   Service._fetchInfo();
   _("User-Agent: " + ua);
   do_check_eq(ua, expectedUA + ".desktop");
   ua = "";
   run_next_test();
 });
 
-add_test(function test_desktop_post() {
+add_task(async function test_desktop_post() {
   _("Testing direct Resource POST.");
   let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.post("foo=bar", function(error, content) {
-    _("User-Agent: " + ua);
-    do_check_eq(ua, expectedUA + ".desktop");
-    ua = "";
-    run_next_test();
-  });
+  await r.post("foo=bar");
+  _("User-Agent: " + ua);
+  do_check_eq(ua, expectedUA + ".desktop");
+  ua = "";
 });
 
-add_test(function test_desktop_get() {
+add_task(async function test_desktop_get() {
   _("Testing async.");
   Svc.Prefs.set("client.type", "desktop");
   let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.get(function(error, content) {
-    _("User-Agent: " + ua);
-    do_check_eq(ua, expectedUA + ".desktop");
-    ua = "";
-    run_next_test();
-  });
+  await r.get();
+  _("User-Agent: " + ua);
+  do_check_eq(ua, expectedUA + ".desktop");
+  ua = "";
 });
 
-add_test(function test_mobile_get() {
+add_task(async function test_mobile_get() {
   _("Testing mobile.");
   Svc.Prefs.set("client.type", "mobile");
   let r = new AsyncResource(server.baseURI + "/1.1/johndoe/storage/meta/global");
-  r.get(function(error, content) {
-    _("User-Agent: " + ua);
-    do_check_eq(ua, expectedUA + ".mobile");
-    ua = "";
-    run_next_test();
-  });
+  await r.get();
+  _("User-Agent: " + ua);
+  do_check_eq(ua, expectedUA + ".mobile");
+  ua = "";
 });
 
 add_test(function tear_down() {
   server.stop(run_next_test);
 });
 
--- a/services/sync/tests/unit/test_service_detect_upgrade.js
+++ b/services/sync/tests/unit/test_service_detect_upgrade.js
@@ -108,79 +108,79 @@ add_task(async function v4_upgrade() {
     Service.sync();
     do_check_true(Service.isLoggedIn);
 
     let serverDecrypted;
     let serverKeys;
     let serverResp;
 
 
-    function retrieve_server_default() {
+    async function retrieve_server_default() {
       serverKeys = serverResp = serverDecrypted = null;
 
       serverKeys = new CryptoWrapper("crypto", "keys");
-      serverResp = serverKeys.fetch(Service.resource(Service.cryptoKeysURL)).response;
+      serverResp = (await serverKeys.fetch(Service.resource(Service.cryptoKeysURL))).response;
       do_check_true(serverResp.success);
 
       serverDecrypted = serverKeys.decrypt(Service.identity.syncKeyBundle);
       _("Retrieved WBO:       " + JSON.stringify(serverDecrypted));
       _("serverKeys:          " + JSON.stringify(serverKeys));
 
       return serverDecrypted.default;
     }
 
-    function retrieve_and_compare_default(should_succeed) {
-      let serverDefault = retrieve_server_default();
+    async function retrieve_and_compare_default(should_succeed) {
+      let serverDefault = await retrieve_server_default();
       let localDefault = Service.collectionKeys.keyForCollection().keyPairB64;
 
       _("Retrieved keyBundle: " + JSON.stringify(serverDefault));
       _("Local keyBundle:     " + JSON.stringify(localDefault));
 
       if (should_succeed)
         do_check_eq(JSON.stringify(serverDefault), JSON.stringify(localDefault));
       else
         do_check_neq(JSON.stringify(serverDefault), JSON.stringify(localDefault));
     }
 
     // Uses the objects set above.
-    function set_server_keys(pair) {
+    async function set_server_keys(pair) {
       serverDecrypted.default = pair;
       serverKeys.cleartext = serverDecrypted;
       serverKeys.encrypt(Service.identity.syncKeyBundle);
-      serverKeys.upload(Service.resource(Service.cryptoKeysURL));
+      await serverKeys.upload(Service.resource(Service.cryptoKeysURL));
     }
 
     _("Checking we have the latest keys.");
-    retrieve_and_compare_default(true);
+    await retrieve_and_compare_default(true);
 
     _("Update keys on server.");
-    set_server_keys(["KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
-                     "aaaaaaaaaaaapxMO6TEWtLIOv9dj6kBAJdzhWDkkkis="]);
+    await set_server_keys(["KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
+                           "aaaaaaaaaaaapxMO6TEWtLIOv9dj6kBAJdzhWDkkkis="]);
 
     _("Checking that we no longer have the latest keys.");
-    retrieve_and_compare_default(false);
+    await retrieve_and_compare_default(false);
 
     _("Indeed, they're what we set them to...");
     do_check_eq("KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
-                retrieve_server_default()[0]);
+                (await retrieve_server_default())[0]);
 
     _("Sync. Should download changed keys automatically.");
     let oldClientsModified = collections.clients;
     let oldTabsModified = collections.tabs;
 
     Service.login();
     Service.sync();
     _("New key should have forced upload of data.");
     _("Tabs: " + oldTabsModified + " < " + collections.tabs);
     _("Clients: " + oldClientsModified + " < " + collections.clients);
     do_check_true(collections.clients > oldClientsModified);
     do_check_true(collections.tabs > oldTabsModified);
 
     _("... and keys will now match.");
-    retrieve_and_compare_default(true);
+    await retrieve_and_compare_default(true);
 
     // Clean up.
     Service.startOver();
 
   } finally {
     Svc.Prefs.resetBranch("");
     await promiseStopServer(server);
   }
@@ -232,37 +232,37 @@ add_task(async function v5_upgrade() {
 
     Service.clusterURL = server.baseURI + "/";
 
     await configureIdentity({ "username": "johndoe" }, server);
 
     // Test an upgrade where the contents of the server would cause us to error
     // -- keys decrypted with a different sync key, for example.
     _("Testing v4 -> v5 (or similar) upgrade.");
-    function update_server_keys(syncKeyBundle, wboName, collWBO) {
+    async function update_server_keys(syncKeyBundle, wboName, collWBO) {
       generateNewKeys(Service.collectionKeys);
       let serverKeys = Service.collectionKeys.asWBO("crypto", wboName);
       serverKeys.encrypt(syncKeyBundle);
       let res = Service.resource(Service.storageURL + collWBO);
-      do_check_true(serverKeys.upload(res).success);
+      do_check_true((await serverKeys.upload(res)).success);
     }
 
     _("Bumping version.");
     // Bump version on the server.
     let m = new WBORecord("meta", "global");
     m.payload = {"syncID": "foooooooooooooooooooooooooo",
                  "storageVersion": STORAGE_VERSION + 1};
-    m.upload(Service.resource(Service.metaURL));
+    await m.upload(Service.resource(Service.metaURL));
 
     _("New meta/global: " + JSON.stringify(meta_global));
 
     // Fill the keys with bad data.
     let badKeys = new SyncKeyBundle("foobar", "aaaaaaaaaaaaaaaaaaaaaaaaaa");
-    update_server_keys(badKeys, "keys", "crypto/keys");  // v4
-    update_server_keys(badKeys, "bulk", "crypto/bulk");  // v5
+    await update_server_keys(badKeys, "keys", "crypto/keys");  // v4
+    await update_server_keys(badKeys, "bulk", "crypto/bulk");  // v5
 
     _("Generating new keys.");
     generateNewKeys(Service.collectionKeys);
 
     // Now sync and see what happens. It should be a version fail, not a crypto
     // fail.
 
     _("Logging in.");
--- a/services/sync/tests/unit/test_service_startOver.js
+++ b/services/sync/tests/unit/test_service_startOver.js
@@ -9,17 +9,17 @@ Cu.import("resource://testing-common/ser
 
 function BlaEngine() {
   SyncEngine.call(this, "Bla", Service);
 }
 BlaEngine.prototype = {
   __proto__: SyncEngine.prototype,
 
   removed: false,
-  removeClientData() {
+  async removeClientData() {
     this.removed = true;
   }
 
 };
 
 Service.engineManager.register(BlaEngine);
 
 
--- a/services/sync/tests/unit/test_service_sync_remoteSetup.js
+++ b/services/sync/tests/unit/test_service_sync_remoteSetup.js
@@ -80,17 +80,17 @@ add_task(async function run_test() {
     Service.verifyAndFetchSymmetricKeys();
     do_check_eq(Service.status.sync, CREDENTIALS_CHANGED);
     do_check_eq(Service.status.login, LOGIN_FAILED_NO_PASSPHRASE);
 
     await configureIdentity({ username: "johndoe" }, server);
 
     Service.login();
     _("Checking that remoteSetup returns true when credentials have changed.");
-    Service.recordManager.get(Service.metaURL).payload.syncID = "foobar";
+    (await Service.recordManager.get(Service.metaURL)).payload.syncID = "foobar";
     do_check_true(Service._remoteSetup());
 
     let returnStatusCode = (method, code) => (oldMethod) => (req, res) => {
       if (req.method === method) {
         res.setStatusLine(req.httpVersion, code, "");
       } else {
         oldMethod(req, res);
       }
--- a/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
+++ b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
@@ -260,17 +260,17 @@ add_task(async function test_enabledRemo
   await setUp(server);
 
   // We need to be very careful how we do this, so that we don't trigger a
   // fresh start!
   try {
     _("Upload some keys to avoid a fresh start.");
     let wbo = Service.collectionKeys.generateNewKeysWBO();
     wbo.encrypt(Service.identity.syncKeyBundle);
-    do_check_eq(200, wbo.upload(Service.resource(Service.cryptoKeysURL)).status);
+    do_check_eq(200, (await wbo.upload(Service.resource(Service.cryptoKeysURL))).status);
 
     _("Engine is disabled.");
     do_check_false(engine.enabled);
 
     _("Sync.");
     Service.sync();
 
     _("Engine is enabled.");
--- a/services/sync/tests/unit/test_syncengine_sync.js
+++ b/services/sync/tests/unit/test_syncengine_sync.js
@@ -94,17 +94,17 @@ add_task(async function test_syncStartup
   await SyncTestingInfrastructure(server);
 
   let engine = makeRotaryEngine();
   engine._store.items = {rekolok: "Rekonstruktionslokomotive"};
   try {
 
     // Confirm initial environment
     do_check_eq(engine._tracker.changedIDs["rekolok"], undefined);
-    let metaGlobal = Service.recordManager.get(engine.metaURL);
+    let metaGlobal = await Service.recordManager.get(engine.metaURL);
     do_check_eq(metaGlobal.payload.engines, undefined);
     do_check_true(!!collection.payload("flying"));
     do_check_true(!!collection.payload("scotsman"));
 
     engine.lastSync = Date.now() / 1000;
     engine.lastSyncLocal = Date.now();
 
     // Trying to prompt a wipe -- we no longer track CryptoMeta per engine,
--- a/services/sync/tests/unit/test_syncscheduler.js
+++ b/services/sync/tests/unit/test_syncscheduler.js
@@ -58,17 +58,17 @@ function sync_httpd_setup() {
 }
 
 async function setUp(server) {
   await configureIdentity({username: "johndoe@mozilla.com"}, server);
 
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
-  let result = serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+  let result = (await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success;
   return result;
 }
 
 async function cleanUpAndGo(server) {
   await promiseNextTick();
   clientsEngine._store.wipe();
   Service.startOver();
   if (server) {
--- a/services/sync/tests/unit/test_warn_on_truncated_response.js
+++ b/services/sync/tests/unit/test_warn_on_truncated_response.js
@@ -33,45 +33,43 @@ function getWarningMessages(log) {
     if (message.match(regEx)) {
       warnMessages.push(message);
     }
     warn.call(log, message);
   }
   return warnMessages;
 }
 
-add_test(function test_resource_logs_content_length_mismatch() {
+add_task(async function test_resource_logs_content_length_mismatch() {
   _("Issuing request.");
   let httpServer = httpd_setup({"/content": contentHandler});
   let resource = new Resource(httpServer.baseURI + "/content");
 
   let warnMessages = getWarningMessages(resource._log);
-  let result = resource.get();
+  let result = await resource.get();
 
   notEqual(warnMessages.length, 0, "test that a warning was logged");
   notEqual(result.length, contentLength);
   equal(result, BODY);
 
-  httpServer.stop(run_next_test);
+  await promiseStopServer(httpServer);
 });
 
-add_test(function test_async_resource_logs_content_length_mismatch() {
+add_task(async function test_async_resource_logs_content_length_mismatch() {
   _("Issuing request.");
   let httpServer = httpd_setup({"/content": contentHandler});
   let asyncResource = new AsyncResource(httpServer.baseURI + "/content");
 
   let warnMessages = getWarningMessages(asyncResource._log);
 
-  asyncResource.get(function(error, content) {
-    equal(error, null);
-    equal(content, BODY);
-    notEqual(warnMessages.length, 0, "test that warning was logged");
-    notEqual(content.length, contentLength);
-    httpServer.stop(run_next_test);
-  });
+  let content = await asyncResource.get();
+  equal(content, BODY);
+  notEqual(warnMessages.length, 0, "test that a warning was logged");
+  notEqual(content.length, contentLength);
+  await promiseStopServer(httpServer);
 });
 
 add_test(function test_sync_storage_request_logs_content_length_mismatch() {
   _("Issuing request.");
   let httpServer = httpd_setup({"/content": contentHandler});
   let request = new SyncStorageRequest(httpServer.baseURI + "/content");
   let warnMessages = getWarningMessages(request._log);
 
--- a/services/sync/tps/extensions/tps/resource/tps.jsm
+++ b/services/sync/tps/extensions/tps/resource/tps.jsm
@@ -659,17 +659,17 @@ var TPS = {
 
   ValidateCollection(engineName, ValidatorType) {
     let serverRecordDumpStr;
     let clientRecordDumpStr;
     try {
       Logger.logInfo(`About to perform validation for "${engineName}"`);
       let engine = Weave.Service.engineManager.get(engineName);
       let validator = new ValidatorType(engine);
-      let serverRecords = validator.getServerItems(engine);
+      let serverRecords = Async.promiseSpinningly(validator.getServerItems(engine));
       let clientRecords = Async.promiseSpinningly(validator.getClientItems());
       try {
         // This substantially improves the logs for addons while not making a
         // substantial difference for the other two
         clientRecordDumpStr = JSON.stringify(clientRecords.map(r => {
           let res = validator.normalizeClientItem(r);
           delete res.original; // Try and prevent cyclic references
           return res;