Bug 1400467 - Make WeaveCrypto use promises instead of spinning event loops r?markh,eoger draft
authorThom Chiovoloni <tchiovoloni@mozilla.com>
Fri, 15 Sep 2017 19:21:31 -0700
changeset 671352 41a2c1b3120c720a0ed811a2b74c9a0f11affe09
parent 671351 18a91f94f2ed4f049f5d1b62ee5674d2866e28a9
child 671353 ca226323d1a0fa9e7d4639d689e99f58e335ba82
push id81922
push userbmo:tchiovoloni@mozilla.com
push dateWed, 27 Sep 2017 20:58:37 +0000
reviewersmarkh, eoger
bugs1400467
milestone58.0a1
Bug 1400467 - Make WeaveCrypto use promises instead of spinning event loops r?markh,eoger MozReview-Commit-ID: 64dewKKaAOJ
services/crypto/modules/WeaveCrypto.js
services/crypto/tests/unit/test_crypto_crypt.js
services/crypto/tests/unit/test_crypto_random.js
services/sync/modules-testing/fakeservices.js
services/sync/modules/bookmark_validator.js
services/sync/modules/collection_validator.js
services/sync/modules/engines.js
services/sync/modules/engines/bookmarks.js
services/sync/modules/keys.js
services/sync/modules/record.js
services/sync/modules/service.js
services/sync/tests/unit/head_errorhandler_common.js
services/sync/tests/unit/head_helpers.js
services/sync/tests/unit/test_412.js
services/sync/tests/unit/test_addons_engine.js
services/sync/tests/unit/test_bookmark_decline_undecline.js
services/sync/tests/unit/test_bookmark_duping.js
services/sync/tests/unit/test_bookmark_engine.js
services/sync/tests/unit/test_bookmark_order.js
services/sync/tests/unit/test_bookmark_record.js
services/sync/tests/unit/test_bookmark_repair.js
services/sync/tests/unit/test_bookmark_repair_responder.js
services/sync/tests/unit/test_bookmark_smart_bookmarks.js
services/sync/tests/unit/test_clients_engine.js
services/sync/tests/unit/test_clients_escape.js
services/sync/tests/unit/test_corrupt_keys.js
services/sync/tests/unit/test_engine_abort.js
services/sync/tests/unit/test_engine_changes_during_sync.js
services/sync/tests/unit/test_errorhandler_2.js
services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
services/sync/tests/unit/test_interval_triggers.js
services/sync/tests/unit/test_keys.js
services/sync/tests/unit/test_password_engine.js
services/sync/tests/unit/test_records_crypto.js
services/sync/tests/unit/test_service_detect_upgrade.js
services/sync/tests/unit/test_service_sync_remoteSetup.js
services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
services/sync/tests/unit/test_service_wipeClient.js
services/sync/tests/unit/test_syncengine_sync.js
services/sync/tests/unit/test_syncscheduler.js
services/sync/tests/unit/test_tab_engine.js
services/sync/tests/unit/test_telemetry.js
services/sync/tps/extensions/tps/resource/tps.jsm
toolkit/components/extensions/ExtensionStorageSync.jsm
toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
--- a/services/crypto/modules/WeaveCrypto.js
+++ b/services/crypto/modules/WeaveCrypto.js
@@ -64,88 +64,80 @@ WeaveCrypto.prototype = {
         Services.console.logStringMessage("WeaveCrypto: " + message);
     },
 
     // /!\ Only use this for tests! /!\
     _getCrypto() {
         return crypto;
     },
 
-    encrypt(clearTextUCS2, symmetricKey, iv) {
+    async encrypt(clearTextUCS2, symmetricKey, iv) {
         this.log("encrypt() called");
         let clearTextBuffer = this.encoder.encode(clearTextUCS2).buffer;
-        let encrypted = this._commonCrypt(clearTextBuffer, symmetricKey, iv, OPERATIONS.ENCRYPT);
+        let encrypted = await this._commonCrypt(clearTextBuffer, symmetricKey, iv, OPERATIONS.ENCRYPT);
         return this.encodeBase64(encrypted);
     },
 
-    decrypt(cipherText, symmetricKey, iv) {
+    async decrypt(cipherText, symmetricKey, iv) {
         this.log("decrypt() called");
         if (cipherText.length) {
             cipherText = atob(cipherText);
         }
         let cipherTextBuffer = this.byteCompressInts(cipherText);
-        let decrypted = this._commonCrypt(cipherTextBuffer, symmetricKey, iv, OPERATIONS.DECRYPT);
+        let decrypted = await this._commonCrypt(cipherTextBuffer, symmetricKey, iv, OPERATIONS.DECRYPT);
         return this.decoder.decode(decrypted);
     },
 
     /**
      * _commonCrypt
      *
      * @args
      * data: data to encrypt/decrypt (ArrayBuffer)
      * symKeyStr: symmetric key (Base64 String)
      * ivStr: initialization vector (Base64 String)
      * operation: operation to apply (either OPERATIONS.ENCRYPT or OPERATIONS.DECRYPT)
      * @returns
      * the encrypted/decrypted data (ArrayBuffer)
     */
-    _commonCrypt(data, symKeyStr, ivStr, operation) {
+    async _commonCrypt(data, symKeyStr, ivStr, operation) {
         this.log("_commonCrypt() called");
         ivStr = atob(ivStr);
 
         if (operation !== OPERATIONS.ENCRYPT && operation !== OPERATIONS.DECRYPT) {
             throw new Error("Unsupported operation in _commonCrypt.");
         }
         // We never want an IV longer than the block size, which is 16 bytes
         // for AES, neither do we want one smaller; throw in both cases.
         if (ivStr.length !== AES_CBC_IV_SIZE) {
             throw new Error(
                 `Invalid IV size; must be ${AES_CBC_IV_SIZE} bytes.`);
         }
 
         let iv = this.byteCompressInts(ivStr);
-        let symKey = this.importSymKey(symKeyStr, operation);
+        let symKey = await this.importSymKey(symKeyStr, operation);
         let cryptMethod = (operation === OPERATIONS.ENCRYPT
                            ? crypto.subtle.encrypt
                            : crypto.subtle.decrypt)
                           .bind(crypto.subtle);
         let algo = { name: CRYPT_ALGO, iv };
 
-
-        return Async.promiseSpinningly(
-            cryptMethod(algo, symKey, data)
-            .then(keyBytes => new Uint8Array(keyBytes))
-        );
+        let keyBytes = await cryptMethod.call(crypto.subtle, algo, symKey, data);
+        return new Uint8Array(keyBytes);
     },
 
 
-    generateRandomKey() {
+    async generateRandomKey() {
         this.log("generateRandomKey() called");
         let algo = {
             name: CRYPT_ALGO,
             length: CRYPT_ALGO_LENGTH
         };
-        return Async.promiseSpinningly(
-            crypto.subtle.generateKey(algo, true, [])
-            .then(key => crypto.subtle.exportKey("raw", key))
-            .then(keyBytes => {
-                keyBytes = new Uint8Array(keyBytes);
-                return this.encodeBase64(keyBytes);
-            })
-        );
+        let key = await crypto.subtle.generateKey(algo, true, []);
+        let keyBytes = await crypto.subtle.exportKey("raw", key);
+        return this.encodeBase64(new Uint8Array(keyBytes));
     },
 
     generateRandomIV() {
       return this.generateRandomBytes(AES_CBC_IV_SIZE);
     },
 
     generateRandomBytes(byteCount) {
         this.log("generateRandomBytes() called");
@@ -159,17 +151,17 @@ WeaveCrypto.prototype = {
     //
     // SymKey CryptoKey memoization.
     //
 
     // Memoize the import of symmetric keys. We do this by using the base64
     // string itself as a key.
     _encryptionSymKeyMemo: {},
     _decryptionSymKeyMemo: {},
-    importSymKey(encodedKeyString, operation) {
+    async importSymKey(encodedKeyString, operation) {
         let memo;
 
         // We use two separate memos for thoroughness: operation is an input to
         // key import.
         switch (operation) {
             case OPERATIONS.ENCRYPT:
                 memo = this._encryptionSymKeyMemo;
                 break;
@@ -181,24 +173,19 @@ WeaveCrypto.prototype = {
         }
 
         if (encodedKeyString in memo)
             return memo[encodedKeyString];
 
         let symmetricKeyBuffer = this.makeUint8Array(encodedKeyString, true);
         let algo = { name: CRYPT_ALGO };
         let usages = [operation === OPERATIONS.ENCRYPT ? "encrypt" : "decrypt"];
-
-        return Async.promiseSpinningly(
-            crypto.subtle.importKey("raw", symmetricKeyBuffer, algo, false, usages)
-            .then(symKey => {
-                memo[encodedKeyString] = symKey;
-                return symKey;
-            })
-        );
+        let symKey = await crypto.subtle.importKey("raw", symmetricKeyBuffer, algo, false, usages);
+        memo[encodedKeyString] = symKey;
+        return symKey;
     },
 
 
     //
     // Utility functions
     //
 
     /**
--- a/services/crypto/tests/unit/test_crypto_crypt.js
+++ b/services/crypto/tests/unit/test_crypto_crypt.js
@@ -7,34 +7,34 @@ add_task(async function test_key_memoiza
   let cryptoGlobal = cryptoSvc._getCrypto();
   let oldImport = cryptoGlobal.subtle.importKey;
   if (!oldImport) {
     _("Couldn't swizzle crypto.subtle.importKey; returning.");
     return;
   }
 
   let iv  = cryptoSvc.generateRandomIV();
-  let key = cryptoSvc.generateRandomKey();
+  let key = await cryptoSvc.generateRandomKey();
   let c   = 0;
   cryptoGlobal.subtle.importKey = function(format, keyData, algo, extractable, usages) {
     c++;
     return oldImport.call(cryptoGlobal.subtle, format, keyData, algo, extractable, usages);
   }
 
   // Encryption should cause a single counter increment.
   do_check_eq(c, 0);
-  let cipherText = cryptoSvc.encrypt("Hello, world.", key, iv);
+  let cipherText = await cryptoSvc.encrypt("Hello, world.", key, iv);
   do_check_eq(c, 1);
-  cipherText = cryptoSvc.encrypt("Hello, world.", key, iv);
+  cipherText = await cryptoSvc.encrypt("Hello, world.", key, iv);
   do_check_eq(c, 1);
 
   // ... as should decryption.
-  cryptoSvc.decrypt(cipherText, key, iv);
-  cryptoSvc.decrypt(cipherText, key, iv);
-  cryptoSvc.decrypt(cipherText, key, iv);
+  await cryptoSvc.decrypt(cipherText, key, iv);
+  await cryptoSvc.decrypt(cipherText, key, iv);
+  await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(c, 2);
 
   // Un-swizzle.
   cryptoGlobal.subtle.importKey = oldImport;
 });
 
 // Just verify that it gets populated with the correct bytes.
 add_task(async function test_makeUint8Array() {
@@ -47,167 +47,167 @@ add_task(async function test_makeUint8Ar
 });
 
 add_task(async function test_encrypt_decrypt() {
   // First, do a normal run with expected usage... Generate a random key and
   // iv, encrypt and decrypt a string.
   var iv = cryptoSvc.generateRandomIV();
   do_check_eq(iv.length, 24);
 
-  var key = cryptoSvc.generateRandomKey();
+  var key = await cryptoSvc.generateRandomKey();
   do_check_eq(key.length, 44);
 
   var mySecret = "bacon is a vegetable";
-  var cipherText = cryptoSvc.encrypt(mySecret, key, iv);
+  var cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
   do_check_eq(cipherText.length, 44);
 
-  var clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  var clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(clearText.length, 20);
 
   // Did the text survive the encryption round-trip?
   do_check_eq(clearText, mySecret);
   do_check_neq(cipherText, mySecret); // just to be explicit
 
 
   // Do some more tests with a fixed key/iv, to check for reproducable results.
   key = "St1tFCor7vQEJNug/465dQ==";
   iv  = "oLjkfrLIOnK2bDRvW4kXYA==";
 
   _("Testing small IV.");
   mySecret = "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo=";
   let shortiv  = "YWJj";
   let err;
   try {
-    cryptoSvc.encrypt(mySecret, key, shortiv);
+    await cryptoSvc.encrypt(mySecret, key, shortiv);
   } catch (ex) {
     err = ex;
   }
   do_check_true(!!err);
 
   _("Testing long IV.");
   let longiv  = "gsgLRDaxWvIfKt75RjuvFWERt83FFsY2A0TW+0b2iVk=";
   try {
-    cryptoSvc.encrypt(mySecret, key, longiv);
+    await cryptoSvc.encrypt(mySecret, key, longiv);
   } catch (ex) {
     err = ex;
   }
   do_check_true(!!err);
 
   // Test small input sizes
   mySecret = "";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "OGQjp6mK1a3fs9k9Ml4L3w==");
   do_check_eq(clearText, mySecret);
 
   mySecret = "x";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "96iMl4vhOxFUW/lVHHzVqg==");
   do_check_eq(clearText, mySecret);
 
   mySecret = "xx";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "olpPbETRYROCSqFWcH2SWg==");
   do_check_eq(clearText, mySecret);
 
   mySecret = "xxx";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "rRbpHGyVSZizLX/x43Wm+Q==");
   do_check_eq(clearText, mySecret);
 
   mySecret = "xxxx";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "HeC7miVGDcpxae9RmiIKAw==");
   do_check_eq(clearText, mySecret);
 
   // Test non-ascii input
   // ("testuser1" using similar-looking glyphs)
   mySecret = String.fromCharCode(355, 277, 349, 357, 533, 537, 101, 345, 185);
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "Pj4ixByXoH3SU3JkOXaEKPgwRAWplAWFLQZkpJd5Kr4=");
   do_check_eq(clearText, mySecret);
 
   // Tests input spanning a block boundary (AES block size is 16 bytes)
   mySecret = "123456789012345";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "e6c5hwphe45/3VN/M0bMUA==");
   do_check_eq(clearText, mySecret);
 
   mySecret = "1234567890123456";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "V6aaOZw8pWlYkoIHNkhsP1JOIQF87E2vTUvBUQnyV04=");
   do_check_eq(clearText, mySecret);
 
   mySecret = "12345678901234567";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "V6aaOZw8pWlYkoIHNkhsP5GvxWJ9+GIAS6lXw+5fHTI=");
   do_check_eq(clearText, mySecret);
 
 
   key = "iz35tuIMq4/H+IYw2KTgow==";
   iv  = "TJYrvva2KxvkM8hvOIvWp3==";
   mySecret = "i like pie";
 
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "DLGx8BWqSCLGG7i/xwvvxg==");
   do_check_eq(clearText, mySecret);
 
   key = "c5hG3YG+NC61FFy8NOHQak1ZhMEWO79bwiAfar2euzI=";
   iv  = "gsgLRDaxWvIfKt75RjuvFW==";
   mySecret = "i like pie";
 
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
-  clearText = cryptoSvc.decrypt(cipherText, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
+  clearText = await cryptoSvc.decrypt(cipherText, key, iv);
   do_check_eq(cipherText, "o+ADtdMd8ubzNWurS6jt0Q==");
   do_check_eq(clearText, mySecret);
 
   key = "St1tFCor7vQEJNug/465dQ==";
   iv  = "oLjkfrLIOnK2bDRvW4kXYA==";
   mySecret = "does thunder read testcases?";
-  cipherText = cryptoSvc.encrypt(mySecret, key, iv);
+  cipherText = await cryptoSvc.encrypt(mySecret, key, iv);
   do_check_eq(cipherText, "T6fik9Ros+DB2ablH9zZ8FWZ0xm/szSwJjIHZu7sjPs=");
 
   var badkey    = "badkeybadkeybadkeybadk==";
   var badiv     = "badivbadivbadivbadivbad=";
   var badcipher = "crapinputcrapinputcrapinputcrapinputcrapinp=";
   var failure;
 
   try {
     failure = false;
-    clearText = cryptoSvc.decrypt(cipherText, badkey, iv);
+    clearText = await cryptoSvc.decrypt(cipherText, badkey, iv);
   } catch (e) {
     failure = true;
   }
   do_check_true(failure);
 
   try {
     failure = false;
-    clearText = cryptoSvc.decrypt(cipherText, key, badiv);
+    clearText = await cryptoSvc.decrypt(cipherText, key, badiv);
   } catch (e) {
     failure = true;
   }
   do_check_true(failure);
 
   try {
     failure = false;
-    clearText = cryptoSvc.decrypt(cipherText, badkey, badiv);
+    clearText = await cryptoSvc.decrypt(cipherText, badkey, badiv);
   } catch (e) {
     failure = true;
   }
   do_check_true(failure);
 
   try {
     failure = false;
-    clearText = cryptoSvc.decrypt(badcipher, key, iv);
+    clearText = await cryptoSvc.decrypt(badcipher, key, iv);
   } catch (e) {
     failure = true;
   }
   do_check_true(failure);
 });
--- a/services/crypto/tests/unit/test_crypto_random.js
+++ b/services/crypto/tests/unit/test_crypto_random.js
@@ -1,13 +1,13 @@
 Cu.import("resource://services-crypto/WeaveCrypto.js", this);
 
 var cryptoSvc = new WeaveCrypto();
 
-function run_test() {
+add_task(async function test_crypto_random() {
   if (this.gczeal) {
     _("Running crypto random tests with gczeal(2).");
     gczeal(2);
   }
 
   // Test salt generation.
   var salt;
 
@@ -33,18 +33,18 @@ function run_test() {
   do_check_eq(salt.length, 1368);
   salt = cryptoSvc.generateRandomBytes(16);
   do_check_eq(salt.length, 24);
 
 
   // Test random key generation
   var keydata, keydata2, iv;
 
-  keydata  = cryptoSvc.generateRandomKey();
+  keydata  = await cryptoSvc.generateRandomKey();
   do_check_eq(keydata.length, 44);
-  keydata2 = cryptoSvc.generateRandomKey();
+  keydata2 = await cryptoSvc.generateRandomKey();
   do_check_neq(keydata, keydata2); // sanity check for randomness
   iv = cryptoSvc.generateRandomIV();
   do_check_eq(iv.length, 24);
 
   if (this.gczeal)
     gczeal(0);
-}
+});
--- a/services/sync/modules-testing/fakeservices.js
+++ b/services/sync/modules-testing/fakeservices.js
@@ -91,25 +91,25 @@ this.FakeCryptoService = function FakeCr
   Weave.Crypto = this;
 
   CryptoWrapper.prototype.ciphertextHMAC = function ciphertextHMAC(keyBundle) {
     return fakeSHA256HMAC(this.ciphertext);
   };
 }
 FakeCryptoService.prototype = {
 
-  encrypt: function encrypt(clearText, symmetricKey, iv) {
+  async encrypt(clearText, symmetricKey, iv) {
     return clearText;
   },
 
-  decrypt: function decrypt(cipherText, symmetricKey, iv) {
+  async decrypt(cipherText, symmetricKey, iv) {
     return cipherText;
   },
 
-  generateRandomKey: function generateRandomKey() {
+  async generateRandomKey() {
     return btoa("fake-symmetric-key-" + this.counter++);
   },
 
   generateRandomIV: function generateRandomIV() {
     // A base64-encoded IV is 24 characters long
     return btoa("fake-fake-fake-random-iv");
   },
 
--- a/services/sync/modules/bookmark_validator.js
+++ b/services/sync/modules/bookmark_validator.js
@@ -818,29 +818,33 @@ class BookmarkValidator {
       if (structuralDifferences.length) {
         problemData.structuralDifferences.push({ id, differences: structuralDifferences });
       }
     }
     return inspectionInfo;
   }
 
   async _getServerState(engine) {
-// XXXXX - todo - we need to capture last-modified of the server here and
-// ensure the repairer only applys with if-unmodified-since that date.
+    // XXXXX - todo - we need to capture last-modified of the server here and
+    // ensure the repairer only applies with if-unmodified-since that date.
     let collection = engine.itemSource();
     let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
     collection.full = true;
     let result = await collection.getBatched();
     if (!result.response.success) {
       throw result.response;
     }
-    return result.records.map(record => {
-      record.decrypt(collectionKey);
-      return record.cleartext;
-    });
+    let maybeYield = Async.jankYielder();
+    let cleartexts = [];
+    for (let record of result.records) {
+      await maybeYield();
+      await record.decrypt(collectionKey);
+      cleartexts.push(record.cleartext);
+    }
+    return cleartexts;
   }
 
   async validate(engine) {
     let start = Date.now();
     let clientTree = await PlacesUtils.promiseBookmarksTree("", {
       includeItemIds: true
     });
     let serverState = await this._getServerState(engine);
--- a/services/sync/modules/collection_validator.js
+++ b/services/sync/modules/collection_validator.js
@@ -72,20 +72,24 @@ class CollectionValidator {
   async getServerItems(engine) {
     let collection = engine.itemSource();
     let collectionKey = engine.service.collectionKeys.keyForCollection(engine.name);
     collection.full = true;
     let result = await collection.getBatched();
     if (!result.response.success) {
       throw result.response;
     }
-    return result.records.map(record => {
-      record.decrypt(collectionKey);
-      return record.cleartext;
-    });
+    let maybeYield = Async.jankYielder();
+    let cleartexts = [];
+    for (let record of result.records) {
+      await maybeYield();
+      await record.decrypt(collectionKey);
+      cleartexts.push(record.cleartext);
+    }
+    return cleartexts;
   }
 
   // Should return a promise that resolves to an array of client items.
   getClientItems() {
     return Promise.reject("Must implement");
   }
 
   /**
--- a/services/sync/modules/engines.js
+++ b/services/sync/modules/engines.js
@@ -1146,29 +1146,29 @@ SyncEngine.prototype = {
       // Track the collection for the WBO.
       item.collection = self.name;
 
       // Remember which records were processed
       handled.push(item.id);
 
       try {
         try {
-          item.decrypt(key);
+          await item.decrypt(key);
         } catch (ex) {
           if (!Utils.isHMACMismatch(ex)) {
             throw ex;
           }
           let strategy = await self.handleHMACMismatch(item, true);
           if (strategy == SyncEngine.kRecoveryStrategy.retry) {
             // You only get one retry.
             try {
               // Try decrypting again, typically because we've got new keys.
               self._log.info("Trying decrypt again...");
               key = self.service.collectionKeys.keyForCollection(self.name);
-              item.decrypt(key);
+              await item.decrypt(key);
               strategy = null;
             } catch (ex) {
               if (!Utils.isHMACMismatch(ex)) {
                 throw ex;
               }
               strategy = await self.handleHMACMismatch(item, false);
             }
           }
@@ -1664,18 +1664,17 @@ SyncEngine.prototype = {
           let { forceTombstone = false } = this._needWeakUpload.get(id) || {};
           if (forceTombstone) {
             out = await this._createTombstone(id);
           } else {
             out = await this._createRecord(id);
           }
           if (this._log.level <= Log.Level.Trace)
             this._log.trace("Outgoing: " + out);
-
-          out.encrypt(this.service.collectionKeys.keyForCollection(this.name));
+          await out.encrypt(this.service.collectionKeys.keyForCollection(this.name));
           ok = true;
         } catch (ex) {
           this._log.warn("Error creating record", ex);
           ++counts.failed;
           if (Async.isShutdownException(ex) || !this.allowSkippedRecord) {
             if (!this.allowSkippedRecord) {
               // Don't bother for shutdown errors
               Observers.notify("weave:engine:sync:uploaded", counts, this.name);
@@ -1791,17 +1790,17 @@ SyncEngine.prototype = {
     let key = this.service.collectionKeys.keyForCollection(this.name);
 
     // Any failure fetching/decrypting will just result in false
     try {
       this._log.trace("Trying to decrypt a record from the server..");
       let json = (await test.get()).obj[0];
       let record = new this._recordObj();
       record.deserialize(json);
-      record.decrypt(key);
+      await record.decrypt(key);
       canDecrypt = true;
     } catch (ex) {
       if (Async.isShutdownException(ex)) {
         throw ex;
       }
       this._log.debug("Failed test decrypt", ex);
     }
 
--- a/services/sync/modules/engines/bookmarks.js
+++ b/services/sync/modules/engines/bookmarks.js
@@ -93,19 +93,19 @@ function getTypeObject(type) {
   return null;
 }
 
 this.PlacesItem = function PlacesItem(collection, id, type) {
   CryptoWrapper.call(this, collection, id);
   this.type = type || "item";
 }
 PlacesItem.prototype = {
-  decrypt: function PlacesItem_decrypt(keyBundle) {
+  async decrypt(keyBundle) {
     // Do the normal CryptoWrapper decrypt, but change types before returning
-    let clear = CryptoWrapper.prototype.decrypt.call(this, keyBundle);
+    let clear = await CryptoWrapper.prototype.decrypt.call(this, keyBundle);
 
     // Convert the abstract places item to the actual object type
     if (!this.deleted)
       this.__proto__ = this.getTypeObject(this.type).prototype;
 
     return clear;
   },
 
--- a/services/sync/modules/keys.js
+++ b/services/sync/modules/keys.js
@@ -102,19 +102,22 @@ KeyBundle.prototype = {
 
   get sha256HMACHasher() {
     return this._sha256HMACHasher;
   },
 
   /**
    * Populate this key pair with 2 new, randomly generated keys.
    */
-  generateRandom: function generateRandom() {
-    let generatedHMAC = Weave.Crypto.generateRandomKey();
-    let generatedEncr = Weave.Crypto.generateRandomKey();
+  async generateRandom() {
+    // Compute both at the same time.
+    let [generatedHMAC, generatedEncr] = await Promise.all([
+      Weave.Crypto.generateRandomKey(),
+      Weave.Crypto.generateRandomKey()
+    ]);
     this.keyPairB64 = [generatedEncr, generatedHMAC];
   },
 
 };
 
 /**
  * Represents a KeyBundle associated with a collection.
  *
--- a/services/sync/modules/record.js
+++ b/services/sync/modules/record.js
@@ -131,48 +131,48 @@ CryptoWrapper.prototype = {
    * Don't directly use the sync key. Instead, grab a key for this
    * collection, which is decrypted with the sync key.
    *
    * Cache those keys; invalidate the cache if the time on the keys collection
    * changes, or other auth events occur.
    *
    * Optional key bundle overrides the collection key lookup.
    */
-  encrypt: function encrypt(keyBundle) {
+  async encrypt(keyBundle) {
     if (!keyBundle) {
       throw new Error("A key bundle must be supplied to encrypt.");
     }
 
     this.IV = Weave.Crypto.generateRandomIV();
-    this.ciphertext = Weave.Crypto.encrypt(JSON.stringify(this.cleartext),
-                                           keyBundle.encryptionKeyB64, this.IV);
+    this.ciphertext = await Weave.Crypto.encrypt(JSON.stringify(this.cleartext),
+                                                 keyBundle.encryptionKeyB64, this.IV);
     this.hmac = this.ciphertextHMAC(keyBundle);
     this.cleartext = null;
   },
 
   // Optional key bundle.
-  decrypt: function decrypt(keyBundle) {
+  async decrypt(keyBundle) {
     if (!this.ciphertext) {
       throw new Error("No ciphertext: nothing to decrypt?");
     }
 
     if (!keyBundle) {
       throw new Error("A key bundle must be supplied to decrypt.");
     }
 
     // Authenticate the encrypted blob with the expected HMAC
     let computedHMAC = this.ciphertextHMAC(keyBundle);
 
     if (computedHMAC != this.hmac) {
       Utils.throwHMACMismatch(this.hmac, computedHMAC);
     }
 
     // Handle invalid data here. Elsewhere we assume that cleartext is an object.
-    let cleartext = Weave.Crypto.decrypt(this.ciphertext,
-                                         keyBundle.encryptionKeyB64, this.IV);
+    let cleartext = await Weave.Crypto.decrypt(this.ciphertext,
+                                               keyBundle.encryptionKeyB64, this.IV);
     let json_result = JSON.parse(cleartext);
 
     if (json_result && (json_result instanceof Object)) {
       this.cleartext = json_result;
       this.ciphertext = null;
     } else {
       throw new Error(
           `Decryption failed: result is <${json_result}>, not an object.`);
@@ -384,57 +384,57 @@ CollectionKeyManager.prototype = {
    */
   asWBO(collection, id) {
     return this._makeWBO(this._collections, this._default);
   },
 
   /**
    * Compute a new default key, and new keys for any specified collections.
    */
-  newKeys(collections) {
-    let newDefaultKeyBundle = this.newDefaultKeyBundle();
+  async newKeys(collections) {
+    let newDefaultKeyBundle = await this.newDefaultKeyBundle();
 
     let newColls = {};
     if (collections) {
-      collections.forEach(function(c) {
+      for (let c of collections) {
         let b = new BulkKeyBundle(c);
-        b.generateRandom();
+        await b.generateRandom();
         newColls[c] = b;
-      });
+      }
     }
     return [newDefaultKeyBundle, newColls];
   },
 
   /**
    * Generates new keys, but does not replace our local copy. Use this to
    * verify an upload before storing.
    */
-  generateNewKeysWBO(collections) {
+  async generateNewKeysWBO(collections) {
     let newDefaultKey, newColls;
-    [newDefaultKey, newColls] = this.newKeys(collections);
+    [newDefaultKey, newColls] = await this.newKeys(collections);
 
     return this._makeWBO(newColls, newDefaultKey);
   },
 
   /**
    * Create a new default key.
    *
    * @returns {BulkKeyBundle}
    */
-  newDefaultKeyBundle() {
+  async newDefaultKeyBundle() {
     const key = new BulkKeyBundle(DEFAULT_KEYBUNDLE_NAME);
-    key.generateRandom();
+    await key.generateRandom();
     return key;
   },
 
   /**
    * Create a new default key and store it as this._default, since without one you cannot use setContents.
    */
-  generateDefaultKey() {
-    this._default = this.newDefaultKeyBundle();
+  async generateDefaultKey() {
+    this._default = await this.newDefaultKeyBundle();
   },
 
   /**
    * Return true if keys are already present for each of the given
    * collections.
    */
   hasKeysFor(collections) {
     // We can't use filter() here because sometimes collections is an iterator.
@@ -446,25 +446,25 @@ CollectionKeyManager.prototype = {
     return true;
   },
 
   /**
    * Return a new CollectionKeyManager that has keys for each of the
    * given collections (creating new ones for collections where we
    * don't already have keys).
    */
-  ensureKeysFor(collections) {
+  async ensureKeysFor(collections) {
     const newKeys = Object.assign({}, this._collections);
     for (let c of collections) {
       if (newKeys[c]) {
         continue;  // don't replace existing keys
       }
 
       const b = new BulkKeyBundle(c);
-      b.generateRandom();
+      await b.generateRandom();
       newKeys[c] = b;
     }
     return new CollectionKeyManager(this.lastModified, this._default, newKeys);
   },
 
   // Take the fetched info/collections WBO, checking the change
   // time of the crypto collection.
   updateNeeded(info_collections) {
@@ -554,27 +554,27 @@ CollectionKeyManager.prototype = {
     if (modified) {
       self._log.info("Bumping last modified to " + modified);
       self.lastModified = modified;
     }
 
     return sameDefault ? collComparison.changed : true;
   },
 
-  updateContents: function updateContents(syncKeyBundle, storage_keys) {
+  async updateContents(syncKeyBundle, storage_keys) {
     let log = this._log;
     log.info("Updating collection keys...");
 
     // storage_keys is a WBO, fetched from storage/crypto/keys.
     // Its payload is the default key, and a map of collections to keys.
     // We lazily compute the key objects from the strings we're given.
 
     let payload;
     try {
-      payload = storage_keys.decrypt(syncKeyBundle);
+      payload = await storage_keys.decrypt(syncKeyBundle);
     } catch (ex) {
       log.warn("Got exception decrypting storage keys with sync key.", ex);
       log.info("Aborting updateContents. Rethrowing.");
       throw ex;
     }
 
     let r = this.setContents(payload, storage_keys.modified);
     log.info("Collection keys updated.");
--- a/services/sync/modules/service.js
+++ b/services/sync/modules/service.js
@@ -262,17 +262,17 @@ Sync11Service.prototype = {
                      "Will try again later.", ex);
       return false;
     }
   },
 
   async handleFetchedKeys(syncKey, cryptoKeys, skipReset) {
     // Don't want to wipe if we're just starting up!
     let wasBlank = this.collectionKeys.isClear;
-    let keysChanged = this.collectionKeys.updateContents(syncKey, cryptoKeys);
+    let keysChanged = await this.collectionKeys.updateContents(syncKey, cryptoKeys);
 
     if (keysChanged && !wasBlank) {
       this._log.debug("Keys changed: " + JSON.stringify(keysChanged));
 
       if (!skipReset) {
         this._log.info("Resetting client to reflect key change.");
 
         if (keysChanged.length) {
@@ -719,19 +719,19 @@ Sync11Service.prototype = {
       this.status.login = LOGIN_FAILED_NETWORK_ERROR;
       this.errorHandler.checkServerError(ex);
       return false;
     }
   },
 
   async generateNewSymmetricKeys() {
     this._log.info("Generating new keys WBO...");
-    let wbo = this.collectionKeys.generateNewKeysWBO();
+    let wbo = await this.collectionKeys.generateNewKeysWBO();
     this._log.info("Encrypting new key bundle.");
-    wbo.encrypt(this.identity.syncKeyBundle);
+    await wbo.encrypt(this.identity.syncKeyBundle);
 
     let uploadRes = await this._uploadCryptoKeys(wbo, 0);
     if (uploadRes.status != 200) {
       this._log.warn("Got status " + uploadRes.status + " uploading new keys. What to do? Throw!");
       this.errorHandler.checkServerError(uploadRes);
       throw new Error("Unable to upload symmetric keys.");
     }
     this._log.info("Got status " + uploadRes.status + " uploading keys.");
--- a/services/sync/tests/unit/head_errorhandler_common.js
+++ b/services/sync/tests/unit/head_errorhandler_common.js
@@ -89,31 +89,31 @@ const EHTestsCommon = {
         }
       }
     };
 
     return CatapultEngine;
   }()),
 
 
-  generateCredentialsChangedFailure() {
+  async generateCredentialsChangedFailure() {
     // Make sync fail due to changed credentials. We simply re-encrypt
     // the keys with a different Sync Key, without changing the local one.
     let newSyncKeyBundle = new BulkKeyBundle("crypto");
-    newSyncKeyBundle.generateRandom();
+    await newSyncKeyBundle.generateRandom();
     let keys = Service.collectionKeys.asWBO();
-    keys.encrypt(newSyncKeyBundle);
+    await keys.encrypt(newSyncKeyBundle);
     return keys.upload(Service.resource(Service.cryptoKeysURL));
   },
 
   async setUp(server) {
     await configureIdentity({ username: "johndoe" }, server);
     return EHTestsCommon.generateAndUploadKeys()
   },
 
   async generateAndUploadKeys() {
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
-    serverKeys.encrypt(Service.identity.syncKeyBundle);
+    await serverKeys.encrypt(Service.identity.syncKeyBundle);
     let response = await serverKeys.upload(Service.resource(Service.cryptoKeysURL));
     return response.success;
   }
 };
--- a/services/sync/tests/unit/head_helpers.js
+++ b/services/sync/tests/unit/head_helpers.js
@@ -192,18 +192,18 @@ function uninstallAddon(addon) {
     }
   }};
 
   AddonManager.addAddonListener(listener);
   addon.uninstall();
   Async.waitForSyncCallback(cb);
 }
 
-function generateNewKeys(collectionKeys, collections = null) {
-  let wbo = collectionKeys.generateNewKeysWBO(collections);
+async function generateNewKeys(collectionKeys, collections = null) {
+  let wbo = await collectionKeys.generateNewKeysWBO(collections);
   let modified = new_timestamp();
   collectionKeys.setContents(wbo.cleartext, modified);
 }
 
 // Helpers for testing open tabs.
 // These reflect part of the internal structure of TabEngine,
 // and stub part of Service.wm.
 
@@ -530,20 +530,20 @@ async function registerRotaryEngine() {
 // Set the validation prefs to attempt validation every time to avoid non-determinism.
 function enableValidationPrefs() {
   Svc.Prefs.set("engine.bookmarks.validation.interval", 0);
   Svc.Prefs.set("engine.bookmarks.validation.percentageChance", 100);
   Svc.Prefs.set("engine.bookmarks.validation.maxRecords", -1);
   Svc.Prefs.set("engine.bookmarks.validation.enabled", true);
 }
 
-function serverForEnginesWithKeys(users, engines, callback) {
+async function serverForEnginesWithKeys(users, engines, callback) {
   // Generate and store a fake default key bundle to avoid resetting the client
   // before the first sync.
-  let wbo = Service.collectionKeys.generateNewKeysWBO();
+  let wbo = await Service.collectionKeys.generateNewKeysWBO();
   let modified = new_timestamp();
   Service.collectionKeys.setContents(wbo.cleartext, modified);
 
   let allEngines = [Service.clientsEngine].concat(engines);
 
   let globalEngines = allEngines.reduce((entries, engine) => {
     let { name, version, syncID } = engine;
     entries[name] = { version, syncID };
@@ -564,17 +564,17 @@ function serverForEnginesWithKeys(users,
     crypto: {
       keys: encryptPayload(wbo.cleartext),
     },
   });
 
   return serverForUsers(users, contents, callback);
 }
 
-function serverForFoo(engine, callback) {
+async function serverForFoo(engine, callback) {
   // The bookmarks engine *always* tracks changes, meaning we might try
   // and sync due to the bookmarks we ourselves create! Worse, because we
   // do an engine sync only, there's no locking - so we end up with multiple
   // syncs running. Neuter that by making the threshold very large.
   Service.scheduler.syncThreshold = 10000000;
   return serverForEnginesWithKeys({"foo": "password"}, engine, callback);
 }
 
--- a/services/sync/tests/unit/test_412.js
+++ b/services/sync/tests/unit/test_412.js
@@ -8,20 +8,20 @@ Cu.import("resource://testing-common/ser
 initTestLogging("Trace");
 
 Services.prefs.setCharPref("services.sync.log.logger.service.main", "Trace");
 
 add_task(async function test_412_not_treated_as_failure() {
   await Service.engineManager.register(RotaryEngine);
   let engine = Service.engineManager.get("rotary");
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   // add an item to the server to the first sync advances lastModified.
   let collection = server.getCollection("foo", "rotary");
   let payload = encryptPayload({id: "existing", something: "existing record"});
   collection.insert("existing", payload);
 
   let promiseObserved = promiseOneObserver("weave:engine:sync:finish");
   try {
--- a/services/sync/tests/unit/test_addons_engine.js
+++ b/services/sync/tests/unit/test_addons_engine.js
@@ -171,17 +171,17 @@ add_task(async function test_disabled_in
   const USER       = "foo";
   const PASSWORD   = "password";
   const ADDON_ID   = "addon1@tests.mozilla.org";
 
   let server = new SyncServer();
   server.start();
   await SyncTestingInfrastructure(server, USER, PASSWORD);
 
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let contents = {
     meta: {global: {engines: {addons: {version: engine.version,
                                       syncID:  engine.syncID}}}},
     crypto: {},
     addons: {}
   };
 
--- a/services/sync/tests/unit/test_bookmark_decline_undecline.js
+++ b/services/sync/tests/unit/test_bookmark_decline_undecline.js
@@ -20,19 +20,24 @@ add_task(async function setup() {
 function getBookmarkWBO(server, guid) {
   let coll = server.user("foo").collection("bookmarks");
   if (!coll) {
     return null;
   }
   return coll.wbo(guid);
 }
 
+add_task(async function setup() {
+  initTestLogging("Trace");
+  await generateNewKeys(Service.collectionKeys);
+});
+
 add_task(async function test_decline_undecline() {
   let engine = Service.engineManager.get("bookmarks");
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   try {
     let { guid: bzGuid } = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.menuGuid,
       url: "https://bugzilla.mozilla.org",
       index: PlacesUtils.bookmarks.DEFAULT_INDEX,
       title: "bugzilla",
@@ -51,13 +56,8 @@ add_task(async function test_decline_und
     ok(getBookmarkWBO(server, bzGuid), "Should be present on server again");
 
   } finally {
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
-function run_test() {
-  initTestLogging("Trace");
-  generateNewKeys(Service.collectionKeys);
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_bookmark_duping.js
+++ b/services/sync/tests/unit/test_bookmark_duping.js
@@ -20,17 +20,17 @@ add_task(async function setup() {
   await Service.engineManager.register(BookmarksEngine);
   engine = Service.engineManager.get("bookmarks");
   store = engine._store;
   store._log.level = Log.Level.Trace;
   engine._log.level = Log.Level.Trace;
 });
 
 async function sharedSetup() {
- let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");   // We skip usual startup...
 
   return { server, collection };
 }
--- a/services/sync/tests/unit/test_bookmark_engine.js
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -32,24 +32,28 @@ async function fetchAllSyncIds() {
   let syncIds = new Set();
   for (let row of rows) {
     let syncId = PlacesSyncUtils.bookmarks.guidToSyncId(
       row.getResultByName("guid"));
     syncIds.add(syncId);
   }
   return syncIds;
 }
+add_task(async function setup() {
+  initTestLogging("Trace");
+  await generateNewKeys(Service.collectionKeys);
+});
 
 add_task(async function test_delete_invalid_roots_from_server() {
   _("Ensure that we delete the Places and Reading List roots from the server.");
 
   let engine  = new BookmarksEngine(Service);
   await engine.initialize();
   let store   = engine._store;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     let placesRecord = await store.createRecord("places");
@@ -126,17 +130,17 @@ add_task(async function bad_record_allID
 });
 
 add_task(async function test_processIncoming_error_orderChildren() {
   _("Ensure that _orderChildren() is called even when _processIncoming() throws an error.");
 
   let engine = new BookmarksEngine(Service);
   await engine.initialize();
   let store  = engine._store;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   try {
 
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
@@ -212,17 +216,17 @@ async function test_restoreOrImport(aRep
   let verbing = aReplace ? "restoring" : "importing";
   let bookmarkUtils = aReplace ? BookmarkJSONUtils : BookmarkHTMLUtils;
 
   _(`Ensure that ${verbing} from a backup will reupload all records.`);
 
   let engine = new BookmarksEngine(Service);
   await engine.initialize();
   let store  = engine._store;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");   // We skip usual startup...
 
   try {
 
@@ -437,17 +441,17 @@ add_task(async function test_mismatched_
        "oT74WwV8_j4P", "IztsItWVSo3-"],
     "parentid": "toolbar"
   };
   newRecord.cleartext = newRecord;
 
   let engine = new BookmarksEngine(Service);
   await engine.initialize();
   let store  = engine._store;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   _("GUID: " + (await store.GUIDForId(6, true)));
 
   try {
     let bms = PlacesUtils.bookmarks;
     let oldR = new FakeRecord(BookmarkFolder, oldRecord);
     let newR = new FakeRecord(Livemark, newRecord);
@@ -482,17 +486,17 @@ add_task(async function test_mismatched_
 
 add_task(async function test_bookmark_guidMap_fail() {
   _("Ensure that failures building the GUID map cause early death.");
 
   let engine = new BookmarksEngine(Service);
   await engine.initialize();
   let store = engine._store;
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   let coll   = server.user("foo").collection("bookmarks");
   await SyncTestingInfrastructure(server);
 
   // Add one item to the server.
   let itemID = PlacesUtils.bookmarks.createFolder(
     PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
   let itemGUID = await store.GUIDForId(itemID);
   let itemRecord = await store.createRecord(itemGUID);
@@ -581,17 +585,17 @@ add_task(async function test_bookmark_ta
 });
 
 add_task(async function test_misreconciled_root() {
   _("Ensure that we don't reconcile an arbitrary record with a root.");
 
   let engine = new BookmarksEngine(Service);
   await engine.initialize();
   let store = engine._store;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   // Log real hard for this test.
   store._log.trace = store._log.debug;
   engine._log.trace = engine._log.debug;
 
   await engine._syncStartup();
 
@@ -638,17 +642,17 @@ add_task(async function test_misreconcil
 });
 
 add_task(async function test_sync_dateAdded() {
   await Service.recordManager.clearCache();
   await PlacesSyncUtils.bookmarks.reset();
   let engine = new BookmarksEngine(Service);
   await engine.initialize();
   let store  = engine._store;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   // TODO: Avoid random orange (bug 1374599), this is only necessary
   // intermittently - reset the last sync date so that we'll get all bookmarks.
   engine.lastSync = 1;
 
@@ -782,14 +786,8 @@ add_task(async function test_sync_dateAd
   } finally {
     await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
-
-function run_test() {
-  initTestLogging("Trace");
-  generateNewKeys(Service.collectionKeys);
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_bookmark_order.js
+++ b/services/sync/tests/unit/test_bookmark_order.js
@@ -8,18 +8,18 @@ Cu.import("resource://services-sync/main
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 Svc.Prefs.set("log.logger.engine.bookmarks", "Trace");
 initTestLogging("Trace");
 Log.repository.getLogger("Sqlite").level = Log.Level.Info;
 
-function serverForFoo(engine) {
-  generateNewKeys(Service.collectionKeys);
+async function serverForFoo(engine) {
+  await generateNewKeys(Service.collectionKeys);
 
   let clientsEngine = Service.clientsEngine;
   return serverForUsers({"foo": "password"}, {
     meta: {
       global: {
         syncID: Service.syncID,
         storageVersion: STORAGE_VERSION,
         engines: {
@@ -35,18 +35,18 @@ function serverForFoo(engine) {
       },
     },
     crypto: {
       keys: encryptPayload({
         id: "keys",
         // Generate a fake default key bundle to avoid resetting the client
         // before the first sync.
         default: [
-          Weave.Crypto.generateRandomKey(),
-          Weave.Crypto.generateRandomKey(),
+          await Weave.Crypto.generateRandomKey(),
+          await Weave.Crypto.generateRandomKey(),
         ],
       }),
     },
     [engine.name]: {},
   });
 }
 
 async function resolveConflict(engine, collection, timestamp, buildTree,
@@ -146,17 +146,17 @@ async function resolveConflict(engine, c
   let expectedTree = buildTree(guids);
   await assertBookmarksTreeMatches(PlacesUtils.bookmarks.menuGuid,
     expectedTree, message);
 }
 
 add_task(async function test_local_order_newer() {
   let engine = Service.engineManager.get("bookmarks");
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   try {
     let collection = server.user("foo").collection("bookmarks");
     let serverModified = Date.now() / 1000 - 120;
     await resolveConflict(engine, collection, serverModified, guids => [{
       guid: guids.fx,
       index: 0,
@@ -191,17 +191,17 @@ add_task(async function test_local_order
     await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_remote_order_newer() {
   let engine = Service.engineManager.get("bookmarks");
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   try {
     let collection = server.user("foo").collection("bookmarks");
     let serverModified = Date.now() / 1000 + 120;
     await resolveConflict(engine, collection, serverModified, guids => [{
       guid: guids.tb,
       index: 0,
--- a/services/sync/tests/unit/test_bookmark_record.js
+++ b/services/sync/tests/unit/test_bookmark_record.js
@@ -13,34 +13,34 @@ function prepareBookmarkItem(collection,
   let b = new Bookmark(collection, id);
   b.cleartext.stuff = "my payload here";
   return b;
 }
 
 add_task(async function test_bookmark_record() {
   await configureIdentity();
 
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
   let keyBundle = Service.identity.syncKeyBundle;
 
   let log = Log.repository.getLogger("Test");
   Log.repository.rootLogger.addAppender(new Log.DumpAppender());
 
   log.info("Creating a record");
 
   let placesItem = new PlacesItem("bookmarks", "foo", "bookmark");
   let bookmarkItem = prepareBookmarkItem("bookmarks", "foo");
 
   log.info("Checking getTypeObject");
   do_check_eq(placesItem.getTypeObject(placesItem.type), Bookmark);
   do_check_eq(bookmarkItem.getTypeObject(bookmarkItem.type), Bookmark);
 
-  bookmarkItem.encrypt(keyBundle);
+  await bookmarkItem.encrypt(keyBundle);
   log.info("Ciphertext is " + bookmarkItem.ciphertext);
   do_check_true(bookmarkItem.ciphertext != null);
 
   log.info("Decrypting the record");
 
-  let payload = bookmarkItem.decrypt(keyBundle);
+  let payload = await bookmarkItem.decrypt(keyBundle);
   do_check_eq(payload.stuff, "my payload here");
   do_check_eq(bookmarkItem.getTypeObject(bookmarkItem.type), Bookmark);
   do_check_neq(payload, bookmarkItem.payload); // wrap.data.payload is the encrypted one
 });
--- a/services/sync/tests/unit/test_bookmark_repair.js
+++ b/services/sync/tests/unit/test_bookmark_repair.js
@@ -35,17 +35,17 @@ const BOOKMARK_REPAIR_STATE_PREFS = [
 let clientsEngine;
 let bookmarksEngine;
 var recordedEvents = [];
 
 add_task(async function setup() {
   clientsEngine = Service.clientsEngine;
   bookmarksEngine = Service.engineManager.get("bookmarks");
 
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
     recordedEvents.push({ object, method, value, extra });
   };
 });
 
 function checkRecordedEvents(expected, message) {
   deepEqual(recordedEvents, expected, message);
@@ -88,17 +88,17 @@ async function cleanup(server) {
   await promiseStopServer(server);
 }
 
 add_task(async function test_bookmark_repair_integration() {
   enableValidationPrefs();
 
   _("Ensure that a validation error triggers a repair request.");
 
-  let server = serverForFoo(bookmarksEngine);
+  let server = await serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
 
   let user = server.user("foo");
 
   let initialID = Service.clientsEngine.localID;
   let remoteID = Utils.makeGUID();
   try {
 
@@ -311,17 +311,17 @@ add_task(async function test_bookmark_re
   }
 });
 
 add_task(async function test_repair_client_missing() {
   enableValidationPrefs();
 
   _("Ensure that a record missing from the client only will get re-downloaded from the server");
 
-  let server = serverForFoo(bookmarksEngine);
+  let server = await serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
     await Service.sync();
 
@@ -380,17 +380,17 @@ add_task(async function test_repair_clie
   }
 });
 
 add_task(async function test_repair_server_missing() {
   enableValidationPrefs();
 
   _("Ensure that a record missing from the server only will get re-upload from the client");
 
-  let server = serverForFoo(bookmarksEngine);
+  let server = await serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
 
   let user = server.user("foo");
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
@@ -444,17 +444,17 @@ add_task(async function test_repair_serv
   }
 });
 
 add_task(async function test_repair_server_deleted() {
   enableValidationPrefs();
 
   _("Ensure that a record marked as deleted on the server but present on the client will get deleted on the client");
 
-  let server = serverForFoo(bookmarksEngine);
+  let server = await serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
     await Service.sync();
 
--- a/services/sync/tests/unit/test_bookmark_repair_responder.js
+++ b/services/sync/tests/unit/test_bookmark_repair_responder.js
@@ -27,17 +27,17 @@ function checkRecordedEvents(expected) {
   recordedEvents = [];
 }
 
 function getServerBookmarks(server) {
   return server.user("foo").collection("bookmarks");
 }
 
 async function makeServer() {
-  let server = serverForFoo(bookmarksEngine);
+  let server = await serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
   return server;
 }
 
 async function cleanup(server) {
   await promiseStopServer(server);
   await PlacesSyncUtils.bookmarks.wipe();
   // clear keys so when each test finds a different server it accepts its keys.
--- a/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
+++ b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
@@ -36,25 +36,32 @@ function clearBookmarks() {
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.toolbarFolder);
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.unfiledBookmarksFolder);
 }
 
 let engine;
 let store;
 
 add_task(async function setup() {
+  initTestLogging("Trace");
+  Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
+
+  await generateNewKeys(Service.collectionKeys);
+});
+
+add_task(async function setup() {
   await Service.engineManager.register(BookmarksEngine);
   engine = Service.engineManager.get("bookmarks");
   store = engine._store;
 });
 
 // Verify that Places smart bookmarks have their annotation uploaded and
 // handled locally.
 add_task(async function test_annotation_uploaded() {
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let startCount = smartBookmarkCount();
 
   _("Start count is " + startCount);
 
   if (startCount > 0) {
     // This can happen in XULRunner.
@@ -159,17 +166,17 @@ add_task(async function test_annotation_
     await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_smart_bookmarks_duped() {
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let parent = PlacesUtils.toolbarFolderId;
   let uri =
     CommonUtils.makeURI("place:sort=" +
                   Ci.nsINavHistoryQueryOptions.SORT_BY_VISITCOUNT_DESCENDING +
                   "&maxResults=10");
   let title = "Most Visited";
@@ -208,17 +215,8 @@ add_task(async function test_smart_bookm
   } finally {
     // Clean up.
     await store.wipe();
     await promiseStopServer(server);
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
   }
 });
-
-function run_test() {
-  initTestLogging("Trace");
-  Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
-
-  generateNewKeys(Service.collectionKeys);
-
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -13,27 +13,27 @@ const MORE_THAN_CLIENTS_TTL_REFRESH = 69
 const LESS_THAN_CLIENTS_TTL_REFRESH = 86400;  // 1 day
 
 let engine;
 
 /**
  * Unpack the record with this ID, and verify that it has the same version that
  * we should be putting into records.
  */
-function check_record_version(user, id) {
+async function check_record_version(user, id) {
     let payload = JSON.parse(user.collection("clients").wbo(id).payload);
 
     let rec = new CryptoWrapper();
     rec.id = id;
     rec.collection = "clients";
     rec.ciphertext = payload.ciphertext;
     rec.hmac = payload.hmac;
     rec.IV = payload.IV;
 
-    let cleartext = rec.decrypt(Service.collectionKeys.keyForCollection("clients"));
+    let cleartext = await rec.decrypt(Service.collectionKeys.keyForCollection("clients"));
 
     _("Payload is " + JSON.stringify(cleartext));
     equal(Services.appinfo.version, cleartext.version);
     equal(1, cleartext.protocols.length);
     equal("1.5", cleartext.protocols[0]);
 }
 
 // compare 2 different command arrays, taking into account that a flowID
@@ -73,17 +73,17 @@ add_task(async function test_bad_hmac() 
     __proto__: SyncServerCallback,
     onItemDeleted(username, coll, wboID) {
       deletedItems.push(coll + "/" + wboID);
     },
     onCollectionDeleted(username, coll) {
       deletedCollections.push(coll);
     }
   }
-  let server = serverForFoo(engine, callback);
+  let server = await serverForFoo(engine, callback);
   let user   = server.user("foo");
 
   function check_clients_count(expectedCount) {
     let stack = Components.stack.caller;
     let coll  = user.collection("clients");
 
     // Treat a non-existent collection as empty.
     equal(expectedCount, coll ? coll.count() : 0, stack);
@@ -91,64 +91,64 @@ add_task(async function test_bad_hmac() 
 
   function check_client_deleted(id) {
     let coll = user.collection("clients");
     let wbo  = coll.wbo(id);
     return !wbo || !wbo.payload;
   }
 
   async function uploadNewKeys() {
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
-    serverKeys.encrypt(Service.identity.syncKeyBundle);
+    await serverKeys.encrypt(Service.identity.syncKeyBundle);
     ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
   }
 
   try {
     await configureIdentity({username: "foo"}, server);
     await Service.login();
 
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
 
     _("First sync, client record is uploaded");
     equal(engine.lastRecordUpload, 0);
     check_clients_count(0);
     await syncClientsEngine(server);
     check_clients_count(1);
     ok(engine.lastRecordUpload > 0);
 
     // Our uploaded record has a version.
-    check_record_version(user, engine.localID);
+    await check_record_version(user, engine.localID);
 
     // Initial setup can wipe the server, so clean up.
     deletedCollections = [];
     deletedItems       = [];
 
     _("Change our keys and our client ID, reupload keys.");
     let oldLocalID  = engine.localID;     // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
     await engine.resetClient();
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
-    serverKeys.encrypt(Service.identity.syncKeyBundle);
+    await serverKeys.encrypt(Service.identity.syncKeyBundle);
     ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     _("Sync.");
     await syncClientsEngine(server);
 
     _("Old record " + oldLocalID + " was deleted, new one uploaded.");
     check_clients_count(1);
     check_client_deleted(oldLocalID);
 
     _("Now change our keys but don't upload them. " +
       "That means we get an HMAC error but redownload keys.");
     Service.lastHMACEvent = 0;
     engine.localID = Utils.makeGUID();
     await engine.resetClient();
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
     deletedCollections = [];
     deletedItems       = [];
     check_clients_count(1);
     await syncClientsEngine(server);
 
     _("Old record was not deleted, new one uploaded.");
     equal(deletedCollections.length, 0);
     equal(deletedItems.length, 0);
@@ -174,17 +174,17 @@ add_task(async function test_bad_hmac() 
     await uploadNewKeys();
 
     // Create a new client record and new keys. Now our keys are wrong, as well
     // as the object on the server. We'll download the new keys and also delete
     // the bad client record.
     oldLocalID  = engine.localID;         // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
     await engine.resetClient();
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
     let oldKey = Service.collectionKeys.keyForCollection();
 
     equal(deletedCollections.length, 0);
     equal(deletedItems.length, 0);
     await syncClientsEngine(server);
     equal(deletedItems.length, 1);
     check_client_deleted(oldLocalID);
     check_clients_count(1);
@@ -210,21 +210,21 @@ add_task(async function test_properties(
     await cleanup();
   }
 });
 
 add_task(async function test_full_sync() {
   _("Ensure that Clients engine fetches all records for each sync.");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   let user   = server.user("foo");
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let activeID = Utils.makeGUID();
   server.insertWBO("foo", "clients", new ServerWBO(activeID, encryptPayload({
     id: activeID,
     name: "Active client",
     type: "desktop",
     commands: [],
     version: "48",
@@ -276,21 +276,21 @@ add_task(async function test_full_sync()
       await promiseStopServer(server);
     }
   }
 });
 
 add_task(async function test_sync() {
   _("Ensure that Clients engine uploads a new client record once a week.");
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   let user   = server.user("foo");
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   function clientWBO() {
     return user.collection("clients").wbo(engine.localID);
   }
 
   try {
 
     _("First sync. Client record is uploaded.");
@@ -356,21 +356,21 @@ add_task(async function test_client_name
 
   await cleanup();
 });
 
 add_task(async function test_last_modified() {
   _("Ensure that remote records have a sane serverLastModified attribute.");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   let user   = server.user("foo");
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let activeID = Utils.makeGUID();
   server.insertWBO("foo", "clients", new ServerWBO(activeID, encryptPayload({
     id: activeID,
     name: "Active client",
     type: "desktop",
     commands: [],
     version: "48",
@@ -588,21 +588,21 @@ add_task(async function test_process_inc
 
   await cleanup();
 });
 
 add_task(async function test_filter_duplicate_names() {
   _("Ensure that we exclude clients with identical names that haven't synced in a week.");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   let user   = server.user("foo");
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   // Synced recently.
   let recentID = Utils.makeGUID();
   server.insertWBO("foo", "clients", new ServerWBO(recentID, encryptPayload({
     id: recentID,
     name: "My Phone",
     type: "mobile",
     commands: [],
@@ -739,19 +739,19 @@ add_task(async function test_filter_dupl
     }
   }
 });
 
 add_task(async function test_command_sync() {
   _("Ensure that commands are synced across clients.");
 
   await engine._store.wipe();
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
-  let server   = serverForFoo(engine);
+  let server   = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let user     = server.user("foo");
   let remoteId = Utils.makeGUID();
 
   function clientWBO(id) {
     return user.collection("clients").wbo(id);
   }
@@ -810,19 +810,19 @@ add_task(async function test_command_syn
     }
   }
 });
 
 add_task(async function test_clients_not_in_fxa_list() {
   _("Ensure that clients not in the FxA devices list are marked as stale.");
 
   await engine._store.wipe();
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
-  let server   = serverForFoo(engine);
+  let server   = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let remoteId = Utils.makeGUID();
   let remoteId2 = Utils.makeGUID();
 
   _("Create remote client records");
   server.insertWBO("foo", "clients", new ServerWBO(remoteId, encryptPayload({
     id: remoteId,
@@ -988,20 +988,20 @@ add_task(async function test_optional_cl
 
   await cleanup();
 });
 
 add_task(async function test_merge_commands() {
   _("Verifies local commands for remote clients are merged with the server's");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let desktopID = Utils.makeGUID();
   server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
     id: desktopID,
     name: "Desktop client",
     type: "desktop",
     commands: [{
       command: "displayURI",
@@ -1058,20 +1058,20 @@ add_task(async function test_merge_comma
     }
   }
 });
 
 add_task(async function test_duplicate_remote_commands() {
   _("Verifies local commands for remote clients are sent only once (bug 1289287)");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let desktopID = Utils.makeGUID();
   server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
     id: desktopID,
     name: "Desktop client",
     type: "desktop",
     commands: [],
     version: "48",
@@ -1117,20 +1117,20 @@ add_task(async function test_duplicate_r
     }
   }
 });
 
 add_task(async function test_upload_after_reboot() {
   _("Multiple downloads, reboot, then upload (bug 1289287)");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let deviceBID = Utils.makeGUID();
   let deviceCID = Utils.makeGUID();
   server.insertWBO("foo", "clients", new ServerWBO(deviceBID, encryptPayload({
     id: deviceBID,
     name: "Device B",
     type: "desktop",
     commands: [{
@@ -1200,20 +1200,20 @@ add_task(async function test_upload_afte
     }
   }
 });
 
 add_task(async function test_keep_cleared_commands_after_reboot() {
   _("Download commands, fail upload, reboot, then apply new commands (bug 1289287)");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let deviceBID = Utils.makeGUID();
   let deviceCID = Utils.makeGUID();
   server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload({
     id: engine.localID,
     name: "Device A",
     type: "desktop",
     commands: [{
@@ -1320,20 +1320,20 @@ add_task(async function test_keep_cleare
     }
   }
 });
 
 add_task(async function test_deleted_commands() {
   _("Verifies commands for a deleted client are discarded");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let activeID = Utils.makeGUID();
   server.insertWBO("foo", "clients", new ServerWBO(activeID, encryptPayload({
     id: activeID,
     name: "Active client",
     type: "desktop",
     commands: [],
     version: "48",
@@ -1378,20 +1378,20 @@ add_task(async function test_deleted_com
     }
   }
 });
 
 add_task(async function test_send_uri_ack() {
   _("Ensure a sent URI is deleted when the client syncs");
 
   let now = Date.now() / 1000;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   try {
     let fakeSenderID = Utils.makeGUID();
 
     _("Initial sync for empty clients collection");
     await syncClientsEngine(server);
     let collection = server.getCollection("foo", "clients");
     let ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
@@ -1433,19 +1433,19 @@ add_task(async function test_send_uri_ac
     }
   }
 });
 
 add_task(async function test_command_sync() {
   _("Notify other clients when writing their record.");
 
   await engine._store.wipe();
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
-  let server    = serverForFoo(engine);
+  let server    = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.getCollection("foo", "clients");
   let remoteId   = Utils.makeGUID();
   let remoteId2  = Utils.makeGUID();
 
   _("Create remote client record 1");
   server.insertWBO("foo", "clients", new ServerWBO(remoteId, encryptPayload({
@@ -1498,17 +1498,17 @@ add_task(async function test_command_syn
 
 add_task(async function ensureSameFlowIDs() {
   let events = []
   let origRecordTelemetryEvent = Service.recordTelemetryEvent;
   Service.recordTelemetryEvent = (object, method, value, extra) => {
     events.push({ object, method, value, extra });
   }
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   try {
     // Setup 2 clients, send them a command, and ensure we get to events
     // written, both with the same flowID.
     await SyncTestingInfrastructure(server);
 
     let remoteId   = Utils.makeGUID();
     let remoteId2  = Utils.makeGUID();
 
@@ -1592,17 +1592,17 @@ add_task(async function ensureSameFlowID
 
 add_task(async function test_duplicate_commands_telemetry() {
   let events = []
   let origRecordTelemetryEvent = Service.recordTelemetryEvent;
   Service.recordTelemetryEvent = (object, method, value, extra) => {
     events.push({ object, method, value, extra });
   }
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   try {
     await SyncTestingInfrastructure(server);
 
     let remoteId   = Utils.makeGUID();
     let remoteId2  = Utils.makeGUID();
 
     _("Create remote client record 1");
     server.insertWBO("foo", "clients", new ServerWBO(remoteId, encryptPayload({
@@ -1643,19 +1643,19 @@ add_task(async function test_duplicate_c
   }
 });
 
 add_task(async function test_other_clients_notified_on_first_sync() {
   _("Ensure that other clients are notified when we upload our client record for the first time.");
 
   engine.resetLastSync();
   engine._store.wipe();
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   const fxAccounts = engine.fxAccounts;
   let calls = 0;
   engine.fxAccounts = {
     getDeviceId() { return fxAccounts.getDeviceId(); },
     notifyDevices() {
       calls++;
--- a/services/sync/tests/unit/test_clients_escape.js
+++ b/services/sync/tests/unit/test_clients_escape.js
@@ -21,34 +21,34 @@ add_task(async function test_clients_esc
     engine.localName = "wéävê";
 
     _("Make sure we have the expected record");
     let record = await engine._createRecord("ascii");
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
 
     _("Encrypting record...");
-    record.encrypt(keyBundle);
+    await record.encrypt(keyBundle);
     _("Encrypted.");
 
     let serialized = JSON.stringify(record);
     let checkCount = 0;
     _("Checking for all ASCII:", serialized);
     Array.forEach(serialized, function(ch) {
       let code = ch.charCodeAt(0);
       _("Checking asciiness of '", ch, "'=", code);
       do_check_true(code < 128);
       checkCount++;
     });
 
     _("Processed", checkCount, "characters out of", serialized.length);
     do_check_eq(checkCount, serialized.length);
 
     _("Making sure the record still looks like it did before");
-    record.decrypt(keyBundle);
+    await record.decrypt(keyBundle);
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
 
     _("Sanity check that creating the record also gives the same");
     record = await engine._createRecord("ascii");
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
   } finally {
--- a/services/sync/tests/unit/test_corrupt_keys.js
+++ b/services/sync/tests/unit/test_corrupt_keys.js
@@ -41,35 +41,37 @@ add_task(async function test_locally_cha
 
     await configureIdentity({ username: "johndoe" }, server);
     // We aren't doing a .login yet, so fudge the cluster URL.
     Service.clusterURL = Service.identity._token.endpoint;
 
     await Service.engineManager.register(HistoryEngine);
     Service.engineManager.unregister("addons");
 
-    function corrupt_local_keys() {
-      Service.collectionKeys._default.keyPair = [Weave.Crypto.generateRandomKey(),
-                                                 Weave.Crypto.generateRandomKey()];
+    async function corrupt_local_keys() {
+      Service.collectionKeys._default.keyPair = [
+        await Weave.Crypto.generateRandomKey(),
+        await Weave.Crypto.generateRandomKey()
+      ];
     }
 
     _("Setting meta.");
 
     // Bump version on the server.
     let m = new WBORecord("meta", "global");
     m.payload = {"syncID": "foooooooooooooooooooooooooo",
                  "storageVersion": STORAGE_VERSION};
     await m.upload(Service.resource(Service.metaURL));
 
     _("New meta/global: " + JSON.stringify(johndoe.collection("meta").wbo("global")));
 
     // Upload keys.
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
-    serverKeys.encrypt(Service.identity.syncKeyBundle);
+    await serverKeys.encrypt(Service.identity.syncKeyBundle);
     do_check_true((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     // Check that login works.
     do_check_true((await Service.login()));
     do_check_true(Service.isLoggedIn);
 
     // Sync should upload records.
     await sync_and_validate_telem();
@@ -90,38 +92,38 @@ add_task(async function test_locally_cha
       let w = new CryptoWrapper("history", "id");
       w.cleartext = {
         id,
         histUri: "http://foo/bar?" + id,
         title: id,
         sortindex: i,
         visits: [{date: (modified - 5) * 1000000, type: visitType}],
         deleted: false};
-      w.encrypt(liveKeys);
+      await w.encrypt(liveKeys);
 
       let payload = {ciphertext: w.ciphertext,
                      IV:         w.IV,
                      hmac:       w.hmac};
       history.insert(id, payload, modified);
     }
 
     history.timestamp = Date.now() / 1000;
     let old_key_time = johndoe.modified("crypto");
     _("Old key time: " + old_key_time);
 
     // Check that we can decrypt one.
     let rec = new CryptoWrapper("history", "record-no--0");
     await rec.fetch(Service.resource(Service.storageURL + "history/record-no--0"));
     _(JSON.stringify(rec));
-    do_check_true(!!rec.decrypt(liveKeys));
+    do_check_true(!!await rec.decrypt(liveKeys));
 
     do_check_eq(hmacErrorCount, 0);
 
     // Fill local key cache with bad data.
-    corrupt_local_keys();
+    await corrupt_local_keys();
     _("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
 
     do_check_eq(hmacErrorCount, 0);
 
     _("HMAC error count: " + hmacErrorCount);
     // Now syncing should succeed, after one HMAC error.
     let ping = await wait_for_ping(() => Service.sync(), true);
     equal(ping.engines.find(e => e.name == "history").incoming.applied, 5);
@@ -146,17 +148,17 @@ add_task(async function test_locally_cha
       let w = new CryptoWrapper("history", "id");
       w.cleartext = {
         id,
         histUri: "http://foo/bar?" + id,
         title: id,
         sortindex: i,
         visits: [{date: (modified - 5 ) * 1000000, type: visitType}],
         deleted: false};
-      w.encrypt(Service.collectionKeys.keyForCollection("history"));
+      await w.encrypt(Service.collectionKeys.keyForCollection("history"));
       w.hmac = w.hmac.toUpperCase();
 
       let payload = {ciphertext: w.ciphertext,
                      IV:         w.IV,
                      hmac:       w.hmac};
       history.insert(id, payload, modified);
     }
     history.timestamp = Date.now() / 1000;
--- a/services/sync/tests/unit/test_engine_abort.js
+++ b/services/sync/tests/unit/test_engine_abort.js
@@ -17,17 +17,17 @@ add_task(async function test_processInco
   let payload = encryptPayload({id, denomination: "Record No. " + id});
   collection.insert(id, payload);
 
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler()
   });
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   _("Create some server data.");
   let meta_global = Service.recordManager.set(engine.metaURL,
                                               new WBORecord(engine.metaURL));
   meta_global.payload.engines = {rotary: {version: engine.version,
                                           syncID: engine.syncID}};
   _("Fake applyIncoming to abort.");
   engine._store.applyIncoming = async function(record) {
--- a/services/sync/tests/unit/test_engine_changes_during_sync.js
+++ b/services/sync/tests/unit/test_engine_changes_during_sync.js
@@ -35,17 +35,17 @@ async function cleanup(engine, server) {
 }
 
 add_task(async function test_history_change_during_sync() {
   _("Ensure that we don't bump the score when applying history records.");
 
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("history");
-  let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
+  let server = await serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("history");
 
   // Override `uploadOutgoing` to insert a record while we're applying
   // changes. The tracker should ignore this change.
   let uploadOutgoing = engine._uploadOutgoing;
   engine._uploadOutgoing = async function() {
     engine._uploadOutgoing = uploadOutgoing;
@@ -89,17 +89,17 @@ add_task(async function test_history_cha
 });
 
 add_task(async function test_passwords_change_during_sync() {
   _("Ensure that we don't bump the score when applying passwords.");
 
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("passwords");
-  let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
+  let server = await serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("passwords");
 
   let uploadOutgoing = engine._uploadOutgoing;
   engine._uploadOutgoing = async function() {
     engine._uploadOutgoing = uploadOutgoing;
     try {
       await uploadOutgoing.call(this);
@@ -146,17 +146,17 @@ add_task(async function test_passwords_c
 add_task(async function test_prefs_change_during_sync() {
   _("Ensure that we don't bump the score when applying prefs.");
 
   const TEST_PREF = "services.sync.prefs.sync.test.duringSync";
 
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("prefs");
-  let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
+  let server = await serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("prefs");
 
   let uploadOutgoing = engine._uploadOutgoing;
   engine._uploadOutgoing = async function() {
     engine._uploadOutgoing = uploadOutgoing;
     try {
       await uploadOutgoing.call(this);
@@ -205,17 +205,17 @@ add_task(async function test_prefs_chang
 });
 
 add_task(async function test_forms_change_during_sync() {
   _("Ensure that we don't bump the score when applying form records.");
 
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("forms");
-  let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
+  let server = await serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("forms");
 
   let uploadOutgoing = engine._uploadOutgoing;
   engine._uploadOutgoing = async function() {
     engine._uploadOutgoing = uploadOutgoing;
     try {
       await uploadOutgoing.call(this);
@@ -277,17 +277,17 @@ add_task(async function test_bookmark_ch
 
   await PlacesTestUtils.setBookmarkSyncFields({
     guid: bzBmk.guid,
     syncChangeCounter: 0,
     syncStatus: PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
   });
 
   let engine = Service.engineManager.get("bookmarks");
-  let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
+  let server = await serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("bookmarks");
 
   let bmk3; // New child of Folder 1, created locally during sync.
 
   let uploadOutgoing = engine._uploadOutgoing;
   engine._uploadOutgoing = async function() {
     engine._uploadOutgoing = uploadOutgoing;
--- a/services/sync/tests/unit/test_errorhandler_2.js
+++ b/services/sync/tests/unit/test_errorhandler_2.js
@@ -331,17 +331,17 @@ add_task(async function test_wipeRemote_
   enableValidationPrefs();
 
   // Test that we report prolonged server maintenance errors that occur whilst
   // wiping all remote devices.
   let server = await EHTestsCommon.sync_httpd_setup();
 
   server.registerPathHandler("/1.1/broken.wipe/storage/catapult", EHTestsCommon.service_unavailable);
   await configureIdentity({username: "broken.wipe"}, server);
-  EHTestsCommon.generateAndUploadKeys();
+  await EHTestsCommon.generateAndUploadKeys();
 
   engine.exception = null;
   engine.enabled = true;
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -559,17 +559,17 @@ add_task(async function test_wipeServer_
 add_task(async function test_wipeRemote_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test that we report prolonged server maintenance errors that occur whilst
   // wiping all remote devices.
   let server = await EHTestsCommon.sync_httpd_setup();
 
   await configureIdentity({username: "broken.wipe"}, server);
-  EHTestsCommon.generateAndUploadKeys();
+  await EHTestsCommon.generateAndUploadKeys();
 
   engine.exception = null;
   engine.enabled = true;
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
--- a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
+++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
@@ -53,19 +53,19 @@ async function sync_httpd_setup() {
 }
 
 async function setUp(server) {
   await configureIdentity({username: "johndoe"}, server);
   new FakeCryptoService();
 }
 
 async function generateAndUploadKeys(server) {
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
-  serverKeys.encrypt(Service.identity.syncKeyBundle);
+  await serverKeys.encrypt(Service.identity.syncKeyBundle);
   let res = Service.resource(server.baseURI + "/1.1/johndoe/storage/crypto/keys");
   return (await serverKeys.upload(res)).success;
 }
 
 add_task(async function run_test() {
   validate_all_future_pings();
   await engineManager.register(CatapultEngine);
 });
--- a/services/sync/tests/unit/test_interval_triggers.js
+++ b/services/sync/tests/unit/test_interval_triggers.js
@@ -33,19 +33,19 @@ function sync_httpd_setup() {
     "/1.1/johndoe/storage/crypto/keys":
       upd("crypto", (new ServerWBO("keys")).handler()),
     "/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler())
   });
 }
 
 async function setUp(server) {
   await configureIdentity({username: "johndoe"}, server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
-  serverKeys.encrypt(Service.identity.syncKeyBundle);
+  await serverKeys.encrypt(Service.identity.syncKeyBundle);
   serverKeys.upload(Service.resource(Service.cryptoKeysURL));
 }
 
 add_task(async function setup() {
   initTestLogging("Trace");
 
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
--- a/services/sync/tests/unit/test_keys.js
+++ b/services/sync/tests/unit/test_keys.js
@@ -18,30 +18,31 @@ function sha256HMAC(message, key) {
 
 function do_check_keypair_eq(a, b) {
   do_check_eq(2, a.length);
   do_check_eq(2, b.length);
   do_check_eq(a[0], b[0]);
   do_check_eq(a[1], b[1]);
 }
 
-function test_time_keyFromString(iterations) {
+add_task(async function test_time_keyFromString() {
+  const iterations = 1000;
   let o;
   let b = new BulkKeyBundle("dummy");
   let d = Utils.decodeKeyBase32("ababcdefabcdefabcdefabcdef");
-  b.generateRandom();
+  await b.generateRandom();
 
   _("Running " + iterations + " iterations of hmacKeyObject + sha256HMAC.");
   for (let i = 0; i < iterations; ++i) {
     let k = b.hmacKeyObject;
     o = sha256HMAC(d, k);
   }
   do_check_true(!!o);
   _("Done.");
-}
+});
 
 add_test(function test_set_invalid_values() {
   _("Ensure that setting invalid encryption and HMAC key values is caught.");
 
   let bundle = new BulkKeyBundle("foo");
 
   let thrown = false;
   try {
@@ -131,42 +132,42 @@ add_task(async function test_ensureLogge
   /*
    * Build a test version of storage/crypto/keys.
    * Encrypt it with the sync key.
    * Pass it into the CollectionKeyManager.
    */
 
   log.info("Building storage keys...");
   let storage_keys = new CryptoWrapper("crypto", "keys");
-  let default_key64 = Weave.Crypto.generateRandomKey();
-  let default_hmac64 = Weave.Crypto.generateRandomKey();
-  let bookmarks_key64 = Weave.Crypto.generateRandomKey();
-  let bookmarks_hmac64 = Weave.Crypto.generateRandomKey();
+  let default_key64 = await Weave.Crypto.generateRandomKey();
+  let default_hmac64 = await Weave.Crypto.generateRandomKey();
+  let bookmarks_key64 = await Weave.Crypto.generateRandomKey();
+  let bookmarks_hmac64 = await Weave.Crypto.generateRandomKey();
 
   storage_keys.cleartext = {
     "default": [default_key64, default_hmac64],
     "collections": {"bookmarks": [bookmarks_key64, bookmarks_hmac64]},
   };
   storage_keys.modified = Date.now() / 1000;
   storage_keys.id = "keys";
 
   log.info("Encrypting storage keys...");
 
   // Use passphrase (sync key) itself to encrypt the key bundle.
-  storage_keys.encrypt(keyBundle);
+  await storage_keys.encrypt(keyBundle);
 
   // Sanity checking.
   do_check_true(null == storage_keys.cleartext);
   do_check_true(null != storage_keys.ciphertext);
 
   log.info("Updating collection keys.");
 
   // updateContents decrypts the object, releasing the payload for us to use.
   // Returns true, because the default key has changed.
-  do_check_true(collectionKeys.updateContents(keyBundle, storage_keys));
+  do_check_true(await collectionKeys.updateContents(keyBundle, storage_keys));
   let payload = storage_keys.cleartext;
 
   _("CK: " + JSON.stringify(collectionKeys._collections));
 
   // Test that the CollectionKeyManager returns a similar WBO.
   let wbo = collectionKeys.asWBO("crypto", "keys");
 
   _("WBO: " + JSON.stringify(wbo));
@@ -180,18 +181,18 @@ add_task(async function test_ensureLogge
   do_check_true(!!wbo.cleartext.default);
   do_check_keypair_eq(payload.default, wbo.cleartext.default);
   do_check_keypair_eq(payload.collections.bookmarks, wbo.cleartext.collections.bookmarks);
 
   do_check_true("bookmarks" in collectionKeys._collections);
   do_check_false("tabs" in collectionKeys._collections);
 
   _("Updating contents twice with the same data doesn't proceed.");
-  storage_keys.encrypt(keyBundle);
-  do_check_false(collectionKeys.updateContents(keyBundle, storage_keys));
+  await storage_keys.encrypt(keyBundle);
+  do_check_false(await collectionKeys.updateContents(keyBundle, storage_keys));
 
   /*
    * Test that we get the right keys out when we ask for
    * a collection's tokens.
    */
   let b1 = new BulkKeyBundle("bookmarks");
   b1.keyPairB64 = [bookmarks_key64, bookmarks_hmac64];
   let b2 = collectionKeys.keyForCollection("bookmarks");
@@ -221,26 +222,26 @@ add_task(async function test_ensureLogge
   do_check_true(collectionKeys.updateNeeded(info_collections));
 
   collectionKeys.lastModified = null;
   do_check_true(collectionKeys.updateNeeded({}));
 
   /*
    * Check _compareKeyBundleCollections.
    */
-  function newBundle(name) {
+  async function newBundle(name) {
     let r = new BulkKeyBundle(name);
-    r.generateRandom();
+    await r.generateRandom();
     return r;
   }
-  let k1 = newBundle("k1");
-  let k2 = newBundle("k2");
-  let k3 = newBundle("k3");
-  let k4 = newBundle("k4");
-  let k5 = newBundle("k5");
+  let k1 = await newBundle("k1");
+  let k2 = await newBundle("k2");
+  let k3 = await newBundle("k3");
+  let k4 = await newBundle("k4");
+  let k5 = await newBundle("k5");
   let coll1 = {"foo": k1, "bar": k2};
   let coll2 = {"foo": k1, "bar": k2};
   let coll3 = {"foo": k1, "bar": k3};
   let coll4 = {"foo": k4};
   let coll5 = {"baz": k5, "bar": k2};
   let coll6 = {};
 
   let d1 = collectionKeys._compareKeyBundleCollections(coll1, coll2); // []
@@ -263,15 +264,8 @@ add_task(async function test_ensureLogge
 
   do_check_array_eq(d1.changed, []);
   do_check_array_eq(d2.changed, ["bar"]);
   do_check_array_eq(d3.changed, ["bar"]);
   do_check_array_eq(d4.changed, ["bar", "foo"]);
   do_check_array_eq(d5.changed, ["baz", "foo"]);
   do_check_array_eq(d6.changed, ["bar", "foo"]);
 });
-
-function run_test() {
-  // Only do 1,000 to avoid a 5-second pause in test runs.
-  test_time_keyFromString(1000);
-
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_password_engine.js
+++ b/services/sync/tests/unit/test_password_engine.js
@@ -22,17 +22,17 @@ async function cleanup(engine, server) {
   await promiseStopServer(server);
 }
 
 add_task(async function test_ignored_fields() {
   _("Only changes to syncable fields should be tracked");
 
   let engine = Service.engineManager.get("passwords");
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   enableValidationPrefs();
 
   let login = Services.logins.addLogin(new LoginInfo("https://example.com", "",
     null, "username", "password", "", ""));
   login.QueryInterface(Ci.nsILoginMetaInfo); // For `guid`.
 
@@ -59,17 +59,17 @@ add_task(async function test_ignored_fie
   }
 });
 
 add_task(async function test_ignored_sync_credentials() {
   _("Sync credentials in login manager should be ignored");
 
   let engine = Service.engineManager.get("passwords");
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   enableValidationPrefs();
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     let login = Services.logins.addLogin(new LoginInfo(FXA_PWDMGR_HOST, null,
@@ -89,17 +89,17 @@ add_task(async function test_ignored_syn
   }
 });
 
 add_task(async function test_password_engine() {
   _("Basic password sync test");
 
   let engine = Service.engineManager.get("passwords");
 
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("passwords");
 
   enableValidationPrefs();
 
   _("Add new login to upload during first sync");
   let newLogin;
   {
--- a/services/sync/tests/unit/test_records_crypto.js
+++ b/services/sync/tests/unit/test_records_crypto.js
@@ -44,123 +44,123 @@ add_task(async function test_records_cry
     log.info("Creating a record");
 
     cryptoWrap = prepareCryptoWrap("steam", "resource");
 
     log.info("cryptoWrap: " + cryptoWrap.toString());
 
     log.info("Encrypting a record");
 
-    cryptoWrap.encrypt(keyBundle);
+    await cryptoWrap.encrypt(keyBundle);
     log.info("Ciphertext is " + cryptoWrap.ciphertext);
     do_check_true(cryptoWrap.ciphertext != null);
 
     let firstIV = cryptoWrap.IV;
 
     log.info("Decrypting the record");
 
-    let payload = cryptoWrap.decrypt(keyBundle);
+    let payload = await cryptoWrap.decrypt(keyBundle);
     do_check_eq(payload.stuff, "my payload here");
     do_check_neq(payload, cryptoWrap.payload); // wrap.data.payload is the encrypted one
 
     log.info("Make sure multiple decrypts cause failures");
     let error = "";
     try {
-      payload = cryptoWrap.decrypt(keyBundle);
+      payload = await cryptoWrap.decrypt(keyBundle);
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.message, "No ciphertext: nothing to decrypt?");
 
     log.info("Re-encrypting the record with alternate payload");
 
     cryptoWrap.cleartext.stuff = "another payload";
-    cryptoWrap.encrypt(keyBundle);
+    await cryptoWrap.encrypt(keyBundle);
     let secondIV = cryptoWrap.IV;
-    payload = cryptoWrap.decrypt(keyBundle);
+    payload = await cryptoWrap.decrypt(keyBundle);
     do_check_eq(payload.stuff, "another payload");
 
     log.info("Make sure multiple encrypts use different IVs");
     do_check_neq(firstIV, secondIV);
 
-    log.info("Make sure differing ids cause failures");
-    cryptoWrap.encrypt(keyBundle);
+    log.info("Make sure differing ids cause failures");
+    await cryptoWrap.encrypt(keyBundle);
     cryptoWrap.data.id = "other";
     error = "";
     try {
-      cryptoWrap.decrypt(keyBundle);
+      await cryptoWrap.decrypt(keyBundle);
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.message, "Record id mismatch: resource != other");
 
     log.info("Make sure wrong hmacs cause failures");
-    cryptoWrap.encrypt(keyBundle);
+    await cryptoWrap.encrypt(keyBundle);
     cryptoWrap.hmac = "foo";
     error = "";
     try {
-      cryptoWrap.decrypt(keyBundle);
+      await cryptoWrap.decrypt(keyBundle);
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.message.substr(0, 42), "Record SHA256 HMAC mismatch: should be foo");
 
     // Checking per-collection keys and default key handling.
 
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
     let bookmarkItem = prepareCryptoWrap("bookmarks", "foo");
-    bookmarkItem.encrypt(Service.collectionKeys.keyForCollection("bookmarks"));
+    await bookmarkItem.encrypt(Service.collectionKeys.keyForCollection("bookmarks"));
     log.info("Ciphertext is " + bookmarkItem.ciphertext);
     do_check_true(bookmarkItem.ciphertext != null);
     log.info("Decrypting the record explicitly with the default key.");
-    do_check_eq(bookmarkItem.decrypt(Service.collectionKeys._default).stuff, "my payload here");
+    do_check_eq((await bookmarkItem.decrypt(Service.collectionKeys._default)).stuff, "my payload here");
 
     // Per-collection keys.
     // Generate a key for "bookmarks".
-    generateNewKeys(Service.collectionKeys, ["bookmarks"]);
+    await generateNewKeys(Service.collectionKeys, ["bookmarks"]);
     bookmarkItem = prepareCryptoWrap("bookmarks", "foo");
     do_check_eq(bookmarkItem.collection, "bookmarks");
 
     // Encrypt. This'll use the "bookmarks" encryption key, because we have a
     // special key for it. The same key will need to be used for decryption.
-    bookmarkItem.encrypt(Service.collectionKeys.keyForCollection("bookmarks"));
+    await bookmarkItem.encrypt(Service.collectionKeys.keyForCollection("bookmarks"));
     do_check_true(bookmarkItem.ciphertext != null);
 
     // Attempt to use the default key, because this is a collision that could
     // conceivably occur in the real world. Decryption will error, because
     // it's not the bookmarks key.
     let err;
     try {
-      bookmarkItem.decrypt(Service.collectionKeys._default);
+      await bookmarkItem.decrypt(Service.collectionKeys._default);
     } catch (ex) {
       err = ex;
     }
     do_check_eq("Record SHA256 HMAC mismatch", err.message.substr(0, 27));
 
     // Explicitly check that it's using the bookmarks key.
     // This should succeed.
-    do_check_eq(bookmarkItem.decrypt(Service.collectionKeys.keyForCollection("bookmarks")).stuff,
-        "my payload here");
+    do_check_eq((await bookmarkItem.decrypt(Service.collectionKeys.keyForCollection("bookmarks"))).stuff,
+                "my payload here");
 
     do_check_true(Service.collectionKeys.hasKeysFor(["bookmarks"]));
 
     // Add a key for some new collection and verify that it isn't the
     // default key.
     do_check_false(Service.collectionKeys.hasKeysFor(["forms"]));
     do_check_false(Service.collectionKeys.hasKeysFor(["bookmarks", "forms"]));
     let oldFormsKey = Service.collectionKeys.keyForCollection("forms");
     do_check_eq(oldFormsKey, Service.collectionKeys._default);
-    let newKeys = Service.collectionKeys.ensureKeysFor(["forms"]);
+    let newKeys = await Service.collectionKeys.ensureKeysFor(["forms"]);
     do_check_true(newKeys.hasKeysFor(["forms"]));
     do_check_true(newKeys.hasKeysFor(["bookmarks", "forms"]));
     let newFormsKey = newKeys.keyForCollection("forms");
     do_check_neq(newFormsKey, oldFormsKey);
 
     // Verify that this doesn't overwrite keys
-    let regetKeys = newKeys.ensureKeysFor(["forms"]);
+    let regetKeys = await newKeys.ensureKeysFor(["forms"]);
     do_check_eq(regetKeys.keyForCollection("forms"), newFormsKey);
 
     const emptyKeys = new CollectionKeyManager();
     payload = {
       default: Service.collectionKeys._default.keyPairB64,
       collections: {}
     };
     // Verify that not passing `modified` doesn't throw
--- a/services/sync/tests/unit/test_service_detect_upgrade.js
+++ b/services/sync/tests/unit/test_service_detect_upgrade.js
@@ -97,17 +97,17 @@ add_task(async function v4_upgrade() {
 
     async function retrieve_server_default() {
       serverKeys = serverResp = serverDecrypted = null;
 
       serverKeys = new CryptoWrapper("crypto", "keys");
       serverResp = (await serverKeys.fetch(Service.resource(Service.cryptoKeysURL))).response;
       do_check_true(serverResp.success);
 
-      serverDecrypted = serverKeys.decrypt(Service.identity.syncKeyBundle);
+      serverDecrypted = await serverKeys.decrypt(Service.identity.syncKeyBundle);
       _("Retrieved WBO:       " + JSON.stringify(serverDecrypted));
       _("serverKeys:          " + JSON.stringify(serverKeys));
 
       return serverDecrypted.default;
     }
 
     async function retrieve_and_compare_default(should_succeed) {
       let serverDefault = await retrieve_server_default();
@@ -121,17 +121,17 @@ add_task(async function v4_upgrade() {
       else
         do_check_neq(JSON.stringify(serverDefault), JSON.stringify(localDefault));
     }
 
     // Uses the objects set above.
     async function set_server_keys(pair) {
       serverDecrypted.default = pair;
       serverKeys.cleartext = serverDecrypted;
-      serverKeys.encrypt(Service.identity.syncKeyBundle);
+      await serverKeys.encrypt(Service.identity.syncKeyBundle);
       await serverKeys.upload(Service.resource(Service.cryptoKeysURL));
     }
 
     _("Checking we have the latest keys.");
     await retrieve_and_compare_default(true);
 
     _("Update keys on server.");
     await set_server_keys(["KaaaaaaaaaaaHAtfmuRY0XEJ7LXfFuqvF7opFdBD/MY=",
@@ -198,40 +198,40 @@ add_task(async function v5_upgrade() {
     Service.clusterURL = server.baseURI + "/";
 
     await configureIdentity({ "username": "johndoe" }, server);
 
     // Test an upgrade where the contents of the server would cause us to error
     // -- keys decrypted with a different sync key, for example.
     _("Testing v4 -> v5 (or similar) upgrade.");
     async function update_server_keys(syncKeyBundle, wboName, collWBO) {
-      generateNewKeys(Service.collectionKeys);
+      await generateNewKeys(Service.collectionKeys);
       let serverKeys = Service.collectionKeys.asWBO("crypto", wboName);
-      serverKeys.encrypt(syncKeyBundle);
+      await serverKeys.encrypt(syncKeyBundle);
       let res = Service.resource(Service.storageURL + collWBO);
       do_check_true((await serverKeys.upload(res)).success);
     }
 
     _("Bumping version.");
     // Bump version on the server.
     let m = new WBORecord("meta", "global");
     m.payload = {"syncID": "foooooooooooooooooooooooooo",
                  "storageVersion": STORAGE_VERSION + 1};
     await m.upload(Service.resource(Service.metaURL));
 
     _("New meta/global: " + JSON.stringify(meta_global));
 
     // Fill the keys with bad data.
     let badKeys = new BulkKeyBundle("crypto");
-    badKeys.generateRandom();
+    await badKeys.generateRandom();
     await update_server_keys(badKeys, "keys", "crypto/keys");  // v4
     await update_server_keys(badKeys, "bulk", "crypto/bulk");  // v5
 
     _("Generating new keys.");
-    generateNewKeys(Service.collectionKeys);
+    await generateNewKeys(Service.collectionKeys);
 
     // Now sync and see what happens. It should be a version fail, not a crypto
     // fail.
 
     _("Logging in.");
     try {
       await Service.login();
     } catch (e) {
--- a/services/sync/tests/unit/test_service_sync_remoteSetup.js
+++ b/services/sync/tests/unit/test_service_sync_remoteSetup.js
@@ -179,20 +179,20 @@ add_task(async function run_test() {
     do_check_eq(metaModified, meta_global.modified);
 
     // Try to screw up HMAC calculation.
     // Re-encrypt keys with a new random keybundle, and upload them to the
     // server, just as might happen with a second client.
     _("Attempting to screw up HMAC by re-encrypting keys.");
     let keys = Service.collectionKeys.asWBO();
     let b = new BulkKeyBundle("hmacerror");
-    b.generateRandom();
+    await b.generateRandom();
     collections.crypto = keys.modified = 100 + (Date.now() / 1000);  // Future modification time.
-    keys.encrypt(b);
-    keys.upload(Service.resource(Service.cryptoKeysURL));
+    await keys.encrypt(b);
+    await keys.upload(Service.resource(Service.cryptoKeysURL));
 
     do_check_false((await Service.verifyAndFetchSymmetricKeys()));
     do_check_eq(Service.status.login, LOGIN_FAILED_INVALID_PASSPHRASE);
   } finally {
     Svc.Prefs.resetBranch("");
     server.stop(do_test_finished);
   }
 });
--- a/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
+++ b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
@@ -62,19 +62,19 @@ function sync_httpd_setup(handlers) {
 
   return httpd_setup(handlers);
 }
 
 async function setUp(server) {
   await SyncTestingInfrastructure(server, "johndoe", "ilovejane");
   // Ensure that the server has valid keys so that logging in will work and not
   // result in a server wipe, rendering many of these tests useless.
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
-  serverKeys.encrypt(Service.identity.syncKeyBundle);
+  await serverKeys.encrypt(Service.identity.syncKeyBundle);
-  return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+  return (await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success;
 }
 
 const PAYLOAD = 42;
 
 add_task(async function setup() {
   initTestLogging();
   Service.engineManager.clear();
@@ -256,18 +256,18 @@ add_task(async function test_enabledRemo
     upd("steam", new ServerWBO("steam", {}).handler())
   });
   await setUp(server);
 
   // We need to be very careful how we do this, so that we don't trigger a
   // fresh start!
   try {
     _("Upload some keys to avoid a fresh start.");
-    let wbo = Service.collectionKeys.generateNewKeysWBO();
-    wbo.encrypt(Service.identity.syncKeyBundle);
+    let wbo = await Service.collectionKeys.generateNewKeysWBO();
+    await wbo.encrypt(Service.identity.syncKeyBundle);
     do_check_eq(200, (await wbo.upload(Service.resource(Service.cryptoKeysURL))).status);
 
     _("Engine is disabled.");
     do_check_false(engine.enabled);
 
     _("Sync.");
     await Service.sync();
 
--- a/services/sync/tests/unit/test_service_wipeClient.js
+++ b/services/sync/tests/unit/test_service_wipeClient.js
@@ -71,13 +71,13 @@ add_task(async function test_withEngineL
   } finally {
     canDecryptEngine.wasWiped = false;
     cannotDecryptEngine.wasWiped = false;
     await Service.startOver();
   }
 });
 
 add_task(async function test_startOver_clears_keys() {
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
   do_check_true(!!Service.collectionKeys.keyForCollection());
   await Service.startOver();
   do_check_false(!!Service.collectionKeys.keyForCollection());
 });
--- a/services/sync/tests/unit/test_syncengine_sync.js
+++ b/services/sync/tests/unit/test_syncengine_sync.js
@@ -51,36 +51,36 @@ async function createServerAndConfigureC
   server.start();
 
   await SyncTestingInfrastructure(server, USER);
   Service._updateCachedURLs();
 
   return [engine, server, USER];
 }
 
-function run_test() {
-  generateNewKeys(Service.collectionKeys);
-  Svc.Prefs.set("log.logger.engine.rotary", "Trace");
-  run_next_test();
-}
 
 /*
  * Tests
  *
  * SyncEngine._sync() is divided into four rather independent steps:
  *
  * - _syncStartup()
  * - _processIncoming()
  * - _uploadOutgoing()
  * - _syncFinish()
  *
  * In the spirit of unit testing, these are tested individually for
  * different scenarios below.
  */
 
+add_task(async function setup() {
+  await generateNewKeys(Service.collectionKeys);
+  Svc.Prefs.set("log.logger.engine.rotary", "Trace");
+});
+
 add_task(async function test_syncStartup_emptyOrOutdatedGlobalsResetsSync() {
   _("SyncEngine._syncStartup resets sync and wipes server data if there's no or an outdated global record");
 
   // Some server side data that's going to be wiped
   let collection = new ServerCollection();
   collection.insert("flying",
                     encryptPayload({id: "flying",
                                     denomination: "LNER Class A3 4472"}));
@@ -235,17 +235,17 @@ add_task(async function test_processInco
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler(),
       "/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
       "/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler()
   });
 
   await SyncTestingInfrastructure(server);
 
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let engine = makeRotaryEngine();
   let meta_global = Service.recordManager.set(engine.metaURL,
                                               new WBORecord(engine.metaURL));
   meta_global.payload.engines = {rotary: {version: engine.version,
                                          syncID: engine.syncID}};
 
   try {
@@ -1160,17 +1160,17 @@ add_task(async function test_uploadOutgo
 
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler(),
       "/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
       "/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler()
   });
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let engine = makeRotaryEngine();
   engine.lastSync = 123; // needs to be non-zero so that tracker is queried
   engine._store.items = {flying: "LNER Class A3 4472",
                          scotsman: "Flying Scotsman"};
   // Mark one of these records as changed
   engine._tracker.addChangedID("scotsman", 0);
 
@@ -1216,17 +1216,17 @@ async function test_uploadOutgoing_max_r
 
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler(),
       "/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
       "/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler(),
   });
 
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let engine = makeRotaryEngine();
   engine.allowSkippedRecord = allowSkippedRecord;
   engine.lastSync = 1;
   engine._store.items = { flying: "a".repeat(1024 * 1024), scotsman: "abcd" };
 
   engine._tracker.addChangedID("flying", 1000);
   engine._tracker.addChangedID("scotsman", 1000);
@@ -1588,17 +1588,17 @@ add_task(async function test_sync_partia
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler()
   });
   let oldServerConfiguration = Service.serverConfiguration;
   Service.serverConfiguration = {
     max_post_records: 100
   };
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let engine = makeRotaryEngine();
   engine.lastSync = 123; // needs to be non-zero so that tracker is queried
   engine.lastSyncLocal = 456;
 
   // Let the third upload fail completely
   var noOfUploads = 0;
   collection.post = (function(orig) {
@@ -1683,17 +1683,17 @@ add_task(async function test_canDecrypt_
   } finally {
     await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_canDecrypt_true() {
   _("SyncEngine.canDecrypt returns true if the engine can decrypt the items on the server.");
 
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let collection = new ServerCollection();
   collection._wbos.flying = new ServerWBO(
       "flying", encryptPayload({id: "flying",
                                 denomination: "LNER Class A3 4472"}));
 
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler()
--- a/services/sync/tests/unit/test_syncscheduler.js
+++ b/services/sync/tests/unit/test_syncscheduler.js
@@ -46,19 +46,19 @@ function sync_httpd_setup() {
       upd("crypto", (new ServerWBO("keys")).handler()),
     "/1.1/johndoe@mozilla.com/storage/clients": upd("clients", clientsColl.handler())
   });
 }
 
 async function setUp(server) {
   await configureIdentity({username: "johndoe@mozilla.com"}, server);
 
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
-  serverKeys.encrypt(Service.identity.syncKeyBundle);
+  await serverKeys.encrypt(Service.identity.syncKeyBundle);
   let result = (await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success;
   return result;
 }
 
 async function cleanUpAndGo(server) {
   await Async.promiseYield();
   await clientsEngine._store.wipe();
   await Service.startOver();
@@ -813,18 +813,18 @@ add_task(async function test_sync_X_Weav
   server.registerPathHandler(INFO_COLLECTIONS, infoCollWithBackoff);
 
   // Pretend we have two clients so that the regular sync interval is
   // sufficiently low.
   await clientsEngine._store.create(
     { id: "foo", cleartext: { os: "mobile", version: "0.01", type: "desktop" } }
   );
   let rec = await clientsEngine._store.createRecord("foo", "clients");
-  rec.encrypt(Service.collectionKeys.keyForCollection("clients"));
-  rec.upload(Service.resource(clientsEngine.engineURL + rec.id));
+  await rec.encrypt(Service.collectionKeys.keyForCollection("clients"));
+  await rec.upload(Service.resource(clientsEngine.engineURL + rec.id));
 
   // Sync once to log in and get everything set up. Let's verify our initial
   // values.
   await Service.sync();
   do_check_eq(Status.backoffInterval, 0);
   do_check_eq(Status.minimumNextSync, 0);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
   do_check_true(scheduler.nextSync <=
@@ -874,18 +874,18 @@ add_task(async function test_sync_503_Re
   server.registerPathHandler(INFO_COLLECTIONS, infoCollWithMaintenance);
 
   // Pretend we have two clients so that the regular sync interval is
   // sufficiently low.
   await clientsEngine._store.create(
     { id: "foo", cleartext: { os: "mobile", version: "0.01", type: "desktop" } }
   );
   let rec = await clientsEngine._store.createRecord("foo", "clients");
-  rec.encrypt(Service.collectionKeys.keyForCollection("clients"));
-  rec.upload(Service.resource(clientsEngine.engineURL + rec.id));
+  await rec.encrypt(Service.collectionKeys.keyForCollection("clients"));
+  await rec.upload(Service.resource(clientsEngine.engineURL + rec.id));
 
   // Sync once to log in and get everything set up. Let's verify our initial
   // values.
   await Service.sync();
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.backoffInterval, 0);
   do_check_eq(Status.minimumNextSync, 0);
   do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
--- a/services/sync/tests/unit/test_tab_engine.js
+++ b/services/sync/tests/unit/test_tab_engine.js
@@ -75,17 +75,17 @@ add_task(async function test_tab_engine_
 
   await SyncTestingInfrastructure(server);
 
   let meta_global = Service.recordManager.set(engine.metaURL,
                                               new WBORecord(engine.metaURL));
   meta_global.payload.engines = {tabs: {version: engine.version,
                                         syncID: engine.syncID}};
 
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let promiseFinished = new Promise(resolve => {
     let syncFinish = engine._syncFinish;
     engine._syncFinish = async function() {
       equal(applied.length, 1, "Remote client record was applied");
       equal(applied[0].id, remoteID, "Remote client ID matches");
 
       await syncFinish.call(engine);
--- a/services/sync/tests/unit/test_telemetry.js
+++ b/services/sync/tests/unit/test_telemetry.js
@@ -105,17 +105,17 @@ add_task(async function test_basic() {
   Svc.Prefs.resetBranch("");
   await promiseStopServer(server);
 });
 
 add_task(async function test_processIncoming_error() {
   let engine = new BookmarksEngine(Service);
   await engine.initialize();
   let store  = engine._store;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("bookmarks");
   try {
     // Create a bogus record that when synced down will provoke a
     // network error which in turn provokes an exception in _processIncoming.
     const BOGUS_GUID = "zzzzzzzzzzzz";
     let bogus_record = collection.insert(BOGUS_GUID, "I'm a bogus record!");
     bogus_record.get = function get() {
@@ -156,17 +156,17 @@ add_task(async function test_processInco
     await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_uploading() {
   let engine = new BookmarksEngine(Service);
   await engine.initialize();
   let store  = engine._store;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let parent = PlacesUtils.toolbarFolderId;
   let uri = CommonUtils.makeURI("http://getfirefox.com/");
 
   let bmk_id = PlacesUtils.bookmarks.insertBookmark(parent, uri,
     PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
 
@@ -252,17 +252,17 @@ add_task(async function test_upload_fail
 });
 
 add_task(async function test_sync_partialUpload() {
   let collection = new ServerCollection();
   let server = sync_httpd_setup({
       "/1.1/foo/storage/rotary": collection.handler()
   });
   await SyncTestingInfrastructure(server);
-  generateNewKeys(Service.collectionKeys);
+  await generateNewKeys(Service.collectionKeys);
 
   let engine = new RotaryEngine(Service);
   engine.lastSync = 123;
   engine.lastSyncLocal = 456;
 
 
   // Create a bunch of records (and server side handlers)
   for (let i = 0; i < 234; i++) {
@@ -335,17 +335,17 @@ add_task(async function test_sync_partia
 });
 
 add_task(async function test_generic_engine_fail() {
   enableValidationPrefs();
 
   await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   let e = new Error("generic failure message")
   engine._errToThrow = e;
 
   try {
     _(`test_generic_engine_fail: Steam tracker contents: ${
       JSON.stringify(engine._tracker.changedIDs)}`);
     let ping = await sync_and_validate_telem(true);
@@ -361,17 +361,17 @@ add_task(async function test_generic_eng
 });
 
 add_task(async function test_engine_fail_ioerror() {
   enableValidationPrefs();
 
   await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   // create an IOError to re-throw as part of Sync.
   try {
     // (Note that fakeservices.js has replaced Utils.jsonMove etc, but for
     // this test we need the real one so we get real exceptions from the
     // filesystem.)
     await Utils._real_jsonMove("file-does-not-exist", "anything", {});
   } catch (ex) {
@@ -396,17 +396,17 @@ add_task(async function test_engine_fail
 });
 
 add_task(async function test_clean_urls() {
   enableValidationPrefs();
 
-  Service.engineManager.register(SteamEngine);
+  await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   engine._errToThrow = new TypeError("http://www.google .com is not a valid URL.");
 
   try {
     _(`test_clean_urls: Steam tracker contents: ${
       JSON.stringify(engine._tracker.changedIDs)}`);
     let ping = await sync_and_validate_telem(true);
     equal(ping.status.service, SYNC_FAILED_PARTIAL);
@@ -430,17 +430,17 @@ add_task(async function test_clean_urls(
 add_task(async function test_initial_sync_engines() {
   enableValidationPrefs();
 
   await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   // These are the only ones who actually have things to sync at startup.
   let engineNames = ["clients", "bookmarks", "prefs", "tabs"];
-  let server = serverForEnginesWithKeys({"foo": "password"}, ["bookmarks", "prefs", "tabs"].map(name =>
+  let server = await serverForEnginesWithKeys({"foo": "password"}, ["bookmarks", "prefs", "tabs"].map(name =>
     Service.engineManager.get(name)
   ));
   await SyncTestingInfrastructure(server);
   try {
     _(`test_initial_sync_engines: Steam tracker contents: ${
       JSON.stringify(engine._tracker.changedIDs)}`);
     let ping = await wait_for_ping(() => Service.sync(), true);
 
@@ -465,17 +465,17 @@ add_task(async function test_initial_syn
 });
 
 add_task(async function test_nserror() {
   enableValidationPrefs();
 
   await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   engine._errToThrow = Components.Exception("NS_ERROR_UNKNOWN_HOST", Cr.NS_ERROR_UNKNOWN_HOST);
   try {
     _(`test_nserror: Steam tracker contents: ${
       JSON.stringify(engine._tracker.changedIDs)}`);
     let ping = await sync_and_validate_telem(true);
     deepEqual(ping.status, {
       service: SYNC_FAILED_PARTIAL,
@@ -540,17 +540,17 @@ add_task(async function test_discarding(
 })
 
 add_task(async function test_no_foreign_engines_in_error_ping() {
   enableValidationPrefs();
 
   await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   engine._errToThrow = new Error("Oh no!");
   await SyncTestingInfrastructure(server);
   try {
     let ping = await sync_and_validate_telem(true);
     equal(ping.status.service, SYNC_FAILED_PARTIAL);
     ok(ping.engines.every(e => e.name !== "bogus"));
   } finally {
     await cleanAndGo(engine, server);
@@ -559,17 +559,17 @@ add_task(async function test_no_foreign_
 });
 
 add_task(async function test_sql_error() {
   enableValidationPrefs();
 
   await Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
   await SyncTestingInfrastructure(server);
   engine._sync = function() {
     // Just grab a DB connection and issue a bogus SQL statement synchronously.
     let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase).DBConnection;
     Async.querySpinningly(db.createAsyncStatement("select bar from foo"));
   };
   try {
     _(`test_sql_error: Steam tracker contents: ${
@@ -584,17 +584,17 @@ add_task(async function test_sql_error()
 });
 
 add_task(async function test_no_foreign_engines_in_success_ping() {
   enableValidationPrefs();
 
   await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
   try {
     let ping = await sync_and_validate_telem();
     ok(ping.engines.every(e => e.name !== "bogus"));
   } finally {
     await cleanAndGo(engine, server);
     Service.engineManager.unregister(engine);
@@ -602,17 +602,17 @@ add_task(async function test_no_foreign_
 });
 
 add_task(async function test_events() {
   enableValidationPrefs();
 
   await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
   try {
     let serverTime = AsyncResource.serverTime;
     Service.recordTelemetryEvent("object", "method", "value", { foo: "bar" });
     let ping = await wait_for_ping(() => Service.sync(), true, true);
     equal(ping.events.length, 1);
     let [timestamp, category, method, object, value, extra] = ping.events[0];
@@ -646,17 +646,17 @@ add_task(async function test_events() {
 });
 
 add_task(async function test_invalid_events() {
   enableValidationPrefs();
 
   await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   async function checkNotRecorded(...args) {
     Service.recordTelemetryEvent.call(args);
     let ping = await wait_for_ping(() => Service.sync(), false, true);
     equal(ping.events, undefined);
   }
 
   await SyncTestingInfrastructure(server);
@@ -692,17 +692,17 @@ add_task(async function test_no_ping_for
   enableValidationPrefs();
 
   let telem = get_sync_test_telemetry();
   let oldSubmit = telem.submit;
 
   await Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
-  let server = serverForFoo(engine);
+  let server = await serverForFoo(engine);
 
   await SyncTestingInfrastructure(server);
   try {
     let submitPromise = new Promise(resolve => {
       telem.submit = function() {
         let result = oldSubmit.apply(this, arguments);
         resolve(result);
       };
--- a/services/sync/tps/extensions/tps/resource/tps.jsm
+++ b/services/sync/tps/extensions/tps/resource/tps.jsm
@@ -622,17 +622,17 @@ var TPS = {
       let collection = bookmarkEngine.itemSource();
       let collectionKey = bookmarkEngine.service.collectionKeys.keyForCollection(bookmarkEngine.name);
       collection.full = true;
       let items = [];
       let resp = await collection.get();
       for (let json of resp.obj) {
         let record = new collection._recordObj();
         record.deserialize(json);
-        record.decrypt(collectionKey);
+        await record.decrypt(collectionKey);
         items.push(record.cleartext);
       }
       return items;
     };
     let serverRecordDumpStr;
     try {
       Logger.logInfo("About to perform bookmark validation");
       let clientTree = await (PlacesUtils.promiseBookmarksTree("", {
--- a/toolkit/components/extensions/ExtensionStorageSync.jsm
+++ b/toolkit/components/extensions/ExtensionStorageSync.jsm
@@ -165,18 +165,18 @@ class EncryptionRemoteTransformer {
       throw new Error("Attempt to reencrypt??");
     }
     let id = await this.getEncodedRecordId(record);
     if (!id) {
       throw new Error("Record ID is missing or invalid");
     }
 
     let IV = WeaveCrypto.generateRandomIV();
-    let ciphertext = WeaveCrypto.encrypt(JSON.stringify(record),
-                                         keyBundle.encryptionKeyB64, IV);
+    let ciphertext = await WeaveCrypto.encrypt(JSON.stringify(record),
+                                               keyBundle.encryptionKeyB64, IV);
     let hmac = ciphertextHMAC(keyBundle, id, IV, ciphertext);
     const encryptedResult = {ciphertext, IV, hmac, id};
 
     // Copy over the _status field, so that we handle concurrency
     // headers (If-Match, If-None-Match) correctly.
     // DON'T copy over "deleted" status, because then we'd leak
     // plaintext deletes.
     encryptedResult._status = record._status == "deleted" ? "updated" : record._status;
@@ -199,18 +199,18 @@ class EncryptionRemoteTransformer {
     // Authenticate the encrypted blob with the expected HMAC
     let computedHMAC = ciphertextHMAC(keyBundle, record.id, record.IV, record.ciphertext);
 
     if (computedHMAC != record.hmac) {
       Utils.throwHMACMismatch(record.hmac, computedHMAC);
     }
 
     // Handle invalid data here. Elsewhere we assume that cleartext is an object.
-    let cleartext = WeaveCrypto.decrypt(record.ciphertext,
-                                        keyBundle.encryptionKeyB64, record.IV);
+    let cleartext = await WeaveCrypto.decrypt(record.ciphertext,
+                                              keyBundle.encryptionKeyB64, record.IV);
     let jsonResult = JSON.parse(cleartext);
     if (!jsonResult || typeof jsonResult !== "object") {
       throw new Error("Decryption failed: result is <" + jsonResult + ">, not an object.");
     }
 
     if (record.hasOwnProperty("last_modified")) {
       jsonResult.last_modified = record.last_modified;
     }
@@ -562,17 +562,17 @@ class CryptoCollection {
   async getKeyRing() {
     const cryptoKeyRecord = await this.getKeyRingRecord();
     const collectionKeys = new CollectionKeyManager();
     if (cryptoKeyRecord.keys) {
       collectionKeys.setContents(cryptoKeyRecord.keys, cryptoKeyRecord.last_modified);
     } else {
       // We never actually use the default key, so it's OK if we
       // generate one multiple times.
-      collectionKeys.generateDefaultKey();
+      await collectionKeys.generateDefaultKey();
     }
     // Pass through uuid field so that we can save it if we need to.
     collectionKeys.uuid = cryptoKeyRecord.uuid;
     return collectionKeys;
   }
 
   async updateKBHash(kbHash) {
     const coll = await this.getCollection();
--- a/toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
+++ b/toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
@@ -328,26 +328,26 @@ class KintoServer {
           last_modified: 1475161309026,
           id: "b09f1618-d789-302d-696e-74ec53ee18a8", // FIXME
         },
       }));
     });
   }
 
   // Utility function to install a keyring at the start of a test.
-  installKeyRing(fxaService, keysData, salts, etag, properties) {
+  async installKeyRing(fxaService, keysData, salts, etag, properties) {
     const keysRecord = {
       "id": "keys",
       "keys": keysData,
       "salts": salts,
       "last_modified": etag,
     };
     this.etag = etag;
     const transformer = new KeyRingEncryptionRemoteTransformer(fxaService);
-    this.encryptAndAddRecord(transformer, Object.assign({}, properties, {
+    return this.encryptAndAddRecord(transformer, Object.assign({}, properties, {
       collectionId: "storage-sync-crypto",
       data: keysRecord,
     }));
   }
 
   encryptAndAddRecord(transformer, properties) {
     return transformer.encode(properties.data).then(encrypted => {
       this.addRecord(Object.assign({}, properties, {data: encrypted}));
@@ -629,67 +629,67 @@ add_task(async function ensureCanSync_pu
   // correctly -- we keep the server key (since presumably it's
   // already been used to encrypt records) and we don't wipe out other
   // collections' keys.
   const extensionId = uuid();
   const extensionId2 = uuid();
   const extensionOnlyKey = uuid();
   const extensionOnlySalt = uuid();
   const DEFAULT_KEY = new BulkKeyBundle("[default]");
-  DEFAULT_KEY.generateRandom();
+  await DEFAULT_KEY.generateRandom();
   const RANDOM_KEY = new BulkKeyBundle(extensionId);
-  RANDOM_KEY.generateRandom();
+  await RANDOM_KEY.generateRandom();
   await withContextAndServer(async function(context, server) {
     await withSignedInUser(loggedInUser, async function(extensionStorageSync, fxaService) {
       // FIXME: generating a random salt probably shouldn't require a CryptoCollection?
       const cryptoCollection = new CryptoCollection(fxaService);
       const RANDOM_SALT = cryptoCollection.getNewSalt();
       await extensionStorageSync.cryptoCollection._clear();
       const keysData = {
         "default": DEFAULT_KEY.keyPairB64,
         "collections": {
           [extensionId]: RANDOM_KEY.keyPairB64,
         },
       };
       const saltData = {
         [extensionId]: RANDOM_SALT,
       };
-      server.installKeyRing(fxaService, keysData, saltData, 950, {
+      await server.installKeyRing(fxaService, keysData, saltData, 950, {
         predicate: appearsAt(900),
       });
 
       let collectionKeys = await extensionStorageSync.ensureCanSync([extensionId]);
       assertKeyRingKey(collectionKeys, extensionId, RANDOM_KEY);
 
       let posts = server.getPosts();
       equal(posts.length, 0,
             "ensureCanSync shouldn't push when the server keyring has the right key");
 
       // Another client generates a key for extensionId2
       const newKey = new BulkKeyBundle(extensionId2);
-      newKey.generateRandom();
+      await newKey.generateRandom();
       keysData.collections[extensionId2] = newKey.keyPairB64;
       saltData[extensionId2] = cryptoCollection.getNewSalt();
-      server.installKeyRing(fxaService, keysData, saltData, 1050, {
+      await server.installKeyRing(fxaService, keysData, saltData, 1050, {
         predicate: appearsAt(1000),
       });
 
       let newCollectionKeys = await extensionStorageSync.ensureCanSync([extensionId, extensionId2]);
       assertKeyRingKey(newCollectionKeys, extensionId2, newKey);
       assertKeyRingKey(newCollectionKeys, extensionId, RANDOM_KEY,
                        `ensureCanSync shouldn't lose the old key for ${extensionId}`);
 
       posts = server.getPosts();
       equal(posts.length, 0, "ensureCanSync shouldn't push when updating keys");
 
       // Another client generates a key, but not a salt, for extensionOnlyKey
       const onlyKey = new BulkKeyBundle(extensionOnlyKey);
-      onlyKey.generateRandom();
+      await onlyKey.generateRandom();
       keysData.collections[extensionOnlyKey] = onlyKey.keyPairB64;
-      server.installKeyRing(fxaService, keysData, saltData, 1150, {
+      await server.installKeyRing(fxaService, keysData, saltData, 1150, {
         predicate: appearsAt(1100),
       });
 
       let withNewKey = await extensionStorageSync.ensureCanSync([extensionId, extensionOnlyKey]);
       dump(`got ${JSON.stringify(withNewKey.asWBO().cleartext)}\n`);
       assertKeyRingKey(withNewKey, extensionOnlyKey, onlyKey);
       assertKeyRingKey(withNewKey, extensionId, RANDOM_KEY,
                        `ensureCanSync shouldn't lose the old key for ${extensionId}`);
@@ -700,17 +700,17 @@ add_task(async function ensureCanSync_pu
       // We don't a priori know what the new salt is
       dump(`${JSON.stringify(withNewKeyRecord)}\n`);
       ok(withNewKeyRecord.salts[extensionOnlyKey],
          `ensureCanSync should generate a salt for an extension that only had a key`);
 
-      // Another client generates a key, but not a salt, for extensionOnlyKey
+      // Another client generates a salt, but not a key, for extensionOnlySalt
       const newSalt = cryptoCollection.getNewSalt();
       saltData[extensionOnlySalt] = newSalt;
-      server.installKeyRing(fxaService, keysData, saltData, 1250, {
+      await server.installKeyRing(fxaService, keysData, saltData, 1250, {
         predicate: appearsAt(1200),
       });
 
       let withOnlySaltKey = await extensionStorageSync.ensureCanSync([extensionId, extensionOnlySalt]);
       assertKeyRingKey(withOnlySaltKey, extensionId, RANDOM_KEY,
                        `ensureCanSync shouldn't lose the old key for ${extensionId}`);
       // We don't a priori know what the new key is
       ok(withOnlySaltKey.hasKeysFor([extensionOnlySalt]),
@@ -728,34 +728,34 @@ add_task(async function ensureCanSync_pu
 add_task(async function ensureCanSync_handles_conflicts() {
   // Syncing is done through a pull followed by a push of any merged
   // changes. Accordingly, the only way to have a "true" conflict --
   // i.e. with the server rejecting a change -- is if
   // someone pushes changes between our pull and our push. Ensure that
   // if this happens, we still behave sensibly (keep the remote key).
   const extensionId = uuid();
   const DEFAULT_KEY = new BulkKeyBundle("[default]");
-  DEFAULT_KEY.generateRandom();
+  await DEFAULT_KEY.generateRandom();
   const RANDOM_KEY = new BulkKeyBundle(extensionId);
-  RANDOM_KEY.generateRandom();
+  await RANDOM_KEY.generateRandom();
   await withContextAndServer(async function(context, server) {
     await withSignedInUser(loggedInUser, async function(extensionStorageSync, fxaService) {
       // FIXME: generating salts probably shouldn't rely on a CryptoCollection
       const cryptoCollection = new CryptoCollection(fxaService);
       const RANDOM_SALT = cryptoCollection.getNewSalt();
       const keysData = {
         "default": DEFAULT_KEY.keyPairB64,
         "collections": {
           [extensionId]: RANDOM_KEY.keyPairB64,
         },
       };
       const saltData = {
         [extensionId]: RANDOM_SALT,
       };
-      server.installKeyRing(fxaService, keysData, saltData, 765, {conflict: true});
+      await server.installKeyRing(fxaService, keysData, saltData, 765, {conflict: true});
 
       await extensionStorageSync.cryptoCollection._clear();
 
       let collectionKeys = await extensionStorageSync.ensureCanSync([extensionId]);
       assertKeyRingKey(collectionKeys, extensionId, RANDOM_KEY,
                        `syncing keyring should keep the server key for ${extensionId}`);
 
       let posts = server.getPosts();