Bug 1325523 - Clean up after tests that modify tracker contents. r?markh draft
author Kit Cambridge <kit@yakshaving.ninja>
Thu, 29 Dec 2016 18:18:38 -0700
changeset 454833 49c002bd64a3a1c1e79c41cf4d06509af99e9dc5
parent 454641 79ef936724454728beeeba41fb76a61d02c7c226
child 540836 f52bc08a5ebf386ec587f6824e27cf57a8147238
push id 40070
push user bmo:kit@mozilla.com
push date Fri, 30 Dec 2016 16:06:45 +0000
reviewers markh
bugs 1325523
milestone 53.0a1
Bug 1325523 - Clean up after tests that modify tracker contents. r?markh MozReview-Commit-ID: 45g4n4UipTW
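Each test that adds entries to an engine's tracker now clears them again on the way out, so leftover changed IDs can't leak into later tests in the same file. As a minimal sketch of the shared cleanup shape (drawn from the test_syncengine_sync.js and test_telemetry.js helpers changed below; the callback-style tests instead call engine._tracker.clearChangedIDs() just before run_next_test()). This illustrates the pattern only; it is not an additional helper introduced by the patch:

// Reset prefs and caches as before, then drop any changed IDs the test
// left in the tracker so the next test starts from a clean slate.
async function cleanAndGo(engine, server) {
  Svc.Prefs.resetBranch("");
  Service.recordManager.clearCache();
  engine._tracker.clearChangedIDs();
  await promiseStopServer(server);
}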
services/sync/tests/unit/test_addons_engine.js
services/sync/tests/unit/test_clients_engine.js
services/sync/tests/unit/test_engine.js
services/sync/tests/unit/test_hmac_error.js
services/sync/tests/unit/test_syncengine_sync.js
services/sync/tests/unit/test_telemetry.js
--- a/services/sync/tests/unit/test_addons_engine.js
+++ b/services/sync/tests/unit/test_addons_engine.js
@@ -31,16 +31,17 @@ var tracker = engine._tracker;
 function advance_test() {
   reconciler._addons = {};
   reconciler._changes = [];
 
   let cb = Async.makeSpinningCallback();
   reconciler.saveState(null, cb);
   cb.wait();
 
+  tracker.clearChangedIDs();
   run_next_test();
 }
 
 // This is a basic sanity test for the unit test itself. If this breaks, the
 // add-ons API likely changed upstream.
 add_test(function test_addon_install() {
   _("Ensure basic add-on APIs work as expected.");
 
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -194,16 +194,17 @@ add_test(function test_properties() {
     equal(Svc.Prefs.get("clients.lastRecordUpload"), undefined);
     equal(engine.lastRecordUpload, 0);
 
     let now = Date.now();
     engine.lastRecordUpload = now / 1000;
     equal(engine.lastRecordUpload, Math.floor(now / 1000));
   } finally {
     Svc.Prefs.resetBranch("");
+    engine._tracker.clearChangedIDs();
     run_next_test();
   }
 });
 
 add_task(async function test_full_sync() {
   _("Ensure that Clients engine fetches all records for each sync.");
 
   let now = Date.now() / 1000;
@@ -354,16 +355,17 @@ add_test(function test_client_name_chang
   notEqual(initialName, engine.localName);
   equal(Object.keys(tracker.changedIDs).length, 1);
   ok(engine.localID in tracker.changedIDs);
   ok(tracker.score > initialScore);
   ok(tracker.score >= SCORE_INCREMENT_XLARGE);
 
   Svc.Obs.notify("weave:engine:stop-tracking");
 
+  engine._tracker.clearChangedIDs();
   run_next_test();
 });
 
 add_test(function test_send_command() {
   _("Verifies _sendCommandToClient puts commands in the outbound queue.");
 
   let store = engine._store;
   let tracker = engine._tracker;
@@ -385,16 +387,17 @@ add_test(function test_send_command() {
 
   let command = clientCommands[0];
   equal(command.command, action);
   equal(command.args.length, 2);
   deepEqual(command.args, args);
 
   notEqual(tracker.changedIDs[remoteId], undefined);
 
+  engine._tracker.clearChangedIDs();
   run_next_test();
 });
 
 add_test(function test_command_validation() {
   _("Verifies that command validation works properly.");
 
   let store = engine._store;
 
@@ -441,16 +444,17 @@ add_test(function test_command_validatio
       equal(clientCommands, undefined);
 
       if (store._tracker) {
         equal(engine._tracker[remoteId], undefined);
       }
     }
 
   }
+  engine._tracker.clearChangedIDs();
   run_next_test();
 });
 
 add_test(function test_command_duplication() {
   _("Ensures duplicate commands are detected and not added");
 
   let store = engine._store;
   let remoteId = Utils.makeGUID();
@@ -476,16 +480,17 @@ add_test(function test_command_duplicati
   engine.sendCommand(action, [{ x: "bar" }], remoteId);
 
   _("Make sure we spot a real dupe argument.");
   engine.sendCommand(action, [{ x: "bar" }], remoteId);
 
   clientCommands = engine._readCommands()[remoteId];
   equal(clientCommands.length, 2);
 
+  engine._tracker.clearChangedIDs();
   run_next_test();
 });
 
 add_test(function test_command_invalid_client() {
   _("Ensures invalid client IDs are caught");
 
   let id = Utils.makeGUID();
   let error;
@@ -493,16 +498,17 @@ add_test(function test_command_invalid_c
   try {
     engine.sendCommand("wipeAll", [], id);
   } catch (ex) {
     error = ex;
   }
 
   equal(error.message.indexOf("Unknown remote client ID: "), 0);
 
+  engine._tracker.clearChangedIDs();
   run_next_test();
 });
 
 add_test(function test_process_incoming_commands() {
   _("Ensures local commands are executed");
 
   engine.localCommands = [{ command: "logout", args: [] }];
 
@@ -510,16 +516,17 @@ add_test(function test_process_incoming_
 
   var handler = function() {
     Svc.Obs.remove(ev, handler);
 
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     engine._resetClient();
 
+    engine._tracker.clearChangedIDs();
     run_next_test();
   };
 
   Svc.Obs.add(ev, handler);
 
   // logout command causes processIncomingCommands to return explicit false.
   ok(!engine.processIncomingCommands());
 });
@@ -804,16 +811,17 @@ add_test(function test_send_uri_to_clien
   }
 
   equal(error.message.indexOf("Unknown remote client ID: "), 0);
 
   Svc.Prefs.resetBranch("");
   Service.recordManager.clearCache();
   engine._resetClient();
 
+  engine._tracker.clearChangedIDs();
   run_next_test();
 });
 
 add_test(function test_receive_display_uri() {
   _("Ensure processing of received 'displayURI' commands works.");
 
   // We don't set up WBOs and perform syncing because other tests verify
   // the command API works as advertised. This saves us a little work.
@@ -836,16 +844,17 @@ add_test(function test_receive_display_u
   let handler = function(subject, data) {
     Svc.Obs.remove(ev, handler);
 
     equal(subject[0].uri, uri);
     equal(subject[0].clientId, remoteId);
     equal(subject[0].title, title);
     equal(data, null);
 
+    engine._tracker.clearChangedIDs();
     run_next_test();
   };
 
   Svc.Obs.add(ev, handler);
 
   ok(engine.processIncomingCommands());
 
   Svc.Prefs.resetBranch("");
@@ -872,16 +881,17 @@ add_test(function test_optional_client_f
   ok(!!local.os);
   ok(!!local.appPackage);
   ok(!!local.application);
 
   // We don't currently populate device or formfactor.
   // See Bug 1100722, Bug 1100723.
 
   engine._resetClient();
+  engine._tracker.clearChangedIDs();
   run_next_test();
 });
 
 add_task(async function test_merge_commands() {
   _("Verifies local commands for remote clients are merged with the server's");
 
   let now = Date.now() / 1000;
   let contents = {
@@ -1428,16 +1438,17 @@ add_task(async function test_command_syn
     engine._sync();
     deepEqual(notifiedIds, ["fxa-fake-guid-00","fxa-fake-guid-01"]);
     ok(!notifiedIds.includes(engine.getClientFxaDeviceId(engine.localID)),
       "We never notify the local device");
 
   } finally {
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
+    engine._tracker.clearChangedIDs();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
--- a/services/sync/tests/unit/test_engine.js
+++ b/services/sync/tests/unit/test_engine.js
@@ -97,16 +97,17 @@ add_task(async function test_resetClient
 
   engine.resetClient();
   do_check_true(engine.wasReset);
   do_check_eq(engineObserver.topics[0], "weave:engine:reset-client:start");
   do_check_eq(engineObserver.topics[1], "weave:engine:reset-client:finish");
 
   engine.wasReset = false;
   engineObserver.reset();
+  engine._tracker.clearChangedIDs();
 });
 
 add_task(async function test_invalidChangedIDs() {
   _("Test that invalid changed IDs on disk don't end up live.");
   let engine = new SteamEngine("Steam", Service);
   let tracker = engine._tracker;
 
   await tracker._beforeSave();
@@ -115,16 +116,17 @@ add_task(async function test_invalidChan
 
   ok(!tracker._storage.dataReady);
   tracker.changedIDs.placeholder = true;
   deepEqual(tracker.changedIDs, { placeholder: true },
     "Accessing changed IDs should load changes from disk as a side effect");
   ok(tracker._storage.dataReady);
 
   do_check_true(tracker.changedIDs.placeholder);
+  engine._tracker.clearChangedIDs();
 });
 
 add_task(async function test_wipeClient() {
   _("Engine.wipeClient calls resetClient, wipes store, clears changed IDs");
   let engine = new SteamEngine("Steam", Service);
   do_check_false(engine.wasReset);
   do_check_false(engine._store.wasWiped);
   do_check_true(engine._tracker.addChangedID("a-changed-id"));
@@ -137,16 +139,17 @@ add_task(async function test_wipeClient(
   do_check_eq(engineObserver.topics[0], "weave:engine:wipe-client:start");
   do_check_eq(engineObserver.topics[1], "weave:engine:reset-client:start");
   do_check_eq(engineObserver.topics[2], "weave:engine:reset-client:finish");
   do_check_eq(engineObserver.topics[3], "weave:engine:wipe-client:finish");
 
   engine.wasReset = false;
   engine._store.wasWiped = false;
   engineObserver.reset();
+  engine._tracker.clearChangedIDs();
 });
 
 add_task(async function test_enabled() {
   _("Engine.enabled corresponds to preference");
   let engine = new SteamEngine("Steam", Service);
   try {
     do_check_false(engine.enabled);
     Svc.Prefs.set("engine.steam", true);
@@ -175,16 +178,17 @@ add_task(async function test_sync() {
     engine.sync();
     do_check_true(engine.wasSynced);
     do_check_eq(engineObserver.topics[0], "weave:engine:sync:start");
     do_check_eq(engineObserver.topics[1], "weave:engine:sync:finish");
   } finally {
     Svc.Prefs.resetBranch("");
     engine.wasSynced = false;
     engineObserver.reset();
+    engine._tracker.clearChangedIDs();
   }
 });
 
 add_task(async function test_disabled_no_track() {
   _("When an engine is disabled, its tracker is not tracking.");
   let engine = new SteamEngine("Steam", Service);
   let tracker = engine._tracker;
   do_check_eq(engine, tracker.engine);
@@ -203,9 +207,11 @@ add_task(async function test_disabled_no
   do_check_true(tracker._isTracking);
   do_check_empty(tracker.changedIDs);
 
   tracker.addChangedID("abcdefghijkl");
   do_check_true(0 < tracker.changedIDs["abcdefghijkl"]);
   Svc.Prefs.set("engine." + engine.prefName, false);
   do_check_false(tracker._isTracking);
   do_check_empty(tracker.changedIDs);
+
+  engine._tracker.clearChangedIDs();
 });
--- a/services/sync/tests/unit/test_hmac_error.js
+++ b/services/sync/tests/unit/test_hmac_error.js
@@ -226,16 +226,17 @@ add_task(async function hmac_error_durin
             // Two rotary items, one client record... no errors.
             do_check_eq(hmacErrorCount, 0)
 
             Svc.Obs.remove("weave:service:sync:finish", obs);
             Svc.Obs.remove("weave:service:sync:error", obs);
 
             Svc.Prefs.resetBranch("");
             Service.recordManager.clearCache();
+            engine._tracker.clearChangedIDs();
             server.stop(resolve);
           };
 
           Service.sync();
         },
         this);
       };
     };
--- a/services/sync/tests/unit/test_syncengine_sync.js
+++ b/services/sync/tests/unit/test_syncengine_sync.js
@@ -10,29 +10,30 @@ Cu.import("resource://services-sync/serv
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/rotaryengine.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 function makeRotaryEngine() {
   return new RotaryEngine(Service);
 }
 
-function clean() {
+function clean(engine) {
   Svc.Prefs.resetBranch("");
   Svc.Prefs.set("log.logger.engine.rotary", "Trace");
   Service.recordManager.clearCache();
+  engine._tracker.clearChangedIDs();
 }
 
-async function cleanAndGo(server) {
-  clean();
+async function cleanAndGo(engine, server) {
+  clean(engine);
   await promiseStopServer(server);
 }
 
-async function promiseClean(server) {
-  clean();
+async function promiseClean(engine, server) {
+  clean(engine);
   await promiseStopServer(server);
 }
 
 function configureService(server, username, password) {
   Service.clusterURL = server.baseURI;
 
   Service.identity.account = username || "foo";
   Service.identity.basicPassword = password || "password";
@@ -123,17 +124,17 @@ add_task(async function test_syncStartup
     do_check_eq(engineData.syncID, engine.syncID);
 
     // Sync was reset and server data was wiped
     do_check_eq(engine.lastSync, 0);
     do_check_eq(collection.payload("flying"), undefined);
     do_check_eq(collection.payload("scotsman"), undefined);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_syncStartup_serverHasNewerVersion() {
   _("SyncEngine._syncStartup ");
 
   let global = new ServerWBO('global', {engines: {rotary: {version: 23456}}});
   let server = httpd_setup({
@@ -152,17 +153,17 @@ add_task(async function test_syncStartup
     try {
       engine._syncStartup();
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.failureCode, VERSION_OUT_OF_DATE);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_syncStartup_syncIDMismatchResetsClient() {
   _("SyncEngine._syncStartup resets sync if syncIDs don't match");
 
   let server = sync_httpd_setup({});
@@ -188,17 +189,17 @@ add_task(async function test_syncStartup
 
     // The engine has assumed the server's syncID
     do_check_eq(engine.syncID, 'foobar');
 
     // Sync was reset
     do_check_eq(engine.lastSync, 0);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_emptyServer() {
   _("SyncEngine._processIncoming working with an empty server backend");
 
   let collection = new ServerCollection();
@@ -212,17 +213,17 @@ add_task(async function test_processInco
   let engine = makeRotaryEngine();
   try {
 
     // Merely ensure that this code path is run without any errors
     engine._processIncoming();
     do_check_eq(engine.lastSync, 0);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_createFromServer() {
   _("SyncEngine._processIncoming creates new records from server data");
 
   // Some server records that will be downloaded
@@ -273,17 +274,17 @@ add_task(async function test_processInco
     do_check_true(engine.lastModified > 0);
 
     // Local records have been created from the server data.
     do_check_eq(engine._store.items.flying, "LNER Class A3 4472");
     do_check_eq(engine._store.items.scotsman, "Flying Scotsman");
     do_check_eq(engine._store.items['../pathological'], "Pathological Case");
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_reconcile() {
   _("SyncEngine._processIncoming updates local records");
 
   let collection = new ServerCollection();
@@ -384,17 +385,17 @@ add_task(async function test_processInco
     // The incoming ID is preferred.
     do_check_eq(engine._store.items.original, undefined);
     do_check_eq(engine._store.items.duplication, "Original Entry");
     do_check_neq(engine._delete.ids.indexOf("original"), -1);
 
     // The 'nukeme' record marked as deleted is removed.
     do_check_eq(engine._store.items.nukeme, undefined);
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_processIncoming_reconcile_local_deleted() {
   _("Ensure local, duplicate ID is deleted on server.");
 
   // When a duplicate is resolved, the local ID (which is never taken) should
   // be deleted on the server.
@@ -420,17 +421,17 @@ add_task(async function test_processInco
 
   do_check_attribute_count(engine._store.items, 1);
   do_check_true("DUPE_INCOMING" in engine._store.items);
 
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
   do_check_neq(undefined, collection.wbo("DUPE_INCOMING"));
 
-  await cleanAndGo(server);
+  await cleanAndGo(engine, server);
 });
 
 add_task(async function test_processIncoming_reconcile_equivalent() {
   _("Ensure proper handling of incoming records that match local.");
 
   let [engine, server, user] = await createServerAndConfigureClient();
 
   let now = Date.now() / 1000 - 10;
@@ -443,17 +444,17 @@ add_task(async function test_processInco
 
   engine._store.items = {entry: "denomination"};
   do_check_true(engine._store.itemExists("entry"));
 
   engine._sync();
 
   do_check_attribute_count(engine._store.items, 1);
 
-  await cleanAndGo(server);
+  await cleanAndGo(engine, server);
 });
 
 add_task(async function test_processIncoming_reconcile_locally_deleted_dupe_new() {
   _("Ensure locally deleted duplicate record newer than incoming is handled.");
 
   // This is a somewhat complicated test. It ensures that if a client receives
   // a modified record for an item that is deleted locally but with a different
   // ID that the incoming record is ignored. This is a corner case for record
@@ -482,17 +483,17 @@ add_task(async function test_processInco
   do_check_empty(engine._store.items);
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
   wbo = collection.wbo("DUPE_INCOMING");
   do_check_neq(null, wbo);
   let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
   do_check_true(payload.deleted);
 
-  await cleanAndGo(server);
+  await cleanAndGo(engine, server);
 });
 
 add_task(async function test_processIncoming_reconcile_locally_deleted_dupe_old() {
   _("Ensure locally deleted duplicate record older than incoming is restored.");
 
   // This is similar to the above test except it tests the condition where the
   // incoming record is newer than the local deletion, therefore overriding it.
 
@@ -521,17 +522,17 @@ add_task(async function test_processInco
   do_check_eq("incoming", engine._store.items.DUPE_INCOMING);
 
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
   wbo = collection.wbo("DUPE_INCOMING");
   let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
   do_check_eq("incoming", payload.denomination);
 
-  await cleanAndGo(server);
+  await cleanAndGo(engine, server);
 });
 
 add_task(async function test_processIncoming_reconcile_changed_dupe() {
   _("Ensure that locally changed duplicate record is handled properly.");
 
   let [engine, server, user] = await createServerAndConfigureClient();
 
   let now = Date.now() / 1000 - 10;
@@ -558,17 +559,17 @@ add_task(async function test_processInco
   // have its payload set to what was in the local record.
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
   wbo = collection.wbo("DUPE_INCOMING");
   do_check_neq(undefined, wbo);
   let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
   do_check_eq("local", payload.denomination);
 
-  await cleanAndGo(server);
+  await cleanAndGo(engine, server);
 });
 
 add_task(async function test_processIncoming_reconcile_changed_dupe_new() {
   _("Ensure locally changed duplicate record older than incoming is ignored.");
 
   // This test is similar to the above except the incoming record is younger
   // than the local record. The incoming record should be authoritative.
   let [engine, server, user] = await createServerAndConfigureClient();
@@ -595,17 +596,17 @@ add_task(async function test_processInco
   // On the server, the local ID should be deleted and the incoming ID should
   // have its payload retained.
   let collection = server.getCollection(user, "rotary");
   do_check_eq(1, collection.count());
   wbo = collection.wbo("DUPE_INCOMING");
   do_check_neq(undefined, wbo);
   let payload = JSON.parse(JSON.parse(wbo.payload).ciphertext);
   do_check_eq("incoming", payload.denomination);
-  await cleanAndGo(server);
+  await cleanAndGo(engine, server);
 });
 
 add_task(async function test_processIncoming_mobile_batchSize() {
   _("SyncEngine._processIncoming doesn't fetch everything at once on mobile clients");
 
   Svc.Prefs.set("client.type", "mobile");
   Service.identity.username = "foo";
 
@@ -664,17 +665,17 @@ add_task(async function test_processInco
       do_check_eq(collection.get_log[i+1].limit, undefined);
       if (i < Math.floor(234 / MOBILE_BATCH_SIZE))
         do_check_eq(collection.get_log[i+1].ids.length, MOBILE_BATCH_SIZE);
       else
         do_check_eq(collection.get_log[i+1].ids.length, 234 % MOBILE_BATCH_SIZE);
     }
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_store_toFetch() {
   _("If processIncoming fails in the middle of a batch on mobile, state is saved in toFetch and lastSync.");
   Service.identity.username = "foo";
   Svc.Prefs.set("client.type", "mobile");
@@ -731,17 +732,17 @@ add_task(async function test_processInco
                 MOBILE_BATCH_SIZE * 2);
 
     // The third batch is stuck in toFetch. lastSync has been moved forward to
     // the last successful item's timestamp.
     do_check_eq(engine.toFetch.length, MOBILE_BATCH_SIZE);
     do_check_eq(engine.lastSync, collection.wbo("record-no-99").modified);
 
   } finally {
-    await promiseClean(server);
+    await promiseClean(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_resume_toFetch() {
   _("toFetch and previousFailed items left over from previous syncs are fetched on the next sync, along with new items.");
   Service.identity.username = "foo";
 
@@ -800,17 +801,17 @@ add_task(async function test_processInco
     do_check_eq(engine._store.items.flying, "LNER Class A3 4472");
     do_check_eq(engine._store.items.scotsman, "Flying Scotsman");
     do_check_eq(engine._store.items.rekolok, "Rekonstruktionslokomotive");
     do_check_eq(engine._store.items.failed0, "Record No. 0");
     do_check_eq(engine._store.items.failed1, "Record No. 1");
     do_check_eq(engine._store.items.failed2, "Record No. 2");
     do_check_eq(engine.previousFailed.length, 0);
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_applyIncomingBatchSize_smaller() {
   _("Ensure that a number of incoming items less than applyIncomingBatchSize is still applied.");
   Service.identity.username = "foo";
 
@@ -855,17 +856,17 @@ add_task(async function test_processInco
     // Records have been applied and the expected failures have failed.
     do_check_attribute_count(engine._store.items, APPLY_BATCH_SIZE - 1 - 2);
     do_check_eq(engine.toFetch.length, 0);
     do_check_eq(engine.previousFailed.length, 2);
     do_check_eq(engine.previousFailed[0], "record-no-0");
     do_check_eq(engine.previousFailed[1], "record-no-8");
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_applyIncomingBatchSize_multiple() {
   _("Ensure that incoming items are applied according to applyIncomingBatchSize.");
   Service.identity.username = "foo";
 
@@ -908,17 +909,17 @@ add_task(async function test_processInco
     engine._syncStartup();
     engine._processIncoming();
 
     // Records have been applied in 3 batches.
     do_check_eq(batchCalls, 3);
     do_check_attribute_count(engine._store.items, APPLY_BATCH_SIZE * 3);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_notify_count() {
   _("Ensure that failed records are reported only once.");
   Service.identity.username = "foo";
 
@@ -997,17 +998,17 @@ add_task(async function test_processInco
     do_check_eq(called, 2);
     do_check_eq(counts.failed, 1);
     do_check_eq(counts.applied, 3);
     do_check_eq(counts.newFailed, 0);
     do_check_eq(counts.succeeded, 2);
 
     Svc.Obs.remove("weave:engine:sync:applied", onApplied);
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_previousFailed() {
   _("Ensure that failed records are retried.");
   Service.identity.username = "foo";
   Svc.Prefs.set("client.type", "mobile");
@@ -1083,17 +1084,17 @@ add_task(async function test_processInco
     do_check_eq(engine.previousFailed[3], "record-no-9");
 
     // Refetched items that didn't fail the second time are in engine._store.items.
     do_check_eq(engine._store.items['record-no-4'], "Record No. 4");
     do_check_eq(engine._store.items['record-no-5'], "Record No. 5");
     do_check_eq(engine._store.items['record-no-12'], "Record No. 12");
     do_check_eq(engine._store.items['record-no-13'], "Record No. 13");
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_failed_records() {
   _("Ensure that failed records from _reconcile and applyIncomingBatch are refetched.");
   Service.identity.username = "foo";
 
@@ -1217,17 +1218,17 @@ add_task(async function test_processInco
 
     // If we're on mobile, that limit is used by default.
     _("Test batching with tiny mobile batch size.");
     Svc.Prefs.set("client.type", "mobile");
     engine.mobileGUIDFetchBatchSize = 2;
     do_check_eq(batchDownload(BOGUS_RECORDS.length), 4);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_processIncoming_decrypt_failed() {
   _("Ensure that records failing to decrypt are either replaced or refetched.");
 
   Service.identity.username = "foo";
@@ -1298,17 +1299,17 @@ add_task(async function test_processInco
     do_check_eq(engine.previousFailed[3], "nodecrypt2");
 
     // Ensure the observer was notified
     do_check_eq(observerData, engine.name);
     do_check_eq(observerSubject.applied, 2);
     do_check_eq(observerSubject.failed, 4);
 
   } finally {
-    await promiseClean(server);
+    await promiseClean(engine, server);
   }
 });
 
 
 add_task(async function test_uploadOutgoing_toEmptyServer() {
   _("SyncEngine._uploadOutgoing uploads new records to server");
 
   Service.identity.username = "foo";
@@ -1357,17 +1358,17 @@ add_task(async function test_uploadOutgo
     do_check_eq(JSON.parse(collection.wbo("scotsman").data.ciphertext).id,
                 "scotsman");
     do_check_eq(engine._tracker.changedIDs["scotsman"], undefined);
 
     // The 'flying' record wasn't marked so it wasn't uploaded
     do_check_eq(collection.payload("flying"), undefined);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_uploadOutgoing_huge() {
   Service.identity.username = "foo";
   let collection = new ServerCollection();
   collection._wbos.flying = new ServerWBO('flying');
@@ -1404,17 +1405,17 @@ add_task(async function test_uploadOutgo
     engine.trackRemainingChanges();
 
     // Check we didn't upload to the server
     do_check_eq(collection.payload("flying"), undefined);
     // And that we won't try to upload it again next time.
     do_check_eq(engine._tracker.changedIDs["flying"], undefined);
 
   } finally {
-    cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_uploadOutgoing_failed() {
   _("SyncEngine._uploadOutgoing doesn't clear the tracker of objects that failed to upload.");
 
   Service.identity.username = "foo";
@@ -1467,17 +1468,17 @@ add_task(async function test_uploadOutgo
     do_check_eq(engine._tracker.changedIDs['flying'], undefined);
 
     // The 'scotsman' and 'peppercorn' records couldn't be uploaded so
     // they weren't cleared from the tracker.
     do_check_eq(engine._tracker.changedIDs['scotsman'], SCOTSMAN_CHANGED);
     do_check_eq(engine._tracker.changedIDs['peppercorn'], PEPPERCORN_CHANGED);
 
   } finally {
-    await promiseClean(server);
+    await promiseClean(engine, server);
   }
 });
 
 /* A couple of "functional" tests to ensure we split records into appropriate
    POST requests. More comprehensive unit-tests for this "batching" are in
    test_postqueue.js.
 */
 add_task(async function test_uploadOutgoing_MAX_UPLOAD_RECORDS() {
@@ -1536,17 +1537,17 @@ add_task(async function test_uploadOutgo
     for (i = 0; i < 234; i++) {
       do_check_true(!!collection.payload('record-no-' + i));
     }
 
     // Ensure that the uploads were performed in batches of MAX_UPLOAD_RECORDS.
     do_check_eq(noOfUploads, Math.ceil(234/MAX_UPLOAD_RECORDS));
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_uploadOutgoing_largeRecords() {
   _("SyncEngine._uploadOutgoing throws on records larger than MAX_UPLOAD_BYTES");
 
   Service.identity.username = "foo";
   let collection = new ServerCollection();
@@ -1574,17 +1575,17 @@ add_task(async function test_uploadOutgo
     let error = null;
     try {
       engine._uploadOutgoing();
     } catch (e) {
       error = e;
     }
     ok(!!error);
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_syncFinish_noDelete() {
   _("SyncEngine._syncFinish resets tracker's score");
 
   let server = httpd_setup({});
@@ -1631,17 +1632,17 @@ add_task(async function test_syncFinish_
     do_check_eq(collection.payload("flying"), undefined);
     do_check_true(!!collection.payload("scotsman"));
     do_check_eq(collection.payload("rekolok"), undefined);
 
     // The deletion todo list has been reset.
     do_check_eq(engine._delete.ids, undefined);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_syncFinish_deleteLotsInBatches() {
   _("SyncEngine._syncFinish deletes server records in batches of 100 (list of record IDs).");
 
   Service.identity.username = "foo";
@@ -1702,17 +1703,17 @@ add_task(async function test_syncFinish_
 
     // The deletion was done in batches
     do_check_eq(noOfUploads, 2 + 1);
 
     // The deletion todo list has been reset.
     do_check_eq(engine._delete.ids, undefined);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 
 add_task(async function test_sync_partialUpload() {
   _("SyncEngine.sync() keeps changedIDs that couldn't be uploaded.");
 
   Service.identity.username = "foo";
@@ -1778,17 +1779,17 @@ add_task(async function test_sync_partia
       //   hard on the 3rd upload.
       if ((i == 23) || (i == 42) || (i >= 200))
         do_check_eq(engine._tracker.changedIDs[id], i);
       else
         do_check_false(id in engine._tracker.changedIDs);
     }
 
   } finally {
-    await promiseClean(server);
+    await promiseClean(engine, server);
   }
 });
 
 add_task(async function test_canDecrypt_noCryptoKeys() {
   _("SyncEngine.canDecrypt returns false if the engine fails to decrypt items on the server, e.g. due to a missing crypto key collection.");
   Service.identity.username = "foo";
 
   // Wipe collection keys so we can test the desired scenario.
@@ -1805,17 +1806,17 @@ add_task(async function test_canDecrypt_
 
   let syncTesting = await SyncTestingInfrastructure(server);
   let engine = makeRotaryEngine();
   try {
 
     do_check_false(engine.canDecrypt());
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_canDecrypt_true() {
   _("SyncEngine.canDecrypt returns true if the engine can decrypt the items on the server.");
   Service.identity.username = "foo";
 
   generateNewKeys(Service.collectionKeys);
@@ -1831,17 +1832,17 @@ add_task(async function test_canDecrypt_
 
   let syncTesting = await SyncTestingInfrastructure(server);
   let engine = makeRotaryEngine();
   try {
 
     do_check_true(engine.canDecrypt());
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 
 });
 
 add_task(async function test_syncapplied_observer() {
   Service.identity.username = "foo";
 
   const NUMBER_OF_RECORDS = 10;
@@ -1888,13 +1889,13 @@ add_task(async function test_syncapplied
     do_check_attribute_count(engine._store.items, 10);
 
     do_check_eq(numApplyCalls, 1);
     do_check_eq(engine_name, "rotary");
     do_check_eq(count.applied, 10);
 
     do_check_true(Service.scheduler.hasIncomingItems);
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
     Service.scheduler.hasIncomingItems = false;
     Svc.Obs.remove("weave:engine:sync:applied", onApplied);
   }
 });
--- a/services/sync/tests/unit/test_telemetry.js
+++ b/services/sync/tests/unit/test_telemetry.js
@@ -53,20 +53,21 @@ SteamEngine.prototype = {
 };
 
 function BogusEngine(service) {
   Engine.call(this, "bogus", service);
 }
 
 BogusEngine.prototype = Object.create(SteamEngine.prototype);
 
-async function cleanAndGo(server) {
+async function cleanAndGo(engine, server) {
   Svc.Prefs.resetBranch("");
   Svc.Prefs.set("log.logger.engine.rotary", "Trace");
   Service.recordManager.clearCache();
+  engine._tracker.clearChangedIDs();
   await promiseStopServer(server);
 }
 
 // Avoid addon manager complaining about not being initialized
 Service.engineManager.unregister("addons");
 
 add_identity_test(this, async function test_basic() {
   let helper = track_collections_helper();
@@ -134,17 +135,17 @@ add_task(async function test_processInco
     equal(ping.engines[0].name, "bookmarks");
     deepEqual(ping.engines[0].failureReason, {
       name: "othererror",
       error: "error.engine.reason.record_download_fail"
     });
 
   } finally {
     store.wipe();
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_uploading() {
   let engine = new BookmarksEngine(Service);
   let store  = engine._store;
   let server = serverForUsers({"foo": "password"}, {
     meta: {global: {engines: {bookmarks: {version: engine.version,
@@ -182,17 +183,17 @@ add_task(async function test_uploading()
     equal(ping.engines.length, 1);
     equal(ping.engines[0].name, "bookmarks");
     equal(ping.engines[0].outgoing.length, 1);
     ok(!!ping.engines[0].incoming);
 
   } finally {
     // Clean up.
     store.wipe();
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_upload_failed() {
   Service.identity.username = "foo";
   let collection = new ServerCollection();
   collection._wbos.flying = new ServerWBO('flying');
 
@@ -232,17 +233,17 @@ add_task(async function test_upload_fail
 
     ping = await sync_engine_and_validate_telem(engine, true);
     ok(!!ping);
     equal(ping.engines.length, 1);
     equal(ping.engines[0].incoming.reconciled, 1);
     deepEqual(ping.engines[0].outgoing, [{ sent: 2, failed: 2 }]);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_sync_partialUpload() {
   Service.identity.username = "foo";
 
   let collection = new ServerCollection();
   let server = sync_httpd_setup({
@@ -312,17 +313,17 @@ add_task(async function test_sync_partia
       failed: 1,
       newFailed: 1,
       reconciled: 232
     });
     ok(!ping.engines[0].outgoing);
     deepEqual(ping.engines[0].failureReason, uploadFailureError);
 
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_generic_engine_fail() {
   Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let store  = engine._store;
@@ -339,17 +340,17 @@ add_task(async function test_generic_eng
     let ping = await sync_and_validate_telem(true);
     equal(ping.status.service, SYNC_FAILED_PARTIAL);
     deepEqual(ping.engines.find(e => e.name === "steam").failureReason, {
       name: "unexpectederror",
       error: String(e)
     });
   } finally {
     Service.engineManager.unregister(engine);
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_engine_fail_ioerror() {
   Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let store  = engine._store;
@@ -375,17 +376,17 @@ add_task(async function test_engine_fail
     equal(ping.status.service, SYNC_FAILED_PARTIAL);
     let failureReason = ping.engines.find(e => e.name === "steam").failureReason;
     equal(failureReason.name, "unexpectederror");
     // ensure the profile dir in the exception message has been stripped.
     ok(!failureReason.error.includes(OS.Constants.Path.profileDir), failureReason.error);
     ok(failureReason.error.includes("[profileDir]"), failureReason.error);
   } finally {
     Service.engineManager.unregister(engine);
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_initial_sync_engines() {
   Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let store  = engine._store;
@@ -412,17 +413,17 @@ add_task(async function test_initial_syn
       }
       greaterOrEqual(e.took, 1);
       ok(!!e.outgoing)
       equal(e.outgoing.length, 1);
       notEqual(e.outgoing[0].sent, undefined);
       equal(e.outgoing[0].failed, undefined);
     }
   } finally {
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_nserror() {
   Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let store  = engine._store;
@@ -441,17 +442,17 @@ add_task(async function test_nserror() {
     });
     let enginePing = ping.engines.find(e => e.name === "steam");
     deepEqual(enginePing.failureReason, {
       name: "nserror",
       code: Cr.NS_ERROR_UNKNOWN_HOST
     });
   } finally {
     Service.engineManager.unregister(engine);
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_identity_test(this, async function test_discarding() {
   let helper = track_collections_helper();
   let upd = helper.with_updated_collection;
   let telem = get_sync_test_telemetry();
   telem.maxPayloadCount = 2;
@@ -507,17 +508,17 @@ add_task(async function test_no_foreign_
   engine._errToThrow = new Error("Oh no!");
   await SyncTestingInfrastructure(server);
   try {
     let ping = await sync_and_validate_telem(true);
     equal(ping.status.service, SYNC_FAILED_PARTIAL);
     ok(ping.engines.every(e => e.name !== "bogus"));
   } finally {
     Service.engineManager.unregister(engine);
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_sql_error() {
   Service.engineManager.register(SteamEngine);
   let engine = Service.engineManager.get("steam");
   engine.enabled = true;
   let store  = engine._store;
@@ -533,17 +534,17 @@ add_task(async function test_sql_error()
     Async.querySpinningly(db.createAsyncStatement("select bar from foo"));
   };
   try {
     let ping = await sync_and_validate_telem(true);
     let enginePing = ping.engines.find(e => e.name === "steam");
     deepEqual(enginePing.failureReason, { name: "sqlerror", code: 1 });
   } finally {
     Service.engineManager.unregister(engine);
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
 
 add_task(async function test_no_foreign_engines_in_success_ping() {
   Service.engineManager.register(BogusEngine);
   let engine = Service.engineManager.get("bogus");
   engine.enabled = true;
   let store  = engine._store;
@@ -553,11 +554,11 @@ add_task(async function test_no_foreign_
   });
 
   await SyncTestingInfrastructure(server);
   try {
     let ping = await sync_and_validate_telem();
     ok(ping.engines.every(e => e.name !== "bogus"));
   } finally {
     Service.engineManager.unregister(engine);
-    await cleanAndGo(server);
+    await cleanAndGo(engine, server);
   }
 });
\ No newline at end of file