--- a/services/sync/modules-testing/utils.js
+++ b/services/sync/modules-testing/utils.js
@@ -11,17 +11,18 @@ this.EXPORTED_SYMBOLS = [
"ensureLegacyIdentityManager",
"setBasicCredentials",
"makeIdentityConfig",
"makeFxAccountsInternalMock",
"configureFxAccountIdentity",
"configureIdentity",
"SyncTestingInfrastructure",
"waitForZeroTimer",
- "Promise", // from a module import
+ "promiseZeroTimer",
+ "promiseNamedTimer",
"add_identity_test",
"MockFxaStorageManager",
"AccountState", // from a module import
"sumHistogram",
];
var {utils: Cu} = Components;
@@ -31,17 +32,16 @@ Cu.import("resource://services-common/ut
Cu.import("resource://services-crypto/utils.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://services-sync/browserid_identity.js");
Cu.import("resource://testing-common/services/common/logging.js");
Cu.import("resource://testing-common/services/sync/fakeservices.js");
Cu.import("resource://gre/modules/FxAccounts.jsm");
Cu.import("resource://gre/modules/FxAccountsClient.jsm");
Cu.import("resource://gre/modules/FxAccountsCommon.js");
-Cu.import("resource://gre/modules/Promise.jsm");
Cu.import("resource://gre/modules/Services.jsm");
// and grab non-exported stuff via a backstage pass.
const {AccountState} = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
// A mock "storage manager" for FxAccounts that doesn't actually write anywhere.
function MockFxaStorageManager() {
}
@@ -91,16 +91,28 @@ this.waitForZeroTimer = function waitFor
CommonUtils.nextTick(wait);
return;
}
callback();
}
CommonUtils.namedTimer(wait, 150, {}, "timer");
}
+this.promiseZeroTimer = function() {
+ return new Promise(resolve => {
+ waitForZeroTimer(resolve);
+ });
+}
+
+this.promiseNamedTimer = function(wait, thisObj, name) {
+ return new Promise(resolve => {
+ Utils.namedTimer(resolve, wait, thisObj, name);
+ });
+}
+
/**
* Return true if Sync is configured with the "legacy" identity provider.
*/
this.isConfiguredWithLegacyIdentity = function() {
let ns = {};
Cu.import("resource://services-sync/service.js", ns);
// We can't use instanceof as BrowserIDManager (the "other" identity) inherits
@@ -232,34 +244,31 @@ this.configureFxAccountIdentity = functi
authService._fxaService = fxa;
authService._tokenServerClient = mockTSC;
// Set the "account" of the browserId manager to be the "email" of the
// logged in user of the mockFXA service.
authService._signedInUser = config.fxaccount.user;
authService._account = config.fxaccount.user.email;
}
-this.configureIdentity = function(identityOverrides) {
+this.configureIdentity = async function(identityOverrides) {
let config = makeIdentityConfig(identityOverrides);
let ns = {};
Cu.import("resource://services-sync/service.js", ns);
if (ns.Service.identity instanceof BrowserIDManager) {
// do the FxAccounts thang...
configureFxAccountIdentity(ns.Service.identity, config);
- return ns.Service.identity.initializeWithCurrentIdentity().then(() => {
- // need to wait until this identity manager is readyToAuthenticate.
- return ns.Service.identity.whenReadyToAuthenticate.promise;
- });
+ await ns.Service.identity.initializeWithCurrentIdentity();
+ // need to wait until this identity manager is readyToAuthenticate.
+ await ns.Service.identity.whenReadyToAuthenticate.promise;
+ return;
}
// old style identity provider.
setBasicCredentials(config.username, config.sync.password, config.sync.syncKey);
- let deferred = Promise.defer();
- deferred.resolve();
- return deferred.promise;
}
this.SyncTestingInfrastructure = function (server, username, password, syncKey) {
let ns = {};
Cu.import("resource://services-sync/service.js", ns);
ensureLegacyIdentityManager();
let config = makeIdentityConfig();
@@ -315,29 +324,29 @@ this.encryptPayload = function encryptPa
this.add_identity_test = function(test, testFunction) {
function note(what) {
let msg = "running test " + testFunction.name + " with " + what + " identity manager";
test.do_print(msg);
}
let ns = {};
Cu.import("resource://services-sync/service.js", ns);
// one task for the "old" identity manager.
- test.add_task(function* () {
+ test.add_task(async function() {
note("sync");
let oldIdentity = Status._authManager;
ensureLegacyIdentityManager();
- yield testFunction();
+ await testFunction();
Status.__authManager = ns.Service.identity = oldIdentity;
});
// another task for the FxAccounts identity manager.
- test.add_task(function* () {
+ test.add_task(async function() {
note("FxAccounts");
let oldIdentity = Status._authManager;
Status.__authManager = ns.Service.identity = new BrowserIDManager();
- yield testFunction();
+ await testFunction();
Status.__authManager = ns.Service.identity = oldIdentity;
});
}
this.sumHistogram = function(name, options = {}) {
let histogram = options.key ? Services.telemetry.getKeyedHistogramById(name) :
Services.telemetry.getHistogramById(name);
let snapshot = histogram.snapshot(options.key);
--- a/services/sync/tests/unit/head_errorhandler_common.js
+++ b/services/sync/tests/unit/head_errorhandler_common.js
@@ -87,25 +87,21 @@ const EHTestsCommon = {
// Make sync fail due to changed credentials. We simply re-encrypt
// the keys with a different Sync Key, without changing the local one.
let newSyncKeyBundle = new SyncKeyBundle("johndoe", "23456234562345623456234562");
let keys = Service.collectionKeys.asWBO();
keys.encrypt(newSyncKeyBundle);
keys.upload(Service.resource(Service.cryptoKeysURL));
},
- setUp(server) {
- return configureIdentity({ username: "johndoe" }).then(
- () => {
- Service.serverURL = server.baseURI + "/";
- Service.clusterURL = server.baseURI + "/";
- }
- ).then(
- () => EHTestsCommon.generateAndUploadKeys()
- );
+ async setUp(server) {
+ await configureIdentity({ username: "johndoe" });
+ Service.serverURL = server.baseURI + "/";
+ Service.clusterURL = server.baseURI + "/";
+    return EHTestsCommon.generateAndUploadKeys();
},
generateAndUploadKeys() {
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);
return serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
}
--- a/services/sync/tests/unit/head_helpers.js
+++ b/services/sync/tests/unit/head_helpers.js
@@ -431,16 +431,37 @@ function sync_engine_and_validate_telem(
if (caughtError) {
Svc.Obs.notify("weave:service:sync:error", caughtError);
} else {
Svc.Obs.notify("weave:service:sync:finish");
}
});
}
+// Returns a promise that resolves once the specified observer notification
+// has fired.
+function promiseOneObserver(topic) {
+  return new Promise(resolve => {
+ let observer = function(subject, data) {
+ Svc.Obs.remove(topic, observer);
+ resolve({ subject: subject, data: data });
+ }
+    Svc.Obs.add(topic, observer);
+ });
+}
+
+function promiseStopServer(server) {
+ return new Promise(resolve => server.stop(resolve));
+}
+
+function promiseNextTick() {
+ return new Promise(resolve => {
+ Utils.nextTick(resolve);
+ });
+}
// Avoid an issue where `client.name2` containing unicode characters causes
// a number of tests to fail, due to them assuming that we do not need to utf-8
// encode or decode data sent through the mocked server (see bug 1268912).
Utils.getDefaultDeviceName = function() {
return "Test device name";
};
--- a/services/sync/tests/unit/test_bookmark_duping.js
+++ b/services/sync/tests/unit/test_bookmark_duping.js
@@ -18,26 +18,16 @@ const bms = PlacesUtils.bookmarks;
Service.engineManager.register(BookmarksEngine);
const engine = new BookmarksEngine(Service);
const store = engine._store;
store._log.level = Log.Level.Trace;
engine._log.level = Log.Level.Trace;
-function promiseOneObserver(topic) {
- return new Promise((resolve, reject) => {
- let observer = function(subject, topic, data) {
- Services.obs.removeObserver(observer, topic);
- resolve({ subject: subject, data: data });
- }
- Services.obs.addObserver(observer, topic, false);
- });
-}
-
function setup() {
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bookmarks: {version: engine.version,
syncID: engine.syncID}}}},
bookmarks: {},
});
generateNewKeys(Service.collectionKeys);
@@ -46,24 +36,24 @@ function setup() {
let collection = server.user("foo").collection("bookmarks");
Svc.Obs.notify("weave:engine:start-tracking"); // We skip usual startup...
return { server, collection };
}
-function* cleanup(server) {
+async function cleanup(server) {
Svc.Obs.notify("weave:engine:stop-tracking");
Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", true);
let promiseStartOver = promiseOneObserver("weave:service:start-over:finish");
Service.startOver();
- yield promiseStartOver;
- yield new Promise(resolve => server.stop(resolve));
- yield bms.eraseEverything();
+ await promiseStartOver;
+ await promiseStopServer(server);
+ await bms.eraseEverything();
}
function getFolderChildrenIDs(folderId) {
let index = 0;
let result = [];
while (true) {
let childId = bms.getIdForItemAt(folderId, index);
if (childId == -1) {
@@ -89,25 +79,25 @@ function createBookmark(parentId, url, t
}
function getServerRecord(collection, id) {
let wbo = collection.get({ full: true, ids: [id] });
// Whew - lots of json strings inside strings.
return JSON.parse(JSON.parse(JSON.parse(wbo).payload).ciphertext);
}
-function* promiseNoLocalItem(guid) {
+async function promiseNoLocalItem(guid) {
// Check there's no item with the specified guid.
- let got = yield bms.fetch({ guid });
+ let got = await bms.fetch({ guid });
ok(!got, `No record remains with GUID ${guid}`);
// and while we are here ensure the places cache doesn't still have it.
- yield Assert.rejects(PlacesUtils.promiseItemId(guid));
+ await Assert.rejects(PlacesUtils.promiseItemId(guid));
}
-function* validate(collection, expectedFailures = []) {
+async function validate(collection, expectedFailures = []) {
let validator = new BookmarkValidator();
let records = collection.payloads();
let problems = validator.inspectServerRecords(records).problemData;
// all non-zero problems.
let summary = problems.getSummary().filter(prob => prob.count != 0);
// split into 2 arrays - expected and unexpected.
@@ -126,23 +116,23 @@ function* validate(collection, expectedF
}
if (unexpected.length || expected.length != expectedFailures.length) {
do_print("Validation failed:");
do_print(JSON.stringify(summary));
// print the entire validator output as it has IDs etc.
do_print(JSON.stringify(problems, undefined, 2));
// All server records and the entire bookmark tree.
do_print("Server records:\n" + JSON.stringify(collection.payloads(), undefined, 2));
- let tree = yield PlacesUtils.promiseBookmarksTree("", { includeItemIds: true });
+ let tree = await PlacesUtils.promiseBookmarksTree("", { includeItemIds: true });
do_print("Local bookmark tree:\n" + JSON.stringify(tree, undefined, 2));
ok(false);
}
}
-add_task(function* test_dupe_bookmark() {
+add_task(async function test_dupe_bookmark() {
_("Ensure that a bookmark we consider a dupe is handled correctly.");
let { server, collection } = this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
@@ -168,32 +158,32 @@ add_task(function* test_dupe_bookmark()
_("Syncing so new dupe record is processed");
engine.lastSync = engine.lastSync - 0.01;
engine.sync();
// We should have logically deleted the dupe record.
equal(collection.count(), 7);
ok(getServerRecord(collection, bmk1_guid).deleted);
// and physically removed from the local store.
- yield promiseNoLocalItem(bmk1_guid);
+ await promiseNoLocalItem(bmk1_guid);
// Parent should still only have 1 item.
equal(getFolderChildrenIDs(folder1_id).length, 1);
// The parent record on the server should now reference the new GUID and not the old.
let serverRecord = getServerRecord(collection, folder1_guid);
ok(!serverRecord.children.includes(bmk1_guid));
ok(serverRecord.children.includes(newGUID));
// and a final sanity check - use the validator
- yield validate(collection);
+ await validate(collection);
} finally {
- yield cleanup(server);
+ await cleanup(server);
}
});
-add_task(function* test_dupe_reparented_bookmark() {
+add_task(async function test_dupe_reparented_bookmark() {
_("Ensure that a bookmark we consider a dupe from a different parent is handled correctly");
let { server, collection } = this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
@@ -226,40 +216,40 @@ add_task(function* test_dupe_reparented_
_("Syncing so new dupe record is processed");
engine.lastSync = engine.lastSync - 0.01;
engine.sync();
// We should have logically deleted the dupe record.
equal(collection.count(), 8);
ok(getServerRecord(collection, bmk1_guid).deleted);
// and physically removed from the local store.
- yield promiseNoLocalItem(bmk1_guid);
+ await promiseNoLocalItem(bmk1_guid);
// The original folder no longer has the item
equal(getFolderChildrenIDs(folder1_id).length, 0);
// But the second dupe folder does.
equal(getFolderChildrenIDs(folder2_id).length, 1);
// The record for folder1 on the server should reference neither old or new GUIDs.
let serverRecord1 = getServerRecord(collection, folder1_guid);
ok(!serverRecord1.children.includes(bmk1_guid));
ok(!serverRecord1.children.includes(newGUID));
// The record for folder2 on the server should only reference the new new GUID.
let serverRecord2 = getServerRecord(collection, folder2_guid);
ok(!serverRecord2.children.includes(bmk1_guid));
ok(serverRecord2.children.includes(newGUID));
// and a final sanity check - use the validator
- yield validate(collection);
+ await validate(collection);
} finally {
- yield cleanup(server);
+ await cleanup(server);
}
});
-add_task(function* test_dupe_reparented_locally_changed_bookmark() {
+add_task(async function test_dupe_reparented_locally_changed_bookmark() {
_("Ensure that a bookmark with local changes we consider a dupe from a different parent is handled correctly");
let { server, collection } = this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
@@ -288,54 +278,54 @@ add_task(function* test_dupe_reparented_
};
collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 10);
// Make a change to the bookmark that's a dupe, and set the modification
// time further in the future than the incoming record. This will cause
// us to issue the infamous "DATA LOSS" warning in the logs but cause us
// to *not* apply the incoming record.
- yield PlacesTestUtils.setBookmarkSyncFields({
+ await PlacesTestUtils.setBookmarkSyncFields({
guid: bmk1_guid,
syncChangeCounter: 1,
lastModified: Date.now() + 60000,
});
_("Syncing so new dupe record is processed");
engine.lastSync = engine.lastSync - 0.01;
engine.sync();
// We should have logically deleted the dupe record.
equal(collection.count(), 8);
ok(getServerRecord(collection, bmk1_guid).deleted);
// and physically removed from the local store.
- yield promiseNoLocalItem(bmk1_guid);
+ await promiseNoLocalItem(bmk1_guid);
// The original folder still longer has the item
equal(getFolderChildrenIDs(folder1_id).length, 1);
// The second folder does not.
equal(getFolderChildrenIDs(folder2_id).length, 0);
// The record for folder1 on the server should reference only the GUID.
let serverRecord1 = getServerRecord(collection, folder1_guid);
ok(!serverRecord1.children.includes(bmk1_guid));
ok(serverRecord1.children.includes(newGUID));
// The record for folder2 on the server should reference nothing.
let serverRecord2 = getServerRecord(collection, folder2_guid);
ok(!serverRecord2.children.includes(bmk1_guid));
ok(!serverRecord2.children.includes(newGUID));
// and a final sanity check - use the validator
- yield validate(collection);
+ await validate(collection);
} finally {
- yield cleanup(server);
+ await cleanup(server);
}
});
-add_task(function* test_dupe_reparented_to_earlier_appearing_parent_bookmark() {
+add_task(async function test_dupe_reparented_to_earlier_appearing_parent_bookmark() {
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"appears in the same sync before the dupe item");
let { server, collection } = this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
@@ -396,23 +386,23 @@ add_task(function* test_dupe_reparented_
// Everything should be parented correctly.
equal(getFolderChildrenIDs(folder1_id).length, 0);
let newParentID = store.idForGUID(newParentGUID);
let newID = store.idForGUID(newGUID);
deepEqual(getFolderChildrenIDs(newParentID), [newID]);
// Make sure the validator thinks everything is hunky-dory.
- yield validate(collection);
+ await validate(collection);
} finally {
- yield cleanup(server);
+ await cleanup(server);
}
});
-add_task(function* test_dupe_reparented_to_later_appearing_parent_bookmark() {
+add_task(async function test_dupe_reparented_to_later_appearing_parent_bookmark() {
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"doesn't exist locally as we process the child, but does appear in the same sync");
let { server, collection } = this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
@@ -473,23 +463,23 @@ add_task(function* test_dupe_reparented_
// The intended parent did end up existing, so it should be parented
// correctly after de-duplication.
equal(getFolderChildrenIDs(folder1_id).length, 0);
let newParentID = store.idForGUID(newParentGUID);
let newID = store.idForGUID(newGUID);
deepEqual(getFolderChildrenIDs(newParentID), [newID]);
// Make sure the validator thinks everything is hunky-dory.
- yield validate(collection);
+ await validate(collection);
} finally {
- yield cleanup(server);
+ await cleanup(server);
}
});
-add_task(function* test_dupe_reparented_to_future_arriving_parent_bookmark() {
+add_task(async function test_dupe_reparented_to_future_arriving_parent_bookmark() {
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"doesn't exist locally and doesn't appear in this Sync is handled correctly");
let { server, collection } = this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
@@ -524,17 +514,17 @@ add_task(function* test_dupe_reparented_
_("Syncing so new dupe record is processed");
engine.lastSync = engine.lastSync - 0.01;
engine.sync();
// We should have logically deleted the dupe record.
equal(collection.count(), 8);
ok(getServerRecord(collection, bmk1_guid).deleted);
// and physically removed from the local store.
- yield promiseNoLocalItem(bmk1_guid);
+ await promiseNoLocalItem(bmk1_guid);
// The intended parent doesn't exist, so it remains in the original folder
equal(getFolderChildrenIDs(folder1_id).length, 1);
// The record for folder1 on the server should reference the new GUID.
let serverRecord1 = getServerRecord(collection, folder1_guid);
ok(!serverRecord1.children.includes(bmk1_guid));
ok(serverRecord1.children.includes(newGUID));
@@ -544,17 +534,17 @@ add_task(function* test_dupe_reparented_
newParentGUID);
// Check the validator. Sadly, this is known to cause a mismatch between
// the server and client views of the tree.
let expected = [
// We haven't fixed the incoming record that referenced the missing parent.
{ name: "orphans", count: 1 },
];
- yield validate(collection, expected);
+ await validate(collection, expected);
// Now have the parent magically appear in a later sync - but
// it appears as being in a different parent from our existing "Folder 1",
// so the folder itself isn't duped.
collection.insert(newParentGUID, encryptPayload({
id: newParentGUID,
type: "folder",
title: "Folder 1",
@@ -592,24 +582,24 @@ add_task(function* test_dupe_reparented_
// * Our original Folder1 was updated to include newGUID when it
// originally de-deuped and couldn't find the parent.
// * When the parent *did* eventually arrive we used the parent annotation
// to correctly reparent - but that reparenting process does not change
// the server record.
// Hence, newGUID is a child of both those server records :(
{ name: "multipleParents", count: 1 },
];
- yield validate(collection, expected);
+ await validate(collection, expected);
} finally {
- yield cleanup(server);
+ await cleanup(server);
}
});
-add_task(function* test_dupe_empty_folder() {
+add_task(async function test_dupe_empty_folder() {
_("Ensure that an empty folder we consider a dupe is handled correctly.");
// Empty folders aren't particularly interesting in practice (as that seems
// an edge-case) but duping folders with items is broken - bug 1293163.
let { server, collection } = this.setup();
try {
// The folder we will end up duping away.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
@@ -629,20 +619,20 @@ add_task(function* test_dupe_empty_folde
parentid: "toolbar",
children: [],
}), Date.now() / 1000 + 10);
_("Syncing so new dupe records are processed");
engine.lastSync = engine.lastSync - 0.01;
engine.sync();
- yield validate(collection);
+ await validate(collection);
// Collection now has one additional record - the logically deleted dupe.
equal(collection.count(), 6);
// original folder should be logically deleted.
ok(getServerRecord(collection, folder1_guid).deleted);
- yield promiseNoLocalItem(folder1_guid);
+ await promiseNoLocalItem(folder1_guid);
} finally {
- yield cleanup(server);
+ await cleanup(server);
}
});
// XXX - TODO - folders with children. Bug 1293163
--- a/services/sync/tests/unit/test_bookmark_engine.js
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -6,31 +6,30 @@ Cu.import("resource://gre/modules/Places
Cu.import("resource://gre/modules/BookmarkJSONUtils.jsm");
Cu.import("resource://gre/modules/Log.jsm");
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/engines/bookmarks.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");
-Cu.import("resource://gre/modules/Promise.jsm");
initTestLogging("Trace");
Service.engineManager.register(BookmarksEngine);
-function* assertChildGuids(folderGuid, expectedChildGuids, message) {
- let tree = yield PlacesUtils.promiseBookmarksTree(folderGuid);
+async function assertChildGuids(folderGuid, expectedChildGuids, message) {
+ let tree = await PlacesUtils.promiseBookmarksTree(folderGuid);
let childGuids = tree.children.map(child => child.guid);
deepEqual(childGuids, expectedChildGuids, message);
}
-function* fetchAllSyncIds() {
- let db = yield PlacesUtils.promiseDBConnection();
- let rows = yield db.executeCached(`
+async function fetchAllSyncIds() {
+ let db = await PlacesUtils.promiseDBConnection();
+ let rows = await db.executeCached(`
WITH RECURSIVE
syncedItems(id, guid) AS (
SELECT b.id, b.guid FROM moz_bookmarks b
WHERE b.guid IN ('menu________', 'toolbar_____', 'unfiled_____',
'mobile______')
UNION ALL
SELECT b.id, b.guid FROM moz_bookmarks b
JOIN syncedItems s ON b.parent = s.id
@@ -40,17 +39,17 @@ function* fetchAllSyncIds() {
for (let row of rows) {
let syncId = PlacesSyncUtils.bookmarks.guidToSyncId(
row.getResultByName("guid"));
syncIds.add(syncId);
}
return syncIds;
}
-add_task(function* test_delete_invalid_roots_from_server() {
+add_task(async function test_delete_invalid_roots_from_server() {
_("Ensure that we delete the Places and Reading List roots from the server.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
let tracker = engine._tracker;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
@@ -80,51 +79,51 @@ add_task(function* test_delete_invalid_r
newBmk.title = "Get Firefox!";
newBmk.parentName = "Bookmarks Toolbar";
newBmk.parentid = "toolbar";
collection.insert(newBmk.id, encryptPayload(newBmk.cleartext));
deepEqual(collection.keys().sort(), ["places", "readinglist", listBmk.id, newBmk.id].sort(),
"Should store Places root, reading list items, and new bookmark on server");
- yield sync_engine_and_validate_telem(engine, false);
+ await sync_engine_and_validate_telem(engine, false);
ok(!store.itemExists("readinglist"), "Should not apply Reading List root");
ok(!store.itemExists(listBmk.id), "Should not apply items in Reading List");
ok(store.itemExists(newBmk.id), "Should apply new bookmark");
deepEqual(collection.keys().sort(), ["menu", "mobile", "toolbar", "unfiled", newBmk.id].sort(),
"Should remove Places root and reading list items from server; upload local roots");
} finally {
store.wipe();
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
- yield new Promise(resolve => server.stop(resolve));
+ await promiseStopServer(server);
Svc.Obs.notify("weave:engine:stop-tracking");
}
});
-add_task(function* test_change_during_sync() {
+add_task(async function test_change_during_sync() {
_("Ensure that we track changes made during a sync.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
let tracker = engine._tracker;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
let collection = server.user("foo").collection("bookmarks");
let bz_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarksMenuFolderId, Utils.makeURI("https://bugzilla.mozilla.org/"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Bugzilla");
- let bz_guid = yield PlacesUtils.promiseItemGuid(bz_id);
+ let bz_guid = await PlacesUtils.promiseItemGuid(bz_id);
_(`Bugzilla GUID: ${bz_guid}`);
- yield PlacesTestUtils.markBookmarksAsSynced();
+ await PlacesTestUtils.markBookmarksAsSynced();
Svc.Obs.notify("weave:engine:start-tracking");
try {
let folder1_id = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
let folder1_guid = store.GUIDForId(folder1_id);
_(`Folder GUID: ${folder1_guid}`);
@@ -194,72 +193,72 @@ add_task(function* test_change_during_sy
localTaggedBmk.bmkUri = "https://example.org";
localTaggedBmk.title = "Tagged bookmark";
localTaggedBmk.tags = ["taggy"];
localTaggedBmk.parentName = "Folder 2";
localTaggedBmk.parentid = folder2_guid;
collection.insert(bmk4_guid, encryptPayload(localTaggedBmk.cleartext));
}
- yield* assertChildGuids(folder1_guid, [bmk1_guid], "Folder should have 1 child before first sync");
+ await assertChildGuids(folder1_guid, [bmk1_guid], "Folder should have 1 child before first sync");
_("Perform first sync");
{
let changes = engine.pullNewChanges();
deepEqual(changes.ids().sort(), [folder1_guid, bmk1_guid, "toolbar"].sort(),
"Should track bookmark and folder created before first sync");
- yield sync_engine_and_validate_telem(engine, false);
+ await sync_engine_and_validate_telem(engine, false);
}
let bmk2_id = store.idForGUID(bmk2_guid);
let bmk3_guid = store.GUIDForId(bmk3_id);
_(`Mozilla GUID: ${bmk3_guid}`);
{
equal(store.GUIDForId(bmk2_id), bmk2_guid,
"Remote bookmark should be applied during first sync");
ok(bmk3_id > -1,
"Bookmark created during first sync should exist locally");
ok(!collection.wbo(bmk3_guid),
"Bookmark created during first sync shouldn't be uploaded yet");
- yield* assertChildGuids(folder1_guid, [bmk1_guid, bmk3_guid, bmk2_guid],
+ await assertChildGuids(folder1_guid, [bmk1_guid, bmk3_guid, bmk2_guid],
"Folder 1 should have 3 children after first sync");
- yield* assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
+ await assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
"Folder 2 should have 2 children after first sync");
let taggedURIs = PlacesUtils.tagging.getURIsForTag("taggy");
equal(taggedURIs.length, 1, "Should have 1 tagged URI");
equal(taggedURIs[0].spec, "https://example.org/",
"Synced tagged bookmark should appear in tagged URI list");
}
_("Perform second sync");
{
let changes = engine.pullNewChanges();
deepEqual(changes.ids().sort(), [bmk3_guid, folder1_guid].sort(),
"Should track bookmark added during last sync and its parent");
- yield sync_engine_and_validate_telem(engine, false);
+ await sync_engine_and_validate_telem(engine, false);
ok(collection.wbo(bmk3_guid),
"Bookmark created during first sync should be uploaded during second sync");
- yield* assertChildGuids(folder1_guid, [bmk1_guid, bmk3_guid, bmk2_guid],
+ await assertChildGuids(folder1_guid, [bmk1_guid, bmk3_guid, bmk2_guid],
"Folder 1 should have same children after second sync");
- yield* assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
+ await assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
"Folder 2 should have same children after second sync");
}
} finally {
store.wipe();
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
- yield new Promise(resolve => server.stop(resolve));
+ await promiseStopServer(server);
Svc.Obs.notify("weave:engine:stop-tracking");
}
});
-add_task(function* bad_record_allIDs() {
+add_task(async function bad_record_allIDs() {
let server = new SyncServer();
server.start();
let syncTesting = new SyncTestingInfrastructure(server.server);
_("Ensure that bad Places queries don't cause an error in getAllIDs.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
let badRecordID = PlacesUtils.bookmarks.insertBookmark(
@@ -268,37 +267,37 @@ add_task(function* bad_record_allIDs() {
PlacesUtils.bookmarks.DEFAULT_INDEX,
null);
do_check_true(badRecordID > 0);
_("Record is " + badRecordID);
_("Type: " + PlacesUtils.bookmarks.getItemType(badRecordID));
_("Fetching all IDs.");
- let all = yield* fetchAllSyncIds();
+ let all = await fetchAllSyncIds();
_("All IDs: " + JSON.stringify([...all]));
do_check_true(all.has("menu"));
do_check_true(all.has("toolbar"));
_("Clean up.");
PlacesUtils.bookmarks.removeItem(badRecordID);
- yield PlacesSyncUtils.bookmarks.reset();
- yield new Promise(r => server.stop(r));
+ await PlacesSyncUtils.bookmarks.reset();
+ await promiseStopServer(server);
});
function serverForFoo(engine) {
return serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bookmarks: {version: engine.version,
syncID: engine.syncID}}}},
bookmarks: {}
});
}
-add_task(function* test_processIncoming_error_orderChildren() {
+add_task(async function test_processIncoming_error_orderChildren() {
_("Ensure that _orderChildren() is called even when _processIncoming() throws an error.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
let collection = server.user("foo").collection("bookmarks");
@@ -335,17 +334,17 @@ add_task(function* test_processIncoming_
// Make the 10 minutes old so it will only be synced in the toFetch phase.
bogus_record.modified = Date.now() / 1000 - 60 * 10;
engine.lastSync = Date.now() / 1000 - 60;
engine.toFetch = [BOGUS_GUID];
let error;
try {
- yield sync_engine_and_validate_telem(engine, true)
+ await sync_engine_and_validate_telem(engine, true)
} catch(ex) {
error = ex;
}
ok(!!error);
// Verify that the bookmark order has been applied.
let new_children = store.createRecord(folder1_guid).children;
do_check_eq(new_children.length, 2);
@@ -354,22 +353,22 @@ add_task(function* test_processIncoming_
do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 1);
do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 0);
} finally {
store.wipe();
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
- yield PlacesSyncUtils.bookmarks.reset();
- yield new Promise(resolve => server.stop(resolve));
+ await PlacesSyncUtils.bookmarks.reset();
+ await promiseStopServer(server);
}
});
-add_task(function* test_restorePromptsReupload() {
+add_task(async function test_restorePromptsReupload() {
_("Ensure that restoring from a backup will reupload all records.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
let collection = server.user("foo").collection("bookmarks");
@@ -396,48 +395,48 @@ add_task(function* test_restorePromptsRe
.getService(Ci.nsIProperties);
let backupFile = dirSvc.get("TmpD", Ci.nsILocalFile);
_("Make a backup.");
backupFile.append("t_b_e_" + Date.now() + ".json");
_("Backing up to file " + backupFile.path);
- yield BookmarkJSONUtils.exportToFile(backupFile.path);
+ await BookmarkJSONUtils.exportToFile(backupFile.path);
_("Create a different record and sync.");
let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
let bmk2_guid = store.GUIDForId(bmk2_id);
_("Get Thunderbird!: " + bmk2_id + ", " + bmk2_guid);
PlacesUtils.bookmarks.removeItem(bmk1_id);
let error;
try {
- yield sync_engine_and_validate_telem(engine, false);
+ await sync_engine_and_validate_telem(engine, false);
} catch(ex) {
error = ex;
_("Got error: " + Log.exceptionStr(ex));
}
do_check_true(!error);
_("Verify that there's only one bookmark on the server, and it's Thunderbird.");
// Of course, there's also the Bookmarks Toolbar and Bookmarks Menu...
let wbos = collection.keys(function (id) {
return ["menu", "toolbar", "mobile", "unfiled", folder1_guid].indexOf(id) == -1;
});
do_check_eq(wbos.length, 1);
do_check_eq(wbos[0], bmk2_guid);
_("Now restore from a backup.");
- yield BookmarkJSONUtils.importFromFile(backupFile, true);
+ await BookmarkJSONUtils.importFromFile(backupFile, true);
_("Ensure we have the bookmarks we expect locally.");
- let guids = yield* fetchAllSyncIds();
+ let guids = await fetchAllSyncIds();
_("GUIDs: " + JSON.stringify([...guids]));
let found = false;
let count = 0;
let newFX;
for (let guid of guids) {
count++;
let id = store.idForGUID(guid, true);
// Only one bookmark, so _all_ should be Firefox!
@@ -452,17 +451,17 @@ add_task(function* test_restorePromptsRe
_("We found it: " + found);
do_check_true(found);
_("Have the correct number of IDs locally, too.");
do_check_eq(count, ["menu", "toolbar", "mobile", "unfiled", folder1_id, bmk1_id].length);
_("Sync again. This'll wipe bookmarks from the server.");
try {
- yield sync_engine_and_validate_telem(engine, false);
+ await sync_engine_and_validate_telem(engine, false);
} catch(ex) {
error = ex;
_("Got error: " + Log.exceptionStr(ex));
}
do_check_true(!error);
_("Verify that there's only one bookmark on the server, and it's Firefox.");
// Of course, there's also the Bookmarks Toolbar and Bookmarks Menu...
@@ -486,32 +485,32 @@ add_task(function* test_restorePromptsRe
_("Our old friend Folder 1 is still in play.");
do_check_eq(folderWBOs.length, 1);
do_check_eq(folderWBOs[0].title, "Folder 1");
} finally {
store.wipe();
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
- yield PlacesSyncUtils.bookmarks.reset();
- yield new Promise(r => server.stop(r));
+ await PlacesSyncUtils.bookmarks.reset();
+ await promiseStopServer(server);
}
});
function FakeRecord(constructor, r) {
constructor.call(this, "bookmarks", r.id);
for (let x in r) {
this[x] = r[x];
}
// Borrow the constructor's conversion functions.
this.toSyncBookmark = constructor.prototype.toSyncBookmark;
}
// Bug 632287.
-add_task(function* test_mismatched_types() {
+add_task(async function test_mismatched_types() {
_("Ensure that handling a record that changes type causes deletion " +
"then re-adding.");
let oldRecord = {
"id": "l1nZZXfB8nC7",
"type":"folder",
"parentName":"Bookmarks Toolbar",
"title":"Innerst i Sneglehode",
@@ -566,22 +565,22 @@ add_task(function* test_mismatched_types
do_check_eq(bms.getItemType(newID), bms.TYPE_FOLDER);
do_check_true(PlacesUtils.annotations
.itemHasAnnotation(newID, PlacesUtils.LMANNO_FEEDURI));
} finally {
store.wipe();
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
- yield PlacesSyncUtils.bookmarks.reset();
- yield new Promise(r => server.stop(r));
+ await PlacesSyncUtils.bookmarks.reset();
+ await promiseStopServer(server);
}
});
-add_task(function* test_bookmark_guidMap_fail() {
+add_task(async function test_bookmark_guidMap_fail() {
_("Ensure that failures building the GUID map cause early death.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForFoo(engine);
let coll = server.user("foo").collection("bookmarks");
new SyncTestingInfrastructure(server.server);
@@ -620,44 +619,44 @@ add_task(function* test_bookmark_guidMap
try {
engine._processIncoming();
} catch (ex) {
err = ex;
}
do_check_eq(err, "Nooo");
PlacesUtils.promiseBookmarksTree = pbt;
- yield PlacesSyncUtils.bookmarks.reset();
- yield new Promise(r => server.stop(r));
+ await PlacesSyncUtils.bookmarks.reset();
+ await promiseStopServer(server);
});
-add_task(function* test_bookmark_tag_but_no_uri() {
+add_task(async function test_bookmark_tag_but_no_uri() {
_("Ensure that a bookmark record with tags, but no URI, doesn't throw an exception.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
// We're simply checking that no exception is thrown, so
// no actual checks in this test.
- yield PlacesSyncUtils.bookmarks.insert({
+ await PlacesSyncUtils.bookmarks.insert({
kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK,
syncId: Utils.makeGUID(),
parentSyncId: "toolbar",
url: "http://example.com",
tags: ["foo"],
});
- yield PlacesSyncUtils.bookmarks.insert({
+ await PlacesSyncUtils.bookmarks.insert({
kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK,
syncId: Utils.makeGUID(),
parentSyncId: "toolbar",
url: "http://example.org",
tags: null,
});
- yield PlacesSyncUtils.bookmarks.insert({
+ await PlacesSyncUtils.bookmarks.insert({
kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK,
syncId: Utils.makeGUID(),
url: "about:fake",
parentSyncId: "toolbar",
tags: null,
});
let record = new FakeRecord(BookmarkFolder, {
@@ -669,17 +668,17 @@ add_task(function* test_bookmark_tag_but
type: "folder"
});
store.create(record);
record.tags = ["bar"];
store.update(record);
});
-add_task(function* test_misreconciled_root() {
+add_task(async function test_misreconciled_root() {
_("Ensure that we don't reconcile an arbitrary record with a root.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForFoo(engine);
// Log real hard for this test.
store._log.trace = store._log.debug;
@@ -733,17 +732,17 @@ add_task(function* test_misreconciled_ro
// the real GUID, instead using a generated one. Sync does the translation.
let toolbarAfter = store.createRecord("toolbar", "bookmarks");
let parentGUIDAfter = toolbarAfter.parentid;
let parentIDAfter = store.idForGUID(parentGUIDAfter);
do_check_eq(store.GUIDForId(toolbarIDBefore), "toolbar");
do_check_eq(parentGUIDBefore, parentGUIDAfter);
do_check_eq(parentIDBefore, parentIDAfter);
- yield PlacesSyncUtils.bookmarks.reset();
- yield new Promise(r => server.stop(r));
+ await PlacesSyncUtils.bookmarks.reset();
+ await promiseStopServer(server);
});
function run_test() {
initTestLogging("Trace");
generateNewKeys(Service.collectionKeys);
run_next_test();
}
--- a/services/sync/tests/unit/test_bookmark_invalid.js
+++ b/services/sync/tests/unit/test_bookmark_invalid.js
@@ -1,61 +1,60 @@
Cu.import("resource://gre/modules/PlacesUtils.jsm");
Cu.import("resource://gre/modules/Log.jsm");
-Cu.import("resource://gre/modules/Task.jsm");
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/engines/bookmarks.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Service.engineManager.register(BookmarksEngine);
var engine = Service.engineManager.get("bookmarks");
var store = engine._store;
var tracker = engine._tracker;
-add_task(function* test_ignore_invalid_uri() {
+add_task(async function test_ignore_invalid_uri() {
_("Ensure that we don't die with invalid bookmarks.");
// First create a valid bookmark.
let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
Services.io.newURI("http://example.com/", null, null),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"the title");
// Now update moz_places with an invalid url.
- yield PlacesUtils.withConnectionWrapper("test_ignore_invalid_uri", Task.async(function* (db) {
- yield db.execute(
+ await PlacesUtils.withConnectionWrapper("test_ignore_invalid_uri", async function(db) {
+ await db.execute(
`UPDATE moz_places SET url = :url, url_hash = hash(:url)
WHERE id = (SELECT b.fk FROM moz_bookmarks b
WHERE b.id = :id LIMIT 1)`,
{ id: bmid, url: "<invalid url>" });
- }));
+ });
// Ensure that this doesn't throw even though the DB is now in a bad state (a
// bookmark has an illegal url).
engine._buildGUIDMap();
});
-add_task(function* test_ignore_missing_uri() {
+add_task(async function test_ignore_missing_uri() {
_("Ensure that we don't die with a bookmark referencing an invalid bookmark id.");
// First create a valid bookmark.
let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
Services.io.newURI("http://example.com/", null, null),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"the title");
// Now update moz_bookmarks to reference a non-existing places ID
- yield PlacesUtils.withConnectionWrapper("test_ignore_missing_uri", Task.async(function* (db) {
- yield db.execute(
+ await PlacesUtils.withConnectionWrapper("test_ignore_missing_uri", async function(db) {
+ await db.execute(
`UPDATE moz_bookmarks SET fk = 999999
WHERE id = :id`
, { id: bmid });
- }));
+ });
// Ensure that this doesn't throw even though the DB is now in a bad state (a
// bookmark has an illegal url).
engine._buildGUIDMap();
});
function run_test() {
initTestLogging('Trace');
--- a/services/sync/tests/unit/test_bookmark_order.js
+++ b/services/sync/tests/unit/test_bookmark_order.js
@@ -1,20 +1,19 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
_("Making sure after processing incoming bookmarks, they show up in the right order");
Cu.import("resource://gre/modules/PlacesUtils.jsm");
-Cu.import("resource://gre/modules/Task.jsm");
Cu.import("resource://services-sync/engines/bookmarks.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
-var check = Task.async(function* (expected, message) {
- let root = yield PlacesUtils.promiseBookmarksTree();
+var check = async function(expected, message) {
+ let root = await PlacesUtils.promiseBookmarksTree();
let bookmarks = (function mapTree(children) {
return children.map(child => {
let result = {
guid: child.guid,
index: child.index,
};
if (child.children) {
@@ -29,25 +28,25 @@ var check = Task.async(function* (expect
}
return result;
});
}(root.children));
_("Checking if the bookmark structure is", JSON.stringify(expected));
_("Got bookmarks:", JSON.stringify(bookmarks));
deepEqual(bookmarks, expected);
-});
+};
-add_task(function* test_bookmark_order() {
+add_task(async function test_bookmark_order() {
let store = new BookmarksEngine(Service)._store;
initTestLogging("Trace");
_("Starting with a clean slate of no bookmarks");
store.wipe();
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
// Index 2 is the tags root. (Root indices depend on the order of the
// `CreateRoot` calls in `Database::CreateBookmarkRoots`).
@@ -81,17 +80,17 @@ add_task(function* test_bookmark_order()
store._childrenToOrder = {};
store.applyIncoming(record);
store._orderChildren();
delete store._childrenToOrder;
}
let id10 = "10_aaaaaaaaa";
_("basic add first bookmark");
apply(bookmark(id10, ""));
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -101,17 +100,17 @@ add_task(function* test_bookmark_order()
}],
}, {
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "basic add first bookmark");
let id20 = "20_aaaaaaaaa";
_("basic append behind 10");
apply(bookmark(id20, ""));
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -128,17 +127,17 @@ add_task(function* test_bookmark_order()
}], "basic append behind 10");
let id31 = "31_aaaaaaaaa";
let id30 = "f30_aaaaaaaa";
_("basic create in folder");
apply(bookmark(id31, id30));
let f30 = folder(id30, "", [id31]);
apply(f30);
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -160,17 +159,17 @@ add_task(function* test_bookmark_order()
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "basic create in folder");
let id41 = "41_aaaaaaaaa";
let id40 = "f40_aaaaaaaa";
_("insert missing parent -> append to unfiled");
apply(bookmark(id41, id40));
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -196,17 +195,17 @@ add_task(function* test_bookmark_order()
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "insert missing parent -> append to unfiled");
let id42 = "42_aaaaaaaaa";
_("insert another missing parent -> append");
apply(bookmark(id42, id40));
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -235,17 +234,17 @@ add_task(function* test_bookmark_order()
}, {
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "insert another missing parent -> append");
_("insert folder -> move children and followers");
let f40 = folder(id40, "", [id41, id42]);
apply(f40);
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -276,17 +275,17 @@ add_task(function* test_bookmark_order()
}, {
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "insert folder -> move children and followers");
_("Moving 41 behind 42 -> update f40");
f40.children = [id42, id41];
apply(f40);
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -317,17 +316,17 @@ add_task(function* test_bookmark_order()
}, {
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "Moving 41 behind 42 -> update f40");
_("Moving 10 back to front -> update 10, 20");
f40.children = [id41, id42];
apply(f40);
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -357,17 +356,17 @@ add_task(function* test_bookmark_order()
}],
}, {
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "Moving 10 back to front -> update 10, 20");
_("Moving 20 behind 42 in f40 -> update 50");
apply(bookmark(id20, id40));
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -399,17 +398,17 @@ add_task(function* test_bookmark_order()
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "Moving 20 behind 42 in f40 -> update 50");
_("Moving 10 in front of 31 in f30 -> update 10, f30");
apply(bookmark(id10, id30));
f30.children = [id10, id31];
apply(f30);
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -441,17 +440,17 @@ add_task(function* test_bookmark_order()
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "Moving 10 in front of 31 in f30 -> update 10, f30");
_("Moving 20 from f40 to f30 -> update 20, f30");
apply(bookmark(id20, id30));
f30.children = [id10, id20, id31];
apply(f30);
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
@@ -483,17 +482,17 @@ add_task(function* test_bookmark_order()
guid: PlacesUtils.bookmarks.mobileGuid,
index: 4,
}], "Moving 20 from f40 to f30 -> update 20, f30");
_("Move 20 back to front -> update 20, f30");
apply(bookmark(id20, ""));
f30.children = [id10, id31];
apply(f30);
- yield check([{
+ await check([{
guid: PlacesUtils.bookmarks.menuGuid,
index: 0,
}, {
guid: PlacesUtils.bookmarks.toolbarGuid,
index: 1,
}, {
guid: PlacesUtils.bookmarks.unfiledGuid,
index: 3,
--- a/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
+++ b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
@@ -52,17 +52,17 @@ function serverForFoo(engine) {
meta: {global: {engines: {bookmarks: {version: engine.version,
syncID: engine.syncID}}}},
bookmarks: {}
});
}
// Verify that Places smart bookmarks have their annotation uploaded and
// handled locally.
-add_task(function *test_annotation_uploaded() {
+add_task(async function test_annotation_uploaded() {
let server = serverForFoo(engine);
new SyncTestingInfrastructure(server.server);
let startCount = smartBookmarkCount();
_("Start count is " + startCount);
if (startCount > 0) {
@@ -105,17 +105,17 @@ add_task(function *test_annotation_uploa
_("Our count has increased since we started.");
do_check_eq(smartBookmarkCount(), startCount + 1);
_("Sync record to the server.");
let collection = server.user("foo").collection("bookmarks");
try {
- yield sync_engine_and_validate_telem(engine, false);
+ await sync_engine_and_validate_telem(engine, false);
let wbos = collection.keys(function (id) {
return ["menu", "toolbar", "mobile", "unfiled"].indexOf(id) == -1;
});
do_check_eq(wbos.length, 1);
_("Verify that the server WBO has the annotation.");
let serverGUID = wbos[0];
do_check_eq(serverGUID, guid);
@@ -136,17 +136,17 @@ add_task(function *test_annotation_uploa
mostVisitedID, Utils.makeURI("http://something/else"));
PlacesUtils.annotations.removeItemAnnotation(mostVisitedID,
SMART_BOOKMARKS_ANNO);
store.wipe();
engine.resetClient();
do_check_eq(smartBookmarkCount(), startCount);
_("Sync. Verify that the downloaded record carries the annotation.");
- yield sync_engine_and_validate_telem(engine, false);
+ await sync_engine_and_validate_telem(engine, false);
_("Verify that the Places DB now has an annotated bookmark.");
_("Our count has increased again.");
do_check_eq(smartBookmarkCount(), startCount + 1);
_("Find by GUID and verify that it's annotated.");
let newID = store.idForGUID(serverGUID);
let newAnnoValue = PlacesUtils.annotations.getItemAnnotation(
--- a/services/sync/tests/unit/test_bookmark_store.js
+++ b/services/sync/tests/unit/test_bookmark_store.js
@@ -16,36 +16,36 @@ var store = engine._store;
var tracker = engine._tracker;
// Don't write some persistence files asynchronously.
tracker.persistChangedIDs = false;
var fxuri = Utils.makeURI("http://getfirefox.com/");
var tburi = Utils.makeURI("http://getthunderbird.com/");
-add_task(function* test_ignore_specials() {
+add_task(async function test_ignore_specials() {
_("Ensure that we can't delete bookmark roots.");
// Belt...
let record = new BookmarkFolder("bookmarks", "toolbar", "folder");
record.deleted = true;
do_check_neq(null, store.idForGUID("toolbar"));
store.applyIncoming(record);
- yield store.deletePending();
+ await store.deletePending();
// Ensure that the toolbar exists.
do_check_neq(null, store.idForGUID("toolbar"));
// This will fail painfully in getItemType if the deletion worked.
engine._buildGUIDMap();
// Braces...
store.remove(record);
- yield store.deletePending();
+ await store.deletePending();
do_check_neq(null, store.idForGUID("toolbar"));
engine._buildGUIDMap();
store.wipe();
});
add_test(function test_bookmark_create() {
try {
@@ -239,29 +239,29 @@ add_test(function test_folder_createReco
} finally {
_("Clean up.");
store.wipe();
run_next_test();
}
});
-add_task(function* test_deleted() {
+add_task(async function test_deleted() {
try {
_("Create a bookmark that will be deleted.");
let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.toolbarFolder, fxuri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bmk1_guid = store.GUIDForId(bmk1_id);
_("Delete the bookmark through the store.");
let record = new PlacesItem("bookmarks", bmk1_guid);
record.deleted = true;
store.applyIncoming(record);
- yield store.deletePending();
+ await store.deletePending();
_("Ensure it has been deleted.");
let error;
try {
PlacesUtils.bookmarks.getBookmarkURI(bmk1_id);
} catch(ex) {
error = ex;
}
do_check_eq(error.result, Cr.NS_ERROR_ILLEGAL_VALUE);
@@ -433,19 +433,19 @@ function assertDeleted(id) {
try {
PlacesUtils.bookmarks.getItemType(id);
} catch (e) {
error = e;
}
equal(error.result, Cr.NS_ERROR_ILLEGAL_VALUE)
}
-add_task(function* test_delete_buffering() {
+add_task(async function test_delete_buffering() {
store.wipe();
- yield PlacesTestUtils.markBookmarksAsSynced();
+ await PlacesTestUtils.markBookmarksAsSynced();
try {
_("Create a folder with two bookmarks.");
let folder = new BookmarkFolder("bookmarks", "testfolder-1");
folder.parentName = "Bookmarks Toolbar";
folder.parentid = "toolbar";
folder.title = "Test Folder";
store.applyIncoming(folder);
@@ -505,17 +505,17 @@ add_task(function* test_delete_buffering
equal(PlacesUtils.bookmarks.getFolderIdForItem(fxRecordId), folderId);
ok(store._itemsToDelete.has(folder.id));
ok(store._itemsToDelete.has(fxRecord.id));
ok(!store._itemsToDelete.has(tbRecord.id));
_("Process pending deletions and ensure that the right things are deleted.");
- let newChangeRecords = yield store.deletePending();
+ let newChangeRecords = await store.deletePending();
deepEqual(Object.keys(newChangeRecords).sort(), ["get-tndrbrd1", "toolbar"]);
assertDeleted(fxRecordId);
assertDeleted(folderId);
ok(!store._itemsToDelete.has(folder.id));
ok(!store._itemsToDelete.has(fxRecord.id));
--- a/services/sync/tests/unit/test_bookmark_tracker.js
+++ b/services/sync/tests/unit/test_bookmark_tracker.js
@@ -2,17 +2,16 @@
http://creativecommons.org/publicdomain/zero/1.0/ */
Cu.import("resource://gre/modules/PlacesUtils.jsm");
const {
// `fetchGuidsWithAnno` isn't exported, but we can still access it here via a
// backstage pass.
fetchGuidsWithAnno,
} = Cu.import("resource://gre/modules/PlacesSyncUtils.jsm");
-Cu.import("resource://gre/modules/Task.jsm");
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://services-sync/engines/bookmarks.js");
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://gre/modules/osfile.jsm");
Cu.import("resource://gre/modules/Task.jsm");
Cu.import("resource://testing-common/PlacesTestUtils.jsm");
@@ -24,65 +24,65 @@ var store = engine._store;
var tracker = engine._tracker;
store.wipe();
tracker.persistChangedIDs = false;
const DAY_IN_MS = 24 * 60 * 60 * 1000;
// Test helpers.
-function* verifyTrackerEmpty() {
- let changes = yield tracker.promiseChangedIDs();
+async function verifyTrackerEmpty() {
+ let changes = await tracker.promiseChangedIDs();
deepEqual(changes, {});
equal(tracker.score, 0);
}
-function* resetTracker() {
- yield PlacesTestUtils.markBookmarksAsSynced();
+async function resetTracker() {
+ await PlacesTestUtils.markBookmarksAsSynced();
tracker.resetScore();
}
-function* cleanup() {
+async function cleanup() {
engine.lastSync = 0;
store.wipe();
- yield resetTracker();
- yield stopTracking();
+ await resetTracker();
+ await stopTracking();
}
// startTracking is a signal that the test wants to notice things that happen
// after this is called (ie, things already tracked should be discarded.)
-function* startTracking() {
+async function startTracking() {
Svc.Obs.notify("weave:engine:start-tracking");
- yield PlacesTestUtils.markBookmarksAsSynced();
+ await PlacesTestUtils.markBookmarksAsSynced();
}
-function* stopTracking() {
+async function stopTracking() {
Svc.Obs.notify("weave:engine:stop-tracking");
}
-function* verifyTrackedItems(tracked) {
- let changedIDs = yield tracker.promiseChangedIDs();
+async function verifyTrackedItems(tracked) {
+ let changedIDs = await tracker.promiseChangedIDs();
let trackedIDs = new Set(Object.keys(changedIDs));
for (let guid of tracked) {
ok(guid in changedIDs, `${guid} should be tracked`);
ok(changedIDs[guid].modified > 0, `${guid} should have a modified time`);
ok(changedIDs[guid].counter >= -1, `${guid} should have a change counter`);
trackedIDs.delete(guid);
}
equal(trackedIDs.size, 0, `Unhandled tracked IDs: ${
JSON.stringify(Array.from(trackedIDs))}`);
}
-function* verifyTrackedCount(expected) {
- let changedIDs = yield tracker.promiseChangedIDs();
+async function verifyTrackedCount(expected) {
+ let changedIDs = await tracker.promiseChangedIDs();
do_check_attribute_count(changedIDs, expected);
}
// A debugging helper that dumps the full bookmarks tree.
-function* dumpBookmarks() {
+async function dumpBookmarks() {
let columns = ["id", "title", "guid", "syncStatus", "syncChangeCounter", "position"];
return PlacesUtils.promiseDBConnection().then(connection => {
let all = [];
return connection.executeCached(`SELECT ${columns.join(", ")} FROM moz_bookmarks;`,
{},
row => {
let repr = {};
for (let column of columns) {
@@ -92,413 +92,413 @@ function* dumpBookmarks() {
}
).then(() => {
dump("All bookmarks:\n");
dump(JSON.stringify(all, undefined, 2));
});
})
}
-var populateTree = Task.async(function* populate(parentId, ...items) {
+var populateTree = async function populate(parentId, ...items) {
let guids = {};
for (let item of items) {
let itemId;
switch (item.type) {
case PlacesUtils.bookmarks.TYPE_BOOKMARK:
itemId = PlacesUtils.bookmarks.insertBookmark(parentId,
Utils.makeURI(item.url),
PlacesUtils.bookmarks.DEFAULT_INDEX, item.title);
break;
case PlacesUtils.bookmarks.TYPE_FOLDER: {
itemId = PlacesUtils.bookmarks.createFolder(parentId,
item.title, PlacesUtils.bookmarks.DEFAULT_INDEX);
- Object.assign(guids, yield* populate(itemId, ...item.children));
+ Object.assign(guids, await populate(itemId, ...item.children));
break;
}
default:
throw new Error(`Unsupported item type: ${item.type}`);
}
if (item.exclude) {
PlacesUtils.annotations.setItemAnnotation(
itemId, BookmarkAnnos.EXCLUDEBACKUP_ANNO, "Don't back this up", 0,
PlacesUtils.annotations.EXPIRE_NEVER);
}
- guids[item.title] = yield PlacesUtils.promiseItemGuid(itemId);
+ guids[item.title] = await PlacesUtils.promiseItemGuid(itemId);
}
return guids;
-});
+};
-function* insertBookmarksToMigrate() {
- let mozBmk = yield PlacesUtils.bookmarks.insert({
+async function insertBookmarksToMigrate() {
+ let mozBmk = await PlacesUtils.bookmarks.insert({
guid: "0gtWTOgYcoJD",
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://mozilla.org",
});
- let fxBmk = yield PlacesUtils.bookmarks.insert({
+ let fxBmk = await PlacesUtils.bookmarks.insert({
guid: "0dbpnMdxKxfg",
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
});
- let tbBmk = yield PlacesUtils.bookmarks.insert({
+ let tbBmk = await PlacesUtils.bookmarks.insert({
guid: "r5ouWdPB3l28",
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getthunderbird.com",
});
- let bzBmk = yield PlacesUtils.bookmarks.insert({
+ let bzBmk = await PlacesUtils.bookmarks.insert({
guid: "YK5Bdq5MIqL6",
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://bugzilla.mozilla.org",
});
- let exampleBmk = yield PlacesUtils.bookmarks.insert({
+ let exampleBmk = await PlacesUtils.bookmarks.insert({
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://example.com",
});
- yield PlacesTestUtils.setBookmarkSyncFields({
+ await PlacesTestUtils.setBookmarkSyncFields({
guid: fxBmk.guid,
syncStatus: PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
}, {
guid: tbBmk.guid,
syncStatus: PlacesUtils.bookmarks.SYNC_STATUS.UNKNOWN,
}, {
guid: exampleBmk.guid,
syncStatus: PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
});
- yield PlacesUtils.bookmarks.remove(exampleBmk.guid);
+ await PlacesUtils.bookmarks.remove(exampleBmk.guid);
}
-add_task(function* test_tracking() {
+add_task(async function test_tracking() {
_("Test starting and stopping the tracker");
// Remove existing tracking information for roots.
- yield startTracking();
+ await startTracking();
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX);
// creating the folder should have made 2 changes - the folder itself and
// the parent of the folder.
- yield verifyTrackedCount(2);
+ await verifyTrackedCount(2);
// Reset the changes as the rest of the test doesn't want to see these.
- yield resetTracker();
+ await resetTracker();
function createBmk() {
return PlacesUtils.bookmarks.insertBookmark(
folder, Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
}
try {
_("Tell the tracker to start tracking changes.");
- yield startTracking();
+ await startTracking();
createBmk();
// We expect two changed items because the containing folder
// changed as well (new child).
- yield verifyTrackedCount(2);
+ await verifyTrackedCount(2);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
_("Notifying twice won't do any harm.");
createBmk();
- yield verifyTrackedCount(3);
+ await verifyTrackedCount(3);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_batch_tracking() {
+add_task(async function test_batch_tracking() {
_("Test tracker does the correct thing during and after a places 'batch'");
- yield startTracking();
+ await startTracking();
PlacesUtils.bookmarks.runInBatchMode({
runBatched: function() {
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX);
// We should be tracking the new folder and its parent (and need to jump
// through blocking hoops...)
- Async.promiseSpinningly(Task.spawn(verifyTrackedCount(2)));
+ Async.promiseSpinningly(verifyTrackedCount(2));
// But not have bumped the score.
do_check_eq(tracker.score, 0);
}
}, null);
// Out of batch mode - tracker should be the same, but score should be up.
- yield verifyTrackedCount(2);
+ await verifyTrackedCount(2);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield cleanup();
+ await cleanup();
});
-add_task(function* test_nested_batch_tracking() {
+add_task(async function test_nested_batch_tracking() {
_("Test tracker does the correct thing if a places 'batch' is nested");
- yield startTracking();
+ await startTracking();
PlacesUtils.bookmarks.runInBatchMode({
runBatched: function() {
PlacesUtils.bookmarks.runInBatchMode({
runBatched: function() {
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX);
// We should be tracking the new folder and its parent (and need to jump
// through blocking hoops...)
- Async.promiseSpinningly(Task.spawn(verifyTrackedCount(2)));
+ Async.promiseSpinningly(verifyTrackedCount(2));
// But not have bumped the score.
do_check_eq(tracker.score, 0);
}
}, null);
_("inner batch complete.");
// should still not have a score as the outer batch is pending.
do_check_eq(tracker.score, 0);
}
}, null);
// Out of both batches - tracker should be the same, but score should be up.
- yield verifyTrackedCount(2);
+ await verifyTrackedCount(2);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield cleanup();
+ await cleanup();
});
-add_task(function* test_tracker_sql_batching() {
+add_task(async function test_tracker_sql_batching() {
_("Test tracker does the correct thing when it is forced to batch SQL queries");
const SQLITE_MAX_VARIABLE_NUMBER = 999;
let numItems = SQLITE_MAX_VARIABLE_NUMBER * 2 + 10;
let createdIDs = [];
- yield startTracking();
+ await startTracking();
PlacesUtils.bookmarks.runInBatchMode({
runBatched: function() {
for (let i = 0; i < numItems; i++) {
let syncBmkID = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.unfiledBookmarksFolder,
Utils.makeURI("https://example.org/" + i),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Sync Bookmark " + i);
createdIDs.push(syncBmkID);
}
}
}, null);
do_check_eq(createdIDs.length, numItems);
- yield verifyTrackedCount(numItems + 1); // the folder is also tracked.
- yield resetTracker();
+ await verifyTrackedCount(numItems + 1); // the folder is also tracked.
+ await resetTracker();
PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.unfiledBookmarksFolder);
- yield verifyTrackedCount(numItems + 1);
+ await verifyTrackedCount(numItems + 1);
- yield cleanup();
+ await cleanup();
});
-add_task(function* test_onItemAdded() {
+add_task(async function test_onItemAdded() {
_("Items inserted via the synchronous bookmarks API should be tracked");
try {
- yield startTracking();
+ await startTracking();
_("Insert a folder using the sync API");
let syncFolderID = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder, "Sync Folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let syncFolderGUID = engine._store.GUIDForId(syncFolderID);
- yield verifyTrackedItems(["menu", syncFolderGUID]);
+ await verifyTrackedItems(["menu", syncFolderGUID]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
- yield startTracking();
+ await resetTracker();
+ await startTracking();
_("Insert a bookmark using the sync API");
let syncBmkID = PlacesUtils.bookmarks.insertBookmark(syncFolderID,
Utils.makeURI("https://example.org/sync"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Sync Bookmark");
let syncBmkGUID = engine._store.GUIDForId(syncBmkID);
- yield verifyTrackedItems([syncFolderGUID, syncBmkGUID]);
+ await verifyTrackedItems([syncFolderGUID, syncBmkGUID]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
- yield startTracking();
+ await resetTracker();
+ await startTracking();
_("Insert a separator using the sync API");
let syncSepID = PlacesUtils.bookmarks.insertSeparator(
PlacesUtils.bookmarks.bookmarksMenuFolder,
PlacesUtils.bookmarks.getItemIndex(syncFolderID));
let syncSepGUID = engine._store.GUIDForId(syncSepID);
- yield verifyTrackedItems(["menu", syncSepGUID]);
+ await verifyTrackedItems(["menu", syncSepGUID]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemAdded() {
+add_task(async function test_async_onItemAdded() {
_("Items inserted via the asynchronous bookmarks API should be tracked");
try {
- yield startTracking();
+ await startTracking();
_("Insert a folder using the async API");
- let asyncFolder = yield PlacesUtils.bookmarks.insert({
+ let asyncFolder = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_FOLDER,
parentGuid: PlacesUtils.bookmarks.menuGuid,
title: "Async Folder",
});
- yield verifyTrackedItems(["menu", asyncFolder.guid]);
+ await verifyTrackedItems(["menu", asyncFolder.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
- yield startTracking();
+ await resetTracker();
+ await startTracking();
_("Insert a bookmark using the async API");
- let asyncBmk = yield PlacesUtils.bookmarks.insert({
+ let asyncBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: asyncFolder.guid,
url: "https://example.org/async",
title: "Async Bookmark",
});
- yield verifyTrackedItems([asyncFolder.guid, asyncBmk.guid]);
+ await verifyTrackedItems([asyncFolder.guid, asyncBmk.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
- yield startTracking();
+ await resetTracker();
+ await startTracking();
_("Insert a separator using the async API");
- let asyncSep = yield PlacesUtils.bookmarks.insert({
+ let asyncSep = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_SEPARATOR,
parentGuid: PlacesUtils.bookmarks.menuGuid,
index: asyncFolder.index,
});
- yield verifyTrackedItems(["menu", asyncSep.guid]);
+ await verifyTrackedItems(["menu", asyncSep.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemChanged() {
+add_task(async function test_async_onItemChanged() {
_("Items updated using the asynchronous bookmarks API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert a bookmark");
- let fxBmk = yield PlacesUtils.bookmarks.insert({
+ let fxBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
_(`Firefox GUID: ${fxBmk.guid}`);
- yield startTracking();
+ await startTracking();
_("Update the bookmark using the async API");
- yield PlacesUtils.bookmarks.update({
+ await PlacesUtils.bookmarks.update({
guid: fxBmk.guid,
title: "Download Firefox",
url: "https://www.mozilla.org/firefox",
// PlacesUtils.bookmarks.update rejects last modified dates older than
// the added date.
lastModified: new Date(Date.now() + DAY_IN_MS),
});
- yield verifyTrackedItems([fxBmk.guid]);
+ await verifyTrackedItems([fxBmk.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemChanged_itemDates() {
+add_task(async function test_onItemChanged_itemDates() {
_("Changes to item dates should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert a bookmark");
let fx_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = engine._store.GUIDForId(fx_id);
_(`Firefox GUID: ${fx_guid}`);
- yield startTracking();
+ await startTracking();
_("Reset the bookmark's added date");
// Convert to microseconds for PRTime.
let dateAdded = (Date.now() - DAY_IN_MS) * 1000;
PlacesUtils.bookmarks.setItemDateAdded(fx_id, dateAdded);
- yield verifyTrackedItems([fx_guid]);
+ await verifyTrackedItems([fx_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
+ await resetTracker();
_("Set the bookmark's last modified date");
let dateModified = Date.now() * 1000;
PlacesUtils.bookmarks.setItemLastModified(fx_id, dateModified);
- yield verifyTrackedItems([fx_guid]);
+ await verifyTrackedItems([fx_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemChanged_changeBookmarkURI() {
+add_task(async function test_onItemChanged_changeBookmarkURI() {
_("Changes to bookmark URIs should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert a bookmark");
let fx_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = engine._store.GUIDForId(fx_id);
_(`Firefox GUID: ${fx_guid}`);
_("Set a tracked annotation to make sure we only notify once");
PlacesUtils.annotations.setItemAnnotation(
fx_id, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
PlacesUtils.annotations.EXPIRE_NEVER);
- yield startTracking();
+ await startTracking();
_("Change the bookmark's URI");
PlacesUtils.bookmarks.changeBookmarkURI(fx_id,
Utils.makeURI("https://www.mozilla.org/firefox"));
- yield verifyTrackedItems([fx_guid]);
+ await verifyTrackedItems([fx_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemTagged() {
+add_task(async function test_onItemTagged() {
_("Items tagged using the synchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Create a folder");
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folderGUID = engine._store.GUIDForId(folder);
_("Folder ID: " + folder);
_("Folder GUID: " + folderGUID);
@@ -507,340 +507,340 @@ add_task(function* test_onItemTagged() {
let uri = Utils.makeURI("http://getfirefox.com");
let b = PlacesUtils.bookmarks.insertBookmark(
folder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bGUID = engine._store.GUIDForId(b);
_("New item is " + b);
_("GUID: " + bGUID);
- yield startTracking();
+ await startTracking();
_("Tag the item");
PlacesUtils.tagging.tagURI(uri, ["foo"]);
// bookmark should be tracked, folder should not be.
- yield verifyTrackedItems([bGUID]);
+ await verifyTrackedItems([bGUID]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemUntagged() {
+add_task(async function test_onItemUntagged() {
_("Items untagged using the synchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert tagged bookmarks");
let uri = Utils.makeURI("http://getfirefox.com");
let fx1ID = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let fx1GUID = engine._store.GUIDForId(fx1ID);
// Different parent and title; same URL.
let fx2ID = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.toolbarFolder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Download Firefox");
let fx2GUID = engine._store.GUIDForId(fx2ID);
PlacesUtils.tagging.tagURI(uri, ["foo"]);
- yield startTracking();
+ await startTracking();
_("Remove the tag");
PlacesUtils.tagging.untagURI(uri, ["foo"]);
- yield verifyTrackedItems([fx1GUID, fx2GUID]);
+ await verifyTrackedItems([fx1GUID, fx2GUID]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 4);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemUntagged() {
+add_task(async function test_async_onItemUntagged() {
_("Items untagged using the asynchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert tagged bookmarks");
- let fxBmk1 = yield PlacesUtils.bookmarks.insert({
+ let fxBmk1 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
- let fxBmk2 = yield PlacesUtils.bookmarks.insert({
+ let fxBmk2 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.toolbarGuid,
url: "http://getfirefox.com",
title: "Download Firefox",
});
- let tag = yield PlacesUtils.bookmarks.insert({
+ let tag = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_FOLDER,
parentGuid: PlacesUtils.bookmarks.tagsGuid,
title: "some tag",
});
- let fxTag = yield PlacesUtils.bookmarks.insert({
+ let fxTag = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: tag.guid,
url: "http://getfirefox.com",
});
- yield startTracking();
+ await startTracking();
_("Remove the tag using the async bookmarks API");
- yield PlacesUtils.bookmarks.remove(fxTag.guid);
+ await PlacesUtils.bookmarks.remove(fxTag.guid);
- yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
+ await verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 4);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemTagged() {
+add_task(async function test_async_onItemTagged() {
_("Items tagged using the asynchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert untagged bookmarks");
- let folder1 = yield PlacesUtils.bookmarks.insert({
+ let folder1 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_FOLDER,
parentGuid: PlacesUtils.bookmarks.menuGuid,
title: "Folder 1",
});
- let fxBmk1 = yield PlacesUtils.bookmarks.insert({
+ let fxBmk1 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: folder1.guid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
- let folder2 = yield PlacesUtils.bookmarks.insert({
+ let folder2 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_FOLDER,
parentGuid: PlacesUtils.bookmarks.menuGuid,
title: "Folder 2",
});
// Different parent and title; same URL.
- let fxBmk2 = yield PlacesUtils.bookmarks.insert({
+ let fxBmk2 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: folder2.guid,
url: "http://getfirefox.com",
title: "Download Firefox",
});
- yield startTracking();
+ await startTracking();
// This will change once tags are moved into a separate table (bug 424160).
// We specifically test this case because Bookmarks.jsm updates tagged
// bookmarks and notifies observers.
_("Insert a tag using the async bookmarks API");
- let tag = yield PlacesUtils.bookmarks.insert({
+ let tag = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_FOLDER,
parentGuid: PlacesUtils.bookmarks.tagsGuid,
title: "some tag",
});
_("Tag an item using the async bookmarks API");
- yield PlacesUtils.bookmarks.insert({
+ await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: tag.guid,
url: "http://getfirefox.com",
});
- yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
+ await verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 4);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemKeywordChanged() {
+add_task(async function test_onItemKeywordChanged() {
_("Keyword changes via the synchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folderGUID = engine._store.GUIDForId(folder);
_("Track changes to keywords");
let uri = Utils.makeURI("http://getfirefox.com");
let b = PlacesUtils.bookmarks.insertBookmark(
folder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bGUID = engine._store.GUIDForId(b);
_("New item is " + b);
_("GUID: " + bGUID);
- yield startTracking();
+ await startTracking();
_("Give the item a keyword");
PlacesUtils.bookmarks.setKeywordForBookmark(b, "the_keyword");
// bookmark should be tracked, folder should not be.
- yield verifyTrackedItems([bGUID]);
+ await verifyTrackedItems([bGUID]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemKeywordChanged() {
+add_task(async function test_async_onItemKeywordChanged() {
_("Keyword changes via the asynchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert two bookmarks with the same URL");
- let fxBmk1 = yield PlacesUtils.bookmarks.insert({
+ let fxBmk1 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
- let fxBmk2 = yield PlacesUtils.bookmarks.insert({
+ let fxBmk2 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.toolbarGuid,
url: "http://getfirefox.com",
title: "Download Firefox",
});
- yield startTracking();
+ await startTracking();
_("Add a keyword for both items");
- yield PlacesUtils.keywords.insert({
+ await PlacesUtils.keywords.insert({
keyword: "the_keyword",
url: "http://getfirefox.com",
postData: "postData",
});
- yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
+ await verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemKeywordDeleted() {
+add_task(async function test_async_onItemKeywordDeleted() {
_("Keyword deletions via the asynchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert two bookmarks with the same URL and keywords");
- let fxBmk1 = yield PlacesUtils.bookmarks.insert({
+ let fxBmk1 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
- let fxBmk2 = yield PlacesUtils.bookmarks.insert({
+ let fxBmk2 = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.toolbarGuid,
url: "http://getfirefox.com",
title: "Download Firefox",
});
- yield PlacesUtils.keywords.insert({
+ await PlacesUtils.keywords.insert({
keyword: "the_keyword",
url: "http://getfirefox.com",
});
- yield startTracking();
+ await startTracking();
_("Remove the keyword");
- yield PlacesUtils.keywords.remove("the_keyword");
+ await PlacesUtils.keywords.remove("the_keyword");
- yield verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
+ await verifyTrackedItems([fxBmk1.guid, fxBmk2.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemPostDataChanged() {
+add_task(async function test_onItemPostDataChanged() {
_("Post data changes should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert a bookmark");
let fx_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = engine._store.GUIDForId(fx_id);
_(`Firefox GUID: ${fx_guid}`);
- yield startTracking();
+ await startTracking();
// PlacesUtils.setPostDataForBookmark is deprecated, but still used by
// PlacesTransactions.NewBookmark.
_("Post data for the bookmark should be ignored");
- yield PlacesUtils.setPostDataForBookmark(fx_id, "postData");
- yield verifyTrackedItems([]);
+ await PlacesUtils.setPostDataForBookmark(fx_id, "postData");
+ await verifyTrackedItems([]);
do_check_eq(tracker.score, 0);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemAnnoChanged() {
+add_task(async function test_onItemAnnoChanged() {
_("Item annotations should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folderGUID = engine._store.GUIDForId(folder);
_("Track changes to annos.");
let b = PlacesUtils.bookmarks.insertBookmark(
folder, Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bGUID = engine._store.GUIDForId(b);
_("New item is " + b);
_("GUID: " + bGUID);
- yield startTracking();
+ await startTracking();
PlacesUtils.annotations.setItemAnnotation(
b, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
PlacesUtils.annotations.EXPIRE_NEVER);
// bookmark should be tracked, folder should not.
- yield verifyTrackedItems([bGUID]);
+ await verifyTrackedItems([bGUID]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
+ await resetTracker();
PlacesUtils.annotations.removeItemAnnotation(b,
PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO);
- yield verifyTrackedItems([bGUID]);
+ await verifyTrackedItems([bGUID]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemAdded_filtered_root() {
+add_task(async function test_onItemAdded_filtered_root() {
_("Items outside the change roots should not be tracked");
try {
- yield startTracking();
+ await startTracking();
_("Create a new root");
let rootID = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.placesRoot,
"New root",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let rootGUID = engine._store.GUIDForId(rootID);
_(`New root GUID: ${rootGUID}`);
@@ -858,180 +858,180 @@ add_task(function* test_onItemAdded_filt
let rootBmkID = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.placesRoot,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let rootBmkGUID = engine._store.GUIDForId(rootBmkID);
_(`New Places root bookmark GUID: ${rootBmkGUID}`);
_("New root and bookmark should be ignored");
- yield verifyTrackedItems([]);
+ await verifyTrackedItems([]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemDeleted_filtered_root() {
+add_task(async function test_onItemDeleted_filtered_root() {
_("Deleted items outside the change roots should not be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert a bookmark underneath the Places root");
let rootBmkID = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.placesRoot,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let rootBmkGUID = engine._store.GUIDForId(rootBmkID);
_(`New Places root bookmark GUID: ${rootBmkGUID}`);
- yield startTracking();
+ await startTracking();
PlacesUtils.bookmarks.removeItem(rootBmkID);
- yield verifyTrackedItems([]);
+ await verifyTrackedItems([]);
// We'll still increment the counter for the removed item.
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onPageAnnoChanged() {
+add_task(async function test_onPageAnnoChanged() {
_("Page annotations should not be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert a bookmark without an annotation");
let pageURI = Utils.makeURI("http://getfirefox.com");
PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
pageURI,
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
- yield startTracking();
+ await startTracking();
_("Add a page annotation");
PlacesUtils.annotations.setPageAnnotation(pageURI, "URIProperties/characterSet",
"UTF-8", 0, PlacesUtils.annotations.EXPIRE_NEVER);
- yield verifyTrackedItems([]);
+ await verifyTrackedItems([]);
do_check_eq(tracker.score, 0);
- yield resetTracker();
+ await resetTracker();
_("Remove the page annotation");
PlacesUtils.annotations.removePageAnnotation(pageURI,
"URIProperties/characterSet");
- yield verifyTrackedItems([]);
+ await verifyTrackedItems([]);
do_check_eq(tracker.score, 0);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onFaviconChanged() {
+add_task(async function test_onFaviconChanged() {
_("Favicon changes should not be tracked");
try {
- yield stopTracking();
+ await stopTracking();
let pageURI = Utils.makeURI("http://getfirefox.com");
let iconURI = Utils.makeURI("http://getfirefox.com/icon");
PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
pageURI,
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
- yield PlacesTestUtils.addVisits(pageURI);
+ await PlacesTestUtils.addVisits(pageURI);
- yield startTracking();
+ await startTracking();
_("Favicon annotations should be ignored");
let iconURL = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAA" +
"AAAA6fptVAAAACklEQVQI12NgAAAAAgAB4iG8MwAAAABJRU5ErkJggg==";
PlacesUtils.favicons.replaceFaviconDataFromDataURL(iconURI, iconURL, 0,
Services.scriptSecurityManager.getSystemPrincipal());
- yield new Promise(resolve => {
+ await new Promise(resolve => {
PlacesUtils.favicons.setAndFetchFaviconForPage(pageURI, iconURI, true,
PlacesUtils.favicons.FAVICON_LOAD_NON_PRIVATE, (iconURI, dataLen, data, mimeType) => {
resolve();
},
Services.scriptSecurityManager.getSystemPrincipal());
});
- yield verifyTrackedItems([]);
+ await verifyTrackedItems([]);
do_check_eq(tracker.score, 0);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onLivemarkAdded() {
+add_task(async function test_onLivemarkAdded() {
_("New livemarks should be tracked");
try {
- yield startTracking();
+ await startTracking();
_("Insert a livemark");
- let livemark = yield PlacesUtils.livemarks.addLivemark({
+ let livemark = await PlacesUtils.livemarks.addLivemark({
parentGuid: PlacesUtils.bookmarks.menuGuid,
// Use a local address just in case, to avoid potential aborts for
// non-local connections.
feedURI: Utils.makeURI("http://localhost:0"),
});
// Prevent the livemark refresh timer from requesting the URI.
livemark.terminate();
- yield verifyTrackedItems(["menu", livemark.guid]);
+ await verifyTrackedItems(["menu", livemark.guid]);
// Two observer notifications: one for creating the livemark folder, and
// one for setting the "livemark/feedURI" anno on the folder.
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onLivemarkDeleted() {
+add_task(async function test_onLivemarkDeleted() {
_("Deleted livemarks should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert a livemark");
- let livemark = yield PlacesUtils.livemarks.addLivemark({
+ let livemark = await PlacesUtils.livemarks.addLivemark({
parentGuid: PlacesUtils.bookmarks.menuGuid,
feedURI: Utils.makeURI("http://localhost:0"),
});
livemark.terminate();
- yield startTracking();
+ await startTracking();
_("Remove a livemark");
- yield PlacesUtils.livemarks.removeLivemark({
+ await PlacesUtils.livemarks.removeLivemark({
guid: livemark.guid,
});
- yield verifyTrackedItems(["menu", livemark.guid]);
+ await verifyTrackedItems(["menu", livemark.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemMoved() {
+add_task(async function test_onItemMoved() {
_("Items moved via the synchronous API should be tracked");
try {
let fx_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
@@ -1040,136 +1040,136 @@ add_task(function* test_onItemMoved() {
let tb_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = engine._store.GUIDForId(tb_id);
_("Thunderbird GUID: " + tb_guid);
- yield startTracking();
+ await startTracking();
// Moving within the folder will just track the folder.
PlacesUtils.bookmarks.moveItem(
tb_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0);
- yield verifyTrackedItems(['menu']);
+ await verifyTrackedItems(['menu']);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
- yield PlacesTestUtils.markBookmarksAsSynced();
+ await resetTracker();
+ await PlacesTestUtils.markBookmarksAsSynced();
// Moving a bookmark to a different folder will track the old
// folder, the new folder and the bookmark.
PlacesUtils.bookmarks.moveItem(fx_id, PlacesUtils.bookmarks.toolbarFolder,
PlacesUtils.bookmarks.DEFAULT_INDEX);
- yield verifyTrackedItems(['menu', 'toolbar', fx_guid]);
+ await verifyTrackedItems(['menu', 'toolbar', fx_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemMoved_update() {
+add_task(async function test_async_onItemMoved_update() {
_("Items moved via the asynchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
- let fxBmk = yield PlacesUtils.bookmarks.insert({
+ let fxBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
- let tbBmk = yield PlacesUtils.bookmarks.insert({
+ let tbBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getthunderbird.com",
title: "Get Thunderbird!",
});
- yield startTracking();
+ await startTracking();
_("Repositioning a bookmark should track the folder");
- yield PlacesUtils.bookmarks.update({
+ await PlacesUtils.bookmarks.update({
guid: tbBmk.guid,
parentGuid: PlacesUtils.bookmarks.menuGuid,
index: 0,
});
- yield verifyTrackedItems(['menu']);
+ await verifyTrackedItems(['menu']);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
+ await resetTracker();
_("Reparenting a bookmark should track both folders and the bookmark");
- yield PlacesUtils.bookmarks.update({
+ await PlacesUtils.bookmarks.update({
guid: tbBmk.guid,
parentGuid: PlacesUtils.bookmarks.toolbarGuid,
index: PlacesUtils.bookmarks.DEFAULT_INDEX,
});
- yield verifyTrackedItems(['menu', 'toolbar', tbBmk.guid]);
+ await verifyTrackedItems(['menu', 'toolbar', tbBmk.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemMoved_reorder() {
+add_task(async function test_async_onItemMoved_reorder() {
_("Items reordered via the asynchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Insert out-of-order bookmarks");
- let fxBmk = yield PlacesUtils.bookmarks.insert({
+ let fxBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
_(`Firefox GUID: ${fxBmk.guid}`);
- let tbBmk = yield PlacesUtils.bookmarks.insert({
+ let tbBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getthunderbird.com",
title: "Get Thunderbird!",
});
_(`Thunderbird GUID: ${tbBmk.guid}`);
- let mozBmk = yield PlacesUtils.bookmarks.insert({
+ let mozBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://mozilla.org",
title: "Mozilla",
});
_(`Mozilla GUID: ${mozBmk.guid}`);
- yield startTracking();
+ await startTracking();
_("Reorder bookmarks");
- yield PlacesUtils.bookmarks.reorder(PlacesUtils.bookmarks.menuGuid,
+ await PlacesUtils.bookmarks.reorder(PlacesUtils.bookmarks.menuGuid,
[mozBmk.guid, fxBmk.guid, tbBmk.guid]);
// As with setItemIndex, we should only track the folder if we reorder
// its children, but we should bump the score for every changed item.
- yield verifyTrackedItems(["menu"]);
+ await verifyTrackedItems(["menu"]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemMoved_setItemIndex() {
+add_task(async function test_onItemMoved_setItemIndex() {
_("Items with updated indices should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
let folder_id = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"Test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folder_guid = engine._store.GUIDForId(folder_id);
_(`Folder GUID: ${folder_guid}`);
@@ -1193,45 +1193,45 @@ add_task(function* test_onItemMoved_setI
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("https://mozilla.org"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Mozilla"
);
let moz_guid = engine._store.GUIDForId(moz_id);
_(`Mozilla GUID: ${moz_guid}`);
- yield startTracking();
+ await startTracking();
// PlacesSortFolderByNameTransaction exercises
// PlacesUtils.bookmarks.setItemIndex.
let txn = new PlacesSortFolderByNameTransaction(folder_id);
// We're reordering items within the same folder, so only the folder
// should be tracked.
_("Execute the sort folder transaction");
txn.doTransaction();
- yield verifyTrackedItems([folder_guid]);
+ await verifyTrackedItems([folder_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
+ await resetTracker();
_("Undo the sort folder transaction");
txn.undoTransaction();
- yield verifyTrackedItems([folder_guid]);
+ await verifyTrackedItems([folder_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemDeleted_removeFolderTransaction() {
+add_task(async function test_onItemDeleted_removeFolderTransaction() {
_("Folders removed in a transaction should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
_("Create a folder with two children");
let folder_id = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"Test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folder_guid = engine._store.GUIDForId(folder_id);
_(`Folder GUID: ${folder_guid}`);
@@ -1245,49 +1245,49 @@ add_task(function* test_onItemDeleted_re
let tb_id = PlacesUtils.bookmarks.insertBookmark(
folder_id,
Utils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = engine._store.GUIDForId(tb_id);
_(`Thunderbird GUID: ${tb_guid}`);
- yield startTracking();
+ await startTracking();
let txn = PlacesUtils.bookmarks.getRemoveFolderTransaction(folder_id);
// We haven't executed the transaction yet.
- yield verifyTrackerEmpty();
+ await verifyTrackerEmpty();
_("Execute the remove folder transaction");
txn.doTransaction();
- yield verifyTrackedItems(["menu", folder_guid, fx_guid, tb_guid]);
+ await verifyTrackedItems(["menu", folder_guid, fx_guid, tb_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
- yield resetTracker();
+ await resetTracker();
_("Undo the remove folder transaction");
txn.undoTransaction();
// At this point, the restored folder has the same ID, but a different GUID.
- let new_folder_guid = yield PlacesUtils.promiseItemGuid(folder_id);
+ let new_folder_guid = await PlacesUtils.promiseItemGuid(folder_id);
- yield verifyTrackedItems(["menu", new_folder_guid]);
+ await verifyTrackedItems(["menu", new_folder_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
- yield resetTracker();
+ await resetTracker();
_("Redo the transaction");
txn.redoTransaction();
- yield verifyTrackedItems(["menu", new_folder_guid]);
+ await verifyTrackedItems(["menu", new_folder_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_treeMoved() {
+add_task(async function test_treeMoved() {
_("Moving an entire tree of bookmarks should track the parents");
try {
// Create a couple of parent folders.
let folder1_id = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"First test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
@@ -1309,177 +1309,177 @@ add_task(function* test_treeMoved() {
let fx_guid = engine._store.GUIDForId(fx_id);
let tb_id = PlacesUtils.bookmarks.insertBookmark(
folder2_id,
Utils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = engine._store.GUIDForId(tb_id);
- yield startTracking();
+ await startTracking();
// Move folder 2 to be a sibling of folder1.
PlacesUtils.bookmarks.moveItem(
folder2_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0);
// the menu and both folders should be tracked, the children should not be.
- yield verifyTrackedItems(['menu', folder1_guid, folder2_guid]);
+ await verifyTrackedItems(['menu', folder1_guid, folder2_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemDeleted() {
+add_task(async function test_onItemDeleted() {
_("Bookmarks deleted via the synchronous API should be tracked");
try {
let fx_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
let fx_guid = engine._store.GUIDForId(fx_id);
let tb_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = engine._store.GUIDForId(tb_id);
- yield startTracking();
+ await startTracking();
// Delete the last item - the item and parent should be tracked.
PlacesUtils.bookmarks.removeItem(tb_id);
- yield verifyTrackedItems(['menu', tb_guid]);
+ await verifyTrackedItems(['menu', tb_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemDeleted() {
+add_task(async function test_async_onItemDeleted() {
_("Bookmarks deleted via the asynchronous API should be tracked");
try {
- yield stopTracking();
+ await stopTracking();
- let fxBmk = yield PlacesUtils.bookmarks.insert({
+ let fxBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
- let tbBmk = yield PlacesUtils.bookmarks.insert({
+ let tbBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getthunderbird.com",
title: "Get Thunderbird!",
});
- yield startTracking();
+ await startTracking();
_("Delete the first item");
- yield PlacesUtils.bookmarks.remove(fxBmk.guid);
+ await PlacesUtils.bookmarks.remove(fxBmk.guid);
- yield verifyTrackedItems(["menu", fxBmk.guid]);
+ await verifyTrackedItems(["menu", fxBmk.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_async_onItemDeleted_eraseEverything() {
+add_task(async function test_async_onItemDeleted_eraseEverything() {
_("Erasing everything should track all deleted items");
try {
- yield stopTracking();
+ await stopTracking();
- let fxBmk = yield PlacesUtils.bookmarks.insert({
+ let fxBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.mobileGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
_(`Firefox GUID: ${fxBmk.guid}`);
- let tbBmk = yield PlacesUtils.bookmarks.insert({
+ let tbBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.mobileGuid,
url: "http://getthunderbird.com",
title: "Get Thunderbird!",
});
_(`Thunderbird GUID: ${tbBmk.guid}`);
- let mozBmk = yield PlacesUtils.bookmarks.insert({
+ let mozBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://mozilla.org",
title: "Mozilla",
});
_(`Mozilla GUID: ${mozBmk.guid}`);
- let mdnBmk = yield PlacesUtils.bookmarks.insert({
+ let mdnBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://developer.mozilla.org",
title: "MDN",
});
_(`MDN GUID: ${mdnBmk.guid}`);
- let bugsFolder = yield PlacesUtils.bookmarks.insert({
+ let bugsFolder = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_FOLDER,
parentGuid: PlacesUtils.bookmarks.toolbarGuid,
title: "Bugs",
});
_(`Bugs folder GUID: ${bugsFolder.guid}`);
- let bzBmk = yield PlacesUtils.bookmarks.insert({
+ let bzBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: bugsFolder.guid,
url: "https://bugzilla.mozilla.org",
title: "Bugzilla",
});
_(`Bugzilla GUID: ${bzBmk.guid}`);
- let bugsChildFolder = yield PlacesUtils.bookmarks.insert({
+ let bugsChildFolder = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_FOLDER,
parentGuid: bugsFolder.guid,
title: "Bugs child",
});
_(`Bugs child GUID: ${bugsChildFolder.guid}`);
- let bugsGrandChildBmk = yield PlacesUtils.bookmarks.insert({
+ let bugsGrandChildBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: bugsChildFolder.guid,
url: "https://example.com",
title: "Bugs grandchild",
});
_(`Bugs grandchild GUID: ${bugsGrandChildBmk.guid}`);
- yield startTracking();
+ await startTracking();
// Simulate moving a synced item into a new folder. Deleting the folder
// should write a tombstone for the item, but not the folder.
- yield PlacesTestUtils.setBookmarkSyncFields({
+ await PlacesTestUtils.setBookmarkSyncFields({
guid: bugsChildFolder.guid,
syncStatus: PlacesUtils.bookmarks.SYNC_STATUS.NEW,
});
- yield PlacesUtils.bookmarks.eraseEverything();
+ await PlacesUtils.bookmarks.eraseEverything();
// bugsChildFolder's sync status is still "NEW", so it shouldn't be
// tracked. bugsGrandChildBmk is "NORMAL", so we *should* write a
// tombstone and track it.
- yield verifyTrackedItems(["menu", mozBmk.guid, mdnBmk.guid, "toolbar",
+ await verifyTrackedItems(["menu", mozBmk.guid, mdnBmk.guid, "toolbar",
bugsFolder.guid, "mobile", fxBmk.guid,
tbBmk.guid, "unfiled", bzBmk.guid,
bugsGrandChildBmk.guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 8);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemDeleted_removeFolderChildren() {
+add_task(async function test_onItemDeleted_removeFolderChildren() {
_("Removing a folder's children should track the folder and its children");
try {
let fx_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.mobileFolderId,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
@@ -1498,30 +1498,30 @@ add_task(function* test_onItemDeleted_re
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("https://mozilla.org"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Mozilla"
);
let moz_guid = engine._store.GUIDForId(moz_id);
_(`Mozilla GUID: ${moz_guid}`);
- yield startTracking();
+ await startTracking();
_(`Mobile root ID: ${PlacesUtils.mobileFolderId}`);
PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.mobileFolderId);
- yield verifyTrackedItems(["mobile", fx_guid, tb_guid]);
+ await verifyTrackedItems(["mobile", fx_guid, tb_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 2);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_onItemDeleted_tree() {
+add_task(async function test_onItemDeleted_tree() {
_("Deleting a tree of bookmarks should track all items");
try {
// Create a couple of parent folders.
let folder1_id = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"First test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
@@ -1543,266 +1543,266 @@ add_task(function* test_onItemDeleted_tr
let fx_guid = engine._store.GUIDForId(fx_id);
let tb_id = PlacesUtils.bookmarks.insertBookmark(
folder2_id,
Utils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = engine._store.GUIDForId(tb_id);
- yield startTracking();
+ await startTracking();
// Delete folder2 - everything we created should be tracked.
PlacesUtils.bookmarks.removeItem(folder2_id);
- yield verifyTrackedItems([fx_guid, tb_guid, folder1_guid, folder2_guid]);
+ await verifyTrackedItems([fx_guid, tb_guid, folder1_guid, folder2_guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_mobile_query() {
+add_task(async function test_mobile_query() {
_("Ensure we correctly create the mobile query");
try {
- yield startTracking();
+ await startTracking();
// Creates the organizer queries as a side effect.
let leftPaneId = PlacesUIUtils.leftPaneFolderId;
_(`Left pane root ID: ${leftPaneId}`);
- let allBookmarksGuids = yield fetchGuidsWithAnno("PlacesOrganizer/OrganizerQuery",
+ let allBookmarksGuids = await fetchGuidsWithAnno("PlacesOrganizer/OrganizerQuery",
"AllBookmarks");
equal(allBookmarksGuids.length, 1, "Should create folder with all bookmarks queries");
let allBookmarkGuid = allBookmarksGuids[0];
_("Try creating query after organizer is ready");
tracker._ensureMobileQuery();
- let queryGuids = yield fetchGuidsWithAnno("PlacesOrganizer/OrganizerQuery",
+ let queryGuids = await fetchGuidsWithAnno("PlacesOrganizer/OrganizerQuery",
"MobileBookmarks");
equal(queryGuids.length, 0, "Should not create query without any mobile bookmarks");
_("Insert mobile bookmark, then create query");
- let mozBmk = yield PlacesUtils.bookmarks.insert({
+ let mozBmk = await PlacesUtils.bookmarks.insert({
parentGuid: PlacesUtils.bookmarks.mobileGuid,
url: "https://mozilla.org",
});
tracker._ensureMobileQuery();
- queryGuids = yield fetchGuidsWithAnno("PlacesOrganizer/OrganizerQuery",
+ queryGuids = await fetchGuidsWithAnno("PlacesOrganizer/OrganizerQuery",
"MobileBookmarks");
equal(queryGuids.length, 1, "Should create query once mobile bookmarks exist");
let queryGuid = queryGuids[0];
- let queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid);
+ let queryInfo = await PlacesUtils.bookmarks.fetch(queryGuid);
equal(queryInfo.url, `place:folder=${PlacesUtils.mobileFolderId}`, "Query should point to mobile root");
equal(queryInfo.title, "Mobile Bookmarks", "Query title should be localized");
equal(queryInfo.parentGuid, allBookmarkGuid, "Should append mobile query to all bookmarks queries");
_("Rename root and query, then recreate");
- yield PlacesUtils.bookmarks.update({
+ await PlacesUtils.bookmarks.update({
guid: PlacesUtils.bookmarks.mobileGuid,
title: "renamed root",
});
- yield PlacesUtils.bookmarks.update({
+ await PlacesUtils.bookmarks.update({
guid: queryGuid,
title: "renamed query",
});
tracker._ensureMobileQuery();
- let rootInfo = yield PlacesUtils.bookmarks.fetch(PlacesUtils.bookmarks.mobileGuid);
+ let rootInfo = await PlacesUtils.bookmarks.fetch(PlacesUtils.bookmarks.mobileGuid);
equal(rootInfo.title, "Mobile Bookmarks", "Should fix root title");
- queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid);
+ queryInfo = await PlacesUtils.bookmarks.fetch(queryGuid);
equal(queryInfo.title, "Mobile Bookmarks", "Should fix query title");
_("Point query to different folder");
- yield PlacesUtils.bookmarks.update({
+ await PlacesUtils.bookmarks.update({
guid: queryGuid,
url: "place:folder=BOOKMARKS_MENU",
});
tracker._ensureMobileQuery();
- queryInfo = yield PlacesUtils.bookmarks.fetch(queryGuid);
+ queryInfo = await PlacesUtils.bookmarks.fetch(queryGuid);
equal(queryInfo.url.href, `place:folder=${PlacesUtils.mobileFolderId}`,
"Should fix query URL to point to mobile root");
_("We shouldn't track the query or the left pane root");
- yield verifyTrackedItems([mozBmk.guid, "mobile"]);
+ await verifyTrackedItems([mozBmk.guid, "mobile"]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 5);
} finally {
_("Clean up.");
- yield cleanup();
+ await cleanup();
}
});
-add_task(function* test_skip_migration() {
- yield* insertBookmarksToMigrate();
+add_task(async function test_skip_migration() {
+ await insertBookmarksToMigrate();
- let originalTombstones = yield PlacesTestUtils.fetchSyncTombstones();
- let originalFields = yield PlacesTestUtils.fetchBookmarkSyncFields(
+ let originalTombstones = await PlacesTestUtils.fetchSyncTombstones();
+ let originalFields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
let filePath = OS.Path.join(OS.Constants.Path.profileDir, "weave", "changes",
"bookmarks.json");
_("No tracker file");
{
- yield Utils.jsonRemove("changes/bookmarks", tracker);
- ok(!(yield OS.File.exists(filePath)), "Tracker file should not exist");
+ await Utils.jsonRemove("changes/bookmarks", tracker);
+ ok(!(await OS.File.exists(filePath)), "Tracker file should not exist");
- yield tracker._migrateOldEntries();
+ await tracker._migrateOldEntries();
- let fields = yield PlacesTestUtils.fetchBookmarkSyncFields(
+ let fields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
deepEqual(fields, originalFields,
"Sync fields should not change if tracker file is missing");
- let tombstones = yield PlacesTestUtils.fetchSyncTombstones();
+ let tombstones = await PlacesTestUtils.fetchSyncTombstones();
deepEqual(tombstones, originalTombstones,
"Tombstones should not change if tracker file is missing");
}
_("Existing tracker file; engine disabled");
{
- yield Utils.jsonSave("changes/bookmarks", tracker, {});
- ok(yield OS.File.exists(filePath),
+ await Utils.jsonSave("changes/bookmarks", tracker, {});
+ ok(await OS.File.exists(filePath),
"Tracker file should exist before disabled engine migration");
engine.disabled = true;
- yield tracker._migrateOldEntries();
+ await tracker._migrateOldEntries();
engine.disabled = false;
- let fields = yield PlacesTestUtils.fetchBookmarkSyncFields(
+ let fields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
deepEqual(fields, originalFields,
"Sync fields should not change on disabled engine migration");
- let tombstones = yield PlacesTestUtils.fetchSyncTombstones();
+ let tombstones = await PlacesTestUtils.fetchSyncTombstones();
deepEqual(tombstones, originalTombstones,
"Tombstones should not change if tracker file is missing");
- ok(!(yield OS.File.exists(filePath)),
+ ok(!(await OS.File.exists(filePath)),
"Tracker file should be deleted after disabled engine migration");
}
_("Existing tracker file; first sync");
{
- yield Utils.jsonSave("changes/bookmarks", tracker, {});
- ok(yield OS.File.exists(filePath),
+ await Utils.jsonSave("changes/bookmarks", tracker, {});
+ ok(await OS.File.exists(filePath),
"Tracker file should exist before first sync migration");
engine.lastSync = 0;
- yield tracker._migrateOldEntries();
+ await tracker._migrateOldEntries();
- let fields = yield PlacesTestUtils.fetchBookmarkSyncFields(
+ let fields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
deepEqual(fields, originalFields,
"Sync fields should not change on first sync migration");
- let tombstones = yield PlacesTestUtils.fetchSyncTombstones();
+ let tombstones = await PlacesTestUtils.fetchSyncTombstones();
deepEqual(tombstones, originalTombstones,
"Tombstones should not change if tracker file is missing");
- ok(!(yield OS.File.exists(filePath)),
+ ok(!(await OS.File.exists(filePath)),
"Tracker file should be deleted after first sync migration");
}
- yield* cleanup();
+ await cleanup();
});
-add_task(function* test_migrate_empty_tracker() {
+add_task(async function test_migrate_empty_tracker() {
_("Migration with empty tracker file");
- yield* insertBookmarksToMigrate();
+ await insertBookmarksToMigrate();
- yield Utils.jsonSave("changes/bookmarks", tracker, {});
+ await Utils.jsonSave("changes/bookmarks", tracker, {});
engine.lastSync = Date.now() / 1000;
- yield tracker._migrateOldEntries();
+ await tracker._migrateOldEntries();
- let fields = yield PlacesTestUtils.fetchBookmarkSyncFields(
+ let fields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg", "r5ouWdPB3l28", "YK5Bdq5MIqL6");
for (let field of fields) {
equal(field.syncStatus, PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
`Sync status of migrated bookmark ${field.guid} should be NORMAL`);
strictEqual(field.syncChangeCounter, 0,
`Change counter of migrated bookmark ${field.guid} should be 0`);
}
- let tombstones = yield PlacesTestUtils.fetchSyncTombstones();
+ let tombstones = await PlacesTestUtils.fetchSyncTombstones();
deepEqual(tombstones, [], "Migration should delete old tombstones");
let filePath = OS.Path.join(OS.Constants.Path.profileDir, "weave", "changes",
"bookmarks.json");
- ok(!(yield OS.File.exists(filePath)),
+ ok(!(await OS.File.exists(filePath)),
"Tracker file should be deleted after empty tracker migration");
- yield* cleanup();
+ await cleanup();
});
-add_task(function* test_migrate_existing_tracker() {
+add_task(async function test_migrate_existing_tracker() {
_("Migration with existing tracker entries");
- yield* insertBookmarksToMigrate();
+ await insertBookmarksToMigrate();
- let mozBmk = yield PlacesUtils.bookmarks.fetch("0gtWTOgYcoJD");
- let fxBmk = yield PlacesUtils.bookmarks.fetch("0dbpnMdxKxfg");
+ let mozBmk = await PlacesUtils.bookmarks.fetch("0gtWTOgYcoJD");
+ let fxBmk = await PlacesUtils.bookmarks.fetch("0dbpnMdxKxfg");
let mozChangeTime = Math.floor(mozBmk.lastModified / 1000) - 60;
let fxChangeTime = Math.floor(fxBmk.lastModified / 1000) + 60;
- yield Utils.jsonSave("changes/bookmarks", tracker, {
+ await Utils.jsonSave("changes/bookmarks", tracker, {
"0gtWTOgYcoJD": mozChangeTime,
"0dbpnMdxKxfg": {
modified: fxChangeTime,
deleted: false,
},
"3kdIPWHs9hHC": {
modified: 1479494951,
deleted: true,
},
"l7DlMy2lL1jL": 1479496460,
});
engine.lastSync = Date.now() / 1000;
- yield tracker._migrateOldEntries();
+ await tracker._migrateOldEntries();
- let changedFields = yield PlacesTestUtils.fetchBookmarkSyncFields(
+ let changedFields = await PlacesTestUtils.fetchBookmarkSyncFields(
"0gtWTOgYcoJD", "0dbpnMdxKxfg");
for (let field of changedFields) {
if (field.guid == "0gtWTOgYcoJD") {
ok(field.lastModified.getTime(), mozBmk.lastModified.getTime(),
`Modified time for ${field.guid} should not be reset to older change time`);
} else if (field.guid == "0dbpnMdxKxfg") {
equal(field.lastModified.getTime(), fxChangeTime * 1000,
`Modified time for ${field.guid} should be updated to newer change time`);
}
equal(field.syncStatus, PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
`Sync status of migrated bookmark ${field.guid} should be NORMAL`);
ok(field.syncChangeCounter > 0,
`Change counter of migrated bookmark ${field.guid} should be > 0`);
}
- let unchangedFields = yield PlacesTestUtils.fetchBookmarkSyncFields(
+ let unchangedFields = await PlacesTestUtils.fetchBookmarkSyncFields(
"r5ouWdPB3l28", "YK5Bdq5MIqL6");
for (let field of unchangedFields) {
equal(field.syncStatus, PlacesUtils.bookmarks.SYNC_STATUS.NORMAL,
`Sync status of unchanged bookmark ${field.guid} should be NORMAL`);
strictEqual(field.syncChangeCounter, 0,
`Change counter of unchanged bookmark ${field.guid} should be 0`);
}
- let tombstones = yield PlacesTestUtils.fetchSyncTombstones();
- yield deepEqual(tombstones, [{
+ let tombstones = await PlacesTestUtils.fetchSyncTombstones();
+ deepEqual(tombstones, [{
guid: "3kdIPWHs9hHC",
dateRemoved: new Date(1479494951 * 1000),
}, {
guid: "l7DlMy2lL1jL",
dateRemoved: new Date(1479496460 * 1000),
}], "Should write tombstones for deleted tracked items");
let filePath = OS.Path.join(OS.Constants.Path.profileDir, "weave", "changes",
"bookmarks.json");
- ok(!(yield OS.File.exists(filePath)),
+ ok(!(await OS.File.exists(filePath)),
"Tracker file should be deleted after existing tracker migration");
- yield* cleanup();
+ await cleanup();
});
function run_test() {
initTestLogging("Trace");
Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
Log.repository.getLogger("Sync.Store.Bookmarks").level = Log.Level.Trace;
Log.repository.getLogger("Sync.Tracker.Bookmarks").level = Log.Level.Trace;
--- a/services/sync/tests/unit/test_bookmark_validator.js
+++ b/services/sync/tests/unit/test_bookmark_validator.js
@@ -313,23 +313,23 @@ function validationPing(server, client,
recordCount: server.length,
problems: validator.compareServerWithClient(server, client).problemData,
};
Svc.Obs.notify("weave:engine:validate:finish", data, "bookmarks");
Svc.Obs.notify("weave:service:sync:finish");
}, true); // Allow "failing" pings, since having validation info indicates failure.
}
-add_task(function *test_telemetry_integration() {
+add_task(async function test_telemetry_integration() {
let {server, client} = getDummyServerAndClient();
// remove "c"
server.pop();
server[0].children.pop();
const duration = 50;
- let ping = yield validationPing(server, client, duration);
+ let ping = await validationPing(server, client, duration);
ok(ping.engines);
let bme = ping.engines.find(e => e.name === "bookmarks");
ok(bme);
ok(bme.validation);
ok(bme.validation.problems)
equal(bme.validation.checked, server.length);
equal(bme.validation.took, duration);
bme.validation.problems.sort((a, b) => String.localeCompare(a.name, b.name));
--- a/services/sync/tests/unit/test_browserid_identity.js
+++ b/services/sync/tests/unit/test_browserid_identity.js
@@ -72,27 +72,27 @@ function run_test() {
add_test(function test_initial_state() {
_("Verify initial state");
do_check_false(!!browseridManager._token);
do_check_false(browseridManager.hasValidToken());
run_next_test();
}
);
-add_task(function* test_initialializeWithCurrentIdentity() {
+add_task(async function test_initializeWithCurrentIdentity() {
_("Verify start after initializeWithCurrentIdentity");
browseridManager.initializeWithCurrentIdentity();
- yield browseridManager.whenReadyToAuthenticate.promise;
+ await browseridManager.whenReadyToAuthenticate.promise;
do_check_true(!!browseridManager._token);
do_check_true(browseridManager.hasValidToken());
do_check_eq(browseridManager.account, identityConfig.fxaccount.user.email);
}
);
-add_task(function* test_initialializeWithAuthErrorAndDeletedAccount() {
+add_task(async function test_initializeWithAuthErrorAndDeletedAccount() {
_("Verify sync unpair after initializeWithCurrentIdentity with auth error + account deleted");
var identityConfig = makeIdentityConfig();
var browseridManager = new BrowserIDManager();
// Use the real `_getAssertion` method that calls
// `mockFxAClient.signCertificate`.
let fxaInternal = makeFxAccountsInternalMock(identityConfig);
@@ -120,38 +120,38 @@ add_task(function* test_initialializeWit
accountStatusCalled = true;
return Promise.resolve(false);
}
};
let mockFxAClient = new MockFxAccountsClient();
browseridManager._fxaService.internal._fxAccountsClient = mockFxAClient;
- yield browseridManager.initializeWithCurrentIdentity();
- yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ await browseridManager.initializeWithCurrentIdentity();
+ await Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
"should reject due to an auth error");
do_check_true(signCertificateCalled);
do_check_true(accountStatusCalled);
do_check_false(browseridManager.account);
do_check_false(browseridManager._token);
do_check_false(browseridManager.hasValidToken());
do_check_false(browseridManager.account);
});
-add_task(function* test_initialializeWithNoKeys() {
+add_task(async function test_initializeWithNoKeys() {
_("Verify start after initializeWithCurrentIdentity without kA, kB or keyFetchToken");
let identityConfig = makeIdentityConfig();
delete identityConfig.fxaccount.user.kA;
delete identityConfig.fxaccount.user.kB;
// there's no keyFetchToken by default, so the initialize should fail.
configureFxAccountIdentity(browseridManager, identityConfig);
- yield browseridManager.initializeWithCurrentIdentity();
- yield browseridManager.whenReadyToAuthenticate.promise;
+ await browseridManager.initializeWithCurrentIdentity();
+ await browseridManager.whenReadyToAuthenticate.promise;
do_check_eq(Status.login, LOGIN_SUCCEEDED, "login succeeded even without keys");
do_check_false(browseridManager._canFetchKeys(), "_canFetchKeys reflects lack of keys");
do_check_eq(browseridManager._token, null, "we don't have a token");
});
add_test(function test_getResourceAuthenticator() {
_("BrowserIDManager supplies a Resource Authenticator callback which returns a Hawk header.");
configureFxAccountIdentity(browseridManager);
@@ -301,46 +301,46 @@ add_test(function test_RESTResourceAuthe
// window.
do_check_eq(getTimestamp(authHeader), now - 12 * HOUR_MS);
do_check_true(
(getTimestampDelta(authHeader, now) - 12 * HOUR_MS) < 2 * MINUTE_MS);
run_next_test();
});
-add_task(function* test_ensureLoggedIn() {
+add_task(async function test_ensureLoggedIn() {
configureFxAccountIdentity(browseridManager);
- yield browseridManager.initializeWithCurrentIdentity();
- yield browseridManager.whenReadyToAuthenticate.promise;
+ await browseridManager.initializeWithCurrentIdentity();
+ await browseridManager.whenReadyToAuthenticate.promise;
Assert.equal(Status.login, LOGIN_SUCCEEDED, "original initialize worked");
- yield browseridManager.ensureLoggedIn();
+ await browseridManager.ensureLoggedIn();
Assert.equal(Status.login, LOGIN_SUCCEEDED, "original ensureLoggedIn worked");
Assert.ok(browseridManager._shouldHaveSyncKeyBundle,
"_shouldHaveSyncKeyBundle should always be true after ensureLogin completes.");
// arrange for no logged in user.
let fxa = browseridManager._fxaService
let signedInUser = fxa.internal.currentAccountState.storageManager.accountData;
fxa.internal.currentAccountState.storageManager.accountData = null;
browseridManager.initializeWithCurrentIdentity();
Assert.ok(!browseridManager._shouldHaveSyncKeyBundle,
"_shouldHaveSyncKeyBundle should be false so we know we are testing what we think we are.");
Status.login = LOGIN_FAILED_NO_USERNAME;
- yield Assert.rejects(browseridManager.ensureLoggedIn(), "expecting rejection due to no user");
+ await Assert.rejects(browseridManager.ensureLoggedIn(), "expecting rejection due to no user");
Assert.ok(browseridManager._shouldHaveSyncKeyBundle,
"_shouldHaveSyncKeyBundle should always be true after ensureLogin completes.");
// Restore the logged in user to what it was.
fxa.internal.currentAccountState.storageManager.accountData = signedInUser;
Status.login = LOGIN_FAILED_LOGIN_REJECTED;
- yield Assert.rejects(browseridManager.ensureLoggedIn(),
+ await Assert.rejects(browseridManager.ensureLoggedIn(),
"LOGIN_FAILED_LOGIN_REJECTED should have caused immediate rejection");
Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED,
"status should remain LOGIN_FAILED_LOGIN_REJECTED");
Status.login = LOGIN_FAILED_NETWORK_ERROR;
- yield browseridManager.ensureLoggedIn();
+ await browseridManager.ensureLoggedIn();
Assert.equal(Status.login, LOGIN_SUCCEEDED, "final ensureLoggedIn worked");
});
add_test(function test_tokenExpiration() {
_("BrowserIDManager notices token expiration:");
let bimExp = new BrowserIDManager();
configureFxAccountIdentity(bimExp, identityConfig);
@@ -399,50 +399,50 @@ add_test(function test_computeXClientSta
let bidUser = new BrowserIDManager();
let header = bidUser._computeXClientState(kB);
do_check_eq(header, "6ae94683571c7a7c54dab4700aa3995f");
run_next_test();
});
-add_task(function* test_getTokenErrors() {
+add_task(async function test_getTokenErrors() {
_("BrowserIDManager correctly handles various failures to get a token.");
_("Arrange for a 401 - Sync should reflect an auth error.");
initializeIdentityWithTokenServerResponse({
status: 401,
headers: {"content-type": "application/json"},
body: JSON.stringify({}),
});
let browseridManager = Service.identity;
- yield browseridManager.initializeWithCurrentIdentity();
- yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ await browseridManager.initializeWithCurrentIdentity();
+ await Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
"should reject due to 401");
Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED, "login was rejected");
// XXX - other interesting responses to return?
// And for good measure, some totally "unexpected" errors - we generally
// assume these problems are going to magically go away at some point.
_("Arrange for an empty body with a 200 response - should reflect a network error.");
initializeIdentityWithTokenServerResponse({
status: 200,
headers: [],
body: "",
});
browseridManager = Service.identity;
- yield browseridManager.initializeWithCurrentIdentity();
- yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ await browseridManager.initializeWithCurrentIdentity();
+ await Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
"should reject due to non-JSON response");
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login state is LOGIN_FAILED_NETWORK_ERROR");
});
-add_task(function* test_refreshCertificateOn401() {
+add_task(async function test_refreshCertificateOn401() {
_("BrowserIDManager refreshes the FXA certificate after a 401.");
var identityConfig = makeIdentityConfig();
var browseridManager = new BrowserIDManager();
// Use the real `_getAssertion` method that calls
// `mockFxAClient.signCertificate`.
let fxaInternal = makeFxAccountsInternalMock(identityConfig);
delete fxaInternal._getAssertion;
configureFxAccountIdentity(browseridManager, identityConfig, fxaInternal);
@@ -486,47 +486,47 @@ add_task(function* test_refreshCertifica
duration: 300,
})
};
}
});
browseridManager._tokenServerClient = mockTSC;
- yield browseridManager.initializeWithCurrentIdentity();
- yield browseridManager.whenReadyToAuthenticate.promise;
+ await browseridManager.initializeWithCurrentIdentity();
+ await browseridManager.whenReadyToAuthenticate.promise;
do_check_eq(getCertCount, 2);
do_check_true(didReturn401);
do_check_true(didReturn200);
do_check_true(browseridManager.account);
do_check_true(browseridManager._token);
do_check_true(browseridManager.hasValidToken());
do_check_true(browseridManager.account);
});
-add_task(function* test_getTokenErrorWithRetry() {
+add_task(async function test_getTokenErrorWithRetry() {
_("tokenserver sends an observer notification on various backoff headers.");
// Set Sync's backoffInterval to zero - after we simulated the backoff header
// it should reflect the value we sent.
Status.backoffInterval = 0;
_("Arrange for a 503 with a Retry-After header.");
initializeIdentityWithTokenServerResponse({
status: 503,
headers: {"content-type": "application/json",
"retry-after": "100"},
body: JSON.stringify({}),
});
let browseridManager = Service.identity;
- yield browseridManager.initializeWithCurrentIdentity();
- yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ await browseridManager.initializeWithCurrentIdentity();
+ await Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
"should reject due to 503");
// The observer should have fired - check it got the value in the response.
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login was rejected");
// Sync will have the value in ms with some slop - so check it is at least that.
Assert.ok(Status.backoffInterval >= 100000);
_("Arrange for a 200 with an X-Backoff header.");
@@ -534,168 +534,168 @@ add_task(function* test_getTokenErrorWit
initializeIdentityWithTokenServerResponse({
status: 503,
headers: {"content-type": "application/json",
"x-backoff": "200"},
body: JSON.stringify({}),
});
browseridManager = Service.identity;
- yield browseridManager.initializeWithCurrentIdentity();
- yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ await browseridManager.initializeWithCurrentIdentity();
+ await Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
"should reject due to no token in response");
// The observer should have fired - check it got the value in the response.
Assert.ok(Status.backoffInterval >= 200000);
});
-add_task(function* test_getKeysErrorWithBackoff() {
+add_task(async function test_getKeysErrorWithBackoff() {
_("Auth server (via hawk) sends an observer notification on backoff headers.");
// Set Sync's backoffInterval to zero - after we simulated the backoff header
// it should reflect the value we sent.
Status.backoffInterval = 0;
_("Arrange for a 503 with a X-Backoff header.");
let config = makeIdentityConfig();
// We want no kA or kB so we attempt to fetch them.
delete config.fxaccount.user.kA;
delete config.fxaccount.user.kB;
config.fxaccount.user.keyFetchToken = "keyfetchtoken";
- yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ await initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
Assert.equal(method, "get");
Assert.equal(uri, "http://mockedserver:9999/account/keys")
return {
status: 503,
headers: {"content-type": "application/json",
"x-backoff": "100"},
body: "{}",
}
});
let browseridManager = Service.identity;
- yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ await Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
"should reject due to 503");
// The observer should have fired - check it got the value in the response.
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login was rejected");
// Sync will have the value in ms with some slop - so check it is at least that.
Assert.ok(Status.backoffInterval >= 100000);
});
-add_task(function* test_getKeysErrorWithRetry() {
+add_task(async function test_getKeysErrorWithRetry() {
_("Auth server (via hawk) sends an observer notification on retry headers.");
// Set Sync's backoffInterval to zero - after we simulated the backoff header
// it should reflect the value we sent.
Status.backoffInterval = 0;
_("Arrange for a 503 with a Retry-After header.");
let config = makeIdentityConfig();
// We want no kA or kB so we attempt to fetch them.
delete config.fxaccount.user.kA;
delete config.fxaccount.user.kB;
config.fxaccount.user.keyFetchToken = "keyfetchtoken";
- yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ await initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
Assert.equal(method, "get");
Assert.equal(uri, "http://mockedserver:9999/account/keys")
return {
status: 503,
headers: {"content-type": "application/json",
"retry-after": "100"},
body: "{}",
}
});
let browseridManager = Service.identity;
- yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ await Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
"should reject due to 503");
// The observer should have fired - check it got the value in the response.
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login was rejected");
// Sync will have the value in ms with some slop - so check it is at least that.
Assert.ok(Status.backoffInterval >= 100000);
});
-add_task(function* test_getHAWKErrors() {
+add_task(async function test_getHAWKErrors() {
_("BrowserIDManager correctly handles various HAWK failures.");
_("Arrange for a 401 - Sync should reflect an auth error.");
let config = makeIdentityConfig();
- yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ await initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
Assert.equal(method, "post");
Assert.equal(uri, "http://mockedserver:9999/certificate/sign")
return {
status: 401,
headers: {"content-type": "application/json"},
body: JSON.stringify({}),
}
});
Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED, "login was rejected");
// XXX - other interesting responses to return?
// And for good measure, some totally "unexpected" errors - we generally
// assume these problems are going to magically go away at some point.
_("Arrange for an empty body with a 200 response - should reflect a network error.");
- yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ await initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
Assert.equal(method, "post");
Assert.equal(uri, "http://mockedserver:9999/certificate/sign")
return {
status: 200,
headers: [],
body: "",
}
});
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "login state is LOGIN_FAILED_NETWORK_ERROR");
});
-add_task(function* test_getGetKeysFailing401() {
+add_task(async function test_getGetKeysFailing401() {
_("BrowserIDManager correctly handles 401 responses fetching keys.");
_("Arrange for a 401 - Sync should reflect an auth error.");
let config = makeIdentityConfig();
// We want no kA or kB so we attempt to fetch them.
delete config.fxaccount.user.kA;
delete config.fxaccount.user.kB;
config.fxaccount.user.keyFetchToken = "keyfetchtoken";
- yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ await initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
Assert.equal(method, "get");
Assert.equal(uri, "http://mockedserver:9999/account/keys")
return {
status: 401,
headers: {"content-type": "application/json"},
body: "{}",
}
});
Assert.equal(Status.login, LOGIN_FAILED_LOGIN_REJECTED, "login was rejected");
});
-add_task(function* test_getGetKeysFailing503() {
+add_task(async function test_getGetKeysFailing503() {
_("BrowserIDManager correctly handles 5XX responses fetching keys.");
_("Arrange for a 503 - Sync should reflect a network error.");
let config = makeIdentityConfig();
// We want no kA or kB so we attempt to fetch them.
delete config.fxaccount.user.kA;
delete config.fxaccount.user.kB;
config.fxaccount.user.keyFetchToken = "keyfetchtoken";
- yield initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
+ await initializeIdentityWithHAWKResponseFactory(config, function(method, data, uri) {
Assert.equal(method, "get");
Assert.equal(uri, "http://mockedserver:9999/account/keys")
return {
status: 503,
headers: {"content-type": "application/json"},
body: "{}",
}
});
Assert.equal(Status.login, LOGIN_FAILED_NETWORK_ERROR, "state reflects network error");
});
-add_task(function* test_getKeysMissing() {
+add_task(async function test_getKeysMissing() {
_("BrowserIDManager correctly handles getKeys succeeding but not returning keys.");
let browseridManager = new BrowserIDManager();
let identityConfig = makeIdentityConfig();
// our mock identity config already has kA and kB - remove them or we never
// try and fetch them.
delete identityConfig.fxaccount.user.kA;
delete identityConfig.fxaccount.user.kB;
@@ -727,29 +727,29 @@ add_task(function* test_getKeysMissing()
validUntil: fxa.internal.now() + CERT_LIFETIME,
cert: "certificate",
};
return Promise.resolve(this.cert.cert);
};
browseridManager._fxaService = fxa;
- yield browseridManager.initializeWithCurrentIdentity();
+ await browseridManager.initializeWithCurrentIdentity();
let ex;
try {
- yield browseridManager.whenReadyToAuthenticate.promise;
+ await browseridManager.whenReadyToAuthenticate.promise;
} catch (e) {
ex = e;
}
Assert.ok(ex.message.indexOf("missing kA or kB") >= 0);
});
-add_task(function* test_signedInUserMissing() {
+add_task(async function test_signedInUserMissing() {
_("BrowserIDManager detects getSignedInUser returning incomplete account data");
let browseridManager = new BrowserIDManager();
let config = makeIdentityConfig();
// Delete stored keys and the key fetch token.
delete identityConfig.fxaccount.user.kA;
delete identityConfig.fxaccount.user.kB;
delete identityConfig.fxaccount.user.keyFetchToken;
@@ -770,31 +770,31 @@ add_task(function* test_signedInUserMiss
let storageManager = new MockFxaStorageManager();
storageManager.initialize(identityConfig.fxaccount.user);
return new AccountState(storageManager);
},
});
browseridManager._fxaService = fxa;
- let status = yield browseridManager.unlockAndVerifyAuthState();
+ let status = await browseridManager.unlockAndVerifyAuthState();
Assert.equal(status, LOGIN_FAILED_LOGIN_REJECTED);
});
// End of tests
// Utility functions follow
// Create a new browserid_identity object and initialize it with a
// hawk mock that simulates HTTP responses.
// The callback function will be called each time the mocked hawk server wants
// to make a request. The result of the callback should be the mock response
// object that will be returned to hawk.
// A token server mock will be used that doesn't hit a server, so we move
// directly to a hawk request.
-function* initializeIdentityWithHAWKResponseFactory(config, cbGetResponse) {
+async function initializeIdentityWithHAWKResponseFactory(config, cbGetResponse) {
// A mock request object.
function MockRESTRequest(uri, credentials, extra) {
this._uri = uri;
this._credentials = credentials;
this._extra = extra;
};
MockRESTRequest.prototype = {
setHeader: function() {},
@@ -843,18 +843,18 @@ function* initializeIdentityWithHAWKResp
storageManager.initialize(config.fxaccount.user);
return new AccountState(storageManager);
},
}
let fxa = new FxAccounts(internal);
browseridManager._fxaService = fxa;
browseridManager._signedInUser = null;
- yield browseridManager.initializeWithCurrentIdentity();
- yield Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
+ await browseridManager.initializeWithCurrentIdentity();
+ await Assert.rejects(browseridManager.whenReadyToAuthenticate.promise,
"expecting rejection due to hawk error");
}
function getTimestamp(hawkAuthHeader) {
return parseInt(/ts="(\d+)"/.exec(hawkAuthHeader)[1], 10) * SECOND_MS;
}
--- a/services/sync/tests/unit/test_collections_recovery.js
+++ b/services/sync/tests/unit/test_collections_recovery.js
@@ -1,17 +1,17 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
// Verify that we wipe the server if we have to regenerate keys.
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");
-add_identity_test(this, function* test_missing_crypto_collection() {
+add_identity_test(this, async function test_missing_crypto_collection() {
let johnHelper = track_collections_helper();
let johnU = johnHelper.with_updated_collection;
let johnColls = johnHelper.collections;
let empty = false;
function maybe_empty(handler) {
return function (request, response) {
if (empty) {
@@ -19,17 +19,17 @@ add_identity_test(this, function* test_m
response.setStatusLine(request.httpVersion, 200, "OK");
response.bodyOutputStream.write(body, body.length);
} else {
handler(request, response);
}
};
}
- yield configureIdentity({username: "johndoe"});
+ await configureIdentity({username: "johndoe"});
let handlers = {
"/1.1/johndoe/info/collections": maybe_empty(johnHelper.handler),
"/1.1/johndoe/storage/crypto/keys": johnU("crypto", new ServerWBO("keys").handler()),
"/1.1/johndoe/storage/meta/global": johnU("meta", new ServerWBO("global").handler())
};
let collections = ["clients", "bookmarks", "forms", "history",
"passwords", "prefs", "tabs"];
@@ -48,38 +48,36 @@ add_identity_test(this, function* test_m
let orig = Service._freshStart;
Service._freshStart = function() {
_("Called _freshStart.");
orig.call(Service);
fresh++;
};
_("Startup, no meta/global: freshStart called once.");
- yield sync_and_validate_telem();
+ await sync_and_validate_telem();
do_check_eq(fresh, 1);
fresh = 0;
_("Regular sync: no need to freshStart.");
Service.sync();
do_check_eq(fresh, 0);
_("Simulate a bad info/collections.");
delete johnColls.crypto;
- yield sync_and_validate_telem();
+ await sync_and_validate_telem();
do_check_eq(fresh, 1);
fresh = 0;
_("Regular sync: no need to freshStart.");
- yield sync_and_validate_telem();
+ await sync_and_validate_telem();
do_check_eq(fresh, 0);
} finally {
Svc.Prefs.resetBranch("");
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- yield deferred.promise;
+ await promiseStopServer(server);
}
});
function run_test() {
initTestLogging("Trace");
run_next_test();
}
--- a/services/sync/tests/unit/test_corrupt_keys.js
+++ b/services/sync/tests/unit/test_corrupt_keys.js
@@ -7,19 +7,18 @@ Cu.import("resource://services-sync/cons
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/engines/tabs.js");
Cu.import("resource://services-sync/engines/history.js");
Cu.import("resource://services-sync/record.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/status.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");
-Cu.import("resource://gre/modules/Promise.jsm");
-add_task(function* test_locally_changed_keys() {
+add_task(async function test_locally_changed_keys() {
let passphrase = "abcdeabcdeabcdeabcdeabcdea";
let hmacErrorCount = 0;
function counting(f) {
return function() {
hmacErrorCount++;
return f.call(this);
};
@@ -82,17 +81,17 @@ add_task(function* test_locally_changed_
serverKeys.encrypt(Service.identity.syncKeyBundle);
do_check_true(serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success);
// Check that login works.
do_check_true(Service.login("johndoe", "ilovejane", passphrase));
do_check_true(Service.isLoggedIn);
// Sync should upload records.
- yield sync_and_validate_telem();
+ await sync_and_validate_telem();
// Tabs exist.
_("Tabs modified: " + johndoe.modified("tabs"));
do_check_true(johndoe.modified("tabs") > 0);
let coll_modified = Service.collectionKeys.lastModified;
// Let's create some server side history records.
@@ -135,29 +134,29 @@ add_task(function* test_locally_changed_
// Fill local key cache with bad data.
corrupt_local_keys();
_("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
do_check_eq(hmacErrorCount, 0);
_("HMAC error count: " + hmacErrorCount);
// Now syncing should succeed, after one HMAC error.
- let ping = yield wait_for_ping(() => Service.sync(), true);
+ let ping = await wait_for_ping(() => Service.sync(), true);
equal(ping.engines.find(e => e.name == "history").incoming.applied, 5);
do_check_eq(hmacErrorCount, 1);
_("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
// And look! We downloaded history!
let store = Service.engineManager.get("history")._store;
- do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--0"));
- do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--1"));
- do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--2"));
- do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--3"));
- do_check_true(yield promiseIsURIVisited("http://foo/bar?record-no--4"));
+ do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--0"));
+ do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--1"));
+ do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--2"));
+ do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--3"));
+ do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--4"));
do_check_eq(hmacErrorCount, 1);
_("Busting some new server values.");
// Now what happens if we corrupt the HMAC on the server?
for (let i = 5; i < 10; i++) {
let id = 'record-no--' + i;
let modified = 1 + (Date.now() / 1000);
@@ -181,33 +180,31 @@ add_task(function* test_locally_changed_
_("Server key time hasn't changed.");
do_check_eq(johndoe.modified("crypto"), old_key_time);
_("Resetting HMAC error timer.");
Service.lastHMACEvent = 0;
_("Syncing...");
- ping = yield sync_and_validate_telem(true);
+ ping = await sync_and_validate_telem(true);
do_check_eq(ping.engines.find(e => e.name == "history").incoming.failed, 5);
_("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
_("Server keys have been updated, and we skipped over 5 more HMAC errors without adjusting history.");
do_check_true(johndoe.modified("crypto") > old_key_time);
do_check_eq(hmacErrorCount, 6);
- do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--5"));
- do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--6"));
- do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--7"));
- do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--8"));
- do_check_false(yield promiseIsURIVisited("http://foo/bar?record-no--9"));
+ do_check_false(await promiseIsURIVisited("http://foo/bar?record-no--5"));
+ do_check_false(await promiseIsURIVisited("http://foo/bar?record-no--6"));
+ do_check_false(await promiseIsURIVisited("http://foo/bar?record-no--7"));
+ do_check_false(await promiseIsURIVisited("http://foo/bar?record-no--8"));
+ do_check_false(await promiseIsURIVisited("http://foo/bar?record-no--9"));
} finally {
Svc.Prefs.resetBranch("");
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- yield deferred.promise;
+ await promiseStopServer(server);
}
});
function run_test() {
let logger = Log.repository.rootLogger;
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
validate_all_future_pings();
@@ -219,15 +216,14 @@ function run_test() {
/**
* Asynchronously check a url is visited.
* @param url the url
* @return {Promise}
* @resolves When the check has been added successfully.
* @rejects JavaScript exception.
*/
function promiseIsURIVisited(url) {
- let deferred = Promise.defer();
- PlacesUtils.asyncHistory.isURIVisited(Utils.makeURI(url), function(aURI, aIsVisited) {
- deferred.resolve(aIsVisited);
+ return new Promise(resolve => {
+ PlacesUtils.asyncHistory.isURIVisited(Utils.makeURI(url), function(aURI, aIsVisited) {
+ resolve(aIsVisited);
+ });
});
-
- return deferred.promise;
}
--- a/services/sync/tests/unit/test_errorhandler_1.js
+++ b/services/sync/tests/unit/test_errorhandler_1.js
@@ -6,16 +6,17 @@ Cu.import("resource://services-sync/cons
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/keys.js");
Cu.import("resource://services-sync/policies.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/status.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");
Cu.import("resource://gre/modules/FileUtils.jsm");
+Cu.import("resource://gre/modules/PromiseUtils.jsm");
var fakeServer = new SyncServer();
fakeServer.start();
do_register_cleanup(function() {
return new Promise(resolve => {
fakeServer.stop(resolve);
});
@@ -59,26 +60,26 @@ function run_test() {
function clean() {
Service.startOver();
Status.resetSync();
Status.resetBackoff();
errorHandler.didReportProlongedError = false;
}
-add_identity_test(this, function* test_401_logout() {
+add_identity_test(this, async function test_401_logout() {
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
// By calling sync, we ensure we're logged in.
- yield sync_and_validate_telem();
+ await sync_and_validate_telem();
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);
- let deferred = Promise.defer();
+ let deferred = PromiseUtils.defer();
Svc.Obs.add("weave:service:sync:error", onSyncError);
function onSyncError() {
_("Got weave:service:sync:error in first sync.");
Svc.Obs.remove("weave:service:sync:error", onSyncError);
// Wait for the automatic next sync.
function onLoginError() {
_("Got weave:service:login:error in second sync.");
@@ -95,52 +96,50 @@ add_identity_test(this, function* test_4
Service.startOver();
server.stop(deferred.resolve);
});
}
Svc.Obs.add("weave:service:login:error", onLoginError);
}
// Make sync fail due to login rejected.
- yield configureIdentity({username: "janedoe"});
+ await configureIdentity({username: "janedoe"});
Service._updateCachedURLs();
_("Starting first sync.");
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
deepEqual(ping.failureReason, { name: "httperror", code: 401 });
_("First sync done.");
- yield deferred.promise;
+ await deferred.promise;
});
-add_identity_test(this, function* test_credentials_changed_logout() {
+add_identity_test(this, async function test_credentials_changed_logout() {
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
// By calling sync, we ensure we're logged in.
- yield sync_and_validate_telem();
+ await sync_and_validate_telem();
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);
EHTestsCommon.generateCredentialsChangedFailure();
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
equal(ping.status.sync, CREDENTIALS_CHANGED);
deepEqual(ping.failureReason, {
name: "unexpectederror",
error: "Error: Aborting sync, remote setup failed"
});
do_check_eq(Status.sync, CREDENTIALS_CHANGED);
do_check_false(Service.isLoggedIn);
// Clean up.
Service.startOver();
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- yield deferred.promise;
+ await promiseStopServer(server);
});
add_identity_test(this, function test_no_lastSync_pref() {
// Test reported error.
Status.resetSync();
errorHandler.dontIgnoreErrors = true;
Status.sync = CREDENTIALS_CHANGED;
do_check_true(errorHandler.shouldReportError());
@@ -361,39 +360,37 @@ add_identity_test(this, function test_sh
Status.resetSync();
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.dontIgnoreErrors = true;
Status.login = SERVER_MAINTENANCE;
do_check_true(errorHandler.shouldReportError());
do_check_false(errorHandler.didReportProlongedError);
});
-add_identity_test(this, function* test_shouldReportError_master_password() {
+add_identity_test(this, async function test_shouldReportError_master_password() {
_("Test error ignored due to locked master password");
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
// Monkey patch Service.verifyLogin to imitate
// master password being locked.
Service._verifyLogin = Service.verifyLogin;
Service.verifyLogin = function () {
Status.login = MASTER_PASSWORD_LOCKED;
return false;
};
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
do_check_false(errorHandler.shouldReportError());
// Clean up.
Service.verifyLogin = Service._verifyLogin;
clean();
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- yield deferred.promise;
+ await promiseStopServer(server);
});
// Test that even if we don't have a cluster URL, a login failure due to
// authentication errors is always reported.
add_identity_test(this, function test_shouldReportLoginFailureWithNoCluster() {
// Ensure no clusterURL - any error not specific to login should not be reported.
Service.serverURL = "";
Service.clusterURL = "";
@@ -406,146 +403,129 @@ add_identity_test(this, function test_sh
// But any other status with a missing clusterURL is treated as a mid-sync
// 401 (ie, should be treated as a node reassignment)
Status.login = LOGIN_SUCCEEDED;
do_check_false(errorHandler.shouldReportError());
});
// XXX - how to arrange for 'Service.identity.basicPassword = null;' in
// an fxaccounts environment?
-add_task(function* test_login_syncAndReportErrors_non_network_error() {
+add_task(async function test_login_syncAndReportErrors_non_network_error() {
// Test non-network errors are reported
// when calling syncAndReportErrors
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
Service.identity.basicPassword = null;
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_sync_syncAndReportErrors_non_network_error() {
+add_identity_test(this, async function test_sync_syncAndReportErrors_non_network_error() {
// Test non-network errors are reported
// when calling syncAndReportErrors
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
// By calling sync, we ensure we're logged in.
Service.sync();
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);
EHTestsCommon.generateCredentialsChangedFailure();
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- do_check_eq(Status.sync, CREDENTIALS_CHANGED);
- // If we clean this tick, telemetry won't get the right error
- server.stop(() => {
- clean();
- deferred.resolve();
- });
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
- let ping = yield wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
+ let ping = await wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
equal(ping.status.sync, CREDENTIALS_CHANGED);
deepEqual(ping.failureReason, {
name: "unexpectederror",
error: "Error: Aborting sync, remote setup failed"
});
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_eq(Status.sync, CREDENTIALS_CHANGED);
+ // If we clean this tick, telemetry won't get the right error
+ await promiseStopServer(server);
+ clean();
});
// XXX - how to arrange for 'Service.identity.basicPassword = null;' in
// an fxaccounts environment?
-add_task(function* test_login_syncAndReportErrors_prolonged_non_network_error() {
+add_task(async function test_login_syncAndReportErrors_prolonged_non_network_error() {
// Test prolonged, non-network errors are
// reported when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
Service.identity.basicPassword = null;
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_sync_syncAndReportErrors_prolonged_non_network_error() {
+add_identity_test(this, async function test_sync_syncAndReportErrors_prolonged_non_network_error() {
// Test prolonged, non-network errors are
// reported when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
// By calling sync, we ensure we're logged in.
Service.sync();
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);
EHTestsCommon.generateCredentialsChangedFailure();
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- do_check_eq(Status.sync, CREDENTIALS_CHANGED);
- // If we clean this tick, telemetry won't get the right error
- server.stop(() => {
- clean();
- deferred.resolve();
- });
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
setLastSync(PROLONGED_ERROR_DURATION);
- let ping = yield wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
+ let ping = await wait_for_ping(() => errorHandler.syncAndReportErrors(), true);
equal(ping.status.sync, CREDENTIALS_CHANGED);
deepEqual(ping.failureReason, {
name: "unexpectederror",
error: "Error: Aborting sync, remote setup failed"
});
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_eq(Status.sync, CREDENTIALS_CHANGED);
+ // If we clean this tick, telemetry won't get the right error
+ await promiseStopServer(server);
+ clean();
});
-add_identity_test(this, function* test_login_syncAndReportErrors_network_error() {
+add_identity_test(this, async function test_login_syncAndReportErrors_network_error() {
// Test network errors are reported when calling syncAndReportErrors.
- yield configureIdentity({username: "broken.wipe"});
+ await configureIdentity({username: "broken.wipe"});
Service.serverURL = fakeServerUrl;
Service.clusterURL = fakeServerUrl;
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
-
- clean();
- deferred.resolve();
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+
+ clean();
});
add_test(function test_sync_syncAndReportErrors_network_error() {
// Test network errors are reported when calling syncAndReportErrors.
Services.io.offline = true;
Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
@@ -556,36 +536,32 @@ add_test(function test_sync_syncAndRepor
clean();
run_next_test();
});
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
});
-add_identity_test(this, function* test_login_syncAndReportErrors_prolonged_network_error() {
+add_identity_test(this, async function test_login_syncAndReportErrors_prolonged_network_error() {
// Test prolonged, network errors are reported
// when calling syncAndReportErrors.
- yield configureIdentity({username: "johndoe"});
+ await configureIdentity({username: "johndoe"});
Service.serverURL = fakeServerUrl;
Service.clusterURL = fakeServerUrl;
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
-
- clean();
- deferred.resolve();
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+
+ clean();
});
add_test(function test_sync_syncAndReportErrors_prolonged_network_error() {
// Test prolonged, network errors are reported
// when calling syncAndReportErrors.
Services.io.offline = true;
Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
@@ -596,90 +572,78 @@ add_test(function test_sync_syncAndRepor
clean();
run_next_test();
});
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
});
-add_task(function* test_login_prolonged_non_network_error() {
+add_task(async function test_login_prolonged_non_network_error() {
// Test prolonged, non-network errors are reported
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
Service.identity.basicPassword = null;
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_task(function* test_sync_prolonged_non_network_error() {
+add_task(async function test_sync_prolonged_non_network_error() {
// Test prolonged, non-network errors are reported
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
// By calling sync, we ensure we're logged in.
Service.sync();
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);
EHTestsCommon.generateCredentialsChangedFailure();
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
- server.stop(() => {
- clean();
- deferred.resolve();
- });
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
setLastSync(PROLONGED_ERROR_DURATION);
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
equal(ping.status.sync, PROLONGED_SYNC_FAILURE);
deepEqual(ping.failureReason, {
name: "unexpectederror",
error: "Error: Aborting sync, remote setup failed"
});
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+ await promiseStopServer(server);
+ clean();
});
-add_identity_test(this, function* test_login_prolonged_network_error() {
+add_identity_test(this, async function test_login_prolonged_network_error() {
// Test prolonged, network errors are reported
- yield configureIdentity({username: "johndoe"});
+ await configureIdentity({username: "johndoe"});
Service.serverURL = fakeServerUrl;
Service.clusterURL = fakeServerUrl;
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- deferred.resolve();
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
});
add_test(function test_sync_prolonged_network_error() {
// Test prolonged, network errors are reported
Services.io.offline = true;
Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
Svc.Obs.remove("weave:ui:sync:error", onSyncError);
@@ -690,85 +654,74 @@ add_test(function test_sync_prolonged_ne
clean();
run_next_test();
});
setLastSync(PROLONGED_ERROR_DURATION);
Service.sync();
});
-add_task(function* test_login_non_network_error() {
+add_task(async function test_login_non_network_error() {
// Test non-network errors are reported
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
Service.identity.basicPassword = null;
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:login:error", onSyncError);
- do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.login, LOGIN_FAILED_NO_PASSWORD);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_task(function* test_sync_non_network_error() {
+add_task(async function test_sync_non_network_error() {
// Test non-network errors are reported
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
// By calling sync, we ensure we're logged in.
Service.sync();
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_true(Service.isLoggedIn);
EHTestsCommon.generateCredentialsChangedFailure();
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- do_check_eq(Status.sync, CREDENTIALS_CHANGED);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.sync, CREDENTIALS_CHANGED);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_login_network_error() {
- yield configureIdentity({username: "johndoe"});
+add_identity_test(this, async function test_login_network_error() {
+ await configureIdentity({username: "johndoe"});
Service.serverURL = fakeServerUrl;
Service.clusterURL = fakeServerUrl;
- let deferred = Promise.defer();
+ let promiseObserved = promiseOneObserver("weave:ui:clear-error");
// Test network errors are not reported.
- Svc.Obs.add("weave:ui:clear-error", function onClearError() {
- Svc.Obs.remove("weave:ui:clear-error", onClearError);
-
- do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
- do_check_false(errorHandler.didReportProlongedError);
-
- Services.io.offline = false;
- clean();
- deferred.resolve()
- });
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Services.io.offline = false;
+ clean();
});
add_test(function test_sync_network_error() {
// Test network errors are not reported.
Services.io.offline = true;
Svc.Obs.add("weave:ui:sync:finish", function onUIUpdate() {
Svc.Obs.remove("weave:ui:sync:finish", onUIUpdate);
@@ -779,64 +732,57 @@ add_test(function test_sync_network_erro
clean();
run_next_test();
});
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
});
-add_identity_test(this, function* test_sync_server_maintenance_error() {
+add_identity_test(this, async function test_sync_server_maintenance_error() {
// Test server maintenance errors are not reported.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
const BACKOFF = 42;
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 503,
headers: {"retry-after": BACKOFF}};
function onSyncError() {
do_throw("Shouldn't get here!");
}
Svc.Obs.add("weave:ui:sync:error", onSyncError);
do_check_eq(Status.service, STATUS_OK);
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:finish", function onSyncFinish() {
- Svc.Obs.remove("weave:ui:sync:finish", onSyncFinish);
-
- do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
- do_check_eq(Status.sync, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- Svc.Obs.remove("weave:ui:sync:error", onSyncError);
- server.stop(() => {
- clean();
- deferred.resolve();
- })
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:finish");
setLastSync(NON_PROLONGED_ERROR_DURATION);
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
equal(ping.status.sync, SERVER_MAINTENANCE);
deepEqual(ping.engines.find(e => e.failureReason).failureReason, { name: "httperror", code: 503 })
- yield deferred.promise;
+ await promiseObserved;
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ await promiseStopServer(server);
+ clean();
});
-add_identity_test(this, function* test_info_collections_login_server_maintenance_error() {
+add_identity_test(this, async function test_info_collections_login_server_maintenance_error() {
// Test info/collections server maintenance errors are not reported.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
Service.username = "broken.info";
- yield configureIdentity({username: "broken.info"});
+ await configureIdentity({username: "broken.info"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
@@ -844,42 +790,39 @@ add_identity_test(this, function* test_i
function onUIUpdate() {
do_throw("Shouldn't experience UI update!");
}
Svc.Obs.add("weave:ui:login:error", onUIUpdate);
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() {
- Svc.Obs.remove("weave:ui:clear-error", onLoginFinish);
-
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:clear-error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_meta_global_login_server_maintenance_error() {
+add_identity_test(this, async function test_meta_global_login_server_maintenance_error() {
// Test meta/global server maintenance errors are not reported.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.meta"});
+ await configureIdentity({username: "broken.meta"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
@@ -887,27 +830,24 @@ add_identity_test(this, function* test_m
function onUIUpdate() {
do_throw("Shouldn't get here!");
}
Svc.Obs.add("weave:ui:login:error", onUIUpdate);
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() {
- Svc.Obs.remove("weave:ui:clear-error", onLoginFinish);
-
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:clear-error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ clean();
+ await promiseStopServer(server);
});
--- a/services/sync/tests/unit/test_errorhandler_2.js
+++ b/services/sync/tests/unit/test_errorhandler_2.js
@@ -6,16 +6,17 @@ Cu.import("resource://services-sync/cons
Cu.import("resource://services-sync/engines.js");
Cu.import("resource://services-sync/keys.js");
Cu.import("resource://services-sync/policies.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/status.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");
Cu.import("resource://gre/modules/FileUtils.jsm");
+Cu.import("resource://gre/modules/PromiseUtils.jsm");
var fakeServer = new SyncServer();
fakeServer.start();
do_register_cleanup(function() {
return new Promise(resolve => {
fakeServer.stop(resolve);
});
@@ -59,23 +60,23 @@ function run_test() {
function clean() {
Service.startOver();
Status.resetSync();
Status.resetBackoff();
errorHandler.didReportProlongedError = false;
}
-add_identity_test(this, function* test_crypto_keys_login_server_maintenance_error() {
+add_identity_test(this, async function test_crypto_keys_login_server_maintenance_error() {
Status.resetSync();
// Test crypto/keys server maintenance errors are not reported.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.keys"});
+ await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
// Force re-download of keys
Service.collectionKeys.clear();
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@@ -86,803 +87,755 @@ add_identity_test(this, function* test_c
function onUIUpdate() {
do_throw("Shouldn't get here!");
}
Svc.Obs.add("weave:ui:login:error", onUIUpdate);
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:clear-error", function onLoginFinish() {
- Svc.Obs.remove("weave:ui:clear-error", onLoginFinish);
-
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:clear-error");
setLastSync(NON_PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
+ clean();
+ await promiseStopServer(server);
});
-add_task(function* test_sync_prolonged_server_maintenance_error() {
+add_task(async function test_sync_prolonged_server_maintenance_error() {
// Test prolonged server maintenance errors are reported.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
const BACKOFF = 42;
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 503,
headers: {"retry-after": BACKOFF}};
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
- do_check_eq(Status.service, SYNC_FAILED);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- server.stop(() => {
- clean();
- deferred.resolve();
- });
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
deepEqual(ping.status.sync, PROLONGED_SYNC_FAILURE);
deepEqual(ping.engines.find(e => e.failureReason).failureReason,
{ name: "httperror", code: 503 });
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ await promiseStopServer(server);
+ clean();
});
-add_identity_test(this, function* test_info_collections_login_prolonged_server_maintenance_error(){
+add_identity_test(this, async function test_info_collections_login_prolonged_server_maintenance_error() {
// Test info/collections prolonged server maintenance errors are reported.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.info"});
+ await configureIdentity({username: "broken.info"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, SYNC_FAILED);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_meta_global_login_prolonged_server_maintenance_error(){
+add_identity_test(this, async function test_meta_global_login_prolonged_server_maintenance_error() {
// Test meta/global prolonged server maintenance errors are reported.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.meta"});
+ await configureIdentity({username: "broken.meta"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, SYNC_FAILED);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_download_crypto_keys_login_prolonged_server_maintenance_error(){
+add_identity_test(this, async function test_download_crypto_keys_login_prolonged_server_maintenance_error() {
// Test crypto/keys prolonged server maintenance errors are reported.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.keys"});
+ await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
// Force re-download of keys
Service.collectionKeys.clear();
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, SYNC_FAILED);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_upload_crypto_keys_login_prolonged_server_maintenance_error(){
+add_identity_test(this, async function test_upload_crypto_keys_login_prolonged_server_maintenance_error(){
// Test crypto/keys prolonged server maintenance errors are reported.
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
- yield configureIdentity({username: "broken.keys"});
+ await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, SYNC_FAILED);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_wipeServer_login_prolonged_server_maintenance_error(){
+add_identity_test(this, async function test_wipeServer_login_prolonged_server_maintenance_error() {
// Test that we report prolonged server maintenance errors that occur whilst
// wiping the server.
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
- yield configureIdentity({username: "broken.wipe"});
+ await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, SYNC_FAILED);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_true(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_true(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_wipeRemote_prolonged_server_maintenance_error(){
+add_identity_test(this, async function test_wipeRemote_prolonged_server_maintenance_error() {
// Test that we report prolonged server maintenance errors that occur whilst
// wiping all remote devices.
let server = EHTestsCommon.sync_httpd_setup();
server.registerPathHandler("/1.1/broken.wipe/storage/catapult", EHTestsCommon.service_unavailable);
- yield configureIdentity({username: "broken.wipe"});
+ await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
EHTestsCommon.generateAndUploadKeys();
let engine = engineManager.get("catapult");
engine.exception = null;
engine.enabled = true;
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, SYNC_FAILED);
- do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
- do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
- do_check_true(errorHandler.didReportProlongedError);
- server.stop(() => {
- clean();
- deferred.resolve();
- });
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
Svc.Prefs.set("firstSync", "wipeRemote");
setLastSync(PROLONGED_ERROR_DURATION);
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
deepEqual(ping.failureReason, { name: "httperror", code: 503 });
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+ do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
+ do_check_true(errorHandler.didReportProlongedError);
+ await promiseStopServer(server);
+ clean();
});
-add_task(function* test_sync_syncAndReportErrors_server_maintenance_error() {
+add_task(async function test_sync_syncAndReportErrors_server_maintenance_error() {
// Test server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
const BACKOFF = 42;
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 503,
headers: {"retry-after": BACKOFF}};
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
- do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
- do_check_eq(Status.sync, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
do_check_eq(Status.service, STATUS_OK);
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_info_collections_login_syncAndReportErrors_server_maintenance_error() {
+add_identity_test(this, async function test_info_collections_login_syncAndReportErrors_server_maintenance_error() {
// Test info/collections server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.info"});
+ await configureIdentity({username: "broken.info"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_meta_global_login_syncAndReportErrors_server_maintenance_error() {
+add_identity_test(this, async function test_meta_global_login_syncAndReportErrors_server_maintenance_error() {
// Test meta/global server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.meta"});
+ await configureIdentity({username: "broken.meta"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_download_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
+add_identity_test(this, async function test_download_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
// Test crypto/keys server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.keys"});
+ await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
// Force re-download of keys
Service.collectionKeys.clear();
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_upload_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
+add_identity_test(this, async function test_upload_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
// Test crypto/keys server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
- yield configureIdentity({username: "broken.keys"});
+ await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_wipeServer_login_syncAndReportErrors_server_maintenance_error() {
+add_identity_test(this, async function test_wipeServer_login_syncAndReportErrors_server_maintenance_error() {
// Test crypto/keys server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
- yield configureIdentity({username: "broken.wipe"});
+ await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_wipeRemote_syncAndReportErrors_server_maintenance_error(){
+add_identity_test(this, async function test_wipeRemote_syncAndReportErrors_server_maintenance_error() {
// Test that we report prolonged server maintenance errors that occur whilst
// wiping all remote devices.
let server = EHTestsCommon.sync_httpd_setup();
- yield configureIdentity({username: "broken.wipe"});
+ await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
EHTestsCommon.generateAndUploadKeys();
let engine = engineManager.get("catapult");
engine.exception = null;
engine.enabled = true;
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, SYNC_FAILED);
- do_check_eq(Status.sync, SERVER_MAINTENANCE);
- do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
Svc.Prefs.set("firstSync", "wipeRemote");
setLastSync(NON_PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, SYNC_FAILED);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_task(function* test_sync_syncAndReportErrors_prolonged_server_maintenance_error() {
+add_task(async function test_sync_syncAndReportErrors_prolonged_server_maintenance_error() {
// Test prolonged server maintenance errors are
// reported when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
const BACKOFF = 42;
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 503,
headers: {"retry-after": BACKOFF}};
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:sync:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:sync:error", onUIUpdate);
- do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
- do_check_eq(Status.sync, SERVER_MAINTENANCE);
- // syncAndReportErrors means dontIgnoreErrors, which means
- // didReportProlongedError not touched.
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:sync:error");
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+ do_check_eq(Status.sync, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_info_collections_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+add_identity_test(this, async function test_info_collections_login_syncAndReportErrors_prolonged_server_maintenance_error() {
// Test info/collections server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.info"});
+ await configureIdentity({username: "broken.info"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- // syncAndReportErrors means dontIgnoreErrors, which means
- // didReportProlongedError not touched.
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_meta_global_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+add_identity_test(this, async function test_meta_global_login_syncAndReportErrors_prolonged_server_maintenance_error() {
// Test meta/global server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.meta"});
+ await configureIdentity({username: "broken.meta"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- // syncAndReportErrors means dontIgnoreErrors, which means
- // didReportProlongedError not touched.
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_download_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+add_identity_test(this, async function test_download_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
// Test crypto/keys server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
- yield EHTestsCommon.setUp(server);
+ await EHTestsCommon.setUp(server);
- yield configureIdentity({username: "broken.keys"});
+ await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
// Force re-download of keys
Service.collectionKeys.clear();
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- // syncAndReportErrors means dontIgnoreErrors, which means
- // didReportProlongedError not touched.
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_upload_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+add_identity_test(this, async function test_upload_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
// Test crypto/keys server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
- yield configureIdentity({username: "broken.keys"});
+ await configureIdentity({username: "broken.keys"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- // syncAndReportErrors means dontIgnoreErrors, which means
- // didReportProlongedError not touched.
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_wipeServer_login_syncAndReportErrors_prolonged_server_maintenance_error() {
+add_identity_test(this, async function test_wipeServer_login_syncAndReportErrors_prolonged_server_maintenance_error() {
// Test crypto/keys server maintenance errors are reported
// when calling syncAndReportErrors.
let server = EHTestsCommon.sync_httpd_setup();
// Start off with an empty account, do not upload a key.
- yield configureIdentity({username: "broken.wipe"});
+ await configureIdentity({username: "broken.wipe"});
Service.serverURL = server.baseURI + "/maintenance/";
Service.clusterURL = server.baseURI + "/maintenance/";
let backoffInterval;
Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
Svc.Obs.remove("weave:service:backoff:interval", observe);
backoffInterval = subject;
});
- let deferred = Promise.defer();
- Svc.Obs.add("weave:ui:login:error", function onUIUpdate() {
- Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
- do_check_true(Status.enforceBackoff);
- do_check_eq(backoffInterval, 42);
- do_check_eq(Status.service, LOGIN_FAILED);
- do_check_eq(Status.login, SERVER_MAINTENANCE);
- // syncAndReportErrors means dontIgnoreErrors, which means
- // didReportProlongedError not touched.
- do_check_false(errorHandler.didReportProlongedError);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:ui:login:error");
do_check_false(Status.enforceBackoff);
do_check_eq(Status.service, STATUS_OK);
setLastSync(PROLONGED_ERROR_DURATION);
errorHandler.syncAndReportErrors();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_true(Status.enforceBackoff);
+ do_check_eq(backoffInterval, 42);
+ do_check_eq(Status.service, LOGIN_FAILED);
+ do_check_eq(Status.login, SERVER_MAINTENANCE);
+ // syncAndReportErrors means dontIgnoreErrors, which means
+ // didReportProlongedError not touched.
+ do_check_false(errorHandler.didReportProlongedError);
+
+ clean();
+ await promiseStopServer(server);
});
-add_task(function* test_sync_engine_generic_fail() {
+add_task(async function test_sync_engine_generic_fail() {
let server = EHTestsCommon.sync_httpd_setup();
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.sync = function sync() {
Svc.Obs.notify("weave:engine:sync:error", ENGINE_UNKNOWN_FAIL, "catapult");
};
let log = Log.repository.getLogger("Sync.ErrorHandler");
Svc.Prefs.set("log.appender.file.logOnError", true);
do_check_eq(Status.engines["catapult"], undefined);
- let deferred = Promise.defer();
+ let deferred = PromiseUtils.defer();
// Don't wait for reset-file-log until the sync is underway.
// This avoids us catching a delayed notification from an earlier test.
Svc.Obs.add("weave:engine:sync:finish", function onEngineFinish() {
Svc.Obs.remove("weave:engine:sync:finish", onEngineFinish);
log.info("Adding reset-file-log observer.");
Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
@@ -906,22 +859,22 @@ let engine = engineManager.get("catapult
server.stop(() => {
clean();
deferred.resolve();
});
});
});
- do_check_true(yield EHTestsCommon.setUp(server));
- let ping = yield sync_and_validate_telem(true);
+ do_check_true(await EHTestsCommon.setUp(server));
+ let ping = await sync_and_validate_telem(true);
deepEqual(ping.status.service, SYNC_FAILED_PARTIAL);
deepEqual(ping.engines.find(e => e.status).status, ENGINE_UNKNOWN_FAIL);
- yield deferred.promise;
+ await deferred.promise;
});
add_test(function test_logs_on_sync_error_despite_shouldReportError() {
_("Ensure that an error is still logged when weave:service:sync:error " +
"is notified, despite shouldReportError returning false.");
let log = Log.repository.getLogger("Sync.ErrorHandler");
Svc.Prefs.set("log.appender.file.logOnError", true);
@@ -970,43 +923,40 @@ add_test(function test_logs_on_login_err
clean();
run_next_test();
});
Svc.Obs.notify("weave:service:login:error", {});
});
// This test should be the last one since it monkeypatches the engine object
// and we should only have one engine object throughout the file (bug 629664).
-add_task(function* test_engine_applyFailed() {
+add_task(async function test_engine_applyFailed() {
let server = EHTestsCommon.sync_httpd_setup();
let engine = engineManager.get("catapult");
engine.enabled = true;
delete engine.exception;
engine.sync = function sync() {
Svc.Obs.notify("weave:engine:sync:applied", {newFailed:1}, "catapult");
};
let log = Log.repository.getLogger("Sync.ErrorHandler");
Svc.Prefs.set("log.appender.file.logOnError", true);
- let deferred = Promise.defer();
- Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
- Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
-
- do_check_eq(Status.engines["catapult"], ENGINE_APPLY_FAIL);
- do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
-
- // Test Error log was written on SYNC_FAILED_PARTIAL.
- let entries = logsdir.directoryEntries;
- do_check_true(entries.hasMoreElements());
- let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
- do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
-
- clean();
- server.stop(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
do_check_eq(Status.engines["catapult"], undefined);
- do_check_true(yield EHTestsCommon.setUp(server));
+ do_check_true(await EHTestsCommon.setUp(server));
Service.sync();
- yield deferred.promise;
+ await promiseObserved;
+
+ do_check_eq(Status.engines["catapult"], ENGINE_APPLY_FAIL);
+ do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+
+ // Test Error log was written on SYNC_FAILED_PARTIAL.
+ let entries = logsdir.directoryEntries;
+ do_check_true(entries.hasMoreElements());
+ let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+ do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+
+ clean();
+ await promiseStopServer(server);
});
--- a/services/sync/tests/unit/test_errorhandler_eol.js
+++ b/services/sync/tests/unit/test_errorhandler_eol.js
@@ -38,18 +38,18 @@ function handler200(eolCode) {
function sync_httpd_setup(infoHandler) {
let handlers = {
"/1.1/johndoe/info/collections": infoHandler,
};
return httpd_setup(handlers);
}
-function* setUp(server) {
- yield configureIdentity({username: "johndoe"});
+async function setUp(server) {
+ await configureIdentity({username: "johndoe"});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
new FakeCryptoService();
}
function run_test() {
run_next_test();
}
@@ -61,77 +61,68 @@ function do_check_soft_eol(eh, start) {
}
function do_check_hard_eol(eh, start) {
// We subtract 1000 because the stored value is in second precision.
do_check_true(eh.earliestNextAlert >= (start + eh.MINIMUM_ALERT_INTERVAL_MSEC - 1000));
do_check_eq("hard-eol", eh.currentAlertMode);
do_check_true(Status.eol);
}
-add_identity_test(this, function* test_200_hard() {
+add_identity_test(this, async function test_200_hard() {
let eh = Service.errorHandler;
let start = Date.now();
let server = sync_httpd_setup(handler200("hard-eol"));
- yield setUp(server);
+ await setUp(server);
- let deferred = Promise.defer();
- let obs = function (subject, topic, data) {
- Svc.Obs.remove("weave:eol", obs);
- do_check_eq("hard-eol", subject.code);
- do_check_hard_eol(eh, start);
- do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
- eh.clearServerAlerts();
- server.stop(deferred.resolve);
- };
+ let promiseObserved = promiseOneObserver("weave:eol");
- Svc.Obs.add("weave:eol", obs);
Service._fetchInfo();
Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.
- yield deferred.promise;
+
+ let { subject } = await promiseObserved;
+ do_check_eq("hard-eol", subject.code);
+ do_check_hard_eol(eh, start);
+ do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
+ eh.clearServerAlerts();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_513_hard() {
+add_identity_test(this, async function test_513_hard() {
let eh = Service.errorHandler;
let start = Date.now();
let server = sync_httpd_setup(handler513);
- yield setUp(server);
+ await setUp(server);
- let deferred = Promise.defer();
- let obs = function (subject, topic, data) {
- Svc.Obs.remove("weave:eol", obs);
- do_check_eq("hard-eol", subject.code);
- do_check_hard_eol(eh, start);
- do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
- eh.clearServerAlerts();
- server.stop(deferred.resolve);
- };
+ let promiseObserved = promiseOneObserver("weave:eol");
- Svc.Obs.add("weave:eol", obs);
try {
Service._fetchInfo();
Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.
} catch (ex) {
// Because fetchInfo will fail on a 513.
}
- yield deferred.promise;
+ let { subject } = await promiseObserved;
+ do_check_eq("hard-eol", subject.code);
+ do_check_hard_eol(eh, start);
+ do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
+ eh.clearServerAlerts();
+
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_200_soft() {
+add_identity_test(this, async function test_200_soft() {
let eh = Service.errorHandler;
let start = Date.now();
let server = sync_httpd_setup(handler200("soft-eol"));
- yield setUp(server);
+ await setUp(server);
- let deferred = Promise.defer();
- let obs = function (subject, topic, data) {
- Svc.Obs.remove("weave:eol", obs);
- do_check_eq("soft-eol", subject.code);
- do_check_soft_eol(eh, start);
- do_check_eq(Service.scheduler.singleDeviceInterval, Service.scheduler.syncInterval);
- eh.clearServerAlerts();
- server.stop(deferred.resolve);
- };
+ let promiseObserved = promiseOneObserver("weave:eol");
- Svc.Obs.add("weave:eol", obs);
Service._fetchInfo();
Service.scheduler.adjustSyncInterval(); // As if we failed or succeeded in syncing.
- yield deferred.promise;
+ let { subject } = await promiseObserved;
+ do_check_eq("soft-eol", subject.code);
+ do_check_soft_eol(eh, start);
+ do_check_eq(Service.scheduler.singleDeviceInterval, Service.scheduler.syncInterval);
+ eh.clearServerAlerts();
+
+ await promiseStopServer(server);
});
--- a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
+++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
@@ -11,22 +11,16 @@ Cu.import("resource://services-sync/util
Cu.import("resource://testing-common/services/sync/fakeservices.js");
Cu.import("resource://testing-common/services/sync/utils.js");
initTestLogging("Trace");
var engineManager = Service.engineManager;
engineManager.clear();
-function promiseStopServer(server) {
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- return deferred.promise;
-}
-
function CatapultEngine() {
SyncEngine.call(this, "Catapult", Service);
}
CatapultEngine.prototype = {
__proto__: SyncEngine.prototype,
exception: null, // tests fill this in
_sync: function _sync() {
throw this.exception;
@@ -54,36 +48,36 @@ function sync_httpd_setup() {
"/1.1/johndoe/info/collections": collectionsHelper.handler,
"/1.1/johndoe/storage/meta/global": upd("meta", globalWBO.handler()),
"/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler()),
"/1.1/johndoe/storage/crypto/keys": upd("crypto", keysWBO.handler())
};
return httpd_setup(handlers);
}
-function* setUp(server) {
- yield configureIdentity({username: "johndoe"});
+async function setUp(server) {
+ await configureIdentity({username: "johndoe"});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
new FakeCryptoService();
}
function generateAndUploadKeys(server) {
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);
let res = Service.resource(server.baseURI + "/1.1/johndoe/storage/crypto/keys");
return serverKeys.upload(res).success;
}
-add_identity_test(this, function* test_backoff500() {
+add_identity_test(this, async function test_backoff500() {
_("Test: HTTP 500 sets backoff status.");
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 500};
try {
do_check_false(Status.enforceBackoff);
@@ -94,23 +88,23 @@ add_identity_test(this, function* test_b
Service.sync();
do_check_true(Status.enforceBackoff);
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
} finally {
Status.resetBackoff();
Service.startOver();
}
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_backoff503() {
+add_identity_test(this, async function test_backoff503() {
_("Test: HTTP 503 with Retry-After header leads to backoff notification and sets backoff status.");
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
const BACKOFF = 42;
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 503,
headers: {"retry-after": BACKOFF}};
let backoffInterval;
@@ -130,23 +124,23 @@ add_identity_test(this, function* test_b
do_check_eq(backoffInterval, BACKOFF);
do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
do_check_eq(Status.sync, SERVER_MAINTENANCE);
} finally {
Status.resetBackoff();
Status.resetSync();
Service.startOver();
}
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_overQuota() {
+add_identity_test(this, async function test_overQuota() {
_("Test: HTTP 400 with body error code 14 means over quota.");
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 400,
toString() {
return "14";
}};
@@ -159,77 +153,70 @@ add_identity_test(this, function* test_o
Service.sync();
do_check_eq(Status.sync, OVER_QUOTA);
do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
} finally {
Status.resetSync();
Service.startOver();
}
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_service_networkError() {
+add_identity_test(this, async function test_service_networkError() {
_("Test: Connection refused error from Service.sync() leads to the right status code.");
let server = sync_httpd_setup();
- yield setUp(server);
- let deferred = Promise.defer();
- server.stop(() => {
- // Provoke connection refused.
- Service.clusterURL = "http://localhost:12345/";
+ await setUp(server);
+ await promiseStopServer(server);
+ // Provoke connection refused.
+ Service.clusterURL = "http://localhost:12345/";
- try {
- do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ try {
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
- Service._loggedIn = true;
- Service.sync();
+ Service._loggedIn = true;
+ Service.sync();
- do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
- do_check_eq(Status.service, SYNC_FAILED);
- } finally {
- Status.resetSync();
- Service.startOver();
- }
- deferred.resolve();
- });
- yield deferred.promise;
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_eq(Status.service, SYNC_FAILED);
+ } finally {
+ Status.resetSync();
+ Service.startOver();
+ }
});
-add_identity_test(this, function* test_service_offline() {
+add_identity_test(this, async function test_service_offline() {
_("Test: Wanting to sync in offline mode leads to the right status code but does not increment the ignorable error count.");
let server = sync_httpd_setup();
- yield setUp(server);
- let deferred = Promise.defer();
- server.stop(() => {
- Services.io.offline = true;
- Services.prefs.setBoolPref("network.dns.offline-localhost", false);
+ await setUp(server);
- try {
- do_check_eq(Status.sync, SYNC_SUCCEEDED);
+ await promiseStopServer(server);
+ Services.io.offline = true;
+ Services.prefs.setBoolPref("network.dns.offline-localhost", false);
- Service._loggedIn = true;
- Service.sync();
+ try {
+ do_check_eq(Status.sync, SYNC_SUCCEEDED);
- do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
- do_check_eq(Status.service, SYNC_FAILED);
- } finally {
- Status.resetSync();
- Service.startOver();
- }
- Services.io.offline = false;
- Services.prefs.clearUserPref("network.dns.offline-localhost");
- deferred.resolve();
- });
- yield deferred.promise;
+ Service._loggedIn = true;
+ Service.sync();
+
+ do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+ do_check_eq(Status.service, SYNC_FAILED);
+ } finally {
+ Status.resetSync();
+ Service.startOver();
+ }
+ Services.io.offline = false;
+ Services.prefs.clearUserPref("network.dns.offline-localhost");
});
-add_identity_test(this, function* test_engine_networkError() {
+add_identity_test(this, async function test_engine_networkError() {
_("Test: Network related exceptions from engine.sync() lead to the right status code.");
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
let engine = engineManager.get("catapult");
engine.enabled = true;
engine.exception = Components.Exception("NS_ERROR_UNKNOWN_HOST",
Cr.NS_ERROR_UNKNOWN_HOST);
try {
do_check_eq(Status.sync, SYNC_SUCCEEDED);
@@ -240,22 +227,22 @@ add_identity_test(this, function* test_e
Service.sync();
do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
} finally {
Status.resetSync();
Service.startOver();
}
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_resource_timeout() {
+add_identity_test(this, async function test_resource_timeout() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
let engine = engineManager.get("catapult");
engine.enabled = true;
// Resource throws this when it encounters a timeout.
engine.exception = Components.Exception("Aborting due to channel inactivity.",
Cr.NS_ERROR_NET_TIMEOUT);
try {
@@ -267,16 +254,16 @@ add_identity_test(this, function* test_r
Service.sync();
do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
} finally {
Status.resetSync();
Service.startOver();
}
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
function run_test() {
validate_all_future_pings();
engineManager.register(CatapultEngine);
run_next_test();
}
--- a/services/sync/tests/unit/test_fxa_migration.js
+++ b/services/sync/tests/unit/test_fxa_migration.js
@@ -1,14 +1,13 @@
// We change this pref before anything else initializes
Services.prefs.setCharPref("identity.fxaccounts.auth.uri", "http://localhost");
// Test the FxAMigration module
Cu.import("resource://services-sync/FxaMigrator.jsm");
-Cu.import("resource://gre/modules/Promise.jsm");
// Set our username pref early so sync initializes with the legacy provider.
Services.prefs.setCharPref("services.sync.username", "foo");
// And ensure all debug messages end up being printed.
Services.prefs.setCharPref("services.sync.log.appender.dump", "Debug");
// Now import sync
Cu.import("resource://services-sync/service.js");
@@ -19,34 +18,16 @@ Cu.import("resource://services-sync/util
Services.prefs.clearUserPref("services.sync.username");
Cu.import("resource://testing-common/services/sync/utils.js");
Cu.import("resource://testing-common/services/common/logging.js");
Cu.import("resource://testing-common/services/sync/rotaryengine.js");
const FXA_USERNAME = "someone@somewhere";
-// Utilities
-function promiseOneObserver(topic) {
- return new Promise((resolve, reject) => {
- let observer = function(subject, topic, data) {
- Services.obs.removeObserver(observer, topic);
- resolve({ subject: subject, data: data });
- }
- Services.obs.addObserver(observer, topic, false);
- });
-}
-
-function promiseStopServer(server) {
- return new Promise((resolve, reject) => {
- server.stop(resolve);
- });
-}
-
-
// Helpers
function configureLegacySync() {
let engine = new RotaryEngine(Service);
engine.enabled = true;
Svc.Prefs.set("registerEngines", engine.name);
Svc.Prefs.set("log.logger.engine.rotary", "Trace");
let contents = {
@@ -75,17 +56,17 @@ function configureLegacySync() {
Service.identity.username = USER;
Service._updateCachedURLs();
Service.engineManager._engines[engine.name] = engine;
return [engine, server];
}
-add_task(function *testMigrationUnlinks() {
+add_task(async function testMigrationUnlinks() {
// when we do a .startOver we want the new provider.
let oldValue = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity");
Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", false);
do_register_cleanup(() => {
Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", oldValue)
});
@@ -96,18 +77,18 @@ add_task(function *testMigrationUnlinks(
// Start a sync - this will cause an EOL notification which the migrator's
// observer will notice.
let promiseMigration = promiseOneObserver("fxa-migration:state-changed");
let promiseStartOver = promiseOneObserver("weave:service:start-over:finish");
_("Starting sync");
Service.sync();
_("Finished sync");
- yield promiseStartOver;
- yield promiseMigration;
+ await promiseStartOver;
+ await promiseMigration;
// We should have seen the observer and Sync should no longer be configured.
Assert.ok(!Services.prefs.prefHasUserValue("services.sync.username"));
});
function run_test() {
initTestLogging();
do_register_cleanup(() => {
fxaMigrator.finalize();
--- a/services/sync/tests/unit/test_fxa_node_reassignment.js
+++ b/services/sync/tests/unit/test_fxa_node_reassignment.js
@@ -11,16 +11,17 @@ Cu.import("resource://gre/modules/Log.js
Cu.import("resource://services-common/rest.js");
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/status.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/rotaryengine.js");
Cu.import("resource://services-sync/browserid_identity.js");
Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://gre/modules/PromiseUtils.jsm");
Service.engineManager.clear();
function run_test() {
Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
Log.repository.getLogger("Sync.Resource").level = Log.Level.Trace;
Log.repository.getLogger("Sync.RESTRequest").level = Log.Level.Trace;
@@ -112,20 +113,20 @@ function getReassigned() {
}
/**
* Make a test request to `url`, then watch the result of two syncs
* to ensure that a node request was made.
* Runs `between` between the two. This can be used to undo deliberate failure
* setup, detach observers, etc.
*/
-function* syncAndExpectNodeReassignment(server, firstNotification, between,
- secondNotification, url) {
+async function syncAndExpectNodeReassignment(server, firstNotification, between,
+ secondNotification, url) {
_("Starting syncAndExpectNodeReassignment\n");
- let deferred = Promise.defer();
+ let deferred = PromiseUtils.defer();
function onwards() {
let numTokenRequestsBefore;
function onFirstSync() {
_("First sync completed.");
Svc.Obs.remove(firstNotification, onFirstSync);
Svc.Obs.add(secondNotification, onSecondSync);
do_check_eq(Service.clusterURL, "");
@@ -163,54 +164,54 @@ function* syncAndExpectNodeReassignment(
request.get(function () {
do_check_eq(request.response.status, 401);
Utils.nextTick(onwards);
});
} else {
_("Skipping preliminary validation check for a 401 as we aren't logged in");
Utils.nextTick(onwards);
}
- yield deferred.promise;
+ await deferred.promise;
}
// Check that when we sync we don't request a new token by default - our
// test setup has configured the client with a valid token, and that token
// should be used to form the cluster URL.
-add_task(function* test_single_token_fetch() {
+add_task(async function test_single_token_fetch() {
_("Test a normal sync only fetches 1 token");
let numTokenFetches = 0;
function afterTokenFetch() {
numTokenFetches++;
}
// Set the cluster URL to an "old" version - this is to ensure we don't
// use that old cached version for the first sync but prefer the value
// we got from the token (and as above, we are also checking we don't grab
// a new token). If the test actually attempts to connect to this URL
// it will crash.
Service.clusterURL = "http://example.com/";
- let server = yield prepareServer(afterTokenFetch);
+ let server = await prepareServer(afterTokenFetch);
do_check_false(Service.isLoggedIn, "not already logged in");
Service.sync();
do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
do_check_eq(numTokenFetches, 0, "didn't fetch a new token");
// A bit hacky, but given we know how prepareServer works we can deduce
// that clusterURL we expect.
let expectedClusterURL = server.baseURI + "1.1/johndoe/";
do_check_eq(Service.clusterURL, expectedClusterURL);
- yield new Promise(resolve => server.stop(resolve));
+ await promiseStopServer(server);
});
-add_task(function* test_momentary_401_engine() {
+add_task(async function test_momentary_401_engine() {
_("Test a failure for engine URLs that's resolved by reassignment.");
- let server = yield prepareServer();
+ let server = await prepareServer();
let john = server.user("johndoe");
_("Enabling the Rotary engine.");
let engine = Service.engineManager.get("rotary");
engine.enabled = true;
// We need the server to be correctly set up prior to experimenting. Do this
// through a sync.
@@ -242,127 +243,125 @@ add_task(function* test_momentary_401_en
Svc.Obs.remove("weave:service:login:start", onLoginStart);
do_check_true(getReassigned());
}
_("Adding observer that lastSyncReassigned is still set on login.");
Svc.Obs.add("weave:service:login:start", onLoginStart);
}
- yield syncAndExpectNodeReassignment(server,
+ await syncAndExpectNodeReassignment(server,
"weave:service:sync:finish",
between,
"weave:service:sync:finish",
Service.storageURL + "rotary");
});
// This test ends up being a failing info fetch *after we're already logged in*.
-add_task(function* test_momentary_401_info_collections_loggedin() {
+add_task(async function test_momentary_401_info_collections_loggedin() {
_("Test a failure for info/collections after login that's resolved by reassignment.");
- let server = yield prepareServer();
+ let server = await prepareServer();
_("First sync to prepare server contents.");
Service.sync();
_("Arrange for info/collections to return a 401.");
let oldHandler = server.toplevelHandlers.info;
server.toplevelHandlers.info = handleReassign;
function undo() {
_("Undoing test changes.");
server.toplevelHandlers.info = oldHandler;
}
do_check_true(Service.isLoggedIn, "already logged in");
- yield syncAndExpectNodeReassignment(server,
+ await syncAndExpectNodeReassignment(server,
"weave:service:sync:error",
undo,
"weave:service:sync:finish",
Service.infoURL);
});
// This test ends up being a failing info fetch *before we're logged in*.
// In this case we expect to recover during the login phase - so the first
// sync succeeds.
-add_task(function* test_momentary_401_info_collections_loggedout() {
+add_task(async function test_momentary_401_info_collections_loggedout() {
_("Test a failure for info/collections before login that's resolved by reassignment.");
let oldHandler;
let sawTokenFetch = false;
function afterTokenFetch() {
// After a single token fetch, we undo our evil handleReassign hack, so
// the next /info request returns the collection instead of a 401
server.toplevelHandlers.info = oldHandler;
sawTokenFetch = true;
}
- let server = yield prepareServer(afterTokenFetch);
+ let server = await prepareServer(afterTokenFetch);
// Return a 401 for the next /info request - it will be reset immediately
// after a new token is fetched.
oldHandler = server.toplevelHandlers.info
server.toplevelHandlers.info = handleReassign;
do_check_false(Service.isLoggedIn, "not already logged in");
Service.sync();
do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
// sync was successful - check we grabbed a new token.
do_check_true(sawTokenFetch, "a new token was fetched by this test.")
// and we are done.
Service.startOver();
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- yield deferred.promise;
+ await promiseStopServer(server);
});
// This test ends up being a failing meta/global fetch *after we're already logged in*.
-add_task(function* test_momentary_401_storage_loggedin() {
+add_task(async function test_momentary_401_storage_loggedin() {
_("Test a failure for any storage URL after login that's resolved by" +
"reassignment.");
- let server = yield prepareServer();
+ let server = await prepareServer();
_("First sync to prepare server contents.");
Service.sync();
_("Arrange for meta/global to return a 401.");
let oldHandler = server.toplevelHandlers.storage;
server.toplevelHandlers.storage = handleReassign;
function undo() {
_("Undoing test changes.");
server.toplevelHandlers.storage = oldHandler;
}
do_check_true(Service.isLoggedIn, "already logged in");
- yield syncAndExpectNodeReassignment(server,
+ await syncAndExpectNodeReassignment(server,
"weave:service:sync:error",
undo,
"weave:service:sync:finish",
Service.storageURL + "meta/global");
});
// This test ends up being a failing meta/global fetch *before we've logged in*.
-add_task(function* test_momentary_401_storage_loggedout() {
+add_task(async function test_momentary_401_storage_loggedout() {
_("Test a failure for any storage URL before login, not just engine parts. " +
"Resolved by reassignment.");
- let server = yield prepareServer();
+ let server = await prepareServer();
// Return a 401 for all storage requests.
let oldHandler = server.toplevelHandlers.storage;
server.toplevelHandlers.storage = handleReassign;
function undo() {
_("Undoing test changes.");
server.toplevelHandlers.storage = oldHandler;
}
do_check_false(Service.isLoggedIn, "already logged in");
- yield syncAndExpectNodeReassignment(server,
+ await syncAndExpectNodeReassignment(server,
"weave:service:login:error",
undo,
"weave:service:sync:finish",
Service.storageURL + "meta/global");
});
--- a/services/sync/tests/unit/test_fxa_service_cluster.js
+++ b/services/sync/tests/unit/test_fxa_service_cluster.js
@@ -1,43 +1,43 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/fxa_utils.js");
Cu.import("resource://testing-common/services/sync/utils.js");
-add_task(function* test_findCluster() {
+add_task(async function test_findCluster() {
_("Test FxA _findCluster()");
_("_findCluster() throws on 500 errors.");
initializeIdentityWithTokenServerResponse({
status: 500,
headers: [],
body: "",
});
- yield Service.identity.initializeWithCurrentIdentity();
- yield Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
+ await Service.identity.initializeWithCurrentIdentity();
+ await Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
"should reject due to 500");
Assert.throws(function() {
Service._clusterManager._findCluster();
});
_("_findCluster() returns null on authentication errors.");
initializeIdentityWithTokenServerResponse({
status: 401,
headers: {"content-type": "application/json"},
body: "{}",
});
- yield Service.identity.initializeWithCurrentIdentity();
- yield Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
+ await Service.identity.initializeWithCurrentIdentity();
+ await Assert.rejects(Service.identity.whenReadyToAuthenticate.promise,
"should reject due to 401");
cluster = Service._clusterManager._findCluster();
Assert.strictEqual(cluster, null);
_("_findCluster() works with correct tokenserver response.");
let endpoint = "http://example.com/something";
initializeIdentityWithTokenServerResponse({
@@ -48,18 +48,18 @@ add_task(function* test_findCluster() {
api_endpoint: endpoint,
duration: 300,
id: "id",
key: "key",
uid: "uid",
})
});
- yield Service.identity.initializeWithCurrentIdentity();
- yield Service.identity.whenReadyToAuthenticate.promise;
+ await Service.identity.initializeWithCurrentIdentity();
+ await Service.identity.whenReadyToAuthenticate.promise;
cluster = Service._clusterManager._findCluster();
// The cluster manager ensures a trailing "/"
Assert.strictEqual(cluster, endpoint + "/");
Svc.Prefs.resetBranch("");
});
function run_test() {
--- a/services/sync/tests/unit/test_fxa_startOver.js
+++ b/services/sync/tests/unit/test_fxa_startOver.js
@@ -6,22 +6,22 @@ Cu.import("resource://services-sync/iden
Cu.import("resource://services-sync/browserid_identity.js");
Cu.import("resource://services-sync/service.js");
function run_test() {
initTestLogging("Trace");
run_next_test();
}
-add_task(function* test_startover() {
+add_task(async function test_startover() {
let oldValue = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity", true);
Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", false);
ensureLegacyIdentityManager();
- yield configureIdentity({username: "johndoe"});
+ await configureIdentity({username: "johndoe"});
// The boolean flag on the xpcom service should reflect a legacy provider.
let xps = Cc["@mozilla.org/weave/service;1"]
.getService(Components.interfaces.nsISupports)
.wrappedJSObject;
do_check_false(xps.fxAccountsEnabled);
// we expect the "legacy" provider (but can't instanceof that, as BrowserIDManager
@@ -33,24 +33,25 @@ add_task(function* test_startover() {
Service.login();
// We should have a cluster URL
do_check_true(Service.clusterURL.length > 0);
// remember some stuff so we can reset it after.
let oldIdentity = Service.identity;
let oldClusterManager = Service._clusterManager;
- let deferred = Promise.defer();
- Services.obs.addObserver(function observeStartOverFinished() {
- Services.obs.removeObserver(observeStartOverFinished, "weave:service:start-over:finish");
- deferred.resolve();
- }, "weave:service:start-over:finish", false);
+ let promiseStartOver = new Promise(resolve => {
+ Services.obs.addObserver(function observeStartOverFinished() {
+ Services.obs.removeObserver(observeStartOverFinished, "weave:service:start-over:finish");
+ resolve();
+ }, "weave:service:start-over:finish", false);
+ });
Service.startOver();
- yield deferred.promise; // wait for the observer to fire.
+ await promiseStartOver; // wait for the observer to fire.
// the xpcom service should indicate FxA is enabled.
do_check_true(xps.fxAccountsEnabled);
// should have swapped identities.
do_check_true(Service.identity instanceof BrowserIDManager);
// should have clobbered the cluster URL
do_check_eq(Service.clusterURL, "");
--- a/services/sync/tests/unit/test_hmac_error.js
+++ b/services/sync/tests/unit/test_hmac_error.js
@@ -44,17 +44,17 @@ function shared_setup() {
let global = new ServerWBO("global", {engines: engines});
let keysWBO = new ServerWBO("keys");
let rotaryColl = new ServerCollection({}, true);
let clientsColl = new ServerCollection({}, true);
return [engine, rotaryColl, clientsColl, keysWBO, global];
}
-add_task(function *hmac_error_during_404() {
+add_task(async function hmac_error_during_404() {
_("Attempt to replicate the HMAC error setup.");
let [engine, rotaryColl, clientsColl, keysWBO, global] = shared_setup();
// Hand out 404s for crypto/keys.
let keysHandler = keysWBO.handler();
let key404Counter = 0;
let keys404Handler = function (request, response) {
if (key404Counter > 0) {
@@ -78,32 +78,32 @@ add_task(function *hmac_error_during_404
"/1.1/foo/storage/rotary": upd("rotary", rotaryColl.handler())
};
let server = sync_httpd_setup(handlers);
Service.serverURL = server.baseURI;
try {
_("Syncing.");
- yield sync_and_validate_telem();
+ await sync_and_validate_telem();
_("Partially resetting client, as if after a restart, and forcing redownload.");
Service.collectionKeys.clear();
engine.lastSync = 0; // So that we redownload records.
key404Counter = 1;
_("---------------------------");
- yield sync_and_validate_telem();
+ await sync_and_validate_telem();
_("---------------------------");
// Two rotary items, one client record... no errors.
do_check_eq(hmacErrorCount, 0)
} finally {
Svc.Prefs.resetBranch("");
Service.recordManager.clearCache();
- yield new Promise(resolve => server.stop(resolve));
+ await promiseStopServer(server);
}
});
add_test(function hmac_error_during_node_reassignment() {
_("Attempt to replicate an HMAC error during node reassignment.");
let [engine, rotaryColl, clientsColl, keysWBO, global] = shared_setup();
let collectionsHelper = track_collections_helper();
--- a/services/sync/tests/unit/test_interval_triggers.js
+++ b/services/sync/tests/unit/test_interval_triggers.js
@@ -13,22 +13,16 @@ Cu.import("resource://services-sync/serv
var scheduler = Service.scheduler;
var clientsEngine = Service.clientsEngine;
// Don't remove stale clients when syncing. This is a test-only workaround
// that lets us add clients directly to the store, without losing them on
// the next sync.
clientsEngine._removeRemoteClient = id => {};
-function promiseStopServer(server) {
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- return deferred.promise;
-}
-
function sync_httpd_setup() {
let global = new ServerWBO("global", {
syncID: Service.syncID,
storageVersion: STORAGE_VERSION,
engines: {clients: {version: clientsEngine.version,
syncID: clientsEngine.syncID}}
});
let clientsColl = new ServerCollection({}, true);
@@ -41,18 +35,18 @@ function sync_httpd_setup() {
"/1.1/johndoe/storage/meta/global": upd("meta", global.handler()),
"/1.1/johndoe/info/collections": collectionsHelper.handler,
"/1.1/johndoe/storage/crypto/keys":
upd("crypto", (new ServerWBO("keys")).handler()),
"/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler())
});
}
-function* setUp(server) {
- yield configureIdentity({username: "johndoe"});
+async function setUp(server) {
+ await configureIdentity({username: "johndoe"});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
generateNewKeys(Service.collectionKeys);
let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
serverKeys.encrypt(Service.identity.syncKeyBundle);
serverKeys.upload(Service.resource(Service.cryptoKeysURL));
}
@@ -60,27 +54,27 @@ function run_test() {
initTestLogging("Trace");
Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
run_next_test();
}
-add_identity_test(this, function* test_successful_sync_adjustSyncInterval() {
+add_identity_test(this, async function test_successful_sync_adjustSyncInterval() {
_("Test successful sync calling adjustSyncInterval");
let syncSuccesses = 0;
function onSyncFinish() {
_("Sync success.");
syncSuccesses++;
};
Svc.Obs.add("weave:service:sync:finish", onSyncFinish);
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Confirm defaults
do_check_false(scheduler.idle);
do_check_false(scheduler.numClients > 1);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
do_check_false(scheduler.hasIncomingItems);
_("Test as long as numClients <= 1 our sync interval is SINGLE_USER.");
@@ -156,35 +150,35 @@ add_identity_test(this, function* test_s
do_check_eq(syncSuccesses, 8);
do_check_false(scheduler.idle);
do_check_true(scheduler.numClients > 1);
do_check_false(scheduler.hasIncomingItems); //gets reset to false
do_check_eq(scheduler.syncInterval, scheduler.immediateInterval);
Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
Service.startOver();
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_unsuccessful_sync_adjustSyncInterval() {
+add_identity_test(this, async function test_unsuccessful_sync_adjustSyncInterval() {
_("Test unsuccessful sync calling adjustSyncInterval");
let syncFailures = 0;
function onSyncError() {
_("Sync error.");
syncFailures++;
}
Svc.Obs.add("weave:service:sync:error", onSyncError);
_("Test unsuccessful sync calls adjustSyncInterval");
// Force sync to fail.
Svc.Prefs.set("firstSync", "notReady");
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Confirm defaults
do_check_false(scheduler.idle);
do_check_false(scheduler.numClients > 1);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
do_check_false(scheduler.hasIncomingItems);
_("Test as long as numClients <= 1 our sync interval is SINGLE_USER.");
@@ -261,54 +255,51 @@ add_identity_test(this, function* test_u
do_check_eq(syncFailures, 8);
do_check_false(scheduler.idle);
do_check_true(scheduler.numClients > 1);
do_check_false(scheduler.hasIncomingItems); //gets reset to false
do_check_eq(scheduler.syncInterval, scheduler.immediateInterval);
Service.startOver();
Svc.Obs.remove("weave:service:sync:error", onSyncError);
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_back_triggers_sync() {
+add_identity_test(this, async function test_back_triggers_sync() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Single device: no sync triggered.
scheduler.idle = true;
scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
do_check_false(scheduler.idle);
// Multiple devices: sync is triggered.
clientsEngine._store.create({id: "foo", cleartext: "bar"});
scheduler.updateClientMode();
- let deferred = Promise.defer();
- Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
- Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
-
- Service.recordManager.clearCache();
- Svc.Prefs.resetBranch("");
- scheduler.setDefaults();
- clientsEngine.resetClient();
-
- Service.startOver();
- server.stop(deferred.resolve);
- });
+ let promiseDone = promiseOneObserver("weave:service:sync:finish");
scheduler.idle = true;
scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
do_check_false(scheduler.idle);
- yield deferred.promise;
+ await promiseDone;
+
+ Service.recordManager.clearCache();
+ Svc.Prefs.resetBranch("");
+ scheduler.setDefaults();
+ clientsEngine.resetClient();
+
+ Service.startOver();
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_adjust_interval_on_sync_error() {
+add_identity_test(this, async function test_adjust_interval_on_sync_error() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
let syncFailures = 0;
function onSyncError() {
_("Sync error.");
syncFailures++;
}
Svc.Obs.add("weave:service:sync:error", onSyncError);
@@ -324,61 +315,62 @@ add_identity_test(this, function* test_a
Service.sync();
do_check_eq(syncFailures, 1);
do_check_true(scheduler.numClients > 1);
do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
Svc.Obs.remove("weave:service:sync:error", onSyncError);
Service.startOver();
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
-add_identity_test(this, function* test_bug671378_scenario() {
+add_identity_test(this, async function test_bug671378_scenario() {
// Test scenario similar to bug 671378. This bug appeared when a score
// update occurred that wasn't large enough to trigger a sync so
// scheduleNextSync() was called without a time interval parameter,
// setting nextSync to a non-zero value and preventing the timer from
// being adjusted in the next call to scheduleNextSync().
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
let syncSuccesses = 0;
function onSyncFinish() {
_("Sync success.");
syncSuccesses++;
};
Svc.Obs.add("weave:service:sync:finish", onSyncFinish);
// After first sync call, syncInterval & syncTimer are singleDeviceInterval.
Service.sync();
do_check_eq(syncSuccesses, 1);
do_check_false(scheduler.numClients > 1);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
- let deferred = Promise.defer();
- // Wrap scheduleNextSync so we are notified when it is finished.
- scheduler._scheduleNextSync = scheduler.scheduleNextSync;
- scheduler.scheduleNextSync = function() {
- scheduler._scheduleNextSync();
+ let promiseDone = new Promise(resolve => {
+ // Wrap scheduleNextSync so we are notified when it is finished.
+ scheduler._scheduleNextSync = scheduler.scheduleNextSync;
+ scheduler.scheduleNextSync = function() {
+ scheduler._scheduleNextSync();
- // Check on sync:finish scheduleNextSync sets the appropriate
- // syncInterval and syncTimer values.
- if (syncSuccesses == 2) {
- do_check_neq(scheduler.nextSync, 0);
- do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
- do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
+ // Check on sync:finish scheduleNextSync sets the appropriate
+ // syncInterval and syncTimer values.
+ if (syncSuccesses == 2) {
+ do_check_neq(scheduler.nextSync, 0);
+ do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
+ do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
- scheduler.scheduleNextSync = scheduler._scheduleNextSync;
- Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
- Service.startOver();
- server.stop(deferred.resolve);
- }
- };
+ scheduler.scheduleNextSync = scheduler._scheduleNextSync;
+ Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
+ Service.startOver();
+ server.stop(resolve);
+ }
+ };
+ });
// Set nextSync != 0
// syncInterval still hasn't been set by call to updateClientMode.
// Explicitly trying to invoke scheduleNextSync during a sync
// (to immitate a score update that isn't big enough to trigger a sync).
Svc.Obs.add("weave:service:sync:start", function onSyncStart() {
// Wait for other sync:start observers to be called so that
// nextSync is set to 0.
@@ -389,17 +381,17 @@ add_identity_test(this, function* test_b
do_check_neq(scheduler.nextSync, 0);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
do_check_eq(scheduler.syncTimer.delay, scheduler.singleDeviceInterval);
});
});
clientsEngine._store.create({id: "foo", cleartext: "bar"});
Service.sync();
- yield deferred.promise;
+ await promiseDone;
});
add_test(function test_adjust_timer_larger_syncInterval() {
_("Test syncInterval > current timout period && nextSync != 0, syncInterval is NOT used.");
clientsEngine._store.create({id: "foo", cleartext: "bar"});
scheduler.updateClientMode();
do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
--- a/services/sync/tests/unit/test_node_reassignment.js
+++ b/services/sync/tests/unit/test_node_reassignment.js
@@ -7,16 +7,17 @@
Cu.import("resource://gre/modules/Log.jsm");
Cu.import("resource://services-common/rest.js");
Cu.import("resource://services-sync/constants.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/status.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/rotaryengine.js");
Cu.import("resource://testing-common/services/sync/utils.js");
+Cu.import("resource://gre/modules/PromiseUtils.jsm");
Service.engineManager.clear();
function run_test() {
Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
Log.repository.getLogger("Sync.Resource").level = Log.Level.Trace;
Log.repository.getLogger("Sync.RESTRequest").level = Log.Level.Trace;
@@ -71,17 +72,17 @@ function installNodeHandler(server, next
Utils.nextTick(next);
}
let nodePath = "/user/1.0/johndoe/node/weave";
server.server.registerPathHandler(nodePath, handleNodeRequest);
_("Registered node handler at " + nodePath);
}
function prepareServer() {
- let deferred = Promise.defer();
+ let deferred = PromiseUtils.defer();
configureIdentity({username: "johndoe"}).then(() => {
let server = new SyncServer();
server.registerUser("johndoe");
server.start();
Service.serverURL = server.baseURI;
Service.clusterURL = server.baseURI;
do_check_eq(Service.userAPIURI, server.baseURI + "user/1.0/");
deferred.resolve(server);
@@ -102,19 +103,19 @@ function getReassigned() {
}
/**
* Make a test request to `url`, then watch the result of two syncs
* to ensure that a node request was made.
* Runs `between` between the two. This can be used to undo deliberate failure
* setup, detach observers, etc.
*/
-function* syncAndExpectNodeReassignment(server, firstNotification, between,
+async function syncAndExpectNodeReassignment(server, firstNotification, between,
secondNotification, url) {
- let deferred = Promise.defer();
+ let deferred = PromiseUtils.defer();
function onwards() {
let nodeFetched = false;
function onFirstSync() {
_("First sync completed.");
Svc.Obs.remove(firstNotification, onFirstSync);
Svc.Obs.add(secondNotification, onSecondSync);
do_check_eq(Service.clusterURL, "");
@@ -153,22 +154,22 @@ function* syncAndExpectNodeReassignment(
}
// Make sure that it works!
let request = new RESTRequest(url);
request.get(function () {
do_check_eq(request.response.status, 401);
Utils.nextTick(onwards);
});
- yield deferred.promise;
+ await deferred.promise;
}
-add_task(function* test_momentary_401_engine() {
+add_task(async function test_momentary_401_engine() {
_("Test a failure for engine URLs that's resolved by reassignment.");
- let server = yield prepareServer();
+ let server = await prepareServer();
let john = server.user("johndoe");
_("Enabling the Rotary engine.");
let engine = Service.engineManager.get("rotary");
engine.enabled = true;
// We need the server to be correctly set up prior to experimenting. Do this
// through a sync.
@@ -200,110 +201,110 @@ add_task(function* test_momentary_401_en
Svc.Obs.remove("weave:service:login:start", onLoginStart);
do_check_true(getReassigned());
}
_("Adding observer that lastSyncReassigned is still set on login.");
Svc.Obs.add("weave:service:login:start", onLoginStart);
}
- yield syncAndExpectNodeReassignment(server,
+ await syncAndExpectNodeReassignment(server,
"weave:service:sync:finish",
between,
"weave:service:sync:finish",
Service.storageURL + "rotary");
});
// This test ends up being a failing fetch *after we're already logged in*.
-add_task(function* test_momentary_401_info_collections() {
+add_task(async function test_momentary_401_info_collections() {
_("Test a failure for info/collections that's resolved by reassignment.");
- let server = yield prepareServer();
+ let server = await prepareServer();
_("First sync to prepare server contents.");
Service.sync();
// Return a 401 for info requests, particularly info/collections.
let oldHandler = server.toplevelHandlers.info;
server.toplevelHandlers.info = handleReassign;
function undo() {
_("Undoing test changes.");
server.toplevelHandlers.info = oldHandler;
}
- yield syncAndExpectNodeReassignment(server,
+ await syncAndExpectNodeReassignment(server,
"weave:service:sync:error",
undo,
"weave:service:sync:finish",
Service.infoURL);
});
-add_task(function* test_momentary_401_storage_loggedin() {
+add_task(async function test_momentary_401_storage_loggedin() {
_("Test a failure for any storage URL, not just engine parts. " +
"Resolved by reassignment.");
- let server = yield prepareServer();
+ let server = await prepareServer();
_("Performing initial sync to ensure we are logged in.")
Service.sync();
// Return a 401 for all storage requests.
let oldHandler = server.toplevelHandlers.storage;
server.toplevelHandlers.storage = handleReassign;
function undo() {
_("Undoing test changes.");
server.toplevelHandlers.storage = oldHandler;
}
do_check_true(Service.isLoggedIn, "already logged in");
- yield syncAndExpectNodeReassignment(server,
+ await syncAndExpectNodeReassignment(server,
"weave:service:sync:error",
undo,
"weave:service:sync:finish",
Service.storageURL + "meta/global");
});
-add_task(function* test_momentary_401_storage_loggedout() {
+add_task(async function test_momentary_401_storage_loggedout() {
_("Test a failure for any storage URL, not just engine parts. " +
"Resolved by reassignment.");
- let server = yield prepareServer();
+ let server = await prepareServer();
// Return a 401 for all storage requests.
let oldHandler = server.toplevelHandlers.storage;
server.toplevelHandlers.storage = handleReassign;
function undo() {
_("Undoing test changes.");
server.toplevelHandlers.storage = oldHandler;
}
do_check_false(Service.isLoggedIn, "not already logged in");
- yield syncAndExpectNodeReassignment(server,
+ await syncAndExpectNodeReassignment(server,
"weave:service:login:error",
undo,
"weave:service:sync:finish",
Service.storageURL + "meta/global");
});
-add_task(function* test_loop_avoidance_storage() {
+add_task(async function test_loop_avoidance_storage() {
_("Test that a repeated failure doesn't result in a sync loop " +
"if node reassignment cannot resolve the failure.");
- let server = yield prepareServer();
+ let server = await prepareServer();
// Return a 401 for all storage requests.
let oldHandler = server.toplevelHandlers.storage;
server.toplevelHandlers.storage = handleReassign;
let firstNotification = "weave:service:login:error";
let secondNotification = "weave:service:login:error";
let thirdNotification = "weave:service:sync:finish";
let nodeFetched = false;
- let deferred = Promise.defer();
+ let deferred = PromiseUtils.defer();
// Track the time. We want to make sure the duration between the first and
// second sync is small, and then that the duration between second and third
// is set to be large.
let now;
function onFirstSync() {
_("First sync completed.");
@@ -375,29 +376,29 @@ add_task(function* test_loop_avoidance_s
server.stop(deferred.resolve);
});
}
Svc.Obs.add(firstNotification, onFirstSync);
now = Date.now();
Service.sync();
- yield deferred.promise;
+ await deferred.promise;
});
-add_task(function* test_loop_avoidance_engine() {
+add_task(async function test_loop_avoidance_engine() {
_("Test that a repeated 401 in an engine doesn't result in a sync loop " +
"if node reassignment cannot resolve the failure.");
- let server = yield prepareServer();
+ let server = await prepareServer();
let john = server.user("johndoe");
_("Enabling the Rotary engine.");
let engine = Service.engineManager.get("rotary");
engine.enabled = true;
- let deferred = Promise.defer();
+ let deferred = PromiseUtils.defer();
// We need the server to be correctly set up prior to experimenting. Do this
// through a sync.
let global = {syncID: Service.syncID,
storageVersion: STORAGE_VERSION,
rotary: {version: engine.version,
syncID: engine.syncID}}
john.createCollection("meta").insert("global", global);
@@ -514,10 +515,10 @@ add_task(function* test_loop_avoidance_e
afterSuccessfulSync();
});
}
Svc.Obs.add(firstNotification, onFirstSync);
now = Date.now();
Service.sync();
- yield deferred.promise;
+ await deferred.promise;
});
--- a/services/sync/tests/unit/test_service_startOver.js
+++ b/services/sync/tests/unit/test_service_startOver.js
@@ -23,18 +23,18 @@ BlaEngine.prototype = {
Service.engineManager.register(BlaEngine);
function run_test() {
initTestLogging("Trace");
run_next_test();
}
-add_identity_test(this, function* test_resetLocalData() {
- yield configureIdentity();
+add_identity_test(this, async function test_resetLocalData() {
+ await configureIdentity();
Service.status.enforceBackoff = true;
Service.status.backoffInterval = 42;
Service.status.minimumNextSync = 23;
Service.persistLogin();
// Verify set up.
do_check_eq(Service.status.checkSetup(), STATUS_OK);
--- a/services/sync/tests/unit/test_service_wipeServer.js
+++ b/services/sync/tests/unit/test_service_wipeServer.js
@@ -3,17 +3,17 @@ Cu.import("resource://services-sync/reco
Cu.import("resource://services-sync/resource.js");
Cu.import("resource://testing-common/services/sync/fakeservices.js");
Cu.import("resource://testing-common/services/sync/utils.js");
Svc.DefaultPrefs.set("registerEngines", "");
Cu.import("resource://services-sync/service.js");
// configure the identity we use for this test.
-identityConfig = makeIdentityConfig({username: "johndoe"});
+const identityConfig = makeIdentityConfig({username: "johndoe"});
function FakeCollection() {
this.deleted = false;
}
FakeCollection.prototype = {
handler: function() {
let self = this;
return function(request, response) {
@@ -26,85 +26,79 @@ FakeCollection.prototype = {
}
response.setHeader("X-Weave-Timestamp", timestamp);
response.setStatusLine(request.httpVersion, 200, "OK");
response.bodyOutputStream.write(body, body.length);
};
}
};
-function* setUpTestFixtures(server) {
+async function setUpTestFixtures(server) {
let cryptoService = new FakeCryptoService();
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
- yield configureIdentity(identityConfig);
+ await configureIdentity(identityConfig);
}
function run_test() {
initTestLogging("Trace");
run_next_test();
}
-function promiseStopServer(server) {
- let deferred = Promise.defer();
- server.stop(deferred.resolve);
- return deferred.promise;
-}
-
-add_identity_test(this, function* test_wipeServer_list_success() {
+add_identity_test(this, async function test_wipeServer_list_success() {
_("Service.wipeServer() deletes collections given as argument.");
let steam_coll = new FakeCollection();
let diesel_coll = new FakeCollection();
let server = httpd_setup({
"/1.1/johndoe/storage/steam": steam_coll.handler(),
"/1.1/johndoe/storage/diesel": diesel_coll.handler(),
"/1.1/johndoe/storage/petrol": httpd_handler(404, "Not Found")
});
try {
- yield setUpTestFixtures(server);
+ await setUpTestFixtures(server);
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
_("Confirm initial environment.");
do_check_false(steam_coll.deleted);
do_check_false(diesel_coll.deleted);
_("wipeServer() will happily ignore the non-existent collection and use the timestamp of the last DELETE that was successful.");
let timestamp = Service.wipeServer(["steam", "diesel", "petrol"]);
do_check_eq(timestamp, diesel_coll.timestamp);
_("wipeServer stopped deleting after encountering an error with the 'petrol' collection, thus only 'steam' has been deleted.");
do_check_true(steam_coll.deleted);
do_check_true(diesel_coll.deleted);
} finally {
- yield promiseStopServer(server);
+ await promiseStopServer(server);
Svc.Prefs.resetBranch("");
}
});
-add_identity_test(this, function* test_wipeServer_list_503() {
+add_identity_test(this, async function test_wipeServer_list_503() {
_("Service.wipeServer() deletes collections given as argument.");
let steam_coll = new FakeCollection();
let diesel_coll = new FakeCollection();
let server = httpd_setup({
"/1.1/johndoe/storage/steam": steam_coll.handler(),
"/1.1/johndoe/storage/petrol": httpd_handler(503, "Service Unavailable"),
"/1.1/johndoe/storage/diesel": diesel_coll.handler()
});
try {
- yield setUpTestFixtures(server);
+ await setUpTestFixtures(server);
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
_("Confirm initial environment.");
do_check_false(steam_coll.deleted);
do_check_false(diesel_coll.deleted);
_("wipeServer() will happily ignore the non-existent collection, delete the 'steam' collection and abort after an receiving an error on the 'petrol' collection.");
let error;
@@ -117,22 +111,22 @@ add_identity_test(this, function* test_w
_("wipeServer() threw this exception: " + error);
do_check_eq(error.status, 503);
_("wipeServer stopped deleting after encountering an error with the 'petrol' collection, thus only 'steam' has been deleted.");
do_check_true(steam_coll.deleted);
do_check_false(diesel_coll.deleted);
} finally {
- yield promiseStopServer(server);
+ await promiseStopServer(server);
Svc.Prefs.resetBranch("");
}
});
-add_identity_test(this, function* test_wipeServer_all_success() {
+add_identity_test(this, async function test_wipeServer_all_success() {
_("Service.wipeServer() deletes all the things.");
/**
* Handle the bulk DELETE request sent by wipeServer.
*/
let deleted = false;
let serverTimestamp;
function storageHandler(request, response) {
@@ -140,29 +134,29 @@ add_identity_test(this, function* test_w
do_check_true(request.hasHeader("X-Confirm-Delete"));
deleted = true;
serverTimestamp = return_timestamp(request, response);
}
let server = httpd_setup({
"/1.1/johndoe/storage": storageHandler
});
- yield setUpTestFixtures(server);
+ await setUpTestFixtures(server);
_("Try deletion.");
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
let returnedTimestamp = Service.wipeServer();
do_check_true(deleted);
do_check_eq(returnedTimestamp, serverTimestamp);
- yield promiseStopServer(server);
+ await promiseStopServer(server);
Svc.Prefs.resetBranch("");
});
-add_identity_test(this, function* test_wipeServer_all_404() {
+add_identity_test(this, async function test_wipeServer_all_404() {
_("Service.wipeServer() accepts a 404.");
/**
* Handle the bulk DELETE request sent by wipeServer. Returns a 404.
*/
let deleted = false;
let serverTimestamp;
function storageHandler(request, response) {
@@ -172,71 +166,71 @@ add_identity_test(this, function* test_w
serverTimestamp = new_timestamp();
response.setHeader("X-Weave-Timestamp", "" + serverTimestamp);
response.setStatusLine(request.httpVersion, 404, "Not Found");
}
let server = httpd_setup({
"/1.1/johndoe/storage": storageHandler
});
- yield setUpTestFixtures(server);
+ await setUpTestFixtures(server);
_("Try deletion.");
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
let returnedTimestamp = Service.wipeServer();
do_check_true(deleted);
do_check_eq(returnedTimestamp, serverTimestamp);
- yield promiseStopServer(server);
+ await promiseStopServer(server);
Svc.Prefs.resetBranch("");
});
-add_identity_test(this, function* test_wipeServer_all_503() {
+add_identity_test(this, async function test_wipeServer_all_503() {
_("Service.wipeServer() throws if it encounters a non-200/404 response.");
/**
* Handle the bulk DELETE request sent by wipeServer. Returns a 503.
*/
function storageHandler(request, response) {
do_check_eq("DELETE", request.method);
do_check_true(request.hasHeader("X-Confirm-Delete"));
response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
}
let server = httpd_setup({
"/1.1/johndoe/storage": storageHandler
});
- yield setUpTestFixtures(server);
+ await setUpTestFixtures(server);
_("Try deletion.");
let error;
try {
new SyncTestingInfrastructure(server, "johndoe", "irrelevant", "irrelevant");
Service.wipeServer();
do_throw("Should have thrown!");
} catch (ex) {
error = ex;
}
do_check_eq(error.status, 503);
- yield promiseStopServer(server);
+ await promiseStopServer(server);
Svc.Prefs.resetBranch("");
});
-add_identity_test(this, function* test_wipeServer_all_connectionRefused() {
+add_identity_test(this, async function test_wipeServer_all_connectionRefused() {
_("Service.wipeServer() throws if it encounters a network problem.");
let server = httpd_setup({});
- yield setUpTestFixtures(server);
+ await setUpTestFixtures(server);
Service.serverURL = "http://localhost:4352/";
Service.clusterURL = "http://localhost:4352/";
_("Try deletion.");
try {
Service.wipeServer();
do_throw("Should have thrown!");
} catch (ex) {
do_check_eq(ex.result, Cr.NS_ERROR_CONNECTION_REFUSED);
}
Svc.Prefs.resetBranch("");
- yield promiseStopServer(server);
+ await promiseStopServer(server);
});
--- a/services/sync/tests/unit/test_syncedtabs.js
+++ b/services/sync/tests/unit/test_syncedtabs.js
@@ -76,52 +76,52 @@ function configureClients(clients, clien
engine.clients = clients;
// Apply clients collection overrides.
MockClientsEngine.clientSettings = clientSettings;
// Send an observer that pretends the engine just finished a sync.
Services.obs.notifyObservers(null, "weave:engine:sync:finish", "tabs");
}
// The tests.
-add_task(function* test_noClients() {
+add_task(async function test_noClients() {
// no clients, can't be tabs.
- yield configureClients({});
+ await configureClients({});
- let tabs = yield SyncedTabs.getTabClients();
+ let tabs = await SyncedTabs.getTabClients();
equal(Object.keys(tabs).length, 0);
});
-add_task(function* test_clientWithTabs() {
- yield configureClients({
+add_task(async function test_clientWithTabs() {
+ await configureClients({
guid_desktop: {
clientName: "My Desktop",
tabs: [
{
urlHistory: ["http://foo.com/"],
icon: "http://foo.com/favicon",
}],
},
guid_mobile: {
clientName: "My Phone",
tabs: [],
}
});
- let clients = yield SyncedTabs.getTabClients();
+ let clients = await SyncedTabs.getTabClients();
equal(clients.length, 2);
clients.sort((a, b) => { return a.name.localeCompare(b.name);});
equal(clients[0].tabs.length, 1);
equal(clients[0].tabs[0].url, "http://foo.com/");
equal(clients[0].tabs[0].icon, "http://foo.com/favicon");
// second client has no tabs.
equal(clients[1].tabs.length, 0);
});
-add_task(function* test_staleClientWithTabs() {
- yield configureClients({
+add_task(async function test_staleClientWithTabs() {
+ await configureClients({
guid_desktop: {
clientName: "My Desktop",
tabs: [
{
urlHistory: ["http://foo.com/"],
icon: "http://foo.com/favicon",
}],
},
@@ -151,71 +151,71 @@ add_task(function* test_staleClientWithT
},
}, {
guid_stale_mobile: false,
guid_stale_desktop: false,
// We should always use the device name from the clients collection, instead
// of the possibly stale tabs collection.
guid_stale_name_desktop: "My Laptop",
});
- let clients = yield SyncedTabs.getTabClients();
+ let clients = await SyncedTabs.getTabClients();
clients.sort((a, b) => { return a.name.localeCompare(b.name);});
equal(clients.length, 3);
equal(clients[0].name, "My Desktop");
equal(clients[0].tabs.length, 1);
equal(clients[0].tabs[0].url, "http://foo.com/");
equal(clients[1].name, "My Laptop");
equal(clients[1].tabs.length, 1);
equal(clients[1].tabs[0].url, "https://example.edu/");
equal(clients[2].name, "My Phone");
equal(clients[2].tabs.length, 0);
});
-add_task(function* test_clientWithTabsIconsDisabled() {
+add_task(async function test_clientWithTabsIconsDisabled() {
Services.prefs.setBoolPref("services.sync.syncedTabs.showRemoteIcons", false);
- yield configureClients({
+ await configureClients({
guid_desktop: {
clientName: "My Desktop",
tabs: [
{
urlHistory: ["http://foo.com/"],
icon: "http://foo.com/favicon",
}],
},
});
- let clients = yield SyncedTabs.getTabClients();
+ let clients = await SyncedTabs.getTabClients();
equal(clients.length, 1);
clients.sort((a, b) => { return a.name.localeCompare(b.name);});
equal(clients[0].tabs.length, 1);
equal(clients[0].tabs[0].url, "http://foo.com/");
// expect the default favicon (empty string) due to the pref being false.
equal(clients[0].tabs[0].icon, "");
Services.prefs.clearUserPref("services.sync.syncedTabs.showRemoteIcons");
});
-add_task(function* test_filter() {
+add_task(async function test_filter() {
// Nothing matches.
- yield configureClients({
+ await configureClients({
guid_desktop: {
clientName: "My Desktop",
tabs: [
{
urlHistory: ["http://foo.com/"],
title: "A test page.",
},
{
urlHistory: ["http://bar.com/"],
title: "Another page.",
}],
},
});
- let clients = yield SyncedTabs.getTabClients("foo");
+ let clients = await SyncedTabs.getTabClients("foo");
equal(clients.length, 1);
equal(clients[0].tabs.length, 1);
equal(clients[0].tabs[0].url, "http://foo.com/");
// check it matches the title.
- clients = yield SyncedTabs.getTabClients("test");
+ clients = await SyncedTabs.getTabClients("test");
equal(clients.length, 1);
equal(clients[0].tabs.length, 1);
equal(clients[0].tabs[0].url, "http://foo.com/");
});
--- a/services/sync/tests/unit/test_syncengine_sync.js
+++ b/services/sync/tests/unit/test_syncengine_sync.js
@@ -21,19 +21,19 @@ function clean() {
Service.recordManager.clearCache();
}
function cleanAndGo(server) {
clean();
server.stop(run_next_test);
}
-function promiseClean(server) {
+async function promiseClean(server) {
clean();
- return new Promise(resolve => server.stop(resolve));
+ await promiseStopServer(server);
}
function configureService(server, username, password) {
Service.clusterURL = server.baseURI;
Service.identity.account = username || "foo";
Service.identity.basicPassword = password || "password";
}
@@ -671,17 +671,17 @@ add_test(function test_processIncoming_m
}
} finally {
cleanAndGo(server);
}
});
-add_task(function *test_processIncoming_store_toFetch() {
+add_task(async function test_processIncoming_store_toFetch() {
_("If processIncoming fails in the middle of a batch on mobile, state is saved in toFetch and lastSync.");
Service.identity.username = "foo";
Svc.Prefs.set("client.type", "mobile");
// A collection that throws at the fourth get.
let collection = new ServerCollection();
collection._get_calls = 0;
collection._get = collection.get;
@@ -718,32 +718,32 @@ add_task(function *test_processIncoming_
try {
// Confirm initial environment
do_check_eq(engine.lastSync, 0);
do_check_empty(engine._store.items);
let error;
try {
- yield sync_engine_and_validate_telem(engine, true);
+ await sync_engine_and_validate_telem(engine, true);
} catch (ex) {
error = ex;
}
// Only the first two batches have been applied.
do_check_eq(Object.keys(engine._store.items).length,
MOBILE_BATCH_SIZE * 2);
// The third batch is stuck in toFetch. lastSync has been moved forward to
// the last successful item's timestamp.
do_check_eq(engine.toFetch.length, MOBILE_BATCH_SIZE);
do_check_eq(engine.lastSync, collection.wbo("record-no-99").modified);
} finally {
- yield promiseClean(server);
+ await promiseClean(server);
}
});
add_test(function test_processIncoming_resume_toFetch() {
_("toFetch and previousFailed items left over from previous syncs are fetched on the next sync, along with new items.");
Service.identity.username = "foo";
@@ -1224,17 +1224,17 @@ add_test(function test_processIncoming_f
do_check_eq(batchDownload(BOGUS_RECORDS.length), 4);
} finally {
cleanAndGo(server);
}
});
-add_task(function *test_processIncoming_decrypt_failed() {
+add_task(async function test_processIncoming_decrypt_failed() {
_("Ensure that records failing to decrypt are either replaced or refetched.");
Service.identity.username = "foo";
// Some good and some bogus records. One doesn't contain valid JSON,
// the other will throw during decrypt.
let collection = new ServerCollection();
collection._wbos.flying = new ServerWBO(
@@ -1283,34 +1283,34 @@ add_task(function *test_processIncoming_
let observerData;
Svc.Obs.add("weave:engine:sync:applied", function onApplied(subject, data) {
Svc.Obs.remove("weave:engine:sync:applied", onApplied);
observerSubject = subject;
observerData = data;
});
engine.lastSync = collection.wbo("nojson").modified - 1;
- let ping = yield sync_engine_and_validate_telem(engine, true);
+ let ping = await sync_engine_and_validate_telem(engine, true);
do_check_eq(ping.engines[0].incoming.applied, 2);
do_check_eq(ping.engines[0].incoming.failed, 4);
do_check_eq(ping.engines[0].incoming.newFailed, 4);
do_check_eq(engine.previousFailed.length, 4);
do_check_eq(engine.previousFailed[0], "nojson");
do_check_eq(engine.previousFailed[1], "nojson2");
do_check_eq(engine.previousFailed[2], "nodecrypt");
do_check_eq(engine.previousFailed[3], "nodecrypt2");
// Ensure the observer was notified
do_check_eq(observerData, engine.name);
do_check_eq(observerSubject.applied, 2);
do_check_eq(observerSubject.failed, 4);
} finally {
- yield promiseClean(server);
+ await promiseClean(server);
}
});
add_test(function test_uploadOutgoing_toEmptyServer() {
_("SyncEngine._uploadOutgoing uploads new records to server");
Service.identity.username = "foo";
@@ -1411,17 +1411,17 @@ add_test(function test_uploadOutgoing_hu
do_check_eq(engine._tracker.changedIDs["flying"], undefined);
} finally {
cleanAndGo(server);
}
});
-add_task(function *test_uploadOutgoing_failed() {
+add_task(async function test_uploadOutgoing_failed() {
_("SyncEngine._uploadOutgoing doesn't clear the tracker of objects that failed to upload.");
Service.identity.username = "foo";
let collection = new ServerCollection();
// We only define the "flying" WBO on the server, not the "scotsman"
// and "peppercorn" ones.
collection._wbos.flying = new ServerWBO('flying');
@@ -1454,32 +1454,32 @@ add_task(function *test_uploadOutgoing_f
// Confirm initial environment
do_check_eq(engine.lastSyncLocal, 0);
do_check_eq(collection.payload("flying"), undefined);
do_check_eq(engine._tracker.changedIDs['flying'], FLYING_CHANGED);
do_check_eq(engine._tracker.changedIDs['scotsman'], SCOTSMAN_CHANGED);
do_check_eq(engine._tracker.changedIDs['peppercorn'], PEPPERCORN_CHANGED);
engine.enabled = true;
- yield sync_engine_and_validate_telem(engine, true);
+ await sync_engine_and_validate_telem(engine, true);
// Local timestamp has been set.
do_check_true(engine.lastSyncLocal > 0);
// Ensure the 'flying' record has been uploaded and is no longer marked.
do_check_true(!!collection.payload("flying"));
do_check_eq(engine._tracker.changedIDs['flying'], undefined);
// The 'scotsman' and 'peppercorn' records couldn't be uploaded so
// they weren't cleared from the tracker.
do_check_eq(engine._tracker.changedIDs['scotsman'], SCOTSMAN_CHANGED);
do_check_eq(engine._tracker.changedIDs['peppercorn'], PEPPERCORN_CHANGED);
} finally {
- yield promiseClean(server);
+ await promiseClean(server);
}
});
/* A couple of "functional" tests to ensure we split records into appropriate
POST requests. More comprehensive unit-tests for this "batching" are in
test_postqueue.js.
*/
add_test(function test_uploadOutgoing_MAX_UPLOAD_RECORDS() {
@@ -1709,17 +1709,17 @@ add_test(function test_syncFinish_delete
do_check_eq(engine._delete.ids, undefined);
} finally {
cleanAndGo(server);
}
});
-add_task(function *test_sync_partialUpload() {
+add_task(async function test_sync_partialUpload() {
_("SyncEngine.sync() keeps changedIDs that couldn't be uploaded.");
Service.identity.username = "foo";
let collection = new ServerCollection();
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
@@ -1757,17 +1757,17 @@ add_task(function *test_sync_partialUplo
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
engine.enabled = true;
let error;
try {
- yield sync_engine_and_validate_telem(engine, true);
+ await sync_engine_and_validate_telem(engine, true);
} catch (ex) {
error = ex;
}
ok(!!error);
// The timestamp has been updated.
do_check_true(engine.lastSyncLocal > 456);
@@ -1780,17 +1780,17 @@ add_task(function *test_sync_partialUplo
// hard on the 3rd upload.
if ((i == 23) || (i == 42) || (i >= 200))
do_check_eq(engine._tracker.changedIDs[id], i);
else
do_check_false(id in engine._tracker.changedIDs);
}
} finally {
- yield promiseClean(server);
+ await promiseClean(server);
}
});
add_test(function test_canDecrypt_noCryptoKeys() {
_("SyncEngine.canDecrypt returns false if the engine fails to decrypt items on the server, e.g. due to a missing crypto key collection.");
Service.identity.username = "foo";
// Wipe collection keys so we can test the desired scenario.
--- a/services/sync/tests/unit/test_syncscheduler.js
+++ b/services/sync/tests/unit/test_syncscheduler.js
@@ -52,42 +52,35 @@ function sync_httpd_setup() {
"/1.1/johndoe/info/collections": collectionsHelper.handler,
"/1.1/johndoe/storage/crypto/keys":
upd("crypto", (new ServerWBO("keys")).handler()),
"/1.1/johndoe/storage/clients": upd("clients", clientsColl.handler()),
"/user/1.0/johndoe/node/weave": httpd_handler(200, "OK", "null")
});
}
-function setUp(server) {
- let deferred = Promise.defer();
- configureIdentity({username: "johndoe"}).then(() => {
- Service.clusterURL = server.baseURI + "/";
+async function setUp(server) {
+ await configureIdentity({username: "johndoe"});
+
+ Service.clusterURL = server.baseURI + "/";
- generateNewKeys(Service.collectionKeys);
- let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
- serverKeys.encrypt(Service.identity.syncKeyBundle);
- let result = serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
- deferred.resolve(result);
- });
- return deferred.promise;
+ generateNewKeys(Service.collectionKeys);
+ let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
+ serverKeys.encrypt(Service.identity.syncKeyBundle);
+ let result = serverKeys.upload(Service.resource(Service.cryptoKeysURL)).success;
+ return result;
}
-function cleanUpAndGo(server) {
- let deferred = Promise.defer();
- Utils.nextTick(function () {
- clientsEngine._store.wipe();
- Service.startOver();
- if (server) {
- server.stop(deferred.resolve);
- } else {
- deferred.resolve();
- }
- });
- return deferred.promise;
+async function cleanUpAndGo(server) {
+ await promiseNextTick();
+ clientsEngine._store.wipe();
+ Service.startOver();
+ if (server) {
+ await promiseStopServer(server);
+ }
}
function run_test() {
initTestLogging("Trace");
Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
Log.repository.getLogger("Sync.scheduler").level = Log.Level.Trace;
validate_all_future_pings();
@@ -169,17 +162,17 @@ add_test(function test_prefAttributes()
do_check_eq(scheduler.activeInterval, 60000);
do_check_eq(scheduler.immediateInterval, 60000);
Svc.Prefs.resetBranch("");
scheduler.setDefaults();
run_next_test();
});
-add_identity_test(this, function* test_updateClientMode() {
+add_identity_test(this, async function test_updateClientMode() {
_("Test updateClientMode adjusts scheduling attributes based on # of clients appropriately");
do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
do_check_false(scheduler.numClients > 1);
do_check_false(scheduler.idle);
// Trigger a change in interval & threshold by adding a client.
clientsEngine._store.create({id: "foo", cleartext: "bar"});
@@ -196,20 +189,20 @@ add_identity_test(this, function* test_u
// Goes back to single user if # clients is 1.
do_check_eq(scheduler.numClients, 1);
do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
do_check_false(scheduler.numClients > 1);
do_check_false(scheduler.idle);
- yield cleanUpAndGo();
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_masterpassword_locked_retry_interval() {
+add_identity_test(this, async function test_masterpassword_locked_retry_interval() {
_("Test Status.login = MASTER_PASSWORD_LOCKED results in reschedule at MASTER_PASSWORD interval");
let loginFailed = false;
Svc.Obs.add("weave:service:login:error", function onLoginError() {
Svc.Obs.remove("weave:service:login:error", onLoginError);
loginFailed = true;
});
let rescheduleInterval = false;
@@ -222,31 +215,31 @@ add_identity_test(this, function* test_m
let oldVerifyLogin = Service.verifyLogin;
Service.verifyLogin = function () {
Status.login = MASTER_PASSWORD_LOCKED;
return false;
};
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
Service.sync();
do_check_true(loginFailed);
do_check_eq(Status.login, MASTER_PASSWORD_LOCKED);
do_check_true(rescheduleInterval);
Service.verifyLogin = oldVerifyLogin;
SyncScheduler.prototype.scheduleAtInterval = oldScheduleAtInterval;
- yield cleanUpAndGo(server);
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_calculateBackoff() {
+add_identity_test(this, async function test_calculateBackoff() {
do_check_eq(Status.backoffInterval, 0);
// Test no interval larger than the maximum backoff is used if
// Status.backoffInterval is smaller.
Status.backoffInterval = 5;
let backoffInterval = Utils.calculateBackoff(50, MAXIMUM_BACKOFF_INTERVAL,
Status.backoffInterval);
@@ -255,35 +248,32 @@ add_identity_test(this, function* test_c
// Test Status.backoffInterval is used if it is
// larger than MAXIMUM_BACKOFF_INTERVAL.
Status.backoffInterval = MAXIMUM_BACKOFF_INTERVAL + 10;
backoffInterval = Utils.calculateBackoff(50, MAXIMUM_BACKOFF_INTERVAL,
Status.backoffInterval);
do_check_eq(backoffInterval, MAXIMUM_BACKOFF_INTERVAL + 10);
- yield cleanUpAndGo();
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_scheduleNextSync_nowOrPast() {
- let deferred = Promise.defer();
- Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
- Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
- cleanUpAndGo(server).then(deferred.resolve);
- });
+add_identity_test(this, async function test_scheduleNextSync_nowOrPast() {
+ let promiseObserved = promiseOneObserver("weave:service:sync:finish");
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// We're late for a sync...
scheduler.scheduleNextSync(-1);
- yield deferred.promise;
+ await promiseObserved;
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_scheduleNextSync_future_noBackoff() {
+add_identity_test(this, async function test_scheduleNextSync_future_noBackoff() {
_("scheduleNextSync() uses the current syncInterval if no interval is provided.");
// Test backoffInterval is 0 as expected.
do_check_eq(Status.backoffInterval, 0);
_("Test setting sync interval when nextSync == 0");
scheduler.nextSync = 0;
scheduler.scheduleNextSync();
@@ -319,20 +309,20 @@ add_identity_test(this, function* test_s
do_check_true(scheduler.nextSync <= Date.now() + requestedInterval);
do_check_eq(scheduler.syncTimer.delay, requestedInterval);
// Request a sync at the smallest possible interval (0 triggers now).
scheduler.scheduleNextSync(1);
do_check_true(scheduler.nextSync <= Date.now() + 1);
do_check_eq(scheduler.syncTimer.delay, 1);
- yield cleanUpAndGo();
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_scheduleNextSync_future_backoff() {
+add_identity_test(this, async function test_scheduleNextSync_future_backoff() {
_("scheduleNextSync() will honour backoff in all scheduling requests.");
// Let's take a backoff interval that's bigger than the default sync interval.
const BACKOFF = 7337;
Status.backoffInterval = scheduler.syncInterval + BACKOFF;
_("Test setting sync interval when nextSync == 0");
scheduler.nextSync = 0;
scheduler.scheduleNextSync();
@@ -369,22 +359,22 @@ add_identity_test(this, function* test_s
do_check_true(scheduler.nextSync <= Date.now() + requestedInterval);
do_check_eq(scheduler.syncTimer.delay, requestedInterval);
// Request a sync at the smallest possible interval (0 triggers now).
scheduler.scheduleNextSync(1);
do_check_true(scheduler.nextSync <= Date.now() + Status.backoffInterval);
do_check_eq(scheduler.syncTimer.delay, Status.backoffInterval);
- yield cleanUpAndGo();
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_handleSyncError() {
+add_identity_test(this, async function test_handleSyncError() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Force sync to fail.
Svc.Prefs.set("firstSync", "notReady");
_("Ensure expected initial environment.");
do_check_eq(scheduler._syncErrors, 0);
do_check_false(Status.enforceBackoff);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
@@ -428,29 +418,26 @@ add_identity_test(this, function* test_h
maxInterval = scheduler._syncErrors * (2 * MINIMUM_BACKOFF_INTERVAL);
do_check_true(scheduler.nextSync <= Date.now() + maxInterval);
do_check_true(scheduler.syncTimer.delay <= maxInterval);
do_check_eq(scheduler._syncErrors, 4);
do_check_true(Status.enforceBackoff);
scheduler.syncTimer.clear();
_("Arrange for a successful sync to reset the scheduler error count");
- let deferred = Promise.defer();
- Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
- Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
- cleanUpAndGo(server).then(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:service:sync:finish");
Svc.Prefs.set("firstSync", "wipeRemote");
scheduler.scheduleNextSync(-1);
- yield deferred.promise;
+ await promiseObserved;
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_client_sync_finish_updateClientMode() {
+add_identity_test(this, async function test_client_sync_finish_updateClientMode() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Confirm defaults.
do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
do_check_false(scheduler.idle);
// Trigger a change in interval & threshold by adding a client.
clientsEngine._store.create({id: "foo", cleartext: "bar"});
@@ -469,242 +456,217 @@ add_identity_test(this, function* test_c
// Goes back to single user if # clients is 1.
do_check_eq(scheduler.numClients, 1);
do_check_eq(scheduler.syncThreshold, SINGLE_USER_THRESHOLD);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
do_check_false(scheduler.numClients > 1);
do_check_false(scheduler.idle);
- yield cleanUpAndGo(server);
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_autoconnect_nextSync_past() {
- let deferred = Promise.defer();
+add_identity_test(this, async function test_autoconnect_nextSync_past() {
+ let promiseObserved = promiseOneObserver("weave:service:sync:finish");
// nextSync will be 0 by default, so it's way in the past.
- Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
- Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
- cleanUpAndGo(server).then(deferred.resolve);
- });
-
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
scheduler.delayedAutoConnect(0);
- yield deferred.promise;
+ await promiseObserved;
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_autoconnect_nextSync_future() {
- let deferred = Promise.defer();
+add_identity_test(this, async function test_autoconnect_nextSync_future() {
let previousSync = Date.now() + scheduler.syncInterval / 2;
scheduler.nextSync = previousSync;
// nextSync rounds to the nearest second.
let expectedSync = scheduler.nextSync;
let expectedInterval = expectedSync - Date.now() - 1000;
// Ensure we don't actually try to sync (or log in for that matter).
function onLoginStart() {
do_throw("Should not get here!");
}
Svc.Obs.add("weave:service:login:start", onLoginStart);
- waitForZeroTimer(function () {
- do_check_eq(scheduler.nextSync, expectedSync);
- do_check_true(scheduler.syncTimer.delay >= expectedInterval);
+ await configureIdentity({username: "johndoe"});
+ scheduler.delayedAutoConnect(0);
+ await promiseZeroTimer();
- Svc.Obs.remove("weave:service:login:start", onLoginStart);
- cleanUpAndGo().then(deferred.resolve);
- });
+ do_check_eq(scheduler.nextSync, expectedSync);
+ do_check_true(scheduler.syncTimer.delay >= expectedInterval);
- yield configureIdentity({username: "johndoe"});
- scheduler.delayedAutoConnect(0);
- yield deferred.promise;
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ await cleanUpAndGo();
});
// XXX - this test can't be run with the browserid identity as it relies
// on the syncKey getter behaving in a certain way...
-add_task(function* test_autoconnect_mp_locked() {
+add_task(async function test_autoconnect_mp_locked() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Pretend user did not unlock master password.
let origLocked = Utils.mpLocked;
Utils.mpLocked = () => true;
let origGetter = Service.identity.__lookupGetter__("syncKey");
let origSetter = Service.identity.__lookupSetter__("syncKey");
delete Service.identity.syncKey;
Service.identity.__defineGetter__("syncKey", function() {
_("Faking Master Password entry cancelation.");
throw "User canceled Master Password entry";
});
- let deferred = Promise.defer();
// A locked master password will still trigger a sync, but then we'll hit
// MASTER_PASSWORD_LOCKED and hence MASTER_PASSWORD_LOCKED_RETRY_INTERVAL.
- Svc.Obs.add("weave:service:login:error", function onLoginError() {
- Svc.Obs.remove("weave:service:login:error", onLoginError);
- Utils.nextTick(function aLittleBitAfterLoginError() {
- do_check_eq(Status.login, MASTER_PASSWORD_LOCKED);
-
- Utils.mpLocked = origLocked;
- delete Service.identity.syncKey;
- Service.identity.__defineGetter__("syncKey", origGetter);
- Service.identity.__defineSetter__("syncKey", origSetter);
-
- cleanUpAndGo(server).then(deferred.resolve);
- });
- });
+ let promiseObserved = promiseOneObserver("weave:service:login:error");
scheduler.delayedAutoConnect(0);
- yield deferred.promise;
+ await promiseObserved;
+
+ await promiseNextTick();
+
+ do_check_eq(Status.login, MASTER_PASSWORD_LOCKED);
+
+ Utils.mpLocked = origLocked;
+ delete Service.identity.syncKey;
+ Service.identity.__defineGetter__("syncKey", origGetter);
+ Service.identity.__defineSetter__("syncKey", origSetter);
+
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_no_autoconnect_during_wizard() {
+add_identity_test(this, async function test_no_autoconnect_during_wizard() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Simulate the Sync setup wizard.
Svc.Prefs.set("firstSync", "notReady");
// Ensure we don't actually try to sync (or log in for that matter).
function onLoginStart() {
do_throw("Should not get here!");
}
Svc.Obs.add("weave:service:login:start", onLoginStart);
- let deferred = Promise.defer();
- waitForZeroTimer(function () {
- Svc.Obs.remove("weave:service:login:start", onLoginStart);
- cleanUpAndGo(server).then(deferred.resolve);
- });
-
scheduler.delayedAutoConnect(0);
- yield deferred.promise;
+ await promiseZeroTimer();
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_no_autoconnect_status_not_ok() {
+add_identity_test(this, async function test_no_autoconnect_status_not_ok() {
let server = sync_httpd_setup();
// Ensure we don't actually try to sync (or log in for that matter).
function onLoginStart() {
do_throw("Should not get here!");
}
Svc.Obs.add("weave:service:login:start", onLoginStart);
- let deferred = Promise.defer();
- waitForZeroTimer(function () {
- Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ scheduler.delayedAutoConnect(0);
+ await promiseZeroTimer();
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
- do_check_eq(Status.service, CLIENT_NOT_CONFIGURED);
- do_check_eq(Status.login, LOGIN_FAILED_NO_USERNAME);
+ do_check_eq(Status.service, CLIENT_NOT_CONFIGURED);
+ do_check_eq(Status.login, LOGIN_FAILED_NO_USERNAME);
- cleanUpAndGo(server).then(deferred.resolve);
- });
-
- scheduler.delayedAutoConnect(0);
- yield deferred.promise;
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_autoconnectDelay_pref() {
- let deferred = Promise.defer();
- Svc.Obs.add("weave:service:sync:finish", function onSyncFinish() {
- Svc.Obs.remove("weave:service:sync:finish", onSyncFinish);
- cleanUpAndGo(server).then(deferred.resolve);
- });
+add_identity_test(this, async function test_autoconnectDelay_pref() {
+ let promiseObserved = promiseOneObserver("weave:service:sync:finish");
Svc.Prefs.set("autoconnectDelay", 1);
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
Svc.Obs.notify("weave:service:ready");
// autoconnectDelay pref is multiplied by 1000.
do_check_eq(scheduler._autoTimer.delay, 1000);
do_check_eq(Status.service, STATUS_OK);
- yield deferred.promise;
+ await promiseObserved;
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_idle_adjustSyncInterval() {
+add_identity_test(this, async function test_idle_adjustSyncInterval() {
// Confirm defaults.
do_check_eq(scheduler.idle, false);
// Single device: nothing changes.
scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
do_check_eq(scheduler.idle, true);
do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
// Multiple devices: switch to idle interval.
scheduler.idle = false;
clientsEngine._store.create({id: "foo", cleartext: "bar"});
scheduler.updateClientMode();
scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
do_check_eq(scheduler.idle, true);
do_check_eq(scheduler.syncInterval, scheduler.idleInterval);
- yield cleanUpAndGo();
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_back_triggersSync() {
+add_identity_test(this, async function test_back_triggersSync() {
// Confirm defaults.
do_check_false(scheduler.idle);
do_check_eq(Status.backoffInterval, 0);
// Set up: Define 2 clients and put the system in idle.
scheduler.numClients = 2;
scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
do_check_true(scheduler.idle);
- let deferred = Promise.defer();
// We don't actually expect the sync (or the login, for that matter) to
// succeed. We just want to ensure that it was attempted.
- Svc.Obs.add("weave:service:login:error", function onLoginError() {
- Svc.Obs.remove("weave:service:login:error", onLoginError);
- cleanUpAndGo().then(deferred.resolve);
- });
+ let promiseObserved = promiseOneObserver("weave:service:login:error");
// Send an 'active' event to trigger sync soonish.
scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
- yield deferred.promise;
+ await promiseObserved;
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_active_triggersSync_observesBackoff() {
+add_identity_test(this, async function test_active_triggersSync_observesBackoff() {
// Confirm defaults.
do_check_false(scheduler.idle);
// Set up: Set backoff, define 2 clients and put the system in idle.
const BACKOFF = 7337;
Status.backoffInterval = scheduler.idleInterval + BACKOFF;
scheduler.numClients = 2;
scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
do_check_eq(scheduler.idle, true);
function onLoginStart() {
do_throw("Shouldn't have kicked off a sync!");
}
Svc.Obs.add("weave:service:login:start", onLoginStart);
- let deferred = Promise.defer();
- timer = Utils.namedTimer(function () {
- Svc.Obs.remove("weave:service:login:start", onLoginStart);
-
- do_check_true(scheduler.nextSync <= Date.now() + Status.backoffInterval);
- do_check_eq(scheduler.syncTimer.delay, Status.backoffInterval);
-
- cleanUpAndGo().then(deferred.resolve);
- }, IDLE_OBSERVER_BACK_DELAY * 1.5, {}, "timer");
+ let promiseTimer = promiseNamedTimer(IDLE_OBSERVER_BACK_DELAY * 1.5, {}, "timer");
// Send an 'active' event to try to trigger sync soonish.
scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
- yield deferred.promise;
+ await promiseTimer;
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+
+ do_check_true(scheduler.nextSync <= Date.now() + Status.backoffInterval);
+ do_check_eq(scheduler.syncTimer.delay, Status.backoffInterval);
+
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_back_debouncing() {
+add_identity_test(this, async function test_back_debouncing() {
_("Ensure spurious back-then-idle events, as observed on OS X, don't trigger a sync.");
// Confirm defaults.
do_check_eq(scheduler.idle, false);
// Set up: Define 2 clients and put the system in idle.
scheduler.numClients = 2;
scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
@@ -714,99 +676,96 @@ add_identity_test(this, function* test_b
do_throw("Shouldn't have kicked off a sync!");
}
Svc.Obs.add("weave:service:login:start", onLoginStart);
// Create spurious back-then-idle events as observed on OS X:
scheduler.observe(null, "active", Svc.Prefs.get("scheduler.idleTime"));
scheduler.observe(null, "idle", Svc.Prefs.get("scheduler.idleTime"));
- let deferred = Promise.defer();
- timer = Utils.namedTimer(function () {
- Svc.Obs.remove("weave:service:login:start", onLoginStart);
- cleanUpAndGo().then(deferred.resolve);
- }, IDLE_OBSERVER_BACK_DELAY * 1.5, {}, "timer");
- yield deferred.promise;
+ await promiseNamedTimer(IDLE_OBSERVER_BACK_DELAY * 1.5, {}, "timer");
+ Svc.Obs.remove("weave:service:login:start", onLoginStart);
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_no_sync_node() {
+add_identity_test(this, async function test_no_sync_node() {
// Test when Status.sync == NO_SYNC_NODE_FOUND
// it is not overwritten on sync:finish
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
Service.serverURL = server.baseURI + "/";
Service.sync();
do_check_eq(Status.sync, NO_SYNC_NODE_FOUND);
do_check_eq(scheduler.syncTimer.delay, NO_SYNC_NODE_INTERVAL);
- yield cleanUpAndGo(server);
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_sync_failed_partial_500s() {
+add_identity_test(this, async function test_sync_failed_partial_500s() {
_("Test a 5xx status calls handleSyncError.");
scheduler._syncErrors = MAX_ERROR_COUNT_BEFORE_BACKOFF;
let server = sync_httpd_setup();
let engine = Service.engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 500};
do_check_eq(Status.sync, SYNC_SUCCEEDED);
- do_check_true(yield setUp(server));
+ do_check_true(await setUp(server));
Service.sync();
do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
let maxInterval = scheduler._syncErrors * (2 * MINIMUM_BACKOFF_INTERVAL);
do_check_eq(Status.backoffInterval, 0);
do_check_true(Status.enforceBackoff);
do_check_eq(scheduler._syncErrors, 4);
do_check_true(scheduler.nextSync <= (Date.now() + maxInterval));
do_check_true(scheduler.syncTimer.delay <= maxInterval);
- yield cleanUpAndGo(server);
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_sync_failed_partial_400s() {
+add_identity_test(this, async function test_sync_failed_partial_400s() {
_("Test a non-5xx status doesn't call handleSyncError.");
scheduler._syncErrors = MAX_ERROR_COUNT_BEFORE_BACKOFF;
let server = sync_httpd_setup();
let engine = Service.engineManager.get("catapult");
engine.enabled = true;
engine.exception = {status: 400};
// Have multiple devices for an active interval.
clientsEngine._store.create({id: "foo", cleartext: "bar"});
do_check_eq(Status.sync, SYNC_SUCCEEDED);
- do_check_true(yield setUp(server));
+ do_check_true(await setUp(server));
Service.sync();
do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
do_check_eq(scheduler.syncInterval, scheduler.activeInterval);
do_check_eq(Status.backoffInterval, 0);
do_check_false(Status.enforceBackoff);
do_check_eq(scheduler._syncErrors, 0);
do_check_true(scheduler.nextSync <= (Date.now() + scheduler.activeInterval));
do_check_true(scheduler.syncTimer.delay <= scheduler.activeInterval);
- yield cleanUpAndGo(server);
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_sync_X_Weave_Backoff() {
+add_identity_test(this, async function test_sync_X_Weave_Backoff() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Use an odd value on purpose so that it doesn't happen to coincide with one
// of the sync intervals.
const BACKOFF = 7337;
// Extend info/collections so that we can put it into server maintenance mode.
const INFO_COLLECTIONS = "/1.1/johndoe/info/collections";
let infoColl = server._handler._overridePaths[INFO_COLLECTIONS];
@@ -846,22 +805,22 @@ add_identity_test(this, function* test_s
// was set and when this line gets executed.
let minimumExpectedDelay = (BACKOFF - 3) * 1000;
do_check_true(Status.minimumNextSync >= Date.now() + minimumExpectedDelay);
// Verify that the next sync is actually going to wait that long.
do_check_true(scheduler.nextSync >= Date.now() + minimumExpectedDelay);
do_check_true(scheduler.syncTimer.delay >= minimumExpectedDelay);
- yield cleanUpAndGo(server);
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_sync_503_Retry_After() {
+add_identity_test(this, async function test_sync_503_Retry_After() {
let server = sync_httpd_setup();
- yield setUp(server);
+ await setUp(server);
// Use an odd value on purpose so that it doesn't happen to coincide with one
// of the sync intervals.
const BACKOFF = 7337;
// Extend info/collections so that we can put it into server maintenance mode.
const INFO_COLLECTIONS = "/1.1/johndoe/info/collections";
let infoColl = server._handler._overridePaths[INFO_COLLECTIONS];
@@ -905,43 +864,28 @@ add_identity_test(this, function* test_s
// was set and when this line gets executed.
let minimumExpectedDelay = (BACKOFF - 3) * 1000;
do_check_true(Status.minimumNextSync >= Date.now() + minimumExpectedDelay);
// Verify that the next sync is actually going to wait that long.
do_check_true(scheduler.nextSync >= Date.now() + minimumExpectedDelay);
do_check_true(scheduler.syncTimer.delay >= minimumExpectedDelay);
- yield cleanUpAndGo(server);
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_loginError_recoverable_reschedules() {
+add_identity_test(this, async function test_loginError_recoverable_reschedules() {
_("Verify that a recoverable login error schedules a new sync.");
- yield configureIdentity({username: "johndoe"});
+ await configureIdentity({username: "johndoe"});
Service.serverURL = "http://localhost:1234/";
Service.clusterURL = Service.serverURL;
Service.persistLogin();
Status.resetSync(); // reset Status.login
- let deferred = Promise.defer();
- Svc.Obs.add("weave:service:login:error", function onLoginError() {
- Svc.Obs.remove("weave:service:login:error", onLoginError);
- Utils.nextTick(function aLittleBitAfterLoginError() {
- do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
-
- let expectedNextSync = Date.now() + scheduler.syncInterval;
- do_check_true(scheduler.nextSync > Date.now());
- do_check_true(scheduler.nextSync <= expectedNextSync);
- do_check_true(scheduler.syncTimer.delay > 0);
- do_check_true(scheduler.syncTimer.delay <= scheduler.syncInterval);
-
- Svc.Obs.remove("weave:service:sync:start", onSyncStart);
- cleanUpAndGo().then(deferred.resolve);
- });
- });
+ let promiseObserved = promiseOneObserver("weave:service:login:error");
// Let's set it up so that a sync is overdue, both in terms of previously
// scheduled syncs and the global score. We still do not expect an immediate
// sync because we just tried (duh).
scheduler.nextSync = Date.now() - 100000;
scheduler.globalScore = SINGLE_USER_THRESHOLD + 1;
function onSyncStart() {
do_throw("Shouldn't have started a sync!");
@@ -949,85 +893,88 @@ add_identity_test(this, function* test_l
Svc.Obs.add("weave:service:sync:start", onSyncStart);
// Sanity check.
do_check_eq(scheduler.syncTimer, null);
do_check_eq(Status.checkSetup(), STATUS_OK);
do_check_eq(Status.login, LOGIN_SUCCEEDED);
scheduler.scheduleNextSync(0);
- yield deferred.promise;
+ await promiseObserved;
+ await promiseNextTick();
+
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+
+ let expectedNextSync = Date.now() + scheduler.syncInterval;
+ do_check_true(scheduler.nextSync > Date.now());
+ do_check_true(scheduler.nextSync <= expectedNextSync);
+ do_check_true(scheduler.syncTimer.delay > 0);
+ do_check_true(scheduler.syncTimer.delay <= scheduler.syncInterval);
+
+ Svc.Obs.remove("weave:service:sync:start", onSyncStart);
+ await cleanUpAndGo();
});
-add_identity_test(this, function* test_loginError_fatal_clearsTriggers() {
+add_identity_test(this, async function test_loginError_fatal_clearsTriggers() {
_("Verify that a fatal login error clears sync triggers.");
- yield configureIdentity({username: "johndoe"});
+ await configureIdentity({username: "johndoe"});
let server = httpd_setup({
"/1.1/johndoe/info/collections": httpd_handler(401, "Unauthorized")
});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = Service.serverURL;
Service.persistLogin();
Status.resetSync(); // reset Status.login
- let deferred = Promise.defer();
- Svc.Obs.add("weave:service:login:error", function onLoginError() {
- Svc.Obs.remove("weave:service:login:error", onLoginError);
- Utils.nextTick(function aLittleBitAfterLoginError() {
-
- if (isConfiguredWithLegacyIdentity()) {
- // for the "legacy" identity, a 401 on info/collections means the
- // password is wrong, so we enter a "login rejected" state.
- do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED);
-
- do_check_eq(scheduler.nextSync, 0);
- do_check_eq(scheduler.syncTimer, null);
- } else {
- // For the FxA identity, a 401 on info/collections means a transient
- // error, probably due to an inability to fetch a token.
- do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
- // syncs should still be scheduled.
- do_check_true(scheduler.nextSync > Date.now());
- do_check_true(scheduler.syncTimer.delay > 0);
- }
- cleanUpAndGo(server).then(deferred.resolve);
- });
- });
+ let promiseObserved = promiseOneObserver("weave:service:login:error");
// Sanity check.
do_check_eq(scheduler.nextSync, 0);
do_check_eq(scheduler.syncTimer, null);
do_check_eq(Status.checkSetup(), STATUS_OK);
do_check_eq(Status.login, LOGIN_SUCCEEDED);
scheduler.scheduleNextSync(0);
- yield deferred.promise;
+ await promiseObserved;
+ await promiseNextTick();
+
+ if (isConfiguredWithLegacyIdentity()) {
+ // for the "legacy" identity, a 401 on info/collections means the
+ // password is wrong, so we enter a "login rejected" state.
+ do_check_eq(Status.login, LOGIN_FAILED_LOGIN_REJECTED);
+
+ do_check_eq(scheduler.nextSync, 0);
+ do_check_eq(scheduler.syncTimer, null);
+ } else {
+ // For the FxA identity, a 401 on info/collections means a transient
+ // error, probably due to an inability to fetch a token.
+ do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+ // syncs should still be scheduled.
+ do_check_true(scheduler.nextSync > Date.now());
+ do_check_true(scheduler.syncTimer.delay > 0);
+ }
+ await cleanUpAndGo(server);
});
-add_identity_test(this, function* test_proper_interval_on_only_failing() {
+add_identity_test(this, async function test_proper_interval_on_only_failing() {
_("Ensure proper behavior when only failed records are applied.");
// If an engine reports that no records succeeded, we shouldn't decrease the
// sync interval.
do_check_false(scheduler.hasIncomingItems);
const INTERVAL = 10000000;
scheduler.syncInterval = INTERVAL;
Svc.Obs.notify("weave:service:sync:applied", {
applied: 2,
succeeded: 0,
failed: 2,
newFailed: 2,
reconciled: 0
});
- let deferred = Promise.defer();
- Utils.nextTick(function() {
- scheduler.adjustSyncInterval();
- do_check_false(scheduler.hasIncomingItems);
- do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
-
- deferred.resolve();
- });
- yield deferred.promise;
+ await promiseNextTick();
+ scheduler.adjustSyncInterval();
+ do_check_false(scheduler.hasIncomingItems);
+ do_check_eq(scheduler.syncInterval, scheduler.singleDeviceInterval);
});
--- a/services/sync/tests/unit/test_telemetry.js
+++ b/services/sync/tests/unit/test_telemetry.js
@@ -53,52 +53,52 @@ SteamEngine.prototype = {
};
function BogusEngine(service) {
Engine.call(this, "bogus", service);
}
BogusEngine.prototype = Object.create(SteamEngine.prototype);
-function cleanAndGo(server) {
+async function cleanAndGo(server) {
Svc.Prefs.resetBranch("");
Svc.Prefs.set("log.logger.engine.rotary", "Trace");
Service.recordManager.clearCache();
- return new Promise(resolve => server.stop(resolve));
+ await promiseStopServer(server);
}
// Avoid addon manager complaining about not being initialized
Service.engineManager.unregister("addons");
-add_identity_test(this, function *test_basic() {
+add_identity_test(this, async function test_basic() {
let helper = track_collections_helper();
let upd = helper.with_updated_collection;
- yield configureIdentity({ username: "johndoe" });
+ await configureIdentity({ username: "johndoe" });
let handlers = {
"/1.1/johndoe/info/collections": helper.handler,
"/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
"/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler())
};
let collections = ["clients", "bookmarks", "forms", "history", "passwords", "prefs", "tabs"];
for (let coll of collections) {
handlers["/1.1/johndoe/storage/" + coll] = upd(coll, new ServerCollection({}, true).handler());
}
let server = httpd_setup(handlers);
Service.serverURL = server.baseURI;
- yield sync_and_validate_telem(true);
+ await sync_and_validate_telem(true);
- yield new Promise(resolve => server.stop(resolve));
+ await promiseStopServer(server);
});
-add_task(function* test_processIncoming_error() {
+add_task(async function test_processIncoming_error() {
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bookmarks: {version: engine.version,
syncID: engine.syncID}}}},
bookmarks: {}
});
new SyncTestingInfrastructure(server.server);
@@ -113,17 +113,17 @@ add_task(function* test_processIncoming_
};
// Make the 10 minutes old so it will only be synced in the toFetch phase.
bogus_record.modified = Date.now() / 1000 - 60 * 10;
engine.lastSync = Date.now() / 1000 - 60;
engine.toFetch = [BOGUS_GUID];
let error, ping;
try {
- yield sync_engine_and_validate_telem(engine, true, errPing => ping = errPing);
+ await sync_engine_and_validate_telem(engine, true, errPing => ping = errPing);
} catch(ex) {
error = ex;
}
ok(!!error);
ok(!!ping);
equal(ping.uid, "0".repeat(32));
deepEqual(ping.failureReason, {
name: "othererror",
@@ -134,21 +134,21 @@ add_task(function* test_processIncoming_
equal(ping.engines[0].name, "bookmarks");
deepEqual(ping.engines[0].failureReason, {
name: "othererror",
error: "error.engine.reason.record_download_fail"
});
} finally {
store.wipe();
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function *test_uploading() {
+add_task(async function test_uploading() {
let engine = new BookmarksEngine(Service);
let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bookmarks: {version: engine.version,
syncID: engine.syncID}}}},
bookmarks: {}
});
new SyncTestingInfrastructure(server.server);
@@ -160,43 +160,43 @@ add_task(function *test_uploading() {
let bmk_id = PlacesUtils.bookmarks.insertBookmark(parent, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let guid = store.GUIDForId(bmk_id);
let record = store.createRecord(guid);
let collection = server.user("foo").collection("bookmarks");
try {
- let ping = yield sync_engine_and_validate_telem(engine, false);
+ let ping = await sync_engine_and_validate_telem(engine, false);
ok(!!ping);
equal(ping.engines.length, 1);
equal(ping.engines[0].name, "bookmarks");
ok(!!ping.engines[0].outgoing);
greater(ping.engines[0].outgoing[0].sent, 0)
ok(!ping.engines[0].incoming);
PlacesUtils.bookmarks.setItemTitle(bmk_id, "New Title");
store.wipe();
engine.resetClient();
- ping = yield sync_engine_and_validate_telem(engine, false);
+ ping = await sync_engine_and_validate_telem(engine, false);
equal(ping.engines.length, 1);
equal(ping.engines[0].name, "bookmarks");
equal(ping.engines[0].outgoing.length, 1);
ok(!!ping.engines[0].incoming);
} finally {
// Clean up.
store.wipe();
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function *test_upload_failed() {
+add_task(async function test_upload_failed() {
Service.identity.username = "foo";
let collection = new ServerCollection();
collection._wbos.flying = new ServerWBO('flying');
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
@@ -217,36 +217,36 @@ add_task(function *test_upload_failed()
engine._tracker.addChangedID("scotsman", SCOTSMAN_CHANGED);
engine._tracker.addChangedID("peppercorn", PEPPERCORN_CHANGED);
let meta_global = Service.recordManager.set(engine.metaURL, new WBORecord(engine.metaURL));
meta_global.payload.engines = { rotary: { version: engine.version, syncID: engine.syncID } };
try {
engine.enabled = true;
- let ping = yield sync_engine_and_validate_telem(engine, true);
+ let ping = await sync_engine_and_validate_telem(engine, true);
ok(!!ping);
equal(ping.engines.length, 1);
equal(ping.engines[0].incoming, null);
deepEqual(ping.engines[0].outgoing, [{ sent: 3, failed: 2 }]);
engine.lastSync = 123;
engine.lastSyncLocal = 456;
- ping = yield sync_engine_and_validate_telem(engine, true);
+ ping = await sync_engine_and_validate_telem(engine, true);
ok(!!ping);
equal(ping.engines.length, 1);
equal(ping.engines[0].incoming.reconciled, 1);
deepEqual(ping.engines[0].outgoing, [{ sent: 2, failed: 2 }]);
} finally {
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function *test_sync_partialUpload() {
+add_task(async function test_sync_partialUpload() {
Service.identity.username = "foo";
let collection = new ServerCollection();
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
let syncTesting = new SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
@@ -269,17 +269,17 @@ add_task(function *test_sync_partialUplo
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
engine.enabled = true;
- let ping = yield sync_engine_and_validate_telem(engine, true);
+ let ping = await sync_engine_and_validate_telem(engine, true);
ok(!!ping);
ok(!ping.failureReason);
equal(ping.engines.length, 1);
equal(ping.engines[0].name, "rotary");
ok(!ping.engines[0].incoming);
ok(!ping.engines[0].failureReason);
deepEqual(ping.engines[0].outgoing, [{ sent: 234, failed: 2 }]);
@@ -291,17 +291,17 @@ add_task(function *test_sync_partialUplo
collection.insert("record-no-1000", 1000);
engine.lastSync = 123;
engine.lastSyncLocal = 456;
ping = null;
try {
// should throw
- yield sync_engine_and_validate_telem(engine, true, errPing => ping = errPing);
+ await sync_engine_and_validate_telem(engine, true, errPing => ping = errPing);
} catch (e) {}
// It would be nice if we had a more descriptive error for this...
let uploadFailureError = {
name: "othererror",
error: "error.engine.reason.record_upload_fail"
};
ok(!!ping);
@@ -312,161 +312,161 @@ add_task(function *test_sync_partialUplo
failed: 1,
newFailed: 1,
reconciled: 232
});
ok(!ping.engines[0].outgoing);
deepEqual(ping.engines[0].failureReason, uploadFailureError);
} finally {
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function* test_generic_engine_fail() {
+add_task(async function test_generic_engine_fail() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {steam: {version: engine.version,
syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
let e = new Error("generic failure message")
engine._errToThrow = e;
try {
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
equal(ping.status.service, SYNC_FAILED_PARTIAL);
deepEqual(ping.engines.find(e => e.name === "steam").failureReason, {
name: "unexpectederror",
error: String(e)
});
} finally {
Service.engineManager.unregister(engine);
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function* test_engine_fail_ioerror() {
+add_task(async function test_engine_fail_ioerror() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {steam: {version: engine.version,
syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
// create an IOError to re-throw as part of Sync.
try {
// (Note that fakeservices.js has replaced Utils.jsonMove etc, but for
// this test we need the real one so we get real exceptions from the
// filesystem.)
- yield Utils._real_jsonMove("file-does-not-exist", "anything", {});
+ await Utils._real_jsonMove("file-does-not-exist", "anything", {});
} catch (ex) {
engine._errToThrow = ex;
}
ok(engine._errToThrow, "expecting exception");
try {
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
equal(ping.status.service, SYNC_FAILED_PARTIAL);
let failureReason = ping.engines.find(e => e.name === "steam").failureReason;
equal(failureReason.name, "unexpectederror");
// ensure the profile dir in the exception message has been stripped.
ok(!failureReason.error.includes(OS.Constants.Path.profileDir), failureReason.error);
ok(failureReason.error.includes("[profileDir]"), failureReason.error);
} finally {
Service.engineManager.unregister(engine);
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function* test_initial_sync_engines() {
+add_task(async function test_initial_sync_engines() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
let store = engine._store;
let engines = {};
// These are the only ones who actually have things to sync at startup.
let engineNames = ["clients", "bookmarks", "prefs", "tabs"];
let conf = { meta: { global: { engines } } };
for (let e of engineNames) {
engines[e] = { version: engine.version, syncID: engine.syncID };
conf[e] = {};
}
let server = serverForUsers({"foo": "password"}, conf);
new SyncTestingInfrastructure(server.server);
try {
- let ping = yield wait_for_ping(() => Service.sync(), true);
+ let ping = await wait_for_ping(() => Service.sync(), true);
equal(ping.engines.find(e => e.name === "clients").outgoing[0].sent, 1);
equal(ping.engines.find(e => e.name === "tabs").outgoing[0].sent, 1);
// for the rest we don't care about specifics
for (let e of ping.engines) {
if (!engineNames.includes(engine.name)) {
continue;
}
greaterOrEqual(e.took, 1);
ok(!!e.outgoing)
equal(e.outgoing.length, 1);
notEqual(e.outgoing[0].sent, undefined);
equal(e.outgoing[0].failed, undefined);
}
} finally {
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function* test_nserror() {
+add_task(async function test_nserror() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {steam: {version: engine.version,
syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
engine._errToThrow = Components.Exception("NS_ERROR_UNKNOWN_HOST", Cr.NS_ERROR_UNKNOWN_HOST);
try {
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
deepEqual(ping.status, {
service: SYNC_FAILED_PARTIAL,
sync: LOGIN_FAILED_NETWORK_ERROR
});
let enginePing = ping.engines.find(e => e.name === "steam");
deepEqual(enginePing.failureReason, {
name: "nserror",
code: Cr.NS_ERROR_UNKNOWN_HOST
});
} finally {
Service.engineManager.unregister(engine);
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_identity_test(this, function *test_discarding() {
+add_identity_test(this, async function test_discarding() {
let helper = track_collections_helper();
let upd = helper.with_updated_collection;
let telem = get_sync_test_telemetry();
telem.maxPayloadCount = 2;
telem.submissionInterval = Infinity;
let oldSubmit = telem.submit;
let server;
try {
- yield configureIdentity({ username: "johndoe" });
+ await configureIdentity({ username: "johndoe" });
let handlers = {
"/1.1/johndoe/info/collections": helper.handler,
"/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
"/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler())
};
let collections = ["clients", "bookmarks", "forms", "history", "passwords", "prefs", "tabs"];
@@ -478,87 +478,87 @@ add_identity_test(this, function *test_d
Service.serverURL = server.baseURI;
telem.submit = () => ok(false, "Submitted telemetry ping when we should not have");
for (let i = 0; i < 5; ++i) {
Service.sync();
}
telem.submit = oldSubmit;
telem.submissionInterval = -1;
- let ping = yield sync_and_validate_telem(true, true); // with this we've synced 6 times
+ let ping = await sync_and_validate_telem(true, true); // with this we've synced 6 times
equal(ping.syncs.length, 2);
equal(ping.discarded, 4);
} finally {
telem.maxPayloadCount = 500;
telem.submissionInterval = -1;
telem.submit = oldSubmit;
if (server) {
- yield new Promise(resolve => server.stop(resolve));
+ await promiseStopServer(server);
}
}
})
-add_task(function* test_no_foreign_engines_in_error_ping() {
+add_task(async function test_no_foreign_engines_in_error_ping() {
Service.engineManager.register(BogusEngine);
let engine = Service.engineManager.get("bogus");
engine.enabled = true;
let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bogus: {version: engine.version, syncID: engine.syncID}}}},
steam: {}
});
engine._errToThrow = new Error("Oh no!");
new SyncTestingInfrastructure(server.server);
try {
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
equal(ping.status.service, SYNC_FAILED_PARTIAL);
ok(ping.engines.every(e => e.name !== "bogus"));
} finally {
Service.engineManager.unregister(engine);
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function* test_sql_error() {
+add_task(async function test_sql_error() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {steam: {version: engine.version,
syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
engine._sync = function() {
// Just grab a DB connection and issue a bogus SQL statement synchronously.
let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase).DBConnection;
Async.querySpinningly(db.createAsyncStatement("select bar from foo"));
};
try {
- let ping = yield sync_and_validate_telem(true);
+ let ping = await sync_and_validate_telem(true);
let enginePing = ping.engines.find(e => e.name === "steam");
deepEqual(enginePing.failureReason, { name: "sqlerror", code: 1 });
} finally {
Service.engineManager.unregister(engine);
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
-add_task(function* test_no_foreign_engines_in_success_ping() {
+add_task(async function test_no_foreign_engines_in_success_ping() {
Service.engineManager.register(BogusEngine);
let engine = Service.engineManager.get("bogus");
engine.enabled = true;
let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bogus: {version: engine.version, syncID: engine.syncID}}}},
steam: {}
});
new SyncTestingInfrastructure(server.server);
try {
- let ping = yield sync_and_validate_telem();
+ let ping = await sync_and_validate_telem();
ok(ping.engines.every(e => e.name !== "bogus"));
} finally {
Service.engineManager.unregister(engine);
- yield cleanAndGo(server);
+ await cleanAndGo(server);
}
});
\ No newline at end of file
--- a/services/sync/tests/unit/test_utils_json.js
+++ b/services/sync/tests/unit/test_utils_json.js
@@ -104,11 +104,11 @@ add_test(function test_load_logging() {
Utils.jsonLoad("log", obj, ensureThrows(function(val) {
do_check_true(!val);
do_check_true(!!trace);
do_check_true(!!debug);
run_next_test();
}));
});
-add_task(function* test_undefined_callback() {
- yield Utils.jsonSave("foo", {}, ["v1", "v2"]);
+add_task(async function test_undefined_callback() {
+ await Utils.jsonSave("foo", {}, ["v1", "v2"]);
});