--- a/services/.eslintrc.js
+++ b/services/.eslintrc.js
@@ -16,12 +16,11 @@ module.exports = {
"no-ex-assign": "warn",
"no-func-assign": "warn",
"no-native-reassign": "warn",
"no-nested-ternary": "warn",
"no-octal": "warn",
"no-redeclare": "warn",
"no-unreachable": "warn",
"no-unsafe-finally": "warn",
- "no-unused-vars": "warn",
"no-useless-call": "warn"
}
};
--- a/services/cloudsync/CloudSyncBookmarks.jsm
+++ b/services/cloudsync/CloudSyncBookmarks.jsm
@@ -139,17 +139,16 @@ var Bookmarks = function() {
.then(setRootFolderExcludeFromBackupAnnotation)
.then(finish, deferred.reject);
return deferred.promise;
};
let getRootFolder = function(name) {
let ROOT_FOLDER_ANNO = "cloudsync/rootFolder/" + name;
- let ROOT_SHORTCUT_ANNO = "cloudsync/rootShortcut/" + name;
let deferred = Promise.defer();
function checkRootFolder(folderIds) {
if (!folderIds.length) {
return createRootFolder(name);
}
return Promise.resolve(folderIds[0]);
}
@@ -166,17 +165,16 @@ var Bookmarks = function() {
return deferred.promise;
};
let deleteRootFolder = function(name) {
let ROOT_FOLDER_ANNO = "cloudsync/rootFolder/" + name;
let ROOT_SHORTCUT_ANNO = "cloudsync/rootShortcut/" + name;
let deferred = Promise.defer();
- let placesRootId = PlacesUtils.placesRootId;
function getRootShortcutId() {
return PlacesWrapper.getLocalIdsWithAnnotation(ROOT_SHORTCUT_ANNO);
}
function deleteShortcut(shortcutIds) {
if (!shortcutIds.length) {
return Promise.resolve();
--- a/services/cloudsync/CloudSyncTabs.jsm
+++ b/services/cloudsync/CloudSyncTabs.jsm
@@ -215,29 +215,16 @@ this.Tabs = function() {
let eventTypes = [
"change",
];
let eventSource = new EventSource(eventTypes, suspend, resume);
let tabCache = new TabCache();
- let getWindowEnumerator = function() {
- return Services.wm.getEnumerator("navigator:browser");
- };
-
- let shouldSkipWindow = function(win) {
- return win.closed ||
- PrivateBrowsingUtils.isWindowPrivate(win);
- };
-
- let getTabState = function(tab) {
- return JSON.parse(Session.getTabState(tab));
- };
-
let getLocalTabs = function(filter) {
let deferred = Promise.defer();
filter = (undefined === filter) ? true : filter;
let filteredUrls = new RegExp("^(about:.*|chrome://weave/.*|wyciwyg:.*|file:.*)$"); // FIXME: should be a pref (B#1044304)
let allTabs = [];
--- a/services/common/modules-testing/storageserver.js
+++ b/services/common/modules-testing/storageserver.js
@@ -340,17 +340,17 @@ StorageServerCollection.prototype = {
* @param filter
* A predicate function, applied to the BSO, which dictates whether to
* include the BSO in the output.
*
* @return an array of ServerBSOs.
*/
bsos: function bsos(filter) {
let os = [];
- for (let [id, bso] of Object.entries(this._bsos)) {
+ for (let bso of Object.values(this._bsos)) {
if (!bso.deleted) {
os.push(bso);
}
}
if (!filter) {
return os;
}
@@ -435,17 +435,17 @@ StorageServerCollection.prototype = {
}
return true;
},
count: function count(options) {
options = options || {};
let c = 0;
- for (let [id, bso] of Object.entries(this._bsos)) {
+ for (let bso of Object.values(this._bsos)) {
if (bso.modified && this._inResultSet(bso, options)) {
c++;
}
}
return c;
},
get: function get(options) {
@@ -593,17 +593,17 @@ StorageServerCollection.prototype = {
throw new Error("Malformed client request.");
}
if (options.ids && options.ids.length > this.BATCH_MAX_COUNT) {
throw HTTP_400;
}
let deleted = [];
- for (let [id, bso] of Object.entries(this._bsos)) {
+ for (let bso of Object.values(this._bsos)) {
if (this._inResultSet(bso, options)) {
this._log.debug("Deleting " + JSON.stringify(bso));
deleted.push(bso.id);
bso.delete();
}
}
return deleted;
},
@@ -725,18 +725,16 @@ StorageServerCollection.prototype = {
this._log.info("Records: " + data.length);
response.setHeader("X-Num-Records", "" + data.length, false);
response.setHeader("X-Last-Modified", "" + this.timestamp, false);
response.setStatusLine(request.httpVersion, 200, "OK");
response.bodyOutputStream.write(body, body.length);
},
postHandler: function postHandler(request, response) {
- let options = this.parseOptions(request);
-
if (!request.hasHeader("content-type")) {
this._log.info("No Content-Type request header!");
throw HTTP_400;
}
let inputStream = request.bodyInputStream;
let inputBody = CommonUtils.readBytesFromInputStream(inputStream);
let input = [];
@@ -1314,17 +1312,17 @@ StorageServer.prototype = {
resp.setHeader("X-Timestamp", "" + timestamp, false);
let parts = this.pathRE.exec(req.path);
if (!parts) {
this._log.debug("StorageServer: Unexpected request: bad URL " + req.path);
throw HTTP_404;
}
- let [all, version, userPath, first, rest] = parts;
+ let [, version, userPath, first, rest] = parts;
if (version != STORAGE_API_VERSION) {
this._log.debug("StorageServer: Unknown version.");
throw HTTP_404;
}
let username;
// By default, the server requires users to be authenticated. When a
@@ -1422,17 +1420,17 @@ StorageServer.prototype = {
return;
}
let match = this.storageRE.exec(rest);
if (!match) {
this._log.warn("StorageServer: Unknown storage operation " + rest);
throw HTTP_404;
}
- let [all, collection, bsoID] = match;
+ let [, collection, bsoID] = match;
let coll = this.getCollection(username, collection);
let collectionExisted = !!coll;
switch (req.method) {
case "GET":
// Tried to GET on a collection that doesn't exist.
if (!coll) {
respond(404, "Not Found");
--- a/services/common/tests/unit/test_async_querySpinningly.js
+++ b/services/common/tests/unit/test_async_querySpinningly.js
@@ -80,19 +80,19 @@ function run_test() {
_("Grabbing fewer fields than queried is fine");
let r10 = querySpinningly("SELECT value, fieldname FROM moz_formhistory", ["fieldname"]);
do_check_eq(r10.length, 3);
_("Generate an execution error");
let query = "INSERT INTO moz_formhistory (fieldname, value) VALUES ('one', NULL)";
let stmt = Svc.Form.DBConnection.createStatement(query);
- let r11, except;
+ let except;
try {
- r11 = Async.querySpinningly(stmt);
+ Async.querySpinningly(stmt);
} catch (e) {
except = e;
}
stmt.finalize()
do_check_true(!!except);
do_check_eq(except.result, SQLITE_CONSTRAINT_VIOLATION);
_("Cleaning up");
--- a/services/common/tests/unit/test_blocklist_certificates.js
+++ b/services/common/tests/unit/test_blocklist_certificates.js
@@ -59,17 +59,17 @@ add_task(function* test_something() {
} catch (e) {
do_print(e);
}
}
server.registerPathHandler(configPath, handleResponse);
server.registerPathHandler(recordsPath, handleResponse);
// Test an empty db populates
- let result = yield OneCRLBlocklistClient.maybeSync(2000, Date.now());
+ yield OneCRLBlocklistClient.maybeSync(2000, Date.now());
// Open the collection, verify it's been populated:
// Our test data has a single record; it should be in the local collection
let sqliteHandle = yield FirefoxAdapter.openConnection({path: kintoFilename});
let collection = do_get_kinto_collection("certificates", sqliteHandle);
let list = yield collection.list();
do_check_eq(list.data.length, 1);
yield sqliteHandle.close();
--- a/services/common/tests/unit/test_blocklist_clients.js
+++ b/services/common/tests/unit/test_blocklist_clients.js
@@ -116,17 +116,17 @@ function run_test() {
do_register_cleanup(function() {
server.stop(() => { });
});
}
add_task(function* test_records_obtained_from_server_are_stored_in_db() {
for (let {client} of gBlocklistClients) {
// Test an empty db populates
- let result = yield client.maybeSync(2000, Date.now());
+ yield client.maybeSync(2000, Date.now());
// Open the collection, verify it's been populated:
// Our test data has a single record; it should be in the local collection
const sqliteHandle = yield FirefoxAdapter.openConnection({path: kintoFilename});
let collection = kintoCollection(client.collectionName, sqliteHandle);
let list = yield collection.list();
equal(list.data.length, 1);
yield sqliteHandle.close();
@@ -134,40 +134,38 @@ add_task(function* test_records_obtained
});
add_task(clear_state);
add_task(function* test_list_is_written_to_file_in_profile() {
for (let {client, filename, testData} of gBlocklistClients) {
const profFile = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
strictEqual(profFile.exists(), false);
- let result = yield client.maybeSync(2000, Date.now());
+ yield client.maybeSync(2000, Date.now());
strictEqual(profFile.exists(), true);
const content = yield readJSON(profFile.path);
equal(content.data[0].blockID, testData[testData.length - 1]);
}
});
add_task(clear_state);
add_task(function* test_current_server_time_is_saved_in_pref() {
for (let {client} of gBlocklistClients) {
- const before = Services.prefs.getIntPref(client.lastCheckTimePref);
const serverTime = Date.now();
yield client.maybeSync(2000, serverTime);
const after = Services.prefs.getIntPref(client.lastCheckTimePref);
equal(after, Math.round(serverTime / 1000));
}
});
add_task(clear_state);
add_task(function* test_update_json_file_when_addons_has_changes() {
for (let {client, filename, testData} of gBlocklistClients) {
yield client.maybeSync(2000, Date.now() - 1000);
- const before = Services.prefs.getIntPref(client.lastCheckTimePref);
const profFile = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
const fileLastModified = profFile.lastModifiedTime = profFile.lastModifiedTime - 1000;
const serverTime = Date.now();
yield client.maybeSync(3001, serverTime);
// File was updated.
notEqual(fileLastModified, profFile.lastModifiedTime);
@@ -193,17 +191,16 @@ add_task(function* test_sends_reload_mes
equal(received.data.filename, filename);
}
});
add_task(clear_state);
add_task(function* test_do_nothing_when_blocklist_is_up_to_date() {
for (let {client, filename} of gBlocklistClients) {
yield client.maybeSync(2000, Date.now() - 1000);
- const before = Services.prefs.getIntPref(client.lastCheckTimePref);
const profFile = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
const fileLastModified = profFile.lastModifiedTime = profFile.lastModifiedTime - 1000;
const serverTime = Date.now();
yield client.maybeSync(3000, serverTime);
// File was not updated.
equal(fileLastModified, profFile.lastModifiedTime);
--- a/services/common/tests/unit/test_blocklist_pinning.js
+++ b/services/common/tests/unit/test_blocklist_pinning.js
@@ -93,31 +93,31 @@ add_task(function* test_something() {
// ensure our pins are all missing before we start
ok(!sss.isSecureHost(sss.HEADER_HPKP, "one.example.com", 0));
ok(!sss.isSecureHost(sss.HEADER_HPKP, "two.example.com", 0));
ok(!sss.isSecureHost(sss.HEADER_HPKP, "three.example.com", 0));
ok(!sss.isSecureHost(sss.HEADER_HSTS, "five.example.com", 0));
// Test an empty db populates
- let result = yield PinningPreloadClient.maybeSync(2000, Date.now());
+ yield PinningPreloadClient.maybeSync(2000, Date.now());
let connection = yield FirefoxAdapter.openConnection({path: KINTO_STORAGE_PATH});
// Open the collection, verify it's been populated:
// Our test data has a single record; it should be in the local collection
let collection = do_get_kinto_collection(connection, COLLECTION_NAME);
let list = yield collection.list();
do_check_eq(list.data.length, 1);
// check that a pin exists for one.example.com
ok(sss.isSecureHost(sss.HEADER_HPKP, "one.example.com", 0));
// Test the db is updated when we call again with a later lastModified value
- result = yield PinningPreloadClient.maybeSync(4000, Date.now());
+ yield PinningPreloadClient.maybeSync(4000, Date.now());
// Open the collection, verify it's been updated:
// Our data now has four new records; all should be in the local collection
collection = do_get_kinto_collection(connection, COLLECTION_NAME);
list = yield collection.list();
do_check_eq(list.data.length, 5);
yield connection.close();
--- a/services/common/tests/unit/test_blocklist_signatures.js
+++ b/services/common/tests/unit/test_blocklist_signatures.js
@@ -138,17 +138,16 @@ add_task(function* test_check_signatures
// set the server date
response.setHeader("Date", (new Date(serverTimeMillis)).toUTCString());
response.write(sampled.responseBody);
}
for (let key of Object.keys(responses)) {
const keyParts = key.split(":");
- const method = keyParts[0];
const valueParts = keyParts[1].split("?");
const path = valueParts[0];
server.registerPathHandler(path, handleResponse.bind(null, 2000));
}
}
// First, perform a signature verification with known data and signature
--- a/services/common/tests/unit/test_hawkclient.js
+++ b/services/common/tests/unit/test_hawkclient.js
@@ -197,17 +197,17 @@ add_task(function* test_offset_after_req
});
let client = new HawkClient(server.baseURI);
let now = Date.now();
client.now = () => { return now + HOUR_MS; };
do_check_eq(client.localtimeOffsetMsec, 0);
- let response = yield client.request("/foo", method, TEST_CREDS);
+ yield client.request("/foo", method, TEST_CREDS);
// Should be about an hour off
do_check_true(Math.abs(client.localtimeOffsetMsec + HOUR_MS) < SECOND_MS);
yield deferredStop(server);
});
add_task(function* test_offset_in_hawk_header() {
let message = "Ohai!";
@@ -231,19 +231,16 @@ add_task(function* test_offset_in_hawk_h
} else {
response.setStatusLine(request.httpVersion, 400, "Delta: " + delta);
}
response.bodyOutputStream.write(message, message.length);
}
});
let client = new HawkClient(server.baseURI);
- function getOffset() {
- return client.localtimeOffsetMsec;
- }
client.now = () => {
return Date.now() + 12 * HOUR_MS;
};
// We begin with no offset
do_check_eq(client.localtimeOffsetMsec, 0);
yield client.request("/first", method, TEST_CREDS);
@@ -311,19 +308,16 @@ add_task(function* test_retry_request_on
do_check_true(delta < MINUTE_MS);
let message = "i love you!!!";
response.setStatusLine(request.httpVersion, 200, "OK");
response.bodyOutputStream.write(message, message.length);
}
});
let client = new HawkClient(server.baseURI);
- function getOffset() {
- return client.localtimeOffsetMsec;
- }
client.now = () => {
return Date.now() + 12 * HOUR_MS;
};
// We begin with no offset
do_check_eq(client.localtimeOffsetMsec, 0);
@@ -355,19 +349,16 @@ add_task(function* test_multiple_401_ret
let message = "never!!!";
response.setStatusLine(request.httpVersion, 401, "Unauthorized");
response.bodyOutputStream.write(message, message.length);
}
});
let client = new HawkClient(server.baseURI);
- function getOffset() {
- return client.localtimeOffsetMsec;
- }
client.now = () => {
return Date.now() - 12 * HOUR_MS;
};
// We begin with no offset
do_check_eq(client.localtimeOffsetMsec, 0);
@@ -397,19 +388,16 @@ add_task(function* test_500_no_retry() {
"/no-shutup": function() {
let message = "Cannot get ye flask.";
response.setStatusLine(request.httpVersion, 500, "Internal server error");
response.bodyOutputStream.write(message, message.length);
}
});
let client = new HawkClient(server.baseURI);
- function getOffset() {
- return client.localtimeOffsetMsec;
- }
// Throw off the clock so the HawkClient would want to retry the request if
// it could
client.now = () => {
return Date.now() - 12 * HOUR_MS;
};
// Request will 500; no retries
@@ -459,19 +447,16 @@ add_task(function* test_401_then_500() {
do_check_true(delta < MINUTE_MS);
let message = "Cannot get ye flask.";
response.setStatusLine(request.httpVersion, 500, "Internal server error");
response.bodyOutputStream.write(message, message.length);
}
});
let client = new HawkClient(server.baseURI);
- function getOffset() {
- return client.localtimeOffsetMsec;
- }
client.now = () => {
return Date.now() - 12 * HOUR_MS;
};
// We begin with no offset
do_check_eq(client.localtimeOffsetMsec, 0);
--- a/services/common/tests/unit/test_hawkrequest.js
+++ b/services/common/tests/unit/test_hawkrequest.js
@@ -52,17 +52,16 @@ add_test(function test_intl_accept_langu
}
let hawk = new HAWKAuthenticatedRESTRequest("https://example.com");
Services.prefs.addObserver("intl.accept_languages", checkLanguagePref, false);
setLanguagePref(languages[testCount]);
function checkLanguagePref() {
- var _done = false;
CommonUtils.nextTick(function() {
// Ensure we're only called for the number of entries in languages[].
do_check_true(testCount < languages.length);
do_check_eq(hawk._intl.accept_languages, languages[testCount]);
testCount++;
if (testCount < languages.length) {
@@ -181,17 +180,16 @@ add_test(function test_hawk_language_pre
"/foo": function(request, response) {
do_check_eq(languages[1], request.getHeader("Accept-Language"));
response.setStatusLine(request.httpVersion, 200, "OK");
},
});
let url = server.baseURI + "/foo";
- let postData = {};
let request;
setLanguage(languages[0]);
// A new request should create the stateful object for tracking the current
// language.
request = new HAWKAuthenticatedRESTRequest(url, credentials);
CommonUtils.nextTick(testFirstLanguage);
--- a/services/common/tests/unit/test_logmanager.js
+++ b/services/common/tests/unit/test_logmanager.js
@@ -78,17 +78,17 @@ add_task(function* test_SharedLogs() {
// and the second.
Services.prefs.setCharPref("log-manager-2.test.log.appender.console", "Debug");
Services.prefs.setCharPref("log-manager-2.test.log.appender.dump", "Debug");
Services.prefs.setCharPref("log-manager-2.test.log.appender.file.level", "Debug");
let lm2 = new LogManager("log-manager-2.test.", ["TestLog3"], "test");
let log = Log.repository.getLogger("TestLog3");
- let [capp, dapp, fapps] = getAppenders(log);
+ let [capp, dapp] = getAppenders(log);
// console and dump appenders should be "trace" as it is more verbose than
// "debug"
equal(capp.level, Log.Level.Trace);
equal(dapp.level, Log.Level.Trace);
// Set the prefs on the -1 branch to "Error" - it should then end up with
// "Debug" from the -2 branch.
--- a/services/common/tests/unit/test_storage_adapter.js
+++ b/services/common/tests/unit/test_storage_adapter.js
@@ -241,17 +241,16 @@ add_test(function test_db_creation() {
});
// this is the closest we can get to a schema version upgrade at v1 - test an
// existing database
add_test(function test_creation_from_empty_db() {
add_test(function test_create_from_empty_db() {
// place an empty kinto db file in the profile
let profile = do_get_profile();
- let kintoDB = do_get_kinto_db();
let emptyDB = do_get_file("test_storage_adapter/empty.sqlite");
emptyDB.copyTo(profile, kintoFilename);
run_next_test();
});
test_collection_operations();
--- a/services/common/tests/unit/test_storage_server.js
+++ b/services/common/tests/unit/test_storage_server.js
@@ -168,18 +168,18 @@ add_test(function test_url_parsing() {
[all, version, user, first, rest] = parts;
do_check_eq(all, "/2.0/123456/storage");
do_check_eq(version, "2.0");
do_check_eq(user, "123456");
do_check_eq(first, "storage");
do_check_eq(rest, undefined);
parts = server.storageRE.exec("storage");
- let storage, collection, id;
- [all, storage, collection, id] = parts;
+ let collection;
+ [all, , collection] = parts;
do_check_eq(all, "storage");
do_check_eq(collection, undefined);
run_next_test();
});
add_test(function test_basic_http() {
let server = new StorageServer();
@@ -409,17 +409,17 @@ add_test(function test_bso_delete_not_ex
add_test(function test_bso_delete_exists() {
_("Ensure proper semantics when deleting a BSO that exists.");
let server = new StorageServer();
server.registerUser("123", "password");
server.startSynchronous();
let coll = server.user("123").createCollection("test");
- let bso = coll.insert("myid", {foo: "bar"});
+ coll.insert("myid", {foo: "bar"});
let timestamp = coll.timestamp;
server.callback.onItemDeleted = function onDeleted(username, collection, id) {
delete server.callback.onItemDeleted;
do_check_eq(username, "123");
do_check_eq(collection, "test");
do_check_eq(id, "myid");
};
--- a/services/common/utils.js
+++ b/services/common/utils.js
@@ -232,22 +232,20 @@ this.CommonUtils = {
return CommonUtils.decodeUTF8(CommonUtils.hexToBytes(hex));
},
/**
* Base32 encode (RFC 4648) a string
*/
encodeBase32: function encodeBase32(bytes) {
const key = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";
- let quanta = Math.floor(bytes.length / 5);
let leftover = bytes.length % 5;
// Pad the last quantum with zeros so the length is a multiple of 5.
if (leftover) {
- quanta += 1;
for (let i = leftover; i < 5; i++)
bytes += "\0";
}
// Chop the string into quanta of 5 bytes (40 bits). Each quantum
// is turned into 8 characters from the 32 character base.
let ret = "";
for (let i = 0; i < bytes.length; i += 5) {
--- a/services/crypto/modules/utils.js
+++ b/services/crypto/modules/utils.js
@@ -129,17 +129,16 @@ this.CryptoUtils = {
hasher.init(type, key);
return hasher;
},
/**
* HMAC-based Key Derivation (RFC 5869).
*/
hkdf: function hkdf(ikm, xts, info, len) {
- const BLOCKSIZE = 256 / 8;
if (typeof xts === undefined)
xts = String.fromCharCode(0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0);
let h = CryptoUtils.makeHMACHasher(Ci.nsICryptoHMAC.SHA256,
CryptoUtils.makeHMACKey(xts));
let prk = CryptoUtils.digestBytes(ikm, h);
--- a/services/fxaccounts/FxAccounts.jsm
+++ b/services/fxaccounts/FxAccounts.jsm
@@ -739,17 +739,17 @@ FxAccountsInternal.prototype = {
client_id: FX_OAUTH_CLIENT_ID
});
return client.destroyToken(tokenData.token)
},
_destroyAllOAuthTokens(tokenInfos) {
// let's just destroy them all in parallel...
let promises = [];
- for (let [key, tokenInfo] of Object.entries(tokenInfos || {})) {
+ for (let tokenInfo of Object.values(tokenInfos || {})) {
promises.push(this._destroyOAuthToken(tokenInfo));
}
return Promise.all(promises);
},
signOut: function signOut(localOnly) {
let currentState = this.currentAccountState;
let sessionToken;
@@ -950,17 +950,16 @@ FxAccountsInternal.prototype = {
// setSignedInUser() was called.
this.notifyObservers(ONVERIFIED_NOTIFICATION);
return currentState.getUserAccountData();
}.bind(this)).then(result => currentState.resolve(result));
},
getAssertionFromCert(data, keyPair, cert, audience) {
log.debug("getAssertionFromCert");
- let payload = {};
let d = Promise.defer();
let options = {
duration: ASSERTION_LIFETIME,
localtimeOffsetMsec: this.localtimeOffsetMsec,
now: this.now()
};
let currentState = this.currentAccountState;
// "audience" should look like "http://123done.org".
--- a/services/fxaccounts/FxAccountsManager.jsm
+++ b/services/fxaccounts/FxAccountsManager.jsm
@@ -453,18 +453,16 @@ this.FxAccountsManager = {
},
queryAccount(aEmail) {
log.debug("queryAccount " + aEmail);
if (Services.io.offline) {
return this._error(ERROR_OFFLINE);
}
- let deferred = Promise.defer();
-
if (!aEmail) {
return this._error(ERROR_INVALID_EMAIL);
}
let client = this._getFxAccountsClient();
return client.accountExists(aEmail).then(
result => {
log.debug("Account " + result ? "" : "does not" + " exists");
--- a/services/fxaccounts/tests/xpcshell/test_accounts.js
+++ b/services/fxaccounts/tests/xpcshell/test_accounts.js
@@ -999,17 +999,17 @@ add_task(function* test_sign_out_with_de
add_task(function* test_sign_out_without_device() {
const fxa = new MockFxAccounts();
const credentials = getTestUser("alice");
delete credentials.deviceId;
yield fxa.internal.setSignedInUser(credentials);
- const user = yield fxa.internal.getUserAccountData();
+ yield fxa.internal.getUserAccountData();
const spy = {
signOut: { count: 0, args: [] },
signOutAndDeviceDestroy: { count: 0 }
};
const client = fxa.internal.fxAccountsClient;
client.signOut = function() {
spy.signOut.count += 1;
--- a/services/fxaccounts/tests/xpcshell/test_accounts_device_registration.js
+++ b/services/fxaccounts/tests/xpcshell/test_accounts_device_registration.js
@@ -177,17 +177,16 @@ add_task(function* test_updateDeviceRegi
const data = yield state.getUserAccountData();
do_check_eq(data.deviceId, "newly-generated device id");
do_check_eq(data.deviceRegistrationVersion, DEVICE_REGISTRATION_VERSION);
});
add_task(function* test_updateDeviceRegistration_with_existing_device() {
const deviceName = "phil's device";
- const deviceType = "desktop";
const credentials = getTestUser("pb");
const fxa = new MockFxAccounts({ name: deviceName });
yield fxa.internal.setSignedInUser(credentials);
const spy = {
registerDevice: { count: 0, args: [] },
updateDevice: { count: 0, args: [] },
@@ -358,17 +357,16 @@ add_task(function* test_updateDeviceRegi
const data = yield state.getUserAccountData();
do_check_eq(data.deviceId, credentials.deviceId);
do_check_eq(data.deviceRegistrationVersion, null);
});
add_task(function* test_updateDeviceRegistration_with_unrecoverable_error() {
const deviceName = "foo";
- const deviceType = "bar";
const credentials = getTestUser("baz");
delete credentials.deviceId;
const fxa = new MockFxAccounts({ name: deviceName });
yield fxa.internal.setSignedInUser(credentials);
const spy = {
registerDevice: { count: 0, args: [] },
--- a/services/fxaccounts/tests/xpcshell/test_client.js
+++ b/services/fxaccounts/tests/xpcshell/test_client.js
@@ -581,17 +581,16 @@ add_task(function* test_signCertificate(
} catch (expectedError) {
do_check_eq(102, expectedError.errno);
}
yield deferredStop(server);
});
add_task(function* test_accountExists() {
- let sessionMessage = JSON.stringify({sessionToken: FAKE_SESSION_TOKEN});
let existsMessage = JSON.stringify({error: "wrong password", code: 400, errno: 103});
let doesntExistMessage = JSON.stringify({error: "no such account", code: 400, errno: 102});
let emptyMessage = "{}";
let server = httpd_setup({
"/account/login": function(request, response) {
let body = CommonUtils.readBytesFromInputStream(request.bodyInputStream);
let jsonBody = JSON.parse(body);
--- a/services/fxaccounts/tests/xpcshell/test_credentials.js
+++ b/services/fxaccounts/tests/xpcshell/test_credentials.js
@@ -62,18 +62,16 @@ add_task(function* test_onepw_setup_cred
// derive unwrap key
let unwrapKeyInfo = Credentials.keyWord("unwrapBkey");
let unwrapKey = hkdf(quickStretchedPW, hkdfSalt, unwrapKeyInfo, hkdfLen);
do_check_eq(b2h(unwrapKey), "8ff58975be391338e4ec5d7138b5ed7b65c7d1bfd1f3a4f93e05aa47d5b72be9");
});
add_task(function* test_client_stretch_kdf() {
- let pbkdf2 = CryptoUtils.pbkdf2Generate;
- let hkdf = CryptoUtils.hkdf;
let expected = vectors["client stretch-KDF"];
let email = h2s(expected.email);
let password = h2s(expected.password);
// Intermediate value from sjcl implementation in fxa-js-client
// The key thing is the c3a9 sequence in "andré"
let salt = Credentials.keyWordExtended("quickStretch", email);
--- a/services/fxaccounts/tests/xpcshell/test_profile.js
+++ b/services/fxaccounts/tests/xpcshell/test_profile.js
@@ -186,17 +186,17 @@ add_task(function* fetchAndCacheProfileO
then(thenFunc) {
return thenFunc();
}
}
};
let profile = CreateFxAccountsProfile(fxa, client);
let request1 = profile._fetchAndCacheProfile();
- let request2 = profile._fetchAndCacheProfile();
+ profile._fetchAndCacheProfile();
// should be one request made to fetch the profile (but the promise returned
// by it remains unresolved)
do_check_eq(numFetches, 1);
// resolve the promise.
resolveProfile({ body: { avatar: "myimg"} });
--- a/services/sync/modules/SyncedTabs.jsm
+++ b/services/sync/modules/SyncedTabs.jsm
@@ -105,20 +105,19 @@ let SyncedTabsInternal = {
}
// A boolean that controls whether we should show the icon from the remote tab.
const showRemoteIcons = Preferences.get("services.sync.syncedTabs.showRemoteIcons", true);
let engine = Weave.Service.engineManager.get("tabs");
let seenURLs = new Set();
- let parentIndex = 0;
let ntabs = 0;
- for (let [guid, client] of Object.entries(engine.getAllClients())) {
+ for (let client of Object.values(engine.getAllClients())) {
if (!Weave.Service.clientsEngine.remoteClientExists(client.id)) {
continue;
}
let clientRepr = yield this._makeClient(client);
log.debug("Processing client", clientRepr);
for (let tab of client.tabs) {
let url = tab.urlHistory[0];
--- a/services/sync/modules/bookmark_validator.js
+++ b/services/sync/modules/bookmark_validator.js
@@ -319,17 +319,16 @@ class BookmarkValidator {
* out yet.
*/
inspectServerRecords(serverRecords) {
let deletedItemIds = new Set();
let idToRecord = new Map();
let deletedRecords = [];
let folders = [];
- let problems = [];
let problemData = new BookmarkProblemData();
let resultRecords = [];
for (let record of serverRecords) {
if (!record.id) {
++problemData.missingIDs;
@@ -615,18 +614,16 @@ class BookmarkValidator {
inspectionInfo.clientRecords = clientRecords;
// Mainly do this to remove deleted items and normalize child guids.
serverRecords = inspectionInfo.records;
let problemData = inspectionInfo.problemData;
this._validateClient(problemData, clientRecords);
- let matches = [];
-
let allRecords = new Map();
let serverDeletedLookup = new Set(inspectionInfo.deletedRecords.map(r => r.id));
for (let sr of serverRecords) {
if (sr.fake) {
continue;
}
allRecords.set(sr.id, {client: null, server: sr});
--- a/services/sync/modules/collection_validator.js
+++ b/services/sync/modules/collection_validator.js
@@ -148,17 +148,16 @@ class CollectionValidator {
} else {
seenServer.set(id, record);
allRecords.set(id, { server: record, client: null, });
}
record.understood = this.clientUnderstands(record);
}
}
- let recordPairs = [];
let seenClient = new Map();
for (let record of clientItems) {
let id = record[this.idProp];
record.shouldSync = this.syncedByClient(record);
seenClient.set(id, record);
let combined = allRecords.get(id);
if (combined) {
combined.client = record;
--- a/services/sync/modules/engines/bookmarks.js
+++ b/services/sync/modules/engines/bookmarks.js
@@ -331,17 +331,17 @@ BookmarksEngine.prototype = {
for (let child of tree.children) {
if (isSyncedRootNode(child)) {
yield* walkBookmarksTree(child, tree);
}
}
}
for (let [node, parent] of walkBookmarksRoots(tree)) {
- let {guid, id, type: placeType} = node;
+ let {guid, type: placeType} = node;
guid = PlacesSyncUtils.bookmarks.guidToSyncId(guid);
let key;
switch (placeType) {
case PlacesUtils.TYPE_X_MOZ_PLACE:
// Bookmark
let query = null;
if (node.annos && node.uri.startsWith("place:")) {
query = node.annos.find(({name}) =>
--- a/services/sync/modules/identity.js
+++ b/services/sync/modules/identity.js
@@ -290,17 +290,16 @@ IdentityManager.prototype = {
throw new Error("syncKey cannot be set before username.");
}
this._log.info("Sync Key being updated.");
this._syncKey = value;
// Clear any cached Sync Key Bundle and regenerate it.
this._syncKeyBundle = null;
- let bundle = this.syncKeyBundle;
this._syncKeyUpdated = true;
},
/**
* Obtain the active SyncKeyBundle.
*
* This returns a SyncKeyBundle representing a key pair derived from the
--- a/services/sync/modules/resource.js
+++ b/services/sync/modules/resource.js
@@ -158,17 +158,17 @@ AsyncResource.prototype = {
for (let [k, v] of Object.entries(result.headers)) {
headers[k.toLowerCase()] = v;
}
}
} else {
this._log.debug("No authenticator found.");
}
- for (let [key, value] of Object.entries(headers)) {
+ for (let key of Object.keys(headers)) {
if (key == "authorization")
this._log.trace("HTTP Header " + key + ": ***** (suppressed)");
else
this._log.trace("HTTP Header " + key + ": " + headers[key]);
channel.setRequestHeader(key, headers[key], false);
}
return channel;
},
--- a/services/sync/modules/service.js
+++ b/services/sync/modules/service.js
@@ -611,17 +611,17 @@ Sync11Service.prototype = {
let cryptoKeys;
if (infoCollections && (CRYPTO_COLLECTION in infoCollections)) {
try {
cryptoKeys = new CryptoWrapper(CRYPTO_COLLECTION, KEYS_WBO);
let cryptoResp = cryptoKeys.fetch(this.resource(this.cryptoKeysURL)).response;
if (cryptoResp.success) {
- let keysChanged = this.handleFetchedKeys(syncKeyBundle, cryptoKeys);
+ this.handleFetchedKeys(syncKeyBundle, cryptoKeys);
return true;
} else if (cryptoResp.status == 404) {
// On failure, ask to generate new keys and upload them.
// Fall through to the behavior below.
this._log.warn("Got 404 for crypto/keys, but 'crypto' in info/collections. Regenerating.");
cryptoKeys = null;
} else {
// Some other problem.
@@ -1087,18 +1087,16 @@ Sync11Service.prototype = {
}
this._log.trace("info/configuration for this server", this.serverConfiguration);
return true;
},
// Stuff we need to do after login, before we can really do
// anything (e.g. key setup).
_remoteSetup: function _remoteSetup(infoResponse) {
- let reset = false;
-
if (!this._fetchServerConfiguration()) {
return false;
}
this._log.debug("Fetching global metadata record");
let meta = this.recordManager.get(this.metaURL);
// Checking modified time of the meta record.
@@ -1178,18 +1176,16 @@ Sync11Service.prototype = {
return false;
}
if (!meta)
this._log.info("No metadata record, server wipe needed");
if (meta && !meta.payload.syncID)
this._log.warn("No sync id, server wipe needed");
- reset = true;
-
this._log.info("Wiping server data");
this._freshStart();
if (status == 404)
this._log.info("Metadata record not found, server was wiped to ensure " +
"consistency.");
else // 200
this._log.info("Wiped server; incompatible metadata: " + remoteVersion);
@@ -1312,17 +1308,17 @@ Sync11Service.prototype = {
let synchronizer = new EngineSynchronizer(this);
let cb = Async.makeSpinningCallback();
synchronizer.onComplete = cb;
synchronizer.sync(engineNamesToSync);
// wait() throws if the first argument is truthy, which is exactly what
// we want.
- let result = cb.wait();
+ cb.wait();
histogram = Services.telemetry.getHistogramById("WEAVE_COMPLETE_SUCCESS_COUNT");
histogram.add(1);
// We successfully synchronized.
// Check if the identity wants to pre-fetch a migration sentinel from
// the server.
// If we have no clusterURL, we are probably doing a node reassignment
@@ -1500,34 +1496,32 @@ Sync11Service.prototype = {
_freshStart: function _freshStart() {
this._log.info("Fresh start. Resetting client and considering key upgrade.");
this.resetClient();
this.collectionKeys.clear();
this.upgradeSyncKey(this.syncID);
// Wipe the server.
- let wipeTimestamp = this.wipeServer();
+ this.wipeServer();
// Upload a new meta/global record.
let meta = new WBORecord("meta", "global");
meta.payload.syncID = this.syncID;
meta.payload.storageVersion = STORAGE_VERSION;
meta.payload.declined = this.engineManager.getDeclined();
meta.isNew = true;
// uploadMetaGlobal throws on failure -- including race conditions.
// If we got into a race condition, we'll abort the sync this way, too.
// That's fine. We'll just wait till the next sync. The client that we're
// racing is probably busy uploading stuff right now anyway.
this.uploadMetaGlobal(meta);
// Wipe everything we know about except meta because we just uploaded it
- let engines = [this.clientsEngine].concat(this.engineManager.getAll());
- let collections = engines.map(engine => engine.name);
// TODO: there's a bug here. We should be calling resetClient, no?
// Generate, upload, and download new keys. Do this last so we don't wipe
// them...
this.generateNewSymmetricKeys();
},
/**
--- a/services/sync/tests/unit/head_http_server.js
+++ b/services/sync/tests/unit/head_http_server.js
@@ -194,17 +194,17 @@ ServerCollection.prototype = {
* @param filter
* A predicate function, applied to the WBO, which dictates whether to
* include the WBO in the output.
*
* @return an array of ServerWBOs.
*/
wbos: function wbos(filter) {
let os = [];
- for (let [id, wbo] of Object.entries(this._wbos)) {
+ for (let wbo of Object.values(this._wbos)) {
if (wbo.payload) {
os.push(wbo);
}
}
if (filter) {
return os.filter(filter);
}
@@ -271,29 +271,29 @@ ServerCollection.prototype = {
return wbo.payload
&& (!options.ids || (options.ids.indexOf(wbo.id) != -1))
&& (!options.newer || (wbo.modified > options.newer));
},
count(options) {
options = options || {};
let c = 0;
- for (let [id, wbo] of Object.entries(this._wbos)) {
+ for (let wbo of Object.values(this._wbos)) {
if (wbo.modified && this._inResultSet(wbo, options)) {
c++;
}
}
return c;
},
get(options) {
let result;
if (options.full) {
let data = [];
- for (let [id, wbo] of Object.entries(this._wbos)) {
+ for (let wbo of Object.values(this._wbos)) {
// Drop deleted.
if (wbo.modified && this._inResultSet(wbo, options)) {
data.push(wbo.get());
}
}
let start = options.offset || 0;
if (options.limit) {
let numItemsPastOffset = data.length - start;
@@ -356,17 +356,17 @@ ServerCollection.prototype = {
}
return {modified: new_timestamp(),
success,
failed};
},
delete(options) {
let deleted = [];
- for (let [id, wbo] of Object.entries(this._wbos)) {
+ for (let wbo of Object.values(this._wbos)) {
if (this._inResultSet(wbo, options)) {
this._log.debug("Deleting " + JSON.stringify(wbo));
deleted.push(wbo.id);
wbo.delete();
}
}
return deleted;
},
@@ -843,17 +843,17 @@ SyncServer.prototype = {
}
let parts = this.pathRE.exec(req.path);
if (!parts) {
this._log.debug("SyncServer: Unexpected request: bad URL " + req.path);
throw HTTP_404;
}
- let [all, version, username, first, rest] = parts;
+ let [, version, username, first, rest] = parts;
// Doing a float compare of the version allows for us to pretend there was
// a node-reassignment - eg, we could re-assign from "1.1/user/" to
// "1.10/user" - this server will then still accept requests with the new
// URL while any code in sync itself which compares URLs will see a
// different URL.
if (parseFloat(version) != parseFloat(SYNC_API_VERSION)) {
this._log.debug("SyncServer: Unknown version.");
throw HTTP_404;
@@ -911,17 +911,17 @@ SyncServer.prototype = {
return undefined;
}
let match = this.storageRE.exec(rest);
if (!match) {
this._log.warn("SyncServer: Unknown storage operation " + rest);
throw HTTP_404;
}
- let [all, collection, wboID] = match;
+ let [, collection, wboID] = match;
let coll = this.getCollection(username, collection);
switch (req.method) {
case "GET":
if (!coll) {
if (wboID) {
respond(404, "Not found", "Not found");
return undefined;
}
--- a/services/sync/tests/unit/test_addon_utils.js
+++ b/services/sync/tests/unit/test_addon_utils.js
@@ -18,18 +18,16 @@ prefs.set("extensions.getAddons.get.url"
loadAddonTestFunctions();
startupManager();
function createAndStartHTTPServer(port = HTTP_PORT) {
try {
let server = new HttpServer();
- let bootstrap1XPI = ExtensionsTestPath("/addons/test_bootstrap1_1.xpi");
-
server.registerFile("/search/guid:missing-sourceuri%40tests.mozilla.org",
do_get_file("missing-sourceuri.xml"));
server.registerFile("/search/guid:rewrite%40tests.mozilla.org",
do_get_file("rewrite-search.xml"));
server.start(port);
--- a/services/sync/tests/unit/test_addons_reconciler.js
+++ b/services/sync/tests/unit/test_addons_reconciler.js
@@ -101,17 +101,16 @@ add_test(function test_uninstall_detecti
let reconciler = new AddonsReconciler();
reconciler.startListening();
reconciler._addons = {};
reconciler._changes = [];
let addon = installAddon("test_bootstrap1_1");
let id = addon.id;
- let guid = addon.syncGUID;
reconciler._changes = [];
uninstallAddon(addon);
do_check_eq(1, Object.keys(reconciler.addons).length);
do_check_true(id in reconciler.addons);
let record = reconciler.addons[id];
--- a/services/sync/tests/unit/test_addons_store.js
+++ b/services/sync/tests/unit/test_addons_store.js
@@ -169,17 +169,17 @@ add_test(function test_ignore_different_
let record = createRecordForThisApp(addon.syncGUID, addon.id, false, false);
record.applicationID = "FAKE_ID";
let failed = store.applyIncomingBatch([record]);
do_check_eq(0, failed.length);
let newAddon = getAddonFromAddonManagerByID(addon.id);
- do_check_false(addon.userDisabled);
+ do_check_false(newAddon.userDisabled);
uninstallAddon(addon);
run_next_test();
});
add_test(function test_ignore_unknown_source() {
_("Ensure incoming records with unknown source are ignored.");
@@ -188,17 +188,17 @@ add_test(function test_ignore_unknown_so
let record = createRecordForThisApp(addon.syncGUID, addon.id, false, false);
record.source = "DUMMY_SOURCE";
let failed = store.applyIncomingBatch([record]);
do_check_eq(0, failed.length);
let newAddon = getAddonFromAddonManagerByID(addon.id);
- do_check_false(addon.userDisabled);
+ do_check_false(newAddon.userDisabled);
uninstallAddon(addon);
run_next_test();
});
add_test(function test_apply_uninstall() {
_("Ensures that uninstalling an add-on from a record works.");
@@ -426,17 +426,17 @@ add_test(function test_create_bad_instal
let server = createAndStartHTTPServer(HTTP_PORT);
// The handler returns a search result but the XPI will 404.
const id = "missing-xpi@tests.mozilla.org";
let guid = Utils.makeGUID();
let record = createRecordForThisApp(guid, id, true, false);
- let failed = store.applyIncomingBatch([record]);
+ /* let failed = */ store.applyIncomingBatch([record]);
// This addon had no source URI so was skipped - but it's not treated as
// failure.
// XXX - this test isn't testing what we thought it was. Previously the addon
// was not being installed due to requireSecureURL checking *before* we'd
// attempted to get the XPI.
// With requireSecureURL disabled we do see a download failure, but the addon
// *does* get added to |failed|.
// FTR: onDownloadFailed() is called with ERROR_NETWORK_FAILURE, so it's going
--- a/services/sync/tests/unit/test_bookmark_duping.js
+++ b/services/sync/tests/unit/test_bookmark_duping.js
@@ -136,17 +136,17 @@ async function validate(collection, expe
add_task(async function test_dupe_bookmark() {
_("Ensure that a bookmark we consider a dupe is handled correctly.");
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
- let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ let {guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
engine.sync();
// We've added the bookmark, its parent (folder1) plus "menu", "toolbar", "unfiled", and "mobile".
equal(collection.count(), 6);
equal(getFolderChildrenIDs(folder1_id).length, 1);
// Now create a new incoming record that looks alot like a dupe.
@@ -187,17 +187,17 @@ add_task(async function test_dupe_bookma
add_task(async function test_dupe_reparented_bookmark() {
_("Ensure that a bookmark we consider a dupe from a different parent is handled correctly");
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
- let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ let {guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
// Another parent folder *with the same name*
let {id: folder2_id, guid: folder2_guid } = createFolder(bms.toolbarFolder, "Folder 1");
do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`);
engine.sync();
// We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
@@ -253,17 +253,17 @@ add_task(async function test_dupe_repare
add_task(async function test_dupe_reparented_locally_changed_bookmark() {
_("Ensure that a bookmark with local changes we consider a dupe from a different parent is handled correctly");
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
- let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ let {guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
// Another parent folder *with the same name*
let {id: folder2_id, guid: folder2_guid } = createFolder(bms.toolbarFolder, "Folder 1");
do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`);
engine.sync();
// We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
@@ -330,19 +330,19 @@ add_task(async function test_dupe_repare
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"appears in the same sync before the dupe item");
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
- let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ let {guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
// One more folder we'll use later.
- let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
+ let {guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
engine.sync();
// We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
equal(collection.count(), 7);
equal(getFolderChildrenIDs(folder1_id).length, 1);
@@ -407,19 +407,19 @@ add_task(async function test_dupe_repare
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"doesn't exist locally as we process the child, but does appear in the same sync");
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
- let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ let {guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
// One more folder we'll use later.
- let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
+ let {guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
engine.sync();
// We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
equal(collection.count(), 7);
equal(getFolderChildrenIDs(folder1_id).length, 1);
@@ -484,19 +484,19 @@ add_task(async function test_dupe_repare
_("Ensure that a bookmark we consider a dupe from a different parent that " +
"doesn't exist locally and doesn't appear in this Sync is handled correctly");
let { server, collection } = await this.setup();
try {
// The parent folder and one bookmark in it.
let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
- let {id: bmk1_id, guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
+ let {guid: bmk1_guid} = createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
// One more folder we'll use later.
- let {id: folder2_id, guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
+ let {guid: folder2_guid} = createFolder(bms.toolbarFolder, "A second folder");
do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
engine.sync();
// We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
equal(collection.count(), 7);
equal(getFolderChildrenIDs(folder1_id).length, 1);
@@ -603,17 +603,17 @@ add_task(async function test_dupe_repare
add_task(async function test_dupe_empty_folder() {
_("Ensure that an empty folder we consider a dupe is handled correctly.");
// Empty folders aren't particularly interesting in practice (as that seems
// an edge-case) but duping folders with items is broken - bug 1293163.
let { server, collection } = await this.setup();
try {
// The folder we will end up duping away.
- let {id: folder1_id, guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
+ let {guid: folder1_guid } = createFolder(bms.toolbarFolder, "Folder 1");
engine.sync();
// We've added 1 folder, "menu", "toolbar", "unfiled", and "mobile".
equal(collection.count(), 5);
// Now create new incoming records that looks alot like a dupe of "Folder 1".
let newFolderGUID = Utils.makeGUID();
--- a/services/sync/tests/unit/test_bookmark_engine.js
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -44,17 +44,16 @@ async function fetchAllSyncIds() {
return syncIds;
}
add_task(async function test_delete_invalid_roots_from_server() {
_("Ensure that we delete the Places and Reading List roots from the server.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
- let tracker = engine._tracker;
let server = serverForFoo(engine);
await SyncTestingInfrastructure(server);
let collection = server.user("foo").collection("bookmarks");
Svc.Obs.notify("weave:engine:start-tracking");
try {
@@ -101,17 +100,16 @@ add_task(async function test_delete_inva
}
});
add_task(async function test_change_during_sync() {
_("Ensure that we track changes made during a sync.");
let engine = new BookmarksEngine(Service);
let store = engine._store;
- let tracker = engine._tracker;
let server = serverForFoo(engine);
await SyncTestingInfrastructure(server);
let collection = server.user("foo").collection("bookmarks");
let bz_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarksMenuFolderId, Utils.makeURI("https://bugzilla.mozilla.org/"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Bugzilla");
@@ -251,21 +249,19 @@ add_task(async function test_change_duri
await promiseStopServer(server);
Svc.Obs.notify("weave:engine:stop-tracking");
}
});
add_task(async function bad_record_allIDs() {
let server = new SyncServer();
server.start();
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
_("Ensure that bad Places queries don't cause an error in getAllIDs.");
- let engine = new BookmarksEngine(Service);
- let store = engine._store;
let badRecordID = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.toolbarFolder,
Utils.makeURI("place:folder=1138"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
null);
do_check_true(badRecordID > 0);
_("Record is " + badRecordID);
@@ -314,20 +310,18 @@ add_task(async function test_processInco
PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
let folder1_guid = store.GUIDForId(folder1_id);
let fxuri = Utils.makeURI("http://getfirefox.com/");
let tburi = Utils.makeURI("http://getthunderbird.com/");
let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
- let bmk1_guid = store.GUIDForId(bmk1_id);
let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
- let bmk2_guid = store.GUIDForId(bmk2_id);
// Create a server record for folder1 where we flip the order of
// the children.
let folder1_payload = store.createRecord(folder1_guid).cleartext;
folder1_payload.children.reverse();
collection.insert(folder1_guid, encryptPayload(folder1_payload));
// Create a bogus record that when synced down will provoke a
--- a/services/sync/tests/unit/test_bookmark_record.js
+++ b/services/sync/tests/unit/test_bookmark_record.js
@@ -21,17 +21,16 @@ add_task(async function test_bookmark_re
generateNewKeys(Service.collectionKeys);
let keyBundle = Service.identity.syncKeyBundle;
let log = Log.repository.getLogger("Test");
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
log.info("Creating a record");
- let u = "http://localhost:8080/storage/bookmarks/foo";
let placesItem = new PlacesItem("bookmarks", "foo", "bookmark");
let bookmarkItem = prepareBookmarkItem("bookmarks", "foo");
log.info("Checking getTypeObject");
do_check_eq(placesItem.getTypeObject(placesItem.type), Bookmark);
do_check_eq(bookmarkItem.getTypeObject(bookmarkItem.type), Bookmark);
bookmarkItem.encrypt(keyBundle);
--- a/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
+++ b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
@@ -183,18 +183,16 @@ add_task(async function test_smart_bookm
"&maxResults=10");
let title = "Most Visited";
let mostVisitedID = newSmartBookmark(parent, uri, -1, title, "MostVisited");
let mostVisitedGUID = store.GUIDForId(mostVisitedID);
let record = store.createRecord(mostVisitedGUID);
_("Prepare sync.");
- let collection = server.user("foo").collection("bookmarks");
-
try {
engine._syncStartup();
_("Verify that mapDupe uses the anno, discovering a dupe regardless of URI.");
do_check_eq(mostVisitedGUID, engine._mapDupe(record));
record.bmkUri = "http://foo/";
do_check_eq(mostVisitedGUID, engine._mapDupe(record));
--- a/services/sync/tests/unit/test_bookmark_tracker.js
+++ b/services/sync/tests/unit/test_bookmark_tracker.js
@@ -124,32 +124,32 @@ var populateTree = async function popula
PlacesUtils.annotations.EXPIRE_NEVER);
}
guids[item.title] = await PlacesUtils.promiseItemGuid(itemId);
}
return guids;
}
async function insertBookmarksToMigrate() {
- let mozBmk = await PlacesUtils.bookmarks.insert({
+ await PlacesUtils.bookmarks.insert({
guid: "0gtWTOgYcoJD",
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://mozilla.org",
});
let fxBmk = await PlacesUtils.bookmarks.insert({
guid: "0dbpnMdxKxfg",
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
});
let tbBmk = await PlacesUtils.bookmarks.insert({
guid: "r5ouWdPB3l28",
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getthunderbird.com",
});
- let bzBmk = await PlacesUtils.bookmarks.insert({
+ await PlacesUtils.bookmarks.insert({
guid: "YK5Bdq5MIqL6",
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://bugzilla.mozilla.org",
});
let exampleBmk = await PlacesUtils.bookmarks.insert({
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "https://example.com",
});
@@ -212,17 +212,17 @@ add_task(async function test_tracking()
add_task(async function test_batch_tracking() {
_("Test tracker does the correct thing during and after a places 'batch'");
await startTracking();
PlacesUtils.bookmarks.runInBatchMode({
runBatched() {
- let folder = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX);
// We should be tracking the new folder and its parent (and need to jump
// through blocking hoops...)
Async.promiseSpinningly(verifyTrackedCount(2));
// But not have bumped the score.
do_check_eq(tracker.score, 0);
}
@@ -239,17 +239,17 @@ add_task(async function test_nested_batc
await startTracking();
PlacesUtils.bookmarks.runInBatchMode({
runBatched() {
PlacesUtils.bookmarks.runInBatchMode({
runBatched() {
- let folder = PlacesUtils.bookmarks.createFolder(
+ PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder,
"Test Folder", PlacesUtils.bookmarks.DEFAULT_INDEX);
// We should be tracking the new folder and its parent (and need to jump
// through blocking hoops...)
Async.promiseSpinningly(verifyTrackedCount(2));
// But not have bumped the score.
do_check_eq(tracker.score, 0);
}
@@ -662,17 +662,16 @@ add_task(async function test_async_onIte
add_task(async function test_onItemKeywordChanged() {
_("Keyword changes via the synchronous API should be tracked");
try {
await stopTracking();
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
PlacesUtils.bookmarks.DEFAULT_INDEX);
- let folderGUID = engine._store.GUIDForId(folder);
_("Track changes to keywords");
let uri = Utils.makeURI("http://getfirefox.com");
let b = PlacesUtils.bookmarks.insertBookmark(
folder, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bGUID = engine._store.GUIDForId(b);
_("New item is " + b);
_("GUID: " + bGUID);
@@ -798,17 +797,16 @@ add_task(async function test_onItemPostD
add_task(async function test_onItemAnnoChanged() {
_("Item annotations should be tracked");
try {
await stopTracking();
let folder = PlacesUtils.bookmarks.createFolder(
PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
PlacesUtils.bookmarks.DEFAULT_INDEX);
- let folderGUID = engine._store.GUIDForId(folder);
_("Track changes to annos.");
let b = PlacesUtils.bookmarks.insertBookmark(
folder, Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
let bGUID = engine._store.GUIDForId(b);
_("New item is " + b);
_("GUID: " + bGUID);
@@ -1069,17 +1067,17 @@ add_task(async function test_onItemMoved
});
add_task(async function test_async_onItemMoved_update() {
_("Items moved via the asynchronous API should be tracked");
try {
await stopTracking();
- let fxBmk = await PlacesUtils.bookmarks.insert({
+ await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
let tbBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
@@ -1296,28 +1294,26 @@ add_task(async function test_treeMoved()
// A second folder in the first.
let folder2_id = PlacesUtils.bookmarks.createFolder(
folder1_id,
"Second test folder",
PlacesUtils.bookmarks.DEFAULT_INDEX);
let folder2_guid = engine._store.GUIDForId(folder2_id);
// Create a couple of bookmarks in the second folder.
- let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.insertBookmark(
folder2_id,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
- let fx_guid = engine._store.GUIDForId(fx_id);
- let tb_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.insertBookmark(
folder2_id,
Utils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
- let tb_guid = engine._store.GUIDForId(tb_id);
await startTracking();
// Move folder 2 to be a sibling of folder1.
PlacesUtils.bookmarks.moveItem(
folder2_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0);
// the menu and both folders should be tracked, the children should not be.
await verifyTrackedItems(["menu", folder1_guid, folder2_guid]);
@@ -1327,22 +1323,21 @@ add_task(async function test_treeMoved()
await cleanup();
}
});
add_task(async function test_onItemDeleted() {
_("Bookmarks deleted via the synchronous API should be tracked");
try {
- let fx_id = PlacesUtils.bookmarks.insertBookmark(
+ PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getfirefox.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Firefox!");
- let fx_guid = engine._store.GUIDForId(fx_id);
let tb_id = PlacesUtils.bookmarks.insertBookmark(
PlacesUtils.bookmarks.bookmarksMenuFolder,
Utils.makeURI("http://getthunderbird.com"),
PlacesUtils.bookmarks.DEFAULT_INDEX,
"Get Thunderbird!");
let tb_guid = engine._store.GUIDForId(tb_id);
await startTracking();
@@ -1365,17 +1360,17 @@ add_task(async function test_async_onIte
await stopTracking();
let fxBmk = await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getfirefox.com",
title: "Get Firefox!",
});
- let tbBmk = await PlacesUtils.bookmarks.insert({
+ await PlacesUtils.bookmarks.insert({
type: PlacesUtils.bookmarks.TYPE_BOOKMARK,
parentGuid: PlacesUtils.bookmarks.menuGuid,
url: "http://getthunderbird.com",
title: "Get Thunderbird!",
});
await startTracking();
--- a/services/sync/tests/unit/test_browserid_identity.js
+++ b/services/sync/tests/unit/test_browserid_identity.js
@@ -743,17 +743,17 @@ add_task(async function test_getKeysMiss
Assert.ok(ex.message.indexOf("missing kA or kB") >= 0);
});
add_task(async function test_signedInUserMissing() {
_("BrowserIDManager detects getSignedInUser returning incomplete account data");
let browseridManager = new BrowserIDManager();
- let config = makeIdentityConfig();
+ makeIdentityConfig();
// Delete stored keys and the key fetch token.
delete identityConfig.fxaccount.user.kA;
delete identityConfig.fxaccount.user.kB;
delete identityConfig.fxaccount.user.keyFetchToken;
configureFxAccountIdentity(browseridManager, identityConfig);
let fxa = new FxAccounts({
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -331,17 +331,17 @@ add_task(async function test_sync() {
}
});
add_test(function test_client_name_change() {
_("Ensure client name change incurs a client record update.");
let tracker = engine._tracker;
- let localID = engine.localID;
+ engine.localID; // Needed to increase the tracker changedIDs count.
let initialName = engine.localName;
Svc.Obs.notify("weave:engine:start-tracking");
_("initial name: " + initialName);
// Tracker already has data, so clear it.
tracker.clearChangedIDs();
@@ -368,17 +368,17 @@ add_test(function test_send_command() {
_("Verifies _sendCommandToClient puts commands in the outbound queue.");
let store = engine._store;
let tracker = engine._tracker;
let remoteId = Utils.makeGUID();
let rec = new ClientsRec("clients", remoteId);
store.create(rec);
- let remoteRecord = store.createRecord(remoteId, "clients");
+ store.createRecord(remoteId, "clients");
let action = "testCommand";
let args = ["foo", "bar"];
engine._sendCommandToClient(action, args, remoteId);
let newRecord = store._remoteClients[remoteId];
let clientCommands = engine._readCommands()[remoteId];
@@ -464,17 +464,16 @@ add_test(function test_command_duplicati
store.createRecord(remoteId, "clients");
let action = "resetAll";
let args = [];
engine.sendCommand(action, args, remoteId);
engine.sendCommand(action, args, remoteId);
- let newRecord = store._remoteClients[remoteId];
let clientCommands = engine._readCommands()[remoteId];
equal(clientCommands.length, 1);
_("Check variant args length");
engine._saveCommands({});
action = "resetEngine";
engine.sendCommand(action, [{ x: "foo" }], remoteId);
@@ -771,17 +770,17 @@ add_test(function test_send_uri_to_clien
let tracker = engine._tracker;
let store = engine._store;
let remoteId = Utils.makeGUID();
let rec = new ClientsRec("clients", remoteId);
rec.name = "remote";
store.create(rec);
- let remoteRecord = store.createRecord(remoteId, "clients");
+ store.createRecord(remoteId, "clients");
tracker.clearChangedIDs();
let initialScore = tracker.score;
let uri = "http://www.mozilla.org/";
let title = "Title of the Page";
engine.sendURIToClientForDisplay(uri, remoteId, title);
@@ -897,17 +896,16 @@ add_task(async function test_merge_comma
let now = Date.now() / 1000;
let contents = {
meta: {global: {engines: {clients: {version: engine.version,
syncID: engine.syncID}}}},
clients: {},
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
- let user = server.user("foo");
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let desktopID = Utils.makeGUID();
server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
id: desktopID,
name: "Desktop client",
@@ -931,18 +929,16 @@ add_task(async function test_merge_comma
args: [],
flowID: Utils.makeGUID(),
}],
version: "48",
protocols: ["1.5"],
}), now - 10));
try {
- let store = engine._store;
-
_("First sync. 2 records downloaded.");
strictEqual(engine.lastRecordUpload, 0);
engine._sync();
_("Broadcast logout to all clients");
engine.sendCommand("logout", []);
engine._sync();
@@ -978,34 +974,31 @@ add_task(async function test_duplicate_r
let now = Date.now() / 1000;
let contents = {
meta: {global: {engines: {clients: {version: engine.version,
syncID: engine.syncID}}}},
clients: {},
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
- let user = server.user("foo");
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let desktopID = Utils.makeGUID();
server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
id: desktopID,
name: "Desktop client",
type: "desktop",
commands: [],
version: "48",
protocols: ["1.5"],
}), now - 10));
try {
- let store = engine._store;
-
_("First sync. 1 record downloaded.");
strictEqual(engine.lastRecordUpload, 0);
engine._sync();
_("Send tab to client");
engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"]);
engine._sync();
@@ -1048,17 +1041,16 @@ add_task(async function test_upload_afte
let now = Date.now() / 1000;
let contents = {
meta: {global: {engines: {clients: {version: engine.version,
syncID: engine.syncID}}}},
clients: {},
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
- let user = server.user("foo");
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let deviceBID = Utils.makeGUID();
let deviceCID = Utils.makeGUID();
server.insertWBO("foo", "clients", new ServerWBO(deviceBID, encryptPayload({
id: deviceBID,
@@ -1077,18 +1069,16 @@ add_task(async function test_upload_afte
name: "Device C",
type: "desktop",
commands: [],
version: "48",
protocols: ["1.5"],
}), now - 10));
try {
- let store = engine._store;
-
_("First sync. 2 records downloaded.");
strictEqual(engine.lastRecordUpload, 0);
engine._sync();
_("Send tab to client");
engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"], deviceBID);
const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing;
@@ -1141,17 +1131,16 @@ add_task(async function test_keep_cleare
let now = Date.now() / 1000;
let contents = {
meta: {global: {engines: {clients: {version: engine.version,
syncID: engine.syncID}}}},
clients: {},
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
- let user = server.user("foo");
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let deviceBID = Utils.makeGUID();
let deviceCID = Utils.makeGUID();
server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload({
id: engine.localID,
@@ -1183,18 +1172,16 @@ add_task(async function test_keep_cleare
name: "Device C",
type: "desktop",
commands: [],
version: "48",
protocols: ["1.5"],
}), now - 10));
try {
- let store = engine._store;
-
_("First sync. Download remote and our record.");
strictEqual(engine.lastRecordUpload, 0);
let collection = server.getCollection("foo", "clients");
const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing;
SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten.call(engine, [], [deviceBID]);
let commandsProcessed = 0;
engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length };
@@ -1269,17 +1256,16 @@ add_task(async function test_deleted_com
let now = Date.now() / 1000;
let contents = {
meta: {global: {engines: {clients: {version: engine.version,
syncID: engine.syncID}}}},
clients: {},
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
- let user = server.user("foo");
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let activeID = Utils.makeGUID();
server.insertWBO("foo", "clients", new ServerWBO(activeID, encryptPayload({
id: activeID,
name: "Active client",
@@ -1295,18 +1281,16 @@ add_task(async function test_deleted_com
name: "Client to delete",
type: "desktop",
commands: [],
version: "48",
protocols: ["1.5"],
}), now - 10));
try {
- let store = engine._store;
-
_("First sync. 2 records downloaded.");
engine._sync();
_("Delete a record on the server.");
let collection = server.getCollection("foo", "clients");
collection.remove(deletedID);
_("Broadcast a command to all clients");
@@ -1338,17 +1322,16 @@ add_task(async function test_send_uri_ac
let now = Date.now() / 1000;
let contents = {
meta: {global: {engines: {clients: {version: engine.version,
syncID: engine.syncID}}}},
clients: {},
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
- let user = server.user("foo");
await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
try {
let fakeSenderID = Utils.makeGUID();
_("Initial sync for empty clients collection");
@@ -1406,25 +1389,20 @@ add_task(async function test_command_syn
meta: {global: {engines: {clients: {version: engine.version,
syncID: engine.syncID}}}},
clients: {},
crypto: {}
};
let server = serverForUsers({"foo": "password"}, contents);
await SyncTestingInfrastructure(server);
- let user = server.user("foo");
let collection = server.getCollection("foo", "clients");
let remoteId = Utils.makeGUID();
let remoteId2 = Utils.makeGUID();
- function clientWBO(id) {
- return user.collection("clients").wbo(id);
- }
-
_("Create remote client record 1");
server.insertWBO("foo", "clients", new ServerWBO(remoteId, encryptPayload({
id: remoteId,
name: "Remote client",
type: "desktop",
commands: [],
version: "48",
protocols: ["1.5"]
--- a/services/sync/tests/unit/test_clients_escape.js
+++ b/services/sync/tests/unit/test_clients_escape.js
@@ -6,20 +6,16 @@ Cu.import("resource://services-sync/reco
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/utils.js");
add_task(async function test_clients_escape() {
_("Set up test fixtures.");
await configureIdentity();
- let baseUri = "http://fakebase/1.1/foo/storage/";
- let pubUri = baseUri + "keys/pubkey";
- let privUri = baseUri + "keys/privkey";
-
let keyBundle = Service.identity.syncKeyBundle;
let engine = Service.clientsEngine;
try {
_("Test that serializing client records results in uploadable ascii");
engine.localID = "ascii";
engine.localName = "wéävê";
--- a/services/sync/tests/unit/test_collection_getBatched.js
+++ b/services/sync/tests/unit/test_collection_getBatched.js
@@ -121,23 +121,23 @@ add_test(function test_success() {
});
add_test(function test_total_limit() {
_("getBatched respects the (initial) value of the limit property");
const totalRecords = 100;
const recordLimit = 11;
const batchSize = 2;
const lastModified = "111111";
- let { records, responses, requests, coll } = get_test_collection_info({
+ let { records, requests, coll } = get_test_collection_info({
totalRecords,
batchSize,
lastModified,
});
coll.limit = recordLimit;
- let response = coll.getBatched(batchSize);
+ coll.getBatched(batchSize);
equal(requests.length, Math.ceil(recordLimit / batchSize));
equal(records.length, recordLimit);
for (let i = 0; i < requests.length; ++i) {
let req = requests[i];
if (i !== requests.length - 1) {
equal(req.limit, batchSize);
@@ -175,17 +175,17 @@ add_test(function test_412() {
run_next_test();
});
add_test(function test_get_throws() {
_("We shouldn't record records if get() throws for some reason");
const totalRecords = 11;
const batchSize = 2;
const lastModified = "111111";
- let { records, responses, requests, coll } = get_test_collection_info({
+ let { records, requests, coll } = get_test_collection_info({
totalRecords,
batchSize,
lastModified,
throwAfter: 3
});
throws(() => coll.getBatched(batchSize), "Some Network Error");
--- a/services/sync/tests/unit/test_corrupt_keys.js
+++ b/services/sync/tests/unit/test_corrupt_keys.js
@@ -85,18 +85,16 @@ add_task(async function test_locally_cha
// Sync should upload records.
await sync_and_validate_telem();
// Tabs exist.
_("Tabs modified: " + johndoe.modified("tabs"));
do_check_true(johndoe.modified("tabs") > 0);
- let coll_modified = Service.collectionKeys.lastModified;
-
// Let's create some server side history records.
let liveKeys = Service.collectionKeys.keyForCollection("history");
_("Keys now: " + liveKeys.keyPair);
let visitType = Ci.nsINavHistoryService.TRANSITION_LINK;
let history = johndoe.createCollection("history");
for (let i = 0; i < 5; i++) {
let id = "record-no--" + i;
let modified = Date.now() / 1000 - 60 * (i + 10);
@@ -139,17 +137,16 @@ add_task(async function test_locally_cha
// Now syncing should succeed, after one HMAC error.
let ping = await wait_for_ping(() => Service.sync(), true);
equal(ping.engines.find(e => e.name == "history").incoming.applied, 5);
do_check_eq(hmacErrorCount, 1);
_("Keys now: " + Service.collectionKeys.keyForCollection("history").keyPair);
// And look! We downloaded history!
- let store = Service.engineManager.get("history")._store;
do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--0"));
do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--1"));
do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--2"));
do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--3"));
do_check_true(await promiseIsURIVisited("http://foo/bar?record-no--4"));
do_check_eq(hmacErrorCount, 1);
_("Busting some new server values.");
@@ -197,17 +194,16 @@ add_task(async function test_locally_cha
do_check_false(await promiseIsURIVisited("http://foo/bar?record-no--9"));
} finally {
Svc.Prefs.resetBranch("");
await promiseStopServer(server);
}
});
function run_test() {
- let logger = Log.repository.rootLogger;
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
validate_all_future_pings();
run_next_test();
}
/**
* Asynchronously check a url is visited.
--- a/services/sync/tests/unit/test_errorhandler_2.js
+++ b/services/sync/tests/unit/test_errorhandler_2.js
@@ -895,17 +895,16 @@ add_task(async function test_engine_appl
let engine = engineManager.get("catapult");
engine.enabled = true;
delete engine.exception;
engine.sync = function sync() {
Svc.Obs.notify("weave:engine:sync:applied", {newFailed:1}, "catapult");
};
- let log = Log.repository.getLogger("Sync.ErrorHandler");
Svc.Prefs.set("log.appender.file.logOnError", true);
let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
do_check_eq(Status.engines["catapult"], undefined);
do_check_true(await EHTestsCommon.setUp(server));
Service.sync();
await promiseObserved;
--- a/services/sync/tests/unit/test_errorhandler_filelog.js
+++ b/services/sync/tests/unit/test_errorhandler_filelog.js
@@ -268,18 +268,16 @@ add_test(function test_login_error_logOn
setLastSync(PROLONGED_ERROR_DURATION);
Svc.Obs.notify("weave:service:login:error");
});
add_test(function test_errorLog_dumpAddons() {
Svc.Prefs.set("log.appender.file.logOnError", true);
- let log = Log.repository.getLogger("Sync.Test.FileLog");
-
// We need to wait until the log cleanup started by this test is complete
// or the next test will fail as it is ongoing.
Svc.Obs.add("services-tests:common:log-manager:cleanup-logs", function onCleanupLogs() {
Svc.Obs.remove("services-tests:common:log-manager:cleanup-logs", onCleanupLogs);
run_next_test();
});
Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
--- a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
+++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
@@ -25,17 +25,16 @@ CatapultEngine.prototype = {
_sync: function _sync() {
throw this.exception;
}
};
function sync_httpd_setup() {
let collectionsHelper = track_collections_helper();
let upd = collectionsHelper.with_updated_collection;
- let collections = collectionsHelper.collections;
let catapultEngine = engineManager.get("catapult");
let engines = {catapult: {version: catapultEngine.version,
syncID: catapultEngine.syncID}};
// Track these using the collections helper, which keeps modified times
// up-to-date.
let clientsColl = new ServerCollection({}, true);
--- a/services/sync/tests/unit/test_forms_store.js
+++ b/services/sync/tests/unit/test_forms_store.js
@@ -3,27 +3,26 @@
_("Make sure the form store follows the Store api and correctly accesses the backend form storage");
Cu.import("resource://services-sync/engines/forms.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://gre/modules/Services.jsm");
function run_test() {
- let baseuri = "http://fake/uri/";
let engine = new FormEngine(Service);
let store = engine._store;
function applyEnsureNoFailures(records) {
do_check_eq(store.applyIncomingBatch(records).length, 0);
}
_("Remove any existing entries");
store.wipe();
- for (let id in store.getAllIDs()) {
+ if (Object.keys(store.getAllIDs()).length) {
do_throw("Shouldn't get any ids!");
}
_("Add a form entry");
applyEnsureNoFailures([{
id: Utils.makeGUID(),
name: "name!!",
value: "value??"
@@ -50,17 +49,17 @@ function run_test() {
_("Create a non-existent id for delete");
do_check_true(store.createRecord("deleted!!").deleted);
_("Try updating.. doesn't do anything yet");
store.update({});
_("Remove all entries");
store.wipe();
- for (let id in store.getAllIDs()) {
+ if (Object.keys(store.getAllIDs()).length) {
do_throw("Shouldn't get any ids!");
}
_("Add another entry");
applyEnsureNoFailures([{
id: Utils.makeGUID(),
name: "another",
value: "entry"
@@ -78,25 +77,25 @@ function run_test() {
_("Make sure it's there");
do_check_true(store.itemExists("newid"));
_("Remove the entry");
store.remove({
id: "newid"
});
- for (let id in store.getAllIDs()) {
+ if (Object.keys(store.getAllIDs()).length) {
do_throw("Shouldn't get any ids!");
}
_("Removing the entry again shouldn't matter");
store.remove({
id: "newid"
});
- for (let id in store.getAllIDs()) {
+ if (Object.keys(store.getAllIDs()).length) {
do_throw("Shouldn't get any ids!");
}
_("Add another entry to delete using applyIncomingBatch");
let toDelete = {
id: Utils.makeGUID(),
name: "todelete",
value: "entry"
@@ -109,38 +108,38 @@ function run_test() {
else
do_throw("Should have only gotten one!");
}
do_check_true(store.itemExists(id));
// mark entry as deleted
toDelete.id = id;
toDelete.deleted = true;
applyEnsureNoFailures([toDelete]);
- for (let id in store.getAllIDs()) {
+ if (Object.keys(store.getAllIDs()).length) {
do_throw("Shouldn't get any ids!");
}
_("Add an entry to wipe");
applyEnsureNoFailures([{
id: Utils.makeGUID(),
name: "towipe",
value: "entry"
}]);
store.wipe();
- for (let id in store.getAllIDs()) {
+ if (Object.keys(store.getAllIDs()).length) {
do_throw("Shouldn't get any ids!");
}
_("Ensure we work if formfill is disabled.");
Services.prefs.setBoolPref("browser.formfill.enable", false);
try {
// a search
- for (let id in store.getAllIDs()) {
+ if (Object.keys(store.getAllIDs()).length) {
do_throw("Shouldn't get any ids!");
}
// an update.
applyEnsureNoFailures([{
id: Utils.makeGUID(),
name: "some",
value: "entry"
}]);
--- a/services/sync/tests/unit/test_fxa_migration.js
+++ b/services/sync/tests/unit/test_fxa_migration.js
@@ -67,17 +67,17 @@ add_task(async function testMigrationUnl
let oldValue = Services.prefs.getBoolPref("services.sync-testing.startOverKeepIdentity");
Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", false);
do_register_cleanup(() => {
Services.prefs.setBoolPref("services.sync-testing.startOverKeepIdentity", oldValue)
});
// Arrange for a legacy sync user.
- let [engine, server] = configureLegacySync();
+ configureLegacySync();
// Start a sync - this will cause an EOL notification which the migrator's
// observer will notice.
let promiseMigration = promiseOneObserver("fxa-migration:state-changed");
let promiseStartOver = promiseOneObserver("weave:service:start-over:finish");
_("Starting sync");
Service.sync();
_("Finished sync");
--- a/services/sync/tests/unit/test_history_tracker.js
+++ b/services/sync/tests/unit/test_history_tracker.js
@@ -204,18 +204,16 @@ add_task(async function test_track_delet
await verifyTrackedItems([guid]);
do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
await cleanup();
});
add_task(async function test_dont_track_expiration() {
_("Expirations are not tracked.");
- let uriToExpire = await addVisit("to_expire");
- let guidToExpire = engine._store.GUIDForUri(uriToExpire);
let uriToRemove = await addVisit("to_remove");
let guidToRemove = engine._store.GUIDForUri(uriToRemove);
await resetTracker();
await verifyTrackerEmpty();
await startTracking();
let visitRemovedPromise = promiseVisit("removed", uriToRemove);
--- a/services/sync/tests/unit/test_hmac_error.js
+++ b/services/sync/tests/unit/test_hmac_error.js
@@ -60,17 +60,16 @@ add_task(async function hmac_error_durin
key404Counter--;
return;
}
keysHandler(request, response);
};
let collectionsHelper = track_collections_helper();
let upd = collectionsHelper.with_updated_collection;
- let collections = collectionsHelper.collections;
let handlers = {
"/1.1/foo/info/collections": collectionsHelper.handler,
"/1.1/foo/storage/meta/global": upd("meta", global.handler()),
"/1.1/foo/storage/crypto/keys": upd("crypto", keys404Handler),
"/1.1/foo/storage/clients": upd("clients", clientsColl.handler()),
"/1.1/foo/storage/rotary": upd("rotary", rotaryColl.handler())
};
--- a/services/sync/tests/unit/test_httpd_sync_server.js
+++ b/services/sync/tests/unit/test_httpd_sync_server.js
@@ -52,18 +52,18 @@ add_test(function test_url_parsing() {
[all, version, username, first, rest] = parts;
do_check_eq(all, "/1.1/johnsmith/storage");
do_check_eq(version, "1.1");
do_check_eq(username, "johnsmith");
do_check_eq(first, "storage");
do_check_eq(rest, undefined);
parts = server.storageRE.exec("storage");
- let storage, collection, id;
- [all, storage, collection, id] = parts;
+ let collection;
+ [all, , collection, ] = parts;
do_check_eq(all, "storage");
do_check_eq(collection, undefined);
run_next_test();
});
Cu.import("resource://services-common/rest.js");
function localRequest(server, path) {
--- a/services/sync/tests/unit/test_keys.js
+++ b/services/sync/tests/unit/test_keys.js
@@ -17,17 +17,16 @@ function sha256HMAC(message, key) {
function do_check_keypair_eq(a, b) {
do_check_eq(2, a.length);
do_check_eq(2, b.length);
do_check_eq(a[0], b[0]);
do_check_eq(a[1], b[1]);
}
function test_time_keyFromString(iterations) {
- let k;
let o;
let b = new BulkKeyBundle("dummy");
let d = Utils.decodeKeyBase32("ababcdefabcdefabcdefabcdef");
b.generateRandom();
_("Running " + iterations + " iterations of hmacKeyObject + sha256HMAC.");
for (let i = 0; i < iterations; ++i) {
let k = b.hmacKeyObject;
--- a/services/sync/tests/unit/test_password_store.js
+++ b/services/sync/tests/unit/test_password_store.js
@@ -114,30 +114,35 @@ function test_apply_multiple_records_wit
1000, 1000, 1000, false);
}
function test_apply_same_record_with_different_times() {
// The following record is going to be inserted multiple times in the store
// and it needs to be found there. Then its timestamps are going to be
// compared to the expected values.
+
+ /* eslint-disable no-unused-vars */
+ /* The eslint linter thinks that timePasswordChanged is unused, even though
+ it is passed as an argument to changePassword. */
var timePasswordChanged = 100;
timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 100,
100, 100, timePasswordChanged, true);
timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 100,
100, 800, timePasswordChanged, true,
true);
timePasswordChanged = changePassword("A", "http://a.tn", "password", 1, 500,
100, 800, timePasswordChanged, true,
true);
timePasswordChanged = changePassword("A", "http://a.tn", "password2", 1, 500,
100, 1536213005222, timePasswordChanged,
true, true);
timePasswordChanged = changePassword("A", "http://a.tn", "password2", 1, 500,
100, 800, timePasswordChanged, true, true);
+ /* eslint-enable no-unused-vars */
}
function run_test() {
initTestLogging("Trace");
Log.repository.getLogger("Sync.Engine.Passwords").level = Log.Level.Trace;
Log.repository.getLogger("Sync.Store.Passwords").level = Log.Level.Trace;
--- a/services/sync/tests/unit/test_records_crypto.js
+++ b/services/sync/tests/unit/test_records_crypto.js
@@ -38,17 +38,16 @@ add_task(async function test_records_cry
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
log.info("Setting up server and authenticator");
server = httpd_setup({"/steam/resource": crypted_resource_handler});
log.info("Creating a record");
- let cryptoUri = "http://localhost:8080/crypto/steam";
cryptoWrap = prepareCryptoWrap("steam", "resource");
log.info("cryptoWrap: " + cryptoWrap.toString());
log.info("Encrypting a record");
cryptoWrap.encrypt(keyBundle);
log.info("Ciphertext is " + cryptoWrap.ciphertext);
@@ -102,17 +101,16 @@ add_task(async function test_records_cry
} catch (ex) {
error = ex;
}
do_check_eq(error.substr(0, 42), "Record SHA256 HMAC mismatch: should be foo");
// Checking per-collection keys and default key handling.
generateNewKeys(Service.collectionKeys);
- let bu = "http://localhost:8080/storage/bookmarks/foo";
let bookmarkItem = prepareCryptoWrap("bookmarks", "foo");
bookmarkItem.encrypt(Service.collectionKeys.keyForCollection("bookmarks"));
log.info("Ciphertext is " + bookmarkItem.ciphertext);
do_check_true(bookmarkItem.ciphertext != null);
log.info("Decrypting the record explicitly with the default key.");
do_check_eq(bookmarkItem.decrypt(Service.collectionKeys._default).stuff, "my payload here");
// Per-collection keys.
--- a/services/sync/tests/unit/test_resource.js
+++ b/services/sync/tests/unit/test_resource.js
@@ -424,17 +424,16 @@ function run_test() {
_("Checking handling of errors in onProgress.");
let res18 = new Resource(server.baseURI + "/json");
let onProgress = function(rec) {
// Provoke an XPC exception without a Javascript wrapper.
Services.io.newURI("::::::::");
};
res18._onProgress = onProgress;
- let oldWarn = res18._log.warn;
let warnings = [];
res18._log.warn = function(msg) { warnings.push(msg) };
error = undefined;
try {
content = res18.get();
} catch (ex) {
error = ex;
}
--- a/services/sync/tests/unit/test_resource_async.js
+++ b/services/sync/tests/unit/test_resource_async.js
@@ -696,22 +696,16 @@ add_test(function test_uri_construction(
.QueryInterface(Ci.nsIURL);
uri2.query = query;
do_check_eq(uri1.query, uri2.query);
run_next_test();
});
add_test(function test_not_sending_cookie() {
- function handler(metadata, response) {
- let body = "COOKIE!";
- response.setStatusLine(metadata.httpVersion, 200, "OK");
- response.bodyOutputStream.write(body, body.length);
- do_check_false(metadata.hasHeader("Cookie"));
- }
let cookieSer = Cc["@mozilla.org/cookieService;1"]
.getService(Ci.nsICookieService);
let uri = CommonUtils.makeURI(server.baseURI);
cookieSer.setCookieString(uri, null, "test=test; path=/;", null);
let res = new AsyncResource(server.baseURI + "/test");
res.get(function(error) {
do_check_null(error);
--- a/services/sync/tests/unit/test_service_detect_upgrade.js
+++ b/services/sync/tests/unit/test_service_detect_upgrade.js
@@ -189,17 +189,16 @@ add_test(function v4_upgrade() {
});
add_test(function v5_upgrade() {
let passphrase = "abcdeabcdeabcdeabcdeabcdea";
// Tracking info/collections.
let collectionsHelper = track_collections_helper();
let upd = collectionsHelper.with_updated_collection;
- let collections = collectionsHelper.collections;
let keysWBO = new ServerWBO("keys");
let bulkWBO = new ServerWBO("bulk");
let clients = new ServerCollection();
let meta_global = new ServerWBO("global");
let server = httpd_setup({
// Special.
@@ -283,13 +282,12 @@ add_test(function v5_upgrade() {
} finally {
Svc.Prefs.resetBranch("");
server.stop(run_next_test);
}
});
function run_test() {
- let logger = Log.repository.rootLogger;
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
run_next_test();
}
--- a/services/sync/tests/unit/test_service_getStorageInfo.js
+++ b/services/sync/tests/unit/test_service_getStorageInfo.js
@@ -68,27 +68,27 @@ add_test(function test_network_error() {
});
add_test(function test_http_error() {
let handler = httpd_handler(500, "Oh noez", "Something went wrong!");
let server = httpd_setup({"/1.1/johndoe/info/collections": handler});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
- let request = Service.getStorageInfo(INFO_COLLECTIONS, function(error, info) {
+ Service.getStorageInfo(INFO_COLLECTIONS, function(error, info) {
do_check_eq(error.status, 500);
do_check_eq(info, null);
server.stop(run_next_test);
});
});
add_test(function test_invalid_json() {
let handler = httpd_handler(200, "OK", "Invalid JSON");
let server = httpd_setup({"/1.1/johndoe/info/collections": handler});
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
- let request = Service.getStorageInfo(INFO_COLLECTIONS, function(error, info) {
+ Service.getStorageInfo(INFO_COLLECTIONS, function(error, info) {
do_check_eq(error.name, "SyntaxError");
do_check_eq(info, null);
server.stop(run_next_test);
});
});
--- a/services/sync/tests/unit/test_service_login.js
+++ b/services/sync/tests/unit/test_service_login.js
@@ -18,17 +18,16 @@ function login_handling(handler) {
response.setStatusLine(request.httpVersion, 401, "Unauthorized");
response.setHeader("Content-Type", "text/plain");
response.bodyOutputStream.write(body, body.length);
}
};
}
function run_test() {
- let logger = Log.repository.rootLogger;
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
run_next_test();
}
add_test(function test_offline() {
try {
_("The right bits are set when we're offline.");
@@ -40,20 +39,18 @@ add_test(function test_offline() {
Svc.Prefs.resetBranch("");
run_next_test();
}
});
function setup() {
let janeHelper = track_collections_helper();
let janeU = janeHelper.with_updated_collection;
- let janeColls = janeHelper.collections;
let johnHelper = track_collections_helper();
let johnU = johnHelper.with_updated_collection;
- let johnColls = johnHelper.collections;
let server = httpd_setup({
"/1.1/johndoe/info/collections": login_handling(johnHelper.handler),
"/1.1/janedoe/info/collections": login_handling(janeHelper.handler),
// We need these handlers because we test login, and login
// is where keys are generated or fetched.
// TODO: have Jane fetch her keys, not generate them...
@@ -152,17 +149,16 @@ add_test(function test_login_on_sync() {
};
Service.sync();
do_check_true(loginCalled);
Service.login = oldLogin;
// Stub mpLocked.
- let mpLockedF = Utils.mpLocked;
let mpLocked = true;
Utils.mpLocked = () => mpLocked;
// Stub scheduleNextSync. This gets called within checkSyncStatus if we're
// ready to sync, so use it as an indicator.
let scheduleNextSyncF = Service.scheduler.scheduleNextSync;
let scheduleCalled = false;
Service.scheduler.scheduleNextSync = function(wait) {
@@ -204,19 +200,16 @@ add_test(function test_login_on_sync() {
let oldGetter = Service.identity.__lookupGetter__("syncKey");
let oldSetter = Service.identity.__lookupSetter__("syncKey");
_("Old passphrase function is " + oldGetter);
Service.identity.__defineGetter__("syncKey",
function() {
throw "User canceled Master Password entry";
});
- let oldClearSyncTriggers = Service.scheduler.clearSyncTriggers;
- let oldLockedSync = Service._lockedSync;
-
let cSTCalled = false;
let lockedSyncCalled = false;
Service.scheduler.clearSyncTriggers = function() { cSTCalled = true; };
Service._lockedSync = function() { lockedSyncCalled = true; };
_("If master password is canceled, login fails and we report lockage.");
do_check_false(!!Service.login());
--- a/services/sync/tests/unit/test_service_passwordUTF8.js
+++ b/services/sync/tests/unit/test_service_passwordUTF8.js
@@ -51,17 +51,16 @@ function change_password(request, respon
response.setHeader("WWW-Authenticate", 'Basic realm="secret"', false);
response.bodyOutputStream.write(body, body.length);
}
function run_test() {
initTestLogging("Trace");
let collectionsHelper = track_collections_helper();
let upd = collectionsHelper.with_updated_collection;
- let collections = collectionsHelper.collections;
ensureLegacyIdentityManager();
do_test_pending();
let server = httpd_setup({
"/1.1/johndoe/info/collections": login_handling(collectionsHelper.handler),
"/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler()),
"/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
--- a/services/sync/tests/unit/test_service_sync_401.js
+++ b/services/sync/tests/unit/test_service_sync_401.js
@@ -16,22 +16,20 @@ function login_handling(handler) {
let body = "Unauthorized";
response.setStatusLine(request.httpVersion, 401, "Unauthorized");
response.bodyOutputStream.write(body, body.length);
}
};
}
add_task(async function run_test() {
- let logger = Log.repository.rootLogger;
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
let collectionsHelper = track_collections_helper();
let upd = collectionsHelper.with_updated_collection;
- let collections = collectionsHelper.collections;
let server = httpd_setup({
"/1.1/johndoe/storage/crypto/keys": upd("crypto", new ServerWBO("keys").handler()),
"/1.1/johndoe/storage/meta/global": upd("meta", new ServerWBO("global").handler()),
"/1.1/johndoe/info/collections": login_handling(collectionsHelper.handler)
});
const GLOBAL_SCORE = 42;
--- a/services/sync/tests/unit/test_service_sync_remoteSetup.js
+++ b/services/sync/tests/unit/test_service_sync_remoteSetup.js
@@ -6,20 +6,18 @@ Cu.import("resource://services-sync/cons
Cu.import("resource://services-sync/keys.js");
Cu.import("resource://services-sync/service.js");
Cu.import("resource://services-sync/util.js");
Cu.import("resource://testing-common/services/sync/fakeservices.js");
Cu.import("resource://testing-common/services/sync/utils.js");
function run_test() {
validate_all_future_pings();
- let logger = Log.repository.rootLogger;
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
- let guidSvc = new FakeGUIDService();
let clients = new ServerCollection();
let meta_global = new ServerWBO("global");
let collectionsHelper = track_collections_helper();
let upd = collectionsHelper.with_updated_collection;
let collections = collectionsHelper.collections;
function wasCalledHandler(wbo) {
@@ -50,17 +48,16 @@ function run_test() {
let ts = new_timestamp();
collectionsHelper.update_collection("crypto", ts);
collectionsHelper.update_collection("clients", ts);
collectionsHelper.update_collection("meta", ts);
return_timestamp(request, response, ts);
}
const GLOBAL_PATH = "/1.1/johndoe/storage/meta/global";
- const INFO_PATH = "/1.1/johndoe/info/collections";
let handlers = {
"/1.1/johndoe/storage": storageHandler,
"/1.1/johndoe/storage/crypto/keys": upd("crypto", keysWBO.handler()),
"/1.1/johndoe/storage/crypto": upd("crypto", cryptoColl.handler()),
"/1.1/johndoe/storage/clients": upd("clients", clients.handler()),
"/1.1/johndoe/storage/meta": upd("meta", wasCalledHandler(metaColl)),
"/1.1/johndoe/storage/meta/global": upd("meta", wasCalledHandler(meta_global)),
@@ -155,17 +152,16 @@ function run_test() {
Service.recordManager.set(Service.metaURL, { isNew: false });
do_check_false(Service._remoteSetup(makeOutdatedMeta()));
do_check_eq(Service.status.sync, "");
mock.restore();
metaColl.delete({});
_("Do an initial sync.");
- let beforeSync = Date.now() / 1000;
Service.sync();
_("Checking that remoteSetup returns true.");
do_check_true(Service._remoteSetup());
_("Verify that the meta record was uploaded.");
do_check_eq(meta_global.data.syncID, Service.syncID);
do_check_eq(meta_global.data.storageVersion, STORAGE_VERSION);
--- a/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
+++ b/services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
@@ -194,17 +194,16 @@ add_task(async function test_disabledLoc
Service.syncID = "abcdefghij";
let engine = Service.engineManager.get("steam");
let metaWBO = new ServerWBO("global", {
syncID: Service.syncID,
storageVersion: STORAGE_VERSION,
engines: {steam: {syncID: engine.syncID,
version: engine.version}}
});
- let steamCollection = new ServerWBO("steam", PAYLOAD);
function service_unavailable(request, response) {
let body = "Service Unavailable";
response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
response.setHeader("Retry-After", "23");
response.bodyOutputStream.write(body, body.length);
}
--- a/services/sync/tests/unit/test_service_verifyLogin.js
+++ b/services/sync/tests/unit/test_service_verifyLogin.js
@@ -22,25 +22,23 @@ function login_handling(handler) {
function service_unavailable(request, response) {
let body = "Service Unavailable";
response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
response.setHeader("Retry-After", "42");
response.bodyOutputStream.write(body, body.length);
}
function run_test() {
- let logger = Log.repository.rootLogger;
Log.repository.rootLogger.addAppender(new Log.DumpAppender());
ensureLegacyIdentityManager();
// This test expects a clean slate -- no saved passphrase.
Services.logins.removeAllLogins();
let johnHelper = track_collections_helper();
let johnU = johnHelper.with_updated_collection;
- let johnColls = johnHelper.collections;
do_test_pending();
let server;
function weaveHandler(request, response) {
response.setStatusLine(request.httpVersion, 200, "OK");
let body = server.baseURI + "/api/";
response.bodyOutputStream.write(body, body.length);
--- a/services/sync/tests/unit/test_service_wipeServer.js
+++ b/services/sync/tests/unit/test_service_wipeServer.js
@@ -27,18 +27,16 @@ FakeCollection.prototype = {
response.setHeader("X-Weave-Timestamp", timestamp);
response.setStatusLine(request.httpVersion, 200, "OK");
response.bodyOutputStream.write(body, body.length);
};
}
};
async function setUpTestFixtures(server) {
- let cryptoService = new FakeCryptoService();
-
Service.serverURL = server.baseURI + "/";
Service.clusterURL = server.baseURI + "/";
await configureIdentity(identityConfig);
}
function run_test() {
--- a/services/sync/tests/unit/test_status.js
+++ b/services/sync/tests/unit/test_status.js
@@ -6,17 +6,17 @@ function run_test() {
// Check initial states
do_check_false(Status.enforceBackoff);
do_check_eq(Status.backoffInterval, 0);
do_check_eq(Status.minimumNextSync, 0);
do_check_eq(Status.service, STATUS_OK);
do_check_eq(Status.sync, SYNC_SUCCEEDED);
do_check_eq(Status.login, LOGIN_SUCCEEDED);
- for (let name in Status.engines) {
+ if (Status.engines.length) {
do_throw("Status.engines should be empty.");
}
do_check_eq(Status.partial, false);
// Check login status
for (let code of [LOGIN_FAILED_NO_USERNAME,
LOGIN_FAILED_NO_PASSWORD,
--- a/services/sync/tests/unit/test_syncengine.js
+++ b/services/sync/tests/unit/test_syncengine.js
@@ -10,31 +10,31 @@ function makeSteamEngine() {
return new SyncEngine("Steam", Service);
}
var server = httpd_setup({});
add_task(async function test_url_attributes() {
_("SyncEngine url attributes");
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
Service.clusterURL = "https://cluster/1.1/foo/";
let engine = makeSteamEngine();
try {
do_check_eq(engine.storageURL, "https://cluster/1.1/foo/storage/");
do_check_eq(engine.engineURL, "https://cluster/1.1/foo/storage/steam");
do_check_eq(engine.metaURL, "https://cluster/1.1/foo/storage/meta/global");
} finally {
Svc.Prefs.resetBranch("");
}
});
add_task(async function test_syncID() {
_("SyncEngine.syncID corresponds to preference");
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeSteamEngine();
try {
// Ensure pristine environment
do_check_eq(Svc.Prefs.get("steam.syncID"), undefined);
// Performing the first get on the attribute will generate a new GUID.
do_check_eq(engine.syncID, "fake-guid-00");
do_check_eq(Svc.Prefs.get("steam.syncID"), "fake-guid-00");
@@ -44,17 +44,17 @@ add_task(async function test_syncID() {
do_check_eq(engine.syncID, "fake-guid-01");
} finally {
Svc.Prefs.resetBranch("");
}
})
add_task(async function test_lastSync() {
_("SyncEngine.lastSync and SyncEngine.lastSyncLocal correspond to preferences");
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeSteamEngine();
try {
// Ensure pristine environment
do_check_eq(Svc.Prefs.get("steam.lastSync"), undefined);
do_check_eq(engine.lastSync, 0);
do_check_eq(Svc.Prefs.get("steam.lastSyncLocal"), undefined);
do_check_eq(engine.lastSyncLocal, 0);
@@ -134,17 +134,17 @@ add_task(async function test_previousFai
do_check_eq(engine.previousFailed[1], previousFailed[1]);
} finally {
Svc.Prefs.resetBranch("");
}
});
add_task(async function test_resetClient() {
_("SyncEngine.resetClient resets lastSync and toFetch");
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeSteamEngine();
try {
// Ensure pristine environment
do_check_eq(Svc.Prefs.get("steam.lastSync"), undefined);
do_check_eq(Svc.Prefs.get("steam.lastSyncLocal"), undefined);
do_check_eq(engine.toFetch.length, 0);
engine.lastSync = 123.45;
@@ -166,17 +166,17 @@ add_task(async function test_wipeServer(
_("SyncEngine.wipeServer deletes server data and resets the client.");
let engine = makeSteamEngine();
const PAYLOAD = 42;
let steamCollection = new ServerWBO("steam", PAYLOAD);
let server = httpd_setup({
"/1.1/foo/storage/steam": steamCollection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
do_test_pending();
try {
// Some data to reset.
engine.lastSync = 123.45;
engine.toFetch = [Utils.makeGUID(), Utils.makeGUID(), Utils.makeGUID()];
_("Wipe server data and reset client.");
--- a/services/sync/tests/unit/test_syncengine_sync.js
+++ b/services/sync/tests/unit/test_syncengine_sync.js
@@ -92,17 +92,17 @@ add_task(async function test_syncStartup
collection.insert("scotsman",
encryptPayload({id: "scotsman",
denomination: "Flying Scotsman"}));
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let engine = makeRotaryEngine();
engine._store.items = {rekolok: "Rekonstruktionslokomotive"};
try {
// Confirm initial environment
do_check_eq(engine._tracker.changedIDs["rekolok"], undefined);
@@ -136,17 +136,17 @@ add_task(async function test_syncStartup
add_task(async function test_syncStartup_serverHasNewerVersion() {
_("SyncEngine._syncStartup ");
let global = new ServerWBO("global", {engines: {rotary: {version: 23456}}});
let server = httpd_setup({
"/1.1/foo/storage/meta/global": global.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let engine = makeRotaryEngine();
try {
// The server has a newer version of the data and our engine can
// handle. That should give us an exception.
let error;
@@ -162,17 +162,17 @@ add_task(async function test_syncStartup
}
});
add_task(async function test_syncStartup_syncIDMismatchResetsClient() {
_("SyncEngine._syncStartup resets sync if syncIDs don't match");
let server = sync_httpd_setup({});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
// global record with a different syncID than our engine has
let engine = makeRotaryEngine();
let global = new ServerWBO("global",
{engines: {rotary: {version: engine.version,
syncID: "foobar"}}});
server.registerPathHandler("/1.1/foo/storage/meta/global", global.handler());
@@ -202,17 +202,17 @@ add_task(async function test_syncStartup
add_task(async function test_processIncoming_emptyServer() {
_("SyncEngine._processIncoming working with an empty server backend");
let collection = new ServerCollection();
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let engine = makeRotaryEngine();
try {
// Merely ensure that this code path is run without any errors
engine._processIncoming();
do_check_eq(engine.lastSync, 0);
@@ -241,17 +241,17 @@ add_task(async function test_processInco
collection.insert("../pathological", pathologicalPayload);
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler(),
"/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
"/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
generateNewKeys(Service.collectionKeys);
let engine = makeRotaryEngine();
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
@@ -326,17 +326,17 @@ add_task(async function test_processInco
encryptPayload({id: "nukeme",
denomination: "Nuke me!",
deleted: true}));
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let engine = makeRotaryEngine();
engine._store.items = {newerserver: "New data, but not as new as server!",
olderidentical: "Older but identical",
updateclient: "Got data?",
original: "Original Entry",
long_original: "Long Original Entry",
@@ -628,17 +628,17 @@ add_task(async function test_processInco
wbo.modified = Date.now() / 1000 - 60 * (i + 10);
collection.insertWBO(wbo);
}
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
@@ -703,33 +703,31 @@ add_task(async function test_processInco
let engine = makeRotaryEngine();
engine.enabled = true;
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
// Confirm initial environment
do_check_eq(engine.lastSync, 0);
do_check_empty(engine._store.items);
- let error;
try {
await sync_engine_and_validate_telem(engine, true);
} catch (ex) {
- error = ex;
}
// Only the first two batches have been applied.
do_check_eq(Object.keys(engine._store.items).length,
MOBILE_BATCH_SIZE * 2);
// The third batch is stuck in toFetch. lastSync has been moved forward to
// the last successful item's timestamp.
@@ -776,17 +774,17 @@ add_task(async function test_processInco
engine.lastSync = LASTSYNC;
engine.toFetch = ["flying", "scotsman"];
engine.previousFailed = ["failed0", "failed1", "failed2"];
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
// Confirm initial environment
@@ -834,17 +832,17 @@ add_task(async function test_processInco
let payload = encryptPayload({id, denomination: "Record No. " + id});
collection.insert(id, payload);
}
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
// Confirm initial environment
@@ -890,17 +888,17 @@ add_task(async function test_processInco
let payload = encryptPayload({id, denomination: "Record No. " + id});
collection.insert(id, payload);
}
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
// Confirm initial environment
@@ -942,17 +940,17 @@ add_task(async function test_processInco
let payload = encryptPayload({id, denomination: "Record No. " + id});
collection.insert(id, payload);
}
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
// Confirm initial environment.
do_check_eq(engine.lastSync, 0);
@@ -1032,17 +1030,17 @@ add_task(async function test_processInco
let payload = encryptPayload({id, denomination: "Record No. " + i});
collection.insert(id, payload);
}
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
// Confirm initial environment.
do_check_eq(engine.lastSync, 0);
@@ -1148,17 +1146,17 @@ add_task(async function test_processInco
uris.push(req.path + "?" + req.queryString);
return h(req, res);
};
}
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": recording_handler(collection)
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
@@ -1261,17 +1259,17 @@ add_task(async function test_processInco
engine.enabled = true;
engine._store.items = {nojson: "Valid JSON",
nodecrypt: "Valid ciphertext"};
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
try {
// Confirm initial state
@@ -1318,17 +1316,17 @@ add_task(async function test_uploadOutgo
collection._wbos.scotsman = new ServerWBO("scotsman");
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler(),
"/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
"/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let engine = makeRotaryEngine();
engine.lastSync = 123; // needs to be non-zero so that tracker is queried
engine._store.items = {flying: "LNER Class A3 4472",
scotsman: "Flying Scotsman"};
// Mark one of these records as changed
engine._tracker.addChangedID("scotsman", 0);
@@ -1374,17 +1372,17 @@ add_task(async function test_uploadOutgo
collection._wbos.flying = new ServerWBO("flying");
collection._wbos.scotsman = new ServerWBO("scotsman");
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler(),
"/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(),
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let engine = makeRotaryEngine();
engine.allowSkippedRecord = true;
engine.lastSync = 1;
engine._store.items = { flying: "a".repeat(1024 * 1024) };
engine._tracker.addChangedID("flying", 1000);
@@ -1423,17 +1421,17 @@ add_task(async function test_uploadOutgo
// We only define the "flying" WBO on the server, not the "scotsman"
// and "peppercorn" ones.
collection._wbos.flying = new ServerWBO("flying");
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
engine.lastSync = 123; // needs to be non-zero so that tracker is queried
engine._store.items = {flying: "LNER Class A3 4472",
scotsman: "Flying Scotsman",
peppercorn: "Peppercorn Class"};
// Mark these records as changed
const FLYING_CHANGED = 12345;
@@ -1518,18 +1516,17 @@ add_task(async function test_uploadOutgo
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
-
+ await SyncTestingInfrastructure(server);
try {
// Confirm initial environment.
do_check_eq(noOfUploads, 0);
engine._syncStartup();
engine._uploadOutgoing();
@@ -1563,17 +1560,17 @@ add_task(async function test_uploadOutgo
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
try {
engine._syncStartup();
let error = null;
try {
engine._uploadOutgoing();
} catch (e) {
error = e;
@@ -1585,17 +1582,17 @@ add_task(async function test_uploadOutgo
});
add_task(async function test_syncFinish_noDelete() {
_("SyncEngine._syncFinish resets tracker's score");
let server = httpd_setup({});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
engine._delete = {}; // Nothing to delete
engine._tracker.score = 100;
// _syncFinish() will reset the engine's score.
engine._syncFinish();
do_check_eq(engine.score, 0);
server.stop(run_next_test);
@@ -1615,17 +1612,17 @@ add_task(async function test_syncFinish_
denomination: "Flying Scotsman"}));
collection._wbos.rekolok = new ServerWBO(
"rekolok", encryptPayload({id: "rekolok",
denomination: "Rekonstruktionslokomotive"}));
let server = httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
try {
engine._delete = {ids: ["flying", "rekolok"]};
engine._syncFinish();
// The 'flying' and 'rekolok' records were deleted while the
// 'scotsman' one wasn't.
@@ -1666,17 +1663,17 @@ add_task(async function test_syncFinish_
wbo.modified = now / 1000 - 60 * (i + 110);
collection.insertWBO(wbo);
}
let server = httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
try {
// Confirm initial environment
do_check_eq(noOfUploads, 0);
// Declare what we want to have deleted: all records no. 100 and
@@ -1717,17 +1714,17 @@ add_task(async function test_sync_partia
_("SyncEngine.sync() keeps changedIDs that couldn't be uploaded.");
Service.identity.username = "foo";
let collection = new ServerCollection();
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
generateNewKeys(Service.collectionKeys);
let engine = makeRotaryEngine();
engine.lastSync = 123; // needs to be non-zero so that tracker is queried
engine.lastSyncLocal = 456;
// Let the third upload fail completely
var noOfUploads = 0;
@@ -1799,17 +1796,17 @@ add_task(async function test_canDecrypt_
collection._wbos.flying = new ServerWBO(
"flying", encryptPayload({id: "flying",
denomination: "LNER Class A3 4472"}));
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
try {
do_check_false(engine.canDecrypt());
} finally {
await cleanAndGo(engine, server);
}
@@ -1825,17 +1822,17 @@ add_task(async function test_canDecrypt_
collection._wbos.flying = new ServerWBO(
"flying", encryptPayload({id: "flying",
denomination: "LNER Class A3 4472"}));
let server = sync_httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let engine = makeRotaryEngine();
try {
do_check_true(engine.canDecrypt());
} finally {
await cleanAndGo(engine, server);
}
@@ -1856,17 +1853,17 @@ add_task(async function test_syncapplied
let payload = encryptPayload({id, denomination: "Record No. " + id});
collection.insert(id, payload);
}
let server = httpd_setup({
"/1.1/foo/storage/rotary": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {rotary: {version: engine.version,
syncID: engine.syncID}};
let numApplyCalls = 0;
let engine_name;
--- a/services/sync/tests/unit/test_tab_engine.js
+++ b/services/sync/tests/unit/test_tab_engine.js
@@ -73,17 +73,17 @@ add_task(async function test_tab_engine_
let remoteRecord = encryptPayload({id: remoteID, clientName: "not local"});
collection.insert(remoteID, remoteRecord);
_("Setting up Sync server");
let server = sync_httpd_setup({
"/1.1/foo/storage/tabs": collection.handler()
});
- let syncTesting = await SyncTestingInfrastructure(server);
+ await SyncTestingInfrastructure(server);
Service.identity.username = "foo";
let meta_global = Service.recordManager.set(engine.metaURL,
new WBORecord(engine.metaURL));
meta_global.payload.engines = {tabs: {version: engine.version,
syncID: engine.syncID}};
generateNewKeys(Service.collectionKeys);
@@ -100,17 +100,17 @@ add_task(async function test_tab_engine_
});
_("Start sync");
engine._sync();
await promiseFinished;
});
add_test(function test_reconcile() {
- let [engine, store] = getMocks();
+ let [engine, ] = getMocks();
_("Setup engine for reconciling");
engine._syncStartup();
_("Create an incoming remote record");
let remoteRecord = {id: "remote id",
cleartext: "stuff and things!",
modified: 1000};
--- a/services/sync/tests/unit/test_tab_store.js
+++ b/services/sync/tests/unit/test_tab_store.js
@@ -88,18 +88,16 @@ function test_getAllTabs() {
function test_createRecord() {
let store = getMockStore();
let record;
store.getTabState = mockGetTabState;
store.shouldSkipWindow = mockShouldSkipWindow;
store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1);
- let tabs = store.getAllTabs();
- let tabsize = JSON.stringify(tabs[0]).length;
let numtabs = Math.ceil(20000. / 77.);
store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1);
record = store.createRecord("fake-guid");
ok(record instanceof TabSetRecord);
equal(record.tabs.length, 1);
_("create a big record");
--- a/services/sync/tests/unit/test_telemetry.js
+++ b/services/sync/tests/unit/test_telemetry.js
@@ -155,25 +155,20 @@ add_task(async function test_uploading()
meta: {global: {engines: {bookmarks: {version: engine.version,
syncID: engine.syncID}}}},
bookmarks: {}
});
await SyncTestingInfrastructure(server);
let parent = PlacesUtils.toolbarFolderId;
let uri = Utils.makeURI("http://getfirefox.com/");
- let title = "Get Firefox";
let bmk_id = PlacesUtils.bookmarks.insertBookmark(parent, uri,
PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
- let guid = store.GUIDForId(bmk_id);
- let record = store.createRecord(guid);
-
- let collection = server.user("foo").collection("bookmarks");
try {
let ping = await sync_engine_and_validate_telem(engine, false);
ok(!!ping);
equal(ping.engines.length, 1);
equal(ping.engines[0].name, "bookmarks");
ok(!!ping.engines[0].outgoing);
greater(ping.engines[0].outgoing[0].sent, 0)
ok(!ping.engines[0].incoming);
@@ -333,17 +328,16 @@ add_task(async function test_sync_partia
await cleanAndGo(engine, server);
}
});
add_task(async function test_generic_engine_fail() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
- let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {steam: {version: engine.version,
syncID: engine.syncID}}}},
steam: {}
});
await SyncTestingInfrastructure(server);
let e = new Error("generic failure message")
engine._errToThrow = e;
@@ -362,17 +356,16 @@ add_task(async function test_generic_eng
await cleanAndGo(engine, server);
}
});
add_task(async function test_engine_fail_ioerror() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
- let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {steam: {version: engine.version,
syncID: engine.syncID}}}},
steam: {}
});
await SyncTestingInfrastructure(server);
// create an IOError to re-throw as part of Sync.
try {
@@ -400,17 +393,16 @@ add_task(async function test_engine_fail
await cleanAndGo(engine, server);
}
});
add_task(async function test_initial_sync_engines() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
- let store = engine._store;
let engines = {};
// These are the only ones who actually have things to sync at startup.
let engineNames = ["clients", "bookmarks", "prefs", "tabs"];
let conf = { meta: { global: { engines } } };
for (let e of engineNames) {
engines[e] = { version: engine.version, syncID: engine.syncID };
conf[e] = {};
}
@@ -439,17 +431,16 @@ add_task(async function test_initial_syn
await cleanAndGo(engine, server);
}
});
add_task(async function test_nserror() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
- let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {steam: {version: engine.version,
syncID: engine.syncID}}}},
steam: {}
});
await SyncTestingInfrastructure(server);
engine._errToThrow = Components.Exception("NS_ERROR_UNKNOWN_HOST", Cr.NS_ERROR_UNKNOWN_HOST);
try {
@@ -515,17 +506,16 @@ add_identity_test(this, async function t
}
}
})
add_task(async function test_no_foreign_engines_in_error_ping() {
Service.engineManager.register(BogusEngine);
let engine = Service.engineManager.get("bogus");
engine.enabled = true;
- let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bogus: {version: engine.version, syncID: engine.syncID}}}},
steam: {}
});
engine._errToThrow = new Error("Oh no!");
await SyncTestingInfrastructure(server);
try {
let ping = await sync_and_validate_telem(true);
@@ -536,17 +526,16 @@ add_task(async function test_no_foreign_
await cleanAndGo(engine, server);
}
});
add_task(async function test_sql_error() {
Service.engineManager.register(SteamEngine);
let engine = Service.engineManager.get("steam");
engine.enabled = true;
- let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {steam: {version: engine.version,
syncID: engine.syncID}}}},
steam: {}
});
await SyncTestingInfrastructure(server);
engine._sync = function() {
// Just grab a DB connection and issue a bogus SQL statement synchronously.
@@ -564,17 +553,16 @@ add_task(async function test_sql_error()
await cleanAndGo(engine, server);
}
});
add_task(async function test_no_foreign_engines_in_success_ping() {
Service.engineManager.register(BogusEngine);
let engine = Service.engineManager.get("bogus");
engine.enabled = true;
- let store = engine._store;
let server = serverForUsers({"foo": "password"}, {
meta: {global: {engines: {bogus: {version: engine.version, syncID: engine.syncID}}}},
steam: {}
});
await SyncTestingInfrastructure(server);
try {
let ping = await sync_and_validate_telem();
--- a/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm
+++ b/services/sync/tps/extensions/tps/resource/modules/bookmarks.jsm
@@ -17,17 +17,17 @@ Cu.import("resource://gre/modules/Places
Cu.import("resource://gre/modules/PlacesUtils.jsm");
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://services-common/async.js");
Cu.import("resource://tps/logger.jsm");
var DumpBookmarks = function TPS_Bookmarks__DumpBookmarks() {
let cb = Async.makeSpinningCallback();
PlacesBackups.getBookmarksTree().then(result => {
- let [bookmarks, count] = result;
+ let [bookmarks, ] = result;
Logger.logInfo("Dumping Bookmarks...\n" + JSON.stringify(bookmarks) + "\n\n");
cb(null);
}).then(null, error => {
cb(error);
});
cb.wait();
};
--- a/services/sync/tps/extensions/tps/resource/modules/history.jsm
+++ b/services/sync/tps/extensions/tps/resource/modules/history.jsm
@@ -12,23 +12,16 @@ var EXPORTED_SYMBOLS = ["HistoryEntry",
const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/PlacesUtils.jsm");
Cu.import("resource://tps/logger.jsm");
Cu.import("resource://services-common/async.js");
var DumpHistory = function TPS_History__DumpHistory() {
- let writer = {
- value: "",
- write: function PlacesItem__dump__write(aStr, aLen) {
- this.value += aStr;
- }
- };
-
let query = PlacesUtils.history.getNewQuery();
let options = PlacesUtils.history.getNewQueryOptions();
let root = PlacesUtils.history.executeQuery(query, options).root;
root.containerOpen = true;
Logger.logInfo("\n\ndumping history\n", true);
for (var i = 0; i < root.childCount; i++) {
let node = root.getChild(i);
let uri = node.uri;
--- a/services/sync/tps/extensions/tps/resource/modules/tabs.jsm
+++ b/services/sync/tps/extensions/tps/resource/modules/tabs.jsm
@@ -44,17 +44,17 @@ var BrowserTabs = {
* @param uri The uri of the tab to find
* @param title The page title of the tab to find
* @param profile The profile to search for tabs
* @return true if the specified tab could be found, otherwise false
*/
Find(uri, title, profile) {
// Find the uri in Weave's list of tabs for the given profile.
let engine = Weave.Service.engineManager.get("tabs");
- for (let [guid, client] of Object.entries(engine.getAllClients())) {
+ for (let [, client] of Object.entries(engine.getAllClients())) {
if (!client.tabs) {
continue;
}
for (let key in client.tabs) {
let tab = client.tabs[key];
let weaveTabUrl = tab.urlHistory[0];
if (uri == weaveTabUrl && profile == client.clientName)
if (title == undefined || title == tab.title)
--- a/services/sync/tps/extensions/tps/resource/tps.jsm
+++ b/services/sync/tps/extensions/tps/resource/tps.jsm
@@ -427,17 +427,16 @@ var TPS = {
throw (e);
}
},
HandlePasswords(passwords, action) {
this.shouldValidatePasswords = true;
try {
for (let password of passwords) {
- let password_id = -1;
Logger.logInfo("executing action " + action.toUpperCase() +
" on password " + JSON.stringify(password));
let passwordOb = new Password(password);
switch (action) {
case ACTION_ADD:
Logger.AssertTrue(passwordOb.Create() > -1, "error adding password");
break;
case ACTION_VERIFY: