--- a/services/common/blocklist-clients.js
+++ b/services/common/blocklist-clients.js
@@ -33,17 +33,17 @@ const PREF_BLOCKLIST_GFX_COLLECTION
const PREF_BLOCKLIST_GFX_CHECKED_SECONDS = "services.blocklist.gfx.checked";
const PREF_BLOCKLIST_GFX_SIGNER = "services.blocklist.gfx.signer";
/**
* Revoke the appropriate certificates based on the records from the blocklist.
*
* @param {Object} data Current records in the local db.
*/
-async function updateCertBlocklist({data: records}) {
+async function updateCertBlocklist({ data: { current: records } }) {
const certList = Cc["@mozilla.org/security/certblocklist;1"]
.getService(Ci.nsICertBlocklist);
for (let item of records) {
try {
if (item.issuerName && item.serialNumber) {
certList.revokeCertByIssuerAndSerial(item.issuerName,
item.serialNumber);
} else if (item.subject && item.pubKeyHash) {
@@ -61,17 +61,17 @@ async function updateCertBlocklist({data
}
/**
* Modify the appropriate security pins based on records from the remote
* collection.
*
* @param {Object} data Current records in the local db.
*/
-async function updatePinningList({data: records}) {
+async function updatePinningList({ data: { current: records } }) {
if (!Services.prefs.getBoolPref(PREF_BLOCKLIST_PINNING_ENABLED)) {
return;
}
const siteSecurityService = Cc["@mozilla.org/ssservice;1"]
.getService(Ci.nsISiteSecurityService);
// clear the current preload list
@@ -104,17 +104,17 @@ async function updatePinningList({data:
}
/**
* Write list of records into JSON file, and notify nsBlocklistService.
*
* @param {Object} client RemoteSettingsClient instance
* @param {Object} data Current records in the local db.
*/
-async function updateJSONBlocklist(client, { data: records }) {
+async function updateJSONBlocklist(client, { data: { current: records } }) {
// Write JSON dump for synchronous load at startup.
const path = OS.Path.join(OS.Constants.Path.profileDir, client.filename);
const blocklistFolder = OS.Path.dirname(path);
await OS.File.makeDir(blocklistFolder, {from: OS.Constants.Path.profileDir});
const serialized = JSON.stringify({data: records}, null, 2);
try {
@@ -134,38 +134,38 @@ var PinningBlocklistClient;
var PluginBlocklistClient;
function initialize() {
OneCRLBlocklistClient = RemoteSettings(Services.prefs.getCharPref(PREF_BLOCKLIST_ONECRL_COLLECTION), {
bucketName: Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET),
lastCheckTimePref: PREF_BLOCKLIST_ONECRL_CHECKED_SECONDS,
signerName: Services.prefs.getCharPref(PREF_BLOCKLIST_ONECRL_SIGNER),
});
- OneCRLBlocklistClient.on("change", updateCertBlocklist);
+ OneCRLBlocklistClient.on("sync", updateCertBlocklist);
AddonBlocklistClient = RemoteSettings(Services.prefs.getCharPref(PREF_BLOCKLIST_ADDONS_COLLECTION), {
bucketName: Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET),
lastCheckTimePref: PREF_BLOCKLIST_ADDONS_CHECKED_SECONDS,
signerName: Services.prefs.getCharPref(PREF_BLOCKLIST_ADDONS_SIGNER),
});
- AddonBlocklistClient.on("change", updateJSONBlocklist.bind(null, AddonBlocklistClient));
+ AddonBlocklistClient.on("sync", updateJSONBlocklist.bind(null, AddonBlocklistClient));
PluginBlocklistClient = RemoteSettings(Services.prefs.getCharPref(PREF_BLOCKLIST_PLUGINS_COLLECTION), {
bucketName: Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET),
lastCheckTimePref: PREF_BLOCKLIST_PLUGINS_CHECKED_SECONDS,
signerName: Services.prefs.getCharPref(PREF_BLOCKLIST_PLUGINS_SIGNER),
});
- PluginBlocklistClient.on("change", updateJSONBlocklist.bind(null, PluginBlocklistClient));
+ PluginBlocklistClient.on("sync", updateJSONBlocklist.bind(null, PluginBlocklistClient));
GfxBlocklistClient = RemoteSettings(Services.prefs.getCharPref(PREF_BLOCKLIST_GFX_COLLECTION), {
bucketName: Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET),
lastCheckTimePref: PREF_BLOCKLIST_GFX_CHECKED_SECONDS,
signerName: Services.prefs.getCharPref(PREF_BLOCKLIST_GFX_SIGNER),
});
- GfxBlocklistClient.on("change", updateJSONBlocklist.bind(null, GfxBlocklistClient));
+ GfxBlocklistClient.on("sync", updateJSONBlocklist.bind(null, GfxBlocklistClient));
PinningBlocklistClient = RemoteSettings(Services.prefs.getCharPref(PREF_BLOCKLIST_PINNING_COLLECTION), {
bucketName: Services.prefs.getCharPref(PREF_BLOCKLIST_PINNING_BUCKET),
lastCheckTimePref: PREF_BLOCKLIST_PINNING_CHECKED_SECONDS,
signerName: Services.prefs.getCharPref(PREF_BLOCKLIST_PINNING_SIGNER),
});
- PinningBlocklistClient.on("change", updatePinningList);
+ PinningBlocklistClient.on("sync", updatePinningList);
}
--- a/services/common/docs/RemoteSettings.rst
+++ b/services/common/docs/RemoteSettings.rst
@@ -46,23 +46,28 @@ The list can optionally be filtered or o
"enabled": true,
},
order: "-weight"
});
Events
------
-The ``change`` event allows to be notified when the remote settings are changed. The event ``data`` attribute contains the whole new list of settings.
+The ``on()`` function registers handlers to be triggered when events occur.
+
+The ``sync`` event allows one to be notified when the remote settings are changed on the server side. Your handler is given an ``event`` object that contains a ``data`` attribute that has information about the changes:
+
+- ``current``: current list of entries (after changes were applied);
+- ``created``, ``updated``, ``deleted``: list of entries that were created/updated/deleted respectively.
.. code-block:: js
- RemoteSettings("a-key").on("change", event => {
- const { data } = event;
- for(const entry of data) {
+ RemoteSettings("a-key").on("sync", event => {
+ const { data: { current } } = event;
+ for(const entry of current) {
// Do something with entry...
// await InternalAPI.reload(entry.id, entry.label, entry.weight);
}
});
.. note::
Currently, the update of remote settings is triggered by the `nsBlocklistService <https://dxr.mozilla.org/mozilla-central/source/toolkit/mozapps/extensions/nsBlocklistService.js>`_ (~ every 24H).
--- a/services/common/remote-settings.js
+++ b/services/common/remote-settings.js
@@ -126,17 +126,17 @@ class RemoteSettingsClient {
constructor(collectionName, { lastCheckTimePref, bucketName, signerName }) {
this.collectionName = collectionName;
this.lastCheckTimePref = lastCheckTimePref;
this.bucketName = bucketName;
this.signerName = signerName;
this._callbacks = new Map();
- this._callbacks.set("change", []);
+ this._callbacks.set("sync", []);
this._kinto = null;
}
get identifier() {
return `${this.bucketName}/${this.collectionName}`;
}
@@ -237,20 +237,22 @@ class RemoteSettingsClient {
// to record the fact that a check happened.
if (lastModified <= collectionLastModified) {
this._updateLastCheck(serverTime);
reportStatus = UptakeTelemetry.STATUS.UP_TO_DATE;
return;
}
// Fetch changes from server.
+ let syncResult;
try {
// Server changes have priority during synchronization.
const strategy = Kinto.syncStrategy.SERVER_WINS;
- const { ok } = await collection.sync({remote, strategy});
+ syncResult = await collection.sync({remote, strategy});
+ const { ok } = syncResult;
if (!ok) {
// Some synchronization conflicts occured.
reportStatus = UptakeTelemetry.STATUS.CONFLICT_ERROR;
throw new Error("Sync failed");
}
} catch (e) {
if (e.message == INVALID_SIGNATURE) {
// Signature verification failed during synchronzation.
@@ -261,46 +263,73 @@ class RemoteSettingsClient {
// remote collection.
const payload = await fetchRemoteCollection(remote, collection);
try {
await this._validateCollectionSignature(remote, payload, collection, {ignoreLocal: true});
} catch (e) {
reportStatus = UptakeTelemetry.STATUS.SIGNATURE_RETRY_ERROR;
throw e;
}
- // if the signature is good (we haven't thrown), and the remote
- // last_modified is newer than the local last_modified, replace the
- // local data
+
+ // The signature is good (we haven't thrown).
+ // Now we will inspect what we had locally.
+ const { data: oldData } = await collection.list();
+
+ // We build a sync result as if a diff-based sync was performed.
+ syncResult = { created: [], updated: [], deleted: [] };
+
+ // If the remote last_modified is newer than the local last_modified,
+ // replace the local data
const localLastModified = await collection.db.getLastModified();
if (payload.last_modified >= localLastModified) {
+ const { data: newData } = payload;
await collection.clear();
- await collection.loadDump(payload.data);
+ await collection.loadDump(newData);
+
+ // Compare local and remote to populate the sync result
+ const oldById = new Map(oldData.map(e => [e.id, e]));
+ for (const r of newData) {
+ const old = oldById.get(r.id);
+ if (old) {
+ if (old.last_modified != r.last_modified) {
+ syncResult.updated.push({ old, new: r });
+ }
+ oldById.delete(r.id);
+ } else {
+ syncResult.created.push(r);
+ }
+ }
+ // Records that remain in our map now are those missing from remote
+ syncResult.deleted = Array.from(oldById.values());
}
+
} else {
// The sync has thrown, it can be a network or a general error.
if (/NetworkError/.test(e.message)) {
reportStatus = UptakeTelemetry.STATUS.NETWORK_ERROR;
} else if (/Backoff/.test(e.message)) {
reportStatus = UptakeTelemetry.STATUS.BACKOFF;
} else {
reportStatus = UptakeTelemetry.STATUS.SYNC_ERROR;
}
throw e;
}
}
// Read local collection of records.
- const { data } = await collection.list();
+ const { data: current } = await collection.list();
// Handle the obtained records (ie. apply locally).
try {
// Execute callbacks in order and sequentially.
// If one fails everything fails.
- const callbacks = this._callbacks.get("change");
+ const { created, updated, deleted } = syncResult;
+ const event = { data: { current, created, updated, deleted } };
+ const callbacks = this._callbacks.get("sync");
for (const cb of callbacks) {
- await cb({ data });
+ await cb(event);
}
} catch (e) {
reportStatus = UptakeTelemetry.STATUS.APPLY_ERROR;
throw e;
}
// Track last update.
this._updateLastCheck(serverTime);
--- a/services/common/tests/unit/test_blocklist_clients.js
+++ b/services/common/tests/unit/test_blocklist_clients.js
@@ -1,18 +1,16 @@
const { Constructor: CC } = Components;
ChromeUtils.import("resource://gre/modules/Services.jsm");
ChromeUtils.import("resource://testing-common/httpd.js");
-ChromeUtils.import("resource://gre/modules/Timer.jsm");
const { FileUtils } = ChromeUtils.import("resource://gre/modules/FileUtils.jsm", {});
const { OS } = ChromeUtils.import("resource://gre/modules/osfile.jsm", {});
const BlocklistClients = ChromeUtils.import("resource://services-common/blocklist-clients.js", {});
-const { UptakeTelemetry } = ChromeUtils.import("resource://services-common/uptake-telemetry.js", {});
const BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
"nsIBinaryInputStream", "setInputStream");
let gBlocklistClients;
let server;
@@ -62,27 +60,27 @@ function run_test() {
// Setup server fake responses.
function handleResponse(request, response) {
try {
const sample = getSampleResponse(request, server.identity.primaryPort);
if (!sample) {
do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
}
+ const { status: { status, statusText }, sampleHeaders, responseBody } = sample;
- response.setStatusLine(null, sample.status.status,
- sample.status.statusText);
+ response.setStatusLine(null, status, statusText);
// send the headers
- for (let headerLine of sample.sampleHeaders) {
- let headerElements = headerLine.split(":");
+ for (const headerLine of sampleHeaders) {
+ const headerElements = headerLine.split(":");
response.setHeader(headerElements[0], headerElements[1].trimLeft());
}
response.setHeader("Date", (new Date()).toUTCString());
- response.write(sample.responseBody);
+ response.write(responseBody);
response.finish();
} catch (e) {
info(e);
}
}
const configPath = "/v1/";
const addonsRecordsPath = "/v1/buckets/blocklists/collections/addons/records";
const gfxRecordsPath = "/v1/buckets/blocklists/collections/gfx/records";
@@ -107,42 +105,16 @@ add_task(async function test_initial_dum
// Verify the loaded data has status to synced:
const list = await client.get();
equal(list[0]._status, "synced");
}
});
add_task(clear_state);
-add_task(async function test_records_obtained_from_server_are_stored_in_db() {
- for (let {client} of gBlocklistClients) {
- // Test an empty db populates
- await client.maybeSync(2000, Date.now(), { loadDump: false });
-
- // Open the collection, verify it's been populated:
- // Our test data has a single record; it should be in the local collection
- const list = await client.get();
- equal(list.length, 1);
- }
-});
-add_task(clear_state);
-
-add_task(async function test_records_changes_are_overwritten_by_server_changes() {
- const {client} = gBlocklistClients[0];
-
- // Create some local conflicting data, and make sure it syncs without error.
- const collection = await client.openCollection();
- await collection.create({
- "versionRange": [],
- "id": "9d500963-d80e-3a91-6e74-66f3811b99cc"
- }, { useRecordId: true });
- await client.maybeSync(2000, Date.now(), {loadDump: false});
-});
-add_task(clear_state);
-
add_task(async function test_list_is_written_to_file_in_profile() {
for (let {client, testData} of gBlocklistClients) {
const filePath = OS.Path.join(OS.Constants.Path.profileDir, client.filename);
const profFile = new FileUtils.File(filePath);
strictEqual(profFile.exists(), false);
await client.maybeSync(2000, Date.now(), {loadDump: false});
@@ -194,123 +166,31 @@ add_task(async function test_sends_reloa
client.maybeSync(2000, Date.now() - 1000, {loadDump: false});
});
equal(received.data.filename, client.filename);
}
});
add_task(clear_state);
-add_task(async function test_telemetry_reports_up_to_date() {
- for (let {client} of gBlocklistClients) {
- await client.maybeSync(2000, Date.now() - 1000, {loadDump: false});
- const filePath = OS.Path.join(OS.Constants.Path.profileDir, client.filename);
- const profFile = new FileUtils.File(filePath);
- const fileLastModified = profFile.lastModifiedTime = profFile.lastModifiedTime - 1000;
- const serverTime = Date.now();
- const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
-
- await client.maybeSync(3000, serverTime);
-
- // File was not updated.
- equal(fileLastModified, profFile.lastModifiedTime);
- // Server time was updated.
- const after = Services.prefs.getIntPref(client.lastCheckTimePref);
- equal(after, Math.round(serverTime / 1000));
- // No Telemetry was sent.
- const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
- const expectedIncrements = {[UptakeTelemetry.STATUS.UP_TO_DATE]: 1};
- checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
- }
-});
-add_task(clear_state);
-
-add_task(async function test_telemetry_if_sync_succeeds() {
- // We test each client because Telemetry requires preleminary declarations.
- for (let {client} of gBlocklistClients) {
- const serverTime = Date.now();
- const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
-
- await client.maybeSync(2000, serverTime, {loadDump: false});
-
- const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
- const expectedIncrements = {[UptakeTelemetry.STATUS.SUCCESS]: 1};
- checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
- }
-});
-add_task(clear_state);
-
-add_task(async function test_telemetry_reports_if_application_fails() {
- const {client} = gBlocklistClients[0];
- const serverTime = Date.now();
- const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
- client.on("change", () => { throw new Error("boom"); });
-
- try {
- await client.maybeSync(2000, serverTime, {loadDump: false});
- } catch (e) {}
-
- const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
- const expectedIncrements = {[UptakeTelemetry.STATUS.APPLY_ERROR]: 1};
- checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
-});
-add_task(clear_state);
-
-add_task(async function test_telemetry_reports_if_sync_fails() {
- const {client} = gBlocklistClients[0];
- const serverTime = Date.now();
-
- const collection = await client.openCollection();
- await collection.db.saveLastModified(9999);
-
- const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
-
- try {
- await client.maybeSync(10000, serverTime);
- } catch (e) {}
-
- const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
- const expectedIncrements = {[UptakeTelemetry.STATUS.SYNC_ERROR]: 1};
- checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
-});
-add_task(clear_state);
-
-add_task(async function test_telemetry_reports_unknown_errors() {
- const {client} = gBlocklistClients[0];
- const serverTime = Date.now();
- const backup = client.openCollection;
- client.openCollection = () => { throw new Error("Internal"); };
- const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
-
- try {
- await client.maybeSync(2000, serverTime);
- } catch (e) {}
-
- client.openCollection = backup;
- const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
- const expectedIncrements = {[UptakeTelemetry.STATUS.UNKNOWN_ERROR]: 1};
- checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
-});
-add_task(clear_state);
-
// get a response for a given request from sample data
function getSampleResponse(req, port) {
const responses = {
"OPTIONS": {
"sampleHeaders": [
"Access-Control-Allow-Headers: Content-Length,Expires,Backoff,Retry-After,Last-Modified,Total-Records,ETag,Pragma,Cache-Control,authorization,content-type,if-none-match,Alert,Next-Page",
"Access-Control-Allow-Methods: GET,HEAD,OPTIONS,POST,DELETE,OPTIONS",
"Access-Control-Allow-Origin: *",
"Content-Type: application/json; charset=UTF-8",
"Server: waitress"
],
"status": {status: 200, statusText: "OK"},
"responseBody": "null"
},
- "GET:/v1/?": {
+ "GET:/v1/": {
"sampleHeaders": [
"Access-Control-Allow-Origin: *",
"Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
"Content-Type: application/json; charset=UTF-8",
"Server: waitress"
],
"status": {status: 200, statusText: "OK"},
"responseBody": JSON.stringify({
@@ -481,28 +361,15 @@ function getSampleResponse(req, port) {
"blockID": "g200",
"feature": "WEBGL_MSAA",
"devices": [],
"id": "c3a15ba9-e0e2-421f-e399-c995e5b8d14e",
"last_modified": 3500,
"os": "Darwin 11",
"featureStatus": "BLOCKED_DEVICE"
}]})
- },
- "GET:/v1/buckets/blocklists/collections/addons/records?_sort=-last_modified&_since=9999": {
- "sampleHeaders": [
- "Access-Control-Allow-Origin: *",
- "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
- "Content-Type: application/json; charset=UTF-8",
- "Server: waitress",
- ],
- "status": {status: 503, statusText: "Service Unavailable"},
- "responseBody": JSON.stringify({
- code: 503,
- errno: 999,
- error: "Service Unavailable",
- })
}
};
return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+ responses[`${req.method}:${req.path}`] ||
responses[req.method];
}
--- a/services/common/tests/unit/test_blocklist_signatures.js
+++ b/services/common/tests/unit/test_blocklist_signatures.js
@@ -289,16 +289,17 @@ add_task(async function test_check_signa
await OneCRLBlocklistClient.maybeSync(1000, startTime, {loadDump: false});
let endHistogram = getUptakeTelemetrySnapshot(TELEMETRY_HISTOGRAM_KEY);
// ensure that a success histogram is tracked when a succesful sync occurs.
let expectedIncrements = {[UptakeTelemetry.STATUS.SUCCESS]: 1};
checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
+
// Check that some additions (2 records) to the collection have a valid
// signature.
// This response adds two entries (RECORD1 and RECORD2) to the collection
const RESPONSE_TWO_ADDED = {
comment: "RESPONSE_TWO_ADDED",
sampleHeaders: [
"Content-Type: application/json; charset=UTF-8",
@@ -320,16 +321,17 @@ add_task(async function test_check_signa
"GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=1000":
[RESPONSE_TWO_ADDED],
"GET:/v1/buckets/blocklists/collections/certificates?":
[RESPONSE_META_TWO_ITEMS_SIG]
};
registerHandlers(twoItemsResponses);
await OneCRLBlocklistClient.maybeSync(3000, startTime);
+
// Check the collection with one addition and one removal has a valid
// signature
// Remove RECORD1, add RECORD3
const RESPONSE_ONE_ADDED_ONE_REMOVED = {
comment: "RESPONSE_ONE_ADDED_ONE_REMOVED ",
sampleHeaders: [
"Content-Type: application/json; charset=UTF-8",
@@ -373,16 +375,17 @@ add_task(async function test_check_signa
"GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=4000":
[RESPONSE_EMPTY_NO_UPDATE],
"GET:/v1/buckets/blocklists/collections/certificates?":
[RESPONSE_META_THREE_ITEMS_SIG]
};
registerHandlers(noOpResponses);
await OneCRLBlocklistClient.maybeSync(4100, startTime);
+
// Check the collection is reset when the signature is invalid
// Prepare a (deliberately) bad signature to check the collection state is
// reset if something is inconsistent
const RESPONSE_COMPLETE_INITIAL = {
comment: "RESPONSE_COMPLETE_INITIAL ",
sampleHeaders: [
"Content-Type: application/json; charset=UTF-8",
@@ -428,26 +431,37 @@ add_task(async function test_check_signa
"GET:/v1/buckets/blocklists/collections/certificates/records?_sort=id":
[RESPONSE_COMPLETE_INITIAL_SORTED_BY_ID]
};
registerHandlers(badSigGoodSigResponses);
startHistogram = getUptakeTelemetrySnapshot(TELEMETRY_HISTOGRAM_KEY);
+ let retrySyncData;
+ OneCRLBlocklistClient.on("sync", ({ data }) => { retrySyncData = data; });
+
await OneCRLBlocklistClient.maybeSync(5000, startTime);
endHistogram = getUptakeTelemetrySnapshot(TELEMETRY_HISTOGRAM_KEY);
+ // Since we only fixed the signature and no data was changed, the sync event
+ // handlers will be called with empty created/updated/deleted lists.
+ equal(retrySyncData.current.length, 2);
+ equal(retrySyncData.created.length, 0);
+ equal(retrySyncData.updated.length, 0);
+ equal(retrySyncData.deleted.length, 0);
+
// ensure that the failure count is incremented for a succesful sync with an
// (initial) bad signature - only SERVICES_SETTINGS_SYNC_SIG_FAIL should
// increment.
expectedIncrements = {[UptakeTelemetry.STATUS.SIGNATURE_ERROR]: 1};
checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
+
const badSigGoodOldResponses = {
// In this test, we deliberately serve a bad signature initially. The
// subsequent sitnature returned is a valid one for the three item
// collection.
"GET:/v1/buckets/blocklists/collections/certificates?":
[RESPONSE_META_BAD_SIG, RESPONSE_META_EMPTY_SIG],
// The first collection state is the current state (since there's no update
// - but, since the signature is wrong, another request will be made)
@@ -460,18 +474,72 @@ add_task(async function test_check_signa
"GET:/v1/buckets/blocklists/collections/certificates/records?_sort=id":
[RESPONSE_EMPTY_INITIAL],
};
// ensure our collection hasn't been replaced with an older, empty one
await checkRecordCount(OneCRLBlocklistClient, 2);
registerHandlers(badSigGoodOldResponses);
+
+ let oldChangesData;
+ OneCRLBlocklistClient.on("sync", ({ data }) => { oldChangesData = data; });
+
await OneCRLBlocklistClient.maybeSync(5000, startTime);
+ // Local data was unchanged, since it was newer than the one returned by the server.
+ equal(oldChangesData.current.length, 2);
+ equal(oldChangesData.created.length, 0);
+ equal(oldChangesData.updated.length, 0);
+ equal(oldChangesData.deleted.length, 0);
+
+
+ const badLocalContentGoodSigResponses = {
+ // In this test, we deliberately serve a bad signature initially. The
+ // subsequent signature returned is a valid one for the three item
+ // collection.
+ "GET:/v1/buckets/blocklists/collections/certificates?":
+ [RESPONSE_META_BAD_SIG, RESPONSE_META_THREE_ITEMS_SIG],
+ // The next request is for the full collection. This will be checked
+ // against the valid signature - so the sync should succeed.
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified":
+ [RESPONSE_COMPLETE_INITIAL],
+ // The next request is for the full collection sorted by id. This will be
+ // checked against the valid signature - so the sync should succeed.
+ "GET:/v1/buckets/blocklists/collections/certificates/records?_sort=id":
+ [RESPONSE_COMPLETE_INITIAL_SORTED_BY_ID]
+ };
+
+ registerHandlers(badLocalContentGoodSigResponses);
+
+ // we create a local state manually here, in order to test that the sync event data
+ // properly contains created, updated, and deleted records.
+ // the final server collection contains RECORD2 and RECORD3
+ const kintoCol = await OneCRLBlocklistClient.openCollection();
+ await kintoCol.clear();
+ await kintoCol.create({ ...RECORD2, last_modified: 1234567890, serialNumber: "abc" }, { synced: true, useRecordId: true });
+ const localId = "0602b1b2-12ab-4d3a-b6fb-593244e7b035";
+ await kintoCol.create({ id: localId }, { synced: true, useRecordId: true });
+
+ let syncData;
+ OneCRLBlocklistClient.on("sync", ({ data }) => { syncData = data; });
+
+ await OneCRLBlocklistClient.maybeSync(5000, startTime, { loadDump: false });
+
+ // Local data was replaced with the server contents; the sync event reports the diff.
+ equal(syncData.current.length, 2);
+ equal(syncData.created.length, 1);
+ equal(syncData.created[0].id, RECORD3.id);
+ equal(syncData.updated.length, 1);
+ equal(syncData.updated[0].old.serialNumber, "abc");
+ equal(syncData.updated[0].new.serialNumber, RECORD2.serialNumber);
+ equal(syncData.deleted.length, 1);
+ equal(syncData.deleted[0].id, localId);
+
+
const allBadSigResponses = {
// In this test, we deliberately serve only a bad signature.
"GET:/v1/buckets/blocklists/collections/certificates?":
[RESPONSE_META_BAD_SIG],
// The first collection state is the three item collection (since
// there's a sync with no updates) - but, since the signature is wrong,
// another request will be made...
"GET:/v1/buckets/blocklists/collections/certificates/records?_sort=-last_modified&_since=4000":
new file mode 100644
--- /dev/null
+++ b/services/common/tests/unit/test_remote_settings.js
@@ -0,0 +1,311 @@
+const { Constructor: CC } = Components;
+
+ChromeUtils.import("resource://gre/modules/Services.jsm");
+ChromeUtils.import("resource://testing-common/httpd.js");
+
+const { RemoteSettings } = ChromeUtils.import("resource://services-common/remote-settings.js", {});
+const { UptakeTelemetry } = ChromeUtils.import("resource://services-common/uptake-telemetry.js", {});
+
+const BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream", "setInputStream");
+
+let server;
+let client;
+
+async function clear_state() {
+ // Clear local DB.
+ const collection = await client.openCollection();
+ await collection.clear();
+}
+
+
+function run_test() {
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ // Point the blocklist clients to use this local HTTP server.
+ Services.prefs.setCharPref("services.settings.server",
+ `http://localhost:${server.identity.primaryPort}/v1`);
+ // Ensure that signature verification is disabled to prevent interference
+ // with basic certificate sync tests
+ Services.prefs.setBoolPref("services.settings.verify_signature", false);
+
+ client = RemoteSettings("password-fields");
+
+ // Setup server fake responses.
+ function handleResponse(request, response) {
+ try {
+ const sample = getSampleResponse(request, server.identity.primaryPort);
+ if (!sample) {
+ do_throw(`unexpected ${request.method} request for ${request.path}?${request.queryString}`);
+ }
+
+ response.setStatusLine(null, sample.status.status,
+ sample.status.statusText);
+ // send the headers
+ for (let headerLine of sample.sampleHeaders) {
+ let headerElements = headerLine.split(":");
+ response.setHeader(headerElements[0], headerElements[1].trimLeft());
+ }
+ response.setHeader("Date", (new Date()).toUTCString());
+
+ response.write(JSON.stringify(sample.responseBody));
+ response.finish();
+ } catch (e) {
+ info(e);
+ }
+ }
+ const configPath = "/v1/";
+ const recordsPath = "/v1/buckets/main/collections/password-fields/records";
+ server.registerPathHandler(configPath, handleResponse);
+ server.registerPathHandler(recordsPath, handleResponse);
+
+ run_next_test();
+
+ registerCleanupFunction(function() {
+ server.stop(() => { });
+ });
+}
+
+add_task(async function test_records_obtained_from_server_are_stored_in_db() {
+ // Test an empty db populates
+ await client.maybeSync(2000, Date.now());
+
+ // Open the collection, verify it's been populated:
+ // Our test data has a single record; it should be in the local collection
+ const list = await client.get();
+ equal(list.length, 1);
+});
+add_task(clear_state);
+
+add_task(async function test_records_changes_are_overwritten_by_server_changes() {
+ // Create some local conflicting data, and make sure it syncs without error.
+ const collection = await client.openCollection();
+ await collection.create({
+ "website": "",
+ "id": "9d500963-d80e-3a91-6e74-66f3811b99cc"
+ }, { useRecordId: true });
+
+ await client.maybeSync(2000, Date.now());
+
+ const data = await client.get();
+ equal(data[0].website, "https://some-website.com");
+});
+add_task(clear_state);
+
+add_task(async function test_sync_event_provides_information_about_records() {
+ const serverTime = Date.now();
+
+ let eventData;
+ client.on("sync", ({ data }) => eventData = data);
+
+ await client.maybeSync(2000, serverTime - 1000);
+ equal(eventData.current.length, 1);
+
+ await client.maybeSync(3001, serverTime);
+ equal(eventData.current.length, 2);
+ equal(eventData.created.length, 1);
+ equal(eventData.created[0].website, "https://www.other.org/signin");
+ equal(eventData.updated.length, 1);
+ equal(eventData.updated[0].old.website, "https://some-website.com");
+ equal(eventData.updated[0].new.website, "https://some-website.com/login");
+ equal(eventData.deleted.length, 0);
+
+ await client.maybeSync(4001, serverTime);
+ equal(eventData.current.length, 1);
+ equal(eventData.created.length, 0);
+ equal(eventData.updated.length, 0);
+ equal(eventData.deleted.length, 1);
+ equal(eventData.deleted[0].website, "https://www.other.org/signin");
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_up_to_date() {
+ await client.maybeSync(2000, Date.now() - 1000);
+ const serverTime = Date.now();
+ const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
+
+ await client.maybeSync(3000, serverTime);
+
+ // Only the UP_TO_DATE uptake status was reported.
+ const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
+ const expectedIncrements = {[UptakeTelemetry.STATUS.UP_TO_DATE]: 1};
+ checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_if_sync_succeeds() {
+ // We test each client because Telemetry requires preliminary declarations.
+ const serverTime = Date.now();
+ const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
+
+ await client.maybeSync(2000, serverTime);
+
+ const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
+ const expectedIncrements = {[UptakeTelemetry.STATUS.SUCCESS]: 1};
+ checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_if_application_fails() {
+ const serverTime = Date.now();
+ const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
+ client.on("sync", () => { throw new Error("boom"); });
+
+ try {
+ await client.maybeSync(2000, serverTime);
+ } catch (e) {}
+
+ const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
+ const expectedIncrements = {[UptakeTelemetry.STATUS.APPLY_ERROR]: 1};
+ checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_if_sync_fails() {
+ const serverTime = Date.now();
+
+ const collection = await client.openCollection();
+ await collection.db.saveLastModified(9999);
+
+ const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
+
+ try {
+ await client.maybeSync(10000, serverTime);
+ } catch (e) {}
+
+ const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
+ const expectedIncrements = {[UptakeTelemetry.STATUS.SYNC_ERROR]: 1};
+ checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_unknown_errors() {
+ const serverTime = Date.now();
+ const backup = client.openCollection;
+ client.openCollection = () => { throw new Error("Internal"); };
+ const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
+
+ try {
+ await client.maybeSync(2000, serverTime);
+ } catch (e) {}
+
+ client.openCollection = backup;
+ const endHistogram = getUptakeTelemetrySnapshot(client.identifier);
+ const expectedIncrements = {[UptakeTelemetry.STATUS.UNKNOWN_ERROR]: 1};
+ checkUptakeTelemetry(startHistogram, endHistogram, expectedIncrements);
+});
+add_task(clear_state);
+
+// get a response for a given request from sample data
+function getSampleResponse(req, port) {
+ const responses = {
+ "OPTIONS": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Headers: Content-Length,Expires,Backoff,Retry-After,Last-Modified,Total-Records,ETag,Pragma,Cache-Control,authorization,content-type,if-none-match,Alert,Next-Page",
+ "Access-Control-Allow-Methods: GET,HEAD,OPTIONS,POST,DELETE,OPTIONS",
+ "Access-Control-Allow-Origin: *",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": null
+ },
+ "GET:/v1/": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress"
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": {
+ "settings": {
+ "batch_max_requests": 25
+ },
+ "url": `http://localhost:${port}/v1/`,
+ "documentation": "https://kinto.readthedocs.org/",
+ "version": "1.5.1",
+ "commit": "cbc6f58",
+ "hello": "kinto"
+ }
+ },
+ "GET:/v1/buckets/main/collections/password-fields/records?_sort=-last_modified": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"3000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": {
+ "data": [{
+ "id": "9d500963-d80e-3a91-6e74-66f3811b99cc",
+ "last_modified": 3000,
+ "website": "https://some-website.com",
+ "selector": "#user[password]"
+ }]
+ }
+ },
+ "GET:/v1/buckets/main/collections/password-fields/records?_sort=-last_modified&_since=3000": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"4000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": {
+ "data": [{
+ "id": "aabad965-e556-ffe7-4191-074f5dee3df3",
+ "last_modified": 4000,
+ "website": "https://www.other.org/signin",
+ "selector": "#signinpassword"
+ }, {
+ "id": "9d500963-d80e-3a91-6e74-66f3811b99cc",
+ "last_modified": 3500,
+ "website": "https://some-website.com/login",
+ "selector": "input#user[password]"
+ }]
+ }
+ },
+ "GET:/v1/buckets/main/collections/password-fields/records?_sort=-last_modified&_since=4000": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ "Etag: \"5000\""
+ ],
+ "status": {status: 200, statusText: "OK"},
+ "responseBody": {
+ "data": [{
+ "id": "aabad965-e556-ffe7-4191-074f5dee3df3",
+ "deleted": true
+ }]
+ }
+ },
+ "GET:/v1/buckets/main/collections/password-fields/records?_sort=-last_modified&_since=9999": {
+ "sampleHeaders": [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ ],
+ "status": {status: 503, statusText: "Service Unavailable"},
+ "responseBody": {
+ code: 503,
+ errno: 999,
+ error: "Service Unavailable",
+ }
+ }
+ };
+ dump(`${req.method}:${req.path}?${req.queryString}`);
+ return responses[`${req.method}:${req.path}?${req.queryString}`] ||
+ responses[`${req.method}:${req.path}`] ||
+ responses[req.method];
+
+}
--- a/services/common/tests/unit/xpcshell.ini
+++ b/services/common/tests/unit/xpcshell.ini
@@ -11,16 +11,18 @@ support-files =
[test_blocklist_certificates.js]
# Initial JSON data for blocklists are not shipped on Android.
skip-if = (os == "android" || appname == "thunderbird")
tags = blocklist
[test_blocklist_clients.js]
tags = blocklist
[test_blocklist_pinning.js]
tags = blocklist
+[test_remote_settings.js]
+tags = remote-settings blocklist
[test_remote_settings_poll.js]
tags = remote-settings blocklist
[test_kinto.js]
tags = blocklist
[test_blocklist_signatures.js]
tags = remote-settings blocklist
[test_storage_adapter.js]