/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This file is generated from kinto.js - do not modify directly.
*/
/*
* Version 13.0.0 - 7fbf95d
*/
/**
 * Base db adapter.
 *
 * Defines the interface every storage adapter must implement; every method
 * here throws until overridden by a concrete subclass (eg. `IDB`).
 *
 * @abstract
 */
class BaseAdapter {
    /**
     * Deletes every records present in the database.
     *
     * @abstract
     * @return {Promise}
     */
    clear() {
        throw new Error("Not Implemented.");
    }
    /**
     * Executes a batch of operations within a single transaction.
     *
     * @abstract
     * @param {Function} callback The operation callback.
     * @param {Object} options The options object.
     * @return {Promise}
     */
    execute(callback, options = { preload: [] }) {
        throw new Error("Not Implemented.");
    }
    /**
     * Retrieve a record by its primary key from the database.
     *
     * @abstract
     * @param {String} id The record id.
     * @return {Promise}
     */
    get(id) {
        throw new Error("Not Implemented.");
    }
    /**
     * Lists all records from the database.
     *
     * @abstract
     * @param {Object} params The filters and order to apply to the results.
     * @return {Promise}
     */
    list(params = { filters: {}, order: "" }) {
        throw new Error("Not Implemented.");
    }
    /**
     * Store the lastModified value.
     *
     * @abstract
     * @param {Number} lastModified
     * @return {Promise}
     */
    saveLastModified(lastModified) {
        throw new Error("Not Implemented.");
    }
    /**
     * Retrieve saved lastModified value.
     *
     * @abstract
     * @return {Promise}
     */
    getLastModified() {
        throw new Error("Not Implemented.");
    }
    /**
     * Load records in bulk that were exported from a server.
     *
     * @abstract
     * @param {Array} records The records to load.
     * @return {Promise}
     */
    importBulk(records) {
        throw new Error("Not Implemented.");
    }
    /**
     * Load a dump of records exported from a server.
     *
     * @deprecated Use {@link importBulk} instead.
     * @abstract
     * @param {Array} records The records to load.
     * @return {Promise}
     */
    loadDump(records) {
        throw new Error("Not Implemented.");
    }
    /**
     * Store the collection metadata.
     *
     * @abstract
     * @param {Object} metadata The metadata object to persist.
     * @return {Promise}
     */
    saveMetadata(metadata) {
        throw new Error("Not Implemented.");
    }
    /**
     * Retrieve the stored collection metadata.
     *
     * @abstract
     * @return {Promise}
     */
    getMetadata() {
        throw new Error("Not Implemented.");
    }
}
// Valid record ids start with an alphanumeric character, followed by
// alphanumerics, `_` or `-` (UUIDs match this pattern, among others).
const RE_RECORD_ID = /^[a-zA-Z0-9][a-zA-Z0-9_-]*$/;
/**
 * Returns `true` when the provided value is `undefined`.
 * @param {Any} value The value to inspect.
 * @return {Boolean}
 */
function _isUndefined(value) {
    return value === undefined;
}
/**
 * Sorts records in a list according to a given ordering.
 *
 * Records missing the ordering field sort first in ascending order (last in
 * descending order). The input list is not mutated; a sorted copy is returned.
 *
 * Fix: the previous implementation tested field *truthiness* instead of
 * *presence*, so a defined-but-falsy value (eg. `0`, `""`) was ordered
 * inconsistently relative to records missing the field entirely.
 *
 * @param {String} order The ordering, eg. `-last_modified`.
 * @param {Array} list The collection to order.
 * @return {Array}
 */
function sortObjects(order, list) {
    const hasDash = order[0] === "-";
    const field = hasDash ? order.slice(1) : order;
    const direction = hasDash ? -1 : 1;
    return list.slice().sort((a, b) => {
        const aDefined = typeof a[field] !== "undefined";
        const bDefined = typeof b[field] !== "undefined";
        if (aDefined && !bDefined) {
            return direction;
        }
        if (bDefined && !aDefined) {
            return -direction;
        }
        if (!aDefined && !bDefined) {
            return 0;
        }
        // NOTE: equal values intentionally keep the historical behavior of
        // returning -direction (no explicit tie handling).
        return a[field] > b[field] ? direction : -direction;
    });
}
/**
 * Test if a single object matches all given filters.
 *
 * Supports WHERE-IN semantics (array of candidate values) and nested filter
 * objects (sub-object matching).
 *
 * Fix: `typeof null === "object"`, so a `null` filter value used to recurse
 * into `filterObject(null, …)` and crash on `Object.keys(null)`. A `null`
 * value is now compared with strict equality like other scalars.
 *
 * @param {Object} filters The filters object.
 * @param {Object} entry The object to filter.
 * @return {Boolean}
 */
function filterObject(filters, entry) {
    return Object.keys(filters).every(filter => {
        const value = filters[filter];
        if (Array.isArray(value)) {
            // WHERE IN: any candidate value may match.
            return value.some(candidate => candidate === entry[filter]);
        }
        else if (value !== null && typeof value === "object") {
            // Nested filter: recurse into the sub-object.
            return filterObject(value, entry[filter]);
        }
        else if (!Object.prototype.hasOwnProperty.call(entry, filter)) {
            console.error(`The property ${filter} does not exist`);
            return false;
        }
        return entry[filter] === value;
    });
}
/**
 * Resolves a list of functions sequentially, which can be sync or async; in
 * case of async, functions must return a promise.
 *
 * Each function receives the resolved value of the previous one; the initial
 * value seeds the chain.
 *
 * @param {Array} fns The list of functions.
 * @param {Any} init The initial value.
 * @return {Promise}
 */
function waterfall(fns, init) {
    let chain = Promise.resolve(init);
    for (const fn of fns) {
        chain = chain.then(fn);
    }
    return chain;
}
/**
 * Simple deep object comparison function. This only supports comparison of
 * serializable JavaScript objects.
 *
 * Fixes over the previous version:
 * - an array and a plain object with the same indices/keys no longer
 *   compare equal (eg. `[1, 2]` vs `{0: 1, 1: 2}`);
 * - objects with the same number of keys but different key *names*, where
 *   the differing keys hold `undefined`, no longer compare equal (absent
 *   keys and keys mapped to `undefined` both read back as `undefined`).
 *
 * @param {Object} a The source object.
 * @param {Object} b The compared object.
 * @return {Boolean}
 */
function deepEqual(a, b) {
    if (a === b) {
        return true;
    }
    if (typeof a !== typeof b) {
        return false;
    }
    if (!(a && typeof a == "object") || !(b && typeof b == "object")) {
        return false;
    }
    // An array never equals a non-array.
    if (Array.isArray(a) !== Array.isArray(b)) {
        return false;
    }
    if (Object.keys(a).length !== Object.keys(b).length) {
        return false;
    }
    for (const k in a) {
        // Key sets must actually match, not just their counts.
        if (!Object.prototype.hasOwnProperty.call(b, k)) {
            return false;
        }
        if (!deepEqual(a[k], b[k])) {
            return false;
        }
    }
    return true;
}
/**
 * Return an object without the specified keys.
 *
 * @param {Object} obj The original object.
 * @param {Array} keys The list of keys to exclude.
 * @return {Object} A shallow copy without the specified keys.
 */
function omitKeys(obj, keys = []) {
    const copy = { ...obj };
    keys.forEach(key => {
        delete copy[key];
    });
    return copy;
}
/**
 * Shallow element-wise comparison of two arrays using strict equality.
 *
 * @param {Array} a
 * @param {Array} b
 * @return {Boolean}
 */
function arrayEqual(a, b) {
    if (a.length !== b.length) {
        return false;
    }
    for (let idx = 0; idx < a.length; idx++) {
        if (a[idx] !== b[idx]) {
            return false;
        }
    }
    return true;
}
/**
 * Build a nested object from a path of keys, assigning `val` at the leaf and
 * reusing any intermediate objects already present in `nestedFiltersObj`.
 * The target object is mutated in place.
 *
 * @param {Array} arr The key path (eg. `["article", "title"]`).
 * @param {Any} val The value to assign at the deepest key.
 * @param {Object} nestedFiltersObj The object to populate.
 * @return {Any} The assigned leaf value (or the target object for an empty path).
 */
function makeNestedObjectFromArr(arr, val, nestedFiltersObj) {
    if (arr.length === 0) {
        return nestedFiltersObj;
    }
    let node = nestedFiltersObj;
    for (let i = 0; i < arr.length - 1; i++) {
        const key = arr[i];
        if (!Object.prototype.hasOwnProperty.call(node, key)) {
            node[key] = {};
        }
        node = node[key];
    }
    return (node[arr[arr.length - 1]] = val);
}
/**
 * Transform dot-notation filters (eg. `{"article.title": "hello"}`) into
 * nested filter objects (eg. `{article: {title: "hello"}}`).
 *
 * @param {Object} filtersObj The flat filters object.
 * @return {Object} The nested filters object.
 */
function transformSubObjectFilters(filtersObj) {
    const transformedFilters = {};
    for (const [key, val] of Object.entries(filtersObj)) {
        makeNestedObjectFromArr(key.split("."), val, transformedFilters);
    }
    return transformedFilters;
}
// Record fields backed by an IndexedDB index ("id" via the primary key,
// "_status" and "last_modified" via indices created in IDB#open()).
const INDEXED_FIELDS = ["id", "_status", "last_modified"];
/**
 * Small helper that wraps the opening of an IndexedDB into a Promise.
 *
 * Rejects on open error, on upgrade-transaction abort, or on any error
 * raised by the database during the upgrade.
 *
 * @param dbname {String} The database name.
 * @param version {Integer} Schema version
 * @param onupgradeneeded {Function} The callback to execute if schema is
 * missing or different.
 * @return {Promise<IDBDatabase>}
 */
async function open(dbname, { version, onupgradeneeded }) {
    return new Promise((resolve, reject) => {
        const request = indexedDB.open(dbname, version);
        request.onupgradeneeded = event => {
            const db = event.target.result;
            db.onerror = event => reject(event.target.error);
            // When an upgrade is needed, a transaction is started.
            const transaction = event.target.transaction;
            transaction.onabort = event => {
                // The event's error can be null on abort: fall back on the
                // transaction error, then a synthesized AbortError.
                const error = event.target.error ||
                    transaction.error ||
                    new DOMException("The operation has been aborted", "AbortError");
                reject(error);
            };
            // Callback for store creation etc.
            return onupgradeneeded(event);
        };
        request.onerror = event => {
            reject(event.target.error);
        };
        request.onsuccess = event => {
            const db = event.target.result;
            resolve(db);
        };
    });
}
/**
 * Helper to run the specified callback in a single transaction on the
 * specified store.
 * The helper focuses on transaction wrapping into a promise.
 *
 * The promise settles on transaction completion (not on individual request
 * success events, which may fire before the transaction ultimately fails),
 * and resolves with whatever the callback returned.
 *
 * @param db {IDBDatabase} The database instance.
 * @param name {String} The store name.
 * @param callback {Function} The piece of code to execute in the transaction.
 * @param options {Object} Options.
 * @param options.mode {String} Transaction mode (default: read).
 * @return {Promise} any value returned by the callback.
 */
async function execute(db, name, callback, options = {}) {
    const { mode } = options;
    return new Promise((resolve, reject) => {
        // On Safari, calling IDBDatabase.transaction with mode == undefined raises
        // a TypeError.
        const transaction = mode
            ? db.transaction([name], mode)
            : db.transaction([name]);
        const store = transaction.objectStore(name);
        // Let the callback abort this transaction.
        const abort = e => {
            transaction.abort();
            reject(e);
        };
        // Execute the specified callback **synchronously**.
        let result;
        try {
            result = callback(store, abort);
        }
        catch (e) {
            // The callback threw before scheduling any request: abort cleanly.
            abort(e);
        }
        // Handlers are attached after the (synchronous) callback ran; IDB
        // events fire asynchronously, so none can be missed.
        transaction.onerror = event => reject(event.target.error);
        transaction.oncomplete = event => resolve(result);
        transaction.onabort = event => {
            // The event's error can be null on abort: fall back accordingly.
            const error = event.target.error ||
                transaction.error ||
                new DOMException("The operation has been aborted", "AbortError");
            reject(error);
        };
    });
}
/**
 * Helper to wrap the deletion of an IndexedDB database into a promise.
 *
 * @param dbName {String} the database to delete
 * @return {Promise} resolved with the success event target once deleted.
 */
async function deleteDatabase(dbName) {
    return new Promise((resolve, reject) => {
        const request = indexedDB.deleteDatabase(dbName);
        request.onerror = event => reject(event.target.error);
        request.onsuccess = event => resolve(event.target);
    });
}
/**
 * IDB cursor handlers.
 * @type {Object}
 */
const cursorHandlers = {
    /**
     * Build a cursor `onsuccess` handler that collects every visited record
     * matching `filters`, then invokes `done(results)` once the range is
     * exhausted.
     */
    all(filters, done) {
        const results = [];
        return event => {
            const cursor = event.target.result;
            if (cursor) {
                const { value } = cursor;
                if (filterObject(filters, value)) {
                    results.push(value);
                }
                cursor.continue();
            }
            else {
                // Cursor exhausted: the whole range was scanned.
                done(results);
            }
        };
    },
    /**
     * Build a cursor `onsuccess` handler implementing a WHERE-IN scan:
     * `values` must be sorted ascending (the caller sorts them), and the
     * cursor jumps to each candidate key instead of walking every record.
     */
    in(values, filters, done) {
        const results = [];
        let i = 0;
        return function (event) {
            const cursor = event.target.result;
            if (!cursor) {
                // Cursor exhausted before all candidates were checked.
                done(results);
                return;
            }
            const { key, value } = cursor;
            // `key` can be an array of two values (see `keyPath` in indices definitions).
            // `values` can be an array of arrays if we filter using an index whose key path
            // is an array (eg. `cursorHandlers.in([["bid/cid", 42], ["bid/cid", 43]], ...)`)
            while (key > values[i]) {
                // The cursor has passed beyond this key. Check next.
                ++i;
                if (i === values.length) {
                    done(results); // There is no next. Stop searching.
                    return;
                }
            }
            const isEqual = Array.isArray(key)
                ? arrayEqual(key, values[i])
                : key === values[i];
            if (isEqual) {
                // Candidate matched: keep it (subject to remaining filters)
                // and move to the next record.
                if (filterObject(filters, value)) {
                    results.push(value);
                }
                cursor.continue();
            }
            else {
                // Jump the cursor directly to the next candidate key.
                cursor.continue(values[i]);
            }
        };
    },
};
/**
 * Creates an IDB request and attach it the appropriate cursor event handler to
 * perform a list query.
 *
 * Multiple matching values are handled by passing an array.
 *
 * Fixes:
 * - a comment had been split across two lines, leaving
 *   `"article.title": "hello"})` as bare (invalid) tokens in the body;
 * - the "no remaining filters" shortcut tested `remainingFilters.length`,
 *   which is always `undefined` on a plain object, so the bulk `getAll`
 *   path was unreachable (results were correct but always cursor-scanned).
 *
 * @param {String} cid The collection id (ie. `{bid}/{cid}`)
 * @param {IDBStore} store The IDB store.
 * @param {Object} filters Filter the records by field.
 * @param {Function} done The operation completion handler.
 * @return {IDBRequest}
 */
function createListRequest(cid, store, filters, done) {
    const filterFields = Object.keys(filters);
    // If no filters, get all results in one bulk.
    if (filterFields.length == 0) {
        const request = store.index("cid").getAll(IDBKeyRange.only(cid));
        request.onsuccess = event => done(event.target.result);
        return request;
    }
    // Introspect filters and check if they leverage an indexed field.
    const indexField = filterFields.find(field => {
        return INDEXED_FIELDS.includes(field);
    });
    if (!indexField) {
        // Iterate on all records for this collection (ie. cid).
        // Sub-queries use dot notation (ie. filters: { "article.title": "hello" }).
        const isSubQuery = filterFields.some(key => key.includes("."));
        const appliedFilters = isSubQuery
            ? transformSubObjectFilters(filters)
            : filters;
        const request = store.index("cid").openCursor(IDBKeyRange.only(cid));
        request.onsuccess = cursorHandlers.all(appliedFilters, done);
        return request;
    }
    // If `indexField` was used already, don't filter again.
    const remainingFilters = omitKeys(filters, [indexField]);
    // value specified in the filter (eg. `filters: { _status: ["created", "updated"] }`)
    const value = filters[indexField];
    // For the "id" field, use the primary key.
    const indexStore = indexField == "id" ? store : store.index(indexField);
    // WHERE IN equivalent clause
    if (Array.isArray(value)) {
        if (value.length === 0) {
            return done([]);
        }
        const values = value.map(i => [cid, i]).sort();
        const range = IDBKeyRange.bound(values[0], values[values.length - 1]);
        const request = indexStore.openCursor(range);
        request.onsuccess = cursorHandlers.in(values, remainingFilters, done);
        return request;
    }
    // If no filters on custom attribute, get all results in one bulk.
    if (Object.keys(remainingFilters).length == 0) {
        const request = indexStore.getAll(IDBKeyRange.only([cid, value]));
        request.onsuccess = event => done(event.target.result);
        return request;
    }
    // WHERE field = value clause
    const request = indexStore.openCursor(IDBKeyRange.only([cid, value]));
    request.onsuccess = cursorHandlers.all(remainingFilters, done);
    return request;
}
/**
 * Error raised by the IDB adapter: wraps the underlying IndexedDB error,
 * prefixing its message with the adapter method name while keeping the
 * original error's name and stack trace.
 */
class IDBError extends Error {
    constructor(method, err) {
        super(`IndexedDB ${method}() ${err.message}`);
        this.name = err.name;
        this.stack = err.stack;
    }
}
/**
 * IndexedDB adapter.
 *
 * This adapter doesn't support any options.
 */
class IDB extends BaseAdapter {
    /* Expose the IDBError class publicly */
    static get IDBError() {
        return IDBError;
    }
    /**
     * Constructor.
     *
     * @param {String} cid The key base for this collection (eg. `bid/cid`)
     * @param {Object} options
     * @param {String} options.dbName The IndexedDB name (default: `"KintoDB"`)
     * @param {String} options.migrateOldData Whether old database data should be migrated (default: `false`)
     */
    constructor(cid, options = {}) {
        super();
        this.cid = cid;
        this.dbName = options.dbName || "KintoDB";
        this._options = options;
        this._db = null;
    }
    /**
     * Wraps an IndexedDB error into an {@link IDBError} carrying the
     * adapter method name. Always throws.
     */
    _handleError(method, err) {
        throw new IDBError(method, err);
    }
    /**
     * Ensures a connection to the IndexedDB database has been opened.
     *
     * Opens (and lazily creates/upgrades) the shared database, and migrates
     * data from the legacy per-collection database when
     * `options.migrateOldData` is set.
     *
     * @override
     * @return {Promise}
     */
    async open() {
        if (this._db) {
            return this;
        }
        // In previous versions, we used to have a database with name `${bid}/${cid}`.
        // Check if it exists, and migrate data once new schema is in place.
        // Note: the built-in migrations from IndexedDB can only be used if the
        // database name does not change.
        const dataToMigrate = this._options.migrateOldData
            ? await migrationRequired(this.cid)
            : null;
        this._db = await open(this.dbName, {
            version: 2,
            onupgradeneeded: event => {
                const db = event.target.result;
                if (event.oldVersion < 1) {
                    // Records store
                    const recordsStore = db.createObjectStore("records", {
                        keyPath: ["_cid", "id"],
                    });
                    // An index to obtain all the records in a collection.
                    recordsStore.createIndex("cid", "_cid");
                    // Here we create indices for every known field in records by collection.
                    // Local record status ("synced", "created", "updated", "deleted")
                    recordsStore.createIndex("_status", ["_cid", "_status"]);
                    // Last modified field
                    recordsStore.createIndex("last_modified", ["_cid", "last_modified"]);
                    // Timestamps store
                    db.createObjectStore("timestamps", {
                        keyPath: "cid",
                    });
                }
                if (event.oldVersion < 2) {
                    // Collections store
                    db.createObjectStore("collections", {
                        keyPath: "cid",
                    });
                }
            },
        });
        if (dataToMigrate) {
            const { records, timestamp } = dataToMigrate;
            await this.importBulk(records);
            await this.saveLastModified(timestamp);
            console.log(`${this.cid}: data was migrated successfully.`);
            // Delete the old database.
            await deleteDatabase(this.cid);
            console.warn(`${this.cid}: old database was deleted.`);
        }
        return this;
    }
    /**
     * Closes current connection to the database.
     *
     * @override
     * @return {Promise}
     */
    close() {
        if (this._db) {
            this._db.close(); // indexedDB.close is synchronous
            this._db = null;
        }
        return Promise.resolve();
    }
    /**
     * Opens a connection (if needed) and runs `callback` in a single
     * transaction on the given store.
     *
     * To determine if a transaction has completed successfully, we should rather
     * listen to the transaction’s complete event rather than the IDBObjectStore
     * request’s success event, because the transaction may still fail after the
     * success event fires.
     *
     * @param {String} name Store name
     * @param {Function} callback to execute
     * @param {Object} options Options
     * @param {String} options.mode Transaction mode ("readwrite" or undefined)
     * @return {Promise} resolved once the transaction completes; the
     * callback's return value is not propagated.
     */
    async prepare(name, callback, options) {
        await this.open();
        await execute(this._db, name, callback, options);
    }
    /**
     * Deletes every records in the current collection.
     *
     * Only records whose `_cid` matches this collection are removed; other
     * collections sharing the database are untouched.
     *
     * @override
     * @return {Promise}
     */
    async clear() {
        try {
            await this.prepare("records", store => {
                const range = IDBKeyRange.only(this.cid);
                const request = store.index("cid").openKeyCursor(range);
                request.onsuccess = event => {
                    const cursor = event.target.result;
                    if (cursor) {
                        store.delete(cursor.primaryKey);
                        cursor.continue();
                    }
                };
                return request;
            }, { mode: "readwrite" });
        }
        catch (e) {
            this._handleError("clear", e);
        }
    }
    /**
     * Executes the set of synchronous CRUD operations described in the provided
     * callback within an IndexedDB transaction, for current db store.
     *
     * The callback will be provided an object exposing the following synchronous
     * CRUD operation methods: get, create, update, delete.
     *
     * Important note: because limitations in IndexedDB implementations, no
     * asynchronous code should be performed within the provided callback; the
     * promise will therefore be rejected if the callback returns a Promise.
     *
     * Options:
     * - {Array} preload: The list of record IDs to fetch and make available to
     *   the transaction object get() method (default: [])
     *
     * NOTE(review): when passing an options object explicitly, `preload` must
     * be present — `execute(cb, {})` would throw on `options.preload.length`
     * (the default only applies when `options` is omitted entirely).
     *
     * @example
     * const db = new IDB("example");
     * const result = await db.execute(transaction => {
     *   transaction.create({id: 1, title: "foo"});
     *   transaction.update({id: 2, title: "bar"});
     *   transaction.delete(3);
     *   return "foo";
     * });
     *
     * @override
     * @param {Function} callback The operation description callback.
     * @param {Object} options The options object.
     * @return {Promise}
     */
    async execute(callback, options = { preload: [] }) {
        // Transactions in IndexedDB are autocommited when a callback does not
        // perform any additional operation.
        // The way Promises are implemented in Firefox (see https://bugzilla.mozilla.org/show_bug.cgi?id=1193394)
        // prevents using within an opened transaction.
        // To avoid managing asynchronocity in the specified `callback`, we preload
        // a list of record in order to execute the `callback` synchronously.
        // See also:
        // - http://stackoverflow.com/a/28388805/330911
        // - http://stackoverflow.com/a/10405196
        // - https://jakearchibald.com/2015/tasks-microtasks-queues-and-schedules/
        let result;
        await this.prepare("records", (store, abort) => {
            const runCallback = (preloaded = []) => {
                // Expose a consistent API for every adapter instead of raw store methods.
                const proxy = transactionProxy(this, store, preloaded);
                // The callback is executed synchronously within the same transaction.
                try {
                    const returned = callback(proxy);
                    if (returned instanceof Promise) {
                        // XXX: investigate how to provide documentation details in error.
                        throw new Error("execute() callback should not return a Promise.");
                    }
                    // Bring to scope that will be returned (once promise awaited).
                    result = returned;
                }
                catch (e) {
                    // The callback has thrown an error explicitly. Abort transaction cleanly.
                    abort(e);
                }
            };
            // No option to preload records, go straight to `callback`.
            if (!options.preload.length) {
                return runCallback();
            }
            // Preload specified records using a list request.
            const filters = { id: options.preload };
            createListRequest(this.cid, store, filters, records => {
                // Store obtained records by id.
                const preloaded = {};
                for (const record of records) {
                    // `_cid` is an internal attribute: never expose it to callers.
                    delete record["_cid"];
                    preloaded[record.id] = record;
                }
                runCallback(preloaded);
            });
        }, { mode: "readwrite" });
        return result;
    }
    /**
     * Retrieve a record by its primary key from the IndexedDB database.
     *
     * Resolves with `undefined` when the record does not exist.
     *
     * @override
     * @param {String} id The record id.
     * @return {Promise}
     */
    async get(id) {
        try {
            let record;
            await this.prepare("records", store => {
                store.get([this.cid, id]).onsuccess = e => (record = e.target.result);
            });
            return record;
        }
        catch (e) {
            this._handleError("get", e);
        }
    }
    /**
     * Lists all records from the IndexedDB database.
     *
     * @override
     * @param {Object} params The filters and order to apply to the results.
     * @return {Promise}
     */
    async list(params = { filters: {} }) {
        const { filters } = params;
        try {
            let results = [];
            await this.prepare("records", store => {
                createListRequest(this.cid, store, filters, _results => {
                    // we have received all requested records that match the filters,
                    // we now park them within current scope and hide the `_cid` attribute.
                    for (const result of _results) {
                        delete result["_cid"];
                    }
                    results = _results;
                });
            });
            // The resulting list of records is sorted.
            // XXX: with some efforts, this could be fully implemented using IDB API.
            return params.order ? sortObjects(params.order, results) : results;
        }
        catch (e) {
            this._handleError("list", e);
        }
    }
    /**
     * Store the lastModified value into metadata store.
     *
     * A nullish/non-numeric value deletes the stored timestamp instead.
     *
     * @override
     * @param {Number} lastModified
     * @return {Promise}
     */
    async saveLastModified(lastModified) {
        // `parseInt` of a non-numeric value yields NaN, coerced to null here.
        const value = parseInt(lastModified, 10) || null;
        try {
            await this.prepare("timestamps", store => {
                if (value === null) {
                    store.delete(this.cid);
                }
                else {
                    store.put({ cid: this.cid, value });
                }
            }, { mode: "readwrite" });
            return value;
        }
        catch (e) {
            this._handleError("saveLastModified", e);
        }
    }
    /**
     * Retrieve saved lastModified value.
     *
     * Resolves with `null` when no timestamp was stored.
     *
     * @override
     * @return {Promise}
     */
    async getLastModified() {
        try {
            let entry = null;
            await this.prepare("timestamps", store => {
                store.get(this.cid).onsuccess = e => (entry = e.target.result);
            });
            return entry ? entry.value : null;
        }
        catch (e) {
            this._handleError("getLastModified", e);
        }
    }
    /**
     * Load a dump of records exported from a server.
     *
     * @deprecated Use {@link importBulk} instead.
     * @abstract
     * @param {Array} records The records to load.
     * @return {Promise}
     */
    async loadDump(records) {
        return this.importBulk(records);
    }
    /**
     * Load records in bulk that were exported from a server.
     *
     * Also bumps the stored lastModified timestamp when the imported records
     * carry a higher one.
     *
     * @abstract
     * @param {Array} records The records to load.
     * @return {Promise}
     */
    async importBulk(records) {
        try {
            await this.execute(transaction => {
                // Since the put operations are asynchronous, we chain
                // them together. The last one will be waited for the
                // `transaction.oncomplete` callback. (see #execute())
                let i = 0;
                putNext();
                function putNext() {
                    if (i == records.length) {
                        return;
                    }
                    // On error, `transaction.onerror` is called.
                    transaction.update(records[i]).onsuccess = putNext;
                    ++i;
                }
            });
            const previousLastModified = await this.getLastModified();
            // NOTE: Math.max of an empty list is -Infinity, which never beats
            // the previous value below.
            const lastModified = Math.max(...records.map(record => record.last_modified));
            if (lastModified > previousLastModified) {
                await this.saveLastModified(lastModified);
            }
            return records;
        }
        catch (e) {
            this._handleError("importBulk", e);
        }
    }
    /**
     * Store the collection metadata into the "collections" store.
     *
     * @override
     * @param {Object} metadata The metadata object to persist.
     * @return {Promise}
     */
    async saveMetadata(metadata) {
        try {
            await this.prepare("collections", store => store.put({ cid: this.cid, metadata }), { mode: "readwrite" });
            return metadata;
        }
        catch (e) {
            this._handleError("saveMetadata", e);
        }
    }
    /**
     * Retrieve the stored collection metadata (or `null` when absent).
     *
     * @override
     * @return {Promise}
     */
    async getMetadata() {
        try {
            let entry = null;
            await this.prepare("collections", store => {
                store.get(this.cid).onsuccess = e => (entry = e.target.result);
            });
            return entry ? entry.metadata : null;
        }
        catch (e) {
            this._handleError("getMetadata", e);
        }
    }
}
/**
 * IDB transaction proxy: exposes synchronous create/update/delete/get
 * operations scoped to the adapter's collection id, hiding the raw store API.
 *
 * @param {IDB} adapter The call IDB adapter
 * @param {IDBStore} store The IndexedDB database store.
 * @param {Array} preloaded The list of records to make available to
 * get() (default: []).
 * @return {Object}
 */
function transactionProxy(adapter, store, preloaded = []) {
    const _cid = adapter.cid;
    return {
        create(record) {
            // Tag the record with the collection id before inserting.
            store.add({ ...record, _cid });
        },
        update(record) {
            return store.put({ ...record, _cid });
        },
        delete(id) {
            store.delete([_cid, id]);
        },
        get(id) {
            // Only records listed in `preload` are available synchronously.
            return preloaded[id];
        },
    };
}
/**
 * Up to version 10.X of kinto.js, each collection had its own collection.
 * The database name was `${bid}/${cid}` (eg. `"blocklists/certificates"`)
 * and contained only one store with the same name.
 *
 * Returns `{records, timestamp}` extracted from such a legacy database,
 * or `null` when no legacy data exists (or extraction failed).
 */
async function migrationRequired(dbName) {
    let exists = true;
    const db = await open(dbName, {
        version: 1,
        onupgradeneeded: event => {
            // The upgrade callback only runs when the database is being
            // created, ie. it did not exist before this open() call.
            exists = false;
        },
    });
    // Check that the DB we're looking at is really a legacy one,
    // and not some remainder of the open() operation above.
    // (`&=` is a bitwise AND: it coerces `exists` to 0/1, which the
    // `!exists` check below handles fine.)
    exists &=
        db.objectStoreNames.contains("__meta__") &&
            db.objectStoreNames.contains(dbName);
    if (!exists) {
        db.close();
        // Testing the existence creates it, so delete it :)
        await deleteDatabase(dbName);
        return null;
    }
    console.warn(`${dbName}: old IndexedDB database found.`);
    try {
        // Scan all records.
        let records;
        await execute(db, dbName, store => {
            store.openCursor().onsuccess = cursorHandlers.all({}, res => (records = res));
        });
        console.log(`${dbName}: found ${records.length} records.`);
        // Check if there's a entry for this.
        let timestamp = null;
        await execute(db, "__meta__", store => {
            store.get(`${dbName}-lastModified`).onsuccess = e => {
                timestamp = e.target.result ? e.target.result.value : null;
            };
        });
        // Some previous versions, also used to store the timestamps without prefix.
        if (!timestamp) {
            await execute(db, "__meta__", store => {
                store.get("lastModified").onsuccess = e => {
                    timestamp = e.target.result ? e.target.result.value : null;
                };
            });
        }
        console.log(`${dbName}: ${timestamp ? "found" : "no"} timestamp.`);
        // Those will be inserted in the new database/schema.
        return { records, timestamp };
    }
    catch (e) {
        console.error("Error occured during migration", e);
        return null;
    }
    finally {
        // Always release the connection, whether migration succeeded or not.
        db.close();
    }
}
// Placeholder binding for the bundled uuid4 module.
// NOTE(review): it is invoked as a function in createUUIDSchema(), so the
// bundler presumably reassigns/populates it elsewhere in the built file —
// verify against the full bundle.
var uuid4 = {};
// Local bookkeeping fields stripped before comparing records (see recordsEqual).
const RECORD_FIELDS_TO_CLEAN = ["_status"];
// Supported hook names; presumably checked by hook validation (not visible
// in this chunk).
const AVAILABLE_HOOKS = ["incoming-changes"];
// Batch size for bulk imports; usage not visible in this chunk.
const IMPORT_CHUNK_SIZE = 200;
/**
 * Compare two records omitting local fields and synchronization
 * attributes (like _status and last_modified)
 * @param {Object} a A record to compare.
 * @param {Object} b A record to compare.
 * @param {Array} localFields Additional fields to ignore during the comparison
 * @return {boolean}
 */
function recordsEqual(a, b, localFields = []) {
    const ignoredFields = [
        ...RECORD_FIELDS_TO_CLEAN,
        "last_modified",
        ...localFields,
    ];
    const strip = record => omitKeys(record, ignoredFields);
    return deepEqual(strip(a), strip(b));
}
/**
 * Synchronization result object.
 *
 * Aggregates per-type lists of sync outcomes (created, updated, conflicts…)
 * and exposes deduplicated views of them through getters.
 */
class SyncResultObject {
    /**
     * Public constructor.
     */
    constructor() {
        /**
         * The timestamp of the latest synchronization; `null` until set.
         * @type {Number|null}
         */
        this.lastModified = null;
        // Raw (possibly duplicated) entries, keyed by result type.
        this._lists = {};
        [
            "errors",
            "created",
            "updated",
            "deleted",
            "published",
            "conflicts",
            "skipped",
            "resolved",
            "void",
        ].forEach(l => (this._lists[l] = []));
        // Memoized deduplicated lists; invalidated by add() and reset().
        this._cached = {};
    }
    /**
     * Adds entries for a given result type.
     *
     * Unknown types are ignored with a warning (and `undefined` is returned
     * instead of `this` in that case).
     *
     * @param {String} type The result type.
     * @param {Array} entries The result entries.
     * @return {SyncResultObject}
     */
    add(type, entries) {
        if (!Array.isArray(this._lists[type])) {
            console.warn(`Unknown type "${type}"`);
            return;
        }
        if (!Array.isArray(entries)) {
            entries = [entries];
        }
        this._lists[type] = this._lists[type].concat(entries);
        // Invalidate the memoized deduplicated view for this type.
        delete this._cached[type];
        return this;
    }
    // `true` when no errors nor conflicts were registered.
    get ok() {
        return this.errors.length + this.conflicts.length === 0;
    }
    get errors() {
        return this._lists["errors"];
    }
    get conflicts() {
        return this._lists["conflicts"];
    }
    get skipped() {
        return this._deduplicate("skipped");
    }
    get resolved() {
        return this._deduplicate("resolved");
    }
    get created() {
        return this._deduplicate("created");
    }
    get updated() {
        return this._deduplicate("updated");
    }
    get deleted() {
        return this._deduplicate("deleted");
    }
    get published() {
        return this._deduplicate("published");
    }
    // Returns (and memoizes) the list with duplicate ids collapsed to the
    // most recently added entry; id-less entries are all kept, appended last.
    _deduplicate(list) {
        if (!(list in this._cached)) {
            // Deduplicate entries by id. If the values don't have `id` attribute, just
            // keep all.
            const recordsWithoutId = new Set();
            const recordsById = new Map();
            this._lists[list].forEach(record => {
                if (!record.id) {
                    recordsWithoutId.add(record);
                }
                else {
                    recordsById.set(record.id, record);
                }
            });
            this._cached[list] = Array.from(recordsById.values()).concat(Array.from(recordsWithoutId));
        }
        return this._cached[list];
    }
    /**
     * Reinitializes result entries for a given result type.
     *
     * @param {String} type The result type.
     * @return {SyncResultObject}
     */
    reset(type) {
        this._lists[type] = [];
        delete this._cached[type];
        return this;
    }
    toObject() {
        // Only used in tests.
        return {
            ok: this.ok,
            lastModified: this.lastModified,
            errors: this.errors,
            created: this.created,
            updated: this.updated,
            deleted: this.deleted,
            skipped: this.skipped,
            published: this.published,
            conflicts: this.conflicts,
            resolved: this.resolved,
        };
    }
}
/**
 * Error raised when the server's timestamp indicates its data was flushed,
 * carrying both the client-side and server-side timestamps for inspection.
 */
class ServerWasFlushedError extends Error {
    constructor(clientTimestamp, serverTimestamp, message) {
        super(message);
        if (Error.captureStackTrace) {
            // Trim this constructor frame from the stack (V8 engines only).
            Error.captureStackTrace(this, ServerWasFlushedError);
        }
        // Timestamp known locally before the sync.
        this.clientTimestamp = clientTimestamp;
        // Timestamp reported by the server.
        this.serverTimestamp = serverTimestamp;
    }
}
/**
 * Build the default record id schema, backed by UUID4 generation and the
 * RE_RECORD_ID validation pattern.
 *
 * @return {Object} An object exposing `generate()` and `validate(id)`.
 */
function createUUIDSchema() {
    return {
        generate() {
            return uuid4();
        },
        validate(id) {
            if (typeof id != "string") {
                return false;
            }
            return RE_RECORD_ID.test(id);
        },
    };
}
/**
 * Return a shallow copy of the record with its `_status` field set.
 */
function markStatus(record, status) {
    return { ...record, _status: status };
}
/**
 * Return a copy of the record marked as locally deleted.
 */
function markDeleted(record) {
    return markStatus(record, "deleted");
}
/**
 * Return a copy of the record marked as synchronized with the server.
 */
function markSynced(record) {
    return markStatus(record, "synced");
}
/**
 * Import a remote change into the local database.
 *
 * @param {IDBTransactionProxy} transaction The transaction handler.
 * @param {Object} remote The remote change object to import.
 * @param {Array<String>} localFields The list of fields that remain local.
 * @param {String} strategy The {@link Collection.strategy}.
 * @return {Object} `{type, data}` where `type` is one of the
 * SyncResultObject list names ("created", "updated", "deleted", "skipped",
 * "conflicts", "void").
 */
function importChange(transaction, remote, localFields, strategy) {
    const local = transaction.get(remote.id);
    if (!local) {
        // Not found locally but remote change is marked as deleted; skip to
        // avoid recreation.
        if (remote.deleted) {
            return { type: "skipped", data: remote };
        }
        const synced = markSynced(remote);
        transaction.create(synced);
        return { type: "created", data: synced };
    }
    // Apply remote changes on local record.
    const synced = Object.assign(Object.assign({}, local), markSynced(remote));
    // With pull only, we don't need to compare records since we override them.
    if (strategy === Collection.strategy.PULL_ONLY) {
        if (remote.deleted) {
            transaction.delete(remote.id);
            return { type: "deleted", data: local };
        }
        transaction.update(synced);
        return { type: "updated", data: { old: local, new: synced } };
    }
    // With other sync strategies, we detect conflicts,
    // by comparing local and remote, ignoring local fields.
    const isIdentical = recordsEqual(local, remote, localFields);
    // Detect or ignore conflicts if record has also been modified locally.
    if (local._status !== "synced") {
        // Locally deleted, unsynced: scheduled for remote deletion.
        if (local._status === "deleted") {
            return { type: "skipped", data: local };
        }
        if (isIdentical) {
            // If records are identical, import anyway, so we bump the
            // local last_modified value from the server and set record
            // status to "synced".
            transaction.update(synced);
            return { type: "updated", data: { old: local, new: synced } };
        }
        if (local.last_modified !== undefined &&
            local.last_modified === remote.last_modified) {
            // If our local version has the same last_modified as the remote
            // one, this represents an object that corresponds to a resolved
            // conflict. Our local version represents the final output, so
            // we keep that one. (No transaction operation to do.)
            // But if our last_modified is undefined,
            // that means we've created the same object locally as one on
            // the server, which *must* be a conflict.
            return { type: "void" };
        }
        return {
            type: "conflicts",
            data: { type: "incoming", local: local, remote: remote },
        };
    }
    // Local record was synced.
    if (remote.deleted) {
        transaction.delete(remote.id);
        return { type: "deleted", data: local };
    }
    // Import locally.
    transaction.update(synced);
    // if identical, simply exclude it from all SyncResultObject lists
    const type = isIdentical ? "void" : "updated";
    return { type, data: { old: local, new: synced } };
}
/**
* Abstracts a collection of records stored in the local database, providing
* CRUD operations and synchronization helpers.
*/
class Collection {
/**
* Constructor.
*
* Options:
* - `{BaseAdapter} adapter` The DB adapter (default: `IDB`)
*
* @param {String} bucket The bucket identifier.
* @param {String} name The collection name.
* @param {KintoBase} kinto The Kinto instance.
* @param {Object} options The options object.
*/
constructor(bucket, name, kinto, options = {}) {
this._bucket = bucket;
this._name = name;
this._lastModified = null;
const DBAdapter = options.adapter || IDB;
if (!DBAdapter) {
throw new Error("No adapter provided");
}
const db = new DBAdapter(`${bucket}/${name}`, options.adapterOptions);
if (!(db instanceof BaseAdapter)) {
throw new Error("Unsupported adapter.");
}
// public properties
/**
* The db adapter instance
* @type {BaseAdapter}
*/
this.db = db;
/**
* The KintoBase instance.
* @type {KintoBase}
*/
this.kinto = kinto;
/**
* The event emitter instance.
* @type {EventEmitter}
*/
this.events = options.events;
/**
* The IdSchema instance.
* @type {Object}
*/
this.idSchema = this._validateIdSchema(options.idSchema);
/**
* The list of remote transformers.
* @type {Array}
*/
this.remoteTransformers = this._validateRemoteTransformers(options.remoteTransformers);
/**
* The list of hooks.
* @type {Object}
*/
this.hooks = this._validateHooks(options.hooks);
/**
* The list of fields names that will remain local.
* @type {Array}
*/
this.localFields = options.localFields || [];
}
/**
* The HTTP client.
* @type {KintoClient}
*/
get api() {
return this.kinto.api;
}
/**
* The collection name.
* @type {String}
*/
get name() {
return this._name;
}
/**
* The bucket name.
* @type {String}
*/
get bucket() {
return this._bucket;
}
/**
* The last modified timestamp.
* @type {Number}
*/
get lastModified() {
return this._lastModified;
}
/**
 * Synchronization strategies. Available strategies are:
 *
 * - `MANUAL`: Conflicts will be reported in a dedicated array.
 * - `SERVER_WINS`: Conflicts are resolved using remote data.
 * - `CLIENT_WINS`: Conflicts are resolved using local data.
 * - `PULL_ONLY`: Only pull remote changes, without pushing local ones
 *   (NOTE(review): the exact semantics are enforced in sync(), not here —
 *   this entry was previously undocumented).
 *
 * @type {Object}
 */
static get strategy() {
return {
CLIENT_WINS: "client_wins",
SERVER_WINS: "server_wins",
PULL_ONLY: "pull_only",
MANUAL: "manual",
};
}
/**
* Validates an idSchema.
*
* @param {Object|undefined} idSchema
* @return {Object}
*/
_validateIdSchema(idSchema) {
if (typeof idSchema === "undefined") {
return createUUIDSchema();
}
if (typeof idSchema !== "object") {
throw new Error("idSchema must be an object.");
}
else if (typeof idSchema.generate !== "function") {
throw new Error("idSchema must provide a generate function.");
}
else if (typeof idSchema.validate !== "function") {
throw new Error("idSchema must provide a validate function.");
}
return idSchema;
}
/**
* Validates a list of remote transformers.
*
* @param {Array|undefined} remoteTransformers
* @return {Array}
*/
_validateRemoteTransformers(remoteTransformers) {
if (typeof remoteTransformers === "undefined") {
return [];
}
if (!Array.isArray(remoteTransformers)) {
throw new Error("remoteTransformers should be an array.");
}
return remoteTransformers.map(transformer => {
if (typeof transformer !== "object") {
throw new Error("A transformer must be an object.");
}
else if (typeof transformer.encode !== "function") {
throw new Error("A transformer must provide an encode function.");
}
else if (typeof transformer.decode !== "function") {
throw new Error("A transformer must provide a decode function.");
}
return transformer;
});
}
/**
* Validate the passed hook is correct.
*
* @param {Array|undefined} hook.
* @return {Array}
**/
_validateHook(hook) {
if (!Array.isArray(hook)) {
throw new Error("A hook definition should be an array of functions.");
}
return hook.map(fn => {
if (typeof fn !== "function") {
throw new Error("A hook definition should be an array of functions.");
}
return fn;
});
}
/**
* Validates a list of hooks.
*
* @param {Object|undefined} hooks
* @return {Object}
*/
_validateHooks(hooks) {
if (typeof hooks === "undefined") {
return {};
}
if (Array.isArray(hooks)) {
throw new Error("hooks should be an object, not an array.");
}
if (typeof hooks !== "object") {
throw new Error("hooks should be an object.");
}
const validatedHooks = {};
for (const hook in hooks) {
if (!AVAILABLE_HOOKS.includes(hook)) {
throw new Error("The hook should be one of " + AVAILABLE_HOOKS.join(", "));
}
validatedHooks[hook] = this._validateHook(hooks[hook]);
}
return validatedHooks;
}
/**
 * Deletes every record in the current collection and marks the collection
 * as never synced: stored metadata and the last-modified checkpoint are
 * both reset to null.
 *
 * @return {Promise} Resolves with `{data: [], permissions: {}}`.
 */
async clear() {
// Order matters: wipe records first, then reset the sync bookkeeping so a
// future sync starts from scratch.
await this.db.clear();
await this.db.saveMetadata(null);
await this.db.saveLastModified(null);
return { data: [], permissions: {} };
}
/**
* Encodes a record.
*
* @param {String} type Either "remote" or "local".
* @param {Object} record The record object to encode.
* @return {Promise}
*/
_encodeRecord(type, record) {
if (!this[`${type}Transformers`].length) {
return Promise.resolve(record);
}
return waterfall(this[`${type}Transformers`].map(transformer => {
return record => transformer.encode(record);
}), record);
}
/**
* Decodes a record.
*
* @param {String} type Either "remote" or "local".
* @param {Object} record The record object to decode.
* @return {Promise}
*/
_decodeRecord(type, record) {
if (!this[`${type}Transformers`].length) {
return Promise.resolve(record);
}
return waterfall(this[`${type}Transformers`].reverse().map(transformer => {
return record => transformer.decode(record);
}), record);
}
/**
* Adds a record to the local database, asserting that none
* already exist with this ID.
*
* Note: If either the `useRecordId` or `synced` options are true, then the
* record object must contain the id field to be validated. If none of these
* options are true, an id is generated using the current IdSchema; in this
* case, the record passed must not have an id.
*
* Options:
* - {Boolean} synced Sets record status to "synced" (default: `false`).
* - {Boolean} useRecordId Forces the `id` field from the record to be used,
* instead of one that is generated automatically
* (default: `false`).
*
* @param {Object} record
* @param {Object} options
* @return {Promise}
*/
create(record, options = { useRecordId: false, synced: false }) {
// Validate the record and its ID (if any), even though this
// validation is also done in the CollectionTransaction method,
// because we need to pass the ID to preloadIds.
const reject = msg => Promise.reject(new Error(msg));
if (typeof record !== "object") {
return reject("Record is not an object.");
}
if ((options.synced || options.useRecordId) &&
!Object.prototype.hasOwnProperty.call(record, "id")) {
return reject("Missing required Id; synced and useRecordId options require one");
}
if (!options.synced &&
!options.useRecordId &&
Object.prototype.hasOwnProperty.call(record, "id")) {
return reject("Extraneous Id; can't create a record having one set.");
}
const newRecord = Object.assign(Object.assign({}, record), { id: options.synced || options.useRecordId
? record.id
: this.idSchema.generate(record), _status: options.synced ? "synced" : "created" });
if (!this.idSchema.validate(newRecord.id)) {
return reject(`Invalid Id: ${newRecord.id}`);
}
return this.execute(txn => txn.create(newRecord), {
preloadIds: [newRecord.id],
}).catch(err => {
if (options.useRecordId) {
throw new Error("Couldn't create record. It may have been virtually deleted.");
}
throw err;
});
}
/**
* Like {@link CollectionTransaction#update}, but wrapped in its own transaction.
*
* Options:
* - {Boolean} synced: Sets record status to "synced" (default: false)
* - {Boolean} patch: Extends the existing record instead of overwriting it
* (default: false)
*
* @param {Object} record
* @param {Object} options
* @return {Promise}
*/
update(record, options = { synced: false, patch: false }) {
// Validate the record and its ID, even though this validation is
// also done in the CollectionTransaction method, because we need
// to pass the ID to preloadIds.
if (typeof record !== "object") {
return Promise.reject(new Error("Record is not an object."));
}
if (!Object.prototype.hasOwnProperty.call(record, "id")) {
return Promise.reject(new Error("Cannot update a record missing id."));
}
if (!this.idSchema.validate(record.id)) {
return Promise.reject(new Error(`Invalid Id: ${record.id}`));
}
return this.execute(txn => txn.update(record, options), {
preloadIds: [record.id],
});
}
/**
* Like {@link CollectionTransaction#upsert}, but wrapped in its own transaction.
*
* @param {Object} record
* @return {Promise}
*/
upsert(record) {
// Validate the record and its ID, even though this validation is
// also done in the CollectionTransaction method, because we need
// to pass the ID to preloadIds.
if (typeof record !== "object") {
return Promise.reject(new Error("Record is not an object."));
}
if (!Object.prototype.hasOwnProperty.call(record, "id")) {
return Promise.reject(new Error("Cannot update a record missing id."));
}
if (!this.idSchema.validate(record.id)) {
return Promise.reject(new Error(`Invalid Id: ${record.id}`));
}
return this.execute(txn => txn.upsert(record), { preloadIds: [record.id] });
}
/**
* Like {@link CollectionTransaction#get}, but wrapped in its own transaction.
*
* Options:
* - {Boolean} includeDeleted: Include virtually deleted records.
*
* @param {String} id
* @param {Object} options
* @return {Promise}
*/
get(id, options = { includeDeleted: false }) {
return this.execute(txn => txn.get(id, options), { preloadIds: [id] });
}
/**
* Like {@link CollectionTransaction#getAny}, but wrapped in its own transaction.
*
* @param {String} id
* @return {Promise}
*/
getAny(id) {
return this.execute(txn => txn.getAny(id), { preloadIds: [id] });
}
/**
* Same as {@link Collection#delete}, but wrapped in its own transaction.
*
* Options:
* - {Boolean} virtual: When set to `true`, doesn't actually delete the record,
* update its `_status` attribute to `deleted` instead (default: true)
*
* @param {String} id The record's Id.
* @param {Object} options The options object.
* @return {Promise}
*/
delete(id, options = { virtual: true }) {
return this.execute(transaction => {
return transaction.delete(id, options);
}, { preloadIds: [id] });
}
/**
* Same as {@link Collection#deleteAll}, but wrapped in its own transaction, execulding the parameter.
*
* @return {Promise}
*/
async deleteAll() {
const { data } = await this.list({}, { includeDeleted: false });
const recordIds = data.map(record => record.id);
return this.execute(transaction => {
return transaction.deleteAll(recordIds);
}, { preloadIds: recordIds });
}
/**
* The same as {@link CollectionTransaction#deleteAny}, but wrapped
* in its own transaction.
*
* @param {String} id The record's Id.
* @return {Promise}
*/
deleteAny(id) {
return this.execute(txn => txn.deleteAny(id), { preloadIds: [id] });
}
/**
* Lists records from the local database.
*
* Params:
* - {Object} filters Filter the results (default: `{}`).
* - {String} order The order to apply (default: `-last_modified`).
*
* Options:
* - {Boolean} includeDeleted: Include virtually deleted records.
*
* @param {Object} params The filters and order to apply to the results.
* @param {Object} options The options object.
* @return {Promise}
*/
async list(params = {}, options = { includeDeleted: false }) {
params = Object.assign({ order: "-last_modified", filters: {} }, params);
const results = await this.db.list(params);
let data = results;
if (!options.includeDeleted) {
data = results.filter(record => record._status !== "deleted");
}
return { data, permissions: {} };
}
/**
 * Imports remote changes into the local database.
 * This method is in charge of detecting the conflicts, and resolve them
 * according to the specified strategy.
 *
 * Changes are applied in chunks of IMPORT_CHUNK_SIZE records, each chunk in
 * its own adapter transaction. Any failure is recorded in the result's
 * `errors` list rather than rethrown.
 *
 * @param {SyncResultObject} syncResultObject The sync result object.
 * @param {Array} decodedChanges The list of changes to import in the local database.
 * @param {String} strategy The {@link Collection.strategy} (default: MANUAL)
 * @return {Promise} Resolves with the updated syncResultObject.
 */
async importChanges(syncResultObject, decodedChanges, strategy = Collection.strategy.MANUAL) {
// Retrieve records matching change ids.
try {
for (let i = 0; i < decodedChanges.length; i += IMPORT_CHUNK_SIZE) {
const slice = decodedChanges.slice(i, i + IMPORT_CHUNK_SIZE);
const { imports, resolved } = await this.db.execute(transaction => {
const imports = slice.map(remote => {
// Store remote change into local database.
return importChange(transaction, remote, this.localFields, strategy);
});
// Conflicts detected during import are resolved in the same transaction
// (no-op for the MANUAL strategy, which just reports them).
const conflicts = imports
.filter(i => i.type === "conflicts")
.map(i => i.data);
const resolved = this._handleConflicts(transaction, conflicts, strategy);
return { imports, resolved };
}, { preload: slice.map(record => record.id) });
// Lists of created/updated/deleted records
imports.forEach(({ type, data }) => syncResultObject.add(type, data));
// Automatically resolved conflicts (if not manual)
if (resolved.length > 0) {
syncResultObject.reset("conflicts").add("resolved", resolved);
}
}
}
catch (err) {
const data = {
type: "incoming",
message: err.message,
stack: err.stack,
};
// XXX one error of the whole transaction instead of per atomic op
syncResultObject.add("errors", data);
}
return syncResultObject;
}
/**
 * Imports the responses of pushed changes into the local database.
 * Basically it stores the timestamp assigned by the server into the local
 * database, marking the pushed records as synced and dropping the records
 * the server reported as deleted.
 *
 * @param {SyncResultObject} syncResultObject The sync result object.
 * @param {Array} toApplyLocally The list of changes to import in the local database.
 * @param {Array} conflicts The list of conflicts that have to be resolved.
 * @param {String} strategy The {@link Collection.strategy} (default: MANUAL).
 * @return {Promise} Resolves with the updated syncResultObject.
 */
async _applyPushedResults(syncResultObject, toApplyLocally, conflicts, strategy = Collection.strategy.MANUAL) {
const toDeleteLocally = toApplyLocally.filter(r => r.deleted);
const toUpdateLocally = toApplyLocally.filter(r => !r.deleted);
// All local writes happen atomically in a single adapter transaction.
const { published, resolved } = await this.db.execute(transaction => {
const updated = toUpdateLocally.map(record => {
const synced = markSynced(record);
transaction.update(synced);
return synced;
});
const deleted = toDeleteLocally.map(record => {
transaction.delete(record.id);
// Amend result data with the deleted attribute set
return { id: record.id, deleted: true };
});
const published = updated.concat(deleted);
// Handle conflicts, if any
const resolved = this._handleConflicts(transaction, conflicts, strategy);
return { published, resolved };
});
syncResultObject.add("published", published);
// Resolutions replace both the pending conflicts and any previous
// "resolved" entries in the result object.
if (resolved.length > 0) {
syncResultObject
.reset("conflicts")
.reset("resolved")
.add("resolved", resolved);
}
return syncResultObject;
}
/**
 * Handles synchronization conflicts according to specified strategy.
 *
 * With the MANUAL strategy nothing is resolved and an empty array is
 * returned; otherwise each conflict is resolved inside the given
 * transaction by keeping either the local (CLIENT_WINS) or the remote
 * version.
 *
 * @param {Object} transaction The adapter transaction to apply resolutions in.
 * @param {Array} conflicts The list of {local, remote} conflict objects.
 * @param {String} strategy The {@link Collection.strategy}.
 * @return {Array<Object>} The resolved conflicts, as an
 * array of {rejected, accepted, id, _status} objects (synchronous, not
 * a Promise: it runs inside an already-open transaction).
 */
_handleConflicts(transaction, conflicts, strategy) {
if (strategy === Collection.strategy.MANUAL) {
return [];
}
return conflicts.map(conflict => {
const resolution = strategy === Collection.strategy.CLIENT_WINS
? conflict.local
: conflict.remote;
const rejected = strategy === Collection.strategy.CLIENT_WINS
? conflict.remote
: conflict.local;
let accepted, status, id;
if (resolution === null) {
// We "resolved" with the server-side deletion. Delete locally.
// This only happens during SERVER_WINS because the local
// version of a record can never be null.
// We can get "null" from the remote side if we got a conflict
// and there is no remote version available; see kinto-http.js
// batch.js:aggregate.
transaction.delete(conflict.local.id);
accepted = null;
// The record was deleted, but that status is "synced" with
// the server, so we don't need to push the change.
status = "synced";
id = conflict.local.id;
}
else {
const updated = this._resolveRaw(conflict, resolution);
transaction.update(updated);
accepted = updated;
status = updated._status;
id = updated.id;
}
return { rejected, accepted, id, _status: status };
});
}
/**
* Execute a bunch of operations in a transaction.
*
* This transaction should be atomic -- either all of its operations
* will succeed, or none will.
*
* The argument to this function is itself a function which will be
* called with a {@link CollectionTransaction}. Collection methods
* are available on this transaction, but instead of returning
* promises, they are synchronous. execute() returns a Promise whose
* value will be the return value of the provided function.
*
* Most operations will require access to the record itself, which
* must be preloaded by passing its ID in the preloadIds option.
*
* Options:
* - {Array} preloadIds: list of IDs to fetch at the beginning of
* the transaction
*
* @return {Promise} Resolves with the result of the given function
* when the transaction commits.
*/
execute(doOperations, { preloadIds = [] } = {}) {
for (const id of preloadIds) {
if (!this.idSchema.validate(id)) {
return Promise.reject(Error(`Invalid Id: ${id}`));
}
}
return this.db.execute(transaction => {
const txn = new CollectionTransaction(this, transaction);
const result = doOperations(txn);
txn.emitEvents();
return result;
}, { preload: preloadIds });
}
/**
* Resets the local records as if they were never synced; existing records are
* marked as newly created, deleted records are dropped.
*
* A next call to {@link Collection.sync} will thus republish the whole
* content of the local collection to the server.
*
* @return {Promise} Resolves with the number of processed records.
*/
async resetSyncStatus() {
const unsynced = await this.list({ filters: { _status: ["deleted", "synced"] }, order: "" }, { includeDeleted: true });
await this.db.execute(transaction => {
unsynced.data.forEach(record => {
if (record._status === "deleted") {
// Garbage collect deleted records.
transaction.delete(record.id);
}
else {
// Records that were synced become «created».
transaction.update(Object.assign(Object.assign({}, record), { last_modified: undefined, _status: "created" }));
}
});
});
this._lastModified = null;
await this.db.saveLastModified(null);
return unsynced.data.length;
}
/**
* Returns an object containing two lists:
*
* - `toDelete`: unsynced deleted records we can safely delete;
* - `toSync`: local updates to send to the server.
*
* @return {Promise}
*/
async gatherLocalChanges() {
const unsynced = await this.list({
filters: { _status: ["created", "updated"] },
order: "",
});
const deleted = await this.list({ filters: { _status: "deleted" }, order: "" }, { includeDeleted: true });
return await Promise.all(unsynced.data
.concat(deleted.data)
.map(this._encodeRecord.bind(this, "remote")));
}
/**
* Fetch remote changes, import them to the local database, and handle
* conflicts according to `options.strategy`. Then, updates the passed
* {@link SyncResultObject} with import results.
*
* Options:
* - {String} strategy: The selected sync strategy.
* - {String} expectedTimestamp: A timestamp to use as a "cache busting" query parameter.
* - {Array<String>} exclude: A list of record ids to exclude from pull.
* - {Object} headers: The HTTP headers to use in the request.
* - {int} retry: The number of retries to do if the HTTP request fails.
* - {int} lastModified: The timestamp to use in `?_since` query.
*
* @param {KintoClient.Collection} client Kinto client Collection instance.
* @param {SyncResultObject} syncResultObject The sync result object.
* @param {Object} options The options object.
* @return {Promise}
*/
--> --------------------
--> maximum size reached
--> --------------------