Mirror of https://github.com/vector-im/hydrogen-web.git (synced 2024-12-23 11:35:04 +01:00)

Commit 60744a1705: Merge branch 'master' into DanilaFe/backfill-changes-2
@@ -1,6 +1,6 @@
 {
   "name": "hydrogen-web",
-  "version": "0.2.11",
+  "version": "0.2.12",
   "description": "A javascript matrix client prototype, trying to minize RAM usage by offloading as much as possible to IndexedDB",
   "main": "index.js",
   "directories": {

prototypes/idb-continue-on-constrainterror.html — new file (100 lines)
@@ -0,0 +1,100 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+</head>
+<body>
+<script type="text/javascript">
+    class IDBError extends Error {
+        constructor(errorEvent) {
+            const request = errorEvent.target;
+            const {error} = request;
+            super(error.message);
+            this.name = error.name;
+            this.errorEvent = errorEvent;
+        }
+
+        preventAbort() {
+            this.errorEvent.preventDefault();
+        }
+    }
+
+    class AbortError extends Error {
+        get name() { return "AbortError"; }
+    }
+
+    function reqAsPromise(req) {
+        return new Promise(function (resolve, reject) {
+            req.onsuccess = function(e) {
+                resolve(e.target.result);
+            };
+            req.onerror = function(e) {
+                reject(new IDBError(e));
+            };
+        });
+    }
+
+    function txnAsPromise(txn) {
+        return new Promise((resolve, reject) => {
+            txn.addEventListener("complete", () => resolve());
+            txn.addEventListener("abort", event => {
+                reject(new AbortError());
+            });
+        });
+    }
+
+    function Storage(databaseName) {
+        this._databaseName = databaseName;
+        this._database = null;
+    }
+
+    Storage.prototype = {
+        open: function() {
+            const req = window.indexedDB.open(this._databaseName);
+            const self = this;
+            req.onupgradeneeded = function(ev) {
+                const db = ev.target.result;
+                const oldVersion = ev.oldVersion;
+                self._createStores(db, oldVersion);
+            };
+            return reqAsPromise(req).then(function() {
+                self._database = req.result;
+            });
+        },
+        readWriteTxn: function(storeName) {
+            return this._database.transaction([storeName], "readwrite");
+        },
+        readTxn: function(storeName) {
+            return this._database.transaction([storeName], "readonly");
+        },
+        _createStores: function(db) {
+            db.createObjectStore("foos", {keyPath: "id"});
+        }
+    };
+
+    async function main() {
+        const storage = new Storage("idb-continue-on-constrainterror");
+        await storage.open();
+        const txn1 = storage.readWriteTxn("foos");
+        const store = txn1.objectStore("foos");
+        await reqAsPromise(store.clear());
+        console.log("first foo read back", await reqAsPromise(store.get(5)));
+        await reqAsPromise(store.add({id: 5, name: "Mr Foo"}));
+        try {
+            await reqAsPromise(store.add({id: 5, name: "bar"}));
+        } catch (err) {
+            console.log("we did get an error", err.name);
+            err.preventAbort();
+        }
+        await txnAsPromise(txn1);
+
+        const txn2 = storage.readTxn("foos");
+        const store2 = txn2.objectStore("foos");
+        console.log("got name from second txn", await reqAsPromise(store2.get(5)));
+    }
+    main().catch(err => console.error(err));
+</script>
+</body>
+</html>
+
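The prototype above hinges on one standard IndexedDB behaviour: calling preventDefault() on a request's "error" event keeps a ConstraintError from aborting the surrounding transaction, so requests queued later in the same transaction still run. A minimal standalone sketch of just that technique (the "foos" store, its keyPath "id", and the helper name are illustrative only; it assumes a browser environment and an already-open IDBDatabase `db`):

    // Sketch, not part of the commit: swallow a duplicate-key error so the
    // enclosing transaction stays alive. Resolves true when the value was
    // written, false when the key already existed.
    function tryAddFoo(db, value) {
        return new Promise((resolve, reject) => {
            const txn = db.transaction(["foos"], "readwrite");
            const req = txn.objectStore("foos").add(value);
            req.onsuccess = () => resolve(true);
            req.onerror = event => {
                if (req.error && req.error.name === "ConstraintError") {
                    event.preventDefault(); // don't let the error abort the transaction
                    resolve(false);
                } else {
                    reject(req.error);
                }
            };
        });
    }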
@@ -247,8 +247,8 @@ export function tests() {
             storage.storeNames.timelineFragments
         ]);
         txn.timelineFragments.add({id: 1, roomId});
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event: messageEvent, roomId});
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 3, event: myReactionEvent, roomId});
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event: messageEvent, roomId}, new NullLogItem());
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 3, event: myReactionEvent, roomId}, new NullLogItem());
         await relationWriter.writeRelation(myReactionEntry, txn, new NullLogItem());
         await txn.complete();
         // 2. setup queue & timeline
@@ -309,7 +309,7 @@ export function tests() {
             storage.storeNames.timelineFragments
         ]);
         txn.timelineFragments.add({id: 1, roomId});
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event: messageEvent, roomId});
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event: messageEvent, roomId}, new NullLogItem());
         await txn.complete();
         // 2. setup queue & timeline
         const queue = new SendQueue({roomId, storage, hsApi: new MockHomeServer().api});
@@ -21,7 +21,7 @@ import {
     reqAsPromise,
     iterateCursor,
     fetchResults,
-} from "../matrix/storage/idb/utils.js";
+} from "../matrix/storage/idb/utils";
 import {BaseLogger} from "./BaseLogger.js";
 
 export class IDBLogger extends BaseLogger {
@@ -20,7 +20,7 @@ function noop () {}
 
 export class NullLogger {
     constructor() {
-        this.item = new NullLogItem();
+        this.item = new NullLogItem(this);
     }
 
     log() {}
@@ -51,6 +51,10 @@ export class NullLogger {
 }
 
 export class NullLogItem {
+    constructor(logger) {
+        this.logger = logger;
+    }
+
     wrap(_, callback) {
         return callback(this);
     }
@@ -447,10 +447,10 @@ export function tests() {
         // 1. put event and reaction into storage
         const storage = await createMockStorage();
         const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-        txn.timelineEvents.insert({
+        txn.timelineEvents.tryInsert({
             event: withContent(createAnnotation(messageId, "👋"), createEvent("m.reaction", reactionId, bob)),
             fragmentId: 1, eventIndex: 1, roomId
-        });
+        }, new NullLogItem());
         txn.timelineRelations.add(roomId, messageId, ANNOTATION_RELATION_TYPE, reactionId);
         await txn.complete();
         // 2. setup the timeline
@@ -543,10 +543,10 @@ export function tests() {
         // 1. put reaction in storage
         const storage = await createMockStorage();
         const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-        txn.timelineEvents.insert({
+        txn.timelineEvents.tryInsert({
             event: withContent(createAnnotation(messageId, "👋"), createEvent("m.reaction", reactionId, bob)),
             fragmentId: 1, eventIndex: 3, roomId
-        });
+        }, new NullLogItem());
         await txn.complete();
         // 2. setup timeline
         const pendingEvents = new ObservableArray();
@@ -105,9 +105,10 @@ export class GapWriter {
             if (updatedRelationTargetEntries) {
                 updatedEntries.push(...updatedRelationTargetEntries);
             }
-            txn.timelineEvents.insert(eventStorageEntry);
+            if (await txn.timelineEvents.tryInsert(eventStorageEntry, log)) {
             const eventEntry = new EventEntry(eventStorageEntry, this._fragmentIdComparer);
             directionalAppend(entries, eventEntry, direction);
+            }
         }
         return {entries, updatedEntries};
     }
@@ -293,9 +294,9 @@ export function tests() {
         };
     }
 
-    async function backfillAndWrite(mocks, fragmentEntry) {
+    async function backfillAndWrite(mocks, fragmentEntry, limit) {
         const {txn, timelineMock, gapWriter} = mocks;
-        const messageResponse = timelineMock.messages(fragmentEntry.token, undefined, fragmentEntry.direction.asApiString());
+        const messageResponse = timelineMock.messages(fragmentEntry.token, undefined, fragmentEntry.direction.asApiString(), limit);
         await gapWriter.writeFragmentFill(fragmentEntry, messageResponse, txn, logger);
     }
 
@@ -333,7 +334,7 @@ export function tests() {
         const { timelineMock } = mocks;
         timelineMock.append(30);
         const {fragmentEntry} = await syncAndWrite(mocks);
-        await backfillAndWrite(mocks, fragmentEntry);
+        await backfillAndWrite(mocks, fragmentEntry, 10);
         const events = await allFragmentEvents(mocks, fragmentEntry.fragmentId);
         assert.deepEqual(events.map(e => e.event_id), eventIds(10, 30));
         await mocks.txn.complete();
@@ -346,8 +347,8 @@ export function tests() {
         timelineMock.append(15);
         const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
         // Only the second backfill (in which all events overlap) fills the gap.
-        await backfillAndWrite(mocks, secondFragmentEntry);
-        await backfillAndWrite(mocks, await updatedFragmentEntry(mocks, secondFragmentEntry));
+        await backfillAndWrite(mocks, secondFragmentEntry, 10);
+        await backfillAndWrite(mocks, await updatedFragmentEntry(mocks, secondFragmentEntry), 10);
 
         const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
         const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
@@ -365,7 +366,7 @@ export function tests() {
         const {syncResponse, fragmentEntry: firstFragmentEntry} = await syncAndWrite(mocks, { limit: 10 });
         timelineMock.append(20);
         const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
-        await backfillAndWrite(mocks, secondFragmentEntry);
+        await backfillAndWrite(mocks, secondFragmentEntry, 10);
 
         const firstFragment = await fetchFragment(mocks, firstFragmentEntry.fragmentId);
         const secondFragment = await fetchFragment(mocks, secondFragmentEntry.fragmentId);
@@ -384,7 +385,7 @@ export function tests() {
         // Mess with the saved token to receive old events in backfill
         fragmentEntry.token = syncResponse.next_batch;
         txn.timelineFragments.update(fragmentEntry.fragment);
-        await backfillAndWrite(mocks, fragmentEntry);
+        await backfillAndWrite(mocks, fragmentEntry, 10);
 
         const fragment = await fetchFragment(mocks, fragmentEntry.fragmentId);
         assert.notEqual(fragment.nextId, fragment.id);
@@ -400,8 +401,8 @@ export function tests() {
         const {fragmentEntry: secondFragmentEntry} = await syncAndWrite(mocks, { previous: syncResponse, limit: 10 });
         timelineMock.insertAfter(eventId(9), 5);
         // Only the second backfill (in which all events overlap) fills the gap.
-        await backfillAndWrite(mocks, secondFragmentEntry);
-        await backfillAndWrite(mocks, await updatedFragmentEntry(mocks, secondFragmentEntry));
+        await backfillAndWrite(mocks, secondFragmentEntry, 10);
+        await backfillAndWrite(mocks, await updatedFragmentEntry(mocks, secondFragmentEntry), 10);
 
         const firstEvents = await allFragmentEvents(mocks, firstFragmentEntry.fragmentId);
         assert.deepEqual(firstEvents.map(e => e.event_id), eventIds(0, 10));
@@ -275,7 +275,7 @@ export function tests() {
 
         const storage = await createMockStorage();
         const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event, roomId});
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event, roomId}, new NullLogItem());
         const updatedEntries = await relationWriter.writeRelation(redactionEntry, txn, new NullLogItem());
         await txn.complete();
 
@@ -300,7 +300,7 @@ export function tests() {
 
         const storage = await createMockStorage();
         const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event, roomId});
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event, roomId}, new NullLogItem());
         const updatedEntries = await relationWriter.writeRelation(reactionEntry, txn, new NullLogItem());
         await txn.complete();
 
@@ -329,7 +329,7 @@ export function tests() {
 
         const storage = await createMockStorage();
         const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event, roomId});
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event, roomId}, new NullLogItem());
         await relationWriter.writeRelation(reaction1Entry, txn, new NullLogItem());
         const updatedEntries = await relationWriter.writeRelation(reaction2Entry, txn, new NullLogItem());
         await txn.complete();
@@ -358,10 +358,10 @@ export function tests() {
 
         const storage = await createMockStorage();
         const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents, storage.storeNames.timelineRelations]);
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 2, event, roomId});
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 3, event: myReaction, roomId});
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 2, event, roomId}, new NullLogItem());
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 3, event: myReaction, roomId}, new NullLogItem());
         await relationWriter.writeRelation(myReactionEntry, txn, new NullLogItem());
-        txn.timelineEvents.insert({fragmentId: 1, eventIndex: 4, event: bobReaction, roomId});
+        txn.timelineEvents.tryInsert({fragmentId: 1, eventIndex: 4, event: bobReaction, roomId}, new NullLogItem());
         await relationWriter.writeRelation(bobReactionEntry, txn, new NullLogItem());
         const updatedEntries = await relationWriter.writeRelation(myReactionRedactionEntry, txn, new NullLogItem());
         await txn.complete();
@@ -162,7 +162,10 @@ export class SyncWriter {
                 storageEntry.displayName = member.displayName;
                 storageEntry.avatarUrl = member.avatarUrl;
             }
-            txn.timelineEvents.insert(storageEntry, log);
+            const couldInsert = await txn.timelineEvents.tryInsert(storageEntry, log);
+            if (!couldInsert) {
+                continue;
+            }
             const entry = new EventEntry(storageEntry, this._fragmentIdComparer);
             entries.push(entry);
             const updatedRelationTargetEntries = await this._relationWriter.writeRelation(entry, txn, log);
@@ -252,3 +255,35 @@ export class SyncWriter {
         return this._lastLiveKey;
     }
 }
+
+import {createMockStorage} from "../../../../mocks/Storage.js";
+import {createEvent, withTextBody} from "../../../../mocks/event.js";
+import {Instance as nullLogger} from "../../../../logging/NullLogger.js";
+export function tests() {
+    const roomId = "!abc:hs.tld";
+    return {
+        "calling timelineEvents.tryInsert with the same event id a second time fails": async assert => {
+            const storage = await createMockStorage();
+            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents]);
+            const event = withTextBody("hello!", createEvent("m.room.message", "$abc", "@alice:hs.tld"));
+            const entry1 = createEventEntry(EventKey.defaultLiveKey, roomId, event);
+            assert.equal(await txn.timelineEvents.tryInsert(entry1, nullLogger.item), true);
+            const entry2 = createEventEntry(EventKey.defaultLiveKey.nextKey(), roomId, event);
+            assert.equal(await txn.timelineEvents.tryInsert(entry2, nullLogger.item), false);
+            // fake-indexeddb still aborts the transaction when preventDefault is called by tryInsert, so don't await as it will abort
+            // await txn.complete();
+        },
+        "calling timelineEvents.tryInsert with the same event key a second time fails": async assert => {
+            const storage = await createMockStorage();
+            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents]);
+            const event1 = withTextBody("hello!", createEvent("m.room.message", "$abc", "@alice:hs.tld"));
+            const entry1 = createEventEntry(EventKey.defaultLiveKey, roomId, event1);
+            assert.equal(await txn.timelineEvents.tryInsert(entry1, nullLogger.item), true);
+            const event2 = withTextBody("hello!", createEvent("m.room.message", "$def", "@alice:hs.tld"));
+            const entry2 = createEventEntry(EventKey.defaultLiveKey, roomId, event2);
+            assert.equal(await txn.timelineEvents.tryInsert(entry2, nullLogger.item), false);
+            // fake-indexeddb still aborts the transaction when preventDefault is called by tryInsert, so don't await as it will abort
+            // await txn.complete();
+        },
+    }
+}
@@ -15,9 +15,9 @@ limitations under the License.
 */
 
 import {QueryTarget, IDBQuery} from "./QueryTarget";
-import {IDBRequestAttemptError} from "./error";
+import {IDBRequestError, IDBRequestAttemptError} from "./error";
 import {reqAsPromise} from "./utils";
-import {Transaction} from "./Transaction";
+import {Transaction, IDBKey} from "./Transaction";
 import {LogItem} from "../../../logging/LogItem.js";
 
 const LOG_REQUESTS = false;
@@ -126,6 +126,10 @@ class QueryTargetWrapper<T> {
             throw new IDBRequestAttemptError("index", this._qt, err, [name]);
         }
     }
+
+    get indexNames(): string[] {
+        return Array.from(this._qtStore.indexNames);
+    }
 }
 
 export class Store<T> extends QueryTarget<T> {
@@ -162,30 +166,62 @@ export class Store<T> extends QueryTarget<T> {
         this._prepareErrorLog(request, log, "add", undefined, value);
     }
 
+    async tryAdd(value: T, log: LogItem): Promise<boolean> {
+        try {
+            await reqAsPromise(this._idbStore.add(value));
+            return true;
+        } catch (err) {
+            if (err instanceof IDBRequestError) {
+                log.log({l: "could not write", id: this._getKeys(value), e: err}, log.level.Warn);
+                err.preventTransactionAbort();
+                return false;
+            } else {
+                throw err;
+            }
+        }
+    }
+
     delete(keyOrKeyRange: IDBValidKey | IDBKeyRange, log?: LogItem): void {
         // ok to not monitor result of request, see comment in `put`.
         const request = this._idbStore.delete(keyOrKeyRange);
         this._prepareErrorLog(request, log, "delete", keyOrKeyRange, undefined);
     }
 
-    private _prepareErrorLog(request: IDBRequest, log: LogItem | undefined, operationName: string, key: IDBValidKey | IDBKeyRange | undefined, value: T | undefined) {
+    private _prepareErrorLog(request: IDBRequest, log: LogItem | undefined, operationName: string, key: IDBKey | undefined, value: T | undefined) {
         if (log) {
             log.ensureRefId();
         }
         reqAsPromise(request).catch(err => {
-            try {
-                if (!key && value) {
-                    key = this._getKey(value);
-                }
-            } catch {
-                key = "getKey failed";
+            let keys : IDBKey[] | undefined = undefined;
+            if (value) {
+                keys = this._getKeys(value);
+            } else if (key) {
+                keys = [key];
             }
-            this._transaction.addWriteError(err, log, operationName, key);
+            this._transaction.addWriteError(err, log, operationName, keys);
         });
     }
 
-    private _getKey(value: T): IDBValidKey {
+    private _getKeys(value: T): IDBValidKey[] {
+        const keys: IDBValidKey[] = [];
         const {keyPath} = this._idbStore;
+        try {
+            keys.push(this._readKeyPath(value, keyPath));
+        } catch (err) {
+            console.warn("could not read keyPath", keyPath);
+        }
+        for (const indexName of this._idbStore.indexNames) {
+            try {
+                const index = this._idbStore.index(indexName);
+                keys.push(this._readKeyPath(value, index.keyPath));
+            } catch (err) {
+                console.warn("could not read index", indexName);
+            }
+        }
+        return keys;
+    }
+
+    private _readKeyPath(value: T, keyPath: string[] | string): IDBValidKey {
         if (Array.isArray(keyPath)) {
             let field: any = value;
             for (const part of keyPath) {
@@ -198,6 +234,6 @@ export class Store<T> extends QueryTarget<T> {
             return field as IDBValidKey;
         } else {
             return value[keyPath] as IDBValidKey;
         }
     }
 }
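Compared to add(), which only schedules the request and routes any failure into the transaction's write-error log, the new tryAdd() awaits the request and turns an IDBRequestError (typically a duplicate key) into a plain false after calling preventTransactionAbort(). A hedged usage sketch — the store, log and value are assumed to come from an open read/write transaction elsewhere, and writeIfNew is not a function from the codebase:

    // Sketch only: tolerate a failed add (usually an already-existing key)
    // without aborting the enclosing transaction.
    async function writeIfNew(store, log, value) {
        const written = await store.tryAdd(value, log);
        if (!written) {
            // the error's default action (aborting the transaction) was prevented
            // by preventTransactionAbort(), so other queued writes still commit
        }
        return written;
    }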
@@ -39,12 +39,14 @@ import {AccountDataStore} from "./stores/AccountDataStore";
 import {LogItem} from "../../../logging/LogItem.js";
 import {BaseLogger} from "../../../logging/BaseLogger.js";
 
+export type IDBKey = IDBValidKey | IDBKeyRange;
+
 class WriteErrorInfo {
     constructor(
         public readonly error: StorageError,
         public readonly refItem: LogItem | undefined,
         public readonly operationName: string,
-        public readonly key: IDBValidKey | IDBKeyRange | undefined,
+        public readonly keys: IDBKey[] | undefined,
     ) {}
 }
 
@@ -196,10 +198,10 @@ export class Transaction {
         }
     }
 
-    addWriteError(error: StorageError, refItem: LogItem | undefined, operationName: string, key: IDBValidKey | IDBKeyRange | undefined) {
+    addWriteError(error: StorageError, refItem: LogItem | undefined, operationName: string, keys: IDBKey[] | undefined) {
         // don't log subsequent `AbortError`s
         if (error.errcode !== "AbortError" || this._writeErrors.length === 0) {
-            this._writeErrors.push(new WriteErrorInfo(error, refItem, operationName, key));
+            this._writeErrors.push(new WriteErrorInfo(error, refItem, operationName, keys));
         }
     }
 
@@ -210,7 +212,7 @@ export class Transaction {
             errorGroupItem.set("allowedStoreNames", this._allowedStoreNames);
         }
         for (const info of this._writeErrors) {
-            errorGroupItem.wrap({l: info.operationName, id: info.key}, item => {
+            errorGroupItem.wrap({l: info.operationName, id: info.keys}, item => {
                 if (info.refItem) {
                     item.refDetached(info.refItem);
                 }
@@ -57,10 +57,18 @@ export class IDBError extends StorageError {
 }
 
 export class IDBRequestError extends IDBError {
-    constructor(request: IDBRequest, message: string = "IDBRequest failed") {
+    private errorEvent: Event;
+
+    constructor(errorEvent: Event) {
+        const request = errorEvent.target as IDBRequest;
         const source = request.source;
         const cause = request.error;
-        super(message, source, cause);
+        super("IDBRequest failed", source, cause);
+        this.errorEvent = errorEvent;
+    }
+
+    preventTransactionAbort() {
+        this.errorEvent.preventDefault();
     }
 }
 
@@ -253,15 +253,17 @@ export class TimelineEventStore {
         return occuringEventIds;
     }
 
-    /** Inserts a new entry into the store. The combination of roomId and eventKey should not exist yet, or an error is thrown.
-     * @param entry the entry to insert
-     * @return nothing. To wait for the operation to finish, await the transaction it's part of.
-     * @throws {StorageError} ...
+    /** Inserts a new entry into the store.
+     *
+     * If the event already exists in the store (either the eventKey or the event id
+     * are already known for the given roomId), this operation has no effect.
+     *
+     * Returns if the event was not yet known and the entry was written.
      */
-    insert(entry: TimelineEventEntry, log: LogItem): void {
+    tryInsert(entry: TimelineEventEntry, log: LogItem): Promise<boolean> {
         (entry as TimelineEventStorageEntry).key = encodeKey(entry.roomId, entry.fragmentId, entry.eventIndex);
         (entry as TimelineEventStorageEntry).eventIdKey = encodeEventIdKey(entry.roomId, entry.event.event_id);
-        this._timelineStore.add(entry as TimelineEventStorageEntry, log);
+        return this._timelineStore.tryAdd(entry as TimelineEventStorageEntry, log);
     }
 
     /** Updates the entry into the store with the given [roomId, eventKey] combination.
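The boolean that tryInsert now returns is what the call sites elsewhere in this commit branch on: GapWriter only builds an EventEntry when the write actually happened, and SyncWriter continues to the next event otherwise. A condensed sketch of that consuming pattern (txn, storageEntries and log are assumed to be set up by the caller; the helper name is illustrative):

    // Sketch: only keep entries for events that were actually written;
    // an event whose id or key is already stored is silently skipped.
    async function writeNewEvents(txn, storageEntries, log) {
        const written = [];
        for (const storageEntry of storageEntries) {
            if (await txn.timelineEvents.tryInsert(storageEntry, log)) {
                written.push(storageEntry);
            }
        }
        return written;
    }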
@@ -97,7 +97,7 @@ export function reqAsPromise<T>(req: IDBRequest<T>): Promise<T> {
             needsSyncPromise && Promise._flush && Promise._flush();
         });
         req.addEventListener("error", event => {
-            const error = new IDBRequestError(event.target as IDBRequest<T>);
+            const error = new IDBRequestError(event);
             reject(error);
             // @ts-ignore
             needsSyncPromise && Promise._flush && Promise._flush();
@@ -143,8 +143,8 @@ type CursorIterator<T, I extends IDBCursor> = (value: I extends IDBCursorWithVal
 export function iterateCursor<T, I extends IDBCursor = IDBCursorWithValue>(cursorRequest: IDBRequest<I | null>, processValue: CursorIterator<T, I>): Promise<boolean> {
     // TODO: does cursor already have a value here??
     return new Promise<boolean>((resolve, reject) => {
-        cursorRequest.onerror = () => {
-            reject(new IDBRequestError(cursorRequest));
+        cursorRequest.onerror = event => {
+            reject(new IDBRequestError(event));
             // @ts-ignore
             needsSyncPromise && Promise._flush && Promise._flush();
         };
@@ -16,8 +16,8 @@ limitations under the License.
 
 import {FDBFactory, FDBKeyRange} from "../../lib/fake-indexeddb/index.js";
 import {StorageFactory} from "../matrix/storage/idb/StorageFactory";
-import {NullLogItem} from "../logging/NullLogger.js";
+import {Instance as nullLogger} from "../logging/NullLogger.js";
 
 export function createMockStorage() {
-    return new StorageFactory(null, new FDBFactory(), FDBKeyRange).create(1, new NullLogItem());
+    return new StorageFactory(null, new FDBFactory(), FDBKeyRange).create(1, nullLogger.item);
 }
@@ -7,6 +7,7 @@ export function eventId(i: number): string {
     return `$event${i}`;
 }
 
+/** `from` is included, `to` is excluded */
 export function eventIds(from: number, to: number): string[] {
     return [...Array(to-from).keys()].map(i => eventId(i + from));
 }
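The new doc comment pins down the half-open range eventIds produces; a quick worked example (the values follow directly from eventId above):

    // `from` is included, `to` is excluded:
    eventIds(2, 5);   // ["$event2", "$event3", "$event4"]
    eventIds(10, 30); // 20 ids, "$event10" through "$event29", as asserted in the backfill tests above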
@@ -66,7 +66,7 @@ export class BaseMessageView extends TemplateView {
         let reactionsView = null;
         t.mapSideEffect(vm => vm.reactions, reactions => {
             if (reactions && this._interactive && !reactionsView) {
-                reactionsView = new ReactionsView(vm.reactions);
+                reactionsView = new ReactionsView(reactions);
                 this.addSubView(reactionsView);
                 li.appendChild(mountView(reactionsView));
             } else if (!reactions && reactionsView) {