Merge pull request #516 from vector-im/bwindels/fix-515
Fix interpreting hex keys as decimal
commit 9a96a5b7bb
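The underlying bug: timeline event keys are written with encodeUint32, which renders fragmentId and eventIndex as zero-padded hexadecimal, but decodeKey in the timeline event store parsed them back with parseInt(..., 10), so the stored hex digits were read as decimal. Besides switching decodeKey to decodeUint32, the diff below threads a log item through GapWriter._findOverlappingEvents, rewrites QueryTarget.findExistingKeys to scan a single key cursor over the sorted keys, and adds tests together with the storage mocks they need.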
@@ -181,7 +181,7 @@ import {FragmentIdComparer} from "../../../../matrix/room/timeline/FragmentIdCom
 import {createAnnotation} from "../../../../matrix/room/timeline/relations.js";
 // mocks
 import {Clock as MockClock} from "../../../../mocks/Clock.js";
-import {createMockStorage} from "../../../../mocks/Storage.js";
+import {createMockStorage} from "../../../../mocks/Storage";
 import {ListObserver} from "../../../../mocks/ListObserver.js";
 import {createEvent, withTextBody, withContent} from "../../../../mocks/event.js";
 import {NullLogItem, NullLogger} from "../../../../logging/NullLogger.js";
@@ -351,7 +351,7 @@ export class SendQueue {
 }
 
 import {HomeServer as MockHomeServer} from "../../../mocks/HomeServer.js";
-import {createMockStorage} from "../../../mocks/Storage.js";
+import {createMockStorage} from "../../../mocks/Storage";
 import {ListObserver} from "../../../mocks/ListObserver.js";
 import {NullLogger, NullLogItem} from "../../../logging/NullLogger.js";
 import {createEvent, withTextBody, withTxnId} from "../../../mocks/event.js";
@@ -343,7 +343,7 @@ export class Timeline {
 import {FragmentIdComparer} from "./FragmentIdComparer.js";
 import {poll} from "../../../mocks/poll.js";
 import {Clock as MockClock} from "../../../mocks/Clock.js";
-import {createMockStorage} from "../../../mocks/Storage.js";
+import {createMockStorage} from "../../../mocks/Storage";
 import {ListObserver} from "../../../mocks/ListObserver.js";
 import {createEvent, withTextBody, withContent, withSender} from "../../../mocks/event.js";
 import {NullLogItem} from "../../../logging/NullLogger.js";
@@ -27,14 +27,18 @@ export class GapWriter {
         this._relationWriter = relationWriter;
     }
 
-    async _findOverlappingEvents(fragmentEntry, events, txn) {
+    async _findOverlappingEvents(fragmentEntry, events, txn, log) {
         const eventIds = events.map(e => e.event_id);
         const existingEventKeyMap = await txn.timelineEvents.getEventKeysForIds(this._roomId, eventIds);
+        log.set("existingEvents", existingEventKeyMap.size);
         const nonOverlappingEvents = events.filter(e => !existingEventKeyMap.has(e.event_id));
+        log.set("nonOverlappingEvents", nonOverlappingEvents.length);
         let neighbourFragmentEntry;
         if (fragmentEntry.hasLinkedFragment) {
+            log.set("linkedFragmentId", fragmentEntry.linkedFragmentId);
             for (const eventKey of existingEventKeyMap.values()) {
                 if (eventKey.fragmentId === fragmentEntry.linkedFragmentId) {
+                    log.set("foundLinkedFragment", true);
                     const neighbourFragment = await txn.timelineFragments.get(this._roomId, fragmentEntry.linkedFragmentId);
                     neighbourFragmentEntry = fragmentEntry.createNeighbourEntry(neighbourFragment);
                     break;
@@ -183,11 +187,12 @@ export class GapWriter {
 
         // find last event in fragment so we get the eventIndex to begin creating keys at
         let lastKey = await this._findFragmentEdgeEventKey(fragmentEntry, txn);
+        log.set("lastKey", lastKey.toString());
         // find out if any event in chunk is already present using findFirstOrLastOccurringEventId
         const {
             nonOverlappingEvents,
             neighbourFragmentEntry
-        } = await this._findOverlappingEvents(fragmentEntry, chunk, txn);
+        } = await this._findOverlappingEvents(fragmentEntry, chunk, txn, log);
         // create entries for all events in chunk, add them to entries
         const {entries, updatedEntries} = await this._storeEvents(nonOverlappingEvents, lastKey, direction, state, txn, log);
         const fragments = await this._updateFragments(fragmentEntry, neighbourFragmentEntry, end, entries, txn, log);
@@ -198,7 +203,7 @@ export class GapWriter {
 
 import {FragmentIdComparer} from "../FragmentIdComparer.js";
 import {RelationWriter} from "./RelationWriter.js";
-import {createMockStorage} from "../../../../mocks/Storage.js";
+import {createMockStorage} from "../../../../mocks/Storage";
 import {FragmentBoundaryEntry} from "../entries/FragmentBoundaryEntry.js";
 import {NullLogItem} from "../../../../logging/NullLogger.js";
 import {TimelineMock, eventIds, eventId} from "../../../../mocks/TimelineMock.ts";
@@ -253,7 +253,7 @@ const _REDACT_KEEP_CONTENT_MAP = {
 };
 // end of matrix-js-sdk code
 
-import {createMockStorage} from "../../../../mocks/Storage.js";
+import {createMockStorage} from "../../../../mocks/Storage";
 import {createEvent, withTextBody, withRedacts, withContent} from "../../../../mocks/event.js";
 import {createAnnotation} from "../relations.js";
 import {FragmentIdComparer} from "../FragmentIdComparer.js";
@@ -256,7 +256,7 @@ export class SyncWriter {
     }
 }
 
-import {createMockStorage} from "../../../../mocks/Storage.js";
+import {createMockStorage} from "../../../../mocks/Storage";
 import {createEvent, withTextBody} from "../../../../mocks/event.js";
 import {Instance as nullLogger} from "../../../../logging/NullLogger.js";
 export function tests() {
@@ -15,7 +15,15 @@ limitations under the License.
 */
 
 import {iterateCursor, DONE, NOT_DONE, reqAsPromise} from "./utils";
-import {Transaction} from "./Transaction";
+import {StorageError} from "../common";
+import {LogItem} from "../../../logging/LogItem.js";
+import {IDBKey} from "./Transaction";
 
+export interface ITransaction {
+    idbFactory: IDBFactory;
+    IDBKeyRange: typeof IDBKeyRange;
+    addWriteError(error: StorageError, refItem: LogItem | undefined, operationName: string, keys: IDBKey[] | undefined);
+}
+
 type Reducer<A,B> = (acc: B, val: A) => B
 
@@ -32,9 +40,9 @@ interface QueryTargetInterface<T> {
 
 export class QueryTarget<T> {
     protected _target: QueryTargetInterface<T>;
-    protected _transaction: Transaction;
+    protected _transaction: ITransaction;
 
-    constructor(target: QueryTargetInterface<T>, transaction: Transaction) {
+    constructor(target: QueryTargetInterface<T>, transaction: ITransaction) {
         this._target = target;
         this._transaction = transaction;
     }
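The ITransaction interface introduced above captures only what QueryTarget and Store need from a transaction (idbFactory, IDBKeyRange and addWriteError), so the tests added further down can pass a small mock in place of the real Transaction.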
@@ -160,20 +168,34 @@ export class QueryTarget<T> {
 
     /**
      * Checks if a given set of keys exist.
-     * Calls `callback(key, found)` for each key in `keys`, in key sorting order (or reversed if backwards=true).
      * If the callback returns true, the search is halted and callback won't be called again.
-     * `callback` is called with the same instances of the key as given in `keys`, so direct comparison can be used.
      */
     async findExistingKeys(keys: IDBValidKey[], backwards: boolean, callback: (key: IDBValidKey, pk: IDBValidKey) => boolean): Promise<void> {
+        const compareKeys = (a, b) => backwards ? -this.idbFactory.cmp(a, b) : this.idbFactory.cmp(a, b);
+        const sortedKeys = keys.slice().sort(compareKeys);
+        const firstKey = sortedKeys[0];
+        const lastKey = sortedKeys[sortedKeys.length - 1];
         const direction = backwards ? "prev" : "next";
-        const sortedKeys = keys.slice().sort((a, b) => backwards ? -this.idbFactory.cmp(a, b) : this.idbFactory.cmp(a, b));
-        const firstKey = backwards ? sortedKeys[sortedKeys.length - 1] : sortedKeys[0];
-        const lastKey = backwards ? sortedKeys[0] : sortedKeys[sortedKeys.length - 1];
         const cursor = this._target.openKeyCursor(this.IDBKeyRange.bound(firstKey, lastKey), direction);
+        let index = 0;
         await iterateCursor(cursor, (value, key, cursor) => {
+            while (index < sortedKeys.length && compareKeys(sortedKeys[index], key) < 0) {
+                index += 1;
+            }
+            let done = false;
+            if (sortedKeys[index] === key) {
                 const pk = cursor.primaryKey;
-            const done = callback(key, pk);
-            return done ? DONE : NOT_DONE;
+                done = callback(key, pk);
+                index += 1;
+            }
+            if (done || index >= sortedKeys.length) {
+                return DONE;
+            } else {
+                return {
+                    done: false,
+                    jumpTo: sortedKeys[index],
+                }
+            }
         });
     }
 
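The rewritten findExistingKeys walks one key cursor across the bound(firstKey, lastKey) range and advances an index over the sorted search keys as the cursor moves, reporting a hit whenever the two meet and otherwise asking the cursor to jump ahead to the next candidate. A rough standalone sketch of the same merge-style scan over plain arrays, a hypothetical helper rather than hydrogen code, with the backwards direction and the cursor jumpTo left out (storedKeys is assumed to already be in ascending order, as an IndexedDB key cursor would be):

    function findExisting(storedKeys: string[], searchKeys: string[], callback: (key: string) => boolean): void {
        const sorted = searchKeys.slice().sort();
        let index = 0;
        for (const key of storedKeys) {   // stands in for the IndexedDB key cursor
            // search keys sorting before the current stored key cannot exist in the store
            while (index < sorted.length && sorted[index] < key) {
                index += 1;
            }
            let done = false;
            if (sorted[index] === key) {  // the search key exists
                done = callback(key);     // callback may return true to halt the scan
                index += 1;
            }
            if (done || index >= sorted.length) {
                return;
            }
            // the real implementation returns {done: false, jumpTo: sorted[index]} here,
            // letting the cursor skip directly to the next candidate key
        }
    }

    // findExisting(["a", "c", "e"], ["b", "c", "e"], key => { found.push(key); return false; })
    // invokes the callback for "c" and "e" only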
@@ -239,3 +261,101 @@ export class QueryTarget<T> {
         }
     }
 }
+
+import {createMockDatabase, MockIDBImpl} from "../../../mocks/Storage";
+import {txnAsPromise} from "./utils";
+import {QueryTargetWrapper, Store} from "./Store";
+
+export function tests() {
+
+    class MockTransaction extends MockIDBImpl {
+        addWriteError(error: StorageError, refItem: LogItem | undefined, operationName: string, keys: IDBKey[] | undefined) {}
+    }
+
+    interface TestEntry {
+        key: string
+    }
+
+    async function createTestStore(): Promise<Store<TestEntry>> {
+        const mockImpl = new MockTransaction();
+        const db = await createMockDatabase("findExistingKeys", (db: IDBDatabase) => {
+            db.createObjectStore("test", {keyPath: "key"});
+        }, mockImpl);
+        const txn = db.transaction(["test"], "readwrite");
+        return new Store<TestEntry>(txn.objectStore("test"), mockImpl);
+    }
+
+    return {
+        "findExistingKeys should not match on empty store": async assert => {
+            const store = await createTestStore();
+            await store.findExistingKeys(["2db1a709-d8f1-4c40-a835-f312badd277a", "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2"], false, () => {
+                assert.fail("no key should match");
+                return false;
+            });
+        },
+        "findExistingKeys should not match any existing keys (in between sorting order)": async assert => {
+            const store = await createTestStore();
+            store.add({key: "43cd16eb-a6b4-4b9d-ab36-ab87d1b038c3"});
+            store.add({key: "b655e7c5-e02d-4823-a7af-4202b12de659"});
+            await store.findExistingKeys(["2db1a709-d8f1-4c40-a835-f312badd277a", "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2"], false, () => {
+                assert.fail("no key should match");
+                return false;
+            });
+        },
+        "findExistingKeys should match only existing keys": async assert => {
+            const store = await createTestStore();
+            store.add({key: "2db1a709-d8f1-4c40-a835-f312badd277a"});
+            store.add({key: "43cd16eb-a6b4-4b9d-ab36-ab87d1b038c3"});
+            store.add({key: "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2"});
+            const found: string[] = [];
+            await store.findExistingKeys([
+                "2db1a709-d8f1-4c40-a835-f312badd277a",
+                "eac3ef5c-a48f-4e19-b41d-ebd1d84c53f2",
+                "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2"
+            ], false, (key: IDBValidKey) => {
+                found.push(key as string);
+                return false;
+            });
+            assert.equal(found.length, 2);
+            assert.equal(found[0], "2db1a709-d8f1-4c40-a835-f312badd277a");
+            assert.equal(found[1], "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2");
+        },
+        "findExistingKeys should match all if all exist": async assert => {
+            const store = await createTestStore();
+            store.add({key: "2db1a709-d8f1-4c40-a835-f312badd277a"});
+            store.add({key: "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2"});
+            store.add({key: "b655e7c5-e02d-4823-a7af-4202b12de659"});
+            const found: string[] = [];
+            await store.findExistingKeys([
+                "2db1a709-d8f1-4c40-a835-f312badd277a",
+                "b655e7c5-e02d-4823-a7af-4202b12de659",
+                "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2"
+            ], false, (key: IDBValidKey) => {
+                found.push(key as string);
+                return false;
+            });
+            assert.equal(found.length, 3);
+            assert.equal(found[0], "2db1a709-d8f1-4c40-a835-f312badd277a");
+            assert.equal(found[1], "b655e7c5-e02d-4823-a7af-4202b12de659");
+            assert.equal(found[2], "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2");
+        },
+        "findExistingKeys should stop matching when callback returns true": async assert => {
+            const store = await createTestStore();
+            store.add({key: "2db1a709-d8f1-4c40-a835-f312badd277a"});
+            store.add({key: "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2"});
+            store.add({key: "b655e7c5-e02d-4823-a7af-4202b12de659"});
+            const found: string[] = [];
+            await store.findExistingKeys([
+                "2db1a709-d8f1-4c40-a835-f312badd277a",
+                "b655e7c5-e02d-4823-a7af-4202b12de659",
+                "fe7aa5c2-d4ed-4278-b3b0-f49d48d11df2"
+            ], false, (key: IDBValidKey) => {
+                found.push(key as string);
+                return true;
+            });
+            assert.equal(found.length, 1);
+            assert.equal(found[0], "2db1a709-d8f1-4c40-a835-f312badd277a");
+        },
+
+    }
+}
@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-import {QueryTarget, IDBQuery} from "./QueryTarget";
+import {QueryTarget, IDBQuery, ITransaction} from "./QueryTarget";
 import {IDBRequestError, IDBRequestAttemptError} from "./error";
 import {reqAsPromise} from "./utils";
 import {Transaction, IDBKey} from "./Transaction";
@@ -28,7 +28,7 @@ function logRequest(method: string, params: any[], source: any): void {
     console.info(`${databaseName}.${storeName}.${method}(${params.map(p => JSON.stringify(p)).join(", ")})`);
 }
 
-class QueryTargetWrapper<T> {
+export class QueryTargetWrapper<T> {
     private _qt: IDBIndex | IDBObjectStore;
 
     constructor(qt: IDBIndex | IDBObjectStore) {
@@ -133,7 +133,7 @@ class QueryTargetWrapper<T> {
 }
 
 export class Store<T> extends QueryTarget<T> {
-    constructor(idbStore: IDBObjectStore, transaction: Transaction) {
+    constructor(idbStore: IDBObjectStore, transaction: ITransaction) {
         super(new QueryTargetWrapper<T>(idbStore), transaction);
     }
 
@@ -16,7 +16,7 @@ limitations under the License.
 
 import {EventKey} from "../../../room/timeline/EventKey";
 import { StorageError } from "../../common";
-import { encodeUint32 } from "../utils";
+import { encodeUint32, decodeUint32 } from "../utils";
 import {KeyLimits} from "../../common";
 import {Store} from "../Store";
 import {TimelineEvent, StateEvent} from "../../types";
@@ -46,7 +46,7 @@ function encodeKey(roomId: string, fragmentId: number, eventIndex: number): stri
 
 function decodeKey(key: string): { roomId: string, eventKey: EventKey } {
     const [roomId, fragmentId, eventIndex] = key.split("|");
-    return {roomId, eventKey: new EventKey(parseInt(fragmentId, 10), parseInt(eventIndex, 10))};
+    return {roomId, eventKey: new EventKey(decodeUint32(fragmentId), decodeUint32(eventIndex))};
}
 
 function encodeEventIdKey(roomId: string, eventId: string): string {
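This is the core fix: encodeKey writes fragmentId and eventIndex as hex strings produced by encodeUint32, so reading them back with a radix of 10 gives wrong values (hex made of decimal digits is reinterpreted as a different number, and parsing stops at the first a-f digit). A hypothetical round trip, assuming the roomId|fragmentId|eventIndex layout used by encodeKey:

    const key = "!room:hs.tld|0000006d|80000001";
    const [roomId, fragmentId, eventIndex] = key.split("|");
    parseInt(fragmentId, 10);   // 6, parsing stops at the "d"; the fragment id is actually 109
    decodeUint32(fragmentId);   // 109 (0x6d)
    parseInt(eventIndex, 10);   // 80000001, not the encoded value
    decodeUint32(eventIndex);   // 2147483649 (0x80000001)

decodeUint32 is just parseInt(str, 16), as the utils hunk further down shows.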
@@ -316,3 +316,69 @@ export class TimelineEventStore {
         this._timelineStore.delete(range);
     }
 }
+
+import {createMockStorage} from "../../../../mocks/Storage";
+import {createEvent, withTextBody} from "../../../../mocks/event.js";
+import {createEventEntry} from "../../../room/timeline/persistence/common.js";
+import {Instance as logItem} from "../../../../logging/NullLogger.js";
+
+export function tests() {
+
+    const sortedIds = [
+        "$2wZy1W-QdcwaAwz68nfz1oc-3SsZKVDy8d86ERP1Pm0",
+        "$4RWaZ5142grUgTnQyr_5qiPTOwzAOimt5MsXg6m1diM",
+        "$4izqHE2Wf5US_-e_za942pZ10CDNJjDncUMmhqBUVQw",
+        "$Oil2Afq2cBLqMAeJTAHjA3Is9T5Wmaa2ogVRlFJ_gzE",
+        "$Wyl-7u-YqnPJElkPufIRXRFTYP-eFxQ4iD-SmLQo2Rw",
+        "$b-eWaZtp22vL9mp0h7odbpphOZQ-rnp54qjyTQPARgo",
+        "$sS9rTv8u2m9o4RaMI2jGOnpMtb9t8_0euiQLhNFW380",
+        "$uZLkB9rzTKvJAK2QrQNX-prwQ2Niajdi0fvvRnyCtz8",
+        "$vGecIBZFex9_vlQf1E1LjtQXE3q5GwERIHMiy4mOWv0",
+        "$vdLgAnwjHj0cicU3MA4ynLHUBGOIFhvvksY3loqzjF",
+    ];
+
+    const insertedIds = [
+        sortedIds[5],
+        sortedIds[3],
+        sortedIds[9],
+        sortedIds[7],
+        sortedIds[1],
+    ];
+
+    const checkedIds = [
+        sortedIds[2],
+        sortedIds[4],
+        sortedIds[3],
+        sortedIds[0],
+        sortedIds[8],
+        sortedIds[9],
+        sortedIds[6],
+    ];
+
+    const roomId = "!fjsdf423423jksdfdsf:hs.tld";
+
+    function createEventWithId(id) {
+        return withTextBody("hello", createEvent("m.room.message", id, "@alice:hs.tld"));
+    }
+
+    return {
+        "getEventKeysForIds": async assert => {
+            const storage = await createMockStorage();
+            const txn = await storage.readWriteTxn([storage.storeNames.timelineEvents]);
+            let eventKey = EventKey.defaultFragmentKey(109);
+            for (const insertedId of insertedIds) {
+                const entry = createEventEntry(eventKey.nextKey(), roomId, createEventWithId(insertedId));
+                assert(await txn.timelineEvents.tryInsert(entry, logItem));
+                eventKey = eventKey.nextKey();
+            }
+            const eventKeyMap = await txn.timelineEvents.getEventKeysForIds(roomId, checkedIds);
+            assert.equal(eventKeyMap.size, 2);
+            const eventKey1 = eventKeyMap.get("$Oil2Afq2cBLqMAeJTAHjA3Is9T5Wmaa2ogVRlFJ_gzE")!;
+            assert.equal(eventKey1.fragmentId, 109);
+            assert.equal(eventKey1.eventIndex, 0x80000001);
+            const eventKey2 = eventKeyMap.get("$vdLgAnwjHj0cicU3MA4ynLHUBGOIFhvvksY3loqzjF")!;
+            assert.equal(eventKey2.fragmentId, 109);
+            assert.equal(eventKey2.eventIndex, 0x80000002);
+        }
+    }
+}
@@ -68,7 +68,7 @@ export function decodeUint32(str: string): number {
     return parseInt(str, 16);
 }
 
-type CreateObjectStore = (db : IDBDatabase, txn: IDBTransaction | null, oldVersion: number, version: number) => any
+export type CreateObjectStore = (db : IDBDatabase, txn: IDBTransaction | null, oldVersion: number, version: number) => any
 
 export function openDatabase(name: string, createObjectStore: CreateObjectStore, version: number, idbFactory: IDBFactory = window.indexedDB): Promise<IDBDatabase> {
     const req = idbFactory.open(name, version);
@@ -16,8 +16,26 @@ limitations under the License.
 
 import {FDBFactory, FDBKeyRange} from "../../lib/fake-indexeddb/index.js";
 import {StorageFactory} from "../matrix/storage/idb/StorageFactory";
+import {Storage} from "../matrix/storage/idb/Storage";
 import {Instance as nullLogger} from "../logging/NullLogger.js";
+import {openDatabase, CreateObjectStore} from "../matrix/storage/idb/utils";
 
-export function createMockStorage() {
-    return new StorageFactory(null, new FDBFactory(), FDBKeyRange).create(1, nullLogger.item);
+export function createMockStorage(): Promise<Storage> {
+    return new StorageFactory(null as any, new FDBFactory(), FDBKeyRange).create("1", nullLogger.item);
+}
+
+export function createMockDatabase(name: string, createObjectStore: CreateObjectStore, impl: MockIDBImpl): Promise<IDBDatabase> {
+    return openDatabase(name, createObjectStore, 1, impl.idbFactory);
+}
+
+export class MockIDBImpl {
+    idbFactory: FDBFactory;
+
+    constructor() {
+        this.idbFactory = new FDBFactory();
+    }
+
+    get IDBKeyRange(): typeof IDBKeyRange {
+        return FDBKeyRange;
+    }
 }