WIP for fragment support

Bruno Windels 2019-05-19 20:49:46 +02:00
parent 3324fd3afd
commit 784588440c
11 changed files with 159 additions and 25 deletions


@@ -5,21 +5,31 @@
 - SortKey
 - FragmentId
 - EventIndex
-- write fragmentStore
+- DONE: write fragmentStore
 - load all fragments
 - add a fragment (live on limited sync, or /context)
 - connect two fragments
 - update token on fragment (when filling gap or connecting two fragments)
 fragments can need connecting when filling a gap or creating a new /context fragment
-- adapt timelineStore
+- DONE: adapt timelineStore
 how will fragments be exposed in timeline store?
 - all read operations are passed a fragment id
 - adapt persister
 - DONE: persist fragments in /sync
-- load n items before and after key
-- fill gaps / fragment filling
+- DONE: fill gaps / fragment filling
+- load n items before and after key,
+- need to add fragments as we come across boundaries
+- also cache fragments? not for now ...
+- not doing any of the above, just reloading and rebuilding for now
+- adapt Timeline
+- turn ObservableArray into ObservableSortedArray
+- upsert already sorted sections
+- upsert single entry
+- adapt TilesCollection & Tile to entry changes
 - add live fragment id optimization if we haven't done so already
 - lets try to not have to have the fragmentindex in memory if the timeline isn't loaded
 - could do this by only loading all fragments into index when filling gaps, backpaginating, ... and on persister load only load the last fragment. This wouldn't even need a FragmentIndex?
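
The "connect two fragments" and "update token on fragment" items above boil down to linking two fragment records and clearing the tokens of the gap that was just filled. A minimal sketch of what that could look like against the timelineFragments store used later in this commit (the helper name and call site are assumptions, not part of the diff):

// A sketch, not part of this commit: link two fragments once the gap between
// them has been filled, and drop the now-unneeded pagination tokens.
async function connectFragments(txn, olderFragment, newerFragment) {
    olderFragment.nextId = newerFragment.id;
    newerFragment.previousId = olderFragment.id;
    // the gap between them is filled, so the tokens pointing into it go away
    olderFragment.nextToken = null;
    newerFragment.previousToken = null;
    txn.timelineFragments.update(olderFragment);
    txn.timelineFragments.update(newerFragment);
}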


@@ -22,6 +22,7 @@ export default class TilesCollection extends BaseObservableList {
         for (let entry of this._entries) {
             if (!currentTile || !currentTile.tryIncludeEntry(entry)) {
                 currentTile = this._tileCreator(entry);
+                // if (currentTile) here?
                 this._tiles.push(currentTile);
             }
         }
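
The `// if (currentTile) here?` note asks whether the push should be guarded. Assuming the tile creator may return a falsy value for entries that have no visual representation, the guarded loop could look like this (a sketch, not part of the commit):

// A sketch of the guarded version: only push when a tile was actually created,
// so entries without a tile don't add undefined to the list.
for (let entry of this._entries) {
    if (!currentTile || !currentTile.tryIncludeEntry(entry)) {
        currentTile = this._tileCreator(entry);
        if (currentTile) {
            this._tiles.push(currentTile);
        }
    }
}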


@@ -163,7 +163,7 @@ export default class FragmentIdComparer {
 export function tests() {
     return {
         test_1_island_3_fragments(assert) {
-            const index = new FragmentIdIndex([
+            const index = new FragmentIdComparer([
                 {id: 3, previousId: 2},
                 {id: 1, nextId: 2},
                 {id: 2, nextId: 3, previousId: 1},
@@ -180,7 +180,7 @@ export function tests() {
             assert.equal(index.compare(1, 1), 0);
         },
         test_2_island_dont_compare(assert) {
-            const index = new FragmentIdIndex([
+            const index = new FragmentIdComparer([
                 {id: 1},
                 {id: 2},
             ]);
@@ -188,7 +188,7 @@ export function tests() {
             assert.throws(() => index.compare(2, 1));
         },
         test_2_island_compare_internally(assert) {
-            const index = new FragmentIdIndex([
+            const index = new FragmentIdComparer([
                 {id: 1, nextId: 2},
                 {id: 2, previousId: 1},
                 {id: 11, nextId: 12},
@@ -203,12 +203,12 @@ export function tests() {
             assert.throws(() => index.compare(12, 2));
         },
         test_unknown_id(assert) {
-            const index = new FragmentIdIndex([{id: 1}]);
+            const index = new FragmentIdComparer([{id: 1}]);
             assert.throws(() => index.compare(1, 2));
             assert.throws(() => index.compare(2, 1));
         },
         test_rebuild_flushes_old_state(assert) {
-            const index = new FragmentIdIndex([
+            const index = new FragmentIdComparer([
                 {id: 1, nextId: 2},
                 {id: 2, previousId: 1},
             ]);
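
The tests pin down the comparer's contract: fragments linked through previousId/nextId form an island whose members can be compared by position, while comparing across islands or with unknown ids throws. A rough sketch of how such an ordering could be derived from the fragment records (a hypothetical helper, not the implementation behind these tests; it assumes the chains are acyclic):

// A sketch, not this commit's code: group fragments linked through
// previousId/nextId into "islands" and index each member within its island.
function buildIslandOrder(fragments) {
    const byId = new Map(fragments.map(f => [f.id, f]));
    const order = new Map();    // fragmentId -> {island, idx}
    let island = 0;
    for (const fragment of fragments) {
        if (order.has(fragment.id)) {
            continue;
        }
        // walk back to the first fragment of this chain
        let start = fragment;
        while (start.previousId !== undefined && byId.has(start.previousId)) {
            start = byId.get(start.previousId);
        }
        // then number the chain from front to back
        let idx = 0;
        for (let f = start; f; f = byId.get(f.nextId)) {
            order.set(f.id, {island, idx});
            idx += 1;
        }
        island += 1;
    }
    return order;
}

// comparing only makes sense within one island; unknown ids or ids from
// different islands throw, as the tests above require
function compareFragments(order, idA, idB) {
    const a = order.get(idA);
    const b = order.get(idB);
    if (!a || !b || a.island !== b.island) {
        throw new Error(`cannot compare fragments ${idA} and ${idB}`);
    }
    return a.idx - b.idx;
}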


@@ -1,6 +1,7 @@
 import { ObservableArray } from "../../../observable/index.js";
 import sortedIndex from "../../../utils/sortedIndex.js";
 import GapPersister from "./persistence/GapPersister.js";
+import TimelineReader from "./persistence/TimelineReader.js";
 export default class Timeline {
     constructor({roomId, storage, closeCallback, fragmentIdComparer}) {
@@ -9,6 +10,11 @@ export default class Timeline {
         this._closeCallback = closeCallback;
         this._entriesList = new ObservableArray();
         this._fragmentIdComparer = fragmentIdComparer;
+        this._timelineReader = new TimelineReader({
+            roomId: this._roomId,
+            storage: this._storage,
+            fragmentIdComparer: this._fragmentIdComparer
+        });
     }
     /** @package */
@@ -53,6 +59,7 @@ export default class Timeline {
     }
     async loadAtTop(amount) {
+        // TODO: use TimelineReader::readFrom here, and insert returned array at location for first and last entry.
         const firstEntry = this._entriesList.at(0);
         if (firstEntry) {
             const txn = await this._storage.readTxn([this._storage.storeNames.timelineEvents]);
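
Once that TODO is resolved, loadAtTop could delegate to the reader along these lines (a sketch only; it assumes entries expose fragmentId and eventIndex, that EventKey and a backwards Direction value are imported, and that the observable list supports insertAt, none of which are defined in this diff):

    // A sketch, not the commit's code; EventKey, Direction.Backward and
    // insertAt() are assumptions here.
    async loadAtTop(amount) {
        const firstEntry = this._entriesList.at(0);
        if (!firstEntry) {
            return;
        }
        const eventKey = new EventKey(firstEntry.fragmentId, firstEntry.eventIndex);
        const entries = await this._timelineReader.readFrom(eventKey, Direction.Backward, amount);
        // with a backward direction readFrom prepends as it walks, so the returned
        // array should already be oldest-first and can be inserted at the top in order
        entries.forEach((entry, i) => this._entriesList.insertAt(i, entry));
    }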


@@ -1,14 +1,6 @@
 import EventKey from "../EventKey.js";
 import EventEntry from "../entries/EventEntry.js";
-import {createEventEntry} from "./common.js";
+import {createEventEntry, directionalAppend} from "./common.js";
-function directionalAppend(array, value, direction) {
-    if (direction.isForward) {
-        array.push(value);
-    } else {
-        array.splice(0, 0, value);
-    }
-}
 export default class GapPersister {
     constructor({roomId, storage, fragmentIdComparer}) {


@@ -11,7 +11,7 @@ export default class SyncPersister {
     }
     async load(txn) {
-        const liveFragment = await txn.roomFragments.liveFragment(this._roomId);
+        const liveFragment = await txn.timelineFragments.liveFragment(this._roomId);
         if (liveFragment) {
             const [lastEvent] = await txn.roomTimeline.lastEvents(this._roomId, liveFragment.id, 1);
             // sorting and identifying (e.g. sort key and pk to insert) are a bit intertwined here
@@ -26,7 +26,7 @@ export default class SyncPersister {
     }
     async _createLiveFragment(txn, previousToken) {
-        const liveFragment = await txn.roomFragments.liveFragment(this._roomId);
+        const liveFragment = await txn.timelineFragments.liveFragment(this._roomId);
         if (!liveFragment) {
             if (!previousToken) {
                 previousToken = null;
@@ -39,7 +39,7 @@ export default class SyncPersister {
                 previousToken: previousToken,
                 nextToken: null
             };
-            txn.roomFragments.add(fragment);
+            txn.timelineFragments.add(fragment);
             return fragment;
         } else {
             return liveFragment;
@@ -47,12 +47,12 @@ export default class SyncPersister {
     }
     async _replaceLiveFragment(oldFragmentId, newFragmentId, previousToken, txn) {
-        const oldFragment = await txn.roomFragments.get(oldFragmentId);
+        const oldFragment = await txn.timelineFragments.get(oldFragmentId);
         if (!oldFragment) {
            throw new Error(`old live fragment doesn't exist: ${oldFragmentId}`);
         }
         oldFragment.nextId = newFragmentId;
-        txn.roomFragments.update(oldFragment);
+        txn.timelineFragments.update(oldFragment);
         const newFragment = {
             roomId: this._roomId,
             id: newFragmentId,
@@ -61,7 +61,7 @@ export default class SyncPersister {
             previousToken: previousToken,
             nextToken: null
         };
-        txn.roomFragments.add(newFragment);
+        txn.timelineFragments.add(newFragment);
         return {oldFragment, newFragment};
     }
@@ -117,6 +117,11 @@ export default class SyncPersister {
             }
         }
+        if (timeline.limited) {
+            const fragments = await txn.timelineFragments.all(this._roomId);
+            this._fragmentIdComparer.rebuild(fragments);
+        }
         return entries;
     }
 }
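
For a limited sync the flow implied by the methods above is: allocate a new fragment id, let _replaceLiveFragment link the old live fragment to the new one (storing the prev_batch token on the new fragment so the gap can be filled later), then rebuild the comparer from storage. A sketch of that wiring, with the id bookkeeping assumed rather than taken from the diff:

    // A sketch, not the commit's code; _lastLiveFragmentId and the simple
    // id allocation are assumptions.
    async _handleLimitedTimeline(txn, timeline) {
        const oldFragmentId = this._lastLiveFragmentId;
        const newFragmentId = oldFragmentId + 1;
        // link old -> new, and remember where to backfill from later
        const {newFragment} = await this._replaceLiveFragment(
            oldFragmentId, newFragmentId, timeline.prev_batch, txn);
        // a fragment was added, so rebuild the in-memory index from storage
        const fragments = await txn.timelineFragments.all(this._roomId);
        this._fragmentIdComparer.rebuild(fragments);
        return newFragment;
    }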


@@ -0,0 +1,63 @@
import {directionalConcat, directionalAppend} from "./common.js";
import EventKey from "../EventKey.js";
import EventEntry from "../entries/EventEntry.js";
import FragmentBoundaryEntry from "../entries/FragmentBoundaryEntry.js";

export default class TimelineReader {
    constructor({roomId, storage, fragmentIdComparer}) {
        this._roomId = roomId;
        this._storage = storage;
        this._fragmentIdComparer = fragmentIdComparer;
    }

    async readFrom(eventKey, direction, amount) {
        const txn = await this._storage.readTxn([
            this._storage.storeNames.timelineEvents,
            this._storage.storeNames.timelineFragments,
        ]);
        let entries = [];
        let loadedFragment = false;
        const timelineStore = txn.timelineEvents;
        const fragmentStore = txn.timelineFragments;

        while (entries.length < amount && eventKey) {
            let eventsWithinFragment;
            if (direction.isForward) {
                eventsWithinFragment = await timelineStore.eventsAfter(eventKey, amount);
            } else {
                eventsWithinFragment = await timelineStore.eventsBefore(eventKey, amount);
            }
            // wrap eventsWithinFragment in EventEntry, and prepend or append them to entries
            const eventEntries = eventsWithinFragment.map(e => new EventEntry(e, this._fragmentIdComparer));
            entries = directionalConcat(entries, eventEntries, direction);
            // the fragment ran out of events before we had enough, so add a
            // boundary entry and continue in the linked fragment, if any
            if (entries.length < amount) {
                const fragment = await fragmentStore.get(this._roomId, eventKey.fragmentId);
                // this._fragmentIdComparer.addFragment(fragment);
                let fragmentEntry = new FragmentBoundaryEntry(fragment, direction.isBackward, this._fragmentIdComparer);
                // append or prepend fragmentEntry, reuse func from GapPersister?
                directionalAppend(entries, fragmentEntry, direction);
                // don't count it in amount perhaps? or do?
                if (fragmentEntry.linkedFragmentId) {
                    const nextFragment = await fragmentStore.get(this._roomId, fragmentEntry.linkedFragmentId);
                    // this._fragmentIdComparer.addFragment(nextFragment);
                    const nextFragmentEntry = new FragmentBoundaryEntry(nextFragment, direction.isForward, this._fragmentIdComparer);
                    directionalAppend(entries, nextFragmentEntry, direction);
                    eventKey = new EventKey(nextFragmentEntry.fragmentId, nextFragmentEntry.eventIndex);
                    loadedFragment = true;
                } else {
                    eventKey = null;
                }
            }
        }
        // a new fragment was traversed, so reload all fragments into the comparer
        if (loadedFragment) {
            const fragments = await fragmentStore.all(this._roomId);
            this._fragmentIdComparer.rebuild(fragments);
        }
        return entries;
    }
}
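
FragmentBoundaryEntry is imported here but not part of this diff. From the way readFrom uses it, its interface would need to look roughly like the following (an assumed shape inferred from the calls above, not the real class):

// Assumed shape of FragmentBoundaryEntry, inferred purely from its use in readFrom.
class FragmentBoundaryEntry {
    constructor(fragment, isFragmentStart, fragmentIdComparer) {
        this._fragment = fragment;
        // true when this boundary is the start (older edge) of the fragment
        this._isFragmentStart = isFragmentStart;
        this._fragmentIdComparer = fragmentIdComparer;
    }

    get fragmentId() {
        return this._fragment.id;
    }

    // the adjacent fragment in the walking direction, if the two are connected
    get linkedFragmentId() {
        return this._isFragmentStart ? this._fragment.previousId : this._fragment.nextId;
    }

    // where to continue reading inside this fragment: a sentinel index at its
    // very start or very end (the actual sentinel values are not in this diff)
    get eventIndex() {
        return this._isFragmentStart ? Number.MIN_SAFE_INTEGER : Number.MAX_SAFE_INTEGER;
    }
}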


@@ -4,4 +4,20 @@ export function createEventEntry(key, event) {
         eventIndex: key.eventIndex,
         event: event,
     };
 }
+
+export function directionalAppend(array, value, direction) {
+    if (direction.isForward) {
+        array.push(value);
+    } else {
+        array.unshift(value);
+    }
+}
+
+export function directionalConcat(array, otherArray, direction) {
+    if (direction.isForward) {
+        return array.concat(otherArray);
+    } else {
+        return otherArray.concat(array);
+    }
+}
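
Both helpers key off a direction object's isForward flag, the same shape TimelineReader and GapPersister pass around. A quick usage sketch with hand-rolled direction values (the real Direction type is not in this diff):

// Hypothetical direction values, only for illustration.
const forward = {isForward: true, isBackward: false};
const backward = {isForward: false, isBackward: true};

const entries = ["b", "c"];
directionalAppend(entries, "d", forward);    // entries is now ["b", "c", "d"]
directionalAppend(entries, "a", backward);   // entries is now ["a", "b", "c", "d"]

// concat keeps chronological order either way:
directionalConcat(["c", "d"], ["e", "f"], forward);   // ["c", "d", "e", "f"]
directionalConcat(["c", "d"], ["a", "b"], backward);  // ["a", "b", "c", "d"]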


@@ -18,6 +18,7 @@ export default class Session {
             this._storage.storeNames.roomSummary,
             this._storage.storeNames.roomState,
             this._storage.storeNames.timelineEvents,
+            this._storage.storeNames.timelineFragments,
         ]);
         // restore session object
         this._session = await txn.session.get();


@@ -74,6 +74,7 @@ export default class Sync extends EventEmitter {
             storeNames.session,
             storeNames.roomSummary,
             storeNames.timelineEvents,
+            storeNames.timelineFragments,
             storeNames.roomState,
         ]);
         const roomChanges = [];


@@ -0,0 +1,38 @@
import BaseObservableList from "./BaseObservableList.js";
import sortedIndex from "../../utils/sortedIndex.js";

export default class SortedArray extends BaseObservableList {
    constructor(comparator) {
        super();
        this._comparator = comparator;
        this._items = [];
    }

    setSortedMany(items) {
    }

    set(item) {
        const idx = sortedIndex(this._items, item, this._comparator);
        if (idx >= this._items.length || this._comparator(this._items[idx], item) !== 0) {
            // no equal item at this position yet: insert
            this._items.splice(idx, 0, item);
            //emitAdd
        } else {
            // an equal item already exists: replace it in place
            this._items[idx] = item;
            //emitRemove
            //emitAdd
        }
    }

    get array() {
        return this._items;
    }

    get length() {
        return this._items.length;
    }

    [Symbol.iterator]() {
        return this._items.values();
    }
}
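
setSortedMany is still an empty stub here. A minimal way to fill it in, assuming the incoming batch is sorted with the same comparator, is to upsert each item through set() (a sketch, not the commit's eventual implementation):

    // A sketch for the empty setSortedMany stub above: upsert every item of an
    // already sorted batch through set().
    setSortedMany(items) {
        for (const item of items) {
            this.set(item);
        }
    }

A dedicated merge could avoid the repeated binary searches, but reusing set() keeps the insert/update handling and the pending emitAdd/emitRemove bookkeeping in one place.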