Bring down the lint error count and delete some old, inactive unit tests

This commit is contained in:
Bruno Windels 2021-05-17 12:59:04 +02:00
parent c3b5c8750e
commit 8ff1f91d2c
3 changed files with 0 additions and 305 deletions

View File

@ -68,110 +68,3 @@ export class EventKey {
return this.fragmentId === other?.fragmentId && this.eventIndex === other?.eventIndex;
}
}
// Disabled copy of the EventKey unit tests (the "x" prefix keeps the test
// runner from picking them up).
// NOTE(review): MID, MIN and MAX are not defined in this file — presumably
// the middle/minimum/maximum values of the key component range; confirm and
// import/define them before re-enabling this suite.
export function xtests() {
    // Minimal stand-in for a FragmentIdComparer: plain numeric ordering.
    const fragmentIdComparer = {compare: (a, b) => a - b};
    return {
        // Keys built without a comparer must still order correctly against
        // the min/max sentinel keys and against themselves.
        test_no_fragment_index(assert) {
            const min = EventKey.minKey;
            const max = EventKey.maxKey;
            const a = new EventKey();
            a.eventIndex = 1;
            a.fragmentId = 1;
            assert(min.compare(min) === 0);
            assert(max.compare(max) === 0);
            assert(a.compare(a) === 0);
            assert(min.compare(max) < 0);
            assert(max.compare(min) > 0);
            assert(min.compare(a) < 0);
            assert(a.compare(min) > 0);
            assert(max.compare(a) > 0);
            assert(a.compare(max) < 0);
        },
        // A fresh key starts in the middle of the range, leaving room to
        // grow in both directions.
        test_default_key(assert) {
            const k = new EventKey(fragmentIdComparer);
            assert.equal(k.fragmentId, MID);
            assert.equal(k.eventIndex, MID);
        },
        // nextKey/previousKey step eventIndex by one, leave fragmentId
        // untouched, and are inverses of each other.
        test_inc(assert) {
            const a = new EventKey(fragmentIdComparer);
            const b = a.nextKey();
            assert.equal(a.fragmentId, b.fragmentId);
            assert.equal(a.eventIndex + 1, b.eventIndex);
            const c = b.previousKey();
            assert.equal(b.fragmentId, c.fragmentId);
            assert.equal(c.eventIndex + 1, b.eventIndex);
            assert.equal(a.eventIndex, c.eventIndex);
        },
        // minKey sorts at or before every component of a default key.
        test_min_key(assert) {
            const minKey = EventKey.minKey;
            const k = new EventKey(fragmentIdComparer);
            assert(minKey.fragmentId <= k.fragmentId);
            assert(minKey.eventIndex <= k.eventIndex);
            assert(k.compare(minKey) > 0);
            assert(minKey.compare(k) < 0);
        },
        // maxKey sorts at or after every component of a default key.
        test_max_key(assert) {
            const maxKey = EventKey.maxKey;
            const k = new EventKey(fragmentIdComparer);
            assert(maxKey.fragmentId >= k.fragmentId);
            assert(maxKey.eventIndex >= k.eventIndex);
            assert(k.compare(maxKey) < 0);
            assert(maxKey.compare(k) > 0);
        },
        // Derived keys must not mutate the key they derive from.
        test_immutable(assert) {
            const a = new EventKey(fragmentIdComparer);
            const fragmentId = a.fragmentId;
            const eventIndex = a.eventIndex;
            a.nextFragmentKey();
            assert.equal(a.fragmentId, fragmentId);
            assert.equal(a.eventIndex, eventIndex);
        },
        // fragmentId dominates the ordering, even against a much larger
        // eventIndex.
        test_cmp_fragmentid_first(assert) {
            const a = new EventKey(fragmentIdComparer);
            const b = new EventKey(fragmentIdComparer);
            a.fragmentId = 2;
            a.eventIndex = 1;
            b.fragmentId = 1;
            b.eventIndex = 100000;
            assert(a.compare(b) > 0);
        },
        // With equal fragmentIds, eventIndex decides the ordering.
        test_cmp_eventindex_second(assert) {
            const a = new EventKey(fragmentIdComparer);
            const b = new EventKey(fragmentIdComparer);
            a.fragmentId = 1;
            a.eventIndex = 100000;
            b.fragmentId = 1;
            b.eventIndex = 2;
            assert(a.compare(b) > 0);
            assert(b.compare(a) < 0);
        },
        test_cmp_max_larger_than_min(assert) {
            assert(EventKey.minKey.compare(EventKey.maxKey) < 0);
        },
        // fragmentId still dominates at the extreme ends of the range.
        test_cmp_fragmentid_first_large(assert) {
            const a = new EventKey(fragmentIdComparer);
            const b = new EventKey(fragmentIdComparer);
            a.fragmentId = MAX;
            a.eventIndex = MIN;
            b.fragmentId = MIN;
            b.eventIndex = MAX;
            // Fixed: the original asserted `b < a` and `a > b`, which coerce
            // both EventKey objects to "[object Object]" and therefore always
            // evaluate to false; use compare() like every other test here.
            assert(b.compare(a) < 0);
            assert(a.compare(b) > 0);
        }
    };
}

View File

@ -246,102 +246,3 @@ export class GapWriter {
return {entries, fragments};
}
}
//import MemoryStorage from "../storage/memory/MemoryStorage.js";

// Disabled GapWriter test suite (the "x" prefix keeps the runner from
// executing it).
// NOTE(review): MemoryStorage, RoomPersister and SortKey are not in scope
// here — restore the import above (and add the missing ones) before
// re-enabling these tests.
export function xtests() {
    const roomId = "!abc:hs.tld";

    // Assigns a monotonically increasing sortKey (jumping with
    // nextKeyWithGap around gap entries) and the given roomId to each entry,
    // and returns the entries so the result can be used directly as a
    // fixture, e.g. {roomTimeline: createTimeline(...)}.
    function createTimeline(roomId, entries) {
        let key = new SortKey();
        for (const entry of entries) {
            if (entry.gap && entry.gap.prev_batch) {
                key = key.nextKeyWithGap();
            }
            entry.sortKey = key;
            if (entry.gap && entry.gap.next_batch) {
                key = key.nextKeyWithGap();
            } else if (!entry.gap) {
                key = key.nextKey();
            }
            entry.roomId = roomId;
        }
        // Fixed: the original returned undefined, so callers passing the
        // result as {roomTimeline: createTimeline(...)} got no timeline.
        return entries;
    }

    // True when entries are in strictly ascending sortKey order.
    function areSorted(entries) {
        for (let i = 1; i < entries.length; i++) {
            const isSorted = entries[i - 1].sortKey.compare(entries[i].sortKey) < 0;
            if (!isSorted) {
                return false;
            }
        }
        return true;
    }

    return {
        "test backwards gap fill with overlapping neighbouring event": async function(assert) {
            const currentPaginationToken = "abc";
            const gap = {gap: {prev_batch: currentPaginationToken}};
            const storage = new MemoryStorage({roomTimeline: createTimeline(roomId, [
                {event: {event_id: "b"}},
                {gap: {next_batch: "ghi"}},
                gap,
            ])});
            const persister = new RoomPersister({roomId, storage});
            const response = {
                start: currentPaginationToken,
                end: "def",
                chunk: [
                    {event_id: "a"},
                    {event_id: "b"},
                    {event_id: "c"},
                    {event_id: "d"},
                ]
            };
            const {newEntries, replacedEntries} = await persister.persistGapFill(gap, response);
            // should only have taken events up till existing event
            assert.equal(newEntries.length, 2);
            assert.equal(newEntries[0].event.event_id, "c");
            assert.equal(newEntries[1].event.event_id, "d");
            assert.equal(replacedEntries.length, 2);
            // Fixed: "hij" appears nowhere in the fixture; the neighbouring
            // gap's next_batch token is "ghi".
            assert.equal(replacedEntries[0].gap.next_batch, "ghi");
            assert.equal(replacedEntries[1].gap.prev_batch, currentPaginationToken);
            assert(areSorted(newEntries));
            assert(areSorted(replacedEntries));
        },
        "test backwards gap fill with non-overlapping neighbouring event": async function(assert) {
            const currentPaginationToken = "abc";
            const newPaginationToken = "def";
            const gap = {gap: {prev_batch: currentPaginationToken}};
            const storage = new MemoryStorage({roomTimeline: createTimeline(roomId, [
                {event: {event_id: "a"}},
                {gap: {next_batch: "ghi"}},
                gap,
            ])});
            const persister = new RoomPersister({roomId, storage});
            const response = {
                start: currentPaginationToken,
                end: newPaginationToken,
                chunk: [
                    {event_id: "c"},
                    {event_id: "d"},
                    {event_id: "e"},
                    {event_id: "f"},
                ]
            };
            const {newEntries, replacedEntries} = await persister.persistGapFill(gap, response);
            // no overlap, so all events plus a new gap carrying the new token
            assert.equal(newEntries.length, 5);
            assert.equal(newEntries[0].gap.prev_batch, newPaginationToken);
            assert.equal(newEntries[1].event.event_id, "c");
            assert.equal(newEntries[2].event.event_id, "d");
            assert.equal(newEntries[3].event.event_id, "e");
            assert.equal(newEntries[4].event.event_id, "f");
            assert(areSorted(newEntries));
            assert.equal(replacedEntries.length, 1);
            assert.equal(replacedEntries[0].gap.prev_batch, currentPaginationToken);
        },
    };
}

View File

@ -251,102 +251,3 @@ export class SyncWriter {
return this._lastLiveKey;
}
}
//import MemoryStorage from "../storage/memory/MemoryStorage.js";

// Disabled SyncWriter test suite (the "x" prefix keeps the runner from
// executing it). This is a stale copy of the GapWriter suite — it exercises
// persistGapFill, not sync writing.
// NOTE(review): MemoryStorage, RoomPersister and SortKey are not in scope
// here — restore the import above (and add the missing ones) before
// re-enabling these tests.
export function xtests() {
    const roomId = "!abc:hs.tld";

    // Assigns a monotonically increasing sortKey (jumping with
    // nextKeyWithGap around gap entries) and the given roomId to each entry,
    // and returns the entries so the result can be used directly as a
    // fixture, e.g. {roomTimeline: createTimeline(...)}.
    function createTimeline(roomId, entries) {
        let key = new SortKey();
        for (const entry of entries) {
            if (entry.gap && entry.gap.prev_batch) {
                key = key.nextKeyWithGap();
            }
            entry.sortKey = key;
            if (entry.gap && entry.gap.next_batch) {
                key = key.nextKeyWithGap();
            } else if (!entry.gap) {
                key = key.nextKey();
            }
            entry.roomId = roomId;
        }
        // Fixed: the original returned undefined, so callers passing the
        // result as {roomTimeline: createTimeline(...)} got no timeline.
        return entries;
    }

    // True when entries are in strictly ascending sortKey order.
    function areSorted(entries) {
        for (let i = 1; i < entries.length; i++) {
            const isSorted = entries[i - 1].sortKey.compare(entries[i].sortKey) < 0;
            if (!isSorted) {
                return false;
            }
        }
        return true;
    }

    return {
        "test backwards gap fill with overlapping neighbouring event": async function(assert) {
            const currentPaginationToken = "abc";
            const gap = {gap: {prev_batch: currentPaginationToken}};
            const storage = new MemoryStorage({roomTimeline: createTimeline(roomId, [
                {event: {event_id: "b"}},
                {gap: {next_batch: "ghi"}},
                gap,
            ])});
            const persister = new RoomPersister({roomId, storage});
            const response = {
                start: currentPaginationToken,
                end: "def",
                chunk: [
                    {event_id: "a"},
                    {event_id: "b"},
                    {event_id: "c"},
                    {event_id: "d"},
                ]
            };
            const {newEntries, replacedEntries} = await persister.persistGapFill(gap, response);
            // should only have taken events up till existing event
            assert.equal(newEntries.length, 2);
            assert.equal(newEntries[0].event.event_id, "c");
            assert.equal(newEntries[1].event.event_id, "d");
            assert.equal(replacedEntries.length, 2);
            // Fixed: "hij" appears nowhere in the fixture; the neighbouring
            // gap's next_batch token is "ghi".
            assert.equal(replacedEntries[0].gap.next_batch, "ghi");
            assert.equal(replacedEntries[1].gap.prev_batch, currentPaginationToken);
            assert(areSorted(newEntries));
            assert(areSorted(replacedEntries));
        },
        "test backwards gap fill with non-overlapping neighbouring event": async function(assert) {
            const currentPaginationToken = "abc";
            const newPaginationToken = "def";
            const gap = {gap: {prev_batch: currentPaginationToken}};
            const storage = new MemoryStorage({roomTimeline: createTimeline(roomId, [
                {event: {event_id: "a"}},
                {gap: {next_batch: "ghi"}},
                gap,
            ])});
            const persister = new RoomPersister({roomId, storage});
            const response = {
                start: currentPaginationToken,
                end: newPaginationToken,
                chunk: [
                    {event_id: "c"},
                    {event_id: "d"},
                    {event_id: "e"},
                    {event_id: "f"},
                ]
            };
            const {newEntries, replacedEntries} = await persister.persistGapFill(gap, response);
            // no overlap, so all events plus a new gap carrying the new token
            assert.equal(newEntries.length, 5);
            assert.equal(newEntries[0].gap.prev_batch, newPaginationToken);
            assert.equal(newEntries[1].event.event_id, "c");
            assert.equal(newEntries[2].event.event_id, "d");
            assert.equal(newEntries[3].event.event_id, "e");
            assert.equal(newEntries[4].event.event_id, "f");
            assert(areSorted(newEntries));
            assert.equal(replacedEntries.length, 1);
            assert.equal(replacedEntries[0].gap.prev_batch, currentPaginationToken);
        },
    };
}