import * as asserts from '@std/assert';
import * as fsdb from '../fsdb.ts';
import { get_data_dir, random_email_address, random_phone_number } from './helpers.ts';
import lurid from '@andyburke/lurid';
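
// Covers FSDB_COLLECTION.all(): items are created in a loop, then read back in pages
// with randomized limits and with the before/after (time-based) and id_before/id_after
// (id-based) filters, checking each result set against the ids and timestamps recorded
// while the items were created.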
Deno.test({
    name: 'iterate over all items',
    permissions: {
        env: true,

        // https://github.com/denoland/deno/discussions/17258
        read: true,
        write: true
    },
    fn: async () => {
        type ITEM = {
            id: string;
            email: string;
            phone: string;
            created: string;
            written_by_time?: string;
        };

        const item_collection: fsdb.FSDB_COLLECTION<ITEM> = new fsdb.FSDB_COLLECTION<ITEM>({
            name: 'test-06-items',
            root: get_data_dir() + '/test-06-items'
        });

        asserts.assert(item_collection);

        const items: ITEM[] = [];
        const item_count: number = 500;
        const midpoint: number = Math.floor(item_count / 2);
        let first_id = null;
        let time_mid = null;
        let mid_id = null;
        let time_end = null;
        let last_id = null;
        for (let i = 0; i < item_count; ++i) {
            const item: ITEM = {
                id: lurid(),
                email: random_email_address(),
                phone: random_phone_number(),
                created: new Date().toISOString()
            };

            first_id = first_id ?? item.id;

            items.push(item);

            const stored_item: ITEM = await item_collection.create(item);
            item.written_by_time = new Date().toISOString();

            asserts.assertObjectMatch(stored_item, item);

            if (i === midpoint) {
                time_mid = item.written_by_time;
                mid_id = item.id;
            }

            time_end = item.written_by_time;
            last_id = item.id;
        }
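
        // Page through the entire collection with randomized page sizes; fuzzing the
        // limit exercises the limit/offset handling rather than one fixed page length.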
        const LIMIT_MIN = 11;
        const LIMIT_MAX = 333;

        let fetch_for_sort_offset = 0;
        const fetched_for_sort = [];
        let more_to_fetch_for_sorting = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_sort_offset
            });

            fetched_for_sort.push(...fetched_items);
            fetch_for_sort_offset += fetched_items.length;
            more_to_fetch_for_sorting = fetched_items.length === limit;
        } while (more_to_fetch_for_sorting);
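
        // Compare by id: written_by_time is only set on the local copies (after the write),
        // so project it out of the originals before checking the fetched items field-for-field.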
        const sorted_items = items.sort((lhs, rhs) => lhs.id.localeCompare(rhs.id)).map((item) => {
            return { id: item.id, email: item.email, phone: item.phone, created: item.created };
        });
        const sorted_fetched = fetched_for_sort.sort((lhs, rhs) => lhs.id.localeCompare(rhs.id));

        asserts.assertEquals(sorted_fetched, sorted_items);

        asserts.assert(time_mid);
        asserts.assert(time_end);

        asserts.assert(mid_id);
        asserts.assert(last_id);
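
        // The remaining checks repeat the same paging pattern once per supported filter,
        // verifying each result set against the midpoint recorded during creation.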
        // test before
        let fetch_for_before_offset = 0;
        const fetched_for_before = [];
        let more_to_fetch_for_before = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_before_offset,
                before: time_mid
            });

            fetched_for_before.push(...fetched_items);
            fetch_for_before_offset += fetched_items.length;
            more_to_fetch_for_before = fetched_items.length === limit;
        } while (more_to_fetch_for_before);
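
        // Everything returned with `before: time_mid` should have been written no later
        // than time_mid; track the newest write time observed among the results.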
        let newest = new Date(0).toISOString();
        asserts.assert(newest);
        for (const item of fetched_for_before) {
            const original_item = items.find((_) => _.id === item.id);
            asserts.assert(original_item);
            asserts.assert(original_item.written_by_time);
            if (original_item.written_by_time > newest) {
                newest = original_item.written_by_time;
            }
        }

        // note: we use less or equal because we don't have the actual file write time
        asserts.assertLessOrEqual(newest, time_mid);

        // test id_before
        let fetch_for_id_before_offset = 0;
        const fetched_for_id_before = [];
        let more_to_fetch_for_id_before = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_id_before_offset,
                id_before: mid_id
            });

            fetched_for_id_before.push(...fetched_items);
            fetch_for_id_before_offset += fetched_items.length;
            more_to_fetch_for_id_before = fetched_items.length === limit;
        } while (more_to_fetch_for_id_before);
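
        // With `id_before: mid_id`, every returned id should sort strictly before mid_id.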
        let newest_id = first_id;
        asserts.assert(newest_id);
        for (const item of fetched_for_id_before) {
            if (item.id > newest_id) {
                newest_id = item.id;
            }
        }

        asserts.assertLess(newest_id, mid_id);

        // test after
        let fetch_for_after_offset = 0;
        const fetched_for_after = [];
        let more_to_fetch_for_after = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_after_offset,
                after: time_mid
            });

            fetched_for_after.push(...fetched_items);
            fetch_for_after_offset += fetched_items.length;
            more_to_fetch_for_after = fetched_items.length === limit;
        } while (more_to_fetch_for_after);
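
        // Everything returned with `after: time_mid` should have been written no earlier
        // than time_mid; track the oldest write time observed among the results.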
        let oldest = new Date().toISOString();
        asserts.assert(oldest);
        for (const item of fetched_for_after) {
            const original_item = items.find((_) => _.id === item.id);
            asserts.assert(original_item);
            asserts.assert(original_item.written_by_time);
            if (original_item.written_by_time < oldest) {
                oldest = original_item.written_by_time;
            }
        }

        // again with the file write time slop
        asserts.assertGreaterOrEqual(oldest, time_mid);

        // test id_after
        let fetch_for_id_after_offset = 0;
        const fetched_for_id_after = [];
        let more_to_fetch_for_id_after = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_id_after_offset,
                id_after: mid_id
            });

            fetched_for_id_after.push(...fetched_items);
            fetch_for_id_after_offset += fetched_items.length;
            more_to_fetch_for_id_after = fetched_items.length === limit;
        } while (more_to_fetch_for_id_after);
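
        // With `id_after: mid_id`, every returned id should sort strictly after mid_id.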
        let oldest_id = last_id;
        asserts.assert(oldest_id);
        for (const item of fetched_for_id_after) {
            if (item.id < oldest_id) {
                oldest_id = item.id;
            }
        }

        asserts.assertGreater(oldest_id, mid_id);
    }
});