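// test-06: sorting and filtering via item_collection.all()
//
// Creates a collection of 1,000 items, then verifies custom and built-in sorts
// (by id, by file birthtime, and the collection's default `oldest`/`newest` sorts)
// as well as filtering entries by birthtime and by id around a recorded midpoint.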
import * as asserts from '@std/assert';
import * as fsdb from '../fsdb.ts';
import * as path from '@std/path';
import { get_data_dir } from './helpers.ts';
import lurid from '@andyburke/lurid';

type ITEM = {
    id: string;
    value: string;
    created: string;
    written_by_time?: string;
};

const item_collection: fsdb.FSDB_COLLECTION<ITEM> = new fsdb.FSDB_COLLECTION<ITEM>({
    name: 'test-06-items',
    root: get_data_dir() + '/test-06-items'
});

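// Seed the collection: create item_count items, recording the first/mid/last ids and
// the timestamps observed once the midpoint and final items were written. The sort
// and filter tests below assert against these markers.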
const items: ITEM[] = [];
const item_count: number = 1_000;
const midpoint: number = Math.floor(item_count / 2);
let first_id = null;
let time_mid = null;
let mid_id = null;
let time_end = null;
let last_id = null;
for (let i = 0; i < item_count; ++i) {
    const item: ITEM = {
        id: lurid(),
        value: `${Math.random() * 10_000_000}`,
        created: new Date().toISOString()
    };

    first_id = first_id ?? item.id;

    items.push(item);

    const stored_item: ITEM = await item_collection.create(item);
    item.written_by_time = new Date().toISOString();

    asserts.assertObjectMatch(stored_item, item);

    if (i === midpoint) {
        time_mid = item.written_by_time;
        mid_id = item.id;
    }

    time_end = item.written_by_time;
    last_id = item.id;

    // wait a millisecond between items so creation timestamps are distinct for the sort tests below
    await new Promise((resolve) => setTimeout(resolve, 1));
}

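// Build the expected orderings from the in-memory items. Each map() produces fresh
// plain objects (id/value/created only), so the later in-place sorts do not alias
// each other's results.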
const sorted_items = {
    by_id: items.sort((lhs, rhs) => lhs.id.localeCompare(rhs.id)).map((item) => {
        return { id: item.id, value: item.value, created: item.created };
    }),

    by_created_newest: items.sort((lhs, rhs) => rhs.created.localeCompare(lhs.created)).map((item) => {
        return { id: item.id, value: item.value, created: item.created };
    }),

    by_created_oldest: items.sort((lhs, rhs) => lhs.created.localeCompare(rhs.created)).map((item) => {
        return { id: item.id, value: item.value, created: item.created };
    })
};

const LIMIT_MIN = 11;
const LIMIT_MAX = 333;

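// Each test pages through item_collection.all() with a randomly fuzzed limit,
// accumulating loaded items until a short page signals the collection is exhausted,
// then compares the result against the expected ordering (or filter bounds) above.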
Deno.test({
    name: 'sort all() by id',
    permissions: {
        env: true,
        // https://github.com/denoland/deno/discussions/17258
        read: true,
        write: true
    },
    fn: async () => {
        asserts.assert(item_collection);

        let offset = 0;
        const fetched = [];
        let more = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: offset,
                sort: (a: fsdb.WALK_ENTRY<ITEM>, b: fsdb.WALK_ENTRY<ITEM>) =>
                    path.basename(a.path).replace(/\.json$/i, '').localeCompare(path.basename(b.path).replace(/\.json$/i, ''))
            });

            fetched.push(...(fetched_items.map((item) => item.load())));
            offset += fetched_items.length;
            more = fetched_items.length === limit;
        } while (more);

        asserts.assertEquals(fetched, sorted_items.by_id);
    }
});

Deno.test({
    name: 'sort all() by birthtime',
    permissions: {
        env: true,
        // https://github.com/denoland/deno/discussions/17258
        read: true,
        write: true
    },
    fn: async () => {
        asserts.assert(item_collection);

        let offset = 0;
        const fetched = [];
        let more = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: offset,
                sort: (a: fsdb.WALK_ENTRY<ITEM>, b: fsdb.WALK_ENTRY<ITEM>) =>
                    (b.info.birthtime?.toISOString() ?? '').localeCompare(a.info.birthtime?.toISOString() ?? '')
            });

            fetched.push(...(fetched_items.map((item) => item.load())));
            offset += fetched_items.length;
            more = fetched_items.length === limit;
        } while (more);

        asserts.assertEquals(fetched, sorted_items.by_created_newest);
    }
});

Deno.test({
    name: 'sort all() by default `oldest` sort',
    permissions: {
        env: true,
        // https://github.com/denoland/deno/discussions/17258
        read: true,
        write: true
    },
    fn: async () => {
        asserts.assert(item_collection);

        let offset = 0;
        const fetched = [];
        let more = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: offset,
                sort: item_collection.sorts.oldest
            });

            fetched.push(...(fetched_items.map((item) => item.load())));
            offset += fetched_items.length;
            more = fetched_items.length === limit;
        } while (more);

        asserts.assertEquals(fetched, sorted_items.by_created_oldest);
    }
});

Deno.test({
    name: 'sort all() by default `newest` sort',
    permissions: {
        env: true,
        // https://github.com/denoland/deno/discussions/17258
        read: true,
        write: true
    },
    fn: async () => {
        asserts.assert(item_collection);

        let offset = 0;
        const fetched = [];
        let more = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: offset,
                sort: item_collection.sorts.newest
            });

            fetched.push(...(fetched_items.map((item) => item.load())));
            offset += fetched_items.length;
            more = fetched_items.length === limit;
        } while (more);

        asserts.assertEquals(fetched, sorted_items.by_created_newest);
    }
});

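// The last two tests exercise filtering: entries are split around the midpoint item
// (first by file birthtime, then by id) and each half is checked against the recorded
// midpoint markers.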
Deno.test({
    name: 'filter all() by birthtime',
    permissions: {
        env: true,
        // https://github.com/denoland/deno/discussions/17258
        read: true,
        write: true
    },
    fn: async () => {
        asserts.assert(item_collection);

        asserts.assert(time_mid);
        asserts.assert(time_end);

        asserts.assert(mid_id);
        asserts.assert(last_id);

        // test before
        let fetch_for_before_offset = 0;
        const fetched_for_before = [];
        let more_to_fetch_for_before = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_before_offset,
                filter: (entry) => (entry.info.birthtime?.toISOString() ?? '') < time_mid
            });

            fetched_for_before.push(...(fetched_items.map((item) => item.load())));
            fetch_for_before_offset += fetched_items.length;
            more_to_fetch_for_before = fetched_items.length === limit;
        } while (more_to_fetch_for_before);

        let newest = new Date(0).toISOString();
        asserts.assert(newest);
        for (const item of fetched_for_before) {
            const original_item = items.find((_) => _.id === item.id);
            asserts.assert(original_item);
            asserts.assert(original_item.written_by_time);
            if (original_item.written_by_time > newest) {
                newest = original_item.written_by_time;
            }
        }

        // note: we use less-or-equal because we don't have the actual file write time
        asserts.assertLessOrEqual(newest, time_mid);

        // test after
        let fetch_for_after_offset = 0;
        const fetched_for_after = [];
        let more_to_fetch_for_after = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_after_offset,
                filter: (entry) => (entry.info.birthtime?.toISOString() ?? '') > time_mid
            });

            fetched_for_after.push(...(fetched_items.map((item) => item.load())));
            fetch_for_after_offset += fetched_items.length;
            more_to_fetch_for_after = fetched_items.length === limit;
        } while (more_to_fetch_for_after);

        let oldest = new Date().toISOString();
        asserts.assert(oldest);
        for (const item of fetched_for_after) {
            const original_item = items.find((_) => _.id === item.id);
            asserts.assert(original_item);
            asserts.assert(original_item.written_by_time);
            if (original_item.written_by_time < oldest) {
                oldest = original_item.written_by_time;
            }
        }

        // again, allow slop because we don't know the actual file write time
        asserts.assertGreaterOrEqual(oldest, time_mid);
    }
});

Deno.test({
    name: 'filter all() by id',
    permissions: {
        env: true,
        // https://github.com/denoland/deno/discussions/17258
        read: true,
        write: true
    },
    fn: async () => {
        asserts.assert(item_collection);

        asserts.assert(time_mid);
        asserts.assert(time_end);

        asserts.assert(mid_id);
        asserts.assert(last_id);

        // test id_before
        let fetch_for_id_before_offset = 0;
        const fetched_for_id_before = [];
        let more_to_fetch_for_id_before = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_id_before_offset,
                filter: (entry) => path.basename(entry.path).replace(/\.json$/i, '') < mid_id
            });

            fetched_for_id_before.push(...(fetched_items.map((item) => item.load())));
            fetch_for_id_before_offset += fetched_items.length;
            more_to_fetch_for_id_before = fetched_items.length === limit;
        } while (more_to_fetch_for_id_before);

        let newest_id = first_id;
        asserts.assert(newest_id);
        for (const item of fetched_for_id_before) {
            if (item.id > newest_id) {
                newest_id = item.id;
            }
        }

        asserts.assertLess(newest_id, mid_id);

        // test id_after
        let fetch_for_id_after_offset = 0;
        const fetched_for_id_after = [];
        let more_to_fetch_for_id_after = true;
        do {
            // fuzz the limit
            const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

            const fetched_items = await item_collection.all({
                limit,
                offset: fetch_for_id_after_offset,
                filter: (entry) => path.basename(entry.path).replace(/\.json$/i, '') > mid_id
            });

            fetched_for_id_after.push(...(fetched_items.map((item) => item.load())));
            fetch_for_id_after_offset += fetched_items.length;
            more_to_fetch_for_id_after = fetched_items.length === limit;
        } while (more_to_fetch_for_id_after);

        let oldest_id = last_id;
        asserts.assert(oldest_id);
        for (const item of fetched_for_id_after) {
            if (item.id < oldest_id) {
                oldest_id = item.id;
            }
        }

        asserts.assertGreater(oldest_id, mid_id);
    }
});