feature: add options for all() to limit before/after creation/id
commit 89eff3fb13
parent 56715a1400
3 changed files with 221 additions and 38 deletions

@@ -156,3 +156,7 @@ for browsing the data as a human.

TODO: index everything into a sqlite setup as well? would give a way to run
SQL against data still stored on disk in a nicely human browsable format.

## TODO

- [ ] make all()/find() return something like { file_info, entry: { private data = undefined; load() => { data = data ?? await Deno.readTextFile(this.file_info.path); return data; } } }
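
A rough sketch of the shape that TODO describes, assuming a closure-based lazy load; FSDB_ENTRY and make_entry are illustrative names only, not part of the current fsdb API:

```ts
// Illustrative only: sketches the { file_info, entry: { load() } } shape from the
// TODO above. FSDB_ENTRY and make_entry are hypothetical names, not fsdb API.
export type FSDB_ENTRY = {
    file_info: { path: string };
    entry: {
        // reads the file on first call, then returns the cached text
        load: () => Promise<string>;
    };
};

export function make_entry(path: string): FSDB_ENTRY {
    let data: string | undefined = undefined;
    return {
        file_info: { path },
        entry: {
            load: async () => {
                data = data ?? await Deno.readTextFile(path);
                return data;
            }
        }
    };
}
```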

fsdb.ts
@@ -15,6 +15,12 @@ export type FSDB_COLLECTION_CONFIG_INPUT = Optional<FSDB_COLLECTION_CONFIG, 'id_

export type FSDB_SEARCH_OPTIONS = {
    limit: number;
    offset?: number;
    before?: string;
    after?: string;
    modified_before?: string;
    modified_after?: string;
    id_before?: string;
    id_after?: string;
};

export interface FSDB_INDEXER<T> {
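
For orientation, a sketch of how these options might be passed to all(); `collection` stands in for an existing FSDB_COLLECTION instance and the values are placeholders, only the option names come from FSDB_SEARCH_OPTIONS above:

```ts
// Hypothetical usage of the new FSDB_SEARCH_OPTIONS fields with all().
// `collection` is assumed to be an existing FSDB_COLLECTION instance.
const page = await collection.all({
    limit: 100,                                // required page size
    offset: 0,                                 // skip items already fetched
    after: '2024-01-01T00:00:00.000Z',         // only items created after this ISO timestamp
    modified_before: new Date().toISOString(), // only items modified before now
    id_after: 'some-previous-id'               // only items whose id sorts strictly after this value
});
```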
@@ -226,6 +232,51 @@ export class FSDB_COLLECTION<T extends Record<string, any>> {
        exts: ['json']
    })
) {
    let item_stat = null;
    if (options.before) {
        item_stat = item_stat ?? await Deno.lstat(entry.path);
        const birthtime = (item_stat.birthtime ?? new Date(0)).toISOString();
        if (birthtime > options.before) {
            continue;
        }
    }

    if (options.after) {
        item_stat = item_stat ?? await Deno.lstat(entry.path);
        if ((item_stat.birthtime ?? new Date(0)).toISOString() < options.after) {
            continue;
        }
    }

    if (options.modified_before) {
        item_stat = item_stat ?? await Deno.lstat(entry.path);
        if ((item_stat.mtime ?? new Date(0)).toISOString() > options.modified_before) {
            continue;
        }
    }

    if (options.modified_after) {
        item_stat = item_stat ?? await Deno.lstat(entry.path);
        if ((item_stat.mtime ?? new Date(0)).toISOString() < options.modified_after) {
            continue;
        }
    }

    let item_id = null;
    if (options.id_before) {
        item_id = item_id ?? entry.name.replace(/\.json$/, '');
        if (item_id >= options.id_before) {
            continue;
        }
    }

    if (options.id_after) {
        item_id = item_id ?? entry.name.replace(/\.json$/, '');
        if (item_id <= options.id_after) {
            continue;
        }
    }

    if (counter < offset) {
        ++counter;
        continue;
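
A note on the comparisons above: before/after/modified_before/modified_after compare ISO-8601 strings produced by toISOString() with plain less-than/greater-than, which works because that fixed-width UTC format sorts lexicographically in chronological order; the id filters rely on plain string ordering of ids the same way. A small standalone illustration:

```ts
// toISOString() always yields a fixed-width UTC form (YYYY-MM-DDTHH:mm:ss.sssZ),
// so lexicographic string comparison matches chronological comparison.
const earlier = new Date('2024-01-01T00:00:00Z').toISOString(); // '2024-01-01T00:00:00.000Z'
const later = new Date('2024-06-15T12:30:00Z').toISOString();   // '2024-06-15T12:30:00.000Z'
console.log(earlier < later); // true
```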
@@ -1,12 +1,7 @@
import * as asserts from '@std/assert';
import * as fsdb from '../fsdb.ts';
import { FSDB_INDEXER_SYMLINKS } from '../indexers.ts';
import { get_data_dir, random_email_address, random_phone_number } from './helpers.ts';
import lurid from '@andyburke/lurid';
import by_email from '../organizers/by_email.ts';
import by_character from '../organizers/by_character.ts';
import by_phone from '../organizers/by_phone.ts';
import { sentence } from 'jsr:@ndaidong/txtgen';

Deno.test({
    name: 'iterate over all items',

@@ -22,73 +17,206 @@ Deno.test({
    id: string;
    email: string;
    phone: string;
    value: string;
    created: string;
    written_by_time?: string;
};

const item_collection: fsdb.FSDB_COLLECTION<ITEM> = new fsdb.FSDB_COLLECTION<ITEM>({
    name: 'test-05-items',
    root: get_data_dir() + '/test-05-items',
    indexers: {
        email: new FSDB_INDEXER_SYMLINKS<ITEM>({
            name: 'email',
            field: 'email',
            organize: by_email
        }),
        phone: new FSDB_INDEXER_SYMLINKS<ITEM>({
            name: 'phone',
            field: 'phone',
            organize: by_phone
        }),
        by_character_test: new FSDB_INDEXER_SYMLINKS<ITEM>({
            name: 'by_character_test',
            organize: by_character,
            get_values_to_index: (item: ITEM) => item.value.split(/\W/).filter((word) => word.length > 3),
            to_many: true
        })
    }
    root: get_data_dir() + '/test-05-items'
});

asserts.assert(item_collection);

const items: ITEM[] = [];
for (let i = 0; i < 500; ++i) {
    const item = {
const item_count: number = 500;
const midpoint: number = Math.floor(item_count / 2);
let first_id = null;
let time_mid = null;
let mid_id = null;
let time_end = null;
let last_id = null;
for (let i = 0; i < item_count; ++i) {
    const item: ITEM = {
        id: lurid(),
        email: random_email_address(),
        phone: random_phone_number(),
        value: sentence()
        created: new Date().toISOString()
    };

    first_id = first_id ?? item.id;

    items.push(item);

    const stored_item: ITEM = await item_collection.create(item);
    item.written_by_time = new Date().toISOString();

    asserts.assertObjectMatch(stored_item, item);

    if (i === midpoint) {
        time_mid = item.written_by_time;
        mid_id = item.id;
    }

    time_end = item.written_by_time;
    last_id = item.id;
}

const LIMIT_MIN = 11;
const LIMIT_MAX = 333;

let offset = 0;
const fetched = [];
let more_to_fetch = true;
let fetch_for_sort_offset = 0;
const fetched_for_sort = [];
let more_to_fetch_for_sorting = true;
do {
    // fuzz the limit
    const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

    const fetched_items = await item_collection.all({
        limit,
        offset
        offset: fetch_for_sort_offset
    });

    fetched.push(...fetched_items);
    offset += fetched_items.length;
    more_to_fetch = fetched_items.length === limit;
} while (more_to_fetch);
    fetched_for_sort.push(...fetched_items);
    fetch_for_sort_offset += fetched_items.length;
    more_to_fetch_for_sorting = fetched_items.length === limit;
} while (more_to_fetch_for_sorting);

const sorted_items = items.sort((lhs, rhs) => lhs.id.localeCompare(rhs.id));
const sorted_fetched = fetched.sort((lhs, rhs) => lhs.id.localeCompare(rhs.id));
const sorted_items = items.sort((lhs, rhs) => lhs.id.localeCompare(rhs.id)).map((item) => {
    return { id: item.id, email: item.email, phone: item.phone, created: item.created };
});
const sorted_fetched = fetched_for_sort.sort((lhs, rhs) => lhs.id.localeCompare(rhs.id));

asserts.assertEquals(sorted_fetched, sorted_items);

asserts.assert(time_mid);
asserts.assert(time_end);

asserts.assert(mid_id);
asserts.assert(last_id);

// test before
let fetch_for_before_offset = 0;
const fetched_for_before = [];
let more_to_fetch_for_before = true;
do {
    // fuzz the limit
    const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

    const fetched_items = await item_collection.all({
        limit,
        offset: fetch_for_before_offset,
        before: time_mid
    });

    fetched_for_before.push(...fetched_items);
    fetch_for_before_offset += fetched_items.length;
    more_to_fetch_for_before = fetched_items.length === limit;
} while (more_to_fetch_for_before);

let newest = new Date(0).toISOString();
asserts.assert(newest);
for (const item of fetched_for_before) {
    const original_item = items.find((_) => _.id === item.id);
    asserts.assert(original_item);
    asserts.assert(original_item.written_by_time);
    if (original_item.written_by_time > newest) {
        newest = original_item.written_by_time;
    }
}

// note: we use less or equal because we don't have the actual file write time
asserts.assertLessOrEqual(newest, time_mid);

// test id_before
let fetch_for_id_before_offset = 0;
const fetched_for_id_before = [];
let more_to_fetch_for_id_before = true;
do {
    // fuzz the limit
    const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

    const fetched_items = await item_collection.all({
        limit,
        offset: fetch_for_id_before_offset,
        id_before: mid_id
    });

    fetched_for_id_before.push(...fetched_items);
    fetch_for_id_before_offset += fetched_items.length;
    more_to_fetch_for_id_before = fetched_items.length === limit;
} while (more_to_fetch_for_id_before);

let newest_id = first_id;
asserts.assert(newest_id);
for (const item of fetched_for_id_before) {
    if (item.id > newest_id) {
        newest_id = item.id;
    }
}

asserts.assertLess(newest_id, mid_id);

// test after
let fetch_for_after_offset = 0;
const fetched_for_after = [];
let more_to_fetch_for_after = true;
do {
    // fuzz the limit
    const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

    const fetched_items = await item_collection.all({
        limit,
        offset: fetch_for_after_offset,
        after: time_mid
    });

    fetched_for_after.push(...fetched_items);
    fetch_for_after_offset += fetched_items.length;
    more_to_fetch_for_after = fetched_items.length === limit;
} while (more_to_fetch_for_after);

let oldest = new Date().toISOString();
asserts.assert(oldest);
for (const item of fetched_for_after) {
    const original_item = items.find((_) => _.id === item.id);
    asserts.assert(original_item);
    asserts.assert(original_item.written_by_time);
    if (original_item.written_by_time < oldest) {
        oldest = original_item.written_by_time;
    }
}

// again with the file write time slop
asserts.assertGreaterOrEqual(oldest, time_mid);

// test id_after
let fetch_for_id_after_offset = 0;
const fetched_for_id_after = [];
let more_to_fetch_for_id_after = true;
do {
    // fuzz the limit
    const limit = Math.floor(Math.random() * (LIMIT_MAX - LIMIT_MIN + 1)) + LIMIT_MIN;

    const fetched_items = await item_collection.all({
        limit,
        offset: fetch_for_id_after_offset,
        id_after: mid_id
    });

    fetched_for_id_after.push(...fetched_items);
    fetch_for_id_after_offset += fetched_items.length;
    more_to_fetch_for_id_after = fetched_items.length === limit;
} while (more_to_fetch_for_id_after);

let oldest_id = last_id;
asserts.assert(oldest_id);
for (const item of fetched_for_id_after) {
    if (item.id < oldest_id) {
        oldest_id = item.id;
    }
}

asserts.assertGreater(oldest_id, mid_id);
}
});