Simplify database interfaces, make tests use pglite

Alex Gleason 2024-09-11 11:48:31 -05:00
parent dc8d09a9da
commit f3ae200833
No known key found for this signature in database
GPG key ID: 7211D1F99744FBB7
25 changed files with 117 additions and 193 deletions


@@ -35,10 +35,10 @@ test:
 
 postgres:
   stage: test
-  script: deno task db:migrate && deno task test
+  script: sleep 1 && deno task test
   services:
     - postgres:16
   variables:
     DITTO_NSEC: nsec1zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zygs4rm7hz
-    DATABASE_URL: postgres://postgres:postgres@postgres:5432/postgres
+    TEST_DATABASE_URL: postgres://postgres:postgres@postgres:5432/postgres
     POSTGRES_HOST_AUTH_METHOD: trust


@@ -9,8 +9,8 @@ import { nostrNow } from '@/utils.ts';
 const signer = new AdminSigner();
 
-const db = await DittoDB.getInstance();
-const eventsDB = new EventsDB(db);
+const { kysely } = await DittoDB.getInstance();
+const eventsDB = new EventsDB(kysely);
 
 const readable = Deno.stdin.readable
   .pipeThrough(new TextDecoderStream())


@@ -6,8 +6,8 @@ import { AdminSigner } from '@/signers/AdminSigner.ts';
 import { EventsDB } from '@/storages/EventsDB.ts';
 import { nostrNow } from '@/utils.ts';
 
-const db = await DittoDB.getInstance();
-const eventsDB = new EventsDB(db);
+const { kysely } = await DittoDB.getInstance();
+const eventsDB = new EventsDB(kysely);
 
 const [pubkeyOrNpub, role] = Deno.args;
 const pubkey = pubkeyOrNpub.startsWith('npub1') ? nip19.decode(pubkeyOrNpub as `npub1${string}`).data : pubkeyOrNpub;


@@ -1,11 +1,4 @@
-import { Conf } from '@/config.ts';
 import { DittoDB } from '@/db/DittoDB.ts';
-import { sleep } from '@/test.ts';
-
-if (Deno.env.get('CI') && Conf.db.dialect === 'postgres') {
-  console.info('Waiting 1 second for postgres to start...');
-  await sleep(1_000);
-}
 
 // This migrates kysely internally.
 const { kysely } = await DittoDB.getInstance();


@@ -9,8 +9,8 @@ import { nip19 } from 'nostr-tools';
 import { DittoDB } from '@/db/DittoDB.ts';
 import { EventsDB } from '@/storages/EventsDB.ts';
 
-const db = await DittoDB.getInstance();
-const eventsDB = new EventsDB(db);
+const { kysely } = await DittoDB.getInstance();
+const eventsDB = new EventsDB(kysely);
 
 interface ImportEventsOpts {
   profilesOnly: boolean;


@@ -18,6 +18,6 @@ try {
 }
 
 const store = await Storages.db();
-const kysely = await DittoDB.getInstance();
+const { kysely } = await DittoDB.getInstance();
 
 await refreshAuthorStats({ pubkey, kysely, store });


@@ -82,7 +82,7 @@ const createTokenController: AppController = async (c) => {
 async function getToken(
   { pubkey, secret, relays = [] }: { pubkey: string; secret?: string; relays?: string[] },
 ): Promise<`token1${string}`> {
-  const kysely = await DittoDB.getInstance();
+  const { kysely } = await DittoDB.getInstance();
 
   const token = generateToken();
   const serverSeckey = generateSecretKey();


@@ -578,7 +578,7 @@ const zappedByController: AppController = async (c) => {
   const id = c.req.param('id');
   const params = c.get('listPagination');
   const store = await Storages.db();
-  const kysely = await DittoDB.getInstance();
+  const { kysely } = await DittoDB.getInstance();
 
   const zaps = await kysely.selectFrom('event_zaps')
     .selectAll()


@@ -222,7 +222,7 @@ async function topicToFilter(
 
 async function getTokenPubkey(token: string): Promise<string | undefined> {
   if (token.startsWith('token1')) {
-    const kysely = await DittoDB.getInstance();
+    const { kysely } = await DittoDB.getInstance();
 
     const { user_pubkey } = await kysely
       .selectFrom('nip46_tokens')


@@ -6,9 +6,11 @@ import { dbAvailableConnectionsGauge, dbPoolSizeGauge } from '@/metrics.ts';
 
 /** Prometheus/OpenMetrics controller. */
 export const metricsController: AppController = async (c) => {
+  const db = await DittoDB.getInstance();
+
   // Update some metrics at request time.
-  dbPoolSizeGauge.set(DittoDB.poolSize);
-  dbAvailableConnectionsGauge.set(DittoDB.availableConnections);
+  dbPoolSizeGauge.set(db.poolSize);
+  dbAvailableConnectionsGauge.set(db.availableConnections);
 
   const metrics = await register.metrics();


@@ -6,57 +6,35 @@ import { FileMigrationProvider, Kysely, Migrator } from 'kysely';
 import { Conf } from '@/config.ts';
 import { DittoPglite } from '@/db/adapters/DittoPglite.ts';
 import { DittoPostgres } from '@/db/adapters/DittoPostgres.ts';
+import { DittoDatabase, DittoDatabaseOpts } from '@/db/DittoDatabase.ts';
 import { DittoTables } from '@/db/DittoTables.ts';
 
 export class DittoDB {
-  private static kysely: Promise<Kysely<DittoTables>> | undefined;
+  private static db: DittoDatabase | undefined;
 
-  static getInstance(): Promise<Kysely<DittoTables>> {
-    if (!this.kysely) {
-      this.kysely = this._getInstance();
+  /** Create (and migrate) the database if it isn't been already, or return the existing connection. */
+  static async getInstance(): Promise<DittoDatabase> {
+    if (!this.db) {
+      this.db = this.create(Conf.databaseUrl, { poolSize: Conf.pg.poolSize });
+      await this.migrate(this.db.kysely);
     }
-    return this.kysely;
+    return this.db;
   }
 
-  static async _getInstance(): Promise<Kysely<DittoTables>> {
-    const { protocol } = new URL(Conf.databaseUrl);
-
-    let kysely: Kysely<DittoTables>;
+  /** Open a new database connection. */
+  static create(databaseUrl: string, opts?: DittoDatabaseOpts): DittoDatabase {
+    const { protocol } = new URL(databaseUrl);
 
     switch (protocol) {
       case 'file:':
       case 'memory:':
-        kysely = await DittoPglite.getInstance();
-        break;
+        return DittoPglite.create(databaseUrl);
       case 'postgres:':
      case 'postgresql:':
-        kysely = await DittoPostgres.getInstance();
-        break;
+        return DittoPostgres.create(databaseUrl, opts);
       default:
         throw new Error('Unsupported database URL.');
     }
-
-    await this.migrate(kysely);
-
-    return kysely;
-  }
-
-  static get poolSize(): number {
-    const { protocol } = new URL(Conf.databaseUrl);
-    if (['postgres:', 'postgresql:'].includes(protocol)) {
-      return DittoPostgres.poolSize;
-    }
-    return 1;
-  }
-
-  static get availableConnections(): number {
-    const { protocol } = new URL(Conf.databaseUrl);
-    if (['postgres:', 'postgresql:'].includes(protocol)) {
-      return DittoPostgres.availableConnections;
-    }
-    return 1;
   }
 
   /** Migrate the database to the latest version. */
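For orientation, a minimal sketch (not part of the diff) of how callers consume the reworked interface: the shared singleton via `getInstance()`, or a standalone connection via `create()`. The in-memory URL is illustrative.

```ts
import { DittoDB } from '@/db/DittoDB.ts';

// Shared singleton: created and migrated on first call, reused afterwards.
const { kysely } = await DittoDB.getInstance();

// Separate, unmigrated connection, e.g. for tests against an in-memory pglite database.
const db = DittoDB.create('memory://', { poolSize: 1 });
await DittoDB.migrate(db.kysely);
```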

src/db/DittoDatabase.ts (new file, 13 additions)

@@ -0,0 +1,13 @@
+import { Kysely } from 'kysely';
+
+import { DittoTables } from '@/db/DittoTables.ts';
+
+export interface DittoDatabase {
+  readonly kysely: Kysely<DittoTables>;
+  readonly poolSize: number;
+  readonly availableConnections: number;
+}
+
+export interface DittoDatabaseOpts {
+  poolSize?: number;
+}


@@ -2,32 +2,23 @@ import { PGlite } from '@electric-sql/pglite';
 import { PgliteDialect } from '@soapbox/kysely-pglite';
 import { Kysely } from 'kysely';
 
-import { Conf } from '@/config.ts';
+import { DittoDatabase } from '@/db/DittoDatabase.ts';
 import { DittoTables } from '@/db/DittoTables.ts';
 import { KyselyLogger } from '@/db/KyselyLogger.ts';
 
 export class DittoPglite {
-  static db: Kysely<DittoTables> | undefined;
-
-  // deno-lint-ignore require-await
-  static async getInstance(): Promise<Kysely<DittoTables>> {
-    if (!this.db) {
-      this.db = new Kysely<DittoTables>({
-        dialect: new PgliteDialect({
-          database: new PGlite(Conf.databaseUrl),
-        }),
-        log: KyselyLogger,
-      }) as Kysely<DittoTables>;
-    }
-
-    return this.db;
-  }
-
-  static get poolSize() {
-    return 1;
-  }
-
-  static get availableConnections(): number {
-    return 1;
+  static create(databaseUrl: string): DittoDatabase {
+    const kysely = new Kysely<DittoTables>({
+      dialect: new PgliteDialect({
+        database: new PGlite(databaseUrl),
+      }),
+      log: KyselyLogger,
+    });
+
+    return {
+      kysely,
+      poolSize: 1,
+      availableConnections: 1,
+    };
   }
 }


@@ -12,29 +12,22 @@ import {
 import { PostgresJSDialectConfig, PostgresJSDriver } from 'kysely-postgres-js';
 import postgres from 'postgres';
 
-import { Conf } from '@/config.ts';
+import { DittoDatabase, DittoDatabaseOpts } from '@/db/DittoDatabase.ts';
 import { DittoTables } from '@/db/DittoTables.ts';
 import { KyselyLogger } from '@/db/KyselyLogger.ts';
 
 export class DittoPostgres {
-  static kysely: Kysely<DittoTables> | undefined;
-  static postgres?: postgres.Sql;
-
-  // deno-lint-ignore require-await
-  static async getInstance(): Promise<Kysely<DittoTables>> {
-    if (!this.postgres) {
-      this.postgres = postgres(Conf.databaseUrl, { max: Conf.pg.poolSize });
-    }
-
-    if (!this.kysely) {
-      this.kysely = new Kysely<DittoTables>({
+  static create(databaseUrl: string, opts?: DittoDatabaseOpts): DittoDatabase {
+    const pg = postgres(databaseUrl, { max: opts?.poolSize });
+
+    const kysely = new Kysely<DittoTables>({
       dialect: {
         createAdapter() {
           return new PostgresAdapter();
         },
         createDriver() {
           return new PostgresJSDriver({
-            postgres: DittoPostgres.postgres as unknown as PostgresJSDialectConfig['postgres'],
+            postgres: pg as unknown as PostgresJSDialectConfig['postgres'],
          });
         },
         createIntrospector(db) {
@@ -46,17 +39,16 @@ export class DittoPostgres {
       },
       log: KyselyLogger,
     });
-    }
 
-    return this.kysely;
-  }
-
-  static get poolSize() {
-    return this.postgres?.connections.open ?? 0;
-  }
-
-  static get availableConnections(): number {
-    return this.postgres?.connections.idle ?? 0;
+    return {
+      kysely,
+      get poolSize() {
+        return pg.connections.open;
+      },
+      get availableConnections() {
+        return pg.connections.idle;
+      },
+    };
   }
 }


@@ -1,19 +1,13 @@
 import { Kysely, sql } from 'kysely';
 
-import { Conf } from '@/config.ts';
-
 export async function up(db: Kysely<any>): Promise<void> {
-  if (Conf.db.dialect === 'postgres') {
   await db.schema.createTable('nostr_pgfts')
     .ifNotExists()
     .addColumn('event_id', 'text', (c) => c.primaryKey().references('nostr_events.id').onDelete('cascade'))
     .addColumn('search_vec', sql`tsvector`, (c) => c.notNull())
     .execute();
-  }
 }
 
 export async function down(db: Kysely<any>): Promise<void> {
-  if (Conf.db.dialect === 'postgres') {
   await db.schema.dropTable('nostr_pgfts').ifExists().execute();
-  }
 }


@@ -1,9 +1,6 @@
 import { Kysely } from 'kysely';
 
-import { Conf } from '@/config.ts';
-
 export async function up(db: Kysely<any>): Promise<void> {
-  if (Conf.db.dialect === 'postgres') {
   await db.schema
     .createIndex('nostr_pgfts_gin_search_vec')
     .ifNotExists()
@@ -11,11 +8,8 @@ export async function up(db: Kysely<any>): Promise<void> {
     .using('gin')
     .column('search_vec')
     .execute();
-  }
 }
 
 export async function down(db: Kysely<any>): Promise<void> {
-  if (Conf.db.dialect === 'postgres') {
   await db.schema.dropIndex('nostr_pgfts_gin_search_vec').ifExists().execute();
-  }
 }


@@ -1,10 +1,6 @@
 import { Kysely, sql } from 'kysely';
 
-import { Conf } from '@/config.ts';
-
 export async function up(db: Kysely<any>): Promise<void> {
-  if (Conf.db.dialect !== 'postgres') return;
-
   // Create new table and indexes.
   await db.schema
     .createTable('nostr_events_new')


@@ -20,7 +20,7 @@ export const signerMiddleware: AppMiddleware = async (c, next) => {
 
   if (bech32.startsWith('token1')) {
     try {
-      const kysely = await DittoDB.getInstance();
+      const { kysely } = await DittoDB.getInstance();
 
       const { user_pubkey, server_seckey, relays } = await kysely
         .selectFrom('nip46_tokens')


@@ -1,7 +1,7 @@
 import { assertEquals } from '@std/assert';
 import { generateSecretKey } from 'nostr-tools';
 
-import { createTestDB, genEvent, getTestDB } from '@/test.ts';
+import { createTestDB, genEvent } from '@/test.ts';
 import { handleZaps } from '@/pipeline.ts';
 
 Deno.test('store one zap receipt in nostr_events; convert it into event_zaps table format and store it', async () => {
@@ -58,7 +58,7 @@ Deno.test('store one zap receipt in nostr_events; convert it into event_zaps tab
 // If no error happens = ok
 
 Deno.test('zap receipt does not have a "description" tag', async () => {
-  await using db = await getTestDB();
+  await using db = await createTestDB();
   const kysely = db.kysely;
 
   const sk = generateSecretKey();
@ -71,7 +71,7 @@ Deno.test('zap receipt does not have a "description" tag', async () => {
 });
 
 Deno.test('zap receipt does not have a zap request stringified value in the "description" tag', async () => {
-  await using db = await getTestDB();
+  await using db = await createTestDB();
   const kysely = db.kysely;
 
   const sk = generateSecretKey();
@ -84,7 +84,7 @@ Deno.test('zap receipt does not have a zap request stringified value in the "des
 });
 
 Deno.test('zap receipt does not have a "bolt11" tag', async () => {
-  await using db = await getTestDB();
+  await using db = await createTestDB();
   const kysely = db.kysely;
 
   const sk = generateSecretKey();
@@ -103,7 +103,7 @@ Deno.test('zap receipt does not have a "bolt11" tag', async () => {
 });
 
 Deno.test('zap request inside zap receipt does not have an "e" tag', async () => {
-  await using db = await getTestDB();
+  await using db = await createTestDB();
   const kysely = db.kysely;
 
   const sk = generateSecretKey();


@@ -53,7 +53,7 @@ async function handleEvent(event: DittoEvent, signal: AbortSignal): Promise<void
     throw new RelayError('blocked', 'user is disabled');
   }
 
-  const kysely = await DittoDB.getInstance();
+  const { kysely } = await DittoDB.getInstance();
 
   await Promise.all([
     storeEvent(event, signal),
@@ -104,7 +104,7 @@ async function existsInDB(event: DittoEvent): Promise<boolean> {
 async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise<void> {
   await hydrateEvents({ events: [event], store: await Storages.db(), signal });
 
-  const kysely = await DittoDB.getInstance();
+  const { kysely } = await DittoDB.getInstance();
   const domain = await kysely
     .selectFrom('pubkey_domains')
     .select('domain')
@@ -118,7 +118,7 @@ async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise<voi
 async function storeEvent(event: DittoEvent, signal?: AbortSignal): Promise<undefined> {
   if (NKinds.ephemeral(event.kind)) return;
 
   const store = await Storages.db();
-  const kysely = await DittoDB.getInstance();
+  const { kysely } = await DittoDB.getInstance();
   await updateStats({ event, store, kysely }).catch(debug);
   await store.event(event, { signal });
@@ -146,7 +146,7 @@ async function parseMetadata(event: NostrEvent, signal: AbortSignal): Promise<vo
 
   // Track pubkey domain.
   try {
-    const kysely = await DittoDB.getInstance();
+    const { kysely } = await DittoDB.getInstance();
     const { domain } = parseNip05(nip05);
 
     await sql`


@@ -20,8 +20,8 @@ export class Storages {
   public static async db(): Promise<EventsDB> {
     if (!this._db) {
       this._db = (async () => {
-        const db = await DittoDB.getInstance();
-        const store = new EventsDB(db);
+        const { kysely } = await DittoDB.getInstance();
+        const store = new EventsDB(kysely);
         await seedZapSplits(store);
         return store;
       })();


@@ -18,7 +18,7 @@ interface HydrateOpts {
 
 /** Hydrate events using the provided storage. */
 async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
-  const { events, store, signal, kysely = await DittoDB.getInstance() } = opts;
+  const { events, store, signal, kysely = (await DittoDB.getInstance()).kysely } = opts;
 
   if (!events.length) {
     return events;


@@ -1,16 +1,9 @@
-import { PGlite } from '@electric-sql/pglite';
 import { NostrEvent } from '@nostrify/nostrify';
-import { PgliteDialect } from '@soapbox/kysely-pglite';
 import { finalizeEvent, generateSecretKey } from 'nostr-tools';
-import { Kysely } from 'kysely';
-import { PostgresJSDialect, PostgresJSDialectConfig } from 'kysely-postgres-js';
-import postgres from 'postgres';
 
 import { Conf } from '@/config.ts';
 import { DittoDB } from '@/db/DittoDB.ts';
-import { DittoTables } from '@/db/DittoTables.ts';
 import { purifyEvent } from '@/storages/hydrate.ts';
-import { KyselyLogger } from '@/db/KyselyLogger.ts';
 import { EventsDB } from '@/storages/EventsDB.ts';
 
 /** Import an event fixture by name in tests. */
@@ -41,31 +34,7 @@ export function genEvent(t: Partial<NostrEvent> = {}, sk: Uint8Array = generateS
 /** Create an database for testing. */
 export const createTestDB = async (databaseUrl = Conf.testDatabaseUrl) => {
   const { protocol } = new URL(databaseUrl);
-
-  const kysely: Kysely<DittoTables> = (() => {
-    switch (protocol) {
-      case 'postgres:':
-      case 'postgresql:':
-        return new Kysely({
-          // @ts-ignore Kysely version mismatch.
-          dialect: new PostgresJSDialect({
-            postgres: postgres(databaseUrl, {
-              max: Conf.pg.poolSize,
-            }) as unknown as PostgresJSDialectConfig['postgres'],
-          }),
-          log: KyselyLogger,
-        });
-      case 'file:':
-      case 'memory:':
-        return new Kysely({
-          dialect: new PgliteDialect({
-            database: new PGlite(databaseUrl),
-          }),
-        });
-      default:
-        throw new Error(`Unsupported database URL protocol: ${protocol}`);
-    }
-  })();
+  const { kysely } = DittoDB.create(databaseUrl, { poolSize: 1 });
 
   await DittoDB.migrate(kysely);
 
   const store = new EventsDB(kysely);
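For context, a rough sketch (not part of the diff) of how a test is expected to use the simplified helper. `createTestDB`, `assertEquals`, and the `kysely` member are shown in the changed files above; the assumption that a freshly migrated test database has an empty `nostr_events` table is mine.

```ts
import { assertEquals } from '@std/assert';

import { createTestDB } from '@/test.ts';

Deno.test('example: each test gets its own migrated database', async () => {
  // Defaults to Conf.testDatabaseUrl; `await using` disposes the connection
  // when the test block exits.
  await using db = await createTestDB();
  const kysely = db.kysely;

  // The database is migrated by createTestDB, so core tables already exist.
  const rows = await kysely.selectFrom('nostr_events').selectAll().execute();
  assertEquals(rows.length, 0);
});
```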


@@ -70,7 +70,7 @@ export async function updateTrendingTags(
   aliases?: string[],
 ) {
   console.info(`Updating trending ${l}...`);
-  const db = await DittoDB.getInstance();
+  const { kysely } = await DittoDB.getInstance();
   const signal = AbortSignal.timeout(1000);
 
   const yesterday = Math.floor((Date.now() - Time.days(1)) / 1000);
@@ -79,7 +79,7 @@ export async function updateTrendingTags(
   const tagNames = aliases ? [tagName, ...aliases] : [tagName];
 
   try {
-    const trends = await getTrendingTagValues(db, tagNames, {
+    const trends = await getTrendingTagValues(kysely, tagNames, {
       kinds,
       since: yesterday,
       until: now,


@@ -1,3 +1,5 @@
+/// <reference lib="webworker" />
+
 import Debug from '@soapbox/stickynotes/debug';
 import * as Comlink from 'comlink';