Merge branch 'apistore' into 'main'

Replace pipeline with DittoAPIStore

See merge request soapbox-pub/ditto!685
This commit is contained in:
Alex Gleason 2025-02-24 02:54:42 +00:00
commit d783886726
92 changed files with 1820 additions and 1656 deletions

View file

@ -62,8 +62,8 @@
"@isaacs/ttlcache": "npm:@isaacs/ttlcache@^1.4.1",
"@negrel/webpush": "jsr:@negrel/webpush@^0.3.0",
"@noble/secp256k1": "npm:@noble/secp256k1@^2.0.0",
"@nostrify/db": "jsr:@nostrify/db@^0.39.3",
"@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.38.1",
"@nostrify/db": "jsr:@nostrify/db@^0.39.4",
"@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.39.0",
"@nostrify/policies": "jsr:@nostrify/policies@^0.36.1",
"@nostrify/types": "jsr:@nostrify/types@^0.36.0",
"@scure/base": "npm:@scure/base@^1.1.6",

42
deno.lock generated
View file

@ -31,15 +31,14 @@
"jsr:@hono/hono@^4.4.6": "4.6.15",
"jsr:@negrel/http-ece@0.6.0": "0.6.0",
"jsr:@negrel/webpush@0.3": "0.3.0",
"jsr:@nostrify/db@~0.39.3": "0.39.3",
"jsr:@nostrify/db@~0.39.4": "0.39.4",
"jsr:@nostrify/nostrify@0.31": "0.31.0",
"jsr:@nostrify/nostrify@0.32": "0.32.0",
"jsr:@nostrify/nostrify@0.36": "0.36.2",
"jsr:@nostrify/nostrify@0.38": "0.38.1",
"jsr:@nostrify/nostrify@0.39": "0.39.0",
"jsr:@nostrify/nostrify@~0.22.1": "0.22.5",
"jsr:@nostrify/nostrify@~0.22.4": "0.22.4",
"jsr:@nostrify/nostrify@~0.22.5": "0.22.5",
"jsr:@nostrify/nostrify@~0.38.1": "0.38.1",
"jsr:@nostrify/policies@0.33": "0.33.0",
"jsr:@nostrify/policies@0.33.1": "0.33.1",
"jsr:@nostrify/policies@0.34": "0.34.0",
@ -138,6 +137,7 @@
"npm:type-fest@^4.3.0": "4.18.2",
"npm:unfurl.js@^6.4.0": "6.4.0",
"npm:websocket-ts@^2.1.5": "2.1.5",
"npm:websocket-ts@^2.2.1": "2.2.1",
"npm:zod@^3.23.8": "3.23.8"
},
"jsr": {
@ -363,10 +363,10 @@
"jsr:@std/path@0.224.0"
]
},
"@nostrify/db@0.39.3": {
"integrity": "d1f1104316b33e0fd3c263086b325ee49f86859abc1a966b43bb9f9a21c15429",
"@nostrify/db@0.39.4": {
"integrity": "53fecea3b67394cf4f52795f89d1d065bdeb0627b8655cc7fc3a89d6b21adf01",
"dependencies": [
"jsr:@nostrify/nostrify@~0.38.1",
"jsr:@nostrify/nostrify@0.39",
"jsr:@nostrify/types@0.36",
"npm:kysely@~0.27.3",
"npm:nostr-tools@^2.10.4"
@ -383,7 +383,7 @@
"npm:kysely@~0.27.3",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.5.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -397,7 +397,7 @@
"npm:kysely@~0.27.3",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -412,7 +412,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -425,7 +425,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -438,7 +438,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -453,7 +453,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -466,7 +466,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -481,13 +481,14 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.10.4",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
"@nostrify/nostrify@0.38.1": {
"integrity": "087d1be0d5c46420e6040b07c8cfb1a3ecb9808f23de54d22dd64d3eed001bce",
"@nostrify/nostrify@0.39.0": {
"integrity": "f7e052c32b8b9bafe0f2517dcf090e7d3df5aed38452a0cf61ade817d34067ee",
"dependencies": [
"jsr:@nostrify/nostrify@0.39",
"jsr:@nostrify/types@0.36",
"jsr:@std/crypto",
"jsr:@std/encoding@~0.224.1",
@ -496,7 +497,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.10.4",
"npm:websocket-ts",
"npm:websocket-ts@^2.2.1",
"npm:zod"
]
},
@ -1789,6 +1790,9 @@
"websocket-ts@2.1.5": {
"integrity": "sha512-rCNl9w6Hsir1azFm/pbjBEFzLD/gi7Th5ZgOxMifB6STUfTSovYAzryWw0TRvSZ1+Qu1Z5Plw4z42UfTNA9idA=="
},
"websocket-ts@2.2.1": {
"integrity": "sha512-YKPDfxlK5qOheLZ2bTIiktZO1bpfGdNCPJmTEaPW7G9UXI1GKjDdeacOrsULUS000OPNxDVOyAuKLuIWPqWM0Q=="
},
"whatwg-encoding@3.1.1": {
"integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
"dependencies": [
@ -2460,8 +2464,8 @@
"jsr:@gfx/canvas-wasm@~0.4.2",
"jsr:@hono/hono@^4.4.6",
"jsr:@negrel/webpush@0.3",
"jsr:@nostrify/db@~0.39.3",
"jsr:@nostrify/nostrify@~0.38.1",
"jsr:@nostrify/db@~0.39.4",
"jsr:@nostrify/nostrify@0.39",
"jsr:@nostrify/policies@~0.36.1",
"jsr:@nostrify/types@0.36",
"jsr:@soapbox/kysely-pglite@1",

View file

@ -6,6 +6,7 @@ export interface DittoDB extends AsyncDisposable {
readonly kysely: Kysely<DittoTables>;
readonly poolSize: number;
readonly availableConnections: number;
migrate(): Promise<void>;
listen(channel: string, callback: (payload: string) => void): void;
}

View file

@ -0,0 +1,52 @@
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

import { logi } from '@soapbox/logi';
import { FileMigrationProvider, type Kysely, Migrator } from 'kysely';

import type { JsonValue } from '@std/json';
/** Runs Kysely file-based migrations for Ditto's Postgres-compatible databases. */
export class DittoPgMigrator {
  private migrator: Migrator;

  // deno-lint-ignore no-explicit-any
  constructor(private kysely: Kysely<any>) {
    this.migrator = new Migrator({
      db: this.kysely,
      provider: new FileMigrationProvider({
        fs,
        path,
        // fileURLToPath converts the resolved file: URL into a real OS path.
        // (URL#pathname is wrong on Windows, e.g. it yields "/C:/...".)
        migrationFolder: fileURLToPath(import.meta.resolve('./migrations')),
      }),
    });
  }

  /**
   * Migrate the database to the latest version.
   *
   * Logs progress via `logi`; throws `Error('Migration failed.')` if any
   * migration fails (the underlying error is logged, not rethrown).
   */
  async migrate(): Promise<void> {
    logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Running migrations...', state: 'started' });
    const { results, error } = await this.migrator.migrateToLatest();

    if (error) {
      logi({
        level: 'fatal',
        ns: 'ditto.db.migration',
        msg: 'Migration failed.',
        state: 'failed',
        results: results as unknown as JsonValue,
        // Kysely types `error` as unknown; only attach it when it's an Error.
        error: error instanceof Error ? error : null,
      });
      throw new Error('Migration failed.');
    }

    if (!results?.length) {
      logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Everything up-to-date.', state: 'skipped' });
    } else {
      logi({
        level: 'info',
        ns: 'ditto.db.migration',
        msg: 'Migrations finished!',
        state: 'migrated',
        results: results as unknown as JsonValue,
      });
    }
  }
}

View file

@ -2,8 +2,9 @@ import { assertEquals } from '@std/assert';
import { DittoPglite } from './DittoPglite.ts';
Deno.test('DittoPglite.create', async () => {
const db = DittoPglite.create('memory://');
Deno.test('DittoPglite', async () => {
const db = new DittoPglite('memory://');
await db.migrate();
assertEquals(db.poolSize, 1);
assertEquals(db.availableConnections, 1);

View file

@ -4,42 +4,49 @@ import { PgliteDialect } from '@soapbox/kysely-pglite';
import { Kysely } from 'kysely';
import { KyselyLogger } from '../KyselyLogger.ts';
import { DittoPgMigrator } from '../DittoPgMigrator.ts';
import { isWorker } from '../utils/worker.ts';
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';
export class DittoPglite {
static create(databaseUrl: string, opts?: DittoDBOpts): DittoDB {
export class DittoPglite implements DittoDB {
readonly poolSize = 1;
readonly availableConnections = 1;
readonly kysely: Kysely<DittoTables>;
private pglite: PGlite;
private migrator: DittoPgMigrator;
constructor(databaseUrl: string, opts?: DittoDBOpts) {
const url = new URL(databaseUrl);
if (url.protocol === 'file:' && isWorker()) {
throw new Error('PGlite is not supported in worker threads.');
}
const pglite = new PGlite(databaseUrl, {
this.pglite = new PGlite(databaseUrl, {
extensions: { pg_trgm },
debug: opts?.debug,
});
const kysely = new Kysely<DittoTables>({
dialect: new PgliteDialect({ database: pglite }),
this.kysely = new Kysely<DittoTables>({
dialect: new PgliteDialect({ database: this.pglite }),
log: KyselyLogger,
});
const listen = (channel: string, callback: (payload: string) => void): void => {
pglite.listen(channel, callback);
};
this.migrator = new DittoPgMigrator(this.kysely);
}
return {
kysely,
poolSize: 1,
availableConnections: 1,
listen,
[Symbol.asyncDispose]: async () => {
await pglite.close();
await kysely.destroy();
},
};
listen(channel: string, callback: (payload: string) => void): void {
this.pglite.listen(channel, callback);
}
async migrate(): Promise<void> {
await this.migrator.migrate();
}
async [Symbol.asyncDispose](): Promise<void> {
await this.kysely.destroy();
}
}

View file

@ -1,6 +1,6 @@
import { DittoPolyPg } from './DittoPolyPg.ts';
Deno.test('DittoPolyPg', async () => {
const db = DittoPolyPg.create('memory://');
await DittoPolyPg.migrate(db.kysely);
const db = new DittoPolyPg('memory://');
await db.migrate();
});

View file

@ -1,70 +1,53 @@
import fs from 'node:fs/promises';
import path from 'node:path';
import { logi } from '@soapbox/logi';
import { FileMigrationProvider, type Kysely, Migrator } from 'kysely';
import { DittoPglite } from './DittoPglite.ts';
import { DittoPostgres } from './DittoPostgres.ts';
import type { JsonValue } from '@std/json';
import type { Kysely } from 'kysely';
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';
/** Creates either a PGlite or Postgres connection depending on the databaseUrl. */
export class DittoPolyPg {
export class DittoPolyPg implements DittoDB {
private adapter: DittoDB;
/** Open a new database connection. */
static create(databaseUrl: string, opts?: DittoDBOpts): DittoDB {
constructor(databaseUrl: string, opts?: DittoDBOpts) {
const { protocol } = new URL(databaseUrl);
switch (protocol) {
case 'file:':
case 'memory:':
return DittoPglite.create(databaseUrl, opts);
this.adapter = new DittoPglite(databaseUrl, opts);
break;
case 'postgres:':
case 'postgresql:':
return DittoPostgres.create(databaseUrl, opts);
this.adapter = new DittoPostgres(databaseUrl, opts);
break;
default:
throw new Error('Unsupported database URL.');
}
}
/** Migrate the database to the latest version. */
static async migrate(kysely: Kysely<DittoTables>) {
const migrator = new Migrator({
db: kysely,
provider: new FileMigrationProvider({
fs,
path,
migrationFolder: new URL(import.meta.resolve('../migrations')).pathname,
}),
});
get kysely(): Kysely<DittoTables> {
return this.adapter.kysely;
}
logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Running migrations...', state: 'started' });
const { results, error } = await migrator.migrateToLatest();
async migrate(): Promise<void> {
await this.adapter.migrate();
}
if (error) {
logi({
level: 'fatal',
ns: 'ditto.db.migration',
msg: 'Migration failed.',
state: 'failed',
results: results as unknown as JsonValue,
error: error instanceof Error ? error : null,
});
throw new Error('Migration failed.');
} else {
if (!results?.length) {
logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Everything up-to-date.', state: 'skipped' });
} else {
logi({
level: 'info',
ns: 'ditto.db.migration',
msg: 'Migrations finished!',
state: 'migrated',
results: results as unknown as JsonValue,
});
}
}
listen(channel: string, callback: (payload: string) => void): void {
this.adapter.listen(channel, callback);
}
get poolSize(): number {
return this.adapter.poolSize;
}
get availableConnections(): number {
return this.adapter.availableConnections;
}
async [Symbol.asyncDispose](): Promise<void> {
await this.adapter[Symbol.asyncDispose]();
}
}

View file

@ -12,53 +12,54 @@ import {
import { type PostgresJSDialectConfig, PostgresJSDriver } from 'kysely-postgres-js';
import postgres from 'postgres';
import { DittoPgMigrator } from '../DittoPgMigrator.ts';
import { KyselyLogger } from '../KyselyLogger.ts';
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';
export class DittoPostgres {
static create(databaseUrl: string, opts?: DittoDBOpts): DittoDB {
const pg = postgres(databaseUrl, { max: opts?.poolSize });
export class DittoPostgres implements DittoDB {
private pg: ReturnType<typeof postgres>;
private migrator: DittoPgMigrator;
const kysely = new Kysely<DittoTables>({
readonly kysely: Kysely<DittoTables>;
constructor(databaseUrl: string, opts?: DittoDBOpts) {
this.pg = postgres(databaseUrl, { max: opts?.poolSize });
this.kysely = new Kysely<DittoTables>({
dialect: {
createAdapter() {
return new PostgresAdapter();
},
createDriver() {
return new PostgresJSDriver({
postgres: pg as unknown as PostgresJSDialectConfig['postgres'],
});
},
createIntrospector(db) {
return new PostgresIntrospector(db);
},
createQueryCompiler() {
return new DittoPostgresQueryCompiler();
},
createAdapter: () => new PostgresAdapter(),
createDriver: () =>
new PostgresJSDriver({ postgres: this.pg as unknown as PostgresJSDialectConfig['postgres'] }),
createIntrospector: (db) => new PostgresIntrospector(db),
createQueryCompiler: () => new DittoPostgresQueryCompiler(),
},
log: KyselyLogger,
});
const listen = (channel: string, callback: (payload: string) => void): void => {
pg.listen(channel, callback);
};
this.migrator = new DittoPgMigrator(this.kysely);
}
return {
kysely,
get poolSize() {
return pg.connections.open;
},
get availableConnections() {
return pg.connections.idle;
},
listen,
[Symbol.asyncDispose]: async () => {
await pg.end();
await kysely.destroy();
},
};
listen(channel: string, callback: (payload: string) => void): void {
this.pg.listen(channel, callback);
}
async migrate(): Promise<void> {
await this.migrator.migrate();
}
get poolSize(): number {
return this.pg.connections.open;
}
get availableConnections(): number {
return this.pg.connections.idle;
}
async [Symbol.asyncDispose](): Promise<void> {
await this.pg.end();
await this.kysely.destroy();
}
}

View file

@ -3,6 +3,8 @@ import { DummyDB } from './DummyDB.ts';
Deno.test('DummyDB', async () => {
const db = new DummyDB();
await db.migrate();
const rows = await db.kysely.selectFrom('nostr_events').selectAll().execute();
assertEquals(rows, []);

View file

@ -23,6 +23,10 @@ export class DummyDB implements DittoDB {
// noop
}
migrate(): Promise<void> {
return Promise.resolve();
}
[Symbol.asyncDispose](): Promise<void> {
return Promise.resolve();
}

View file

@ -0,0 +1,16 @@
import { type Kysely, sql } from 'kysely';
/**
 * Rename the leftover `nostr_events_new_pkey` index to the conventional
 * `nostr_events_pkey`, if it exists (idempotent: skipped when absent).
 */
export async function up(db: Kysely<unknown>): Promise<void> {
  const result = await sql<{ count: number | string }>`
    SELECT COUNT(*) as count
    FROM pg_indexes
    WHERE indexname = 'nostr_events_new_pkey'
  `.execute(db);

  // COUNT(*) is a Postgres bigint, which drivers commonly return as a string;
  // coerce explicitly instead of relying on JS implicit coercion in `>`.
  if (Number(result.rows[0]?.count ?? 0) > 0) {
    await sql`ALTER INDEX nostr_events_new_pkey RENAME TO nostr_events_pkey;`.execute(db);
  }
}
// Intentional no-op: `up` only renames an index when it exists, and the
// previous name is not restored on rollback.
export async function down(_db: Kysely<unknown>): Promise<void> {
}

View file

@ -1,19 +1,24 @@
import { DittoConf } from '@ditto/conf';
import { ApplicationServer, PushMessageOptions, PushSubscriber, PushSubscription } from '@negrel/webpush';
import { NStore } from '@nostrify/types';
import { logi } from '@soapbox/logi';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
export class DittoPush {
static _server: Promise<ApplicationServer | undefined> | undefined;
interface DittoPushOpts {
conf: DittoConf;
relay: NStore;
}
static get server(): Promise<ApplicationServer | undefined> {
if (!this._server) {
this._server = (async () => {
const store = await Storages.db();
const meta = await getInstanceMetadata(store);
const keys = await Conf.vapidKeys;
export class DittoPush {
private server: Promise<ApplicationServer | undefined>;
constructor(opts: DittoPushOpts) {
const { conf, relay } = opts;
this.server = (async () => {
const meta = await getInstanceMetadata(relay);
const keys = await conf.vapidKeys;
if (keys) {
return await ApplicationServer.new({
@ -30,10 +35,7 @@ export class DittoPush {
})();
}
return this._server;
}
static async push(
async push(
subscription: PushSubscription,
json: object,
opts: PushMessageOptions = {},

View file

@ -1,7 +1,8 @@
import { DittoConf } from '@ditto/conf';
import { DittoDB } from '@ditto/db';
import { DittoDB, DittoPolyPg } from '@ditto/db';
import { paginationMiddleware, tokenMiddleware, userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoApp, type DittoEnv } from '@ditto/mastoapi/router';
import { relayPoolRelaysSizeGauge, relayPoolSubscriptionsSizeGauge } from '@ditto/metrics';
import { type DittoTranslator } from '@ditto/translators';
import { type Context, Handler, Input as HonoInput, MiddlewareHandler } from '@hono/hono';
import { every } from '@hono/hono/combine';
@ -9,11 +10,13 @@ import { cors } from '@hono/hono/cors';
import { serveStatic } from '@hono/hono/deno';
import { NostrEvent, NostrSigner, NRelay, NUploader } from '@nostrify/nostrify';
import '@/startup.ts';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { cron } from '@/cron.ts';
import { startFirehose } from '@/firehose.ts';
import { DittoAPIStore } from '@/storages/DittoAPIStore.ts';
import { DittoPgStore } from '@/storages/DittoPgStore.ts';
import { DittoPool } from '@/storages/DittoPool.ts';
import { Time } from '@/utils/time.ts';
import { seedZapSplits } from '@/utils/zap-split.ts';
import {
accountController,
@ -145,6 +148,7 @@ import { rateLimitMiddleware } from '@/middleware/rateLimitMiddleware.ts';
import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts';
import { translatorMiddleware } from '@/middleware/translatorMiddleware.ts';
import { logiMiddleware } from '@/middleware/logiMiddleware.ts';
import { DittoRelayStore } from '@/storages/DittoRelayStore.ts';
export interface AppEnv extends DittoEnv {
Variables: {
@ -176,14 +180,42 @@ type AppMiddleware = MiddlewareHandler<AppEnv>;
// deno-lint-ignore no-explicit-any
type AppController<P extends string = any> = Handler<AppEnv, P, HonoInput, Response | Promise<Response>>;
const app = new DittoApp({
conf: Conf,
db: await Storages.database(),
relay: await Storages.db(),
}, {
strict: false,
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl, {
poolSize: conf.pg.poolSize,
debug: conf.pgliteDebug,
});
await db.migrate();
const pgstore = new DittoPgStore({
db,
pubkey: await conf.signer.getPublicKey(),
timeout: conf.db.timeouts.default,
notify: conf.notifyEnabled,
});
const pool = new DittoPool({ conf, relay: pgstore });
const relay = new DittoRelayStore({ db, conf, relay: pgstore });
await seedZapSplits(relay);
if (conf.firehoseEnabled) {
startFirehose({
pool,
relay,
concurrency: conf.firehoseConcurrency,
kinds: conf.firehoseKinds,
});
}
if (conf.cronEnabled) {
cron({ conf, db, relay });
}
const app = new DittoApp({ conf, db, relay }, { strict: false });
/** User-provided files in the gitignored `public/` directory. */
const publicFiles = serveStatic({ root: './public/' });
/** Static files provided by the Ditto repo, checked into git. */
@ -209,7 +241,17 @@ app.use('/nodeinfo/*', metricsMiddleware, ratelimit, logiMiddleware);
app.use('/oauth/*', metricsMiddleware, ratelimit, logiMiddleware);
app.get('/api/v1/streaming', socketTokenMiddleware, metricsMiddleware, ratelimit, streamingController);
app.get('/relay', metricsMiddleware, ratelimit, relayController);
app.get(
'/relay',
(c, next) => {
c.set('relay', new DittoAPIStore({ relay, pool }));
return next();
},
metricsMiddleware,
ratelimit,
relayController,
);
app.use(
cspMiddleware(),
@ -218,7 +260,17 @@ app.use(
uploaderMiddleware,
);
app.get('/metrics', metricsController);
app.get('/metrics', async (_c, next) => {
relayPoolRelaysSizeGauge.reset();
relayPoolSubscriptionsSizeGauge.reset();
for (const relay of pool.relays.values()) {
relayPoolRelaysSizeGauge.inc({ ready_state: relay.socket.readyState });
relayPoolSubscriptionsSizeGauge.inc(relay.subscriptions.length);
}
await next();
}, metricsController);
app.get(
'/.well-known/nodeinfo',

View file

@ -1,14 +1,14 @@
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n, NStore } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { getAuthor, getFollowedPubkeys } from '@/queries.ts';
import { booleanParamSchema, fileSchema } from '@/schema.ts';
import { Storages } from '@/storages.ts';
import { uploadFile } from '@/utils/upload.ts';
import { nostrNow } from '@/utils.ts';
import { assertAuthenticated, createEvent, paginated, parseBody, updateEvent, updateListEvent } from '@/utils/api.ts';
import { assertAuthenticated, createEvent, parseBody, updateEvent, updateListEvent } from '@/utils/api.ts';
import { extractIdentifier, lookupAccount, lookupPubkey } from '@/utils/lookup.ts';
import { renderAccounts, renderEventAccounts, renderStatuses } from '@/views.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
@ -54,7 +54,7 @@ const verifyCredentialsController: AppController = async (c) => {
const pubkey = await signer.getPublicKey();
const [author, [settingsEvent]] = await Promise.all([
getAuthor(pubkey, { signal: AbortSignal.timeout(5000) }),
getAuthor(pubkey, c.var),
relay.query([{
kinds: [30078],
@ -72,8 +72,8 @@ const verifyCredentialsController: AppController = async (c) => {
}
const account = author
? await renderAccount(author, { withSource: true, settingsStore })
: await accountFromPubkey(pubkey, { withSource: true, settingsStore });
? renderAccount(author, { withSource: true, settingsStore })
: accountFromPubkey(pubkey, { withSource: true, settingsStore });
return c.json(account);
};
@ -81,7 +81,7 @@ const verifyCredentialsController: AppController = async (c) => {
const accountController: AppController = async (c) => {
const pubkey = c.req.param('pubkey');
const event = await getAuthor(pubkey);
const event = await getAuthor(pubkey, c.var);
if (event) {
assertAuthenticated(c, event);
return c.json(await renderAccount(event));
@ -97,7 +97,7 @@ const accountLookupController: AppController = async (c) => {
return c.json({ error: 'Missing `acct` query parameter.' }, 422);
}
const event = await lookupAccount(decodeURIComponent(acct));
const event = await lookupAccount(decodeURIComponent(acct), c.var);
if (event) {
assertAuthenticated(c, event);
return c.json(await renderAccount(event));
@ -131,10 +131,10 @@ const accountSearchController: AppController = async (c) => {
const query = decodeURIComponent(result.data.q);
const lookup = extractIdentifier(query);
const event = await lookupAccount(lookup ?? query);
const event = await lookupAccount(lookup ?? query, c.var);
if (!event && lookup) {
const pubkey = await lookupPubkey(lookup);
const pubkey = await lookupPubkey(lookup, c.var);
return c.json(pubkey ? [accountFromPubkey(pubkey)] : []);
}
@ -143,7 +143,7 @@ const accountSearchController: AppController = async (c) => {
if (event) {
events.push(event);
} else {
const following = viewerPubkey ? await getFollowedPubkeys(viewerPubkey) : new Set<string>();
const following = viewerPubkey ? await getFollowedPubkeys(relay, viewerPubkey, signal) : new Set<string>();
const authors = [...await getPubkeysBySearch(db.kysely, { q: query, limit, offset: 0, following })];
const profiles = await relay.query([{ kinds: [0], authors, limit }], { signal });
@ -155,14 +155,14 @@ const accountSearchController: AppController = async (c) => {
}
}
const accounts = await hydrateEvents({ events, relay, signal })
const accounts = await hydrateEvents({ ...c.var, events })
.then((events) => events.map((event) => renderAccount(event)));
return c.json(accounts);
};
const relationshipsController: AppController = async (c) => {
const { user } = c.var;
const { relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const ids = z.array(z.string()).safeParse(c.req.queries('id[]'));
@ -171,11 +171,9 @@ const relationshipsController: AppController = async (c) => {
return c.json({ error: 'Missing `id[]` query parameters.' }, 422);
}
const db = await Storages.db();
const [sourceEvents, targetEvents] = await Promise.all([
db.query([{ kinds: [3, 10000], authors: [pubkey] }]),
db.query([{ kinds: [3], authors: ids.data }]),
relay.query([{ kinds: [3, 10000], authors: [pubkey] }]),
relay.query([{ kinds: [3], authors: ids.data }]),
]);
const event3 = sourceEvents.find((event) => event.kind === 3 && event.pubkey === pubkey);
@ -267,7 +265,7 @@ const accountStatusesController: AppController = async (c) => {
const opts = { signal, limit, timeout: conf.db.timeouts.timelines };
const events = await relay.query([filter], opts)
.then((events) => hydrateEvents({ events, relay, signal }))
.then((events) => hydrateEvents({ ...c.var, events }))
.then((events) => {
if (exclude_replies) {
return events.filter((event) => {
@ -282,8 +280,8 @@ const accountStatusesController: AppController = async (c) => {
const statuses = await Promise.all(
events.map((event) => {
if (event.kind === 6) return renderReblog(event, { viewerPubkey });
return renderStatus(event, { viewerPubkey });
if (event.kind === 6) return renderReblog(relay, event, { viewerPubkey });
return renderStatus(relay, event, { viewerPubkey });
}),
);
return paginated(c, events, statuses);
@ -305,7 +303,7 @@ const updateCredentialsSchema = z.object({
});
const updateCredentialsController: AppController = async (c) => {
const { relay, user, signal } = c.var;
const { relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const body = await parseBody(c.req.raw);
@ -375,7 +373,7 @@ const updateCredentialsController: AppController = async (c) => {
let account: MastodonAccount;
if (event) {
await hydrateEvents({ events: [event], relay, signal });
await hydrateEvents({ ...c.var, events: [event] });
account = await renderAccount(event, { withSource: true, settingsStore });
} else {
account = await accountFromPubkey(pubkey, { withSource: true, settingsStore });
@ -394,7 +392,7 @@ const updateCredentialsController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/accounts/#follow */
const followController: AppController = async (c) => {
const { user } = c.var;
const { relay, user } = c.var;
const sourcePubkey = await user!.signer.getPublicKey();
const targetPubkey = c.req.param('pubkey');
@ -405,7 +403,7 @@ const followController: AppController = async (c) => {
c,
);
const relationship = await getRelationship(sourcePubkey, targetPubkey);
const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
relationship.following = true;
return c.json(relationship);
@ -413,7 +411,7 @@ const followController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/accounts/#unfollow */
const unfollowController: AppController = async (c) => {
const { user } = c.var;
const { relay, user } = c.var;
const sourcePubkey = await user!.signer.getPublicKey();
const targetPubkey = c.req.param('pubkey');
@ -424,7 +422,7 @@ const unfollowController: AppController = async (c) => {
c,
);
const relationship = await getRelationship(sourcePubkey, targetPubkey);
const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
return c.json(relationship);
};
@ -435,8 +433,9 @@ const followersController: AppController = (c) => {
};
const followingController: AppController = async (c) => {
const { relay, signal } = c.var;
const pubkey = c.req.param('pubkey');
const pubkeys = await getFollowedPubkeys(pubkey);
const pubkeys = await getFollowedPubkeys(relay, pubkey, signal);
return renderAccounts(c, [...pubkeys]);
};
@ -452,7 +451,7 @@ const unblockController: AppController = (c) => {
/** https://docs.joinmastodon.org/methods/accounts/#mute */
const muteController: AppController = async (c) => {
const { user } = c.var;
const { relay, user } = c.var;
const sourcePubkey = await user!.signer.getPublicKey();
const targetPubkey = c.req.param('pubkey');
@ -463,13 +462,13 @@ const muteController: AppController = async (c) => {
c,
);
const relationship = await getRelationship(sourcePubkey, targetPubkey);
const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
return c.json(relationship);
};
/** https://docs.joinmastodon.org/methods/accounts/#unmute */
const unmuteController: AppController = async (c) => {
const { user } = c.var;
const { relay, user } = c.var;
const sourcePubkey = await user!.signer.getPublicKey();
const targetPubkey = c.req.param('pubkey');
@ -480,7 +479,7 @@ const unmuteController: AppController = async (c) => {
c,
);
const relationship = await getRelationship(sourcePubkey, targetPubkey);
const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
return c.json(relationship);
};
@ -499,26 +498,26 @@ const favouritesController: AppController = async (c) => {
.filter((id): id is string => !!id);
const events1 = await relay.query([{ kinds: [1, 20], ids }], { signal })
.then((events) => hydrateEvents({ events, relay, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const viewerPubkey = await user?.signer.getPublicKey();
const statuses = await Promise.all(
events1.map((event) => renderStatus(event, { viewerPubkey })),
events1.map((event) => renderStatus(relay, event, { viewerPubkey })),
);
return paginated(c, events1, statuses);
};
const familiarFollowersController: AppController = async (c) => {
const { relay, user } = c.var;
const { relay, user, signal } = c.var;
const pubkey = await user!.signer.getPublicKey();
const ids = z.array(z.string()).parse(c.req.queries('id[]'));
const follows = await getFollowedPubkeys(pubkey);
const follows = await getFollowedPubkeys(relay, pubkey, signal);
const results = await Promise.all(ids.map(async (id) => {
const followLists = await relay.query([{ kinds: [3], authors: [...follows], '#p': [id] }])
.then((events) => hydrateEvents({ events, relay }));
.then((events) => hydrateEvents({ ...c.var, events }));
const accounts = await Promise.all(
followLists.map((event) => event.author ? renderAccount(event.author) : accountFromPubkey(event.pubkey)),
@ -530,12 +529,10 @@ const familiarFollowersController: AppController = async (c) => {
return c.json(results);
};
async function getRelationship(sourcePubkey: string, targetPubkey: string) {
const db = await Storages.db();
async function getRelationship(relay: NStore, sourcePubkey: string, targetPubkey: string) {
const [sourceEvents, targetEvents] = await Promise.all([
db.query([{ kinds: [3, 10000], authors: [sourcePubkey] }]),
db.query([{ kinds: [3], authors: [targetPubkey] }]),
relay.query([{ kinds: [3, 10000], authors: [sourcePubkey] }]),
relay.query([{ kinds: [3], authors: [targetPubkey] }]),
]);
return renderRelationship({

View file

@ -1,3 +1,4 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrFilter } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { z } from 'zod';
@ -5,7 +6,7 @@ import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { booleanParamSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { createAdminEvent, paginated, parseBody, updateEventInfo, updateUser } from '@/utils/api.ts';
import { createAdminEvent, parseBody, updateEventInfo, updateUser } from '@/utils/api.ts';
import { renderNameRequest } from '@/views/ditto.ts';
import { renderAdminAccount, renderAdminAccountFromPubkey } from '@/views/mastodon/admin-accounts.ts';
import { errorJson } from '@/utils/log.ts';
@ -59,7 +60,7 @@ const adminAccountsController: AppController = async (c) => {
);
const events = await relay.query([{ kinds: [3036], ids: [...ids] }])
.then((events) => hydrateEvents({ relay, events, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const nameRequests = await Promise.all(events.map(renderNameRequest));
return paginated(c, orig, nameRequests);
@ -97,7 +98,7 @@ const adminAccountsController: AppController = async (c) => {
);
const authors = await relay.query([{ kinds: [0], authors: [...pubkeys] }])
.then((events) => hydrateEvents({ relay, events, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const accounts = await Promise.all(
[...pubkeys].map((pubkey) => {
@ -116,7 +117,7 @@ const adminAccountsController: AppController = async (c) => {
}
const events = await relay.query([filter], { signal })
.then((events) => hydrateEvents({ relay, events, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const accounts = await Promise.all(events.map(renderAdminAccount));
return paginated(c, events, accounts);
@ -210,7 +211,7 @@ const adminApproveController: AppController = async (c) => {
}, c);
await updateEventInfo(eventId, { pending: false, approved: true, rejected: false }, c);
await hydrateEvents({ events: [event], relay });
await hydrateEvents({ ...c.var, events: [event] });
const nameRequest = await renderNameRequest(event);
return c.json(nameRequest);
@ -226,7 +227,7 @@ const adminRejectController: AppController = async (c) => {
}
await updateEventInfo(eventId, { pending: false, approved: false, rejected: true }, c);
await hydrateEvents({ events: [event], relay });
await hydrateEvents({ ...c.var, events: [event] });
const nameRequest = await renderNameRequest(event);
return c.json(nameRequest);

View file

@ -13,10 +13,7 @@ import { createTestDB } from '@/test.ts';
import cashuRoute from './cashu.ts';
import { walletSchema } from '@/schema.ts';
Deno.test('PUT /wallet must be successful', {
sanitizeOps: false,
sanitizeResources: false,
}, async () => {
Deno.test('PUT /wallet must be successful', async () => {
await using test = await createTestRoute();
const { route, signer, sk, relay } = test;
@ -101,10 +98,7 @@ Deno.test('PUT /wallet must NOT be successful: wrong request body/schema', async
assertObjectMatch(body, { error: 'Bad schema' });
});
Deno.test('PUT /wallet must NOT be successful: wallet already exists', {
sanitizeOps: false,
sanitizeResources: false,
}, async () => {
Deno.test('PUT /wallet must NOT be successful: wallet already exists', async () => {
await using test = await createTestRoute();
const { route, sk, relay } = test;
@ -127,10 +121,7 @@ Deno.test('PUT /wallet must NOT be successful: wallet already exists', {
assertEquals(body2, { error: 'You already have a wallet 😏' });
});
Deno.test('GET /wallet must be successful', {
sanitizeOps: false,
sanitizeResources: false,
}, async () => {
Deno.test('GET /wallet must be successful', async () => {
await using test = await createTestRoute();
const { route, sk, relay, signer } = test;
@ -252,7 +243,7 @@ async function createTestRoute() {
const sk = generateSecretKey();
const signer = new NSecSigner(sk);
const route = new DittoApp({ db, relay, conf });
const route = new DittoApp({ db: db.db, relay, conf });
route.use(testUserMiddleware({ signer, relay }));
route.route('/', cashuRoute);

View file

@ -78,7 +78,6 @@ route.put('/wallet', userMiddleware({ enc: 'nip44' }), async (c) => {
await createEvent({
kind: 17375,
content: encryptedWalletContentTags,
// @ts-ignore kill me
}, c);
// Nutzap information
@ -89,7 +88,6 @@ route.put('/wallet', userMiddleware({ enc: 'nip44' }), async (c) => {
['relay', conf.relay], // TODO: add more relays once things get more stable
['pubkey', p2pk],
],
// @ts-ignore kill me
}, c);
// TODO: hydrate wallet and add a 'balance' field when a 'renderWallet' view function is created

View file

@ -1,3 +1,4 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';
@ -5,21 +6,30 @@ import { AppController } from '@/app.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getAuthor } from '@/queries.ts';
import { addTag } from '@/utils/tags.ts';
import { createEvent, paginated, parseBody, updateAdminEvent } from '@/utils/api.ts';
import { createEvent, parseBody, updateAdminEvent } from '@/utils/api.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
import { deleteTag } from '@/utils/tags.ts';
import { DittoZapSplits, getZapSplits } from '@/utils/zap-split.ts';
import { screenshotsSchema } from '@/schemas/nostr.ts';
import { booleanParamSchema, percentageSchema, wsUrlSchema } from '@/schema.ts';
import { booleanParamSchema, percentageSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { renderNameRequest } from '@/views/ditto.ts';
import { accountFromPubkey } from '@/views/mastodon/accounts.ts';
import { renderAccount } from '@/views/mastodon/accounts.ts';
import { Storages } from '@/storages.ts';
import { updateListAdminEvent } from '@/utils/api.ts';
/** Allowed relay usage markers (presumably per NIP-65 relay lists — confirm against consumers). */
const markerSchema = z.enum(['read', 'write']);
/**
 * WebSocket URL schema.
 *
 * Accepts only strings that parse as a URL whose protocol is `ws:` or `wss:`;
 * anything unparseable (or any other scheme) is rejected with the message
 * 'Invalid WebSocket URL'. The type predicate narrows the string to a
 * `ws://`/`wss://` template-literal type for downstream consumers.
 */
const wsUrlSchema = z.string().refine((value): value is `wss://${string}` | `ws://${string}` => {
  let protocol: string;
  try {
    // URL constructor throws on malformed input; treat that as "not a WS URL".
    protocol = new URL(value).protocol;
  } catch {
    return false;
  }
  return protocol === 'ws:' || protocol === 'wss:';
}, 'Invalid WebSocket URL');
const relaySchema = z.object({
url: wsUrlSchema,
marker: markerSchema.optional(),
@ -62,7 +72,7 @@ function renderRelays(event: NostrEvent): RelayEntity[] {
return event.tags.reduce((acc, [name, url, marker]) => {
if (name === 'r') {
const relay: RelayEntity = {
url,
url: url as `wss://${string}`,
marker: markerSchema.safeParse(marker).success ? marker as 'read' | 'write' : undefined,
};
acc.push(relay);
@ -110,7 +120,7 @@ export const nameRequestController: AppController = async (c) => {
],
}, c);
await hydrateEvents({ events: [event], relay });
await hydrateEvents({ ...c.var, events: [event] });
const nameRequest = await renderNameRequest(event);
return c.json(nameRequest);
@ -122,7 +132,7 @@ const nameRequestsSchema = z.object({
});
export const nameRequestsController: AppController = async (c) => {
const { conf, relay, user, signal } = c.var;
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const params = c.get('pagination');
@ -158,7 +168,7 @@ export const nameRequestsController: AppController = async (c) => {
}
const events = await relay.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }])
.then((events) => hydrateEvents({ relay, events: events, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const nameRequests = await Promise.all(
events.map((event) => renderNameRequest(event)),
@ -253,7 +263,7 @@ export const getZapSplitsController: AppController = async (c) => {
const pubkeys = Object.keys(dittoZapSplit);
const zapSplits = await Promise.all(pubkeys.map(async (pubkey) => {
const author = await getAuthor(pubkey);
const author = await getAuthor(pubkey, c.var);
const account = author ? renderAccount(author) : accountFromPubkey(pubkey);
@ -282,7 +292,7 @@ export const statusZapSplitsController: AppController = async (c) => {
const pubkeys = zapsTag.map((name) => name[1]);
const users = await relay.query([{ authors: pubkeys, kinds: [0], limit: pubkeys.length }], { signal });
await hydrateEvents({ events: users, relay, signal });
await hydrateEvents({ ...c.var, events: users });
const zapSplits = (await Promise.all(pubkeys.map((pubkey) => {
const author = (users.find((event) => event.pubkey === pubkey) as DittoEvent | undefined)?.author;
@ -315,7 +325,8 @@ const updateInstanceSchema = z.object({
});
export const updateInstanceController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, signal } = c.var;
const body = await parseBody(c.req.raw);
const result = updateInstanceSchema.safeParse(body);
const pubkey = await conf.signer.getPublicKey();
@ -324,7 +335,7 @@ export const updateInstanceController: AppController = async (c) => {
return c.json(result.error, 422);
}
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const meta = await getInstanceMetadata(relay, signal);
await updateAdminEvent(
{ kinds: [0], authors: [pubkey], limit: 1 },

View file

@ -1,7 +1,6 @@
import denoJson from 'deno.json' with { type: 'json' };
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
const version = `3.0.0 (compatible; Ditto ${denoJson.version})`;
@ -16,9 +15,9 @@ const features = [
];
const instanceV1Controller: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, signal } = c.var;
const { host, protocol } = conf.url;
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const meta = await getInstanceMetadata(relay, signal);
/** Protocol to use for WebSocket URLs, depending on the protocol of the `LOCAL_DOMAIN`. */
const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:';
@ -76,9 +75,9 @@ const instanceV1Controller: AppController = async (c) => {
};
const instanceV2Controller: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, signal } = c.var;
const { host, protocol } = conf.url;
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const meta = await getInstanceMetadata(relay, signal);
/** Protocol to use for WebSocket URLs, depending on the protocol of the `LOCAL_DOMAIN`. */
const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:';
@ -165,7 +164,9 @@ const instanceV2Controller: AppController = async (c) => {
};
const instanceDescriptionController: AppController = async (c) => {
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const { relay, signal } = c.var;
const meta = await getInstanceMetadata(relay, signal);
return c.json({
content: meta.about,

View file

@ -1,10 +1,10 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';
import { AppContext, AppController } from '@/app.ts';
import { DittoPagination } from '@/interfaces/DittoPagination.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { paginated } from '@/utils/api.ts';
import { renderNotification } from '@/views/mastodon/notifications.ts';
/** Set of known notification types across backends. */
@ -90,9 +90,9 @@ const notificationController: AppController = async (c) => {
return c.json({ error: 'Event not found' }, { status: 404 });
}
await hydrateEvents({ events: [event], relay });
await hydrateEvents({ ...c.var, events: [event] });
const notification = await renderNotification(event, { viewerPubkey: pubkey });
const notification = await renderNotification(relay, event, { viewerPubkey: pubkey });
if (!notification) {
return c.json({ error: 'Notification not found' }, { status: 404 });
@ -116,14 +116,14 @@ async function renderNotifications(
const events = await relay
.query(filters, opts)
.then((events) => events.filter((event) => event.pubkey !== pubkey))
.then((events) => hydrateEvents({ events, relay, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
if (!events.length) {
return c.json([]);
}
const notifications = (await Promise.all(events.map((event) => {
return renderNotification(event, { viewerPubkey: pubkey });
return renderNotification(relay, event, { viewerPubkey: pubkey });
})))
.filter((notification) => notification && types.has(notification.type));

View file

@ -3,8 +3,7 @@ import { escape } from 'entities';
import { generateSecretKey } from 'nostr-tools';
import { z } from 'zod';
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { AppContext, AppController } from '@/app.ts';
import { nostrNow } from '@/utils.ts';
import { parseBody } from '@/utils/api.ts';
import { aesEncrypt } from '@/utils/aes.ts';
@ -40,6 +39,7 @@ const createTokenSchema = z.discriminatedUnion('grant_type', [
const createTokenController: AppController = async (c) => {
const { conf } = c.var;
const body = await parseBody(c.req.raw);
const result = createTokenSchema.safeParse(body);
@ -50,7 +50,7 @@ const createTokenController: AppController = async (c) => {
switch (result.data.grant_type) {
case 'nostr_bunker':
return c.json({
access_token: await getToken(result.data, conf.seckey),
access_token: await getToken(c, result.data, conf.seckey),
token_type: 'Bearer',
scope: 'read write follow push',
created_at: nostrNow(),
@ -90,6 +90,8 @@ const revokeTokenSchema = z.object({
* https://docs.joinmastodon.org/methods/oauth/#revoke
*/
const revokeTokenController: AppController = async (c) => {
const { db } = c.var;
const body = await parseBody(c.req.raw);
const result = revokeTokenSchema.safeParse(body);
@ -99,10 +101,9 @@ const revokeTokenController: AppController = async (c) => {
const { token } = result.data;
const kysely = await Storages.kysely();
const tokenHash = await getTokenHash(token as `token1${string}`);
await kysely
await db.kysely
.deleteFrom('auth_tokens')
.where('token_hash', '=', tokenHash)
.execute();
@ -111,10 +112,11 @@ const revokeTokenController: AppController = async (c) => {
};
async function getToken(
c: AppContext,
{ pubkey: bunkerPubkey, secret, relays = [] }: { pubkey: string; secret?: string; relays?: string[] },
dittoSeckey: Uint8Array,
): Promise<`token1${string}`> {
const kysely = await Storages.kysely();
const { db, relay } = c.var;
const { token, hash } = await generateToken();
const nip46Seckey = generateSecretKey();
@ -123,14 +125,14 @@ async function getToken(
encryption: 'nip44',
pubkey: bunkerPubkey,
signer: new NSecSigner(nip46Seckey),
relay: await Storages.db(), // TODO: Use the relays from the request.
relay,
timeout: 60_000,
});
await signer.connect(secret);
const userPubkey = await signer.getPublicKey();
await kysely.insertInto('auth_tokens').values({
await db.kysely.insertInto('auth_tokens').values({
token_hash: hash,
pubkey: userPubkey,
bunker_pubkey: bunkerPubkey,
@ -236,7 +238,7 @@ const oauthAuthorizeController: AppController = async (c) => {
const bunker = new URL(bunker_uri);
const token = await getToken({
const token = await getToken(c, {
pubkey: bunker.hostname,
secret: bunker.searchParams.get('secret') || undefined,
relays: bunker.searchParams.getAll('relay'),

View file

@ -71,7 +71,7 @@ const pleromaAdminTagController: AppController = async (c) => {
const params = pleromaAdminTagSchema.parse(await c.req.json());
for (const nickname of params.nicknames) {
const pubkey = await lookupPubkey(nickname);
const pubkey = await lookupPubkey(nickname, c.var);
if (!pubkey) continue;
await updateAdminEvent(
@ -104,7 +104,7 @@ const pleromaAdminUntagController: AppController = async (c) => {
const params = pleromaAdminTagSchema.parse(await c.req.json());
for (const nickname of params.nicknames) {
const pubkey = await lookupPubkey(nickname);
const pubkey = await lookupPubkey(nickname, c.var);
if (!pubkey) continue;
await updateAdminEvent(
@ -130,7 +130,7 @@ const pleromaAdminSuggestController: AppController = async (c) => {
const { nicknames } = pleromaAdminSuggestSchema.parse(await c.req.json());
for (const nickname of nicknames) {
const pubkey = await lookupPubkey(nickname);
const pubkey = await lookupPubkey(nickname, c.var);
if (!pubkey) continue;
await updateUser(pubkey, { suggested: true }, c);
}
@ -142,7 +142,7 @@ const pleromaAdminUnsuggestController: AppController = async (c) => {
const { nicknames } = pleromaAdminSuggestSchema.parse(await c.req.json());
for (const nickname of nicknames) {
const pubkey = await lookupPubkey(nickname);
const pubkey = await lookupPubkey(nickname, c.var);
if (!pubkey) continue;
await updateUser(pubkey, { suggested: false }, c);
}

View file

@ -3,7 +3,6 @@ import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { parseBody } from '@/utils/api.ts';
import { getTokenHash } from '@/utils/auth.ts';
@ -42,7 +41,7 @@ const pushSubscribeSchema = z.object({
});
export const pushSubscribeController: AppController = async (c) => {
const { conf, user } = c.var;
const { conf, db, user } = c.var;
const vapidPublicKey = await conf.vapidPublicKey;
if (!vapidPublicKey) {
@ -50,8 +49,6 @@ export const pushSubscribeController: AppController = async (c) => {
}
const accessToken = getAccessToken(c.req.raw);
const kysely = await Storages.kysely();
const signer = user!.signer;
const result = pushSubscribeSchema.safeParse(await parseBody(c.req.raw));
@ -65,7 +62,7 @@ export const pushSubscribeController: AppController = async (c) => {
const pubkey = await signer.getPublicKey();
const tokenHash = await getTokenHash(accessToken);
const { id } = await kysely.transaction().execute(async (trx) => {
const { id } = await db.kysely.transaction().execute(async (trx) => {
await trx
.deleteFrom('push_subscriptions')
.where('token_hash', '=', tokenHash)
@ -97,7 +94,7 @@ export const pushSubscribeController: AppController = async (c) => {
};
export const getSubscriptionController: AppController = async (c) => {
const { conf } = c.var;
const { conf, db } = c.var;
const vapidPublicKey = await conf.vapidPublicKey;
if (!vapidPublicKey) {
@ -106,10 +103,9 @@ export const getSubscriptionController: AppController = async (c) => {
const accessToken = getAccessToken(c.req.raw);
const kysely = await Storages.kysely();
const tokenHash = await getTokenHash(accessToken);
const row = await kysely
const row = await db.kysely
.selectFrom('push_subscriptions')
.selectAll()
.where('token_hash', '=', tokenHash)

View file

@ -31,9 +31,9 @@ const reactionController: AppController = async (c) => {
tags: [['e', id], ['p', event.pubkey]],
}, c);
await hydrateEvents({ events: [event], relay });
await hydrateEvents({ ...c.var, events: [event] });
const status = await renderStatus(event, { viewerPubkey: await user!.signer.getPublicKey() });
const status = await renderStatus(relay, event, { viewerPubkey: await user!.signer.getPublicKey() });
return c.json(status);
};
@ -76,7 +76,7 @@ const deleteReactionController: AppController = async (c) => {
tags,
}, c);
const status = renderStatus(event, { viewerPubkey: pubkey });
const status = renderStatus(relay, event, { viewerPubkey: pubkey });
return c.json(status);
};
@ -99,7 +99,7 @@ const reactionsController: AppController = async (c) => {
const events = await relay.query([{ kinds: [7], '#e': [id], limit: 100 }])
.then((events) => events.filter(({ content }) => /^\p{RGI_Emoji}$/v.test(content)))
.then((events) => events.filter((event) => !emoji || event.content === emoji))
.then((events) => hydrateEvents({ events, relay }));
.then((events) => hydrateEvents({ ...c.var, events }));
/** Events grouped by emoji. */
const byEmoji = events.reduce((acc, event) => {

View file

@ -1,8 +1,9 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { createEvent, paginated, parseBody, updateEventInfo } from '@/utils/api.ts';
import { createEvent, parseBody, updateEventInfo } from '@/utils/api.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { renderAdminReport } from '@/views/mastodon/reports.ts';
import { renderReport } from '@/views/mastodon/reports.ts';
@ -18,7 +19,7 @@ const reportSchema = z.object({
/** https://docs.joinmastodon.org/methods/reports/#post */
const reportController: AppController = async (c) => {
const { conf, relay } = c.var;
const { conf } = c.var;
const body = await parseBody(c.req.raw);
const result = reportSchema.safeParse(body);
@ -49,7 +50,7 @@ const reportController: AppController = async (c) => {
tags,
}, c);
await hydrateEvents({ events: [event], relay });
await hydrateEvents({ ...c.var, events: [event] });
return c.json(await renderReport(event));
};
@ -94,10 +95,10 @@ const adminReportsController: AppController = async (c) => {
}
const events = await relay.query([{ kinds: [1984], ids: [...ids] }])
.then((events) => hydrateEvents({ relay, events: events, signal: c.req.raw.signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const reports = await Promise.all(
events.map((event) => renderAdminReport(event, { viewerPubkey })),
events.map((event) => renderAdminReport(relay, event, { viewerPubkey })),
);
return paginated(c, orig, reports);
@ -120,9 +121,9 @@ const adminReportController: AppController = async (c) => {
return c.json({ error: 'Not found' }, 404);
}
await hydrateEvents({ events: [event], relay, signal });
await hydrateEvents({ ...c.var, events: [event] });
const report = await renderAdminReport(event, { viewerPubkey: pubkey });
const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey });
return c.json(report);
};
@ -144,9 +145,9 @@ const adminReportResolveController: AppController = async (c) => {
}
await updateEventInfo(eventId, { open: false, closed: true }, c);
await hydrateEvents({ events: [event], relay, signal });
await hydrateEvents({ ...c.var, events: [event] });
const report = await renderAdminReport(event, { viewerPubkey: pubkey });
const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey });
return c.json(report);
};
@ -167,9 +168,9 @@ const adminReportReopenController: AppController = async (c) => {
}
await updateEventInfo(eventId, { open: true, closed: false }, c);
await hydrateEvents({ events: [event], relay, signal });
await hydrateEvents({ ...c.var, events: [event] });
const report = await renderAdminReport(event, { viewerPubkey: pubkey });
const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey });
return c.json(report);
};

View file

@ -1,18 +1,17 @@
import { paginated, paginatedList } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { AppController } from '@/app.ts';
import { AppContext, AppController } from '@/app.ts';
import { booleanParamSchema } from '@/schema.ts';
import { Storages } from '@/storages.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { extractIdentifier, lookupPubkey } from '@/utils/lookup.ts';
import { nip05Cache } from '@/utils/nip05.ts';
import { lookupNip05 } from '@/utils/nip05.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
import { getFollowedPubkeys } from '@/queries.ts';
import { getPubkeysBySearch } from '@/utils/search.ts';
import { paginated, paginatedList } from '@/utils/api.ts';
const searchQuerySchema = z.object({
q: z.string().transform(decodeURIComponent),
@ -26,7 +25,7 @@ const searchQuerySchema = z.object({
type SearchQuery = z.infer<typeof searchQuerySchema> & { since?: number; until?: number; limit: number };
const searchController: AppController = async (c) => {
const { user, pagination, signal } = c.var;
const { relay, user, pagination, signal } = c.var;
const result = searchQuerySchema.safeParse(c.req.query());
const viewerPubkey = await user?.signer.getPublicKey();
@ -35,12 +34,12 @@ const searchController: AppController = async (c) => {
return c.json({ error: 'Bad request', schema: result.error }, 422);
}
const event = await lookupEvent({ ...result.data, ...pagination }, signal);
const event = await lookupEvent(c, { ...result.data, ...pagination });
const lookup = extractIdentifier(result.data.q);
// Render account from pubkey.
if (!event && lookup) {
const pubkey = await lookupPubkey(lookup);
const pubkey = await lookupPubkey(lookup, c.var);
return c.json({
accounts: pubkey ? [accountFromPubkey(pubkey)] : [],
statuses: [],
@ -54,7 +53,7 @@ const searchController: AppController = async (c) => {
events = [event];
}
events.push(...(await searchEvents({ ...result.data, ...pagination, viewerPubkey }, signal)));
events.push(...(await searchEvents(c, { ...result.data, ...pagination, viewerPubkey }, signal)));
const [accounts, statuses] = await Promise.all([
Promise.all(
@ -66,7 +65,7 @@ const searchController: AppController = async (c) => {
Promise.all(
events
.filter((event) => event.kind === 1)
.map((event) => renderStatus(event, { viewerPubkey }))
.map((event) => renderStatus(relay, event, { viewerPubkey }))
.filter(Boolean),
),
]);
@ -86,16 +85,17 @@ const searchController: AppController = async (c) => {
/** Get events for the search params. */
async function searchEvents(
c: AppContext,
{ q, type, since, until, limit, offset, account_id, viewerPubkey }: SearchQuery & { viewerPubkey?: string },
signal: AbortSignal,
): Promise<NostrEvent[]> {
const { relay, db } = c.var;
// Hashtag search is not supported.
if (type === 'hashtags') {
return Promise.resolve([]);
}
const relay = await Storages.db();
const filter: NostrFilter = {
kinds: typeToKinds(type),
search: q,
@ -104,12 +104,10 @@ async function searchEvents(
limit,
};
const kysely = await Storages.kysely();
// For account search, use a special index, and prioritize followed accounts.
if (type === 'accounts') {
const following = viewerPubkey ? await getFollowedPubkeys(viewerPubkey) : new Set<string>();
const searchPubkeys = await getPubkeysBySearch(kysely, { q, limit, offset, following });
const following = viewerPubkey ? await getFollowedPubkeys(relay, viewerPubkey) : new Set<string>();
const searchPubkeys = await getPubkeysBySearch(db.kysely, { q, limit, offset, following });
filter.authors = [...searchPubkeys];
filter.search = undefined;
@ -123,7 +121,7 @@ async function searchEvents(
// Query the events.
let events = await relay
.query([filter], { signal })
.then((events) => hydrateEvents({ events, relay, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
// When using an authors filter, return the events in the same order as the filter.
if (filter.authors) {
@ -148,17 +146,17 @@ function typeToKinds(type: SearchQuery['type']): number[] {
}
/** Resolve a searched value into an event, if applicable. */
async function lookupEvent(query: SearchQuery, signal: AbortSignal): Promise<NostrEvent | undefined> {
const filters = await getLookupFilters(query, signal);
const relay = await Storages.db();
async function lookupEvent(c: AppContext, query: SearchQuery): Promise<NostrEvent | undefined> {
const { relay, signal } = c.var;
const filters = await getLookupFilters(c, query);
return relay.query(filters, { limit: 1, signal })
.then((events) => hydrateEvents({ events, relay, signal }))
return relay.query(filters, { signal })
.then((events) => hydrateEvents({ ...c.var, events }))
.then(([event]) => event);
}
/** Get filters to lookup the input value. */
async function getLookupFilters({ q, type, resolve }: SearchQuery, signal: AbortSignal): Promise<NostrFilter[]> {
async function getLookupFilters(c: AppContext, { q, type, resolve }: SearchQuery): Promise<NostrFilter[]> {
const accounts = !type || type === 'accounts';
const statuses = !type || type === 'statuses';
@ -199,7 +197,7 @@ async function getLookupFilters({ q, type, resolve }: SearchQuery, signal: Abort
}
try {
const { pubkey } = await nip05Cache.fetch(lookup, { signal });
const { pubkey } = await lookupNip05(lookup, c.var);
if (pubkey) {
return [{ kinds: [0], authors: [pubkey] }];
}

View file

@ -1,4 +1,5 @@
import { HTTPException } from '@hono/hono/http-exception';
import { paginated, paginatedList, paginationSchema } from '@ditto/mastoapi/pagination';
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
import 'linkify-plugin-hashtag';
import linkify from 'linkifyjs';
@ -9,13 +10,12 @@ import { type AppController } from '@/app.ts';
import { DittoUpload, dittoUploads } from '@/DittoUploads.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getAncestors, getAuthor, getDescendants, getEvent } from '@/queries.ts';
import { paginationSchema } from '@/schemas/pagination.ts';
import { addTag, deleteTag } from '@/utils/tags.ts';
import { asyncReplaceAll } from '@/utils/text.ts';
import { lookupPubkey } from '@/utils/lookup.ts';
import { languageSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { assertAuthenticated, createEvent, paginated, paginatedList, parseBody, updateListEvent } from '@/utils/api.ts';
import { assertAuthenticated, createEvent, parseBody, updateListEvent } from '@/utils/api.ts';
import { getInvoice, getLnurl } from '@/utils/lnurl.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { getZapSplits } from '@/utils/zap-split.ts';
@ -46,10 +46,10 @@ const createStatusSchema = z.object({
);
const statusController: AppController = async (c) => {
const { user, signal } = c.var;
const { relay, user } = c.var;
const id = c.req.param('id');
const event = await getEvent(id, { signal });
const event = await getEvent(id, c.var);
if (event?.author) {
assertAuthenticated(c, event.author);
@ -57,7 +57,7 @@ const statusController: AppController = async (c) => {
if (event) {
const viewerPubkey = await user?.signer.getPublicKey();
const status = await renderStatus(event, { viewerPubkey });
const status = await renderStatus(relay, event, { viewerPubkey });
return c.json(status);
}
@ -65,7 +65,7 @@ const statusController: AppController = async (c) => {
};
const createStatusController: AppController = async (c) => {
const { conf, relay, user, signal } = c.var;
const { conf, relay, user } = c.var;
const body = await parseBody(c.req.raw);
const result = createStatusSchema.safeParse(body);
@ -153,7 +153,7 @@ const createStatusController: AppController = async (c) => {
data.status ?? '',
/(?<![\w/])@([\w@+._-]+)(?![\w/\.])/g,
async (match, username) => {
const pubkey = await lookupPubkey(username);
const pubkey = await lookupPubkey(username, c.var);
if (!pubkey) return match;
// Content addressing (default)
@ -171,7 +171,7 @@ const createStatusController: AppController = async (c) => {
// Explicit addressing
for (const to of data.to ?? []) {
const pubkey = await lookupPubkey(to);
const pubkey = await lookupPubkey(to, c.var);
if (pubkey) {
pubkeys.add(pubkey);
}
@ -191,7 +191,7 @@ const createStatusController: AppController = async (c) => {
}
const pubkey = await user!.signer.getPublicKey();
const author = pubkey ? await getAuthor(pubkey) : undefined;
const author = pubkey ? await getAuthor(pubkey, c.var) : undefined;
if (conf.zapSplitsEnabled) {
const meta = n.json().pipe(n.metadata()).catch({}).parse(author?.content);
@ -254,22 +254,18 @@ const createStatusController: AppController = async (c) => {
}, c);
if (data.quote_id) {
await hydrateEvents({
events: [event],
relay,
signal,
});
await hydrateEvents({ ...c.var, events: [event] });
}
return c.json(await renderStatus({ ...event, author }, { viewerPubkey: author?.pubkey }));
return c.json(await renderStatus(relay, { ...event, author }, { viewerPubkey: author?.pubkey }));
};
const deleteStatusController: AppController = async (c) => {
const { conf, user, signal } = c.var;
const { conf, relay, user } = c.var;
const id = c.req.param('id');
const pubkey = await user?.signer.getPublicKey();
const event = await getEvent(id, { signal });
const event = await getEvent(id, c.var);
if (event) {
if (event.pubkey === pubkey) {
@ -278,8 +274,8 @@ const deleteStatusController: AppController = async (c) => {
tags: [['e', id, conf.relay, '', pubkey]],
}, c);
const author = await getAuthor(event.pubkey);
return c.json(await renderStatus({ ...event, author }, { viewerPubkey: pubkey }));
const author = await getAuthor(event.pubkey, c.var);
return c.json(await renderStatus(relay, { ...event, author }, { viewerPubkey: pubkey }));
} else {
return c.json({ error: 'Unauthorized' }, 403);
}
@ -297,7 +293,7 @@ const contextController: AppController = async (c) => {
async function renderStatuses(events: NostrEvent[]) {
const statuses = await Promise.all(
events.map((event) => renderStatus(event, { viewerPubkey })),
events.map((event) => renderStatus(relay, event, { viewerPubkey })),
);
return statuses.filter(Boolean);
}
@ -308,11 +304,7 @@ const contextController: AppController = async (c) => {
getDescendants(relay, event),
]);
await hydrateEvents({
events: [...ancestorEvents, ...descendantEvents],
signal: c.req.raw.signal,
relay,
});
await hydrateEvents({ ...c.var, events: [...ancestorEvents, ...descendantEvents] });
const [ancestors, descendants] = await Promise.all([
renderStatuses(ancestorEvents),
@ -341,9 +333,9 @@ const favouriteController: AppController = async (c) => {
],
}, c);
await hydrateEvents({ events: [target], relay });
await hydrateEvents({ ...c.var, events: [target] });
const status = await renderStatus(target, { viewerPubkey: await user?.signer.getPublicKey() });
const status = await renderStatus(relay, target, { viewerPubkey: await user?.signer.getPublicKey() });
if (status) {
status.favourited = true;
@ -367,10 +359,10 @@ const favouritedByController: AppController = (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#boost */
const reblogStatusController: AppController = async (c) => {
const { conf, relay, user, signal } = c.var;
const { conf, relay, user } = c.var;
const eventId = c.req.param('id');
const event = await getEvent(eventId);
const event = await getEvent(eventId, c.var);
if (!event) {
return c.json({ error: 'Event not found.' }, 404);
@ -384,13 +376,9 @@ const reblogStatusController: AppController = async (c) => {
],
}, c);
await hydrateEvents({
events: [reblogEvent],
relay,
signal: signal,
});
await hydrateEvents({ ...c.var, events: [reblogEvent] });
const status = await renderReblog(reblogEvent, { viewerPubkey: await user?.signer.getPublicKey() });
const status = await renderReblog(relay, reblogEvent, { viewerPubkey: await user?.signer.getPublicKey() });
return c.json(status);
};
@ -420,7 +408,7 @@ const unreblogStatusController: AppController = async (c) => {
tags: [['e', repostEvent.id, conf.relay, '', repostEvent.pubkey]],
}, c);
return c.json(await renderStatus(event, { viewerPubkey: pubkey }));
return c.json(await renderStatus(relay, event, { viewerPubkey: pubkey }));
};
const rebloggedByController: AppController = (c) => {
@ -441,12 +429,12 @@ const quotesController: AppController = async (c) => {
const quotes = await relay
.query([{ kinds: [1, 20], '#q': [event.id], ...pagination }])
.then((events) => hydrateEvents({ events, relay }));
.then((events) => hydrateEvents({ ...c.var, events }));
const viewerPubkey = await user?.signer.getPublicKey();
const statuses = await Promise.all(
quotes.map((event) => renderStatus(event, { viewerPubkey })),
quotes.map((event) => renderStatus(relay, event, { viewerPubkey })),
);
if (!statuses.length) {
@ -458,11 +446,11 @@ const quotesController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#bookmark */
const bookmarkController: AppController = async (c) => {
const { conf, user } = c.var;
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const eventId = c.req.param('id');
const event = await getEvent(eventId);
const event = await getEvent(eventId, c.var);
if (event) {
await updateListEvent(
@ -471,7 +459,7 @@ const bookmarkController: AppController = async (c) => {
c,
);
const status = await renderStatus(event, { viewerPubkey: pubkey });
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
if (status) {
status.bookmarked = true;
}
@ -483,12 +471,12 @@ const bookmarkController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#unbookmark */
const unbookmarkController: AppController = async (c) => {
const { conf, user } = c.var;
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const eventId = c.req.param('id');
const event = await getEvent(eventId);
const event = await getEvent(eventId, c.var);
if (event) {
await updateListEvent(
@ -497,7 +485,7 @@ const unbookmarkController: AppController = async (c) => {
c,
);
const status = await renderStatus(event, { viewerPubkey: pubkey });
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
if (status) {
status.bookmarked = false;
}
@ -509,12 +497,12 @@ const unbookmarkController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#pin */
const pinController: AppController = async (c) => {
const { conf, user } = c.var;
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const eventId = c.req.param('id');
const event = await getEvent(eventId);
const event = await getEvent(eventId, c.var);
if (event) {
await updateListEvent(
@ -523,7 +511,7 @@ const pinController: AppController = async (c) => {
c,
);
const status = await renderStatus(event, { viewerPubkey: pubkey });
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
if (status) {
status.pinned = true;
}
@ -535,15 +523,12 @@ const pinController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#unpin */
const unpinController: AppController = async (c) => {
const { conf, user, signal } = c.var;
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const eventId = c.req.param('id');
const event = await getEvent(eventId, {
kind: 1,
signal,
});
const event = await getEvent(eventId, c.var);
if (event) {
await updateListEvent(
@ -552,7 +537,7 @@ const unpinController: AppController = async (c) => {
c,
);
const status = await renderStatus(event, { viewerPubkey: pubkey });
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
if (status) {
status.pinned = false;
}
@ -586,7 +571,7 @@ const zapController: AppController = async (c) => {
let lnurl: undefined | string;
if (status_id) {
target = await getEvent(status_id, { kind: 1, signal });
target = await getEvent(status_id, c.var);
const author = target?.author;
const meta = n.json().pipe(n.metadata()).catch({}).parse(author?.content);
lnurl = getLnurl(meta);

View file

@ -5,7 +5,7 @@ import {
streamingServerMessagesCounter,
} from '@ditto/metrics';
import TTLCache from '@isaacs/ttlcache';
import { NostrEvent, NostrFilter } from '@nostrify/nostrify';
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { z } from 'zod';
@ -83,7 +83,7 @@ const streamingController: AppController = async (c) => {
}
}
const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { protocol: token, idleTimeout: 30 });
const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { protocol: token });
const pubkey = await user?.signer.getPublicKey();
const policy = pubkey ? new MuteListPolicy(pubkey, relay) : undefined;
@ -99,8 +99,9 @@ const streamingController: AppController = async (c) => {
filter: NostrFilter & { limit: 0 },
render: (event: NostrEvent) => Promise<StreamingEvent | undefined>,
) {
const { signal } = controller;
try {
for await (const msg of relay.req([filter], { signal: controller.signal })) {
for await (const msg of relay.req([filter], { signal })) {
if (msg[0] === 'EVENT') {
const event = msg[2];
@ -111,7 +112,7 @@ const streamingController: AppController = async (c) => {
}
}
await hydrateEvents({ events: [event], relay, signal: AbortSignal.timeout(1000) });
await hydrateEvents({ ...c.var, events: [event], signal });
const result = await render(event);
@ -130,17 +131,17 @@ const streamingController: AppController = async (c) => {
streamingConnectionsGauge.set(connections.size);
if (!stream) return;
const topicFilter = await topicToFilter(stream, c.req.query(), pubkey, conf.url.host);
const topicFilter = await topicToFilter(relay, stream, c.req.query(), pubkey, conf.url.host);
if (topicFilter) {
sub(topicFilter, async (event) => {
let payload: object | undefined;
if (event.kind === 1) {
payload = await renderStatus(event, { viewerPubkey: pubkey });
payload = await renderStatus(relay, event, { viewerPubkey: pubkey });
}
if (event.kind === 6) {
payload = await renderReblog(event, { viewerPubkey: pubkey });
payload = await renderReblog(relay, event, { viewerPubkey: pubkey });
}
if (payload) {
@ -156,13 +157,13 @@ const streamingController: AppController = async (c) => {
if (['user', 'user:notification'].includes(stream) && pubkey) {
sub({ '#p': [pubkey], limit: 0 }, async (event) => {
if (event.pubkey === pubkey) return; // skip own events
const payload = await renderNotification(event, { viewerPubkey: pubkey });
const payload = await renderNotification(relay, event, { viewerPubkey: pubkey });
if (payload) {
return {
event: 'notification',
payload: JSON.stringify(payload),
stream: [stream],
};
} satisfies StreamingEvent;
}
});
return;
@ -198,6 +199,7 @@ const streamingController: AppController = async (c) => {
};
async function topicToFilter(
relay: NStore,
topic: Stream,
query: Record<string, string>,
pubkey: string | undefined,
@ -218,7 +220,7 @@ async function topicToFilter(
// HACK: this puts the user's entire contacts list into RAM,
// and then calls `matchFilters` over it. Refreshing the page
// is required after following a new user.
return pubkey ? { kinds: [1, 6, 20], authors: [...await getFeedPubkeys(pubkey)], limit: 0 } : undefined;
return pubkey ? { kinds: [1, 6, 20], authors: [...await getFeedPubkeys(relay, pubkey)], limit: 0 } : undefined;
}
}

View file

@ -1,10 +1,9 @@
import { paginated, paginatedList, paginationSchema } from '@ditto/mastoapi/pagination';
import { NostrFilter } from '@nostrify/nostrify';
import { matchFilter } from 'nostr-tools';
import { AppContext, AppController } from '@/app.ts';
import { paginationSchema } from '@/schemas/pagination.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { paginated, paginatedList } from '@/utils/api.ts';
import { getTagSet } from '@/utils/tags.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
@ -82,7 +81,7 @@ async function renderV2Suggestions(c: AppContext, params: { offset: number; limi
[{ kinds: [0], authors, limit: authors.length }],
{ signal },
)
.then((events) => hydrateEvents({ events, relay, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
return Promise.all(authors.map(async (pubkey) => {
const profile = profiles.find((event) => event.pubkey === pubkey);
@ -115,7 +114,7 @@ export const localSuggestionsController: AppController = async (c) => {
[{ kinds: [0], authors: [...pubkeys], search: `domain:${conf.url.host}`, ...pagination }],
{ signal },
)
.then((events) => hydrateEvents({ relay, events, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const suggestions = [...pubkeys].map((pubkey) => {
const profile = profiles.find((event) => event.pubkey === pubkey);

View file

@ -1,3 +1,4 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrFilter } from '@nostrify/nostrify';
import { z } from 'zod';
@ -5,7 +6,6 @@ import { type AppContext, type AppController } from '@/app.ts';
import { getFeedPubkeys } from '@/queries.ts';
import { booleanParamSchema, languageSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { paginated } from '@/utils/api.ts';
import { getTagSet } from '@/utils/tags.ts';
import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts';
@ -15,7 +15,7 @@ const homeQuerySchema = z.object({
});
const homeTimelineController: AppController = async (c) => {
const { user, pagination } = c.var;
const { relay, user, pagination } = c.var;
const pubkey = await user?.signer.getPublicKey()!;
const result = homeQuerySchema.safeParse(c.req.query());
@ -25,7 +25,7 @@ const homeTimelineController: AppController = async (c) => {
const { exclude_replies, only_media } = result.data;
const authors = [...await getFeedPubkeys(pubkey)];
const authors = [...await getFeedPubkeys(relay, pubkey)];
const filter: NostrFilter = { authors, kinds: [1, 6, 20], ...pagination };
const search: string[] = [];
@ -110,7 +110,7 @@ async function renderStatuses(c: AppContext, filters: NostrFilter[]) {
const events = await relay
.query(filters, opts)
.then((events) => hydrateEvents({ events, relay, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
if (!events.length) {
return c.json([]);
@ -120,9 +120,9 @@ async function renderStatuses(c: AppContext, filters: NostrFilter[]) {
const statuses = (await Promise.all(events.map((event) => {
if (event.kind === 6) {
return renderReblog(event, { viewerPubkey });
return renderReblog(relay, event, { viewerPubkey });
}
return renderStatus(event, { viewerPubkey });
return renderStatus(relay, event, { viewerPubkey });
}))).filter(Boolean);
if (!statuses.length) {

View file

@ -17,7 +17,7 @@ const translateSchema = z.object({
});
const translateController: AppController = async (c) => {
const { user, signal } = c.var;
const { relay, user, signal } = c.var;
const result = translateSchema.safeParse(await parseBody(c.req.raw));
@ -34,7 +34,7 @@ const translateController: AppController = async (c) => {
const id = c.req.param('id');
const event = await getEvent(id, { signal });
const event = await getEvent(id, c.var);
if (!event) {
return c.json({ error: 'Record not found' }, 400);
}
@ -45,7 +45,7 @@ const translateController: AppController = async (c) => {
return c.json({ error: 'Source and target languages are the same. No translation needed.' }, 400);
}
const status = await renderStatus(event, { viewerPubkey });
const status = await renderStatus(relay, event, { viewerPubkey });
if (!status?.content) {
return c.json({ error: 'Bad request.', schema: result.error }, 400);
}

View file

@ -1,34 +1,44 @@
import { type DittoConf } from '@ditto/conf';
import { paginated, paginationSchema } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { z } from 'zod';
import { AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
import { paginationSchema } from '@/schemas/pagination.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { Storages } from '@/storages.ts';
import { generateDateRange, Time } from '@/utils/time.ts';
import { unfurlCardCached } from '@/utils/unfurl.ts';
import { paginated } from '@/utils/api.ts';
import { PreviewCard, unfurlCardCached } from '@/utils/unfurl.ts';
import { errorJson } from '@/utils/log.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
let trendingHashtagsCache = getTrendingHashtags(Conf).catch((e: unknown) => {
logi({
level: 'error',
ns: 'ditto.trends.api',
type: 'tags',
msg: 'Failed to get trending hashtags',
error: errorJson(e),
});
return Promise.resolve([]);
interface TrendHistory {
day: string;
accounts: string;
uses: string;
}
interface TrendingHashtag {
name: string;
url: string;
history: TrendHistory[];
}
interface TrendingLink extends PreviewCard {
history: TrendHistory[];
}
const trendingTagsQuerySchema = z.object({
limit: z.coerce.number().catch(10).transform((value) => Math.min(Math.max(value, 0), 20)),
offset: z.number().nonnegative().catch(0),
});
Deno.cron('update trending hashtags cache', '35 * * * *', async () => {
const trendingTagsController: AppController = async (c) => {
const { conf, relay } = c.var;
const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
try {
const trends = await getTrendingHashtags(Conf);
trendingHashtagsCache = Promise.resolve(trends);
const trends = await getTrendingHashtags(conf, relay);
return c.json(trends.slice(offset, offset + limit));
} catch (e) {
logi({
level: 'error',
@ -37,22 +47,11 @@ Deno.cron('update trending hashtags cache', '35 * * * *', async () => {
msg: 'Failed to get trending hashtags',
error: errorJson(e),
});
return c.json([]);
}
});
const trendingTagsQuerySchema = z.object({
limit: z.coerce.number().catch(10).transform((value) => Math.min(Math.max(value, 0), 20)),
offset: z.number().nonnegative().catch(0),
});
const trendingTagsController: AppController = async (c) => {
const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
const trends = await trendingHashtagsCache;
return c.json(trends.slice(offset, offset + limit));
};
async function getTrendingHashtags(conf: DittoConf) {
const relay = await Storages.db();
async function getTrendingHashtags(conf: DittoConf, relay: NStore): Promise<TrendingHashtag[]> {
const trends = await getTrendingTags(relay, 't', await conf.signer.getPublicKey());
return trends.map((trend) => {
@ -72,21 +71,12 @@ async function getTrendingHashtags(conf: DittoConf) {
});
}
let trendingLinksCache = getTrendingLinks(Conf).catch((e: unknown) => {
logi({
level: 'error',
ns: 'ditto.trends.api',
type: 'links',
msg: 'Failed to get trending links',
error: errorJson(e),
});
return Promise.resolve([]);
});
Deno.cron('update trending links cache', '50 * * * *', async () => {
const trendingLinksController: AppController = async (c) => {
const { conf, relay } = c.var;
const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
try {
const trends = await getTrendingLinks(Conf);
trendingLinksCache = Promise.resolve(trends);
const trends = await getTrendingLinks(conf, relay);
return c.json(trends.slice(offset, offset + limit));
} catch (e) {
logi({
level: 'error',
@ -95,17 +85,11 @@ Deno.cron('update trending links cache', '50 * * * *', async () => {
msg: 'Failed to get trending links',
error: errorJson(e),
});
return c.json([]);
}
});
const trendingLinksController: AppController = async (c) => {
const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
const trends = await trendingLinksCache;
return c.json(trends.slice(offset, offset + limit));
};
async function getTrendingLinks(conf: DittoConf) {
const relay = await Storages.db();
async function getTrendingLinks(conf: DittoConf, relay: NStore): Promise<TrendingLink[]> {
const trends = await getTrendingTags(relay, 'r', await conf.signer.getPublicKey());
return Promise.all(trends.map(async (trend) => {
@ -162,7 +146,7 @@ const trendingStatusesController: AppController = async (c) => {
}
const results = await relay.query([{ kinds: [1, 20], ids }])
.then((events) => hydrateEvents({ events, relay }));
.then((events) => hydrateEvents({ ...c.var, events }));
// Sort events in the order they appear in the label.
const events = ids
@ -170,7 +154,7 @@ const trendingStatusesController: AppController = async (c) => {
.filter((event): event is NostrEvent => !!event);
const statuses = await Promise.all(
events.map((event) => renderStatus(event, {})),
events.map((event) => renderStatus(relay, event, {})),
);
return paginated(c, results, statuses);

View file

@ -1,6 +1,6 @@
import { logi } from '@soapbox/logi';
import { AppMiddleware } from '@/app.ts';
import { AppContext, AppMiddleware } from '@/app.ts';
import { getPathParams, MetadataEntities } from '@/utils/og-metadata.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
import { errorJson } from '@/utils/log.ts';
@ -9,14 +9,11 @@ import { renderMetadata } from '@/views/meta.ts';
import { getAuthor, getEvent } from '@/queries.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
import { renderAccount } from '@/views/mastodon/accounts.ts';
import { NStore } from '@nostrify/nostrify';
/** Placeholder to find & replace with metadata. */
const META_PLACEHOLDER = '<!--server-generated-meta-->' as const;
export const frontendController: AppMiddleware = async (c) => {
const { relay } = c.var;
c.header('Cache-Control', 'max-age=86400, s-maxage=30, public, stale-if-error=604800');
try {
@ -25,7 +22,7 @@ export const frontendController: AppMiddleware = async (c) => {
if (content.includes(META_PLACEHOLDER)) {
const params = getPathParams(c.req.path);
try {
const entities = await getEntities(relay, params ?? {});
const entities = await getEntities(c, params ?? {});
const meta = renderMetadata(c.req.url, entities);
return c.html(content.replace(META_PLACEHOLDER, meta));
} catch (e) {
@ -39,25 +36,27 @@ export const frontendController: AppMiddleware = async (c) => {
}
};
async function getEntities(relay: NStore, params: { acct?: string; statusId?: string }): Promise<MetadataEntities> {
async function getEntities(c: AppContext, params: { acct?: string; statusId?: string }): Promise<MetadataEntities> {
const { relay } = c.var;
const entities: MetadataEntities = {
instance: await getInstanceMetadata(relay),
};
if (params.statusId) {
const event = await getEvent(params.statusId, { kind: 1 });
const event = await getEvent(params.statusId, c.var);
if (event) {
entities.status = await renderStatus(event, {});
entities.status = await renderStatus(relay, event, {});
entities.account = entities.status?.account;
}
return entities;
}
if (params.acct) {
const pubkey = await lookupPubkey(params.acct.replace(/^@/, ''));
const event = pubkey ? await getAuthor(pubkey) : undefined;
const pubkey = await lookupPubkey(params.acct.replace(/^@/, ''), c.var);
const event = pubkey ? await getAuthor(pubkey, c.var) : undefined;
if (event) {
entities.account = await renderAccount(event);
entities.account = renderAccount(event);
}
}

View file

@ -1,31 +1,16 @@
import {
dbAvailableConnectionsGauge,
dbPoolSizeGauge,
relayPoolRelaysSizeGauge,
relayPoolSubscriptionsSizeGauge,
} from '@ditto/metrics';
import { dbAvailableConnectionsGauge, dbPoolSizeGauge } from '@ditto/metrics';
import { register } from 'prom-client';
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
/** Prometheus/OpenMetrics controller. */
export const metricsController: AppController = async (c) => {
const db = await Storages.database();
const pool = await Storages.client();
const { db } = c.var;
// Update some metrics at request time.
dbPoolSizeGauge.set(db.poolSize);
dbAvailableConnectionsGauge.set(db.availableConnections);
relayPoolRelaysSizeGauge.reset();
relayPoolSubscriptionsSizeGauge.reset();
for (const relay of pool.relays.values()) {
relayPoolRelaysSizeGauge.inc({ ready_state: relay.socket.readyState });
relayPoolSubscriptionsSizeGauge.inc(relay.subscriptions.length);
}
// Serve the metrics.
const metrics = await register.metrics();

View file

@ -16,7 +16,6 @@ import {
import { AppController } from '@/app.ts';
import { relayInfoController } from '@/controllers/nostr/relay-info.ts';
import * as pipeline from '@/pipeline.ts';
import { RelayError } from '@/RelayError.ts';
import { type DittoPgStore } from '@/storages/DittoPgStore.ts';
import { errorJson } from '@/utils/log.ts';
@ -159,7 +158,7 @@ function connectStream(conf: DittoConf, relay: DittoPgStore, socket: WebSocket,
try {
// This will store it (if eligible) and run other side-effects.
await pipeline.handleEvent(purifyEvent(event), { source: 'relay', signal: AbortSignal.timeout(1000) });
await relay.event(purifyEvent(event), { signal: AbortSignal.timeout(1000) });
send(['OK', event.id, true, '']);
} catch (e) {
if (e instanceof RelayError) {
@ -214,7 +213,7 @@ const relayController: AppController = (c, next) => {
ip = undefined;
}
const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { idleTimeout: 30 });
const { socket, response } = Deno.upgradeWebSocket(c.req.raw);
connectStream(conf, relay as DittoPgStore, socket, ip);
return response;

View file

@ -12,8 +12,6 @@ const emptyResult: NostrJson = { names: {}, relays: {} };
* https://github.com/nostr-protocol/nips/blob/master/05.md
*/
const nostrController: AppController = async (c) => {
const { relay } = c.var;
// If there are no query parameters, this will always return an empty result.
if (!Object.entries(c.req.queries()).length) {
c.header('Cache-Control', 'max-age=31536000, public, immutable, stale-while-revalidate=86400');
@ -22,7 +20,7 @@ const nostrController: AppController = async (c) => {
const result = nameSchema.safeParse(c.req.query('name'));
const name = result.success ? result.data : undefined;
const pointer = name ? await localNip05Lookup(relay, name) : undefined;
const pointer = name ? await localNip05Lookup(name, c.var) : undefined;
if (!name || !pointer) {
// Not found, cache for 5 minutes.

View file

@ -1,7 +1,7 @@
import { sql } from 'kysely';
import { Storages } from '@/storages.ts';
import {
type TrendsCtx,
updateTrendingEvents,
updateTrendingHashtags,
updateTrendingLinks,
@ -10,15 +10,15 @@ import {
} from '@/trends.ts';
/** Start cron jobs for the application. */
export function cron() {
Deno.cron('update trending pubkeys', '0 * * * *', updateTrendingPubkeys);
Deno.cron('update trending zapped events', '7 * * * *', updateTrendingZappedEvents);
Deno.cron('update trending events', '15 * * * *', updateTrendingEvents);
Deno.cron('update trending hashtags', '30 * * * *', updateTrendingHashtags);
Deno.cron('update trending links', '45 * * * *', updateTrendingLinks);
export function cron(ctx: TrendsCtx) {
Deno.cron('update trending pubkeys', '0 * * * *', () => updateTrendingPubkeys(ctx));
Deno.cron('update trending zapped events', '7 * * * *', () => updateTrendingZappedEvents(ctx));
Deno.cron('update trending events', '15 * * * *', () => updateTrendingEvents(ctx));
Deno.cron('update trending hashtags', '30 * * * *', () => updateTrendingHashtags(ctx));
Deno.cron('update trending links', '45 * * * *', () => updateTrendingLinks(ctx));
Deno.cron('refresh top authors', '20 * * * *', async () => {
const kysely = await Storages.kysely();
const { kysely } = ctx.db;
await sql`refresh materialized view top_authors`.execute(kysely);
});
}

View file

@ -1,32 +1,38 @@
import { firehoseEventsCounter } from '@ditto/metrics';
import { Semaphore } from '@core/asyncutil';
import { NRelay, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { nostrNow } from '@/utils.ts';
import * as pipeline from '@/pipeline.ts';
const sem = new Semaphore(Conf.firehoseConcurrency);
interface FirehoseOpts {
pool: NRelay;
relay: NStore;
concurrency: number;
kinds: number[];
timeout?: number;
}
/**
* This function watches events on all known relays and performs
* side-effects based on them, such as trending hashtag tracking
* and storing events for notifications and the home feed.
*/
export async function startFirehose(): Promise<void> {
const store = await Storages.client();
export async function startFirehose(opts: FirehoseOpts): Promise<void> {
const { pool, relay, kinds, concurrency, timeout = 5000 } = opts;
for await (const msg of store.req([{ kinds: Conf.firehoseKinds, limit: 0, since: nostrNow() }])) {
const sem = new Semaphore(concurrency);
for await (const msg of pool.req([{ kinds, limit: 0, since: nostrNow() }])) {
if (msg[0] === 'EVENT') {
const event = msg[2];
logi({ level: 'debug', ns: 'ditto.event', source: 'firehose', id: event.id, kind: event.kind });
firehoseEventsCounter.inc({ kind: event.kind });
sem.lock(async () => {
try {
await pipeline.handleEvent(event, { source: 'firehose', signal: AbortSignal.timeout(5000) });
await relay.event(event, { signal: AbortSignal.timeout(timeout) });
} catch {
// Ignore
}

View file

@ -1,17 +1,15 @@
import { AppMiddleware } from '@/app.ts';
import { PleromaConfigDB } from '@/utils/PleromaConfigDB.ts';
import { Storages } from '@/storages.ts';
import { getPleromaConfigs } from '@/utils/pleroma.ts';
export const cspMiddleware = (): AppMiddleware => {
let configDBCache: Promise<PleromaConfigDB> | undefined;
export const cspMiddleware = (): AppMiddleware => {
return async (c, next) => {
const { conf } = c.var;
const store = await Storages.db();
const { conf, relay } = c.var;
if (!configDBCache) {
configDBCache = getPleromaConfigs(store);
configDBCache = getPleromaConfigs(relay);
}
const { host, protocol, origin } = conf.url;

View file

@ -1,368 +0,0 @@
import { DittoTables } from '@ditto/db';
import { pipelineEventsCounter, policyEventsCounter, webPushNotificationsCounter } from '@ditto/metrics';
import { NKinds, NostrEvent, NSchema as n } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { Kysely, UpdateObject } from 'kysely';
import tldts from 'tldts';
import { z } from 'zod';
import { pipelineEncounters } from '@/caches/pipelineEncounters.ts';
import { Conf } from '@/config.ts';
import { DittoPush } from '@/DittoPush.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { RelayError } from '@/RelayError.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { Storages } from '@/storages.ts';
import { eventAge, Time } from '@/utils.ts';
import { getAmount } from '@/utils/bolt11.ts';
import { faviconCache } from '@/utils/favicon.ts';
import { errorJson } from '@/utils/log.ts';
import { nip05Cache } from '@/utils/nip05.ts';
import { parseNoteContent, stripimeta } from '@/utils/note.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { updateStats } from '@/utils/stats.ts';
import { getTagSet } from '@/utils/tags.ts';
import { unfurlCardCached } from '@/utils/unfurl.ts';
import { renderWebPushNotification } from '@/views/mastodon/push.ts';
import { policyWorker } from '@/workers/policy.ts';
import { verifyEventWorker } from '@/workers/verify.ts';
interface PipelineOpts {
signal: AbortSignal;
source: 'relay' | 'api' | 'firehose' | 'pipeline' | 'notify' | 'internal';
}
/**
* Common pipeline function to process (and maybe store) events.
* It is idempotent, so it can be called multiple times for the same event.
*/
async function handleEvent(event: DittoEvent, opts: PipelineOpts): Promise<void> {
// Skip events that have already been encountered.
if (pipelineEncounters.get(event.id)) {
throw new RelayError('duplicate', 'already have this event');
}
// Reject events that are too far in the future.
if (eventAge(event) < -Time.minutes(1)) {
throw new RelayError('invalid', 'event too far in the future');
}
// Integer max value for Postgres.
if (event.kind >= 2_147_483_647) {
throw new RelayError('invalid', 'event kind too large');
}
// The only point of ephemeral events is to stream them,
// so throw an error if we're not even going to do that.
if (NKinds.ephemeral(event.kind) && !isFresh(event)) {
throw new RelayError('invalid', 'event too old');
}
// Block NIP-70 events, because we have no way to `AUTH`.
if (isProtectedEvent(event)) {
throw new RelayError('invalid', 'protected event');
}
// Validate the event's signature.
if (!(await verifyEventWorker(event))) {
throw new RelayError('invalid', 'invalid signature');
}
// Recheck encountered after async ops.
if (pipelineEncounters.has(event.id)) {
throw new RelayError('duplicate', 'already have this event');
}
// Set the event as encountered after verifying the signature.
pipelineEncounters.set(event.id, true);
// Log the event.
logi({ level: 'debug', ns: 'ditto.event', source: 'pipeline', id: event.id, kind: event.kind });
pipelineEventsCounter.inc({ kind: event.kind });
// NIP-46 events get special treatment.
// They are exempt from policies and other side-effects, and should be streamed out immediately.
// If streaming fails, an error should be returned.
if (event.kind === 24133) {
const store = await Storages.db();
await store.event(event, { signal: opts.signal });
}
// Ensure the event doesn't violate the policy.
if (event.pubkey !== await Conf.signer.getPublicKey()) {
await policyFilter(event, opts.signal);
}
// Prepare the event for additional checks.
// FIXME: This is kind of hacky. Should be reorganized to fetch only what's needed for each stage.
await hydrateEvent(event, opts.signal);
// Ensure that the author is not banned.
const n = getTagSet(event.user?.tags ?? [], 'n');
if (n.has('disabled')) {
throw new RelayError('blocked', 'author is blocked');
}
const kysely = await Storages.kysely();
try {
await storeEvent(purifyEvent(event), opts.signal);
} finally {
// This needs to run in steps, and should not block the API from responding.
Promise.allSettled([
handleZaps(kysely, event),
updateAuthorData(event, opts.signal),
prewarmLinkPreview(event, opts.signal),
generateSetEvents(event),
])
.then(() => webPush(event))
.catch(() => {});
}
}
async function policyFilter(event: NostrEvent, signal: AbortSignal): Promise<void> {
try {
const result = await policyWorker.call(event, signal);
const [, , ok, reason] = result;
logi({ level: 'debug', ns: 'ditto.policy', id: event.id, kind: event.kind, ok, reason });
policyEventsCounter.inc({ ok: String(ok) });
RelayError.assert(result);
} catch (e) {
if (e instanceof RelayError) {
throw e;
} else {
logi({ level: 'error', ns: 'ditto.policy', id: event.id, kind: event.kind, error: errorJson(e) });
throw new RelayError('blocked', 'policy error');
}
}
}
/** Check whether the event has a NIP-70 `-` tag. */
function isProtectedEvent(event: NostrEvent): boolean {
return event.tags.some(([name]) => name === '-');
}
/** Hydrate the event with the user, if applicable. */
async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise<void> {
  const relay = await Storages.db();
  await hydrateEvents({ events: [event], relay, signal });
}
/**
 * Maybe store the event, if eligible.
 *
 * Stats and the event row are written in one transaction so they can't drift
 * apart; if only the stats update fails, the event is still inserted on its
 * own, since stats are best-effort.
 */
async function storeEvent(event: NostrEvent, signal?: AbortSignal): Promise<undefined> {
  const store = await Storages.db();
  try {
    await store.transaction(async (store, kysely) => {
      // Ephemeral events are streamed but never counted in stats.
      if (!NKinds.ephemeral(event.kind)) {
        await updateStats({ event, store, kysely });
      }
      await store.event(event, { signal });
    });
  } catch (e) {
    // If the failure is only because of updateStats (which runs first), insert the event anyway.
    // We can't catch this in the transaction because the error aborts the transaction on the Postgres side.
    if (e instanceof Error && e.message.includes('event_stats' satisfies keyof DittoTables)) {
      await store.event(event, { signal });
    } else {
      throw e;
    }
  }
}
/**
 * Parse kind 0 metadata and track indexes in the database.
 *
 * Verifies the nip05 identifier (when changed or never verified), caches the
 * domain's favicon, and maintains the `author_stats` search index.
 */
async function updateAuthorData(event: NostrEvent, signal: AbortSignal): Promise<void> {
  if (event.kind !== 0) return;

  // Parse metadata.
  const metadata = n.json().pipe(n.metadata()).catch({}).safeParse(event.content);
  if (!metadata.success) return;

  const { name, nip05 } = metadata.data;
  const kysely = await Storages.kysely();

  const updates: UpdateObject<DittoTables, 'author_stats'> = {};

  const authorStats = await kysely
    .selectFrom('author_stats')
    .selectAll()
    .where('pubkey', '=', event.pubkey)
    .executeTakeFirst();

  const lastVerified = authorStats?.nip05_last_verified_at;
  const eventNewer = !lastVerified || event.created_at > lastVerified;

  try {
    // Re-verify when the nip05 changed (and the event is newer), or when it was never verified.
    if (nip05 !== authorStats?.nip05 && eventNewer || !lastVerified) {
      if (nip05) {
        const tld = tldts.parse(nip05);
        // Only resolve real ICANN domains (no IPs or private suffixes).
        if (tld.isIcann && !tld.isIp && !tld.isPrivate) {
          const pointer = await nip05Cache.fetch(nip05, { signal });
          if (pointer.pubkey === event.pubkey) {
            updates.nip05 = nip05;
            updates.nip05_domain = tld.domain;
            updates.nip05_hostname = tld.hostname;
            updates.nip05_last_verified_at = event.created_at;
          }
        }
      } else {
        // The user removed their nip05, so clear the verified data.
        updates.nip05 = null;
        updates.nip05_domain = null;
        updates.nip05_hostname = null;
        updates.nip05_last_verified_at = event.created_at;
      }
    }
  } catch {
    // Fallthrough.
  }

  // Fetch favicon.
  // FIX: a nip05 without an `@` has no second segment, so `[1]` is undefined;
  // chain with `?.` instead of crashing on `.toLowerCase()`.
  const domain = nip05?.split('@')[1]?.toLowerCase();
  if (domain) {
    try {
      await faviconCache.fetch(domain, { signal });
    } catch {
      // Fallthrough.
    }
  }

  const search = [name, nip05].filter(Boolean).join(' ').trim();
  if (search !== authorStats?.search) {
    updates.search = search;
  }

  if (Object.keys(updates).length) {
    await kysely.insertInto('author_stats')
      .values({
        pubkey: event.pubkey,
        followers_count: 0,
        following_count: 0,
        notes_count: 0,
        search,
        ...updates,
      })
      .onConflict((oc) => oc.column('pubkey').doUpdateSet(updates))
      .execute();
  }
}
/** Warm the preview-card cache for the first URL found in the note content. */
async function prewarmLinkPreview(event: NostrEvent, signal: AbortSignal): Promise<void> {
  const content = stripimeta(event.content, event.tags);
  const { firstUrl } = parseNoteContent(content, []);
  if (!firstUrl) return;
  await unfurlCardCached(firstUrl, signal);
}
/** Determine if the event is being received in a timely manner. */
function isFresh(event: NostrEvent): boolean {
  const maxAge = Time.minutes(1);
  return eventAge(event) < maxAge;
}
/** Deliver Web Push notifications to every subscribed user tagged in the event. */
async function webPush(event: NostrEvent): Promise<void> {
  if (!isFresh(event)) {
    throw new RelayError('invalid', 'event too old');
  }

  const kysely = await Storages.kysely();
  const recipients = getTagSet(event.tags, 'p');

  if (!recipients.size) {
    return;
  }

  const subscriptions = await kysely
    .selectFrom('push_subscriptions')
    .selectAll()
    .where('pubkey', 'in', [...recipients])
    .execute();

  for (const sub of subscriptions) {
    // Don't notify authors about their own events.
    if (sub.pubkey === event.pubkey) continue;

    const message = await renderWebPushNotification(event, sub.pubkey);
    if (!message) continue;

    await DittoPush.push({
      endpoint: sub.endpoint,
      keys: {
        auth: sub.auth,
        p256dh: sub.p256dh,
      },
    }, message);

    webPushNotificationsCounter.inc({ type: message.notification_type });
  }
}
/**
 * Generate kind 30383 admin "set" events for reports (kind 1984) and
 * events-for-approval (kind 3036) that tag the admin account, so moderation
 * tooling can track their state (`open` / `pending`).
 */
async function generateSetEvents(event: NostrEvent): Promise<void> {
  const signer = Conf.signer;
  const pubkey = await signer.getPublicKey();

  // Only act on events that tag the admin account with `p` or `P`.
  const tagsAdmin = event.tags.some(([name, value]) => ['p', 'P'].includes(name) && value === pubkey);
  if (!tagsAdmin) return;

  /** Sign a kind 30383 tracking event for `event` and feed it back through the pipeline. */
  const track = async (status: string, extraTags: string[][] = []): Promise<void> => {
    const rel = await signer.signEvent({
      kind: 30383,
      content: '',
      tags: [
        ['d', event.id],
        ['p', event.pubkey],
        ['k', String(event.kind)],
        ['n', status],
        ...extraTags,
      ],
      created_at: Math.floor(Date.now() / 1000),
    });
    await handleEvent(rel, { source: 'pipeline', signal: AbortSignal.timeout(1000) });
  };

  if (event.kind === 1984) {
    // Reports open immediately and carry the reported pubkeys/events.
    await track('open', [
      ...[...getTagSet(event.tags, 'p')].map((value) => ['P', value]),
      ...[...getTagSet(event.tags, 'e')].map((value) => ['e', value]),
    ]);
  }

  if (event.kind === 3036) {
    // Approval requests start out pending.
    await track('pending');
  }
}
/** Stores the event in the 'event_zaps' table */
async function handleZaps(kysely: Kysely<DittoTables>, event: NostrEvent) {
  if (event.kind !== 9735) return;

  const description = event?.tags?.find(([name]) => name === 'description')?.[1];
  if (!description) return;

  const zapRequest = n.json().pipe(n.event()).optional().catch(undefined).parse(description);
  if (!zapRequest) return;

  const bolt11 = event?.tags.find(([name]) => name === 'bolt11')?.[1];
  const amount_millisats = z.coerce.number().int().nonnegative().catch(0).parse(getAmount(bolt11));
  if (!amount_millisats || amount_millisats < 1) return;

  const zappedEventId = zapRequest.tags.find(([name]) => name === 'e')?.[1];
  if (!zappedEventId) return;

  try {
    await kysely.insertInto('event_zaps').values({
      receipt_id: event.id,
      target_event_id: zappedEventId,
      sender_pubkey: zapRequest.pubkey,
      amount_millisats,
      comment: zapRequest.content,
    }).execute();
  } catch {
    // receipt_id is unique, do nothing
  }
}
export { handleEvent, handleZaps, updateAuthorData };

View file

@ -1,73 +1,55 @@
import { DittoDB } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { fallbackAuthor } from '@/utils.ts';
import { findReplyTag, getTagSet } from '@/utils/tags.ts';
interface GetEventOpts {
/** Signal to abort the request. */
db: DittoDB;
conf: DittoConf;
relay: NStore;
signal?: AbortSignal;
/** Event kind. */
kind?: number;
}
/**
* Get a Nostr event by its ID.
* @deprecated Use `relay.query` directly.
*/
const getEvent = async (
id: string,
opts: GetEventOpts = {},
): Promise<DittoEvent | undefined> => {
const relay = await Storages.db();
const { kind, signal = AbortSignal.timeout(1000) } = opts;
async function getEvent(id: string, opts: GetEventOpts): Promise<DittoEvent | undefined> {
const filter: NostrFilter = { ids: [id], limit: 1 };
if (kind) {
filter.kinds = [kind];
const events = await opts.relay.query([filter], opts);
const [event] = await hydrateEvents({ ...opts, events });
return event;
}
return await relay.query([filter], { limit: 1, signal })
.then((events) => hydrateEvents({ events, relay, signal }))
.then(([event]) => event);
};
/**
* Get a Nostr `set_medatadata` event for a user's pubkey.
* @deprecated Use `relay.query` directly.
*/
async function getAuthor(pubkey: string, opts: GetEventOpts = {}): Promise<NostrEvent | undefined> {
const relay = await Storages.db();
const { signal = AbortSignal.timeout(1000) } = opts;
const events = await relay.query([{ authors: [pubkey], kinds: [0], limit: 1 }], { limit: 1, signal });
const event = events[0] ?? fallbackAuthor(pubkey);
await hydrateEvents({ events: [event], relay, signal });
async function getAuthor(pubkey: string, opts: GetEventOpts): Promise<NostrEvent | undefined> {
const events = await opts.relay.query([{ authors: [pubkey], kinds: [0], limit: 1 }], opts);
const [event] = await hydrateEvents({ ...opts, events });
return event;
}
/** Get users the given pubkey follows. */
const getFollows = async (pubkey: string, signal?: AbortSignal): Promise<NostrEvent | undefined> => {
const store = await Storages.db();
const [event] = await store.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { limit: 1, signal });
const getFollows = async (relay: NStore, pubkey: string, signal?: AbortSignal): Promise<NostrEvent | undefined> => {
const [event] = await relay.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { signal });
return event;
};
/** Get pubkeys the user follows. */
async function getFollowedPubkeys(pubkey: string, signal?: AbortSignal): Promise<Set<string>> {
const event = await getFollows(pubkey, signal);
async function getFollowedPubkeys(relay: NStore, pubkey: string, signal?: AbortSignal): Promise<Set<string>> {
const event = await getFollows(relay, pubkey, signal);
if (!event) return new Set();
return getTagSet(event.tags, 'p');
}
/** Get pubkeys the user follows, including the user's own pubkey. */
async function getFeedPubkeys(pubkey: string): Promise<Set<string>> {
const authors = await getFollowedPubkeys(pubkey);
async function getFeedPubkeys(relay: NStore, pubkey: string): Promise<Set<string>> {
const authors = await getFollowedPubkeys(relay, pubkey);
return authors.add(pubkey);
}
@ -92,34 +74,11 @@ async function getAncestors(store: NStore, event: NostrEvent, result: NostrEvent
async function getDescendants(
store: NStore,
event: NostrEvent,
signal = AbortSignal.timeout(2000),
signal?: AbortSignal,
): Promise<NostrEvent[]> {
return await store
.query([{ kinds: [1], '#e': [event.id], since: event.created_at, limit: 200 }], { signal })
.then((events) => events.filter(({ tags }) => findReplyTag(tags)?.[1] === event.id));
}
/** Returns whether the pubkey is followed by a local user. */
async function isLocallyFollowed(pubkey: string): Promise<boolean> {
const { host } = Conf.url;
const store = await Storages.db();
const [event] = await store.query(
[{ kinds: [3], '#p': [pubkey], search: `domain:${host}`, limit: 1 }],
{ limit: 1 },
);
return Boolean(event);
}
export {
getAncestors,
getAuthor,
getDescendants,
getEvent,
getFeedPubkeys,
getFollowedPubkeys,
getFollows,
isLocallyFollowed,
};
export { getAncestors, getAuthor, getDescendants, getEvent, getFeedPubkeys, getFollowedPubkeys, getFollows };

View file

@ -22,16 +22,6 @@ const hashtagSchema = z.string().regex(/^\w{1,30}$/);
*/
const safeUrlSchema = z.string().max(2048).url();
/** WebSocket URL. */
const wsUrlSchema = z.string().refine((val) => {
try {
const { protocol } = new URL(val);
return protocol === 'wss:' || protocol === 'ws:';
} catch {
return false;
}
}, 'Invalid WebSocket URL');
/** https://github.com/colinhacks/zod/issues/1630#issuecomment-1365983831 */
const booleanParamSchema = z.enum(['true', 'false']).transform((value) => value === 'true');
@ -93,5 +83,4 @@ export {
safeUrlSchema,
sizesSchema,
walletSchema,
wsUrlSchema,
};

View file

@ -1,14 +0,0 @@
import { z } from 'zod';
/** Schema to parse pagination query params. */
export const paginationSchema = z.object({
max_id: z.string().transform((val) => {
if (!val.includes('-')) return val;
return val.split('-')[1];
}).optional().catch(undefined),
min_id: z.string().optional().catch(undefined),
since: z.coerce.number().nonnegative().optional().catch(undefined),
until: z.coerce.number().nonnegative().optional().catch(undefined),
limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)),
offset: z.coerce.number().nonnegative().catch(0),
});

View file

@ -1,13 +1,12 @@
// deno-lint-ignore-file require-await
import { HTTPException } from '@hono/hono/http-exception';
import { NConnectSigner, NostrEvent, NostrSigner } from '@nostrify/nostrify';
import { Storages } from '@/storages.ts';
import { NConnectSigner, NostrEvent, NostrSigner, NRelay } from '@nostrify/nostrify';
interface ConnectSignerOpts {
bunkerPubkey: string;
userPubkey: string;
signer: NostrSigner;
relay: NRelay;
relays?: string[];
}
@ -17,27 +16,23 @@ interface ConnectSignerOpts {
* Simple extension of nostrify's `NConnectSigner`, with our options to keep it DRY.
*/
export class ConnectSigner implements NostrSigner {
private signer: Promise<NConnectSigner>;
private signer: NConnectSigner;
constructor(private opts: ConnectSignerOpts) {
this.signer = this.init(opts.signer);
}
const { relay, signer } = this.opts;
async init(signer: NostrSigner): Promise<NConnectSigner> {
return new NConnectSigner({
this.signer = new NConnectSigner({
encryption: 'nip44',
pubkey: this.opts.bunkerPubkey,
// TODO: use a remote relay for `nprofile` signing (if present and `Conf.relay` isn't already in the list)
relay: await Storages.db(),
relay,
signer,
timeout: 60_000,
});
}
async signEvent(event: Omit<NostrEvent, 'id' | 'pubkey' | 'sig'>): Promise<NostrEvent> {
const signer = await this.signer;
try {
return await signer.signEvent(event);
return await this.signer.signEvent(event);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, { message: 'The event was not signed quickly enough' });
@ -49,9 +44,8 @@ export class ConnectSigner implements NostrSigner {
readonly nip04 = {
encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
const signer = await this.signer;
try {
return await signer.nip04.encrypt(pubkey, plaintext);
return await this.signer.nip04.encrypt(pubkey, plaintext);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, {
@ -64,9 +58,8 @@ export class ConnectSigner implements NostrSigner {
},
decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
const signer = await this.signer;
try {
return await signer.nip04.decrypt(pubkey, ciphertext);
return await this.signer.nip04.decrypt(pubkey, ciphertext);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, {
@ -81,9 +74,8 @@ export class ConnectSigner implements NostrSigner {
readonly nip44 = {
encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
const signer = await this.signer;
try {
return await signer.nip44.encrypt(pubkey, plaintext);
return await this.signer.nip44.encrypt(pubkey, plaintext);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, {
@ -96,9 +88,8 @@ export class ConnectSigner implements NostrSigner {
},
decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
const signer = await this.signer;
try {
return await signer.nip44.decrypt(pubkey, ciphertext);
return await this.signer.nip44.decrypt(pubkey, ciphertext);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, {

View file

@ -1,12 +0,0 @@
// Starts up applications required to run before the HTTP server is on.
import { Conf } from '@/config.ts';
import { cron } from '@/cron.ts';
import { startFirehose } from '@/firehose.ts';
if (Conf.firehoseEnabled) {
startFirehose();
}
if (Conf.cronEnabled) {
cron();
}

View file

@ -1,109 +0,0 @@
// deno-lint-ignore-file require-await
import { type DittoDB, DittoPolyPg } from '@ditto/db';
import { NPool, NRelay1 } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { Conf } from '@/config.ts';
import { wsUrlSchema } from '@/schema.ts';
import { DittoPgStore } from '@/storages/DittoPgStore.ts';
import { getRelays } from '@/utils/outbox.ts';
import { seedZapSplits } from '@/utils/zap-split.ts';
export class Storages {
private static _db: Promise<DittoPgStore> | undefined;
private static _database: Promise<DittoDB> | undefined;
private static _client: Promise<NPool<NRelay1>> | undefined;
public static async database(): Promise<DittoDB> {
if (!this._database) {
this._database = (async () => {
const db = DittoPolyPg.create(Conf.databaseUrl, {
poolSize: Conf.pg.poolSize,
debug: Conf.pgliteDebug,
});
await DittoPolyPg.migrate(db.kysely);
return db;
})();
}
return this._database;
}
public static async kysely(): Promise<DittoDB['kysely']> {
const { kysely } = await this.database();
return kysely;
}
/** SQL database to store events this Ditto server cares about. */
public static async db(): Promise<DittoPgStore> {
if (!this._db) {
this._db = (async () => {
const db = await this.database();
const store = new DittoPgStore({
db,
pubkey: await Conf.signer.getPublicKey(),
timeout: Conf.db.timeouts.default,
notify: Conf.notifyEnabled,
});
await seedZapSplits(store);
return store;
})();
}
return this._db;
}
/** Relay pool storage. */
public static async client(): Promise<NPool<NRelay1>> {
if (!this._client) {
this._client = (async () => {
const db = await this.db();
const [relayList] = await db.query([
{ kinds: [10002], authors: [await Conf.signer.getPublicKey()], limit: 1 },
]);
const tags = relayList?.tags ?? [];
const activeRelays = tags.reduce((acc, [name, url, marker]) => {
const valid = wsUrlSchema.safeParse(url).success;
if (valid && name === 'r' && (!marker || marker === 'write')) {
acc.push(url);
}
return acc;
}, []);
logi({
level: 'info',
ns: 'ditto.pool',
msg: `connecting to ${activeRelays.length} relays`,
relays: activeRelays,
});
return new NPool({
open(url) {
return new NRelay1(url, {
// Skip event verification (it's done in the pipeline).
verifyEvent: () => true,
log(log) {
logi(log);
},
});
},
reqRouter: async (filters) => {
return new Map(activeRelays.map((relay) => {
return [relay, filters];
}));
},
eventRouter: async (event) => {
const relaySet = await getRelays(await Storages.db(), event.pubkey);
relaySet.delete(Conf.relay);
const relays = [...relaySet].slice(0, 4);
return relays;
},
});
})();
}
return this._client;
}
}

View file

@ -0,0 +1,60 @@
import { logi } from '@soapbox/logi';
import { NostrEvent, NostrFilter, NostrRelayCLOSED, NostrRelayEOSE, NostrRelayEVENT, NRelay } from '@nostrify/nostrify';
import { errorJson } from '@/utils/log.ts';
import { purifyEvent } from '@/utils/purify.ts';
interface DittoAPIStoreOpts {
pool: NRelay;
relay: NRelay;
}
/**
 * Store used by Ditto's Mastodon API implementation.
 * It extends the RelayStore to publish events to the wider Nostr network.
 *
 * Reads (`req`/`query`) go only to the local relay; writes are stored locally
 * and then republished to the relay pool in the background.
 */
export class DittoAPIStore implements NRelay {
  // Namespace used for structured log lines from this store.
  private ns = 'ditto.api.store';

  constructor(private opts: DittoAPIStoreOpts) {}

  /** Subscribe through the local relay only; the pool is not queried. */
  req(
    filters: NostrFilter[],
    opts?: { signal?: AbortSignal },
  ): AsyncIterable<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED> {
    const { relay } = this.opts;
    return relay.req(filters, opts);
  }

  /** Query events from the local relay only. */
  query(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise<NostrEvent[]> {
    const { relay } = this.opts;
    return relay.query(filters, opts);
  }

  /**
   * Store the event in the local relay, then republish it to the pool.
   * The local write is awaited; the pool publish is fire-and-forget, and its
   * failures are only logged.
   */
  async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise<void> {
    const { pool, relay } = this.opts;
    const { id, kind } = event;

    await relay.event(event, opts);

    // Deliberately not awaited: don't block the caller on the wider network.
    (async () => {
      try {
        // `purifyEvent` is important, or you will suffer.
        await pool.event(purifyEvent(event), opts);
      } catch (e) {
        logi({ level: 'error', ns: this.ns, source: 'publish', id, kind, error: errorJson(e) });
      }
    })();
  }

  /** Close the pool, then the local relay. */
  async close(): Promise<void> {
    const { pool, relay } = this.opts;
    await pool.close();
    await relay.close();
  }

  [Symbol.asyncDispose](): Promise<void> {
    return this.close();
  }
}

View file

@ -76,8 +76,8 @@ Deno.test('query events with domain search filter', async () => {
assertEquals(await store.query([{ search: '' }]), [event1]);
await kysely
.insertInto('author_stats')
.values({
.updateTable('author_stats')
.set({
pubkey: event1.pubkey,
nip05_domain: 'gleasonator.dev',
nip05_last_verified_at: event1.created_at,
@ -205,11 +205,21 @@ Deno.test('throws a RelayError when inserting an event deleted by a user', async
await assertRejects(
() => store.event(event),
RelayError,
// RelayError,
'event deleted by user',
);
});
Deno.test('inserting the same event twice', async () => {
await using db = await createTestDB({ pure: true });
const { store } = db;
const event = genEvent({ kind: 1 });
await store.event(event);
await store.event(event);
});
Deno.test('inserting replaceable events', async () => {
await using db = await createTestDB({ pure: true });
const { store } = db;
@ -225,6 +235,8 @@ Deno.test('inserting replaceable events', async () => {
const newerEvent = genEvent({ kind: 0, created_at: 999 }, sk);
await store.event(newerEvent);
assertEquals(await store.query([{ kinds: [0] }]), [newerEvent]);
await store.event(olderEvent); // doesn't throw
});
Deno.test("throws a RelayError when querying an event with a large 'since'", async () => {

View file

@ -31,6 +31,7 @@ import { abortError } from '@/utils/abort.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getMediaLinks } from '@/utils/note.ts';
import { updateStats } from '@/utils/stats.ts';
/** Function to decide whether or not to index a tag. */
type TagCondition = (opts: TagConditionOpts) => boolean;
@ -54,7 +55,7 @@ interface DittoPgStoreOpts {
/** Pubkey of the admin account. */
pubkey: string;
/** Timeout in milliseconds for database queries. */
timeout: number;
timeout?: number;
/** Whether the event returned should be a Nostr event or a Ditto event. Defaults to false. */
pure?: boolean;
/** Chunk size for streaming events. Defaults to 20. */
@ -144,13 +145,40 @@ export class DittoPgStore extends NPostgres {
await this.deleteEventsAdmin(event);
try {
await super.event(event, { ...opts, timeout: opts.timeout ?? this.opts.timeout });
await this.storeEvent(event, { ...opts, timeout: opts.timeout ?? this.opts.timeout });
this.fulfill(event); // don't await or catch (should never reject)
} catch (e) {
if (e instanceof Error && e.message === 'Cannot add a deleted event') {
throw new RelayError('blocked', 'event deleted by user');
} else if (e instanceof Error && e.message === 'Cannot replace an event with an older event') {
if (e instanceof Error) {
switch (e.message) {
case 'duplicate key value violates unique constraint "nostr_events_pkey"':
case 'duplicate key value violates unique constraint "author_stats_pkey"':
return;
case 'canceling statement due to statement timeout':
throw new RelayError('error', 'the event could not be added fast enough');
default:
throw e;
}
} else {
throw e;
}
}
}
/** Maybe store the event, if eligible. */
private async storeEvent(
event: NostrEvent,
opts: { signal?: AbortSignal; timeout?: number } = {},
): Promise<undefined> {
try {
await super.transaction(async (relay, kysely) => {
await updateStats({ event, relay, kysely: kysely as unknown as Kysely<DittoTables> });
await relay.event(event, opts);
});
} catch (e) {
// If the failure is only because of updateStats (which runs first), insert the event anyway.
// We can't catch this in the transaction because the error aborts the transaction on the Postgres side.
if (e instanceof Error && e.message.includes('event_stats' satisfies keyof DittoTables)) {
await super.event(event, opts);
} else {
throw e;
}

View file

@ -0,0 +1,91 @@
// deno-lint-ignore-file require-await
import { DittoConf } from '@ditto/conf';
import { NostrEvent, NostrFilter, NPool, type NRelay, NRelay1 } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
interface DittoPoolOpts {
conf: DittoConf;
relay: NRelay;
maxEventRelays?: number;
}
/**
 * Relay pool whose routing is driven by NIP-65 (kind 10002) relay lists
 * stored in the local relay: REQs fan out to the admin's read relays, and
 * events publish to the author's write relays.
 */
export class DittoPool extends NPool<NRelay1> {
  private _opts: DittoPoolOpts;

  constructor(opts: DittoPoolOpts) {
    super({
      open(url) {
        return new NRelay1(url, {
          // Skip event verification (it's done in the pipeline).
          verifyEvent: () => true,
          log: logi,
        });
      },
      // These arrows close over `this` but are only invoked after
      // construction completes, so the reference is safe despite appearing
      // before `super()` returns.
      reqRouter: (filters) => {
        return this.reqRouter(filters);
      },
      eventRouter: async (event) => {
        return this.eventRouter(event);
      },
    });
    this._opts = opts;
  }

  /** Route every REQ to all of the admin's "read" relays. */
  private async reqRouter(filters: NostrFilter[]): Promise<Map<string, NostrFilter[]>> {
    const routes = new Map<string, NostrFilter[]>();

    for (const relayUrl of await this.getRelayUrls({ marker: 'read' })) {
      routes.set(relayUrl, filters);
    }

    return routes;
  }

  /** Route the event to up to `maxEventRelays` of the author's "write" relays, excluding ourself. */
  private async eventRouter(event: NostrEvent): Promise<string[]> {
    const { conf, maxEventRelays = 4 } = this._opts;
    const { pubkey } = event;

    const relaySet = await this.getRelayUrls({ pubkey, marker: 'write' });
    relaySet.delete(conf.relay);

    return [...relaySet].slice(0, maxEventRelays);
  }

  /**
   * Collect relay URLs from the kind 10002 lists of the admin and (if given)
   * the extra pubkey. Only `wss:` URLs are kept; an `r` tag without a marker
   * counts for both read and write.
   */
  private async getRelayUrls(opts: { pubkey?: string; marker?: 'read' | 'write' } = {}): Promise<Set<string>> {
    const { conf, relay } = this._opts;

    const relays = new Set<`wss://${string}`>();
    const authors = new Set<string>([await conf.signer.getPublicKey()]);

    if (opts.pubkey) {
      authors.add(opts.pubkey);
    }

    const events = await relay.query([
      { kinds: [10002], authors: [...authors] },
    ]);

    // Ensure user's own relay list is counted first.
    if (opts.pubkey) {
      events.sort((a) => a.pubkey === opts.pubkey ? -1 : 1);
    }

    for (const event of events) {
      for (const [name, relayUrl, marker] of event.tags) {
        if (name === 'r' && (!marker || !opts.marker || marker === opts.marker)) {
          try {
            const url = new URL(relayUrl);
            if (url.protocol === 'wss:') {
              relays.add(url.toString() as `wss://${string}`);
            }
          } catch {
            // fallthrough
          }
        }
      }
    }

    return relays;
  }
}

View file

@ -0,0 +1,69 @@
import { DittoPolyPg } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { genEvent, MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { generateSecretKey, getPublicKey } from 'nostr-tools';
import { DittoRelayStore } from './DittoRelayStore.ts';
import type { NostrMetadata } from '@nostrify/types';
// Verifies that a kind 0 event with a resolvable nip05 populates the
// author_stats row with the identifier and its parsed domain/hostname.
Deno.test('updateAuthorData sets nip05', async () => {
  const alex = generateSecretKey();

  // Mock the NIP-05 well-known lookup so verification succeeds offline.
  await using test = setupTest((req) => {
    switch (req.url) {
      case 'https://gleasonator.dev/.well-known/nostr.json?name=alex':
        return jsonResponse({ names: { alex: getPublicKey(alex) } });
      default:
        return new Response('Not found', { status: 404 });
    }
  });

  const { db, store } = test;

  const metadata: NostrMetadata = { nip05: 'alex@gleasonator.dev' };
  const event = genEvent({ kind: 0, content: JSON.stringify(metadata) }, alex);

  await store.updateAuthorData(event);

  const row = await db.kysely
    .selectFrom('author_stats')
    .selectAll()
    .where('pubkey', '=', getPublicKey(alex))
    .executeTakeFirst();

  assertEquals(row?.nip05, 'alex@gleasonator.dev');
  assertEquals(row?.nip05_domain, 'gleasonator.dev');
  assertEquals(row?.nip05_hostname, 'gleasonator.dev');
});
/** Build a DittoRelayStore wired to a mock fetch handler, plus its backing database. */
function setupTest(cb: (req: Request) => Response | Promise<Response>) {
  const conf = new DittoConf(Deno.env);
  const db = new DittoPolyPg(conf.databaseUrl);
  const relay = new MockRelay();

  // Route all outbound HTTP through the provided handler.
  const mockFetch: typeof fetch = async (input, init) => await cb(new Request(input, init));

  const store = new DittoRelayStore({ conf, db, relay, fetch: mockFetch });

  return {
    db,
    store,
    // Dispose the store before the database it depends on.
    [Symbol.asyncDispose]: async () => {
      await store[Symbol.asyncDispose]();
      await db[Symbol.asyncDispose]();
    },
  };
}
/** Wrap a value as an `application/json` HTTP response. */
function jsonResponse(body: unknown): Response {
  const headers = { 'Content-Type': 'application/json' };
  return new Response(JSON.stringify(body), { headers });
}

View file

@ -0,0 +1,470 @@
import { DittoConf } from '@ditto/conf';
import { DittoDB, DittoTables } from '@ditto/db';
import {
cachedFaviconsSizeGauge,
cachedNip05sSizeGauge,
pipelineEventsCounter,
policyEventsCounter,
webPushNotificationsCounter,
} from '@ditto/metrics';
import {
NKinds,
NostrEvent,
NostrFilter,
NostrRelayCLOSED,
NostrRelayCOUNT,
NostrRelayEOSE,
NostrRelayEVENT,
NRelay,
NSchema as n,
} from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { UpdateObject } from 'kysely';
import { LRUCache } from 'lru-cache';
import tldts from 'tldts';
import { z } from 'zod';
import { DittoPush } from '@/DittoPush.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { RelayError } from '@/RelayError.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { eventAge, nostrNow, Time } from '@/utils.ts';
import { getAmount } from '@/utils/bolt11.ts';
import { errorJson } from '@/utils/log.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { getTagSet } from '@/utils/tags.ts';
import { PolicyWorker } from '@/workers/policy.ts';
import { verifyEventWorker } from '@/workers/verify.ts';
import { fetchFavicon, insertFavicon, queryFavicon } from '@/utils/favicon.ts';
import { lookupNip05 } from '@/utils/nip05.ts';
import { parseNoteContent, stripimeta } from '@/utils/note.ts';
import { SimpleLRU } from '@/utils/SimpleLRU.ts';
import { unfurlCardCached } from '@/utils/unfurl.ts';
import { renderWebPushNotification } from '@/views/mastodon/push.ts';
import { nip19 } from 'nostr-tools';
interface DittoRelayStoreOpts {
db: DittoDB;
conf: DittoConf;
relay: NRelay;
fetch?: typeof fetch;
}
/** Backing storage class for Ditto relay implementation at `/relay`. */
export class DittoRelayStore implements NRelay {
private push: DittoPush;
private encounters = new LRUCache<string, true>({ max: 5000 });
private controller = new AbortController();
private policyWorker: PolicyWorker;
private faviconCache: SimpleLRU<string, URL>;
private nip05Cache: SimpleLRU<string, nip19.ProfilePointer>;
private ns = 'ditto.relay.store';
  constructor(private opts: DittoRelayStoreOpts) {
    const { conf, db } = this.opts;

    this.push = new DittoPush(opts);
    this.policyWorker = new PolicyWorker(conf);

    // Start consuming the relay firehose immediately; a failure here is
    // logged rather than thrown, since constructors can't reject.
    this.listen().catch((e: unknown) => {
      logi({ level: 'error', ns: this.ns, source: 'listen', error: errorJson(e) });
    });

    this.faviconCache = new SimpleLRU<string, URL>(
      async (domain, { signal }) => {
        // Serve from the database while the stored favicon is still fresh.
        const row = await queryFavicon(db.kysely, domain);
        if (row && (nostrNow() - row.last_updated_at) < (conf.caches.favicon.ttl / 1000)) {
          return new URL(row.favicon);
        }
        // Otherwise fetch from the network and persist the result.
        const url = await fetchFavicon(domain, signal);
        await insertFavicon(db.kysely, domain, url.href);
        return url;
      },
      { ...conf.caches.favicon, gauge: cachedFaviconsSizeGauge },
    );

    this.nip05Cache = new SimpleLRU<string, nip19.ProfilePointer>(
      (nip05, { signal }) => {
        return lookupNip05(nip05, { ...this.opts, signal });
      },
      { ...conf.caches.nip05, gauge: cachedNip05sSizeGauge },
    );
  }
/** Open a firehose to the relay. */
private async listen(): Promise<void> {
const { relay } = this.opts;
const { signal } = this.controller;
for await (const msg of relay.req([{ limit: 0 }], { signal })) {
if (msg[0] === 'EVENT') {
const [, , event] = msg;
await this.event(event, { signal });
}
}
}
req(
filters: NostrFilter[],
opts?: { signal?: AbortSignal },
): AsyncIterable<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED> {
const { relay } = this.opts;
return relay.req(filters, opts);
}
  /**
   * Common pipeline function to process (and maybe store) events.
   * It is idempotent, so it can be called multiple times for the same event.
   *
   * Throws a RelayError when the event is rejected: duplicate, too far in the
   * future, oversized kind, stale ephemeral, NIP-70 protected, bad signature,
   * blocked by policy, or authored by a disabled user.
   */
  async event(event: DittoEvent, opts: { publish?: boolean; signal?: AbortSignal } = {}): Promise<void> {
    const { conf, relay } = this.opts;
    const { signal } = opts;

    // Skip events that have already been encountered.
    if (this.encounters.get(event.id)) {
      throw new RelayError('duplicate', 'already have this event');
    }

    // Reject events that are too far in the future.
    if (eventAge(event) < -Time.minutes(1)) {
      throw new RelayError('invalid', 'event too far in the future');
    }

    // Integer max value for Postgres.
    if (event.kind >= 2_147_483_647) {
      throw new RelayError('invalid', 'event kind too large');
    }

    // The only point of ephemeral events is to stream them,
    // so throw an error if we're not even going to do that.
    if (NKinds.ephemeral(event.kind) && !this.isFresh(event)) {
      throw new RelayError('invalid', 'event too old');
    }

    // Block NIP-70 events, because we have no way to `AUTH`.
    if (event.tags.some(([name]) => name === '-')) {
      throw new RelayError('invalid', 'protected event');
    }

    // Validate the event's signature.
    if (!(await verifyEventWorker(event))) {
      throw new RelayError('invalid', 'invalid signature');
    }

    // Recheck encountered after async ops.
    if (this.encounters.has(event.id)) {
      throw new RelayError('duplicate', 'already have this event');
    }

    // Set the event as encountered after verifying the signature.
    this.encounters.set(event.id, true);

    // Log the event.
    logi({ level: 'debug', ns: 'ditto.event', source: 'pipeline', id: event.id, kind: event.kind });
    pipelineEventsCounter.inc({ kind: event.kind });

    // NIP-46 events get special treatment.
    // They are exempt from policies and other side-effects, and should be streamed out immediately.
    // If streaming fails, an error should be returned.
    if (event.kind === 24133) {
      await relay.event(event, { signal });
    }

    // Ensure the event doesn't violate the policy.
    // Events signed by the admin account itself are exempt.
    if (event.pubkey !== await conf.signer.getPublicKey()) {
      await this.policyFilter(event, signal);
    }

    // Prepare the event for additional checks.
    // FIXME: This is kind of hacky. Should be reorganized to fetch only what's needed for each stage.
    await this.hydrateEvent(event, signal);

    // Ensure that the author is not banned.
    const n = getTagSet(event.user?.tags ?? [], 'n');
    if (n.has('disabled')) {
      throw new RelayError('blocked', 'author is blocked');
    }

    try {
      await relay.event(purifyEvent(event), { signal });
    } finally {
      // This needs to run in steps, and should not block the API from responding.
      // NOTE: rejections here are intentionally swallowed — side-effect
      // failures never fail the write itself.
      Promise.allSettled([
        this.handleZaps(event),
        this.updateAuthorData(event, signal),
        this.prewarmLinkPreview(event, signal),
        this.generateSetEvents(event),
      ])
        .then(() => this.webPush(event))
        .catch(() => {});
    }
  }
/** Run the event through the policy worker, throwing a `RelayError` when it's blocked or the policy fails. */
private async policyFilter(event: NostrEvent, signal?: AbortSignal): Promise<void> {
  try {
    const msg = await this.policyWorker.call(event, signal);
    const ok = msg[2];
    const reason = msg[3];
    logi({ level: 'debug', ns: 'ditto.policy', id: event.id, kind: event.kind, ok, reason });
    policyEventsCounter.inc({ ok: String(ok) });
    RelayError.assert(msg);
  } catch (e) {
    // A `RelayError` means the policy deliberately rejected the event; pass it through.
    if (e instanceof RelayError) throw e;
    // Anything else is a policy malfunction — log it and block the event defensively.
    logi({ level: 'error', ns: 'ditto.policy', id: event.id, kind: event.kind, error: errorJson(e) });
    throw new RelayError('blocked', 'policy error');
  }
}
/** Stores the event in the 'event_zaps' table */
private async handleZaps(event: NostrEvent) {
  if (event.kind !== 9735) return;

  const { db } = this.opts;

  // A zap receipt embeds the original zap request (NIP-57) in its 'description' tag.
  const description = event.tags.find(([name]) => name === 'description')?.[1];
  if (!description) return;

  const zapRequest = n.json().pipe(n.event()).optional().catch(undefined).parse(description);
  if (!zapRequest) return;

  // The paid amount comes from the bolt11 invoice, in millisats.
  const bolt11 = event.tags.find(([name]) => name === 'bolt11')?.[1];
  const amount_millisats = z.coerce.number().int().nonnegative().catch(0).parse(getAmount(bolt11));
  if (!amount_millisats || amount_millisats < 1) return;

  const zappedEventId = zapRequest.tags.find(([name]) => name === 'e')?.[1];
  if (!zappedEventId) return;

  try {
    await db.kysely.insertInto('event_zaps').values({
      receipt_id: event.id,
      target_event_id: zappedEventId,
      sender_pubkey: zapRequest.pubkey,
      amount_millisats,
      comment: zapRequest.content,
    }).execute();
  } catch {
    // receipt_id is unique, do nothing
  }
}
/**
 * Parse kind 0 metadata and track indexes in the database.
 *
 * Verifies the author's NIP-05 (when changed or never verified), prewarms the
 * domain favicon, and upserts the `author_stats` search/NIP-05 columns.
 * All network steps are best-effort and never throw.
 */
async updateAuthorData(event: NostrEvent, signal?: AbortSignal): Promise<void> {
  if (event.kind !== 0) return;

  const { db } = this.opts;

  // Parse metadata.
  const metadata = n.json().pipe(n.metadata()).catch({}).safeParse(event.content);
  if (!metadata.success) return;

  const { name, nip05 } = metadata.data;
  const updates: UpdateObject<DittoTables, 'author_stats'> = {};

  const authorStats = await db.kysely
    .selectFrom('author_stats')
    .selectAll()
    .where('pubkey', '=', event.pubkey)
    .executeTakeFirst();

  const lastVerified = authorStats?.nip05_last_verified_at;
  const eventNewer = !lastVerified || event.created_at > lastVerified;

  try {
    // Re-verify when the NIP-05 changed (and this event is newer), or when it was never verified.
    if (nip05 !== authorStats?.nip05 && eventNewer || !lastVerified) {
      if (nip05) {
        const tld = tldts.parse(nip05);
        // Only attempt lookups against real ICANN domains (not IPs or private suffixes).
        if (tld.isIcann && !tld.isIp && !tld.isPrivate) {
          const pointer = await this.nip05Cache.fetch(nip05, { signal });
          if (pointer.pubkey === event.pubkey) {
            updates.nip05 = nip05;
            updates.nip05_domain = tld.domain;
            updates.nip05_hostname = tld.hostname;
            updates.nip05_last_verified_at = event.created_at;
          }
        }
      } else {
        // The author removed their NIP-05 — clear the stored values.
        updates.nip05 = null;
        updates.nip05_domain = null;
        updates.nip05_hostname = null;
        updates.nip05_last_verified_at = event.created_at;
      }
    }
  } catch {
    // Fallthrough.
  }

  // Fetch favicon.
  // FIX: optional-chain the `[1]` access too — a nip05 without '@' has no domain
  // part, and `.split('@')[1].toLowerCase()` would throw a TypeError.
  const domain = nip05?.split('@')[1]?.toLowerCase();
  if (domain) {
    try {
      await this.faviconCache.fetch(domain, { signal });
    } catch {
      // Fallthrough.
    }
  }

  const search = [name, nip05].filter(Boolean).join(' ').trim();

  if (search !== authorStats?.search) {
    updates.search = search;
  }

  if (Object.keys(updates).length) {
    await db.kysely.insertInto('author_stats')
      .values({
        pubkey: event.pubkey,
        followers_count: 0,
        following_count: 0,
        notes_count: 0,
        search,
        ...updates,
      })
      .onConflict((oc) => oc.column('pubkey').doUpdateSet(updates))
      .execute();
  }
}
private async prewarmLinkPreview(event: NostrEvent, signal?: AbortSignal): Promise<void> {
const { firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), []);
if (firstUrl) {
await unfurlCardCached(firstUrl, signal);
}
}
private async generateSetEvents(event: NostrEvent): Promise<void> {
const { conf } = this.opts;
const signer = conf.signer;
const pubkey = await signer.getPublicKey();
const tagsAdmin = event.tags.some(([name, value]) => ['p', 'P'].includes(name) && value === pubkey);
if (event.kind === 1984 && tagsAdmin) {
const rel = await signer.signEvent({
kind: 30383,
content: '',
tags: [
['d', event.id],
['p', event.pubkey],
['k', '1984'],
['n', 'open'],
...[...getTagSet(event.tags, 'p')].map((value) => ['P', value]),
...[...getTagSet(event.tags, 'e')].map((value) => ['e', value]),
],
created_at: Math.floor(Date.now() / 1000),
});
await this.event(rel, { signal: AbortSignal.timeout(1000) });
}
if (event.kind === 3036 && tagsAdmin) {
const rel = await signer.signEvent({
kind: 30383,
content: '',
tags: [
['d', event.id],
['p', event.pubkey],
['k', '3036'],
['n', 'pending'],
],
created_at: Math.floor(Date.now() / 1000),
});
await this.event(rel, { signal: AbortSignal.timeout(1000) });
}
}
/** Deliver Web Push notifications to subscribed users tagged in the event. */
private async webPush(event: NostrEvent): Promise<void> {
  if (!this.isFresh(event)) {
    throw new RelayError('invalid', 'event too old');
  }

  const { db, relay } = this.opts;

  const pubkeys = getTagSet(event.tags, 'p');
  if (!pubkeys.size) return;

  const rows = await db.kysely
    .selectFrom('push_subscriptions')
    .selectAll()
    .where('pubkey', 'in', [...pubkeys])
    .execute();

  for (const { pubkey: viewerPubkey, endpoint, auth, p256dh } of rows) {
    // Don't notify authors about their own events.
    if (viewerPubkey === event.pubkey) continue;

    const message = await renderWebPushNotification(relay, event, viewerPubkey);
    if (!message) continue;

    await this.push.push({ endpoint, keys: { auth, p256dh } }, message);
    webPushNotificationsCounter.inc({ type: message.notification_type });
  }
}
/** Hydrate the event with the user, if applicable. */
private async hydrateEvent(event: NostrEvent, signal?: AbortSignal): Promise<DittoEvent> {
  const events = await hydrateEvents({ ...this.opts, events: [event], signal });
  return events[0];
}
/** Determine if the event is being received in a timely manner (less than a minute old). */
private isFresh(event: NostrEvent): boolean {
  const age = eventAge(event);
  return age < Time.minutes(1);
}
/** Query the relay, hydrating results with related events unless `pure` is set. */
async query(filters: NostrFilter[], opts: { pure?: boolean; signal?: AbortSignal } = {}): Promise<DittoEvent[]> {
  const { relay } = this.opts;
  const { pure = true, signal } = opts; // TODO: make pure `false` by default

  const events = await relay.query(filters, opts);

  if (pure) {
    return events;
  }

  return hydrateEvents({ ...this.opts, events, signal });
}
count(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise<NostrRelayCOUNT[2]> {
const { relay } = this.opts;
if (!relay.count) {
return Promise.reject(new Error('Method not implemented.'));
}
return relay.count(filters, opts);
}
remove(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise<void> {
const { relay } = this.opts;
if (!relay.remove) {
return Promise.reject(new Error('Method not implemented.'));
}
return relay.remove(filters, opts);
}
/** Abort the firehose subscription and close the underlying relay connection. */
async close(): Promise<void> {
  this.controller.abort();
  await this.opts.relay.close();
}
/** Support `await using` by delegating disposal to `close()`. */
async [Symbol.asyncDispose](): Promise<void> {
  await this.close();
}
}

View file

@ -1,13 +1,16 @@
import { DittoConf } from '@ditto/conf';
import { DummyDB } from '@ditto/db';
import { MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { generateSecretKey, nip19 } from 'nostr-tools';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { createTestDB, eventFixture } from '@/test.ts';
import { eventFixture } from '@/test.ts';
Deno.test('hydrateEvents(): author --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const event0 = await eventFixture('event-0');
const event1 = await eventFixture('event-1');
@ -16,19 +19,15 @@ Deno.test('hydrateEvents(): author --- WITHOUT stats', async () => {
await relay.event(event0);
await relay.event(event1);
await hydrateEvents({
events: [event1],
relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [event1] });
const expectedEvent = { ...event1, author: event0 };
assertEquals(event1, expectedEvent);
});
Deno.test('hydrateEvents(): repost --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const event0madePost = await eventFixture('event-0-the-one-who-post-and-users-repost');
const event0madeRepost = await eventFixture('event-0-the-one-who-repost');
@ -41,23 +40,20 @@ Deno.test('hydrateEvents(): repost --- WITHOUT stats', async () => {
await relay.event(event1reposted);
await relay.event(event6);
await hydrateEvents({
events: [event6],
relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [event6] });
const expectedEvent6 = {
...event6,
author: event0madeRepost,
repost: { ...event1reposted, author: event0madePost },
};
assertEquals(event6, expectedEvent6);
});
Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const event0madeQuoteRepost = await eventFixture('event-0-the-one-who-quote-repost');
const event0 = await eventFixture('event-0');
@ -70,11 +66,7 @@ Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => {
await relay.event(event1quoteRepost);
await relay.event(event1willBeQuoteReposted);
await hydrateEvents({
events: [event1quoteRepost],
relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [event1quoteRepost] });
const expectedEvent1quoteRepost = {
...event1quoteRepost,
@ -86,8 +78,8 @@ Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => {
});
Deno.test('hydrateEvents(): repost of quote repost --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const author = await eventFixture('event-0-makes-repost-with-quote-repost');
const event1 = await eventFixture('event-1-will-be-reposted-with-quote-repost');
@ -100,23 +92,20 @@ Deno.test('hydrateEvents(): repost of quote repost --- WITHOUT stats', async ()
await relay.event(event1quote);
await relay.event(event6);
await hydrateEvents({
events: [event6],
relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [event6] });
const expectedEvent6 = {
...event6,
author,
repost: { ...event1quote, author, quote: { author, ...event1 } },
};
assertEquals(event6, expectedEvent6);
});
Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const authorDictator = await eventFixture('kind-0-dictator');
const authorVictim = await eventFixture('kind-0-george-orwell');
@ -129,11 +118,7 @@ Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stat
await relay.event(reportEvent);
await relay.event(event1);
await hydrateEvents({
events: [reportEvent],
relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [reportEvent] });
const expectedEvent: DittoEvent = {
...reportEvent,
@ -141,12 +126,13 @@ Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stat
reported_notes: [event1],
reported_profile: authorVictim,
};
assertEquals(reportEvent, expectedEvent);
});
Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const zapSender = await eventFixture('kind-0-jack');
const zapReceipt = await eventFixture('kind-9735-jack-zap-patrick');
@ -159,11 +145,7 @@ Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 ---
await relay.event(zappedPost);
await relay.event(zapReceiver);
await hydrateEvents({
events: [zapReceipt],
relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [zapReceipt] });
const expectedEvent: DittoEvent = {
...zapReceipt,
@ -175,5 +157,14 @@ Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 ---
zap_amount: 5225000, // millisats
zap_message: '🫂',
};
assertEquals(zapReceipt, expectedEvent);
});
function setupTest() {
const db = new DummyDB();
const conf = new DittoConf(new Map([['DITTO_NSEC', nip19.nsecEncode(generateSecretKey())]]));
const relay = new MockRelay();
return { conf, db, relay };
}

View file

@ -1,28 +1,28 @@
import { DittoTables } from '@ditto/db';
import { DittoDB, DittoTables } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { NStore } from '@nostrify/nostrify';
import { Kysely } from 'kysely';
import { matchFilter } from 'nostr-tools';
import { NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';
import { Conf } from '@/config.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { fallbackAuthor } from '@/utils.ts';
import { findQuoteTag } from '@/utils/tags.ts';
import { findQuoteInContent } from '@/utils/note.ts';
import { getAmount } from '@/utils/bolt11.ts';
import { Storages } from '@/storages.ts';
interface HydrateOpts {
events: DittoEvent[];
db: DittoDB;
conf: DittoConf;
relay: NStore;
events: DittoEvent[];
signal?: AbortSignal;
kysely?: Kysely<DittoTables>;
}
/** Hydrate events using the provided storage. */
async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
const { events, relay, signal, kysely = await Storages.kysely() } = opts;
const { conf, db, events } = opts;
if (!events.length) {
return events;
@ -30,28 +30,28 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
const cache = [...events];
for (const event of await gatherRelatedEvents({ events: cache, relay, signal })) {
for (const event of await gatherRelatedEvents({ ...opts, events: cache })) {
cache.push(event);
}
for (const event of await gatherQuotes({ events: cache, relay, signal })) {
for (const event of await gatherQuotes({ ...opts, events: cache })) {
cache.push(event);
}
for (const event of await gatherProfiles({ events: cache, relay, signal })) {
for (const event of await gatherProfiles({ ...opts, events: cache })) {
cache.push(event);
}
for (const event of await gatherUsers({ events: cache, relay, signal })) {
for (const event of await gatherUsers({ ...opts, events: cache })) {
cache.push(event);
}
for (const event of await gatherInfo({ events: cache, relay, signal })) {
for (const event of await gatherInfo({ ...opts, events: cache })) {
cache.push(event);
}
const authorStats = await gatherAuthorStats(cache, kysely as Kysely<DittoTables>);
const eventStats = await gatherEventStats(cache, kysely as Kysely<DittoTables>);
const authorStats = await gatherAuthorStats(cache, db.kysely);
const eventStats = await gatherEventStats(cache, db.kysely);
const domains = authorStats.reduce((result, { nip05_hostname }) => {
if (nip05_hostname) result.add(nip05_hostname);
@ -59,7 +59,7 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
}, new Set<string>());
const favicons = (
await kysely
await db.kysely
.selectFrom('domain_favicons')
.select(['domain', 'favicon'])
.where('domain', 'in', [...domains])
@ -79,7 +79,7 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
// Dedupe events.
const results = [...new Map(cache.map((event) => [event.id, event])).values()];
const admin = await Conf.signer.getPublicKey();
const admin = await conf.signer.getPublicKey();
// First connect all the events to each-other, then connect the connected events to the original list.
assembleEvents(admin, results, results, stats);
@ -317,7 +317,7 @@ async function gatherProfiles({ events, relay, signal }: HydrateOpts): Promise<D
}
/** Collect users from the events. */
async function gatherUsers({ events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
async function gatherUsers({ conf, events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
const pubkeys = new Set(events.map((event) => event.pubkey));
if (!pubkeys.size) {
@ -325,13 +325,13 @@ async function gatherUsers({ events, relay, signal }: HydrateOpts): Promise<Ditt
}
return relay.query(
[{ kinds: [30382], authors: [await Conf.signer.getPublicKey()], '#d': [...pubkeys], limit: pubkeys.size }],
[{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [...pubkeys], limit: pubkeys.size }],
{ signal },
);
}
/** Collect info events from the events. */
async function gatherInfo({ events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
async function gatherInfo({ conf, events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
const ids = new Set<string>();
for (const event of events) {
@ -345,7 +345,7 @@ async function gatherInfo({ events, relay, signal }: HydrateOpts): Promise<Ditto
}
return relay.query(
[{ kinds: [30383], authors: [await Conf.signer.getPublicKey()], '#d': [...ids], limit: ids.size }],
[{ kinds: [30383], authors: [await conf.signer.getPublicKey()], '#d': [...ids], limit: ids.size }],
{ signal },
);
}

View file

@ -13,9 +13,8 @@ export async function eventFixture(name: string): Promise<NostrEvent> {
/** Create a database for testing. It uses `DATABASE_URL`, or creates an in-memory database by default. */
export async function createTestDB(opts?: { pure?: boolean }) {
const db = DittoPolyPg.create(Conf.databaseUrl, { poolSize: 1 });
await DittoPolyPg.migrate(db.kysely);
const db = new DittoPolyPg(Conf.databaseUrl, { poolSize: 1 });
await db.migrate();
const store = new DittoPgStore({
db,
@ -26,8 +25,10 @@ export async function createTestDB(opts?: { pure?: boolean }) {
});
return {
db,
...db,
store,
kysely: db.kysely,
[Symbol.asyncDispose]: async () => {
const { rows } = await sql<
{ tablename: string }

View file

@ -1,11 +1,9 @@
import { DittoTables } from '@ditto/db';
import { NostrFilter } from '@nostrify/nostrify';
import { DittoConf } from '@ditto/conf';
import { DittoDB, DittoTables } from '@ditto/db';
import { NostrFilter, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { Kysely, sql } from 'kysely';
import { Conf } from '@/config.ts';
import { handleEvent } from '@/pipeline.ts';
import { Storages } from '@/storages.ts';
import { errorJson } from '@/utils/log.ts';
import { Time } from '@/utils/time.ts';
@ -63,8 +61,15 @@ export async function getTrendingTagValues(
}));
}
export interface TrendsCtx {
conf: DittoConf;
db: DittoDB;
relay: NStore;
}
/** Get trending tags and publish an event with them. */
export async function updateTrendingTags(
ctx: TrendsCtx,
l: string,
tagName: string,
kinds: number[],
@ -73,10 +78,11 @@ export async function updateTrendingTags(
aliases?: string[],
values?: string[],
) {
const { conf, db, relay } = ctx;
const params = { l, tagName, kinds, limit, extra, aliases, values };
logi({ level: 'info', ns: 'ditto.trends', msg: 'Updating trending', ...params });
const kysely = await Storages.kysely();
const signal = AbortSignal.timeout(1000);
const yesterday = Math.floor((Date.now() - Time.days(1)) / 1000);
@ -85,7 +91,7 @@ export async function updateTrendingTags(
const tagNames = aliases ? [tagName, ...aliases] : [tagName];
try {
const trends = await getTrendingTagValues(kysely, tagNames, {
const trends = await getTrendingTagValues(db.kysely, tagNames, {
kinds,
since: yesterday,
until: now,
@ -99,7 +105,7 @@ export async function updateTrendingTags(
return;
}
const signer = Conf.signer;
const signer = conf.signer;
const label = await signer.signEvent({
kind: 1985,
@ -112,7 +118,7 @@ export async function updateTrendingTags(
created_at: Math.floor(Date.now() / 1000),
});
await handleEvent(label, { source: 'internal', signal });
await relay.event(label, { signal });
logi({ level: 'info', ns: 'ditto.trends', msg: 'Trends updated', ...params });
} catch (e) {
logi({ level: 'error', ns: 'ditto.trends', msg: 'Error updating trends', ...params, error: errorJson(e) });
@ -120,28 +126,28 @@ export async function updateTrendingTags(
}
/** Update trending pubkeys. */
export function updateTrendingPubkeys(): Promise<void> {
return updateTrendingTags('#p', 'p', [1, 3, 6, 7, 9735], 40, Conf.relay);
export function updateTrendingPubkeys(ctx: TrendsCtx): Promise<void> {
return updateTrendingTags(ctx, '#p', 'p', [1, 3, 6, 7, 9735], 40, ctx.conf.relay);
}
/** Update trending zapped events. */
export function updateTrendingZappedEvents(): Promise<void> {
return updateTrendingTags('zapped', 'e', [9735], 40, Conf.relay, ['q']);
export function updateTrendingZappedEvents(ctx: TrendsCtx): Promise<void> {
return updateTrendingTags(ctx, 'zapped', 'e', [9735], 40, ctx.conf.relay, ['q']);
}
/** Update trending events. */
export async function updateTrendingEvents(): Promise<void> {
export async function updateTrendingEvents(ctx: TrendsCtx): Promise<void> {
const { conf, db } = ctx;
const results: Promise<void>[] = [
updateTrendingTags('#e', 'e', [1, 6, 7, 9735], 40, Conf.relay, ['q']),
updateTrendingTags(ctx, '#e', 'e', [1, 6, 7, 9735], 40, ctx.conf.relay, ['q']),
];
const kysely = await Storages.kysely();
for (const language of Conf.preferredLanguages ?? []) {
for (const language of conf.preferredLanguages ?? []) {
const yesterday = Math.floor((Date.now() - Time.days(1)) / 1000);
const now = Math.floor(Date.now() / 1000);
const rows = await kysely
const rows = await db.kysely
.selectFrom('nostr_events')
.select('nostr_events.id')
.where(sql`nostr_events.search_ext->>'language'`, '=', language)
@ -151,18 +157,20 @@ export async function updateTrendingEvents(): Promise<void> {
const ids = rows.map((row) => row.id);
results.push(updateTrendingTags(`#e.${language}`, 'e', [1, 6, 7, 9735], 40, Conf.relay, ['q'], ids));
results.push(
updateTrendingTags(ctx, `#e.${language}`, 'e', [1, 6, 7, 9735], 40, conf.relay, ['q'], ids),
);
}
await Promise.allSettled(results);
}
/** Update trending hashtags. */
export function updateTrendingHashtags(): Promise<void> {
return updateTrendingTags('#t', 't', [1], 20);
export function updateTrendingHashtags(ctx: TrendsCtx): Promise<void> {
return updateTrendingTags(ctx, '#t', 't', [1], 20);
}
/** Update trending links. */
export function updateTrendingLinks(): Promise<void> {
return updateTrendingTags('#r', 'r', [1], 20);
export function updateTrendingLinks(ctx: TrendsCtx): Promise<void> {
return updateTrendingTags(ctx, '#r', 'r', [1], 20);
}

View file

@ -1,25 +1,24 @@
import { User } from '@ditto/mastoapi/middleware';
import { DittoEnv } from '@ditto/mastoapi/router';
import { HTTPException } from '@hono/hono/http-exception';
import { NostrEvent, NostrFilter } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { EventTemplate } from 'nostr-tools';
import * as TypeFest from 'type-fest';
import { type AppContext } from '@/app.ts';
import { Conf } from '@/config.ts';
import * as pipeline from '@/pipeline.ts';
import { RelayError } from '@/RelayError.ts';
import { Storages } from '@/storages.ts';
import { nostrNow } from '@/utils.ts';
import { parseFormData } from '@/utils/formdata.ts';
import { errorJson } from '@/utils/log.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { Context } from '@hono/hono';
/** EventTemplate with defaults. */
type EventStub = TypeFest.SetOptional<EventTemplate, 'content' | 'created_at' | 'tags'>;
/** Publish an event through the pipeline. */
async function createEvent(t: EventStub, c: AppContext): Promise<NostrEvent> {
const { user } = c.var;
async function createEvent<E extends (DittoEnv & { Variables: { user?: User } })>(
t: EventStub,
c: Context<E>,
): Promise<NostrEvent> {
const { user, relay, signal } = c.var;
if (!user) {
throw new HTTPException(401, {
@ -34,7 +33,8 @@ async function createEvent(t: EventStub, c: AppContext): Promise<NostrEvent> {
...t,
});
return publishEvent(event, c);
await relay.event(event, { signal, publish: true });
return event;
}
/** Filter for fetching an existing event to update. */
@ -49,9 +49,9 @@ async function updateEvent<E extends EventStub>(
fn: (prev: NostrEvent) => E | Promise<E>,
c: AppContext,
): Promise<NostrEvent> {
const store = await Storages.db();
const { relay } = c.var;
const [prev] = await store.query(
const [prev] = await relay.query(
[filter],
{ signal: c.req.raw.signal },
);
@ -80,16 +80,18 @@ function updateListEvent(
/** Publish an admin event through the pipeline. */
async function createAdminEvent(t: EventStub, c: AppContext): Promise<NostrEvent> {
const signer = Conf.signer;
const { conf, relay, signal } = c.var;
const event = await signer.signEvent({
const event = await conf.signer.signEvent({
content: '',
created_at: nostrNow(),
tags: [],
...t,
});
return publishEvent(event, c);
// @ts-ignore `publish` is important for `DittoAPIStore`.
await relay.event(event, { signal, publish: true });
return event;
}
/** Fetch existing event, update its tags, then publish the new admin event. */
@ -111,8 +113,8 @@ async function updateAdminEvent<E extends EventStub>(
fn: (prev: NostrEvent | undefined) => E,
c: AppContext,
): Promise<NostrEvent> {
const store = await Storages.db();
const [prev] = await store.query([filter], { limit: 1, signal: c.req.raw.signal });
const { relay, signal } = c.var;
const [prev] = await relay.query([filter], { signal });
return createAdminEvent(fn(prev), c);
}
@ -125,8 +127,8 @@ function updateEventInfo(id: string, n: Record<string, boolean>, c: AppContext):
}
async function updateNames(k: number, d: string, n: Record<string, boolean>, c: AppContext): Promise<NostrEvent> {
const signer = Conf.signer;
const admin = await signer.getPublicKey();
const { conf } = c.var;
const admin = await conf.signer.getPublicKey();
return updateAdminEvent(
{ kinds: [k], authors: [admin], '#d': [d], limit: 1 },
@ -154,33 +156,6 @@ async function updateNames(k: number, d: string, n: Record<string, boolean>, c:
);
}
/** Push the event through the pipeline, rethrowing any RelayError. */
async function publishEvent(event: NostrEvent, c: AppContext): Promise<NostrEvent> {
logi({ level: 'info', ns: 'ditto.event', source: 'api', id: event.id, kind: event.kind });
try {
const promise = pipeline.handleEvent(event, { source: 'api', signal: c.req.raw.signal });
promise.then(async () => {
const client = await Storages.client();
await client.event(purifyEvent(event));
}).catch((e: unknown) => {
logi({ level: 'error', ns: 'ditto.pool', id: event.id, kind: event.kind, error: errorJson(e) });
});
await promise;
} catch (e) {
if (e instanceof RelayError) {
throw new HTTPException(422, {
res: c.json({ error: e.message }, 422),
});
} else {
throw e;
}
}
return event;
}
/** Parse request body to JSON, depending on the content-type of the request. */
async function parseBody(req: Request): Promise<unknown> {
switch (req.headers.get('content-type')?.split(';')[0]) {
@ -196,74 +171,8 @@ async function parseBody(req: Request): Promise<unknown> {
}
}
/** Build HTTP Link header for Mastodon API pagination. */
function buildLinkHeader(url: string, events: NostrEvent[]): string | undefined {
if (events.length <= 1) return;
const firstEvent = events[0];
const lastEvent = events[events.length - 1];
const { origin } = Conf.url;
const { pathname, search } = new URL(url);
const next = new URL(pathname + search, origin);
const prev = new URL(pathname + search, origin);
next.searchParams.set('until', String(lastEvent.created_at));
prev.searchParams.set('since', String(firstEvent.created_at));
return `<${next}>; rel="next", <${prev}>; rel="prev"`;
}
type HeaderRecord = Record<string, string | string[]>;
/** Return results with pagination headers. Assumes chronological sorting of events. */
function paginated(c: AppContext, events: NostrEvent[], body: object | unknown[], headers: HeaderRecord = {}) {
const link = buildLinkHeader(c.req.url, events);
if (link) {
headers.link = link;
}
// Filter out undefined entities.
const results = Array.isArray(body) ? body.filter(Boolean) : body;
return c.json(results, 200, headers);
}
/** Build HTTP Link header for paginating Nostr lists. */
function buildListLinkHeader(url: string, params: { offset: number; limit: number }): string | undefined {
const { origin } = Conf.url;
const { pathname, search } = new URL(url);
const { offset, limit } = params;
const next = new URL(pathname + search, origin);
const prev = new URL(pathname + search, origin);
next.searchParams.set('offset', String(offset + limit));
prev.searchParams.set('offset', String(Math.max(offset - limit, 0)));
next.searchParams.set('limit', String(limit));
prev.searchParams.set('limit', String(limit));
return `<${next}>; rel="next", <${prev}>; rel="prev"`;
}
/** paginate a list of tags. */
function paginatedList(
c: AppContext,
params: { offset: number; limit: number },
body: object | unknown[],
headers: HeaderRecord = {},
) {
const link = buildListLinkHeader(c.req.url, params);
const hasMore = Array.isArray(body) ? body.length > 0 : true;
if (link) {
headers.link = hasMore ? link : link.split(', ').find((link) => link.endsWith('; rel="prev"'))!;
}
// Filter out undefined entities.
const results = Array.isArray(body) ? body.filter(Boolean) : body;
return c.json(results, 200, headers);
}
/** Actors with Bluesky's `!no-unauthenticated` self-label should require authorization to view. */
function assertAuthenticated(c: AppContext, author: NostrEvent): void {
if (
@ -282,8 +191,6 @@ export {
createAdminEvent,
createEvent,
type EventStub,
paginated,
paginatedList,
parseBody,
updateAdminEvent,
updateEvent,

View file

@ -1,28 +0,0 @@
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
/** NIP-46 client-connect metadata. */
interface ConnectMetadata {
name: string;
description: string;
url: string;
}
/** Get NIP-46 `nostrconnect://` URI for the Ditto server. */
export async function getClientConnectUri(signal?: AbortSignal): Promise<string> {
const uri = new URL('nostrconnect://');
const { name, tagline } = await getInstanceMetadata(await Storages.db(), signal);
const metadata: ConnectMetadata = {
name,
description: tagline,
url: Conf.localDomain,
};
uri.host = await Conf.signer.getPublicKey();
uri.searchParams.set('relay', Conf.relay);
uri.searchParams.set('metadata', JSON.stringify(metadata));
return uri.toString();
}

View file

@ -1,36 +1,13 @@
import { DOMParser } from '@b-fuze/deno-dom';
import { DittoTables } from '@ditto/db';
import { cachedFaviconsSizeGauge } from '@ditto/metrics';
import { logi } from '@soapbox/logi';
import { safeFetch } from '@soapbox/safe-fetch';
import { Kysely } from 'kysely';
import tldts from 'tldts';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { nostrNow } from '@/utils.ts';
import { SimpleLRU } from '@/utils/SimpleLRU.ts';
export const faviconCache = new SimpleLRU<string, URL>(
async (domain, { signal }) => {
const kysely = await Storages.kysely();
const row = await queryFavicon(kysely, domain);
if (row && (nostrNow() - row.last_updated_at) < (Conf.caches.favicon.ttl / 1000)) {
return new URL(row.favicon);
}
const url = await fetchFavicon(domain, signal);
await insertFavicon(kysely, domain, url.href);
return url;
},
{ ...Conf.caches.favicon, gauge: cachedFaviconsSizeGauge },
);
async function queryFavicon(
export async function queryFavicon(
kysely: Kysely<DittoTables>,
domain: string,
): Promise<DittoTables['domain_favicons'] | undefined> {
@ -41,7 +18,7 @@ async function queryFavicon(
.executeTakeFirst();
}
async function insertFavicon(kysely: Kysely<DittoTables>, domain: string, favicon: string): Promise<void> {
export async function insertFavicon(kysely: Kysely<DittoTables>, domain: string, favicon: string): Promise<void> {
await kysely
.insertInto('domain_favicons')
.values({ domain, favicon, last_updated_at: nostrNow() })
@ -49,7 +26,7 @@ async function insertFavicon(kysely: Kysely<DittoTables>, domain: string, favico
.execute();
}
async function fetchFavicon(domain: string, signal?: AbortSignal): Promise<URL> {
export async function fetchFavicon(domain: string, signal?: AbortSignal): Promise<URL> {
logi({ level: 'info', ns: 'ditto.favicon', domain, state: 'started' });
const tld = tldts.parse(domain);

View file

@ -1,32 +1,42 @@
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
import { NostrEvent, NSchema as n, NStore } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { match } from 'path-to-regexp';
import tldts from 'tldts';
import { getAuthor } from '@/queries.ts';
import { bech32ToPubkey } from '@/utils.ts';
import { nip05Cache } from '@/utils/nip05.ts';
import { lookupNip05 } from '@/utils/nip05.ts';
import type { DittoConf } from '@ditto/conf';
import type { DittoDB } from '@ditto/db';
interface LookupAccountOpts {
db: DittoDB;
conf: DittoConf;
relay: NStore;
signal?: AbortSignal;
}
/** Resolve a bech32 or NIP-05 identifier to an account. */
export async function lookupAccount(
value: string,
signal = AbortSignal.timeout(3000),
opts: LookupAccountOpts,
): Promise<NostrEvent | undefined> {
const pubkey = await lookupPubkey(value, signal);
const pubkey = await lookupPubkey(value, opts);
if (pubkey) {
return getAuthor(pubkey);
return getAuthor(pubkey, opts);
}
}
/** Resolve a bech32 or NIP-05 identifier to a pubkey. */
export async function lookupPubkey(value: string, signal?: AbortSignal): Promise<string | undefined> {
export async function lookupPubkey(value: string, opts: LookupAccountOpts): Promise<string | undefined> {
if (n.bech32().safeParse(value).success) {
return bech32ToPubkey(value);
}
try {
const { pubkey } = await nip05Cache.fetch(value, { signal });
const { pubkey } = await lookupNip05(value, opts);
return pubkey;
} catch {
return;

View file

@ -1,28 +1,21 @@
import { cachedNip05sSizeGauge } from '@ditto/metrics';
import { DittoConf } from '@ditto/conf';
import { NIP05, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { safeFetch } from '@soapbox/safe-fetch';
import { nip19 } from 'nostr-tools';
import tldts from 'tldts';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { errorJson } from '@/utils/log.ts';
import { SimpleLRU } from '@/utils/SimpleLRU.ts';
export const nip05Cache = new SimpleLRU<string, nip19.ProfilePointer>(
async (nip05, { signal }) => {
const store = await Storages.db();
return getNip05(store, nip05, signal);
},
{ ...Conf.caches.nip05, gauge: cachedNip05sSizeGauge },
);
interface GetNip05Opts {
conf: DittoConf;
relay: NStore;
signal?: AbortSignal;
fetch?: typeof fetch;
}
async function getNip05(
store: NStore,
nip05: string,
signal?: AbortSignal,
): Promise<nip19.ProfilePointer> {
export async function lookupNip05(nip05: string, opts: GetNip05Opts): Promise<nip19.ProfilePointer> {
const { conf, signal } = opts;
const tld = tldts.parse(nip05);
if (!tld.isIcann || tld.isIp || tld.isPrivate) {
@ -34,8 +27,8 @@ async function getNip05(
const [name, domain] = nip05.split('@');
try {
if (domain === Conf.url.host) {
const pointer = await localNip05Lookup(store, name);
if (domain === conf.url.host) {
const pointer = await localNip05Lookup(name, opts);
if (pointer) {
logi({ level: 'info', ns: 'ditto.nip05', nip05, state: 'found', source: 'local', pubkey: pointer.pubkey });
return pointer;
@ -43,7 +36,7 @@ async function getNip05(
throw new Error(`Not found: ${nip05}`);
}
} else {
const pointer = await NIP05.lookup(nip05, { fetch: safeFetch, signal });
const pointer = await NIP05.lookup(nip05, { fetch: opts.fetch ?? safeFetch, signal });
logi({ level: 'info', ns: 'ditto.nip05', nip05, state: 'found', source: 'fetch', pubkey: pointer.pubkey });
return pointer;
}
@ -53,19 +46,24 @@ async function getNip05(
}
}
export async function localNip05Lookup(store: NStore, localpart: string): Promise<nip19.ProfilePointer | undefined> {
const name = `${localpart}@${Conf.url.host}`;
export async function localNip05Lookup(
localpart: string,
opts: GetNip05Opts,
): Promise<nip19.ProfilePointer | undefined> {
const { conf, relay, signal } = opts;
const [grant] = await store.query([{
const name = `${localpart}@${conf.url.host}`;
const [grant] = await relay.query([{
kinds: [30360],
'#d': [name, name.toLowerCase()],
authors: [await Conf.signer.getPublicKey()],
authors: [await conf.signer.getPublicKey()],
limit: 1,
}]);
}], { signal });
const pubkey = grant?.tags.find(([name]) => name === 'p')?.[1];
if (pubkey) {
return { pubkey, relays: [Conf.relay] };
return { pubkey, relays: [conf.relay] };
}
}

View file

@ -1,29 +0,0 @@
import { MockRelay } from '@nostrify/nostrify/test';
import { eventFixture } from '@/test.ts';
import { getRelays } from '@/utils/outbox.ts';
import { assertEquals } from '@std/assert';
Deno.test('Get write relays - kind 10002', async () => {
const db = new MockRelay();
const relayListMetadata = await eventFixture('kind-10002-alex');
await db.event(relayListMetadata);
const relays = await getRelays(db, relayListMetadata.pubkey);
assertEquals(relays.size, 6);
});
Deno.test('Get write relays with invalid URL - kind 10002', async () => {
const db = new MockRelay();
const relayListMetadata = await eventFixture('kind-10002-alex');
relayListMetadata.tags[0] = ['r', 'yolo'];
await db.event(relayListMetadata);
const relays = await getRelays(db, relayListMetadata.pubkey);
assertEquals(relays.size, 5);
});

View file

@ -1,28 +0,0 @@
import { NStore } from '@nostrify/nostrify';
import { Conf } from '@/config.ts';
export async function getRelays(store: NStore, pubkey: string): Promise<Set<string>> {
const relays = new Set<`wss://${string}`>();
const events = await store.query([
{ kinds: [10002], authors: [pubkey, await Conf.signer.getPublicKey()], limit: 2 },
]);
for (const event of events) {
for (const [name, relay, marker] of event.tags) {
if (name === 'r' && (marker === 'write' || !marker)) {
try {
const url = new URL(relay);
if (url.protocol === 'wss:') {
relays.add(url.toString() as `wss://${string}`);
}
} catch (_e) {
// fall through
}
}
}
}
return relays;
}

View file

@ -1,43 +1,48 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { NPostgres } from '@nostrify/db';
import { genEvent } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { sql } from 'kysely';
import { generateSecretKey, getPublicKey } from 'nostr-tools';
import { createTestDB } from '@/test.ts';
import { countAuthorStats, getAuthorStats, getEventStats, getFollowDiff, updateStats } from '@/utils/stats.ts';
Deno.test('updateStats with kind 1 increments notes count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const sk = generateSecretKey();
const pubkey = getPublicKey(sk);
await updateStats({ ...db, event: genEvent({ kind: 1 }, sk) });
await updateStats({ ...test, event: genEvent({ kind: 1 }, sk) });
const stats = await getAuthorStats(db.kysely, pubkey);
const stats = await getAuthorStats(test.kysely, pubkey);
assertEquals(stats!.notes_count, 1);
});
Deno.test('updateStats with kind 1 increments replies count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { relay, kysely } = test;
const sk = generateSecretKey();
const note = genEvent({ kind: 1 }, sk);
await updateStats({ ...db, event: note });
await db.store.event(note);
await updateStats({ ...test, event: note });
await relay.event(note);
const reply = genEvent({ kind: 1, tags: [['e', note.id]] }, sk);
await updateStats({ ...db, event: reply });
await db.store.event(reply);
await updateStats({ ...test, event: reply });
await relay.event(reply);
const stats = await getEventStats(db.kysely, note.id);
const stats = await getEventStats(kysely, note.id);
assertEquals(stats!.replies_count, 1);
});
Deno.test('updateStats with kind 5 decrements notes count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { relay, kysely } = test;
const sk = generateSecretKey();
const pubkey = getPublicKey(sk);
@ -45,41 +50,43 @@ Deno.test('updateStats with kind 5 decrements notes count', async () => {
const create = genEvent({ kind: 1 }, sk);
const remove = genEvent({ kind: 5, tags: [['e', create.id]] }, sk);
await updateStats({ ...db, event: create });
assertEquals((await getAuthorStats(db.kysely, pubkey))!.notes_count, 1);
await db.store.event(create);
await updateStats({ ...test, event: create });
assertEquals((await getAuthorStats(kysely, pubkey))!.notes_count, 1);
await relay.event(create);
await updateStats({ ...db, event: remove });
assertEquals((await getAuthorStats(db.kysely, pubkey))!.notes_count, 0);
await db.store.event(remove);
await updateStats({ ...test, event: remove });
assertEquals((await getAuthorStats(kysely, pubkey))!.notes_count, 0);
await relay.event(remove);
});
Deno.test('updateStats with kind 3 increments followers count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { kysely } = test;
await updateStats({ ...db, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) });
await updateStats({ ...db, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) });
await updateStats({ ...db, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) });
await updateStats({ ...test, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) });
await updateStats({ ...test, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) });
await updateStats({ ...test, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) });
const stats = await getAuthorStats(db.kysely, 'alex');
const stats = await getAuthorStats(kysely, 'alex');
assertEquals(stats!.followers_count, 3);
});
Deno.test('updateStats with kind 3 decrements followers count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { relay, kysely } = test;
const sk = generateSecretKey();
const follow = genEvent({ kind: 3, tags: [['p', 'alex']], created_at: 0 }, sk);
const remove = genEvent({ kind: 3, tags: [], created_at: 1 }, sk);
await updateStats({ ...db, event: follow });
assertEquals((await getAuthorStats(db.kysely, 'alex'))!.followers_count, 1);
await db.store.event(follow);
await updateStats({ ...test, event: follow });
assertEquals((await getAuthorStats(kysely, 'alex'))!.followers_count, 1);
await relay.event(follow);
await updateStats({ ...db, event: remove });
assertEquals((await getAuthorStats(db.kysely, 'alex'))!.followers_count, 0);
await db.store.event(remove);
await updateStats({ ...test, event: remove });
assertEquals((await getAuthorStats(kysely, 'alex'))!.followers_count, 0);
await relay.event(remove);
});
Deno.test('getFollowDiff returns added and removed followers', () => {
@ -93,86 +100,91 @@ Deno.test('getFollowDiff returns added and removed followers', () => {
});
Deno.test('updateStats with kind 6 increments reposts count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { relay, kysely } = test;
const note = genEvent({ kind: 1 });
await updateStats({ ...db, event: note });
await db.store.event(note);
await updateStats({ ...test, event: note });
await relay.event(note);
const repost = genEvent({ kind: 6, tags: [['e', note.id]] });
await updateStats({ ...db, event: repost });
await db.store.event(repost);
await updateStats({ ...test, event: repost });
await relay.event(repost);
const stats = await getEventStats(db.kysely, note.id);
const stats = await getEventStats(kysely, note.id);
assertEquals(stats!.reposts_count, 1);
});
Deno.test('updateStats with kind 5 decrements reposts count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { relay, kysely } = test;
const note = genEvent({ kind: 1 });
await updateStats({ ...db, event: note });
await db.store.event(note);
await updateStats({ ...test, event: note });
await relay.event(note);
const sk = generateSecretKey();
const repost = genEvent({ kind: 6, tags: [['e', note.id]] }, sk);
await updateStats({ ...db, event: repost });
await db.store.event(repost);
await updateStats({ ...test, event: repost });
await relay.event(repost);
await updateStats({ ...db, event: genEvent({ kind: 5, tags: [['e', repost.id]] }, sk) });
await updateStats({ ...test, event: genEvent({ kind: 5, tags: [['e', repost.id]] }, sk) });
const stats = await getEventStats(db.kysely, note.id);
const stats = await getEventStats(kysely, note.id);
assertEquals(stats!.reposts_count, 0);
});
Deno.test('updateStats with kind 7 increments reactions count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { relay, kysely } = test;
const note = genEvent({ kind: 1 });
await updateStats({ ...db, event: note });
await db.store.event(note);
await updateStats({ ...test, event: note });
await relay.event(note);
await updateStats({ ...db, event: genEvent({ kind: 7, content: '+', tags: [['e', note.id]] }) });
await updateStats({ ...db, event: genEvent({ kind: 7, content: '😂', tags: [['e', note.id]] }) });
await updateStats({ ...test, event: genEvent({ kind: 7, content: '+', tags: [['e', note.id]] }) });
await updateStats({ ...test, event: genEvent({ kind: 7, content: '😂', tags: [['e', note.id]] }) });
const stats = await getEventStats(db.kysely, note.id);
const stats = await getEventStats(kysely, note.id);
assertEquals(stats!.reactions, JSON.stringify({ '+': 1, '😂': 1 }));
assertEquals(stats!.reactions_count, 2);
});
Deno.test('updateStats with kind 5 decrements reactions count', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { relay, kysely } = test;
const note = genEvent({ kind: 1 });
await updateStats({ ...db, event: note });
await db.store.event(note);
await updateStats({ ...test, event: note });
await relay.event(note);
const sk = generateSecretKey();
const reaction = genEvent({ kind: 7, content: '+', tags: [['e', note.id]] }, sk);
await updateStats({ ...db, event: reaction });
await db.store.event(reaction);
await updateStats({ ...test, event: reaction });
await relay.event(reaction);
await updateStats({ ...db, event: genEvent({ kind: 5, tags: [['e', reaction.id]] }, sk) });
await updateStats({ ...test, event: genEvent({ kind: 5, tags: [['e', reaction.id]] }, sk) });
const stats = await getEventStats(db.kysely, note.id);
const stats = await getEventStats(kysely, note.id);
assertEquals(stats!.reactions, JSON.stringify({}));
});
Deno.test('countAuthorStats counts author stats from the database', async () => {
await using db = await createTestDB();
await using test = await setupTest();
const { relay } = test;
const sk = generateSecretKey();
const pubkey = getPublicKey(sk);
await db.store.event(genEvent({ kind: 1, content: 'hello' }, sk));
await db.store.event(genEvent({ kind: 1, content: 'yolo' }, sk));
await db.store.event(genEvent({ kind: 3, tags: [['p', pubkey]] }));
await relay.event(genEvent({ kind: 1, content: 'hello' }, sk));
await relay.event(genEvent({ kind: 1, content: 'yolo' }, sk));
await relay.event(genEvent({ kind: 3, tags: [['p', pubkey]] }));
await db.kysely.insertInto('author_stats').values({
await test.kysely.insertInto('author_stats').values({
pubkey,
search: 'Yolo Lolo',
notes_count: 0,
@ -181,8 +193,28 @@ Deno.test('countAuthorStats counts author stats from the database', async () =>
}).onConflict((oc) => oc.column('pubkey').doUpdateSet({ 'search': 'baka' }))
.execute();
const stats = await countAuthorStats({ store: db.store, pubkey, kysely: db.kysely });
const stats = await countAuthorStats({ ...test, pubkey });
assertEquals(stats!.notes_count, 2);
assertEquals(stats!.followers_count, 1);
});
async function setupTest() {
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
await db.migrate();
const { kysely } = db;
const relay = new NPostgres(kysely);
return {
relay,
kysely,
[Symbol.asyncDispose]: async () => {
await sql`truncate table event_stats cascade`.execute(kysely);
await sql`truncate table author_stats cascade`.execute(kysely);
await db[Symbol.asyncDispose]();
},
};
}

View file

@ -9,14 +9,14 @@ import { findQuoteTag, findReplyTag, getTagSet } from '@/utils/tags.ts';
interface UpdateStatsOpts {
kysely: Kysely<DittoTables>;
store: NStore;
relay: NStore;
event: NostrEvent;
x?: 1 | -1;
}
/** Handle one event at a time and update relevant stats for it. */
// deno-lint-ignore require-await
export async function updateStats({ event, kysely, store, x = 1 }: UpdateStatsOpts): Promise<void> {
export async function updateStats({ event, kysely, relay, x = 1 }: UpdateStatsOpts): Promise<void> {
switch (event.kind) {
case 1:
case 20:
@ -24,9 +24,9 @@ export async function updateStats({ event, kysely, store, x = 1 }: UpdateStatsOp
case 30023:
return handleEvent1(kysely, event, x);
case 3:
return handleEvent3(kysely, event, x, store);
return handleEvent3(kysely, event, x, relay);
case 5:
return handleEvent5(kysely, event, -1, store);
return handleEvent5(kysely, event, -1, relay);
case 6:
return handleEvent6(kysely, event, x);
case 7:
@ -88,12 +88,12 @@ async function handleEvent1(kysely: Kysely<DittoTables>, event: NostrEvent, x: n
}
/** Update stats for kind 3 event. */
async function handleEvent3(kysely: Kysely<DittoTables>, event: NostrEvent, x: number, store: NStore): Promise<void> {
async function handleEvent3(kysely: Kysely<DittoTables>, event: NostrEvent, x: number, relay: NStore): Promise<void> {
const following = getTagSet(event.tags, 'p');
await updateAuthorStats(kysely, event.pubkey, () => ({ following_count: following.size }));
const [prev] = await store.query([
const [prev] = await relay.query([
{ kinds: [3], authors: [event.pubkey], limit: 1 },
]);
@ -117,12 +117,12 @@ async function handleEvent3(kysely: Kysely<DittoTables>, event: NostrEvent, x: n
}
/** Update stats for kind 5 event. */
async function handleEvent5(kysely: Kysely<DittoTables>, event: NostrEvent, x: -1, store: NStore): Promise<void> {
async function handleEvent5(kysely: Kysely<DittoTables>, event: NostrEvent, x: -1, relay: NStore): Promise<void> {
const id = event.tags.find(([name]) => name === 'e')?.[1];
if (id) {
const [target] = await store.query([{ ids: [id], authors: [event.pubkey], limit: 1 }]);
const [target] = await relay.query([{ ids: [id], authors: [event.pubkey], limit: 1 }]);
if (target) {
await updateStats({ event: target, kysely, store, x });
await updateStats({ event: target, kysely, relay, x });
}
}
}
@ -300,13 +300,13 @@ export async function updateEventStats(
/** Calculate author stats from the database. */
export async function countAuthorStats(
{ pubkey, store }: RefreshAuthorStatsOpts,
{ pubkey, relay }: RefreshAuthorStatsOpts,
): Promise<DittoTables['author_stats']> {
const [{ count: followers_count }, { count: notes_count }, [followList], [kind0]] = await Promise.all([
store.count([{ kinds: [3], '#p': [pubkey] }]),
store.count([{ kinds: [1, 20], authors: [pubkey] }]),
store.query([{ kinds: [3], authors: [pubkey], limit: 1 }]),
store.query([{ kinds: [0], authors: [pubkey], limit: 1 }]),
relay.count([{ kinds: [3], '#p': [pubkey] }]),
relay.count([{ kinds: [1, 20], authors: [pubkey] }]),
relay.query([{ kinds: [3], authors: [pubkey], limit: 1 }]),
relay.query([{ kinds: [0], authors: [pubkey], limit: 1 }]),
]);
let search: string = '';
const metadata = n.json().pipe(n.metadata()).catch({}).safeParse(kind0?.content);
@ -333,14 +333,14 @@ export async function countAuthorStats(
export interface RefreshAuthorStatsOpts {
pubkey: string;
kysely: Kysely<DittoTables>;
store: SetRequired<NStore, 'count'>;
relay: SetRequired<NStore, 'count'>;
}
/** Refresh the author's stats in the database. */
export async function refreshAuthorStats(
{ pubkey, kysely, store }: RefreshAuthorStatsOpts,
{ pubkey, kysely, relay }: RefreshAuthorStatsOpts,
): Promise<DittoTables['author_stats']> {
const stats = await countAuthorStats({ store, pubkey, kysely });
const stats = await countAuthorStats({ relay, pubkey, kysely });
await kysely.insertInto('author_stats')
.values(stats)

View file

@ -1,10 +1,9 @@
import { paginated, paginatedList, paginationSchema } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter } from '@nostrify/nostrify';
import { AppContext } from '@/app.ts';
import { paginationSchema } from '@/schemas/pagination.ts';
import { renderAccount } from '@/views/mastodon/accounts.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
import { paginated, paginatedList } from '@/utils/api.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { accountFromPubkey } from '@/views/mastodon/accounts.ts';
@ -25,7 +24,7 @@ async function renderEventAccounts(c: AppContext, filters: NostrFilter[], opts?:
const events = await relay.query(filters, { signal })
// Deduplicate by author.
.then((events) => Array.from(new Map(events.map((event) => [event.pubkey, event])).values()))
.then((events) => hydrateEvents({ events, relay, signal }))
.then((events) => hydrateEvents({ ...c.var, events, relay, signal }))
.then((events) => filterFn ? events.filter(filterFn) : events);
const accounts = await Promise.all(
@ -48,7 +47,7 @@ async function renderAccounts(c: AppContext, pubkeys: string[]) {
const { relay, signal } = c.var;
const events = await relay.query([{ kinds: [0], authors }], { signal })
.then((events) => hydrateEvents({ events, relay, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const accounts = await Promise.all(
authors.map((pubkey) => {
@ -74,7 +73,7 @@ async function renderStatuses(c: AppContext, ids: string[], signal = AbortSignal
const { limit } = pagination;
const events = await relay.query([{ kinds: [1, 20], ids, limit }], { signal })
.then((events) => hydrateEvents({ events, relay, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
if (!events.length) {
return c.json([]);
@ -85,7 +84,7 @@ async function renderStatuses(c: AppContext, ids: string[], signal = AbortSignal
const viewerPubkey = await user?.signer.getPublicKey();
const statuses = await Promise.all(
sortedEvents.map((event) => renderStatus(event, { viewerPubkey })),
sortedEvents.map((event) => renderStatus(relay, event, { viewerPubkey })),
);
// TODO: pagination with min_id and max_id based on the order of `ids`.

View file

@ -1,4 +1,4 @@
import { NostrEvent } from '@nostrify/nostrify';
import { NostrEvent, NStore } from '@nostrify/nostrify';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
import { Conf } from '@/config.ts';
@ -10,23 +10,23 @@ interface RenderNotificationOpts {
viewerPubkey: string;
}
async function renderNotification(event: DittoEvent, opts: RenderNotificationOpts) {
async function renderNotification(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) {
const mentioned = !!event.tags.find(([name, value]) => name === 'p' && value === opts.viewerPubkey);
if (event.kind === 1 && mentioned) {
return renderMention(event, opts);
return renderMention(store, event, opts);
}
if (event.kind === 6) {
return renderReblog(event, opts);
return renderReblog(store, event, opts);
}
if (event.kind === 7 && event.content === '+') {
return renderFavourite(event, opts);
return renderFavourite(store, event, opts);
}
if (event.kind === 7) {
return renderReaction(event, opts);
return renderReaction(store, event, opts);
}
if (event.kind === 30360 && event.pubkey === await Conf.signer.getPublicKey()) {
@ -34,12 +34,12 @@ async function renderNotification(event: DittoEvent, opts: RenderNotificationOpt
}
if (event.kind === 9735) {
return renderZap(event, opts);
return renderZap(store, event, opts);
}
}
async function renderMention(event: DittoEvent, opts: RenderNotificationOpts) {
const status = await renderStatus(event, opts);
async function renderMention(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) {
const status = await renderStatus(store, event, opts);
if (!status) return;
return {
@ -51,9 +51,9 @@ async function renderMention(event: DittoEvent, opts: RenderNotificationOpts) {
};
}
async function renderReblog(event: DittoEvent, opts: RenderNotificationOpts) {
async function renderReblog(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) {
if (event.repost?.kind !== 1) return;
const status = await renderStatus(event.repost, opts);
const status = await renderStatus(store, event.repost, opts);
if (!status) return;
const account = event.author ? await renderAccount(event.author) : await accountFromPubkey(event.pubkey);
@ -66,9 +66,9 @@ async function renderReblog(event: DittoEvent, opts: RenderNotificationOpts) {
};
}
async function renderFavourite(event: DittoEvent, opts: RenderNotificationOpts) {
async function renderFavourite(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) {
if (event.reacted?.kind !== 1) return;
const status = await renderStatus(event.reacted, opts);
const status = await renderStatus(store, event.reacted, opts);
if (!status) return;
const account = event.author ? await renderAccount(event.author) : await accountFromPubkey(event.pubkey);
@ -81,9 +81,9 @@ async function renderFavourite(event: DittoEvent, opts: RenderNotificationOpts)
};
}
async function renderReaction(event: DittoEvent, opts: RenderNotificationOpts) {
async function renderReaction(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) {
if (event.reacted?.kind !== 1) return;
const status = await renderStatus(event.reacted, opts);
const status = await renderStatus(store, event.reacted, opts);
if (!status) return;
const account = event.author ? await renderAccount(event.author) : await accountFromPubkey(event.pubkey);
@ -116,7 +116,7 @@ async function renderNameGrant(event: DittoEvent) {
};
}
async function renderZap(event: DittoEvent, opts: RenderNotificationOpts) {
async function renderZap(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) {
if (!event.zap_sender) return;
const { zap_amount = 0, zap_message = '' } = event;
@ -133,7 +133,7 @@ async function renderZap(event: DittoEvent, opts: RenderNotificationOpts) {
message: zap_message,
created_at: nostrDate(event.created_at).toISOString(),
account,
...(event.zapped ? { status: await renderStatus(event.zapped, opts) } : {}),
...(event.zapped ? { status: await renderStatus(store, event.zapped, opts) } : {}),
};
}

View file

@ -1,4 +1,4 @@
import type { NostrEvent } from '@nostrify/nostrify';
import type { NostrEvent, NStore } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { MastodonPush } from '@/types/MastodonPush.ts';
@ -9,10 +9,11 @@ import { renderNotification } from '@/views/mastodon/notifications.ts';
* Unlike other views, only one will be rendered at a time, so making use of async calls is okay.
*/
export async function renderWebPushNotification(
store: NStore,
event: NostrEvent,
viewerPubkey: string,
): Promise<MastodonPush | undefined> {
const notification = await renderNotification(event, { viewerPubkey });
const notification = await renderNotification(store, event, { viewerPubkey });
if (!notification) {
return;
}

View file

@ -1,3 +1,5 @@
import { NStore } from '@nostrify/nostrify';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
import { nostrDate } from '@/utils.ts';
@ -6,7 +8,7 @@ import { renderStatus } from '@/views/mastodon/statuses.ts';
import { getTagSet } from '@/utils/tags.ts';
/** Expects a `reportEvent` of kind 1984 and a `profile` of kind 0 of the person being reported */
async function renderReport(event: DittoEvent) {
function renderReport(event: DittoEvent) {
// The category is present in both the 'e' and 'p' tag, however, it is possible to report a user without reporting a note, so it's better to get the category from the 'p' tag
const category = event.tags.find(([name]) => name === 'p')?.[2];
const statusIds = event.tags.filter(([name]) => name === 'e').map((tag) => tag[1]) ?? [];
@ -23,9 +25,7 @@ async function renderReport(event: DittoEvent) {
created_at: nostrDate(event.created_at).toISOString(),
status_ids: statusIds,
rules_ids: null,
target_account: event.reported_profile
? await renderAccount(event.reported_profile)
: await accountFromPubkey(reportedPubkey),
target_account: event.reported_profile ? renderAccount(event.reported_profile) : accountFromPubkey(reportedPubkey),
};
}
@ -36,7 +36,7 @@ interface RenderAdminReportOpts {
/** Admin-level information about a filed report.
* Expects an event of kind 1984 fully hydrated.
* https://docs.joinmastodon.org/entities/Admin_Report */
async function renderAdminReport(event: DittoEvent, opts: RenderAdminReportOpts) {
async function renderAdminReport(store: NStore, event: DittoEvent, opts: RenderAdminReportOpts) {
const { viewerPubkey } = opts;
// The category is present in both the 'e' and 'p' tag, however, it is possible to report a user without reporting a note, so it's better to get the category from the 'p' tag
@ -45,7 +45,7 @@ async function renderAdminReport(event: DittoEvent, opts: RenderAdminReportOpts)
const statuses = [];
if (event.reported_notes) {
for (const status of event.reported_notes) {
statuses.push(await renderStatus(status, { viewerPubkey }));
statuses.push(await renderStatus(store, status, { viewerPubkey }));
}
}

View file

@ -1,4 +1,4 @@
import { NostrEvent } from '@nostrify/nostrify';
import { NostrEvent, NStore } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { Conf } from '@/config.ts';
@ -6,7 +6,6 @@ import { MastodonAttachment } from '@/entities/MastodonAttachment.ts';
import { MastodonMention } from '@/entities/MastodonMention.ts';
import { MastodonStatus } from '@/entities/MastodonStatus.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { Storages } from '@/storages.ts';
import { nostrDate } from '@/utils.ts';
import { getMediaLinks, parseNoteContent, stripimeta } from '@/utils/note.ts';
import { findReplyTag } from '@/utils/tags.ts';
@ -20,7 +19,11 @@ interface RenderStatusOpts {
depth?: number;
}
async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<MastodonStatus | undefined> {
async function renderStatus(
store: NStore,
event: DittoEvent,
opts: RenderStatusOpts,
): Promise<MastodonStatus | undefined> {
const { viewerPubkey, depth = 1 } = opts;
if (depth > 2 || depth < 0) return;
@ -38,8 +41,6 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
const replyId = findReplyTag(event.tags)?.[1];
const store = await Storages.db();
const mentions = event.mentions?.map((event) => renderMention(event)) ?? [];
const { html, links, firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), mentions);
@ -123,7 +124,7 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
tags: [],
emojis: renderEmojis(event),
poll: null,
quote: !event.quote ? null : await renderStatus(event.quote, { depth: depth + 1 }),
quote: !event.quote ? null : await renderStatus(store, event.quote, { depth: depth + 1 }),
quote_id: event.quote?.id ?? null,
uri: Conf.local(`/users/${account.acct}/statuses/${event.id}`),
url: Conf.local(`/@${account.acct}/${event.id}`),
@ -139,14 +140,18 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
};
}
async function renderReblog(event: DittoEvent, opts: RenderStatusOpts): Promise<MastodonStatus | undefined> {
async function renderReblog(
store: NStore,
event: DittoEvent,
opts: RenderStatusOpts,
): Promise<MastodonStatus | undefined> {
const { viewerPubkey } = opts;
if (!event.repost) return;
const status = await renderStatus(event, {}); // omit viewerPubkey intentionally
const status = await renderStatus(store, event, {}); // omit viewerPubkey intentionally
if (!status) return;
const reblog = await renderStatus(event.repost, { viewerPubkey }) ?? null;
const reblog = await renderStatus(store, event.repost, { viewerPubkey }) ?? null;
return {
...status,

View file

@ -1,16 +1,16 @@
import { DittoConf } from '@ditto/conf';
import { NostrEvent, NostrRelayOK, NPolicy } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import * as Comlink from 'comlink';
import { Conf } from '@/config.ts';
import type { CustomPolicy } from '@/workers/policy.worker.ts';
class PolicyWorker implements NPolicy {
export class PolicyWorker implements NPolicy {
private worker: Comlink.Remote<CustomPolicy>;
private ready: Promise<void>;
private enabled = true;
constructor() {
constructor(private conf: DittoConf) {
this.worker = Comlink.wrap<CustomPolicy>(
new Worker(
new URL('./policy.worker.ts', import.meta.url),
@ -19,8 +19,8 @@ class PolicyWorker implements NPolicy {
name: 'PolicyWorker',
deno: {
permissions: {
read: [Conf.denoDir, Conf.policy, Conf.dataDir],
write: [Conf.dataDir],
read: [conf.denoDir, conf.policy, conf.dataDir],
write: [conf.dataDir],
net: 'inherit',
env: false,
import: true,
@ -44,18 +44,20 @@ class PolicyWorker implements NPolicy {
}
private async init(): Promise<void> {
const conf = this.conf;
try {
await this.worker.init({
path: Conf.policy,
databaseUrl: Conf.databaseUrl,
pubkey: await Conf.signer.getPublicKey(),
path: conf.policy,
databaseUrl: conf.databaseUrl,
pubkey: await conf.signer.getPublicKey(),
});
logi({
level: 'info',
ns: 'ditto.system.policy',
msg: 'Using custom policy',
path: Conf.policy,
path: conf.policy,
enabled: true,
});
} catch (e) {
@ -76,16 +78,14 @@ class PolicyWorker implements NPolicy {
level: 'warn',
ns: 'ditto.system.policy',
msg: 'Custom policies are not supported with PGlite. The policy is disabled.',
path: Conf.policy,
path: conf.policy,
enabled: false,
});
this.enabled = false;
return;
}
throw new Error(`DITTO_POLICY (error importing policy): ${Conf.policy}`);
throw new Error(`DITTO_POLICY (error importing policy): ${conf.policy}`);
}
}
}
export const policyWorker = new PolicyWorker();

View file

@ -30,7 +30,7 @@ export class CustomPolicy implements NPolicy {
async init({ path, databaseUrl, pubkey }: PolicyInit): Promise<void> {
const Policy = (await import(path)).default;
const db = DittoPolyPg.create(databaseUrl, { poolSize: 1 });
const db = new DittoPolyPg(databaseUrl, { poolSize: 1 });
const store = new DittoPgStore({
db,

View file

@ -3,6 +3,7 @@
"version": "1.1.0",
"exports": {
"./middleware": "./middleware/mod.ts",
"./pagination": "./pagination/mod.ts",
"./router": "./router/mod.ts",
"./test": "./test.ts"
}

View file

@ -0,0 +1,3 @@
export { buildLinkHeader, buildListLinkHeader } from './link-header.ts';
export { paginated, paginatedList } from './paginate.ts';
export { paginationSchema } from './schema.ts';

View file

@ -1,18 +1,22 @@
import { buildLinkHeader, buildListLinkHeader } from './link-header.ts';
import type { DittoEnv } from '@ditto/mastoapi/router';
import type { Context } from '@hono/hono';
import type { NostrEvent } from '@nostrify/nostrify';
type HeaderRecord = Record<string, string | string[]>;
/** Return results with pagination headers. Assumes chronological sorting of events. */
export function paginated(
c: Context,
export function paginated<E extends DittoEnv>(
c: Context<E>,
events: NostrEvent[],
body: object | unknown[],
headers: HeaderRecord = {},
): Response {
const link = buildLinkHeader(c.req.url, events);
const { conf } = c.var;
const url = conf.local(c.req.url);
const link = buildLinkHeader(url, events);
if (link) {
headers.link = link;
@ -24,13 +28,16 @@ export function paginated(
}
/** paginate a list of tags. */
export function paginatedList(
c: Context,
export function paginatedList<E extends DittoEnv>(
c: Context<E>,
params: { offset: number; limit: number },
body: object | unknown[],
headers: HeaderRecord = {},
): Response {
const link = buildListLinkHeader(c.req.url, params);
const { conf } = c.var;
const url = conf.local(c.req.url);
const link = buildListLinkHeader(url, params);
const hasMore = Array.isArray(body) ? body.length > 0 : true;
if (link) {

View file

@ -1,7 +1,16 @@
import { z } from 'zod';
export interface Pagination {
max_id?: string;
min_id?: string;
since?: number;
until?: number;
limit: number;
offset: number;
}
/** Schema to parse pagination query params. */
export const paginationSchema = z.object({
export const paginationSchema: z.ZodType<Pagination> = z.object({
max_id: z.string().transform((val) => {
if (!val.includes('-')) return val;
return val.split('-')[1];
@ -11,4 +20,4 @@ export const paginationSchema = z.object({
until: z.coerce.number().nonnegative().optional().catch(undefined),
limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)),
offset: z.coerce.number().nonnegative().catch(0),
});
}) as z.ZodType<Pagination>;

View file

@ -7,7 +7,7 @@ import { DittoApp } from './DittoApp.ts';
import { DittoRoute } from './DittoRoute.ts';
Deno.test('DittoApp', async () => {
await using db = DittoPolyPg.create('memory://');
await using db = new DittoPolyPg('memory://');
const conf = new DittoConf(new Map());
const relay = new MockRelay();

View file

@ -1,13 +1,17 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { JsonParseStream } from '@std/json/json-parse-stream';
import { TextLineStream } from '@std/streams/text-line-stream';
import { Conf } from '../packages/ditto/config.ts';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
import { type EventStub } from '../packages/ditto/utils/api.ts';
import { nostrNow } from '../packages/ditto/utils.ts';
const signer = Conf.signer;
const store = await Storages.db();
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
const { signer } = conf;
const readable = Deno.stdin.readable
.pipeThrough(new TextDecoderStream())
@ -22,7 +26,7 @@ for await (const t of readable) {
...t as EventStub,
});
await store.event(event);
await relay.event(event);
}
Deno.exit(0);

View file

@ -1,15 +1,20 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { NSchema } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { Conf } from '../packages/ditto/config.ts';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
import { nostrNow } from '../packages/ditto/utils.ts';
const store = await Storages.db();
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
const [pubkeyOrNpub, role] = Deno.args;
const pubkey = pubkeyOrNpub.startsWith('npub1') ? nip19.decode(pubkeyOrNpub as `npub1${string}`).data : pubkeyOrNpub;
const { signer } = conf;
if (!NSchema.id().safeParse(pubkey).success) {
console.error('Invalid pubkey');
Deno.exit(1);
@ -20,10 +25,9 @@ if (!['admin', 'user'].includes(role)) {
Deno.exit(1);
}
const signer = Conf.signer;
const admin = await signer.getPublicKey();
const [existing] = await store.query([{
const [existing] = await relay.query([{
kinds: [30382],
authors: [admin],
'#d': [pubkey],
@ -57,6 +61,6 @@ const event = await signer.signEvent({
created_at: nostrNow(),
});
await store.event(event);
await relay.event(event);
Deno.exit(0);

View file

@ -1,7 +1,13 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { NostrFilter } from '@nostrify/nostrify';
import { Command, InvalidOptionArgumentError } from 'commander';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
interface ExportFilter {
authors?: string[];
@ -98,8 +104,6 @@ export function buildFilter(args: ExportFilter) {
}
async function exportEvents(args: ExportFilter) {
const store = await Storages.db();
let filter: NostrFilter = {};
try {
filter = buildFilter(args);
@ -108,7 +112,7 @@ async function exportEvents(args: ExportFilter) {
}
let count = 0;
for await (const msg of store.req([filter])) {
for await (const msg of relay.req([filter])) {
if (msg[0] === 'EOSE') {
break;
}

View file

@ -1,13 +1,16 @@
import { Semaphore } from '@core/asyncutil';
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { NostrEvent } from '@nostrify/nostrify';
import { JsonParseStream } from '@std/json/json-parse-stream';
import { TextLineStream } from '@std/streams/text-line-stream';
import { Conf } from '../packages/ditto/config.ts';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
const store = await Storages.db();
const sem = new Semaphore(Conf.pg.poolSize);
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
const sem = new Semaphore(conf.pg.poolSize);
console.warn('Importing events...');
@ -27,7 +30,7 @@ for await (const line of readable) {
sem.lock(async () => {
try {
await store.event(event);
await relay.event(event);
console.warn(`(${count}) Event<${event.kind}> ${event.id}`);
} catch (error) {
if (error instanceof Error && error.message.includes('violates unique constraint')) {

View file

@ -1,9 +1,9 @@
import { Storages } from '../packages/ditto/storages.ts';
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
// This migrates kysely internally.
const kysely = await Storages.kysely();
const conf = new DittoConf(Deno.env);
await using db = new DittoPolyPg(conf.databaseUrl);
// Close the connection before exiting.
await kysely.destroy();
await db.migrate();
Deno.exit();

View file

@ -1,16 +1,23 @@
import { policyWorker } from '../packages/ditto/workers/policy.ts';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
import { PolicyWorker } from '../packages/ditto/workers/policy.ts';
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
const policyWorker = new PolicyWorker(conf);
const db = await Storages.db();
let count = 0;
for await (const msg of db.req([{}])) {
for await (const msg of relay.req([{}])) {
const [type, , event] = msg;
if (type === 'EOSE') console.log('EOSE');
if (type !== 'EVENT') continue;
const [, , ok] = await policyWorker.call(event, AbortSignal.timeout(5000));
if (!ok) {
await db.remove([{ ids: [event.id] }]);
await relay.remove([{ ids: [event.id] }]);
count += 1;
}
}

View file

@ -1,11 +1,13 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { NostrEvent } from '@nostrify/nostrify';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
const kysely = await Storages.kysely();
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const query = kysely
const query = db.kysely
.selectFrom('nostr_events')
.select(['id', 'kind', 'content', 'pubkey', 'tags', 'created_at', 'sig']);
@ -14,7 +16,7 @@ for await (const row of query.stream()) {
const ext = DittoPgStore.indexExtensions(event);
try {
await kysely
await db.kysely
.updateTable('nostr_events')
.set('search_ext', ext)
.where('id', '=', event.id)

View file

@ -1,13 +1,21 @@
import { Semaphore } from '@core/asyncutil';
import { NostrEvent } from '@nostrify/nostrify';
import { updateAuthorData } from '../packages/ditto/pipeline.ts';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
import { DittoRelayStore } from '../packages/ditto/storages/DittoRelayStore.ts';
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const pgstore = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
const relaystore = new DittoRelayStore({ conf, db, relay: pgstore });
const kysely = await Storages.kysely();
const sem = new Semaphore(5);
const query = kysely
const query = db.kysely
.selectFrom('nostr_events')
.select(['id', 'kind', 'content', 'pubkey', 'tags', 'created_at', 'sig'])
.where('kind', '=', 0);
@ -19,7 +27,7 @@ for await (const row of query.stream(100)) {
sem.lock(async () => {
const event: NostrEvent = { ...row, created_at: Number(row.created_at) };
await updateAuthorData(event, AbortSignal.timeout(3000));
await relaystore.updateAuthorData(event, AbortSignal.timeout(3000));
});
}

View file

@ -1,11 +1,14 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { NSchema as n } from '@nostrify/nostrify';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
const store = await Storages.db();
const kysely = await Storages.kysely();
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
for await (const msg of store.req([{ kinds: [0] }])) {
for await (const msg of relay.req([{ kinds: [0] }])) {
if (msg[0] === 'EVENT') {
const { pubkey, content } = msg[2];
@ -13,7 +16,7 @@ for await (const msg of store.req([{ kinds: [0] }])) {
const search = [name, nip05].filter(Boolean).join(' ').trim();
try {
await kysely.insertInto('author_stats').values({
await db.kysely.insertInto('author_stats').values({
pubkey,
search,
followers_count: 0,

View file

@ -1,12 +1,14 @@
import { Conf } from '../packages/ditto/config.ts';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
const kysely = await Storages.kysely();
const statsQuery = kysely.selectFrom('author_stats').select('pubkey');
const { streakWindow } = Conf;
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const statsQuery = db.kysely.selectFrom('author_stats').select('pubkey');
const { streakWindow } = conf;
for await (const { pubkey } of statsQuery.stream(10)) {
const eventsQuery = kysely
const eventsQuery = db.kysely
.selectFrom('nostr_events')
.select('created_at')
.where('pubkey', '=', pubkey)
@ -38,7 +40,7 @@ for await (const { pubkey } of statsQuery.stream(10)) {
}
if (start && end) {
await kysely
await db.kysely
.updateTable('author_stats')
.set({
streak_end: end,

View file

@ -3,12 +3,16 @@
* by looking them up on a list of relays.
*/
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { NostrEvent, NRelay1, NSchema } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
const store = await Storages.db();
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
interface ImportEventsOpts {
profilesOnly: boolean;
@ -19,7 +23,7 @@ const importUsers = async (
authors: string[],
relays: string[],
opts?: Partial<ImportEventsOpts>,
doEvent: DoEvent = async (event: NostrEvent) => await store.event(event),
doEvent: DoEvent = async (event: NostrEvent) => await relay.event(event),
) => {
// Kind 0s + follow lists.
const profiles: Record<string, Record<number, NostrEvent>> = {};

View file

@ -1,9 +1,13 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { Command } from 'commander';
import { NostrEvent } from 'nostr-tools';
import { nostrNow } from '../packages/ditto/utils.ts';
import { Conf } from '../packages/ditto/config.ts';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
function die(code: number, ...args: unknown[]) {
console.error(...args);
@ -33,19 +37,19 @@ if (import.meta.main) {
content.lud16 = lightning;
content.name = name;
content.picture = image;
content.website = Conf.localDomain;
content.website = conf.localDomain;
const signer = Conf.signer;
const signer = conf.signer;
const bare: Omit<NostrEvent, 'id' | 'sig' | 'pubkey'> = {
created_at: nostrNow(),
kind: 0,
tags: [],
content: JSON.stringify(content),
created_at: Math.floor(Date.now() / 1000),
};
const signed = await signer.signEvent(bare);
console.log({ content, signed });
await Storages.db().then((store) => store.event(signed));
await relay.event(signed);
});
await kind0.parseAsync();

View file

@ -1,8 +1,16 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { nip19 } from 'nostr-tools';
import { Storages } from '../packages/ditto/storages.ts';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
import { refreshAuthorStats } from '../packages/ditto/utils/stats.ts';
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
const { kysely } = db;
let pubkey: string;
try {
const result = nip19.decode(Deno.args[0]);
@ -16,7 +24,4 @@ try {
Deno.exit(1);
}
const store = await Storages.db();
const kysely = await Storages.kysely();
await refreshAuthorStats({ pubkey, kysely, store });
await refreshAuthorStats({ pubkey, kysely, relay });

View file

@ -1,5 +1,8 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { z } from 'zod';
import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts';
import {
updateTrendingEvents,
updateTrendingHashtags,
@ -8,6 +11,11 @@ import {
updateTrendingZappedEvents,
} from '../packages/ditto/trends.ts';
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() });
const ctx = { conf, db, relay };
const trendSchema = z.enum(['pubkeys', 'zapped_events', 'events', 'hashtags', 'links']);
const trends = trendSchema.array().parse(Deno.args);
@ -19,23 +27,23 @@ for (const trend of trends) {
switch (trend) {
case 'pubkeys':
console.log('Updating trending pubkeys...');
await updateTrendingPubkeys();
await updateTrendingPubkeys(ctx);
break;
case 'zapped_events':
console.log('Updating trending zapped events...');
await updateTrendingZappedEvents();
await updateTrendingZappedEvents(ctx);
break;
case 'events':
console.log('Updating trending events...');
await updateTrendingEvents();
await updateTrendingEvents(ctx);
break;
case 'hashtags':
console.log('Updating trending hashtags...');
await updateTrendingHashtags();
await updateTrendingHashtags(ctx);
break;
case 'links':
console.log('Updating trending links...');
await updateTrendingLinks();
await updateTrendingLinks(ctx);
break;
}
}