Merge branch 'main' into mint-cashu

Conflicts:
	packages/ditto/controllers/api/cashu.ts
	packages/ditto/middleware/swapNutzapsMiddleware.ts
P. Reis 2025-02-24 20:07:12 -03:00
commit 99a20bd129
200 changed files with 4364 additions and 3694 deletions

View file

@ -1,4 +1,4 @@
image: denoland/deno:2.1.10
image: denoland/deno:2.2.0
default:
interruptible: true

View file

@ -1 +1 @@
deno 2.1.10
deno 2.2.0

View file

@ -1,4 +1,4 @@
FROM denoland/deno:2.1.10
FROM denoland/deno:2.2.0
ENV PORT 5000
WORKDIR /app

View file

@ -1,11 +1,17 @@
{
"version": "1.1.0",
"workspace": [
"./packages/api",
"./packages/conf",
"./packages/db",
"./packages/ditto",
"./packages/metrics"
"./packages/lang",
"./packages/mastoapi",
"./packages/metrics",
"./packages/nip98",
"./packages/policies",
"./packages/ratelimiter",
"./packages/translators",
"./packages/uploaders"
],
"tasks": {
"start": "deno run -A --env-file --deny-read=.env packages/ditto/server.ts",
@ -56,8 +62,8 @@
"@isaacs/ttlcache": "npm:@isaacs/ttlcache@^1.4.1",
"@negrel/webpush": "jsr:@negrel/webpush@^0.3.0",
"@noble/secp256k1": "npm:@noble/secp256k1@^2.0.0",
"@nostrify/db": "jsr:@nostrify/db@^0.39.0",
"@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.38.1",
"@nostrify/db": "jsr:@nostrify/db@^0.39.4",
"@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.39.1",
"@nostrify/policies": "jsr:@nostrify/policies@^0.36.1",
"@nostrify/types": "jsr:@nostrify/types@^0.36.0",
"@scure/base": "npm:@scure/base@^1.1.6",

deno.lock (generated)
View file

@ -31,15 +31,15 @@
"jsr:@hono/hono@^4.4.6": "4.6.15",
"jsr:@negrel/http-ece@0.6.0": "0.6.0",
"jsr:@negrel/webpush@0.3": "0.3.0",
"jsr:@nostrify/db@0.39": "0.39.0",
"jsr:@nostrify/db@~0.39.4": "0.39.4",
"jsr:@nostrify/nostrify@0.31": "0.31.0",
"jsr:@nostrify/nostrify@0.32": "0.32.0",
"jsr:@nostrify/nostrify@0.36": "0.36.2",
"jsr:@nostrify/nostrify@0.38": "0.38.1",
"jsr:@nostrify/nostrify@0.39": "0.39.1",
"jsr:@nostrify/nostrify@~0.22.1": "0.22.5",
"jsr:@nostrify/nostrify@~0.22.4": "0.22.4",
"jsr:@nostrify/nostrify@~0.22.5": "0.22.5",
"jsr:@nostrify/nostrify@~0.38.1": "0.38.1",
"jsr:@nostrify/nostrify@~0.39.1": "0.39.1",
"jsr:@nostrify/policies@0.33": "0.33.0",
"jsr:@nostrify/policies@0.33.1": "0.33.1",
"jsr:@nostrify/policies@0.34": "0.34.0",
@ -138,6 +138,7 @@
"npm:type-fest@^4.3.0": "4.18.2",
"npm:unfurl.js@^6.4.0": "6.4.0",
"npm:websocket-ts@^2.1.5": "2.1.5",
"npm:websocket-ts@^2.2.1": "2.2.1",
"npm:zod@^3.23.8": "3.23.8"
},
"jsr": {
@ -363,10 +364,10 @@
"jsr:@std/path@0.224.0"
]
},
"@nostrify/db@0.39.0": {
"integrity": "13a88c610eb15a5dd13848d5beec9170406376c9d05299ce5e5298452a5431ac",
"@nostrify/db@0.39.4": {
"integrity": "53fecea3b67394cf4f52795f89d1d065bdeb0627b8655cc7fc3a89d6b21adf01",
"dependencies": [
"jsr:@nostrify/nostrify@~0.38.1",
"jsr:@nostrify/nostrify@0.39",
"jsr:@nostrify/types@0.36",
"npm:kysely@~0.27.3",
"npm:nostr-tools@^2.10.4"
@ -383,7 +384,7 @@
"npm:kysely@~0.27.3",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.5.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -397,7 +398,7 @@
"npm:kysely@~0.27.3",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -412,7 +413,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -425,7 +426,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -438,7 +439,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -453,7 +454,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -466,7 +467,7 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.7.0",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
@ -481,13 +482,14 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.10.4",
"npm:websocket-ts",
"npm:websocket-ts@^2.1.5",
"npm:zod"
]
},
"@nostrify/nostrify@0.38.1": {
"integrity": "087d1be0d5c46420e6040b07c8cfb1a3ecb9808f23de54d22dd64d3eed001bce",
"@nostrify/nostrify@0.39.0": {
"integrity": "f7e052c32b8b9bafe0f2517dcf090e7d3df5aed38452a0cf61ade817d34067ee",
"dependencies": [
"jsr:@nostrify/nostrify@0.39",
"jsr:@nostrify/types@0.36",
"jsr:@std/crypto",
"jsr:@std/encoding@~0.224.1",
@ -496,7 +498,23 @@
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.10.4",
"npm:websocket-ts",
"npm:websocket-ts@^2.2.1",
"npm:zod"
]
},
"@nostrify/nostrify@0.39.1": {
"integrity": "84f98c815a07f4151bd02188a3525e438c416e9de632c79c9da9edbfca580d7f",
"dependencies": [
"jsr:@nostrify/nostrify@~0.39.1",
"jsr:@nostrify/types@0.36",
"jsr:@std/crypto",
"jsr:@std/encoding@~0.224.1",
"npm:@scure/base",
"npm:@scure/bip32",
"npm:@scure/bip39",
"npm:lru-cache@^10.2.0",
"npm:nostr-tools@^2.10.4",
"npm:websocket-ts@^2.2.1",
"npm:zod"
]
},
@ -1789,6 +1807,9 @@
"websocket-ts@2.1.5": {
"integrity": "sha512-rCNl9w6Hsir1azFm/pbjBEFzLD/gi7Th5ZgOxMifB6STUfTSovYAzryWw0TRvSZ1+Qu1Z5Plw4z42UfTNA9idA=="
},
"websocket-ts@2.2.1": {
"integrity": "sha512-YKPDfxlK5qOheLZ2bTIiktZO1bpfGdNCPJmTEaPW7G9UXI1GKjDdeacOrsULUS000OPNxDVOyAuKLuIWPqWM0Q=="
},
"whatwg-encoding@3.1.1": {
"integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
"dependencies": [
@ -2460,8 +2481,8 @@
"jsr:@gfx/canvas-wasm@~0.4.2",
"jsr:@hono/hono@^4.4.6",
"jsr:@negrel/webpush@0.3",
"jsr:@nostrify/db@0.39",
"jsr:@nostrify/nostrify@~0.38.1",
"jsr:@nostrify/db@~0.39.4",
"jsr:@nostrify/nostrify@~0.39.1",
"jsr:@nostrify/policies@~0.36.1",
"jsr:@nostrify/types@0.36",
"jsr:@soapbox/kysely-pglite@1",

View file

@ -1,7 +0,0 @@
{
"name": "@ditto/api",
"version": "1.1.0",
"exports": {
"./middleware": "./middleware/mod.ts"
}
}

View file

@ -1,19 +0,0 @@
import { Hono } from '@hono/hono';
import { assertEquals } from '@std/assert';
import { confMw } from './confMw.ts';
Deno.test('confMw', async () => {
const env = new Map([
['DITTO_NSEC', 'nsec19shyxpuzd0cq2p5078fwnws7tyykypud6z205fzhlmlrs2vpz6hs83zwkw'],
]);
const app = new Hono();
app.get('/', confMw(env), (c) => c.text(c.var.conf.pubkey));
const response = await app.request('/');
const body = await response.text();
assertEquals(body, '1ba0c5ed1bbbf3b7eb0d7843ba16836a0201ea68a76bafcba507358c45911ff6');
});

View file

@ -1,15 +0,0 @@
import { DittoConf } from '@ditto/conf';
import type { MiddlewareHandler } from '@hono/hono';
/** Set Ditto config. */
export function confMw(
env: { get(key: string): string | undefined },
): MiddlewareHandler<{ Variables: { conf: DittoConf } }> {
const conf = new DittoConf(env);
return async (c, next) => {
c.set('conf', conf);
await next();
};
}

View file

@ -1,22 +0,0 @@
import { Hono } from '@hono/hono';
import { assertEquals } from '@std/assert';
import { confMw } from './confMw.ts';
import { confRequiredMw } from './confRequiredMw.ts';
Deno.test('confRequiredMw', async (t) => {
const app = new Hono();
app.get('/without', confRequiredMw, (c) => c.text('ok'));
app.get('/with', confMw(new Map()), confRequiredMw, (c) => c.text('ok'));
await t.step('without conf returns 500', async () => {
const response = await app.request('/without');
assertEquals(response.status, 500);
});
await t.step('with conf returns 200', async () => {
const response = await app.request('/with');
assertEquals(response.status, 200);
});
});

View file

@ -1,15 +0,0 @@
import { HTTPException } from '@hono/hono/http-exception';
import type { DittoConf } from '@ditto/conf';
import type { MiddlewareHandler } from '@hono/hono';
/** Throws an error if conf isn't set. */
export const confRequiredMw: MiddlewareHandler<{ Variables: { conf: DittoConf } }> = async (c, next) => {
const { conf } = c.var;
if (!conf) {
throw new HTTPException(500, { message: 'Ditto config not set in request.' });
}
await next();
};

View file

@ -1,2 +0,0 @@
export { confMw } from './confMw.ts';
export { confRequiredMw } from './confRequiredMw.ts';

View file

@ -9,12 +9,11 @@ Deno.test('DittoConfig', async (t) => {
const config = new DittoConf(env);
await t.step('nsec', () => {
assertEquals(config.nsec, 'nsec19shyxpuzd0cq2p5078fwnws7tyykypud6z205fzhlmlrs2vpz6hs83zwkw');
});
await t.step('pubkey', () => {
assertEquals(config.pubkey, '1ba0c5ed1bbbf3b7eb0d7843ba16836a0201ea68a76bafcba507358c45911ff6');
await t.step('signer', async () => {
assertEquals(
await config.signer.getPublicKey(),
'1ba0c5ed1bbbf3b7eb0d7843ba16836a0201ea68a76bafcba507358c45911ff6',
);
});
});
@ -22,8 +21,8 @@ Deno.test('DittoConfig defaults', async (t) => {
const env = new Map<string, string>();
const config = new DittoConf(env);
await t.step('nsec throws', () => {
assertThrows(() => config.nsec);
await t.step('signer throws', () => {
assertThrows(() => config.signer);
});
await t.step('port', () => {

View file

@ -1,11 +1,11 @@
import Module from 'node:module';
import os from 'node:os';
import path from 'node:path';
import ISO6391, { type LanguageCode } from 'iso-639-1';
import { getPublicKey, nip19 } from 'nostr-tools';
import { NSecSigner } from '@nostrify/nostrify';
import { decodeBase64 } from '@std/encoding/base64';
import { encodeBase64Url } from '@std/encoding/base64url';
import ISO6391, { type LanguageCode } from 'iso-639-1';
import { nip19 } from 'nostr-tools';
import { getEcdsaPublicKey } from './utils/crypto.ts';
import { optionalBooleanSchema, optionalNumberSchema } from './utils/schema.ts';
@ -15,35 +15,36 @@ import { mergeURLPath } from './utils/url.ts';
export class DittoConf {
constructor(private env: { get(key: string): string | undefined }) {}
/** Cached parsed admin pubkey value. */
private _pubkey: string | undefined;
/** Cached parsed admin signer. */
private _signer: NSecSigner | undefined;
/** Cached parsed VAPID public key value. */
private _vapidPublicKey: Promise<string | undefined> | undefined;
/** Ditto admin secret key in nip19 format. This is the way it's configured by an admin. */
get nsec(): `nsec1${string}` {
const value = this.env.get('DITTO_NSEC');
if (!value) {
/**
* Ditto admin secret key in hex format.
* @deprecated Use `signer` instead. TODO: handle auth tokens.
*/
get seckey(): Uint8Array {
const nsec = this.env.get('DITTO_NSEC');
if (!nsec) {
throw new Error('Missing DITTO_NSEC');
}
if (!value.startsWith('nsec1')) {
if (!nsec.startsWith('nsec1')) {
throw new Error('Invalid DITTO_NSEC');
}
return value as `nsec1${string}`;
return nip19.decode(nsec as `nsec1${string}`).data;
}
/** Ditto admin secret key in hex format. */
get seckey(): Uint8Array {
return nip19.decode(this.nsec).data;
}
/** Ditto admin public key in hex format. */
get pubkey(): string {
if (!this._pubkey) {
this._pubkey = getPublicKey(this.seckey);
/** Ditto admin signer. */
get signer(): NSecSigner {
if (!this._signer) {
this._signer = new NSecSigner(this.seckey);
}
return this._pubkey;
return this._signer;
}
/** Port to use when serving the HTTP server. */
@ -354,7 +355,7 @@ export class DittoConf {
/** Absolute path to the data directory used by Ditto. */
get dataDir(): string {
return this.env.get('DITTO_DATA_DIR') || path.join(cwd(), 'data');
return this.env.get('DITTO_DATA_DIR') || path.join(Deno.cwd(), 'data');
}
/** Absolute path of the Deno directory. */
@ -465,12 +466,3 @@ export class DittoConf {
return Number(this.env.get('STREAK_WINDOW') || 129600);
}
}
/**
* HACK: get cwd without read permissions.
* https://github.com/denoland/deno/issues/27080#issuecomment-2504150155
*/
function cwd() {
// @ts-ignore Internal method, but it does exist.
return Module._nodeModulePaths('a')[0].slice(0, -15);
}
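
A minimal usage sketch of the new signer getter, reusing the throwaway test nsec from the DittoConfig test above; code that previously read conf.pubkey now awaits the signer instead:

import { DittoConf } from '@ditto/conf';

// Throwaway key from the test suite above, not a real secret.
const env = new Map([
  ['DITTO_NSEC', 'nsec19shyxpuzd0cq2p5078fwnws7tyykypud6z205fzhlmlrs2vpz6hs83zwkw'],
]);

const conf = new DittoConf(env);

// Replaces the removed pubkey getter; NSecSigner can also sign events.
const pubkey = await conf.signer.getPublicKey();
console.log(pubkey); // 1ba0c5ed1bbbf3b7eb0d7843ba16836a0201ea68a76bafcba507358c45911ff6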

View file

@ -1,6 +0,0 @@
import { DittoDB } from './DittoDB.ts';
Deno.test('DittoDB', async () => {
const db = DittoDB.create('memory://');
await DittoDB.migrate(db.kysely);
});

View file

@ -1,69 +1,16 @@
import fs from 'node:fs/promises';
import path from 'node:path';
import type { Kysely } from 'kysely';
import { logi } from '@soapbox/logi';
import { FileMigrationProvider, type Kysely, Migrator } from 'kysely';
import { DittoPglite } from './adapters/DittoPglite.ts';
import { DittoPostgres } from './adapters/DittoPostgres.ts';
import type { JsonValue } from '@std/json';
import type { DittoDatabase, DittoDatabaseOpts } from './DittoDatabase.ts';
import type { DittoTables } from './DittoTables.ts';
export class DittoDB {
/** Open a new database connection. */
static create(databaseUrl: string, opts?: DittoDatabaseOpts): DittoDatabase {
const { protocol } = new URL(databaseUrl);
switch (protocol) {
case 'file:':
case 'memory:':
return DittoPglite.create(databaseUrl, opts);
case 'postgres:':
case 'postgresql:':
return DittoPostgres.create(databaseUrl, opts);
default:
throw new Error('Unsupported database URL.');
}
}
/** Migrate the database to the latest version. */
static async migrate(kysely: Kysely<DittoTables>) {
const migrator = new Migrator({
db: kysely,
provider: new FileMigrationProvider({
fs,
path,
migrationFolder: new URL(import.meta.resolve('./migrations')).pathname,
}),
});
logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Running migrations...', state: 'started' });
const { results, error } = await migrator.migrateToLatest();
if (error) {
logi({
level: 'fatal',
ns: 'ditto.db.migration',
msg: 'Migration failed.',
state: 'failed',
results: results as unknown as JsonValue,
error: error instanceof Error ? error : null,
});
Deno.exit(1);
} else {
if (!results?.length) {
logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Everything up-to-date.', state: 'skipped' });
} else {
logi({
level: 'info',
ns: 'ditto.db.migration',
msg: 'Migrations finished!',
state: 'migrated',
results: results as unknown as JsonValue,
});
}
}
}
export interface DittoDB extends AsyncDisposable {
readonly kysely: Kysely<DittoTables>;
readonly poolSize: number;
readonly availableConnections: number;
migrate(): Promise<void>;
listen(channel: string, callback: (payload: string) => void): void;
}
export interface DittoDBOpts {
poolSize?: number;
debug?: 0 | 1 | 2 | 3 | 4 | 5;
}
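
The class with static create/migrate methods becomes a plain interface, and because it extends AsyncDisposable an adapter can be scoped with await using. A minimal sketch against the in-memory PGlite adapter shown further down (the query is illustrative only):

import { DittoPglite } from '@ditto/db';

// DittoPglite implements the DittoDB interface above.
await using db = new DittoPglite('memory://');

// migrate() on the instance replaces the old static DittoDB.migrate(kysely).
await db.migrate();

const rows = await db.kysely
  .selectFrom('nostr_events')
  .selectAll()
  .limit(1)
  .execute();

console.log(rows, db.poolSize, db.availableConnections);
// kysely.destroy() runs automatically via [Symbol.asyncDispose] at scope exit.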

View file

@ -1,15 +0,0 @@
import type { Kysely } from 'kysely';
import type { DittoTables } from './DittoTables.ts';
export interface DittoDatabase {
readonly kysely: Kysely<DittoTables>;
readonly poolSize: number;
readonly availableConnections: number;
listen(channel: string, callback: (payload: string) => void): void;
}
export interface DittoDatabaseOpts {
poolSize?: number;
debug?: 0 | 1 | 2 | 3 | 4 | 5;
}

View file

@ -0,0 +1,52 @@
import fs from 'node:fs/promises';
import path from 'node:path';
import { logi } from '@soapbox/logi';
import { FileMigrationProvider, type Kysely, Migrator } from 'kysely';
import type { JsonValue } from '@std/json';
export class DittoPgMigrator {
private migrator: Migrator;
// deno-lint-ignore no-explicit-any
constructor(private kysely: Kysely<any>) {
this.migrator = new Migrator({
db: this.kysely,
provider: new FileMigrationProvider({
fs,
path,
migrationFolder: new URL(import.meta.resolve('./migrations')).pathname,
}),
});
}
async migrate(): Promise<void> {
logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Running migrations...', state: 'started' });
const { results, error } = await this.migrator.migrateToLatest();
if (error) {
logi({
level: 'fatal',
ns: 'ditto.db.migration',
msg: 'Migration failed.',
state: 'failed',
results: results as unknown as JsonValue,
error: error instanceof Error ? error : null,
});
throw new Error('Migration failed.');
} else {
if (!results?.length) {
logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Everything up-to-date.', state: 'skipped' });
} else {
logi({
level: 'info',
ns: 'ditto.db.migration',
msg: 'Migrations finished!',
state: 'migrated',
results: results as unknown as JsonValue,
});
}
}
}
}

View file

@ -2,8 +2,9 @@ import { assertEquals } from '@std/assert';
import { DittoPglite } from './DittoPglite.ts';
Deno.test('DittoPglite.create', async () => {
const db = DittoPglite.create('memory://');
Deno.test('DittoPglite', async () => {
const db = new DittoPglite('memory://');
await db.migrate();
assertEquals(db.poolSize, 1);
assertEquals(db.availableConnections, 1);

View file

@ -4,38 +4,49 @@ import { PgliteDialect } from '@soapbox/kysely-pglite';
import { Kysely } from 'kysely';
import { KyselyLogger } from '../KyselyLogger.ts';
import { DittoPgMigrator } from '../DittoPgMigrator.ts';
import { isWorker } from '../utils/worker.ts';
import type { DittoDatabase, DittoDatabaseOpts } from '../DittoDatabase.ts';
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';
export class DittoPglite {
static create(databaseUrl: string, opts?: DittoDatabaseOpts): DittoDatabase {
export class DittoPglite implements DittoDB {
readonly poolSize = 1;
readonly availableConnections = 1;
readonly kysely: Kysely<DittoTables>;
private pglite: PGlite;
private migrator: DittoPgMigrator;
constructor(databaseUrl: string, opts?: DittoDBOpts) {
const url = new URL(databaseUrl);
if (url.protocol === 'file:' && isWorker()) {
throw new Error('PGlite is not supported in worker threads.');
}
const pglite = new PGlite(databaseUrl, {
this.pglite = new PGlite(databaseUrl, {
extensions: { pg_trgm },
debug: opts?.debug,
});
const kysely = new Kysely<DittoTables>({
dialect: new PgliteDialect({ database: pglite }),
this.kysely = new Kysely<DittoTables>({
dialect: new PgliteDialect({ database: this.pglite }),
log: KyselyLogger,
});
const listen = (channel: string, callback: (payload: string) => void): void => {
pglite.listen(channel, callback);
};
this.migrator = new DittoPgMigrator(this.kysely);
}
return {
kysely,
poolSize: 1,
availableConnections: 1,
listen,
};
listen(channel: string, callback: (payload: string) => void): void {
this.pglite.listen(channel, callback);
}
async migrate(): Promise<void> {
await this.migrator.migrate();
}
async [Symbol.asyncDispose](): Promise<void> {
await this.kysely.destroy();
}
}

View file

@ -0,0 +1,6 @@
import { DittoPolyPg } from './DittoPolyPg.ts';
Deno.test('DittoPolyPg', async () => {
const db = new DittoPolyPg('memory://');
await db.migrate();
});

View file

@ -0,0 +1,53 @@
import { DittoPglite } from './DittoPglite.ts';
import { DittoPostgres } from './DittoPostgres.ts';
import type { Kysely } from 'kysely';
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';
/** Creates either a PGlite or Postgres connection depending on the databaseUrl. */
export class DittoPolyPg implements DittoDB {
private adapter: DittoDB;
/** Open a new database connection. */
constructor(databaseUrl: string, opts?: DittoDBOpts) {
const { protocol } = new URL(databaseUrl);
switch (protocol) {
case 'file:':
case 'memory:':
this.adapter = new DittoPglite(databaseUrl, opts);
break;
case 'postgres:':
case 'postgresql:':
this.adapter = new DittoPostgres(databaseUrl, opts);
break;
default:
throw new Error('Unsupported database URL.');
}
}
get kysely(): Kysely<DittoTables> {
return this.adapter.kysely;
}
async migrate(): Promise<void> {
await this.adapter.migrate();
}
listen(channel: string, callback: (payload: string) => void): void {
this.adapter.listen(channel, callback);
}
get poolSize(): number {
return this.adapter.poolSize;
}
get availableConnections(): number {
return this.adapter.availableConnections;
}
async [Symbol.asyncDispose](): Promise<void> {
await this.adapter[Symbol.asyncDispose]();
}
}
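
A sketch of the URL-based dispatch: one constructor covers both adapters, and callers only see the DittoDB interface (the channel name below is hypothetical):

import { DittoPolyPg } from '@ditto/db';

// memory:// and file:// select the PGlite adapter;
// postgres:// or postgresql:// would select the postgres.js adapter instead.
await using db = new DittoPolyPg('memory://');
await db.migrate();

// Hypothetical channel; maps to pglite.listen() here and to
// Postgres LISTEN/NOTIFY with the other adapter.
db.listen('nostr_event', (payload) => console.log(payload));

console.log(db.poolSize, db.availableConnections); // 1 1 for PGlite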

View file

@ -12,49 +12,54 @@ import {
import { type PostgresJSDialectConfig, PostgresJSDriver } from 'kysely-postgres-js';
import postgres from 'postgres';
import { DittoPgMigrator } from '../DittoPgMigrator.ts';
import { KyselyLogger } from '../KyselyLogger.ts';
import type { DittoDatabase, DittoDatabaseOpts } from '../DittoDatabase.ts';
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';
export class DittoPostgres {
static create(databaseUrl: string, opts?: DittoDatabaseOpts): DittoDatabase {
const pg = postgres(databaseUrl, { max: opts?.poolSize });
export class DittoPostgres implements DittoDB {
private pg: ReturnType<typeof postgres>;
private migrator: DittoPgMigrator;
const kysely = new Kysely<DittoTables>({
readonly kysely: Kysely<DittoTables>;
constructor(databaseUrl: string, opts?: DittoDBOpts) {
this.pg = postgres(databaseUrl, { max: opts?.poolSize });
this.kysely = new Kysely<DittoTables>({
dialect: {
createAdapter() {
return new PostgresAdapter();
},
createDriver() {
return new PostgresJSDriver({
postgres: pg as unknown as PostgresJSDialectConfig['postgres'],
});
},
createIntrospector(db) {
return new PostgresIntrospector(db);
},
createQueryCompiler() {
return new DittoPostgresQueryCompiler();
},
createAdapter: () => new PostgresAdapter(),
createDriver: () =>
new PostgresJSDriver({ postgres: this.pg as unknown as PostgresJSDialectConfig['postgres'] }),
createIntrospector: (db) => new PostgresIntrospector(db),
createQueryCompiler: () => new DittoPostgresQueryCompiler(),
},
log: KyselyLogger,
});
const listen = (channel: string, callback: (payload: string) => void): void => {
pg.listen(channel, callback);
};
this.migrator = new DittoPgMigrator(this.kysely);
}
return {
kysely,
get poolSize() {
return pg.connections.open;
},
get availableConnections() {
return pg.connections.idle;
},
listen,
};
listen(channel: string, callback: (payload: string) => void): void {
this.pg.listen(channel, callback);
}
async migrate(): Promise<void> {
await this.migrator.migrate();
}
get poolSize(): number {
return this.pg.connections.open;
}
get availableConnections(): number {
return this.pg.connections.idle;
}
async [Symbol.asyncDispose](): Promise<void> {
await this.pg.end();
await this.kysely.destroy();
}
}

View file

@ -0,0 +1,11 @@
import { assertEquals } from '@std/assert';
import { DummyDB } from './DummyDB.ts';
Deno.test('DummyDB', async () => {
const db = new DummyDB();
await db.migrate();
const rows = await db.kysely.selectFrom('nostr_events').selectAll().execute();
assertEquals(rows, []);
});

View file

@ -0,0 +1,33 @@
import { DummyDriver, Kysely, PostgresAdapter, PostgresIntrospector, PostgresQueryCompiler } from 'kysely';
import type { DittoDB } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';
export class DummyDB implements DittoDB {
readonly kysely: Kysely<DittoTables>;
readonly poolSize = 0;
readonly availableConnections = 0;
constructor() {
this.kysely = new Kysely<DittoTables>({
dialect: {
createAdapter: () => new PostgresAdapter(),
createDriver: () => new DummyDriver(),
createIntrospector: (db) => new PostgresIntrospector(db),
createQueryCompiler: () => new PostgresQueryCompiler(),
},
});
}
listen(): void {
// noop
}
migrate(): Promise<void> {
return Promise.resolve();
}
[Symbol.asyncDispose](): Promise<void> {
return Promise.resolve();
}
}

View file

@ -0,0 +1,16 @@
import { type Kysely, sql } from 'kysely';
export async function up(db: Kysely<unknown>): Promise<void> {
const result = await sql<{ count: number }>`
SELECT COUNT(*) as count
FROM pg_indexes
WHERE indexname = 'nostr_events_new_pkey'
`.execute(db);
if (result.rows[0].count > 0) {
await sql`ALTER INDEX nostr_events_new_pkey RENAME TO nostr_events_pkey;`.execute(db);
}
}
export async function down(_db: Kysely<unknown>): Promise<void> {
}

View file

@ -1,4 +1,7 @@
export { DittoDB } from './DittoDB.ts';
export { DittoPglite } from './adapters/DittoPglite.ts';
export { DittoPolyPg } from './adapters/DittoPolyPg.ts';
export { DittoPostgres } from './adapters/DittoPostgres.ts';
export { DummyDB } from './adapters/DummyDB.ts';
export type { DittoDatabase } from './DittoDatabase.ts';
export type { DittoDB } from './DittoDB.ts';
export type { DittoTables } from './DittoTables.ts';

View file

@ -1,39 +1,41 @@
import { DittoConf } from '@ditto/conf';
import { ApplicationServer, PushMessageOptions, PushSubscriber, PushSubscription } from '@negrel/webpush';
import { NStore } from '@nostrify/types';
import { logi } from '@soapbox/logi';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
interface DittoPushOpts {
conf: DittoConf;
relay: NStore;
}
export class DittoPush {
static _server: Promise<ApplicationServer | undefined> | undefined;
private server: Promise<ApplicationServer | undefined>;
static get server(): Promise<ApplicationServer | undefined> {
if (!this._server) {
this._server = (async () => {
const store = await Storages.db();
const meta = await getInstanceMetadata(store);
const keys = await Conf.vapidKeys;
constructor(opts: DittoPushOpts) {
const { conf, relay } = opts;
if (keys) {
return await ApplicationServer.new({
contactInformation: `mailto:${meta.email}`,
vapidKeys: keys,
});
} else {
logi({
level: 'warn',
ns: 'ditto.push',
msg: 'VAPID keys are not set. Push notifications will be disabled.',
});
}
})();
}
this.server = (async () => {
const meta = await getInstanceMetadata(relay);
const keys = await conf.vapidKeys;
return this._server;
if (keys) {
return await ApplicationServer.new({
contactInformation: `mailto:${meta.email}`,
vapidKeys: keys,
});
} else {
logi({
level: 'warn',
ns: 'ditto.push',
msg: 'VAPID keys are not set. Push notifications will be disabled.',
});
}
})();
}
static async push(
async push(
subscription: PushSubscription,
json: object,
opts: PushMessageOptions = {},

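DittoPush becomes a per-app instance instead of a static singleton wired to Storages and Conf; a hedged sketch of constructing it, reusing the same conf, db, and relay wiring the app entrypoint sets up below (the DittoPush module path is assumed):

import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { DittoPgStore } from '@/storages/DittoPgStore.ts';
import { DittoPush } from '@/DittoPush.ts'; // module path assumed

const conf = new DittoConf(Deno.env);

const db = new DittoPolyPg(conf.databaseUrl, { poolSize: conf.pg.poolSize });
await db.migrate();

const relay = new DittoPgStore({
  db,
  pubkey: await conf.signer.getPublicKey(),
  timeout: conf.db.timeouts.default,
  notify: conf.notifyEnabled,
});

// If conf.vapidKeys is unset, the constructor only logs a warning and no
// ApplicationServer is created; otherwise push(subscription, json, opts)
// delivers Web Push messages for the given subscription.
const push = new DittoPush({ conf, relay });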
View file

@ -1,16 +1,22 @@
import { confMw } from '@ditto/api/middleware';
import { type DittoConf } from '@ditto/conf';
import { DittoTables } from '@ditto/db';
import { type Context, Env as HonoEnv, Handler, Hono, Input as HonoInput, MiddlewareHandler } from '@hono/hono';
import { DittoConf } from '@ditto/conf';
import { DittoDB, DittoPolyPg } from '@ditto/db';
import { paginationMiddleware, tokenMiddleware, userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoApp, type DittoEnv } from '@ditto/mastoapi/router';
import { relayPoolRelaysSizeGauge, relayPoolSubscriptionsSizeGauge } from '@ditto/metrics';
import { type DittoTranslator } from '@ditto/translators';
import { type Context, Handler, Input as HonoInput, MiddlewareHandler } from '@hono/hono';
import { every } from '@hono/hono/combine';
import { cors } from '@hono/hono/cors';
import { serveStatic } from '@hono/hono/deno';
import { NostrEvent, NostrSigner, NStore, NUploader } from '@nostrify/nostrify';
import { Kysely } from 'kysely';
import '@/startup.ts';
import { NostrEvent, NostrSigner, NRelay, NUploader } from '@nostrify/nostrify';
import { cron } from '@/cron.ts';
import { startFirehose } from '@/firehose.ts';
import { DittoAPIStore } from '@/storages/DittoAPIStore.ts';
import { DittoPgStore } from '@/storages/DittoPgStore.ts';
import { DittoPool } from '@/storages/DittoPool.ts';
import { Time } from '@/utils/time.ts';
import { seedZapSplits } from '@/utils/zap-split.ts';
import {
accountController,
@ -134,40 +140,38 @@ import { metricsController } from '@/controllers/metrics.ts';
import { manifestController } from '@/controllers/manifest.ts';
import { nodeInfoController, nodeInfoSchemaController } from '@/controllers/well-known/nodeinfo.ts';
import { nostrController } from '@/controllers/well-known/nostr.ts';
import { DittoTranslator } from '@/interfaces/DittoTranslator.ts';
import { auth98Middleware, requireProof, requireRole } from '@/middleware/auth98Middleware.ts';
import { cacheControlMiddleware } from '@/middleware/cacheControlMiddleware.ts';
import { cspMiddleware } from '@/middleware/cspMiddleware.ts';
import { metricsMiddleware } from '@/middleware/metricsMiddleware.ts';
import { notActivitypubMiddleware } from '@/middleware/notActivitypubMiddleware.ts';
import { paginationMiddleware } from '@/middleware/paginationMiddleware.ts';
import { rateLimitMiddleware } from '@/middleware/rateLimitMiddleware.ts';
import { requireSigner } from '@/middleware/requireSigner.ts';
import { signerMiddleware } from '@/middleware/signerMiddleware.ts';
import { storeMiddleware } from '@/middleware/storeMiddleware.ts';
import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts';
import { translatorMiddleware } from '@/middleware/translatorMiddleware.ts';
import { logiMiddleware } from '@/middleware/logiMiddleware.ts';
import { DittoRelayStore } from '@/storages/DittoRelayStore.ts';
export interface AppEnv extends HonoEnv {
export interface AppEnv extends DittoEnv {
Variables: {
conf: DittoConf;
/** Signer to get the logged-in user's pubkey, relays, and to sign events, or `undefined` if the user isn't logged in. */
signer?: NostrSigner;
/** Uploader for the user to upload files. */
uploader?: NUploader;
/** NIP-98 signed event proving the pubkey is owned by the user. */
proof?: NostrEvent;
/** Kysely instance for the database. */
kysely: Kysely<DittoTables>;
/** Storage for the user, might filter out unwanted content. */
store: NStore;
db: DittoDB;
/** Base database store. No content filtering. */
relay: NRelay;
/** Normalized pagination params. */
pagination: { since?: number; until?: number; limit: number };
/** Normalized list pagination params. */
listPagination: { offset: number; limit: number };
/** Translation service. */
translator?: DittoTranslator;
signal: AbortSignal;
user?: {
/** Signer to get the logged-in user's pubkey, relays, and to sign events, or `undefined` if the user isn't logged in. */
signer: NostrSigner;
/** User's relay. Might filter out unwanted content. */
relay: NRelay;
};
};
}
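
A hedged sketch of a controller against the new AppEnv, mirroring the accounts controllers further down: relay and the optional user come from c.var instead of c.get('signer') and Storages (the controller itself is hypothetical):

import type { AppController } from '@/app.ts';

const whoAmIController: AppController = async (c) => {
  const { relay, user } = c.var;

  if (!user) {
    return c.json({ error: 'Unauthorized' }, 401);
  }

  // user.relay could be used instead to apply the user's content filtering.
  const pubkey = await user.signer.getPublicKey();
  const [profile] = await relay.query([{ kinds: [0], authors: [pubkey], limit: 1 }]);

  return c.json({ pubkey, profile: profile ?? null });
};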
@ -176,38 +180,98 @@ type AppMiddleware = MiddlewareHandler<AppEnv>;
// deno-lint-ignore no-explicit-any
type AppController<P extends string = any> = Handler<AppEnv, P, HonoInput, Response | Promise<Response>>;
const app = new Hono<AppEnv>({ strict: false });
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl, {
poolSize: conf.pg.poolSize,
debug: conf.pgliteDebug,
});
await db.migrate();
const pgstore = new DittoPgStore({
db,
pubkey: await conf.signer.getPublicKey(),
timeout: conf.db.timeouts.default,
notify: conf.notifyEnabled,
});
const pool = new DittoPool({ conf, relay: pgstore });
const relay = new DittoRelayStore({ db, conf, relay: pgstore });
await seedZapSplits(relay);
if (conf.firehoseEnabled) {
startFirehose({
pool,
relay,
concurrency: conf.firehoseConcurrency,
kinds: conf.firehoseKinds,
});
}
if (conf.cronEnabled) {
cron({ conf, db, relay });
}
const app = new DittoApp({ conf, db, relay }, { strict: false });
/** User-provided files in the gitignored `public/` directory. */
const publicFiles = serveStatic({ root: './public/' });
/** Static files provided by the Ditto repo, checked into git. */
const staticFiles = serveStatic({ root: new URL('./static/', import.meta.url).pathname });
app.use(confMw(Deno.env), cacheControlMiddleware({ noStore: true }));
app.use(cacheControlMiddleware({ noStore: true }));
const ratelimit = every(
rateLimitMiddleware(30, Time.seconds(5), false),
rateLimitMiddleware(300, Time.minutes(5), false),
);
app.use('/api/*', metricsMiddleware, ratelimit, paginationMiddleware, logiMiddleware);
const socketTokenMiddleware = tokenMiddleware((c) => {
const token = c.req.header('sec-websocket-protocol');
if (token) {
return `Bearer ${token}`;
}
});
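
tokenMiddleware normally reads the Authorization header; this wrapper lets WebSocket clients authenticate the streaming endpoint instead, since browsers can only smuggle a token through the subprotocol list. A client-side sketch (host, stream parameter, and token are placeholders):

const accessToken = 'placeholder-token';

// The subprotocol list is sent as the Sec-WebSocket-Protocol header,
// which socketTokenMiddleware above rewrites to `Bearer <token>`.
const socket = new WebSocket('wss://ditto.example/api/v1/streaming?stream=user', [accessToken]);

socket.onmessage = (e) => console.log(JSON.parse(e.data));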
app.use(
'/api/*',
(c, next) => {
c.set('relay', new DittoAPIStore({ relay, pool }));
return next();
},
metricsMiddleware,
ratelimit,
paginationMiddleware(),
logiMiddleware,
);
app.use('/.well-known/*', metricsMiddleware, ratelimit, logiMiddleware);
app.use('/nodeinfo/*', metricsMiddleware, ratelimit, logiMiddleware);
app.use('/oauth/*', metricsMiddleware, ratelimit, logiMiddleware);
app.get('/api/v1/streaming', metricsMiddleware, ratelimit, streamingController);
app.get('/api/v1/streaming', socketTokenMiddleware, metricsMiddleware, ratelimit, streamingController);
app.get('/relay', metricsMiddleware, ratelimit, relayController);
app.use(
cspMiddleware(),
cors({ origin: '*', exposeHeaders: ['link'] }),
signerMiddleware,
tokenMiddleware(),
uploaderMiddleware,
auth98Middleware(),
storeMiddleware,
);
app.get('/metrics', metricsController);
app.get('/metrics', async (_c, next) => {
relayPoolRelaysSizeGauge.reset();
relayPoolSubscriptionsSizeGauge.reset();
for (const relay of pool.relays.values()) {
relayPoolRelaysSizeGauge.inc({ ready_state: relay.socket.readyState });
relayPoolSubscriptionsSizeGauge.inc(relay.subscriptions.length);
}
await next();
}, metricsController);
app.get(
'/.well-known/nodeinfo',
@ -251,27 +315,27 @@ app.post('/oauth/revoke', revokeTokenController);
app.post('/oauth/authorize', oauthAuthorizeController);
app.get('/oauth/authorize', oauthController);
app.post('/api/v1/accounts', requireProof({ pow: 20 }), createAccountController);
app.get('/api/v1/accounts/verify_credentials', requireSigner, verifyCredentialsController);
app.patch('/api/v1/accounts/update_credentials', requireSigner, updateCredentialsController);
app.post('/api/v1/accounts', userMiddleware({ verify: true }), createAccountController);
app.get('/api/v1/accounts/verify_credentials', userMiddleware(), verifyCredentialsController);
app.patch('/api/v1/accounts/update_credentials', userMiddleware(), updateCredentialsController);
app.get('/api/v1/accounts/search', accountSearchController);
app.get('/api/v1/accounts/lookup', accountLookupController);
app.get('/api/v1/accounts/relationships', requireSigner, relationshipsController);
app.get('/api/v1/accounts/familiar_followers', requireSigner, familiarFollowersController);
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/block', requireSigner, blockController);
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unblock', requireSigner, unblockController);
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/mute', requireSigner, muteController);
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unmute', requireSigner, unmuteController);
app.get('/api/v1/accounts/relationships', userMiddleware(), relationshipsController);
app.get('/api/v1/accounts/familiar_followers', userMiddleware(), familiarFollowersController);
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/block', userMiddleware(), blockController);
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unblock', userMiddleware(), unblockController);
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/mute', userMiddleware(), muteController);
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unmute', userMiddleware(), unmuteController);
app.post(
'/api/v1/accounts/:pubkey{[0-9a-f]{64}}/follow',
rateLimitMiddleware(2, Time.seconds(1)),
requireSigner,
userMiddleware(),
followController,
);
app.post(
'/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unfollow',
rateLimitMiddleware(2, Time.seconds(1)),
requireSigner,
userMiddleware(),
unfollowController,
);
app.get(
@ -295,22 +359,22 @@ app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/favourited_by', favouritedByControll
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/reblogged_by', rebloggedByController);
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/context', contextController);
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}', statusController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/favourite', requireSigner, favouriteController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/bookmark', requireSigner, bookmarkController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unbookmark', requireSigner, unbookmarkController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/pin', requireSigner, pinController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unpin', requireSigner, unpinController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/favourite', userMiddleware(), favouriteController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/bookmark', userMiddleware(), bookmarkController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unbookmark', userMiddleware(), unbookmarkController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/pin', userMiddleware(), pinController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unpin', userMiddleware(), unpinController);
app.post(
'/api/v1/statuses/:id{[0-9a-f]{64}}/translate',
requireSigner,
userMiddleware(),
rateLimitMiddleware(15, Time.minutes(1)),
translatorMiddleware,
translateController,
);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/reblog', requireSigner, reblogStatusController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unreblog', requireSigner, unreblogStatusController);
app.post('/api/v1/statuses', requireSigner, createStatusController);
app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}', requireSigner, deleteStatusController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/reblog', userMiddleware(), reblogStatusController);
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unreblog', userMiddleware(), unreblogStatusController);
app.post('/api/v1/statuses', userMiddleware(), createStatusController);
app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}', userMiddleware(), deleteStatusController);
app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/quotes', quotesController);
@ -321,7 +385,7 @@ app.put(
);
app.post('/api/v2/media', mediaController);
app.get('/api/v1/timelines/home', rateLimitMiddleware(8, Time.seconds(30)), requireSigner, homeTimelineController);
app.get('/api/v1/timelines/home', rateLimitMiddleware(8, Time.seconds(30)), userMiddleware(), homeTimelineController);
app.get('/api/v1/timelines/public', rateLimitMiddleware(8, Time.seconds(30)), publicTimelineController);
app.get('/api/v1/timelines/tag/:hashtag', rateLimitMiddleware(8, Time.seconds(30)), hashtagTimelineController);
app.get('/api/v1/timelines/suggested', rateLimitMiddleware(8, Time.seconds(30)), suggestedTimelineController);
@ -357,42 +421,42 @@ app.get('/api/v1/suggestions', suggestionsV1Controller);
app.get('/api/v2/suggestions', suggestionsV2Controller);
app.get('/api/v2/ditto/suggestions/local', localSuggestionsController);
app.get('/api/v1/notifications', rateLimitMiddleware(8, Time.seconds(30)), requireSigner, notificationsController);
app.get('/api/v1/notifications/:id', requireSigner, notificationController);
app.get('/api/v1/notifications', rateLimitMiddleware(8, Time.seconds(30)), userMiddleware(), notificationsController);
app.get('/api/v1/notifications/:id', userMiddleware(), notificationController);
app.get('/api/v1/favourites', requireSigner, favouritesController);
app.get('/api/v1/bookmarks', requireSigner, bookmarksController);
app.get('/api/v1/blocks', requireSigner, blocksController);
app.get('/api/v1/mutes', requireSigner, mutesController);
app.get('/api/v1/favourites', userMiddleware(), favouritesController);
app.get('/api/v1/bookmarks', userMiddleware(), bookmarksController);
app.get('/api/v1/blocks', userMiddleware(), blocksController);
app.get('/api/v1/mutes', userMiddleware(), mutesController);
app.get('/api/v1/markers', requireProof(), markersController);
app.post('/api/v1/markers', requireProof(), updateMarkersController);
app.get('/api/v1/markers', userMiddleware({ verify: true }), markersController);
app.post('/api/v1/markers', userMiddleware({ verify: true }), updateMarkersController);
app.get('/api/v1/push/subscription', requireSigner, getSubscriptionController);
app.post('/api/v1/push/subscription', requireProof(), pushSubscribeController);
app.get('/api/v1/push/subscription', userMiddleware(), getSubscriptionController);
app.post('/api/v1/push/subscription', userMiddleware({ verify: true }), pushSubscribeController);
app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions', reactionsController);
app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', reactionsController);
app.put('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', requireSigner, reactionController);
app.delete('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', requireSigner, deleteReactionController);
app.put('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', userMiddleware(), reactionController);
app.delete('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', userMiddleware(), deleteReactionController);
app.get('/api/v1/pleroma/admin/config', requireRole('admin'), configController);
app.post('/api/v1/pleroma/admin/config', requireRole('admin'), updateConfigController);
app.delete('/api/v1/pleroma/admin/statuses/:id', requireRole('admin'), pleromaAdminDeleteStatusController);
app.get('/api/v1/pleroma/admin/config', userMiddleware({ role: 'admin' }), configController);
app.post('/api/v1/pleroma/admin/config', userMiddleware({ role: 'admin' }), updateConfigController);
app.delete('/api/v1/pleroma/admin/statuses/:id', userMiddleware({ role: 'admin' }), pleromaAdminDeleteStatusController);
app.get('/api/v1/admin/ditto/relays', requireRole('admin'), adminRelaysController);
app.put('/api/v1/admin/ditto/relays', requireRole('admin'), adminSetRelaysController);
app.get('/api/v1/admin/ditto/relays', userMiddleware({ role: 'admin' }), adminRelaysController);
app.put('/api/v1/admin/ditto/relays', userMiddleware({ role: 'admin' }), adminSetRelaysController);
app.put('/api/v1/admin/ditto/instance', requireRole('admin'), updateInstanceController);
app.put('/api/v1/admin/ditto/instance', userMiddleware({ role: 'admin' }), updateInstanceController);
app.post('/api/v1/ditto/names', requireSigner, nameRequestController);
app.get('/api/v1/ditto/names', requireSigner, nameRequestsController);
app.post('/api/v1/ditto/names', userMiddleware(), nameRequestController);
app.get('/api/v1/ditto/names', userMiddleware(), nameRequestsController);
app.get('/api/v1/ditto/captcha', rateLimitMiddleware(3, Time.minutes(1)), captchaController);
app.post(
'/api/v1/ditto/captcha/:id/verify',
rateLimitMiddleware(8, Time.minutes(1)),
requireProof(),
userMiddleware({ verify: true }),
captchaVerifyController,
);
@ -403,44 +467,59 @@ app.get(
);
app.get('/api/v1/ditto/:id{[0-9a-f]{64}}/zap_splits', statusZapSplitsController);
app.put('/api/v1/admin/ditto/zap_splits', requireRole('admin'), updateZapSplitsController);
app.delete('/api/v1/admin/ditto/zap_splits', requireRole('admin'), deleteZapSplitsController);
app.put('/api/v1/admin/ditto/zap_splits', userMiddleware({ role: 'admin' }), updateZapSplitsController);
app.delete('/api/v1/admin/ditto/zap_splits', userMiddleware({ role: 'admin' }), deleteZapSplitsController);
app.post('/api/v1/ditto/zap', requireSigner, zapController);
app.post('/api/v1/ditto/zap', userMiddleware(), zapController);
app.get('/api/v1/ditto/statuses/:id{[0-9a-f]{64}}/zapped_by', zappedByController);
app.route('/api/v1/ditto/cashu', cashuApp);
app.post('/api/v1/reports', requireSigner, reportController);
app.get('/api/v1/admin/reports', requireSigner, requireRole('admin'), adminReportsController);
app.get('/api/v1/admin/reports/:id{[0-9a-f]{64}}', requireSigner, requireRole('admin'), adminReportController);
app.post('/api/v1/reports', userMiddleware(), reportController);
app.get('/api/v1/admin/reports', userMiddleware(), userMiddleware({ role: 'admin' }), adminReportsController);
app.get(
'/api/v1/admin/reports/:id{[0-9a-f]{64}}',
userMiddleware(),
userMiddleware({ role: 'admin' }),
adminReportController,
);
app.post(
'/api/v1/admin/reports/:id{[0-9a-f]{64}}/resolve',
requireSigner,
requireRole('admin'),
userMiddleware(),
userMiddleware({ role: 'admin' }),
adminReportResolveController,
);
app.post(
'/api/v1/admin/reports/:id{[0-9a-f]{64}}/reopen',
requireSigner,
requireRole('admin'),
userMiddleware(),
userMiddleware({ role: 'admin' }),
adminReportReopenController,
);
app.get('/api/v1/admin/accounts', requireRole('admin'), adminAccountsController);
app.post('/api/v1/admin/accounts/:id{[0-9a-f]{64}}/action', requireSigner, requireRole('admin'), adminActionController);
app.get('/api/v1/admin/accounts', userMiddleware({ role: 'admin' }), adminAccountsController);
app.post(
'/api/v1/admin/accounts/:id{[0-9a-f]{64}}/action',
userMiddleware(),
userMiddleware({ role: 'admin' }),
adminActionController,
);
app.post(
'/api/v1/admin/accounts/:id{[0-9a-f]{64}}/approve',
requireSigner,
requireRole('admin'),
userMiddleware(),
userMiddleware({ role: 'admin' }),
adminApproveController,
);
app.post('/api/v1/admin/accounts/:id{[0-9a-f]{64}}/reject', requireSigner, requireRole('admin'), adminRejectController);
app.post(
'/api/v1/admin/accounts/:id{[0-9a-f]{64}}/reject',
userMiddleware(),
userMiddleware({ role: 'admin' }),
adminRejectController,
);
app.put('/api/v1/pleroma/admin/users/tag', requireRole('admin'), pleromaAdminTagController);
app.delete('/api/v1/pleroma/admin/users/tag', requireRole('admin'), pleromaAdminUntagController);
app.patch('/api/v1/pleroma/admin/users/suggest', requireRole('admin'), pleromaAdminSuggestController);
app.patch('/api/v1/pleroma/admin/users/unsuggest', requireRole('admin'), pleromaAdminUnsuggestController);
app.put('/api/v1/pleroma/admin/users/tag', userMiddleware({ role: 'admin' }), pleromaAdminTagController);
app.delete('/api/v1/pleroma/admin/users/tag', userMiddleware({ role: 'admin' }), pleromaAdminUntagController);
app.patch('/api/v1/pleroma/admin/users/suggest', userMiddleware({ role: 'admin' }), pleromaAdminSuggestController);
app.patch('/api/v1/pleroma/admin/users/unsuggest', userMiddleware({ role: 'admin' }), pleromaAdminUnsuggestController);
// Not (yet) implemented.
app.get('/api/v1/custom_emojis', emptyArrayController);

View file

@ -1,14 +1,14 @@
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n, NStore } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { getAuthor, getFollowedPubkeys } from '@/queries.ts';
import { booleanParamSchema, fileSchema } from '@/schema.ts';
import { Storages } from '@/storages.ts';
import { uploadFile } from '@/utils/upload.ts';
import { nostrNow } from '@/utils.ts';
import { assertAuthenticated, createEvent, paginated, parseBody, updateEvent, updateListEvent } from '@/utils/api.ts';
import { assertAuthenticated, createEvent, parseBody, updateEvent, updateListEvent } from '@/utils/api.ts';
import { extractIdentifier, lookupAccount, lookupPubkey } from '@/utils/lookup.ts';
import { renderAccounts, renderEventAccounts, renderStatuses } from '@/views.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
@ -26,7 +26,9 @@ const createAccountSchema = z.object({
});
const createAccountController: AppController = async (c) => {
const pubkey = await c.get('signer')?.getPublicKey()!;
const { user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const result = createAccountSchema.safeParse(await c.req.json());
if (!result.success) {
@ -46,15 +48,15 @@ const createAccountController: AppController = async (c) => {
};
const verifyCredentialsController: AppController = async (c) => {
const signer = c.get('signer')!;
const { relay, user } = c.var;
const signer = user!.signer;
const pubkey = await signer.getPublicKey();
const store = await Storages.db();
const [author, [settingsEvent]] = await Promise.all([
getAuthor(pubkey, { signal: AbortSignal.timeout(5000) }),
getAuthor(pubkey, c.var),
store.query([{
relay.query([{
kinds: [30078],
authors: [pubkey],
'#d': ['pub.ditto.pleroma_settings_store'],
@ -70,8 +72,8 @@ const verifyCredentialsController: AppController = async (c) => {
}
const account = author
? await renderAccount(author, { withSource: true, settingsStore })
: await accountFromPubkey(pubkey, { withSource: true, settingsStore });
? renderAccount(author, { withSource: true, settingsStore })
: accountFromPubkey(pubkey, { withSource: true, settingsStore });
return c.json(account);
};
@ -79,7 +81,7 @@ const verifyCredentialsController: AppController = async (c) => {
const accountController: AppController = async (c) => {
const pubkey = c.req.param('pubkey');
const event = await getAuthor(pubkey);
const event = await getAuthor(pubkey, c.var);
if (event) {
assertAuthenticated(c, event);
return c.json(await renderAccount(event));
@ -95,7 +97,7 @@ const accountLookupController: AppController = async (c) => {
return c.json({ error: 'Missing `acct` query parameter.' }, 422);
}
const event = await lookupAccount(decodeURIComponent(acct));
const event = await lookupAccount(decodeURIComponent(acct), c.var);
if (event) {
assertAuthenticated(c, event);
return c.json(await renderAccount(event));
@ -115,11 +117,10 @@ const accountSearchQuerySchema = z.object({
});
const accountSearchController: AppController = async (c) => {
const { signal } = c.req.raw;
const { limit } = c.get('pagination');
const { db, relay, user, pagination, signal } = c.var;
const { limit } = pagination;
const kysely = await Storages.kysely();
const viewerPubkey = await c.get('signer')?.getPublicKey();
const viewerPubkey = await user?.signer.getPublicKey();
const result = accountSearchQuerySchema.safeParse(c.req.query());
@ -128,13 +129,12 @@ const accountSearchController: AppController = async (c) => {
}
const query = decodeURIComponent(result.data.q);
const store = await Storages.search();
const lookup = extractIdentifier(query);
const event = await lookupAccount(lookup ?? query);
const event = await lookupAccount(lookup ?? query, c.var);
if (!event && lookup) {
const pubkey = await lookupPubkey(lookup);
const pubkey = await lookupPubkey(lookup, c.var);
return c.json(pubkey ? [accountFromPubkey(pubkey)] : []);
}
@ -143,9 +143,9 @@ const accountSearchController: AppController = async (c) => {
if (event) {
events.push(event);
} else {
const following = viewerPubkey ? await getFollowedPubkeys(viewerPubkey) : new Set<string>();
const authors = [...await getPubkeysBySearch(kysely, { q: query, limit, offset: 0, following })];
const profiles = await store.query([{ kinds: [0], authors, limit }], { signal });
const following = viewerPubkey ? await getFollowedPubkeys(relay, viewerPubkey, signal) : new Set<string>();
const authors = [...await getPubkeysBySearch(db.kysely, { q: query, limit, offset: 0, following })];
const profiles = await relay.query([{ kinds: [0], authors, limit }], { signal });
for (const pubkey of authors) {
const profile = profiles.find((event) => event.pubkey === pubkey);
@ -155,25 +155,25 @@ const accountSearchController: AppController = async (c) => {
}
}
const accounts = await hydrateEvents({ events, store, signal })
const accounts = await hydrateEvents({ ...c.var, events })
.then((events) => events.map((event) => renderAccount(event)));
return c.json(accounts);
};
const relationshipsController: AppController = async (c) => {
const pubkey = await c.get('signer')?.getPublicKey()!;
const { relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const ids = z.array(z.string()).safeParse(c.req.queries('id[]'));
if (!ids.success) {
return c.json({ error: 'Missing `id[]` query parameters.' }, 422);
}
const db = await Storages.db();
const [sourceEvents, targetEvents] = await Promise.all([
db.query([{ kinds: [3, 10000], authors: [pubkey] }]),
db.query([{ kinds: [3], authors: ids.data }]),
relay.query([{ kinds: [3, 10000], authors: [pubkey] }]),
relay.query([{ kinds: [3], authors: ids.data }]),
]);
const event3 = sourceEvents.find((event) => event.kind === 3 && event.pubkey === pubkey);
@ -201,31 +201,33 @@ const accountStatusesQuerySchema = z.object({
});
const accountStatusesController: AppController = async (c) => {
const { conf, user, signal } = c.var;
const pubkey = c.req.param('pubkey');
const { conf } = c.var;
const { since, until } = c.var.pagination;
const { pinned, limit, exclude_replies, tagged, only_media } = accountStatusesQuerySchema.parse(c.req.query());
const { signal } = c.req.raw;
const store = await Storages.db();
const { relay } = c.var;
const [[author], [user]] = await Promise.all([
store.query([{ kinds: [0], authors: [pubkey], limit: 1 }], { signal }),
store.query([{ kinds: [30382], authors: [conf.pubkey], '#d': [pubkey], limit: 1 }], { signal }),
const [[author], [userEvent]] = await Promise.all([
relay.query([{ kinds: [0], authors: [pubkey], limit: 1 }], { signal }),
relay.query([{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey], limit: 1 }], {
signal,
}),
]);
if (author) {
assertAuthenticated(c, author);
}
const names = getTagSet(user?.tags ?? [], 'n');
const names = getTagSet(userEvent?.tags ?? [], 'n');
if (names.has('disabled')) {
return c.json([]);
}
if (pinned) {
const [pinEvent] = await store.query([{ kinds: [10001], authors: [pubkey], limit: 1 }], { signal });
const [pinEvent] = await relay.query([{ kinds: [10001], authors: [pubkey], limit: 1 }], { signal });
if (pinEvent) {
const pinnedEventIds = getTagSet(pinEvent.tags, 'e');
return renderStatuses(c, [...pinnedEventIds].reverse());
@ -262,8 +264,8 @@ const accountStatusesController: AppController = async (c) => {
const opts = { signal, limit, timeout: conf.db.timeouts.timelines };
const events = await store.query([filter], opts)
.then((events) => hydrateEvents({ events, store, signal }))
const events = await relay.query([filter], opts)
.then((events) => hydrateEvents({ ...c.var, events }))
.then((events) => {
if (exclude_replies) {
return events.filter((event) => {
@ -274,12 +276,12 @@ const accountStatusesController: AppController = async (c) => {
return events;
});
const viewerPubkey = await c.get('signer')?.getPublicKey();
const viewerPubkey = await user?.signer.getPublicKey();
const statuses = await Promise.all(
events.map((event) => {
if (event.kind === 6) return renderReblog(event, { viewerPubkey });
return renderStatus(event, { viewerPubkey });
if (event.kind === 6) return renderReblog(relay, event, { viewerPubkey });
return renderStatus(relay, event, { viewerPubkey });
}),
);
return paginated(c, events, statuses);
@ -301,12 +303,11 @@ const updateCredentialsSchema = z.object({
});
const updateCredentialsController: AppController = async (c) => {
const signer = c.get('signer')!;
const pubkey = await signer.getPublicKey();
const { relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const body = await parseBody(c.req.raw);
const result = updateCredentialsSchema.safeParse(body);
const store = await Storages.db();
const signal = c.req.raw.signal;
if (!result.success) {
return c.json(result.error, 422);
@ -316,7 +317,7 @@ const updateCredentialsController: AppController = async (c) => {
let event: NostrEvent | undefined;
if (keys.length === 1 && keys[0] === 'pleroma_settings_store') {
event = (await store.query([{ kinds: [0], authors: [pubkey] }]))[0];
event = (await relay.query([{ kinds: [0], authors: [pubkey] }]))[0];
} else {
event = await updateEvent(
{ kinds: [0], authors: [pubkey], limit: 1 },
@ -372,7 +373,7 @@ const updateCredentialsController: AppController = async (c) => {
let account: MastodonAccount;
if (event) {
await hydrateEvents({ events: [event], store, signal });
await hydrateEvents({ ...c.var, events: [event] });
account = await renderAccount(event, { withSource: true, settingsStore });
} else {
account = await accountFromPubkey(pubkey, { withSource: true, settingsStore });
@ -391,7 +392,9 @@ const updateCredentialsController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/accounts/#follow */
const followController: AppController = async (c) => {
const sourcePubkey = await c.get('signer')?.getPublicKey()!;
const { relay, user } = c.var;
const sourcePubkey = await user!.signer.getPublicKey();
const targetPubkey = c.req.param('pubkey');
await updateListEvent(
@ -400,7 +403,7 @@ const followController: AppController = async (c) => {
c,
);
const relationship = await getRelationship(sourcePubkey, targetPubkey);
const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
relationship.following = true;
return c.json(relationship);
@ -408,7 +411,9 @@ const followController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/accounts/#unfollow */
const unfollowController: AppController = async (c) => {
const sourcePubkey = await c.get('signer')?.getPublicKey()!;
const { relay, user } = c.var;
const sourcePubkey = await user!.signer.getPublicKey();
const targetPubkey = c.req.param('pubkey');
await updateListEvent(
@ -417,7 +422,7 @@ const unfollowController: AppController = async (c) => {
c,
);
const relationship = await getRelationship(sourcePubkey, targetPubkey);
const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
return c.json(relationship);
};
@ -428,8 +433,9 @@ const followersController: AppController = (c) => {
};
const followingController: AppController = async (c) => {
const { relay, signal } = c.var;
const pubkey = c.req.param('pubkey');
const pubkeys = await getFollowedPubkeys(pubkey);
const pubkeys = await getFollowedPubkeys(relay, pubkey, signal);
return renderAccounts(c, [...pubkeys]);
};
@ -445,7 +451,9 @@ const unblockController: AppController = (c) => {
/** https://docs.joinmastodon.org/methods/accounts/#mute */
const muteController: AppController = async (c) => {
const sourcePubkey = await c.get('signer')?.getPublicKey()!;
const { relay, user } = c.var;
const sourcePubkey = await user!.signer.getPublicKey();
const targetPubkey = c.req.param('pubkey');
await updateListEvent(
@ -454,13 +462,15 @@ const muteController: AppController = async (c) => {
c,
);
const relationship = await getRelationship(sourcePubkey, targetPubkey);
const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
return c.json(relationship);
};
/** https://docs.joinmastodon.org/methods/accounts/#unmute */
const unmuteController: AppController = async (c) => {
const sourcePubkey = await c.get('signer')?.getPublicKey()!;
const { relay, user } = c.var;
const sourcePubkey = await user!.signer.getPublicKey();
const targetPubkey = c.req.param('pubkey');
await updateListEvent(
@ -469,19 +479,17 @@ const unmuteController: AppController = async (c) => {
c,
);
const relationship = await getRelationship(sourcePubkey, targetPubkey);
const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
return c.json(relationship);
};
const favouritesController: AppController = async (c) => {
const pubkey = await c.get('signer')?.getPublicKey()!;
const params = c.get('pagination');
const { signal } = c.req.raw;
const { relay, user, pagination, signal } = c.var;
const store = await Storages.db();
const pubkey = await user!.signer.getPublicKey();
const events7 = await store.query(
[{ kinds: [7], authors: [pubkey], ...params }],
const events7 = await relay.query(
[{ kinds: [7], authors: [pubkey], ...pagination }],
{ signal },
);
@ -489,28 +497,27 @@ const favouritesController: AppController = async (c) => {
.map((event) => event.tags.find((tag) => tag[0] === 'e')?.[1])
.filter((id): id is string => !!id);
const events1 = await store.query([{ kinds: [1, 20], ids }], { signal })
.then((events) => hydrateEvents({ events, store, signal }));
const events1 = await relay.query([{ kinds: [1, 20], ids }], { signal })
.then((events) => hydrateEvents({ ...c.var, events }));
const viewerPubkey = await c.get('signer')?.getPublicKey();
const viewerPubkey = await user?.signer.getPublicKey();
const statuses = await Promise.all(
events1.map((event) => renderStatus(event, { viewerPubkey })),
events1.map((event) => renderStatus(relay, event, { viewerPubkey })),
);
return paginated(c, events1, statuses);
};
const familiarFollowersController: AppController = async (c) => {
const store = await Storages.db();
const signer = c.get('signer')!;
const pubkey = await signer.getPublicKey();
const { relay, user, signal } = c.var;
const pubkey = await user!.signer.getPublicKey();
const ids = z.array(z.string()).parse(c.req.queries('id[]'));
const follows = await getFollowedPubkeys(pubkey);
const follows = await getFollowedPubkeys(relay, pubkey, signal);
const results = await Promise.all(ids.map(async (id) => {
const followLists = await store.query([{ kinds: [3], authors: [...follows], '#p': [id] }])
.then((events) => hydrateEvents({ events, store }));
const followLists = await relay.query([{ kinds: [3], authors: [...follows], '#p': [id] }])
.then((events) => hydrateEvents({ ...c.var, events }));
const accounts = await Promise.all(
followLists.map((event) => event.author ? renderAccount(event.author) : accountFromPubkey(event.pubkey)),
@ -522,12 +529,10 @@ const familiarFollowersController: AppController = async (c) => {
return c.json(results);
};
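Editor's sketch — the pattern running through this file (and the controllers below) is that dependencies are read off c.var instead of being fetched via Storages or c.get(...). A minimal, hypothetical controller under that pattern; the name is invented and the real follow/mute controllers also write a list event before rendering, which is omitted here. It leans on the getRelationship helper reworked just below.

import { type AppController } from '@/app.ts';

const exampleRelationshipController: AppController = async (c) => {
  // relay and user are injected on c.var; no Storages.db() or c.get('signer') needed.
  const { relay, user } = c.var;

  const sourcePubkey = await user!.signer.getPublicKey();
  const targetPubkey = c.req.param('pubkey');

  // The helper now takes the relay store explicitly.
  const relationship = await getRelationship(relay, sourcePubkey, targetPubkey);
  return c.json(relationship);
};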
async function getRelationship(sourcePubkey: string, targetPubkey: string) {
const db = await Storages.db();
async function getRelationship(relay: NStore, sourcePubkey: string, targetPubkey: string) {
const [sourceEvents, targetEvents] = await Promise.all([
db.query([{ kinds: [3, 10000], authors: [sourcePubkey] }]),
db.query([{ kinds: [3], authors: [targetPubkey] }]),
relay.query([{ kinds: [3, 10000], authors: [sourcePubkey] }]),
relay.query([{ kinds: [3], authors: [targetPubkey] }]),
]);
return renderRelationship({

View file

@ -1,12 +1,12 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrFilter } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { booleanParamSchema } from '@/schema.ts';
import { Storages } from '@/storages.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { createAdminEvent, paginated, parseBody, updateEventInfo, updateUser } from '@/utils/api.ts';
import { createAdminEvent, parseBody, updateEventInfo, updateUser } from '@/utils/api.ts';
import { renderNameRequest } from '@/views/ditto.ts';
import { renderAdminAccount, renderAdminAccountFromPubkey } from '@/views/mastodon/admin-accounts.ts';
import { errorJson } from '@/utils/log.ts';
@ -29,10 +29,8 @@ const adminAccountQuerySchema = z.object({
});
const adminAccountsController: AppController = async (c) => {
const { conf } = c.var;
const store = await Storages.db();
const params = c.get('pagination');
const { signal } = c.req.raw;
const { conf, relay, signal, pagination } = c.var;
const {
local,
pending,
@ -43,13 +41,15 @@ const adminAccountsController: AppController = async (c) => {
staff,
} = adminAccountQuerySchema.parse(c.req.query());
const adminPubkey = await conf.signer.getPublicKey();
if (pending) {
if (disabled || silenced || suspended || sensitized) {
return c.json([]);
}
const orig = await store.query(
[{ kinds: [30383], authors: [conf.pubkey], '#k': ['3036'], '#n': ['pending'], ...params }],
const orig = await relay.query(
[{ kinds: [30383], authors: [adminPubkey], '#k': ['3036'], '#n': ['pending'], ...pagination }],
{ signal },
);
@ -59,8 +59,8 @@ const adminAccountsController: AppController = async (c) => {
.filter((id): id is string => !!id),
);
const events = await store.query([{ kinds: [3036], ids: [...ids] }])
.then((events) => hydrateEvents({ store, events, signal }));
const events = await relay.query([{ kinds: [3036], ids: [...ids] }])
.then((events) => hydrateEvents({ ...c.var, events }));
const nameRequests = await Promise.all(events.map(renderNameRequest));
return paginated(c, orig, nameRequests);
@ -86,7 +86,10 @@ const adminAccountsController: AppController = async (c) => {
n.push('moderator');
}
const events = await store.query([{ kinds: [30382], authors: [conf.pubkey], '#n': n, ...params }], { signal });
const events = await relay.query(
[{ kinds: [30382], authors: [adminPubkey], '#n': n, ...pagination }],
{ signal },
);
const pubkeys = new Set<string>(
events
@ -94,8 +97,8 @@ const adminAccountsController: AppController = async (c) => {
.filter((pubkey): pubkey is string => !!pubkey),
);
const authors = await store.query([{ kinds: [0], authors: [...pubkeys] }])
.then((events) => hydrateEvents({ store, events, signal }));
const authors = await relay.query([{ kinds: [0], authors: [...pubkeys] }])
.then((events) => hydrateEvents({ ...c.var, events }));
const accounts = await Promise.all(
[...pubkeys].map((pubkey) => {
@ -107,14 +110,14 @@ const adminAccountsController: AppController = async (c) => {
return paginated(c, events, accounts);
}
const filter: NostrFilter = { kinds: [0], ...params };
const filter: NostrFilter = { kinds: [0], ...pagination };
if (local) {
filter.search = `domain:${conf.url.host}`;
}
const events = await store.query([filter], { signal })
.then((events) => hydrateEvents({ store, events, signal }));
const events = await relay.query([filter], { signal })
.then((events) => hydrateEvents({ ...c.var, events }));
const accounts = await Promise.all(events.map(renderAdminAccount));
return paginated(c, events, accounts);
@ -125,9 +128,9 @@ const adminAccountActionSchema = z.object({
});
const adminActionController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay } = c.var;
const body = await parseBody(c.req.raw);
const store = await Storages.db();
const result = adminAccountActionSchema.safeParse(body);
const authorId = c.req.param('id');
@ -151,15 +154,17 @@ const adminActionController: AppController = async (c) => {
if (data.type === 'suspend') {
n.disabled = true;
n.suspended = true;
store.remove([{ authors: [authorId] }]).catch((e: unknown) => {
relay.remove!([{ authors: [authorId] }]).catch((e: unknown) => {
logi({ level: 'error', ns: 'ditto.api.admin.account.action', type: data.type, error: errorJson(e) });
});
}
if (data.type === 'revoke_name') {
n.revoke_name = true;
store.remove([{ kinds: [30360], authors: [conf.pubkey], '#p': [authorId] }]).catch((e: unknown) => {
logi({ level: 'error', ns: 'ditto.api.admin.account.action', type: data.type, error: errorJson(e) });
});
relay.remove!([{ kinds: [30360], authors: [await conf.signer.getPublicKey()], '#p': [authorId] }]).catch(
(e: unknown) => {
logi({ level: 'error', ns: 'ditto.api.admin.account.action', type: data.type, error: errorJson(e) });
},
);
}
await updateUser(authorId, n, c);
@ -170,9 +175,9 @@ const adminActionController: AppController = async (c) => {
const adminApproveController: AppController = async (c) => {
const { conf } = c.var;
const eventId = c.req.param('id');
const store = await Storages.db();
const { relay } = c.var;
const [event] = await store.query([{ kinds: [3036], ids: [eventId] }]);
const [event] = await relay.query([{ kinds: [3036], ids: [eventId] }]);
if (!event) {
return c.json({ error: 'Event not found' }, 404);
}
@ -185,7 +190,10 @@ const adminApproveController: AppController = async (c) => {
return c.json({ error: 'Invalid NIP-05' }, 400);
}
const [existing] = await store.query([{ kinds: [30360], authors: [conf.pubkey], '#d': [r], limit: 1 }]);
const [existing] = await relay.query([
{ kinds: [30360], authors: [await conf.signer.getPublicKey()], '#d': [r.toLowerCase()], limit: 1 },
]);
if (existing) {
return c.json({ error: 'NIP-05 already granted to another user' }, 400);
}
@ -193,7 +201,8 @@ const adminApproveController: AppController = async (c) => {
await createAdminEvent({
kind: 30360,
tags: [
['d', r],
['d', r.toLowerCase()],
['r', r],
['L', 'nip05.domain'],
['l', r.split('@')[1], 'nip05.domain'],
['p', event.pubkey],
@ -202,7 +211,7 @@ const adminApproveController: AppController = async (c) => {
}, c);
await updateEventInfo(eventId, { pending: false, approved: true, rejected: false }, c);
await hydrateEvents({ events: [event], store });
await hydrateEvents({ ...c.var, events: [event] });
const nameRequest = await renderNameRequest(event);
return c.json(nameRequest);
@ -210,15 +219,15 @@ const adminApproveController: AppController = async (c) => {
const adminRejectController: AppController = async (c) => {
const eventId = c.req.param('id');
const store = await Storages.db();
const { relay } = c.var;
const [event] = await store.query([{ kinds: [3036], ids: [eventId] }]);
const [event] = await relay.query([{ kinds: [3036], ids: [eventId] }]);
if (!event) {
return c.json({ error: 'Event not found' }, 404);
}
await updateEventInfo(eventId, { pending: false, approved: false, rejected: true }, c);
await hydrateEvents({ events: [event], store });
await hydrateEvents({ ...c.var, events: [event] });
const nameRequest = await renderNameRequest(event);
return c.json(nameRequest);
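Editor's sketch — the approval path above grants a name by publishing an admin-signed kind 30360 event. A hedged illustration of the tag layout it writes; the helper name below is hypothetical.

// Hypothetical helper mirroring the kind 30360 grant written by adminApproveController.
function nameGrantTags(name: string, requesterPubkey: string): string[][] {
  return [
    ['d', name.toLowerCase()],                 // replaceable identifier, lowercased for case-insensitive lookup
    ['r', name],                               // the name as originally requested
    ['L', 'nip05.domain'],
    ['l', name.split('@')[1], 'nip05.domain'], // the domain part of user@domain
    ['p', requesterPubkey],                    // who the name is granted to
  ];
}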

View file

@ -1,15 +1,14 @@
import { type AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { getTagSet } from '@/utils/tags.ts';
import { renderStatuses } from '@/views.ts';
/** https://docs.joinmastodon.org/methods/bookmarks/#get */
const bookmarksController: AppController = async (c) => {
const store = await Storages.db();
const pubkey = await c.get('signer')?.getPublicKey()!;
const { signal } = c.req.raw;
const { relay, user, signal } = c.var;
const [event10003] = await store.query(
const pubkey = await user!.signer.getPublicKey();
const [event10003] = await relay.query(
[{ kinds: [10003], authors: [pubkey], limit: 1 }],
{ signal },
);

View file

@ -152,9 +152,11 @@ const pointSchema = z.object({
/** Verify the captcha solution and sign an event in the database. */
export const captchaVerifyController: AppController = async (c) => {
const { user } = c.var;
const id = c.req.param('id');
const result = pointSchema.safeParse(await c.req.json());
const pubkey = await c.get('signer')!.getPublicKey();
const pubkey = await user!.signer.getPublicKey();
if (!result.success) {
return c.json({ error: 'Invalid input' }, { status: 422 });
@ -171,7 +173,7 @@ export const captchaVerifyController: AppController = async (c) => {
if (solved) {
captchas.delete(id);
await updateUser(pubkey, { captcha_solved: true }, c);
return new Response(null, { status: 204 });
return c.newResponse(null, { status: 204 });
}
return c.json({ error: 'Incorrect solution' }, { status: 400 });

View file

@ -1,54 +1,28 @@
import { confMw } from '@ditto/api/middleware';
import { Env as HonoEnv, Hono } from '@hono/hono';
import { NostrSigner, NSecSigner, NStore } from '@nostrify/nostrify';
import { generateSecretKey, getPublicKey } from 'nostr-tools';
import { DittoConf } from '@ditto/conf';
import { type User } from '@ditto/mastoapi/middleware';
import { DittoApp, DittoMiddleware } from '@ditto/mastoapi/router';
import { NSecSigner } from '@nostrify/nostrify';
import { genEvent } from '@nostrify/nostrify/test';
import { bytesToString, stringToBytes } from '@scure/base';
import { stub } from '@std/testing/mock';
import { assertEquals, assertExists, assertObjectMatch } from '@std/assert';
import { generateSecretKey, getPublicKey, nip19 } from 'nostr-tools';
import { createTestDB, genEvent } from '@/test.ts';
import cashuApp from '@/controllers/api/cashu.ts';
import cashuRoute from '@/controllers/api/cashu.ts';
import { createTestDB } from '@/test.ts';
import { walletSchema } from '@/schema.ts';
interface AppEnv extends HonoEnv {
Variables: {
/** Signer to get the logged-in user's pubkey, relays, and to sign events. */
signer: NostrSigner;
/** Storage for the user, might filter out unwanted content. */
store: NStore;
};
}
Deno.test('PUT /wallet must be successful', async () => {
await using test = await createTestRoute();
Deno.test('PUT /wallet must be successful', {
sanitizeOps: false,
sanitizeResources: false,
}, async () => {
using _mock = mockFetch();
await using db = await createTestDB();
const store = db.store;
const sk = generateSecretKey();
const signer = new NSecSigner(sk);
const { route, signer, sk, relay } = test;
const nostrPrivateKey = bytesToString('hex', sk);
const app = new Hono<AppEnv>().use(
async (c, next) => {
c.set('signer', signer);
await next();
},
async (c, next) => {
c.set('store', store);
await next();
},
);
app.use(confMw(new Map()));
app.route('/', cashuApp);
const response = await app.request('/wallet', {
const response = await route.request('/wallet', {
method: 'PUT',
headers: [['content-type', 'application/json']],
headers: {
'content-type': 'application/json',
},
body: JSON.stringify({
mints: [
'https://houston.mint.com',
@ -62,7 +36,7 @@ Deno.test('PUT /wallet must be successful', {
const pubkey = await signer.getPublicKey();
const [wallet] = await store.query([{ authors: [pubkey], kinds: [17375] }]);
const [wallet] = await relay.query([{ authors: [pubkey], kinds: [17375] }]);
assertExists(wallet);
assertEquals(wallet.kind, 17375);
@ -89,7 +63,7 @@ Deno.test('PUT /wallet must be successful', {
]);
assertEquals(data.balance, 0);
const [nutzap_info] = await store.query([{ authors: [pubkey], kinds: [10019] }]);
const [nutzap_info] = await relay.query([{ authors: [pubkey], kinds: [10019] }]);
assertExists(nutzap_info);
assertEquals(nutzap_info.kind, 10019);
@ -104,30 +78,14 @@ Deno.test('PUT /wallet must be successful', {
});
Deno.test('PUT /wallet must NOT be successful: wrong request body/schema', async () => {
using _mock = mockFetch();
await using db = await createTestDB();
const store = db.store;
await using test = await createTestRoute();
const { route } = test;
const sk = generateSecretKey();
const signer = new NSecSigner(sk);
const app = new Hono<AppEnv>().use(
async (c, next) => {
c.set('signer', signer);
await next();
},
async (c, next) => {
c.set('store', store);
await next();
},
);
app.use(confMw(new Map()));
app.route('/', cashuApp);
const response = await app.request('/wallet', {
const response = await route.request('/wallet', {
method: 'PUT',
headers: [['content-type', 'application/json']],
headers: {
'content-type': 'application/json',
},
body: JSON.stringify({
mints: [], // no mints should throw an error
}),
@ -140,32 +98,17 @@ Deno.test('PUT /wallet must NOT be successful: wrong request body/schema', async
});
Deno.test('PUT /wallet must NOT be successful: wallet already exists', async () => {
using _mock = mockFetch();
await using db = await createTestDB();
const store = db.store;
await using test = await createTestRoute();
const { route, sk, relay } = test;
const sk = generateSecretKey();
const signer = new NSecSigner(sk);
await relay.event(genEvent({ kind: 17375 }, sk));
const app = new Hono<AppEnv>().use(
async (c, next) => {
c.set('signer', signer);
await next();
},
async (c, next) => {
c.set('store', store);
await next();
},
);
app.use(confMw(new Map()));
app.route('/', cashuApp);
await db.store.event(genEvent({ kind: 17375 }, sk));
const response = await app.request('/wallet', {
const response = await route.request('/wallet', {
method: 'PUT',
headers: [['content-type', 'application/json']],
headers: {
'authorization': `Bearer ${nip19.nsecEncode(sk)}`,
'content-type': 'application/json',
},
body: JSON.stringify({
mints: ['https://mint.heart.com'],
}),
@ -178,32 +121,15 @@ Deno.test('PUT /wallet must NOT be successful: wallet already exists', async ()
});
Deno.test('GET /wallet must be successful', async () => {
using _mock = mockFetch();
await using db = await createTestDB();
const store = db.store;
await using test = await createTestRoute();
const { route, sk, relay, signer } = test;
const sk = generateSecretKey();
const signer = new NSecSigner(sk);
const pubkey = await signer.getPublicKey();
const privkey = bytesToString('hex', sk);
const p2pk = getPublicKey(stringToBytes('hex', privkey));
const app = new Hono<AppEnv>().use(
async (c, next) => {
c.set('signer', signer);
await next();
},
async (c, next) => {
c.set('store', store);
await next();
},
);
app.use(confMw(new Map()));
app.route('/', cashuApp);
// Wallet
await db.store.event(genEvent({
await relay.event(genEvent({
kind: 17375,
content: await signer.nip44.encrypt(
pubkey,
@ -215,7 +141,7 @@ Deno.test('GET /wallet must be successful', async () => {
}, sk));
// Nutzap information
await db.store.event(genEvent({
await relay.event(genEvent({
kind: 10019,
tags: [
['pubkey', p2pk],
@ -224,7 +150,7 @@ Deno.test('GET /wallet must be successful', async () => {
}, sk));
// Unspent proofs
await db.store.event(genEvent({
await relay.event(genEvent({
kind: 7375,
content: await signer.nip44.encrypt(
pubkey,
@ -265,7 +191,7 @@ Deno.test('GET /wallet must be successful', async () => {
// Nutzap
const senderSk = generateSecretKey();
await db.store.event(genEvent({
await relay.event(genEvent({
kind: 9321,
content: 'Nice post!',
tags: [
@ -278,7 +204,7 @@ Deno.test('GET /wallet must be successful', async () => {
],
}, senderSk));
const response = await app.request('/wallet', {
const response = await route.request('/wallet', {
method: 'GET',
});
@ -294,21 +220,10 @@ Deno.test('GET /wallet must be successful', async () => {
});
Deno.test('GET /mints must be successful', async () => {
using _mock = mockFetch();
await using db = await createTestDB();
const store = db.store;
await using test = await createTestRoute();
const { route } = test;
const app = new Hono<AppEnv>().use(
async (c, next) => {
c.set('store', store);
await next();
},
);
app.use(confMw(new Map()));
app.route('/', cashuApp);
const response = await app.request('/mints', {
const response = await route.request('/mints', {
method: 'GET',
});
@ -318,13 +233,42 @@ Deno.test('GET /mints must be successful', async () => {
assertEquals(body, { mints: [] });
});
function mockFetch() {
async function createTestRoute() {
const conf = new DittoConf(new Map());
const db = await createTestDB();
const relay = db.store;
const sk = generateSecretKey();
const signer = new NSecSigner(sk);
const route = new DittoApp({ db: db.db, relay, conf });
route.use(testUserMiddleware({ signer, relay }));
route.route('/', cashuRoute);
const mock = stub(globalThis, 'fetch', () => {
return Promise.resolve(new Response());
});
return {
[Symbol.dispose]: () => {
route,
db,
conf,
sk,
signer,
relay,
[Symbol.asyncDispose]: async () => {
mock.restore();
await db[Symbol.asyncDispose]();
await relay[Symbol.asyncDispose]();
},
};
}
function testUserMiddleware(user: User<NSecSigner>): DittoMiddleware<{ user: User<NSecSigner> }> {
return async (c, next) => {
c.set('user', user);
await next();
};
}
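Editor's sketch — a new test can lean on the createTestRoute/testUserMiddleware harness above without any per-test Hono wiring. The 404 expectation is an assumption based on the route code (validateAndParseWallet reporting a missing wallet as an error); the imports used here already exist in this file.

Deno.test('GET /wallet returns 404 when no wallet event exists', async () => {
  await using test = await createTestRoute();
  const { route } = test;

  const response = await route.request('/wallet', { method: 'GET' });
  const body = await response.json();

  assertEquals(response.status, 404);
  assertExists(body.error);
});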

View file

@ -1,6 +1,6 @@
import { CashuMint, CashuWallet, MintQuoteState, Proof } from '@cashu/cashu-ts';
import { confRequiredMw } from '@ditto/api/middleware';
import { Hono } from '@hono/hono';
import { userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoRoute } from '@ditto/mastoapi/router';
import { generateSecretKey, getPublicKey } from 'nostr-tools';
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
import { bytesToString } from '@scure/base';
@ -8,8 +8,6 @@ import { logi } from '@soapbox/logi';
import { z } from 'zod';
import { createEvent, parseBody } from '@/utils/api.ts';
import { requireNip44Signer } from '@/middleware/requireSigner.ts';
import { requireStore } from '@/middleware/storeMiddleware.ts';
import { swapNutzapsMiddleware } from '@/middleware/swapNutzapsMiddleware.ts';
import { walletSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
@ -22,7 +20,7 @@ import { tokenEventSchema } from '@/schemas/cashu.ts';
type Wallet = z.infer<typeof walletSchema>;
const app = new Hono().use('*', confRequiredMw, requireStore);
const route = new DittoRoute();
// app.delete('/wallet') -> 204
@ -48,9 +46,9 @@ const createMintQuoteSchema = z.object({
* Creates a new mint quote in a specific mint.
* https://github.com/cashubtc/nuts/blob/main/04.md#mint-quote
*/
app.post('/quote', requireNip44Signer, async (c) => {
const signer = c.var.signer;
const pubkey = await signer.getPublicKey();
route.post('/quote', userMiddleware({ enc: 'nip44' }), async (c) => {
const { user } = c.var;
const pubkey = await user.signer.getPublicKey();
const body = await parseBody(c.req.raw);
const result = createMintQuoteSchema.safeParse(body);
@ -69,7 +67,7 @@ app.post('/quote', requireNip44Signer, async (c) => {
await createEvent({
kind: 7374,
content: await signer.nip44.encrypt(pubkey, mintQuote.quote),
content: await user.signer.nip44.encrypt(pubkey, mintQuote.quote),
tags: [
['expiration', String(mintQuote.expiry)],
['mint', mintUrl],
@ -87,12 +85,9 @@ app.post('/quote', requireNip44Signer, async (c) => {
* Checks if the quote has been paid, if it has then mint new tokens.
* https://github.com/cashubtc/nuts/blob/main/04.md#minting-tokens
*/
app.post('/mint/:quote_id', requireNip44Signer, async (c) => {
const { conf } = c.var;
const signer = c.var.signer;
const { signal } = c.req.raw;
const store = c.get('store');
const pubkey = await signer.getPublicKey();
route.post('/mint/:quote_id', userMiddleware({ enc: 'nip44' }), async (c) => {
const { conf, user, relay, signal } = c.var;
const pubkey = await user.signer.getPublicKey();
const quote_id = c.req.param('quote_id');
const expiredQuoteIds: string[] = [];
@ -103,9 +98,9 @@ app.post('/mint/:quote_id', requireNip44Signer, async (c) => {
}, c);
};
const events = await store.query([{ kinds: [7374], authors: [pubkey] }], { signal });
const events = await relay.query([{ kinds: [7374], authors: [pubkey] }], { signal });
for (const event of events) {
const decryptedQuoteId = await signer.nip44.decrypt(pubkey, event.content);
const decryptedQuoteId = await user.signer.nip44.decrypt(pubkey, event.content);
const mintUrl = event.tags.find(([name]) => name === 'mint')?.[1];
const expiration = Number(event.tags.find(([name]) => name === 'expiration')?.[1]);
const now = nostrNow();
@ -124,7 +119,7 @@ app.post('/mint/:quote_id', requireNip44Signer, async (c) => {
const unspentProofs = await createEvent({
kind: 7375,
content: await signer.nip44.encrypt(
content: await user.signer.nip44.encrypt(
pubkey,
JSON.stringify({
mint: mintUrl,
@ -135,7 +130,7 @@ app.post('/mint/:quote_id', requireNip44Signer, async (c) => {
await createEvent({
kind: 7376,
content: await signer.nip44.encrypt(
content: await user.signer.nip44.encrypt(
pubkey,
JSON.stringify([
['direction', 'in'],
@ -179,12 +174,11 @@ const createWalletSchema = z.object({
* https://github.com/nostr-protocol/nips/blob/master/60.md
* https://github.com/nostr-protocol/nips/blob/master/61.md#nutzap-informational-event
*/
app.put('/wallet', requireNip44Signer, async (c) => {
const { conf, signer } = c.var;
const store = c.get('store');
const pubkey = await signer.getPublicKey();
route.put('/wallet', userMiddleware({ enc: 'nip44' }), async (c) => {
const { conf, user, relay, signal } = c.var;
const pubkey = await user.signer.getPublicKey();
const body = await parseBody(c.req.raw);
const { signal } = c.req.raw;
const result = createWalletSchema.safeParse(body);
if (!result.success) {
@ -193,7 +187,7 @@ app.put('/wallet', requireNip44Signer, async (c) => {
const { mints } = result.data;
const [event] = await store.query([{ authors: [pubkey], kinds: [17375] }], { signal });
const [event] = await relay.query([{ authors: [pubkey], kinds: [17375] }], { signal });
if (event) {
return c.json({ error: 'You already have a wallet 😏' }, 400);
}
@ -210,7 +204,7 @@ app.put('/wallet', requireNip44Signer, async (c) => {
walletContentTags.push(['mint', mint]);
}
const encryptedWalletContentTags = await signer.nip44.encrypt(pubkey, JSON.stringify(walletContentTags));
const encryptedWalletContentTags = await user.signer.nip44.encrypt(pubkey, JSON.stringify(walletContentTags));
// Wallet
await createEvent({
@ -240,13 +234,11 @@ app.put('/wallet', requireNip44Signer, async (c) => {
});
/** Gets a wallet, if it exists. */
app.get('/wallet', requireNip44Signer, swapNutzapsMiddleware, async (c) => {
const { conf, signer } = c.var;
const store = c.get('store');
const pubkey = await signer.getPublicKey();
const { signal } = c.req.raw;
route.get('/wallet', userMiddleware({ enc: 'nip44' }), swapNutzapsMiddleware, async (c) => {
const { conf, relay, user, signal } = c.var;
const pubkey = await user.signer.getPublicKey();
const { data, error } = await validateAndParseWallet(store, signer, pubkey, { signal });
const { data, error } = await validateAndParseWallet(relay, user.signer, pubkey, { signal });
if (error) {
return c.json({ error: error.message }, 404);
}
@ -255,11 +247,11 @@ app.get('/wallet', requireNip44Signer, swapNutzapsMiddleware, async (c) => {
let balance = 0;
const tokens = await store.query([{ authors: [pubkey], kinds: [7375] }], { signal });
const tokens = await relay.query([{ authors: [pubkey], kinds: [7375] }], { signal });
for (const token of tokens) {
try {
const decryptedContent: { mint: string; proofs: Proof[] } = JSON.parse(
await signer.nip44.decrypt(pubkey, token.content),
await user.signer.nip44.decrypt(pubkey, token.content),
);
if (!mints.includes(decryptedContent.mint)) {
@ -286,7 +278,7 @@ app.get('/wallet', requireNip44Signer, swapNutzapsMiddleware, async (c) => {
});
/** Get mints set by the CASHU_MINTS environment variable. */
app.get('/mints', (c) => {
route.get('/mints', (c) => {
const { conf } = c.var;
// TODO: Return full Mint information: https://github.com/cashubtc/nuts/blob/main/06.md
@ -303,11 +295,9 @@ const nutzapSchema = z.object({
});
/** Nutzaps a post or a user. */
app.post('/nutzap', requireNip44Signer, async (c) => {
const store = c.get('store');
const { signal } = c.req.raw;
const { conf, signer } = c.var;
const pubkey = await signer.getPublicKey();
route.post('/nutzap', userMiddleware({ enc: 'nip44' }), async (c) => {
const { conf, relay, user, signal } = c.var;
const pubkey = await user.signer.getPublicKey();
const body = await parseBody(c.req.raw);
const result = nutzapSchema.safeParse(body);
@ -319,13 +309,13 @@ app.post('/nutzap', requireNip44Signer, async (c) => {
let event: DittoEvent;
if (status_id) {
[event] = await store.query([{ kinds: [1], ids: [status_id] }], { signal });
[event] = await relay.query([{ kinds: [1], ids: [status_id] }], { signal });
if (!event) {
return c.json({ error: 'Status not found' }, 404);
}
await hydrateEvents({ events: [event], store, signal });
await hydrateEvents({ ...c.var, events: [event] });
} else {
[event] = await store.query([{ kinds: [0], authors: [account_id] }], { signal });
[event] = await relay.query([{ kinds: [0], authors: [account_id] }], { signal });
if (!event) {
return c.json({ error: 'Account not found' }, 404);
}
@ -335,7 +325,7 @@ app.post('/nutzap', requireNip44Signer, async (c) => {
return c.json({ error: 'Post author does not match author' }, 422);
}
const [nutzapInfo] = await store.query([{ kinds: [10019], authors: [account_id] }], { signal });
const [nutzapInfo] = await relay.query([{ kinds: [10019], authors: [account_id] }], { signal });
if (!nutzapInfo) {
return c.json({ error: 'Target user does not have a nutzap information event' }, 404);
}
@ -350,8 +340,8 @@ app.post('/nutzap', requireNip44Signer, async (c) => {
return c.json({ error: 'Target user does not have a cashu pubkey' }, 422);
}
const unspentProofs = await store.query([{ kinds: [7375], authors: [pubkey] }], { signal });
const organizedProofs = await organizeProofs(unspentProofs, signer);
const unspentProofs = await relay.query([{ kinds: [7375], authors: [pubkey] }], { signal });
const organizedProofs = await organizeProofs(unspentProofs, user.signer);
const proofsToBeUsed: Proof[] = [];
const eventsToBeDeleted: NostrEvent[] = [];
@ -372,7 +362,7 @@ app.post('/nutzap', requireNip44Signer, async (c) => {
}
const event = organizedProofs[mint][key].event;
const decryptedContent = await signer.nip44.decrypt(pubkey, event.content);
const decryptedContent = await user.signer.nip44.decrypt(pubkey, event.content);
const { data: token, success } = n.json().pipe(tokenEventSchema).safeParse(decryptedContent);
@ -402,7 +392,7 @@ app.post('/nutzap', requireNip44Signer, async (c) => {
const newUnspentProof = await createEvent({
kind: 7375,
content: await signer.nip44.encrypt(
content: await user.signer.nip44.encrypt(
pubkey,
JSON.stringify({
mint: selectedMint,
@ -414,7 +404,7 @@ app.post('/nutzap', requireNip44Signer, async (c) => {
await createEvent({
kind: 7376,
content: await signer.nip44.encrypt(
content: await user.signer.nip44.encrypt(
pubkey,
JSON.stringify([
['direction', 'out'],
@ -449,4 +439,4 @@ app.post('/nutzap', requireNip44Signer, async (c) => {
return c.json({ message: 'Nutzap with success!!!' }, 200); // TODO: return wallet entity
});
export default app;
export default route;
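Editor's sketch — the file now follows a single route-module shape: a DittoRoute whose handlers read conf/relay/user/signal off c.var, with userMiddleware({ enc: 'nip44' }) guaranteeing a NIP-44-capable signer on c.var.user. A minimal sketch of that shape with a hypothetical /example endpoint.

import { userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoRoute } from '@ditto/mastoapi/router';

const route = new DittoRoute();

// Hypothetical endpoint; the wiring mirrors the handlers above.
route.get('/example', userMiddleware({ enc: 'nip44' }), async (c) => {
  const { relay, user, signal } = c.var;
  const pubkey = await user.signer.getPublicKey();

  // kind 17375 is the NIP-60 wallet event queried elsewhere in this file.
  const [wallet] = await relay.query([{ kinds: [17375], authors: [pubkey] }], { signal });

  return c.json({ hasWallet: Boolean(wallet) });
});

export default route;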

View file

@ -1,3 +1,4 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';
@ -5,22 +6,30 @@ import { AppController } from '@/app.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getAuthor } from '@/queries.ts';
import { addTag } from '@/utils/tags.ts';
import { createEvent, paginated, parseBody, updateAdminEvent } from '@/utils/api.ts';
import { createEvent, parseBody, updateAdminEvent } from '@/utils/api.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
import { deleteTag } from '@/utils/tags.ts';
import { DittoZapSplits, getZapSplits } from '@/utils/zap-split.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
import { screenshotsSchema } from '@/schemas/nostr.ts';
import { booleanParamSchema, percentageSchema, wsUrlSchema } from '@/schema.ts';
import { booleanParamSchema, percentageSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { renderNameRequest } from '@/views/ditto.ts';
import { accountFromPubkey } from '@/views/mastodon/accounts.ts';
import { renderAccount } from '@/views/mastodon/accounts.ts';
import { Storages } from '@/storages.ts';
import { updateListAdminEvent } from '@/utils/api.ts';
const markerSchema = z.enum(['read', 'write']);
/** WebSocket URL. */
const wsUrlSchema = z.string().refine((val): val is `wss://${string}` | `ws://${string}` => {
try {
const { protocol } = new URL(val);
return protocol === 'wss:' || protocol === 'ws:';
} catch {
return false;
}
}, 'Invalid WebSocket URL');
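Editor's note — a quick usage example for the schema above; the URLs are placeholders.

wsUrlSchema.safeParse('wss://relay.example.com').success;   // true
wsUrlSchema.safeParse('ws://localhost:7777').success;       // true
wsUrlSchema.safeParse('https://relay.example.com').success; // false: protocol must be ws: or wss:
wsUrlSchema.safeParse('not a url').success;                 // false: new URL() throws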
const relaySchema = z.object({
url: wsUrlSchema,
marker: markerSchema.optional(),
@ -29,11 +38,10 @@ const relaySchema = z.object({
type RelayEntity = z.infer<typeof relaySchema>;
export const adminRelaysController: AppController = async (c) => {
const { conf } = c.var;
const store = await Storages.db();
const { conf, relay } = c.var;
const [event] = await store.query([
{ kinds: [10002], authors: [conf.pubkey], limit: 1 },
const [event] = await relay.query([
{ kinds: [10002], authors: [await conf.signer.getPublicKey()], limit: 1 },
]);
if (!event) {
@ -44,17 +52,17 @@ export const adminRelaysController: AppController = async (c) => {
};
export const adminSetRelaysController: AppController = async (c) => {
const store = await Storages.db();
const { conf, relay } = c.var;
const relays = relaySchema.array().parse(await c.req.json());
const event = await new AdminSigner().signEvent({
const event = await conf.signer.signEvent({
kind: 10002,
tags: relays.map(({ url, marker }) => marker ? ['r', url, marker] : ['r', url]),
content: '',
created_at: Math.floor(Date.now() / 1000),
});
await store.event(event);
await relay.event(event);
return c.json(renderRelays(event));
};
@ -64,7 +72,7 @@ function renderRelays(event: NostrEvent): RelayEntity[] {
return event.tags.reduce((acc, [name, url, marker]) => {
if (name === 'r') {
const relay: RelayEntity = {
url,
url: url as `wss://${string}`,
marker: markerSchema.safeParse(marker).success ? marker as 'read' | 'write' : undefined,
};
acc.push(relay);
@ -79,30 +87,40 @@ const nameRequestSchema = z.object({
});
export const nameRequestController: AppController = async (c) => {
const store = await Storages.db();
const signer = c.get('signer')!;
const pubkey = await signer.getPublicKey();
const { conf } = c.var;
const { conf, relay, user } = c.var;
const { name, reason } = nameRequestSchema.parse(await c.req.json());
const pubkey = await user!.signer.getPublicKey();
const result = nameRequestSchema.safeParse(await c.req.json());
const [existing] = await store.query([{ kinds: [3036], authors: [pubkey], '#r': [name], limit: 1 }]);
if (!result.success) {
return c.json({ error: 'Invalid username', schema: result.error }, 400);
}
const { name, reason } = result.data;
const [existing] = await relay.query([{ kinds: [3036], authors: [pubkey], '#r': [name.toLowerCase()], limit: 1 }]);
if (existing) {
return c.json({ error: 'Name request already exists' }, 400);
}
const r: string[][] = [['r', name]];
if (name !== name.toLowerCase()) {
r.push(['r', name.toLowerCase()]);
}
const event = await createEvent({
kind: 3036,
content: reason,
tags: [
['r', name],
...r,
['L', 'nip05.domain'],
['l', name.split('@')[1], 'nip05.domain'],
['p', conf.pubkey],
['p', await conf.signer.getPublicKey()],
],
}, c);
await hydrateEvents({ events: [event], store: await Storages.db() });
await hydrateEvents({ ...c.var, events: [event] });
const nameRequest = await renderNameRequest(event);
return c.json(nameRequest);
@ -114,17 +132,15 @@ const nameRequestsSchema = z.object({
});
export const nameRequestsController: AppController = async (c) => {
const { conf } = c.var;
const store = await Storages.db();
const signer = c.get('signer')!;
const pubkey = await signer.getPublicKey();
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const params = c.get('pagination');
const { approved, rejected } = nameRequestsSchema.parse(c.req.query());
const filter: NostrFilter = {
kinds: [30383],
authors: [conf.pubkey],
authors: [await conf.signer.getPublicKey()],
'#k': ['3036'],
'#p': [pubkey],
...params,
@ -137,7 +153,7 @@ export const nameRequestsController: AppController = async (c) => {
filter['#n'] = ['rejected'];
}
const orig = await store.query([filter]);
const orig = await relay.query([filter]);
const ids = new Set<string>();
for (const event of orig) {
@ -151,8 +167,8 @@ export const nameRequestsController: AppController = async (c) => {
return c.json([]);
}
const events = await store.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }])
.then((events) => hydrateEvents({ store, events: events, signal: c.req.raw.signal }));
const events = await relay.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }])
.then((events) => hydrateEvents({ ...c.var, events }));
const nameRequests = await Promise.all(
events.map((event) => renderNameRequest(event)),
@ -170,16 +186,17 @@ const zapSplitSchema = z.record(
);
export const updateZapSplitsController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay } = c.var;
const body = await parseBody(c.req.raw);
const result = zapSplitSchema.safeParse(body);
const store = c.get('store');
if (!result.success) {
return c.json({ error: result.error }, 400);
}
const dittoZapSplit = await getZapSplits(store, conf.pubkey);
const adminPubkey = await conf.signer.getPublicKey();
const dittoZapSplit = await getZapSplits(relay, adminPubkey);
if (!dittoZapSplit) {
return c.json({ error: 'Zap split not activated, restart the server.' }, 404);
}
@ -188,11 +205,11 @@ export const updateZapSplitsController: AppController = async (c) => {
const pubkeys = Object.keys(data);
if (pubkeys.length < 1) {
return c.json(200);
return c.newResponse(null, { status: 204 });
}
await updateListAdminEvent(
{ kinds: [30078], authors: [conf.pubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 },
{ kinds: [30078], authors: [adminPubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 },
(tags) =>
pubkeys.reduce((accumulator, pubkey) => {
return addTag(accumulator, ['p', pubkey, data[pubkey].weight.toString(), data[pubkey].message]);
@ -200,22 +217,23 @@ export const updateZapSplitsController: AppController = async (c) => {
c,
);
return c.json(200);
return c.newResponse(null, { status: 204 });
};
const deleteZapSplitSchema = z.array(n.id()).min(1);
export const deleteZapSplitsController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay } = c.var;
const body = await parseBody(c.req.raw);
const result = deleteZapSplitSchema.safeParse(body);
const store = c.get('store');
if (!result.success) {
return c.json({ error: result.error }, 400);
}
const dittoZapSplit = await getZapSplits(store, conf.pubkey);
const adminPubkey = await conf.signer.getPublicKey();
const dittoZapSplit = await getZapSplits(relay, adminPubkey);
if (!dittoZapSplit) {
return c.json({ error: 'Zap split not activated, restart the server.' }, 404);
}
@ -223,7 +241,7 @@ export const deleteZapSplitsController: AppController = async (c) => {
const { data } = result;
await updateListAdminEvent(
{ kinds: [30078], authors: [conf.pubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 },
{ kinds: [30078], authors: [adminPubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 },
(tags) =>
data.reduce((accumulator, currentValue) => {
return deleteTag(accumulator, ['p', currentValue]);
@ -231,14 +249,13 @@ export const deleteZapSplitsController: AppController = async (c) => {
c,
);
return c.json(200);
return c.newResponse(null, { status: 204 });
};
export const getZapSplitsController: AppController = async (c) => {
const { conf } = c.var;
const store = c.get('store');
const { conf, relay } = c.var;
const dittoZapSplit: DittoZapSplits | undefined = await getZapSplits(store, conf.pubkey) ?? {};
const dittoZapSplit: DittoZapSplits | undefined = await getZapSplits(relay, await conf.signer.getPublicKey()) ?? {};
if (!dittoZapSplit) {
return c.json({ error: 'Zap split not activated, restart the server.' }, 404);
}
@ -246,7 +263,7 @@ export const getZapSplitsController: AppController = async (c) => {
const pubkeys = Object.keys(dittoZapSplit);
const zapSplits = await Promise.all(pubkeys.map(async (pubkey) => {
const author = await getAuthor(pubkey);
const author = await getAuthor(pubkey, c.var);
const account = author ? renderAccount(author) : accountFromPubkey(pubkey);
@ -261,11 +278,11 @@ export const getZapSplitsController: AppController = async (c) => {
};
export const statusZapSplitsController: AppController = async (c) => {
const store = c.get('store');
const id = c.req.param('id');
const { signal } = c.req.raw;
const { relay, signal } = c.var;
const [event] = await store.query([{ kinds: [1, 20], ids: [id], limit: 1 }], { signal });
const id = c.req.param('id');
const [event] = await relay.query([{ kinds: [1, 20], ids: [id], limit: 1 }], { signal });
if (!event) {
return c.json({ error: 'Event not found' }, 404);
}
@ -274,8 +291,8 @@ export const statusZapSplitsController: AppController = async (c) => {
const pubkeys = zapsTag.map((name) => name[1]);
const users = await store.query([{ authors: pubkeys, kinds: [0], limit: pubkeys.length }], { signal });
await hydrateEvents({ events: users, store, signal });
const users = await relay.query([{ authors: pubkeys, kinds: [0], limit: pubkeys.length }], { signal });
await hydrateEvents({ ...c.var, events: users });
const zapSplits = (await Promise.all(pubkeys.map((pubkey) => {
const author = (users.find((event) => event.pubkey === pubkey) as DittoEvent | undefined)?.author;
@ -308,16 +325,17 @@ const updateInstanceSchema = z.object({
});
export const updateInstanceController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, signal } = c.var;
const body = await parseBody(c.req.raw);
const result = updateInstanceSchema.safeParse(body);
const pubkey = conf.pubkey;
const pubkey = await conf.signer.getPublicKey();
if (!result.success) {
return c.json(result.error, 422);
}
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const meta = await getInstanceMetadata(relay, signal);
await updateAdminEvent(
{ kinds: [0], authors: [pubkey], limit: 1 },
@ -346,5 +364,5 @@ export const updateInstanceController: AppController = async (c) => {
c,
);
return c.json(204);
return c.newResponse(null, { status: 204 });
};
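Editor's sketch — the zap-split controllers above all operate on a single admin-owned kind 30078 list addressed as pub.ditto.zapSplits, with one 'p' tag per recipient. Hypothetical helpers showing the tag shape maintained through updateListAdminEvent; the assumption is that addTag/deleteTag take (tags, tag) and return the updated tag list, as their use in the reducers above suggests.

import { addTag, deleteTag } from '@/utils/tags.ts';

// One recipient entry: ['p', <recipient pubkey>, <weight as string>, <message>]
function addZapSplit(tags: string[][], pubkey: string, weight: number, message: string) {
  return addTag(tags, ['p', pubkey, weight.toString(), message]);
}

function removeZapSplit(tags: string[][], pubkey: string) {
  return deleteTag(tags, ['p', pubkey]);
}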

View file

@ -1,7 +1,6 @@
import denoJson from 'deno.json' with { type: 'json' };
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
const version = `3.0.0 (compatible; Ditto ${denoJson.version})`;
@ -16,9 +15,9 @@ const features = [
];
const instanceV1Controller: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, signal } = c.var;
const { host, protocol } = conf.url;
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const meta = await getInstanceMetadata(relay, signal);
/** Protocol to use for WebSocket URLs, depending on the protocol of the `LOCAL_DOMAIN`. */
const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:';
@ -68,7 +67,7 @@ const instanceV1Controller: AppController = async (c) => {
version,
email: meta.email,
nostr: {
pubkey: conf.pubkey,
pubkey: await conf.signer.getPublicKey(),
relay: `${wsProtocol}//${host}/relay`,
},
rules: [],
@ -76,9 +75,9 @@ const instanceV1Controller: AppController = async (c) => {
};
const instanceV2Controller: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, signal } = c.var;
const { host, protocol } = conf.url;
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const meta = await getInstanceMetadata(relay, signal);
/** Protocol to use for WebSocket URLs, depending on the protocol of the `LOCAL_DOMAIN`. */
const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:';
@ -141,7 +140,7 @@ const instanceV2Controller: AppController = async (c) => {
},
},
nostr: {
pubkey: conf.pubkey,
pubkey: await conf.signer.getPublicKey(),
relay: `${wsProtocol}//${host}/relay`,
},
pleroma: {
@ -165,7 +164,9 @@ const instanceV2Controller: AppController = async (c) => {
};
const instanceDescriptionController: AppController = async (c) => {
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const { relay, signal } = c.var;
const meta = await getInstanceMetadata(relay, signal);
return c.json({
content: meta.about,

View file

@ -14,7 +14,9 @@ interface Marker {
}
export const markersController: AppController = async (c) => {
const pubkey = await c.get('signer')?.getPublicKey()!;
const { user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const timelines = c.req.queries('timeline[]') ?? [];
const results = await kv.getMany<Marker[]>(
@ -37,7 +39,9 @@ const markerDataSchema = z.object({
});
export const updateMarkersController: AppController = async (c) => {
const pubkey = await c.get('signer')?.getPublicKey()!;
const { user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const record = z.record(z.enum(['home', 'notifications']), markerDataSchema).parse(await parseBody(c.req.raw));
const timelines = Object.keys(record) as Timeline[];

View file

@ -21,9 +21,10 @@ const mediaUpdateSchema = z.object({
});
const mediaController: AppController = async (c) => {
const pubkey = await c.get('signer')?.getPublicKey()!;
const { user, signal } = c.var;
const pubkey = await user!.signer.getPublicKey();
const result = mediaBodySchema.safeParse(await parseBody(c.req.raw));
const { signal } = c.req.raw;
if (!result.success) {
return c.json({ error: 'Bad request.', schema: result.error }, 422);

View file

@ -1,15 +1,14 @@
import { type AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { getTagSet } from '@/utils/tags.ts';
import { renderAccounts } from '@/views.ts';
/** https://docs.joinmastodon.org/methods/mutes/#get */
const mutesController: AppController = async (c) => {
const store = await Storages.db();
const pubkey = await c.get('signer')?.getPublicKey()!;
const { signal } = c.req.raw;
const { relay, user, signal } = c.var;
const [event10000] = await store.query(
const pubkey = await user!.signer.getPublicKey();
const [event10000] = await relay.query(
[{ kinds: [10000], authors: [pubkey], limit: 1 }],
{ signal },
);
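Editor's sketch — the hunk is cut off here, but the remainder of the controller presumably follows the same NIP-51 list pattern used elsewhere in this diff: read the 'p' tags off the kind 10000 mute list (roughly what the imported getTagSet does) and render them as accounts. A hedged approximation, not the file's actual code:

const mutedPubkeys = (event10000?.tags ?? [])
  .filter(([name]) => name === 'p')
  .map(([, pubkey]) => pubkey)
  .filter((pubkey): pubkey is string => Boolean(pubkey));

return renderAccounts(c, mutedPubkeys);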

View file

@ -1,10 +1,10 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';
import { AppContext, AppController } from '@/app.ts';
import { DittoPagination } from '@/interfaces/DittoPagination.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { paginated } from '@/utils/api.ts';
import { renderNotification } from '@/views/mastodon/notifications.ts';
/** Set of known notification types across backends. */
@ -30,8 +30,9 @@ const notificationsSchema = z.object({
});
const notificationsController: AppController = async (c) => {
const { conf } = c.var;
const pubkey = await c.get('signer')?.getPublicKey()!;
const { conf, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const params = c.get('pagination');
const types = notificationTypes
@ -68,29 +69,30 @@ const notificationsController: AppController = async (c) => {
}
if (types.has('ditto:name_grant') && !account_id) {
filters.push({ kinds: [30360], authors: [conf.pubkey], '#p': [pubkey], ...params });
filters.push({ kinds: [30360], authors: [await conf.signer.getPublicKey()], '#p': [pubkey], ...params });
}
return renderNotifications(filters, types, params, c);
};
const notificationController: AppController = async (c) => {
const { relay, user } = c.var;
const id = c.req.param('id');
const pubkey = await c.get('signer')?.getPublicKey()!;
const store = c.get('store');
const pubkey = await user!.signer.getPublicKey();
// Remove the timestamp from the ID.
const eventId = id.replace(/^\d+-/, '');
const [event] = await store.query([{ ids: [eventId] }]);
const [event] = await relay.query([{ ids: [eventId] }]);
if (!event) {
return c.json({ error: 'Event not found' }, { status: 404 });
}
await hydrateEvents({ events: [event], store });
await hydrateEvents({ ...c.var, events: [event] });
const notification = await renderNotification(event, { viewerPubkey: pubkey });
const notification = await renderNotification(relay, event, { viewerPubkey: pubkey });
if (!notification) {
return c.json({ error: 'Notification not found' }, { status: 404 });
@ -105,23 +107,23 @@ async function renderNotifications(
params: DittoPagination,
c: AppContext,
) {
const { conf } = c.var;
const store = c.get('store');
const pubkey = await c.get('signer')?.getPublicKey()!;
const { signal } = c.req.raw;
const { conf, user, signal } = c.var;
const relay = user!.relay;
const pubkey = await user!.signer.getPublicKey();
const opts = { signal, limit: params.limit, timeout: conf.db.timeouts.timelines };
const events = await store
const events = await relay
.query(filters, opts)
.then((events) => events.filter((event) => event.pubkey !== pubkey))
.then((events) => hydrateEvents({ events, store, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
if (!events.length) {
return c.json([]);
}
const notifications = (await Promise.all(events.map((event) => {
return renderNotification(event, { viewerPubkey: pubkey });
return renderNotification(relay, event, { viewerPubkey: pubkey });
})))
.filter((notification) => notification && types.has(notification.type));

View file

@ -3,8 +3,7 @@ import { escape } from 'entities';
import { generateSecretKey } from 'nostr-tools';
import { z } from 'zod';
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { AppContext, AppController } from '@/app.ts';
import { nostrNow } from '@/utils.ts';
import { parseBody } from '@/utils/api.ts';
import { aesEncrypt } from '@/utils/aes.ts';
@ -40,6 +39,7 @@ const createTokenSchema = z.discriminatedUnion('grant_type', [
const createTokenController: AppController = async (c) => {
const { conf } = c.var;
const body = await parseBody(c.req.raw);
const result = createTokenSchema.safeParse(body);
@ -50,7 +50,7 @@ const createTokenController: AppController = async (c) => {
switch (result.data.grant_type) {
case 'nostr_bunker':
return c.json({
access_token: await getToken(result.data, conf.seckey),
access_token: await getToken(c, result.data, conf.seckey),
token_type: 'Bearer',
scope: 'read write follow push',
created_at: nostrNow(),
@ -90,6 +90,8 @@ const revokeTokenSchema = z.object({
* https://docs.joinmastodon.org/methods/oauth/#revoke
*/
const revokeTokenController: AppController = async (c) => {
const { db } = c.var;
const body = await parseBody(c.req.raw);
const result = revokeTokenSchema.safeParse(body);
@ -99,10 +101,9 @@ const revokeTokenController: AppController = async (c) => {
const { token } = result.data;
const kysely = await Storages.kysely();
const tokenHash = await getTokenHash(token as `token1${string}`);
await kysely
await db.kysely
.deleteFrom('auth_tokens')
.where('token_hash', '=', tokenHash)
.execute();
@ -111,10 +112,11 @@ const revokeTokenController: AppController = async (c) => {
};
async function getToken(
c: AppContext,
{ pubkey: bunkerPubkey, secret, relays = [] }: { pubkey: string; secret?: string; relays?: string[] },
dittoSeckey: Uint8Array,
): Promise<`token1${string}`> {
const kysely = await Storages.kysely();
const { db, relay } = c.var;
const { token, hash } = await generateToken();
const nip46Seckey = generateSecretKey();
@ -123,14 +125,14 @@ async function getToken(
encryption: 'nip44',
pubkey: bunkerPubkey,
signer: new NSecSigner(nip46Seckey),
relay: await Storages.pubsub(), // TODO: Use the relays from the request.
relay,
timeout: 60_000,
});
await signer.connect(secret);
const userPubkey = await signer.getPublicKey();
await kysely.insertInto('auth_tokens').values({
await db.kysely.insertInto('auth_tokens').values({
token_hash: hash,
pubkey: userPubkey,
bunker_pubkey: bunkerPubkey,
@ -236,7 +238,7 @@ const oauthAuthorizeController: AppController = async (c) => {
const bunker = new URL(bunker_uri);
const token = await getToken({
const token = await getToken(c, {
pubkey: bunker.hostname,
secret: bunker.searchParams.get('secret') || undefined,
relays: bunker.searchParams.getAll('relay'),

View file

@ -2,15 +2,14 @@ import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { configSchema, elixirTupleSchema } from '@/schemas/pleroma-api.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
import { Storages } from '@/storages.ts';
import { createAdminEvent, updateAdminEvent, updateUser } from '@/utils/api.ts';
import { lookupPubkey } from '@/utils/lookup.ts';
import { getPleromaConfigs } from '@/utils/pleroma.ts';
const frontendConfigController: AppController = async (c) => {
const store = await Storages.db();
const configDB = await getPleromaConfigs(store, c.req.raw.signal);
const { relay, signal } = c.var;
const configDB = await getPleromaConfigs(relay, signal);
const frontendConfig = configDB.get(':pleroma', ':frontend_configurations');
if (frontendConfig) {
@ -26,25 +25,24 @@ const frontendConfigController: AppController = async (c) => {
};
const configController: AppController = async (c) => {
const store = await Storages.db();
const configs = await getPleromaConfigs(store, c.req.raw.signal);
const { relay, signal } = c.var;
const configs = await getPleromaConfigs(relay, signal);
return c.json({ configs, need_reboot: false });
};
/** Pleroma admin config controller. */
const updateConfigController: AppController = async (c) => {
const { conf } = c.var;
const { pubkey } = conf;
const { conf, relay, signal } = c.var;
const store = await Storages.db();
const configs = await getPleromaConfigs(store, c.req.raw.signal);
const configs = await getPleromaConfigs(relay, signal);
const { configs: newConfigs } = z.object({ configs: z.array(configSchema) }).parse(await c.req.json());
configs.merge(newConfigs);
await createAdminEvent({
kind: 30078,
content: await new AdminSigner().nip44.encrypt(pubkey, JSON.stringify(configs)),
content: await conf.signer.nip44.encrypt(await conf.signer.getPublicKey(), JSON.stringify(configs)),
tags: [
['d', 'pub.ditto.pleroma.config'],
['encrypted', 'nip44'],
@ -73,11 +71,11 @@ const pleromaAdminTagController: AppController = async (c) => {
const params = pleromaAdminTagSchema.parse(await c.req.json());
for (const nickname of params.nicknames) {
const pubkey = await lookupPubkey(nickname);
const pubkey = await lookupPubkey(nickname, c.var);
if (!pubkey) continue;
await updateAdminEvent(
{ kinds: [30382], authors: [conf.pubkey], '#d': [pubkey], limit: 1 },
{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey], limit: 1 },
(prev) => {
const tags = prev?.tags ?? [['d', pubkey]];
@ -98,7 +96,7 @@ const pleromaAdminTagController: AppController = async (c) => {
);
}
return new Response(null, { status: 204 });
return c.newResponse(null, { status: 204 });
};
const pleromaAdminUntagController: AppController = async (c) => {
@ -106,11 +104,11 @@ const pleromaAdminUntagController: AppController = async (c) => {
const params = pleromaAdminTagSchema.parse(await c.req.json());
for (const nickname of params.nicknames) {
const pubkey = await lookupPubkey(nickname);
const pubkey = await lookupPubkey(nickname, c.var);
if (!pubkey) continue;
await updateAdminEvent(
{ kinds: [30382], authors: [conf.pubkey], '#d': [pubkey], limit: 1 },
{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey], limit: 1 },
(prev) => ({
kind: 30382,
content: prev?.content ?? '',
@ -121,7 +119,7 @@ const pleromaAdminUntagController: AppController = async (c) => {
);
}
return new Response(null, { status: 204 });
return c.newResponse(null, { status: 204 });
};
const pleromaAdminSuggestSchema = z.object({
@ -132,24 +130,24 @@ const pleromaAdminSuggestController: AppController = async (c) => {
const { nicknames } = pleromaAdminSuggestSchema.parse(await c.req.json());
for (const nickname of nicknames) {
const pubkey = await lookupPubkey(nickname);
const pubkey = await lookupPubkey(nickname, c.var);
if (!pubkey) continue;
await updateUser(pubkey, { suggested: true }, c);
}
return new Response(null, { status: 204 });
return c.newResponse(null, { status: 204 });
};
const pleromaAdminUnsuggestController: AppController = async (c) => {
const { nicknames } = pleromaAdminSuggestSchema.parse(await c.req.json());
for (const nickname of nicknames) {
const pubkey = await lookupPubkey(nickname);
const pubkey = await lookupPubkey(nickname, c.var);
if (!pubkey) continue;
await updateUser(pubkey, { suggested: false }, c);
}
return new Response(null, { status: 204 });
return c.newResponse(null, { status: 204 });
};
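Editor's sketch — the updated config path above stores the Pleroma config as a kind 30078 event, NIP-44-encrypted by the admin signer to its own pubkey under the d-tag pub.ditto.pleroma.config. Reading it back presumably looks something like the following (an assumption about what getPleromaConfigs wraps, including the nip44.decrypt call):

const adminPubkey = await conf.signer.getPublicKey();

const [event] = await relay.query([
  { kinds: [30078], authors: [adminPubkey], '#d': ['pub.ditto.pleroma.config'], limit: 1 },
], { signal });

const configs = event
  ? JSON.parse(await conf.signer.nip44.decrypt(adminPubkey, event.content))
  : [];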
export {

View file

@ -3,7 +3,6 @@ import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { parseBody } from '@/utils/api.ts';
import { getTokenHash } from '@/utils/auth.ts';
@ -42,7 +41,7 @@ const pushSubscribeSchema = z.object({
});
export const pushSubscribeController: AppController = async (c) => {
const { conf } = c.var;
const { conf, db, user } = c.var;
const vapidPublicKey = await conf.vapidPublicKey;
if (!vapidPublicKey) {
@ -50,9 +49,7 @@ export const pushSubscribeController: AppController = async (c) => {
}
const accessToken = getAccessToken(c.req.raw);
const kysely = await Storages.kysely();
const signer = c.get('signer')!;
const signer = user!.signer;
const result = pushSubscribeSchema.safeParse(await parseBody(c.req.raw));
@ -65,7 +62,7 @@ export const pushSubscribeController: AppController = async (c) => {
const pubkey = await signer.getPublicKey();
const tokenHash = await getTokenHash(accessToken);
const { id } = await kysely.transaction().execute(async (trx) => {
const { id } = await db.kysely.transaction().execute(async (trx) => {
await trx
.deleteFrom('push_subscriptions')
.where('token_hash', '=', tokenHash)
@ -97,7 +94,7 @@ export const pushSubscribeController: AppController = async (c) => {
};
export const getSubscriptionController: AppController = async (c) => {
const { conf } = c.var;
const { conf, db } = c.var;
const vapidPublicKey = await conf.vapidPublicKey;
if (!vapidPublicKey) {
@ -106,10 +103,9 @@ export const getSubscriptionController: AppController = async (c) => {
const accessToken = getAccessToken(c.req.raw);
const kysely = await Storages.kysely();
const tokenHash = await getTokenHash(accessToken);
const row = await kysely
const row = await db.kysely
.selectFrom('push_subscriptions')
.selectAll()
.where('token_hash', '=', tokenHash)

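The Kysely handle likewise moves off Storages.kysely() onto db.kysely from c.var. A small sketch of reading back a push subscription with that handle, reusing the push_subscriptions table, token_hash column, and the getAccessToken/getTokenHash helpers named above (executeTakeFirst and the 404 fallback are assumptions for illustration):

const exampleGetSubscription: AppController = async (c) => {
  const { db } = c.var;
  const accessToken = getAccessToken(c.req.raw);
  const tokenHash = await getTokenHash(accessToken);
  const row = await db.kysely
    .selectFrom('push_subscriptions')
    .selectAll()
    .where('token_hash', '=', tokenHash)
    .executeTakeFirst();                                  // undefined when nothing is registered
  if (!row) return c.json({ error: 'Not found' }, 404);
  return c.json(row);
};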
View file

@ -1,7 +1,6 @@
import { AppController } from '@/app.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { Storages } from '@/storages.ts';
import { createEvent } from '@/utils/api.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
@ -11,16 +10,15 @@ import { renderStatus } from '@/views/mastodon/statuses.ts';
* https://docs.pleroma.social/backend/development/API/pleroma_api/#put-apiv1pleromastatusesidreactionsemoji
*/
const reactionController: AppController = async (c) => {
const { relay, user } = c.var;
const id = c.req.param('id');
const emoji = c.req.param('emoji');
const signer = c.get('signer')!;
if (!/^\p{RGI_Emoji}$/v.test(emoji)) {
return c.json({ error: 'Invalid emoji' }, 400);
}
const store = await Storages.db();
const [event] = await store.query([{ kinds: [1, 20], ids: [id], limit: 1 }]);
const [event] = await relay.query([{ kinds: [1, 20], ids: [id], limit: 1 }]);
if (!event) {
return c.json({ error: 'Status not found' }, 404);
@ -33,9 +31,9 @@ const reactionController: AppController = async (c) => {
tags: [['e', id], ['p', event.pubkey]],
}, c);
await hydrateEvents({ events: [event], store });
await hydrateEvents({ ...c.var, events: [event] });
const status = await renderStatus(event, { viewerPubkey: await signer.getPublicKey() });
const status = await renderStatus(relay, event, { viewerPubkey: await user!.signer.getPublicKey() });
return c.json(status);
};
@ -45,17 +43,17 @@ const reactionController: AppController = async (c) => {
* https://docs.pleroma.social/backend/development/API/pleroma_api/#delete-apiv1pleromastatusesidreactionsemoji
*/
const deleteReactionController: AppController = async (c) => {
const { relay, user } = c.var;
const id = c.req.param('id');
const emoji = c.req.param('emoji');
const signer = c.get('signer')!;
const pubkey = await signer.getPublicKey();
const store = await Storages.db();
const pubkey = await user!.signer.getPublicKey();
if (!/^\p{RGI_Emoji}$/v.test(emoji)) {
return c.json({ error: 'Invalid emoji' }, 400);
}
const [event] = await store.query([
const [event] = await relay.query([
{ kinds: [1, 20], ids: [id], limit: 1 },
]);
@ -63,7 +61,7 @@ const deleteReactionController: AppController = async (c) => {
return c.json({ error: 'Status not found' }, 404);
}
const events = await store.query([
const events = await relay.query([
{ kinds: [7], authors: [pubkey], '#e': [id] },
]);
@ -78,7 +76,7 @@ const deleteReactionController: AppController = async (c) => {
tags,
}, c);
const status = renderStatus(event, { viewerPubkey: pubkey });
const status = renderStatus(relay, event, { viewerPubkey: pubkey });
return c.json(status);
};
@ -88,19 +86,20 @@ const deleteReactionController: AppController = async (c) => {
* https://docs.pleroma.social/backend/development/API/pleroma_api/#get-apiv1pleromastatusesidreactions
*/
const reactionsController: AppController = async (c) => {
const { relay, user } = c.var;
const id = c.req.param('id');
const store = await Storages.db();
const pubkey = await c.get('signer')?.getPublicKey();
const pubkey = await user?.signer.getPublicKey();
const emoji = c.req.param('emoji') as string | undefined;
if (typeof emoji === 'string' && !/^\p{RGI_Emoji}$/v.test(emoji)) {
return c.json({ error: 'Invalid emoji' }, 400);
}
const events = await store.query([{ kinds: [7], '#e': [id], limit: 100 }])
const events = await relay.query([{ kinds: [7], '#e': [id], limit: 100 }])
.then((events) => events.filter(({ content }) => /^\p{RGI_Emoji}$/v.test(content)))
.then((events) => events.filter((event) => !emoji || event.content === emoji))
.then((events) => hydrateEvents({ events, store }));
.then((events) => hydrateEvents({ ...c.var, events }));
/** Events grouped by emoji. */
const byEmoji = events.reduce((acc, event) => {

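Two signature changes run through this file: hydrateEvents now takes the whole c.var bag spread next to events, and renderStatus takes the relay as its first argument. A compact sketch of that flow under those assumptions (the filter mirrors the reaction lookup above):

const exampleStatusLookup: AppController = async (c) => {
  const { relay, user } = c.var;
  const id = c.req.param('id');
  const [event] = await relay.query([{ kinds: [1, 20], ids: [id], limit: 1 }]);
  if (!event) return c.json({ error: 'Status not found' }, 404);
  await hydrateEvents({ ...c.var, events: [event] });     // hydration pulls relay/db/signal from c.var
  const viewerPubkey = await user?.signer.getPublicKey();
  return c.json(await renderStatus(relay, event, { viewerPubkey }));
};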
View file

@ -1,8 +1,9 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { createEvent, paginated, parseBody, updateEventInfo } from '@/utils/api.ts';
import { createEvent, parseBody, updateEventInfo } from '@/utils/api.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { renderAdminReport } from '@/views/mastodon/reports.ts';
import { renderReport } from '@/views/mastodon/reports.ts';
@ -19,7 +20,7 @@ const reportSchema = z.object({
/** https://docs.joinmastodon.org/methods/reports/#post */
const reportController: AppController = async (c) => {
const { conf } = c.var;
const store = c.get('store');
const body = await parseBody(c.req.raw);
const result = reportSchema.safeParse(body);
@ -36,7 +37,7 @@ const reportController: AppController = async (c) => {
const tags = [
['p', account_id, category],
['P', conf.pubkey],
['P', await conf.signer.getPublicKey()],
];
for (const status of status_ids) {
@ -49,7 +50,7 @@ const reportController: AppController = async (c) => {
tags,
}, c);
await hydrateEvents({ events: [event], store });
await hydrateEvents({ ...c.var, events: [event] });
return c.json(await renderReport(event));
};
@ -61,18 +62,16 @@ const adminReportsSchema = z.object({
/** https://docs.joinmastodon.org/methods/admin/reports/#get */
const adminReportsController: AppController = async (c) => {
const { conf } = c.var;
const store = c.get('store');
const viewerPubkey = await c.get('signer')?.getPublicKey();
const { conf, relay, user, pagination } = c.var;
const params = c.get('pagination');
const viewerPubkey = await user?.signer.getPublicKey();
const { resolved, account_id, target_account_id } = adminReportsSchema.parse(c.req.query());
const filter: NostrFilter = {
kinds: [30383],
authors: [conf.pubkey],
authors: [await conf.signer.getPublicKey()],
'#k': ['1984'],
...params,
...pagination,
};
if (typeof resolved === 'boolean') {
@ -85,7 +84,7 @@ const adminReportsController: AppController = async (c) => {
filter['#P'] = [target_account_id];
}
const orig = await store.query([filter]);
const orig = await relay.query([filter]);
const ids = new Set<string>();
for (const event of orig) {
@ -95,11 +94,11 @@ const adminReportsController: AppController = async (c) => {
}
}
const events = await store.query([{ kinds: [1984], ids: [...ids] }])
.then((events) => hydrateEvents({ store, events: events, signal: c.req.raw.signal }));
const events = await relay.query([{ kinds: [1984], ids: [...ids] }])
.then((events) => hydrateEvents({ ...c.var, events }));
const reports = await Promise.all(
events.map((event) => renderAdminReport(event, { viewerPubkey })),
events.map((event) => renderAdminReport(relay, event, { viewerPubkey })),
);
return paginated(c, orig, reports);
@ -107,12 +106,12 @@ const adminReportsController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/admin/reports/#get-one */
const adminReportController: AppController = async (c) => {
const eventId = c.req.param('id');
const { signal } = c.req.raw;
const store = c.get('store');
const pubkey = await c.get('signer')?.getPublicKey();
const { relay, user, signal } = c.var;
const [event] = await store.query([{
const eventId = c.req.param('id');
const pubkey = await user?.signer.getPublicKey();
const [event] = await relay.query([{
kinds: [1984],
ids: [eventId],
limit: 1,
@ -122,20 +121,20 @@ const adminReportController: AppController = async (c) => {
return c.json({ error: 'Not found' }, 404);
}
await hydrateEvents({ events: [event], store, signal });
await hydrateEvents({ ...c.var, events: [event] });
const report = await renderAdminReport(event, { viewerPubkey: pubkey });
const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey });
return c.json(report);
};
/** https://docs.joinmastodon.org/methods/admin/reports/#resolve */
const adminReportResolveController: AppController = async (c) => {
const eventId = c.req.param('id');
const { signal } = c.req.raw;
const store = c.get('store');
const pubkey = await c.get('signer')?.getPublicKey();
const { relay, user, signal } = c.var;
const [event] = await store.query([{
const eventId = c.req.param('id');
const pubkey = await user?.signer.getPublicKey();
const [event] = await relay.query([{
kinds: [1984],
ids: [eventId],
limit: 1,
@ -146,19 +145,19 @@ const adminReportResolveController: AppController = async (c) => {
}
await updateEventInfo(eventId, { open: false, closed: true }, c);
await hydrateEvents({ events: [event], store, signal });
await hydrateEvents({ ...c.var, events: [event] });
const report = await renderAdminReport(event, { viewerPubkey: pubkey });
const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey });
return c.json(report);
};
const adminReportReopenController: AppController = async (c) => {
const eventId = c.req.param('id');
const { signal } = c.req.raw;
const store = c.get('store');
const pubkey = await c.get('signer')?.getPublicKey();
const { relay, user, signal } = c.var;
const [event] = await store.query([{
const eventId = c.req.param('id');
const pubkey = await user?.signer.getPublicKey();
const [event] = await relay.query([{
kinds: [1984],
ids: [eventId],
limit: 1,
@ -169,9 +168,9 @@ const adminReportReopenController: AppController = async (c) => {
}
await updateEventInfo(eventId, { open: true, closed: false }, c);
await hydrateEvents({ events: [event], store, signal });
await hydrateEvents({ ...c.var, events: [event] });
const report = await renderAdminReport(event, { viewerPubkey: pubkey });
const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey });
return c.json(report);
};
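Pagination helpers now come from @ditto/mastoapi/pagination and the parsed params are read from c.var.pagination instead of c.get('pagination'). A sketch of the report-listing shape under those assumptions; the 30383-to-1984 indirection follows the hunk above, but which tag on the 30383 wrapper carries the report id is not visible here, so the 'e' tag below is an assumption:

const exampleAdminReports: AppController = async (c) => {
  const { conf, relay, user, pagination } = c.var;
  const viewerPubkey = await user?.signer.getPublicKey();
  const orig = await relay.query([{
    kinds: [30383],
    authors: [await conf.signer.getPublicKey()],
    '#k': ['1984'],
    ...pagination,                                        // since/until/limit parsed by middleware
  }]);
  // Assumed: the wrapper's 'e' tags point at the underlying kind 1984 reports.
  const ids = orig.flatMap((event) => event.tags.filter(([name]) => name === 'e').map((tag) => tag[1]));
  const events = await relay.query([{ kinds: [1984], ids }])
    .then((events) => hydrateEvents({ ...c.var, events }));
  const reports = await Promise.all(events.map((event) => renderAdminReport(relay, event, { viewerPubkey })));
  return paginated(c, orig, reports);
};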

View file

@ -1,18 +1,17 @@
import { paginated, paginatedList } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { AppController } from '@/app.ts';
import { AppContext, AppController } from '@/app.ts';
import { booleanParamSchema } from '@/schema.ts';
import { Storages } from '@/storages.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { extractIdentifier, lookupPubkey } from '@/utils/lookup.ts';
import { nip05Cache } from '@/utils/nip05.ts';
import { lookupNip05 } from '@/utils/nip05.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
import { getFollowedPubkeys } from '@/queries.ts';
import { getPubkeysBySearch } from '@/utils/search.ts';
import { paginated, paginatedList } from '@/utils/api.ts';
const searchQuerySchema = z.object({
q: z.string().transform(decodeURIComponent),
@ -26,21 +25,21 @@ const searchQuerySchema = z.object({
type SearchQuery = z.infer<typeof searchQuerySchema> & { since?: number; until?: number; limit: number };
const searchController: AppController = async (c) => {
const { relay, user, pagination, signal } = c.var;
const result = searchQuerySchema.safeParse(c.req.query());
const params = c.get('pagination');
const { signal } = c.req.raw;
const viewerPubkey = await c.get('signer')?.getPublicKey();
const viewerPubkey = await user?.signer.getPublicKey();
if (!result.success) {
return c.json({ error: 'Bad request', schema: result.error }, 422);
}
const event = await lookupEvent({ ...result.data, ...params }, signal);
const event = await lookupEvent(c, { ...result.data, ...pagination });
const lookup = extractIdentifier(result.data.q);
// Render account from pubkey.
if (!event && lookup) {
const pubkey = await lookupPubkey(lookup);
const pubkey = await lookupPubkey(lookup, c.var);
return c.json({
accounts: pubkey ? [accountFromPubkey(pubkey)] : [],
statuses: [],
@ -54,7 +53,7 @@ const searchController: AppController = async (c) => {
events = [event];
}
events.push(...(await searchEvents({ ...result.data, ...params, viewerPubkey }, signal)));
events.push(...(await searchEvents(c, { ...result.data, ...pagination, viewerPubkey }, signal)));
const [accounts, statuses] = await Promise.all([
Promise.all(
@ -66,7 +65,7 @@ const searchController: AppController = async (c) => {
Promise.all(
events
.filter((event) => event.kind === 1)
.map((event) => renderStatus(event, { viewerPubkey }))
.map((event) => renderStatus(relay, event, { viewerPubkey }))
.filter(Boolean),
),
]);
@ -78,7 +77,7 @@ const searchController: AppController = async (c) => {
};
if (result.data.type === 'accounts') {
return paginatedList(c, { ...result.data, ...params }, body);
return paginatedList(c, { ...result.data, ...pagination }, body);
} else {
return paginated(c, events, body);
}
@ -86,16 +85,17 @@ const searchController: AppController = async (c) => {
/** Get events for the search params. */
async function searchEvents(
c: AppContext,
{ q, type, since, until, limit, offset, account_id, viewerPubkey }: SearchQuery & { viewerPubkey?: string },
signal: AbortSignal,
): Promise<NostrEvent[]> {
const { relay, db } = c.var;
// Hashtag search is not supported.
if (type === 'hashtags') {
return Promise.resolve([]);
}
const store = await Storages.search();
const filter: NostrFilter = {
kinds: typeToKinds(type),
search: q,
@ -104,12 +104,10 @@ async function searchEvents(
limit,
};
const kysely = await Storages.kysely();
// For account search, use a special index, and prioritize followed accounts.
if (type === 'accounts') {
const following = viewerPubkey ? await getFollowedPubkeys(viewerPubkey) : new Set<string>();
const searchPubkeys = await getPubkeysBySearch(kysely, { q, limit, offset, following });
const following = viewerPubkey ? await getFollowedPubkeys(relay, viewerPubkey) : new Set<string>();
const searchPubkeys = await getPubkeysBySearch(db.kysely, { q, limit, offset, following });
filter.authors = [...searchPubkeys];
filter.search = undefined;
@ -121,9 +119,9 @@ async function searchEvents(
}
// Query the events.
let events = await store
let events = await relay
.query([filter], { signal })
.then((events) => hydrateEvents({ events, store, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
// When using an authors filter, return the events in the same order as the filter.
if (filter.authors) {
@ -148,17 +146,17 @@ function typeToKinds(type: SearchQuery['type']): number[] {
}
/** Resolve a searched value into an event, if applicable. */
async function lookupEvent(query: SearchQuery, signal: AbortSignal): Promise<NostrEvent | undefined> {
const filters = await getLookupFilters(query, signal);
const store = await Storages.search();
async function lookupEvent(c: AppContext, query: SearchQuery): Promise<NostrEvent | undefined> {
const { relay, signal } = c.var;
const filters = await getLookupFilters(c, query);
return store.query(filters, { limit: 1, signal })
.then((events) => hydrateEvents({ events, store, signal }))
return relay.query(filters, { signal })
.then((events) => hydrateEvents({ ...c.var, events }))
.then(([event]) => event);
}
/** Get filters to lookup the input value. */
async function getLookupFilters({ q, type, resolve }: SearchQuery, signal: AbortSignal): Promise<NostrFilter[]> {
async function getLookupFilters(c: AppContext, { q, type, resolve }: SearchQuery): Promise<NostrFilter[]> {
const accounts = !type || type === 'accounts';
const statuses = !type || type === 'statuses';
@ -199,7 +197,7 @@ async function getLookupFilters({ q, type, resolve }: SearchQuery, signal: Abort
}
try {
const { pubkey } = await nip05Cache.fetch(lookup, { signal });
const { pubkey } = await lookupNip05(lookup, c.var);
if (pubkey) {
return [{ kinds: [0], authors: [pubkey] }];
}
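The NIP-05 path moves from the module-level nip05Cache to a lookupNip05 helper that receives c.var, mirroring lookupPubkey. A short sketch of turning a handle into a kind 0 author filter under that assumption (the example handle is hypothetical):

async function exampleHandleToFilters(c: AppContext, handle: string): Promise<NostrFilter[]> {
  // e.g. handle = 'user@example.com'; lookupNip05 is assumed to resolve it via .well-known/nostr.json
  const { pubkey } = await lookupNip05(handle, c.var);
  return pubkey ? [{ kinds: [0], authors: [pubkey] }] : [];
}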

View file

@ -1,4 +1,5 @@
import { HTTPException } from '@hono/hono/http-exception';
import { paginated, paginatedList, paginationSchema } from '@ditto/mastoapi/pagination';
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
import 'linkify-plugin-hashtag';
import linkify from 'linkifyjs';
@ -13,9 +14,8 @@ import { addTag, deleteTag } from '@/utils/tags.ts';
import { asyncReplaceAll } from '@/utils/text.ts';
import { lookupPubkey } from '@/utils/lookup.ts';
import { languageSchema } from '@/schema.ts';
import { Storages } from '@/storages.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { assertAuthenticated, createEvent, paginated, paginatedList, parseBody, updateListEvent } from '@/utils/api.ts';
import { assertAuthenticated, createEvent, parseBody, updateListEvent } from '@/utils/api.ts';
import { getInvoice, getLnurl } from '@/utils/lnurl.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { getZapSplits } from '@/utils/zap-split.ts';
@ -46,18 +46,18 @@ const createStatusSchema = z.object({
);
const statusController: AppController = async (c) => {
const id = c.req.param('id');
const signal = AbortSignal.any([c.req.raw.signal, AbortSignal.timeout(1500)]);
const { relay, user } = c.var;
const event = await getEvent(id, { signal });
const id = c.req.param('id');
const event = await getEvent(id, c.var);
if (event?.author) {
assertAuthenticated(c, event.author);
}
if (event) {
const viewerPubkey = await c.get('signer')?.getPublicKey();
const status = await renderStatus(event, { viewerPubkey });
const viewerPubkey = await user?.signer.getPublicKey();
const status = await renderStatus(relay, event, { viewerPubkey });
return c.json(status);
}
@ -65,10 +65,10 @@ const statusController: AppController = async (c) => {
};
const createStatusController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, user } = c.var;
const body = await parseBody(c.req.raw);
const result = createStatusSchema.safeParse(body);
const store = c.get('store');
if (!result.success) {
return c.json({ error: 'Bad request', schema: result.error }, 400);
@ -87,14 +87,14 @@ const createStatusController: AppController = async (c) => {
const tags: string[][] = [];
if (data.in_reply_to_id) {
const [ancestor] = await store.query([{ ids: [data.in_reply_to_id] }]);
const [ancestor] = await relay.query([{ ids: [data.in_reply_to_id] }]);
if (!ancestor) {
return c.json({ error: 'Original post not found.' }, 404);
}
const rootId = ancestor.tags.find((tag) => tag[0] === 'e' && tag[3] === 'root')?.[1] ?? ancestor.id;
const root = rootId === ancestor.id ? ancestor : await store.query([{ ids: [rootId] }]).then(([event]) => event);
const root = rootId === ancestor.id ? ancestor : await relay.query([{ ids: [rootId] }]).then(([event]) => event);
if (root) {
tags.push(['e', root.id, conf.relay, 'root', root.pubkey]);
@ -108,7 +108,7 @@ const createStatusController: AppController = async (c) => {
let quoted: DittoEvent | undefined;
if (data.quote_id) {
[quoted] = await store.query([{ ids: [data.quote_id] }]);
[quoted] = await relay.query([{ ids: [data.quote_id] }]);
if (!quoted) {
return c.json({ error: 'Quoted post not found.' }, 404);
@ -153,7 +153,7 @@ const createStatusController: AppController = async (c) => {
data.status ?? '',
/(?<![\w/])@([\w@+._-]+)(?![\w/\.])/g,
async (match, username) => {
const pubkey = await lookupPubkey(username);
const pubkey = await lookupPubkey(username, c.var);
if (!pubkey) return match;
// Content addressing (default)
@ -171,7 +171,7 @@ const createStatusController: AppController = async (c) => {
// Explicit addressing
for (const to of data.to ?? []) {
const pubkey = await lookupPubkey(to);
const pubkey = await lookupPubkey(to, c.var);
if (pubkey) {
pubkeys.add(pubkey);
}
@ -190,13 +190,13 @@ const createStatusController: AppController = async (c) => {
}
}
const pubkey = await c.get('signer')?.getPublicKey()!;
const author = pubkey ? await getAuthor(pubkey) : undefined;
const pubkey = await user!.signer.getPublicKey();
const author = pubkey ? await getAuthor(pubkey, c.var) : undefined;
if (conf.zapSplitsEnabled) {
const meta = n.json().pipe(n.metadata()).catch({}).parse(author?.content);
const lnurl = getLnurl(meta);
const dittoZapSplit = await getZapSplits(store, conf.pubkey);
const dittoZapSplit = await getZapSplits(relay, await conf.signer.getPublicKey());
if (lnurl && dittoZapSplit) {
const totalSplit = Object.values(dittoZapSplit).reduce((total, { weight }) => total + weight, 0);
for (const zapPubkey in dittoZapSplit) {
@ -254,22 +254,18 @@ const createStatusController: AppController = async (c) => {
}, c);
if (data.quote_id) {
await hydrateEvents({
events: [event],
store: await Storages.db(),
signal: c.req.raw.signal,
});
await hydrateEvents({ ...c.var, events: [event] });
}
return c.json(await renderStatus({ ...event, author }, { viewerPubkey: author?.pubkey }));
return c.json(await renderStatus(relay, { ...event, author }, { viewerPubkey: author?.pubkey }));
};
const deleteStatusController: AppController = async (c) => {
const { conf } = c.var;
const id = c.req.param('id');
const pubkey = await c.get('signer')?.getPublicKey();
const { conf, relay, user } = c.var;
const event = await getEvent(id, { signal: c.req.raw.signal });
const id = c.req.param('id');
const pubkey = await user?.signer.getPublicKey();
const event = await getEvent(id, c.var);
if (event) {
if (event.pubkey === pubkey) {
@ -278,8 +274,8 @@ const deleteStatusController: AppController = async (c) => {
tags: [['e', id, conf.relay, '', pubkey]],
}, c);
const author = await getAuthor(event.pubkey);
return c.json(await renderStatus({ ...event, author }, { viewerPubkey: pubkey }));
const author = await getAuthor(event.pubkey, c.var);
return c.json(await renderStatus(relay, { ...event, author }, { viewerPubkey: pubkey }));
} else {
return c.json({ error: 'Unauthorized' }, 403);
}
@ -289,29 +285,26 @@ const deleteStatusController: AppController = async (c) => {
};
const contextController: AppController = async (c) => {
const { relay, user } = c.var;
const id = c.req.param('id');
const store = c.get('store');
const [event] = await store.query([{ kinds: [1, 20], ids: [id] }]);
const viewerPubkey = await c.get('signer')?.getPublicKey();
const [event] = await relay.query([{ kinds: [1, 20], ids: [id] }]);
const viewerPubkey = await user?.signer.getPublicKey();
async function renderStatuses(events: NostrEvent[]) {
const statuses = await Promise.all(
events.map((event) => renderStatus(event, { viewerPubkey })),
events.map((event) => renderStatus(relay, event, { viewerPubkey })),
);
return statuses.filter(Boolean);
}
if (event) {
const [ancestorEvents, descendantEvents] = await Promise.all([
getAncestors(store, event),
getDescendants(store, event),
getAncestors(relay, event),
getDescendants(relay, event),
]);
await hydrateEvents({
events: [...ancestorEvents, ...descendantEvents],
signal: c.req.raw.signal,
store,
});
await hydrateEvents({ ...c.var, events: [...ancestorEvents, ...descendantEvents] });
const [ancestors, descendants] = await Promise.all([
renderStatuses(ancestorEvents),
@ -325,10 +318,10 @@ const contextController: AppController = async (c) => {
};
const favouriteController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, user } = c.var;
const id = c.req.param('id');
const store = await Storages.db();
const [target] = await store.query([{ ids: [id], kinds: [1, 20] }]);
const [target] = await relay.query([{ ids: [id], kinds: [1, 20] }]);
if (target) {
await createEvent({
@ -340,9 +333,9 @@ const favouriteController: AppController = async (c) => {
],
}, c);
await hydrateEvents({ events: [target], store });
await hydrateEvents({ ...c.var, events: [target] });
const status = await renderStatus(target, { viewerPubkey: await c.get('signer')?.getPublicKey() });
const status = await renderStatus(relay, target, { viewerPubkey: await user?.signer.getPublicKey() });
if (status) {
status.favourited = true;
@ -366,13 +359,10 @@ const favouritedByController: AppController = (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#boost */
const reblogStatusController: AppController = async (c) => {
const { conf } = c.var;
const eventId = c.req.param('id');
const { signal } = c.req.raw;
const { conf, relay, user } = c.var;
const event = await getEvent(eventId, {
kind: 1,
});
const eventId = c.req.param('id');
const event = await getEvent(eventId, c.var);
if (!event) {
return c.json({ error: 'Event not found.' }, 404);
@ -386,30 +376,26 @@ const reblogStatusController: AppController = async (c) => {
],
}, c);
await hydrateEvents({
events: [reblogEvent],
store: await Storages.db(),
signal: signal,
});
await hydrateEvents({ ...c.var, events: [reblogEvent] });
const status = await renderReblog(reblogEvent, { viewerPubkey: await c.get('signer')?.getPublicKey() });
const status = await renderReblog(relay, reblogEvent, { viewerPubkey: await user?.signer.getPublicKey() });
return c.json(status);
};
/** https://docs.joinmastodon.org/methods/statuses/#unreblog */
const unreblogStatusController: AppController = async (c) => {
const { conf } = c.var;
const eventId = c.req.param('id');
const pubkey = await c.get('signer')?.getPublicKey()!;
const store = await Storages.db();
const { conf, relay, user } = c.var;
const [event] = await store.query([{ ids: [eventId], kinds: [1, 20] }]);
const eventId = c.req.param('id');
const pubkey = await user!.signer.getPublicKey();
const [event] = await relay.query([{ ids: [eventId], kinds: [1, 20] }]);
if (!event) {
return c.json({ error: 'Record not found' }, 404);
}
const [repostEvent] = await store.query(
const [repostEvent] = await relay.query(
[{ kinds: [6], authors: [pubkey], '#e': [event.id], limit: 1 }],
);
@ -422,7 +408,7 @@ const unreblogStatusController: AppController = async (c) => {
tags: [['e', repostEvent.id, conf.relay, '', repostEvent.pubkey]],
}, c);
return c.json(await renderStatus(event, { viewerPubkey: pubkey }));
return c.json(await renderStatus(relay, event, { viewerPubkey: pubkey }));
};
const rebloggedByController: AppController = (c) => {
@ -432,23 +418,23 @@ const rebloggedByController: AppController = (c) => {
};
const quotesController: AppController = async (c) => {
const id = c.req.param('id');
const params = c.get('pagination');
const store = await Storages.db();
const { relay, user, pagination } = c.var;
const [event] = await store.query([{ ids: [id], kinds: [1, 20] }]);
const id = c.req.param('id');
const [event] = await relay.query([{ ids: [id], kinds: [1, 20] }]);
if (!event) {
return c.json({ error: 'Event not found.' }, 404);
}
const quotes = await store
.query([{ kinds: [1, 20], '#q': [event.id], ...params }])
.then((events) => hydrateEvents({ events, store }));
const quotes = await relay
.query([{ kinds: [1, 20], '#q': [event.id], ...pagination }])
.then((events) => hydrateEvents({ ...c.var, events }));
const viewerPubkey = await c.get('signer')?.getPublicKey();
const viewerPubkey = await user?.signer.getPublicKey();
const statuses = await Promise.all(
quotes.map((event) => renderStatus(event, { viewerPubkey })),
quotes.map((event) => renderStatus(relay, event, { viewerPubkey })),
);
if (!statuses.length) {
@ -460,14 +446,11 @@ const quotesController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#bookmark */
const bookmarkController: AppController = async (c) => {
const { conf } = c.var;
const pubkey = await c.get('signer')?.getPublicKey()!;
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const eventId = c.req.param('id');
const event = await getEvent(eventId, {
kind: 1,
relations: ['author', 'event_stats', 'author_stats'],
});
const event = await getEvent(eventId, c.var);
if (event) {
await updateListEvent(
@ -476,7 +459,7 @@ const bookmarkController: AppController = async (c) => {
c,
);
const status = await renderStatus(event, { viewerPubkey: pubkey });
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
if (status) {
status.bookmarked = true;
}
@ -488,14 +471,12 @@ const bookmarkController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#unbookmark */
const unbookmarkController: AppController = async (c) => {
const { conf } = c.var;
const pubkey = await c.get('signer')?.getPublicKey()!;
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const eventId = c.req.param('id');
const event = await getEvent(eventId, {
kind: 1,
relations: ['author', 'event_stats', 'author_stats'],
});
const event = await getEvent(eventId, c.var);
if (event) {
await updateListEvent(
@ -504,7 +485,7 @@ const unbookmarkController: AppController = async (c) => {
c,
);
const status = await renderStatus(event, { viewerPubkey: pubkey });
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
if (status) {
status.bookmarked = false;
}
@ -516,14 +497,12 @@ const unbookmarkController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#pin */
const pinController: AppController = async (c) => {
const { conf } = c.var;
const pubkey = await c.get('signer')?.getPublicKey()!;
const { conf, relay, user } = c.var;
const pubkey = await user!.signer.getPublicKey();
const eventId = c.req.param('id');
const event = await getEvent(eventId, {
kind: 1,
relations: ['author', 'event_stats', 'author_stats'],
});
const event = await getEvent(eventId, c.var);
if (event) {
await updateListEvent(
@ -532,7 +511,7 @@ const pinController: AppController = async (c) => {
c,
);
const status = await renderStatus(event, { viewerPubkey: pubkey });
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
if (status) {
status.pinned = true;
}
@ -544,16 +523,12 @@ const pinController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#unpin */
const unpinController: AppController = async (c) => {
const { conf } = c.var;
const pubkey = await c.get('signer')?.getPublicKey()!;
const eventId = c.req.param('id');
const { signal } = c.req.raw;
const { conf, relay, user } = c.var;
const event = await getEvent(eventId, {
kind: 1,
relations: ['author', 'event_stats', 'author_stats'],
signal,
});
const pubkey = await user!.signer.getPublicKey();
const eventId = c.req.param('id');
const event = await getEvent(eventId, c.var);
if (event) {
await updateListEvent(
@ -562,7 +537,7 @@ const unpinController: AppController = async (c) => {
c,
);
const status = await renderStatus(event, { viewerPubkey: pubkey });
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
if (status) {
status.pinned = false;
}
@ -580,11 +555,10 @@ const zapSchema = z.object({
});
const zapController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, signal } = c.var;
const body = await parseBody(c.req.raw);
const result = zapSchema.safeParse(body);
const { signal } = c.req.raw;
const store = c.get('store');
if (!result.success) {
return c.json({ error: 'Bad request', schema: result.error }, 400);
@ -597,7 +571,7 @@ const zapController: AppController = async (c) => {
let lnurl: undefined | string;
if (status_id) {
target = await getEvent(status_id, { kind: 1, relations: ['author'], signal });
target = await getEvent(status_id, c.var);
const author = target?.author;
const meta = n.json().pipe(n.metadata()).catch({}).parse(author?.content);
lnurl = getLnurl(meta);
@ -611,7 +585,7 @@ const zapController: AppController = async (c) => {
);
}
} else {
[target] = await store.query([{ authors: [account_id], kinds: [0], limit: 1 }]);
[target] = await relay.query([{ authors: [account_id], kinds: [0], limit: 1 }]);
const meta = n.json().pipe(n.metadata()).catch({}).parse(target?.content);
lnurl = getLnurl(meta);
if (target && lnurl) {
@ -638,19 +612,19 @@ const zapController: AppController = async (c) => {
};
const zappedByController: AppController = async (c) => {
const id = c.req.param('id');
const params = c.get('listPagination');
const store = await Storages.db();
const kysely = await Storages.kysely();
const { db, relay } = c.var;
const zaps = await kysely.selectFrom('event_zaps')
const id = c.req.param('id');
const { offset, limit } = paginationSchema.parse(c.req.query());
const zaps = await db.kysely.selectFrom('event_zaps')
.selectAll()
.where('target_event_id', '=', id)
.orderBy('amount_millisats', 'desc')
.limit(params.limit)
.offset(params.offset).execute();
.limit(limit)
.offset(offset).execute();
const authors = await store.query([{ kinds: [0], authors: zaps.map((zap) => zap.sender_pubkey) }]);
const authors = await relay.query([{ kinds: [0], authors: zaps.map((zap) => zap.sender_pubkey) }]);
const results = (await Promise.all(
zaps.map(async (zap) => {
@ -668,7 +642,7 @@ const zappedByController: AppController = async (c) => {
}),
)).filter(Boolean);
return paginatedList(c, params, results);
return paginatedList(c, { limit, offset }, results);
};
export {

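Across this file the same refactor applies: getEvent receives the c.var bag, viewer keys come from user?.signer, and every render call passes the relay first. A minimal sketch of the read path under those assumptions (the 404 body is illustrative):

const exampleStatusController: AppController = async (c) => {
  const { relay, user } = c.var;
  const event = await getEvent(c.req.param('id'), c.var); // getEvent now takes the context vars
  if (!event) return c.json({ error: 'Record not found' }, 404);
  const viewerPubkey = await user?.signer.getPublicKey();
  return c.json(await renderStatus(relay, event, { viewerPubkey }));
};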
View file

@ -1,24 +1,21 @@
import { MuteListPolicy } from '@ditto/policies';
import {
streamingClientMessagesCounter,
streamingConnectionsGauge,
streamingServerMessagesCounter,
} from '@ditto/metrics';
import TTLCache from '@isaacs/ttlcache';
import { NostrEvent, NostrFilter } from '@nostrify/nostrify';
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { MuteListPolicy } from '@/policies/MuteListPolicy.ts';
import { getFeedPubkeys } from '@/queries.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { Storages } from '@/storages.ts';
import { getTokenHash } from '@/utils/auth.ts';
import { errorJson } from '@/utils/log.ts';
import { bech32ToPubkey, Time } from '@/utils.ts';
import { Time } from '@/utils.ts';
import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts';
import { renderNotification } from '@/views/mastodon/notifications.ts';
import { HTTPException } from '@hono/hono/http-exception';
/**
* Streaming timelines/categories.
@ -68,7 +65,7 @@ const limiter = new TTLCache<string, number>();
const connections = new Set<WebSocket>();
const streamingController: AppController = async (c) => {
const { conf } = c.var;
const { conf, relay, user } = c.var;
const upgrade = c.req.header('upgrade');
const token = c.req.header('sec-websocket-protocol');
const stream = streamSchema.optional().catch(undefined).parse(c.req.query('stream'));
@ -78,11 +75,6 @@ const streamingController: AppController = async (c) => {
return c.text('Please use websocket protocol', 400);
}
const pubkey = token ? await getTokenPubkey(token) : undefined;
if (token && !pubkey) {
return c.json({ error: 'Invalid access token' }, 401);
}
const ip = c.req.header('x-real-ip');
if (ip) {
const count = limiter.get(ip) ?? 0;
@ -91,12 +83,10 @@ const streamingController: AppController = async (c) => {
}
}
const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { protocol: token, idleTimeout: 30 });
const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { protocol: token });
const store = await Storages.db();
const pubsub = await Storages.pubsub();
const policy = pubkey ? new MuteListPolicy(pubkey, await Storages.admin()) : undefined;
const pubkey = await user?.signer.getPublicKey();
const policy = pubkey ? new MuteListPolicy(pubkey, relay) : undefined;
function send(e: StreamingEvent) {
if (socket.readyState === WebSocket.OPEN) {
@ -105,9 +95,13 @@ const streamingController: AppController = async (c) => {
}
}
async function sub(filters: NostrFilter[], render: (event: NostrEvent) => Promise<StreamingEvent | undefined>) {
async function sub(
filter: NostrFilter & { limit: 0 },
render: (event: NostrEvent) => Promise<StreamingEvent | undefined>,
) {
const { signal } = controller;
try {
for await (const msg of pubsub.req(filters, { signal: controller.signal })) {
for await (const msg of relay.req([filter], { signal })) {
if (msg[0] === 'EVENT') {
const event = msg[2];
@ -118,7 +112,7 @@ const streamingController: AppController = async (c) => {
}
}
await hydrateEvents({ events: [event], store, signal: AbortSignal.timeout(1000) });
await hydrateEvents({ ...c.var, events: [event], signal });
const result = await render(event);
@ -137,17 +131,17 @@ const streamingController: AppController = async (c) => {
streamingConnectionsGauge.set(connections.size);
if (!stream) return;
const topicFilter = await topicToFilter(stream, c.req.query(), pubkey, conf.url.host);
const topicFilter = await topicToFilter(relay, stream, c.req.query(), pubkey, conf.url.host);
if (topicFilter) {
sub([topicFilter], async (event) => {
sub(topicFilter, async (event) => {
let payload: object | undefined;
if (event.kind === 1) {
payload = await renderStatus(event, { viewerPubkey: pubkey });
payload = await renderStatus(relay, event, { viewerPubkey: pubkey });
}
if (event.kind === 6) {
payload = await renderReblog(event, { viewerPubkey: pubkey });
payload = await renderReblog(relay, event, { viewerPubkey: pubkey });
}
if (payload) {
@ -161,15 +155,15 @@ const streamingController: AppController = async (c) => {
}
if (['user', 'user:notification'].includes(stream) && pubkey) {
sub([{ '#p': [pubkey] }], async (event) => {
sub({ '#p': [pubkey], limit: 0 }, async (event) => {
if (event.pubkey === pubkey) return; // skip own events
const payload = await renderNotification(event, { viewerPubkey: pubkey });
const payload = await renderNotification(relay, event, { viewerPubkey: pubkey });
if (payload) {
return {
event: 'notification',
payload: JSON.stringify(payload),
stream: [stream],
};
} satisfies StreamingEvent;
}
});
return;
@ -205,48 +199,28 @@ const streamingController: AppController = async (c) => {
};
async function topicToFilter(
relay: NStore,
topic: Stream,
query: Record<string, string>,
pubkey: string | undefined,
host: string,
): Promise<NostrFilter | undefined> {
): Promise<(NostrFilter & { limit: 0 }) | undefined> {
switch (topic) {
case 'public':
return { kinds: [1, 6, 20] };
return { kinds: [1, 6, 20], limit: 0 };
case 'public:local':
return { kinds: [1, 6, 20], search: `domain:${host}` };
return { kinds: [1, 6, 20], search: `domain:${host}`, limit: 0 };
case 'hashtag':
if (query.tag) return { kinds: [1, 6, 20], '#t': [query.tag] };
if (query.tag) return { kinds: [1, 6, 20], '#t': [query.tag], limit: 0 };
break;
case 'hashtag:local':
if (query.tag) return { kinds: [1, 6, 20], '#t': [query.tag], search: `domain:${host}` };
if (query.tag) return { kinds: [1, 6, 20], '#t': [query.tag], search: `domain:${host}`, limit: 0 };
break;
case 'user':
// HACK: this puts the user's entire contacts list into RAM,
// and then calls `matchFilters` over it. Refreshing the page
// is required after following a new user.
return pubkey ? { kinds: [1, 6, 20], authors: [...await getFeedPubkeys(pubkey)] } : undefined;
}
}
async function getTokenPubkey(token: string): Promise<string | undefined> {
if (token.startsWith('token1')) {
const kysely = await Storages.kysely();
const tokenHash = await getTokenHash(token as `token1${string}`);
const row = await kysely
.selectFrom('auth_tokens')
.select('pubkey')
.where('token_hash', '=', tokenHash)
.executeTakeFirst();
if (!row) {
throw new HTTPException(401, { message: 'Invalid access token' });
}
return row.pubkey;
} else {
return bech32ToPubkey(token);
return pubkey ? { kinds: [1, 6, 20], authors: [...await getFeedPubkeys(relay, pubkey)], limit: 0 } : undefined;
}
}
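Streaming subscriptions now go straight through relay.req with a limit: 0 filter (live events only, no backfill), hydrating each incoming event from c.var before rendering. A trimmed sketch of that loop, assuming the StreamingEvent type defined in this file and the AppContext type imported elsewhere in this commit:

async function exampleSub(
  c: AppContext,
  filter: NostrFilter & { limit: 0 },
  render: (event: NostrEvent) => Promise<StreamingEvent | undefined>,
  send: (e: StreamingEvent) => void,
  signal: AbortSignal,
) {
  const { relay } = c.var;
  for await (const msg of relay.req([filter], { signal })) {
    if (msg[0] !== 'EVENT') continue;                     // skip EOSE/CLOSED frames
    const event = msg[2];
    await hydrateEvents({ ...c.var, events: [event], signal });
    const result = await render(event);
    if (result) send(result);
  }
}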

View file

@ -1,38 +1,36 @@
import { paginated, paginatedList, paginationSchema } from '@ditto/mastoapi/pagination';
import { NostrFilter } from '@nostrify/nostrify';
import { matchFilter } from 'nostr-tools';
import { AppContext, AppController } from '@/app.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { paginated, paginatedList } from '@/utils/api.ts';
import { getTagSet } from '@/utils/tags.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
export const suggestionsV1Controller: AppController = async (c) => {
const signal = c.req.raw.signal;
const params = c.get('listPagination');
const suggestions = await renderV2Suggestions(c, params, signal);
const { signal } = c.var;
const { offset, limit } = paginationSchema.parse(c.req.query());
const suggestions = await renderV2Suggestions(c, { offset, limit }, signal);
const accounts = suggestions.map(({ account }) => account);
return paginatedList(c, params, accounts);
return paginatedList(c, { offset, limit }, accounts);
};
export const suggestionsV2Controller: AppController = async (c) => {
const signal = c.req.raw.signal;
const params = c.get('listPagination');
const suggestions = await renderV2Suggestions(c, params, signal);
return paginatedList(c, params, suggestions);
const { signal } = c.var;
const { offset, limit } = paginationSchema.parse(c.req.query());
const suggestions = await renderV2Suggestions(c, { offset, limit }, signal);
return paginatedList(c, { offset, limit }, suggestions);
};
async function renderV2Suggestions(c: AppContext, params: { offset: number; limit: number }, signal?: AbortSignal) {
const { conf } = c.var;
const { conf, relay, user } = c.var;
const { offset, limit } = params;
const store = c.get('store');
const signer = c.get('signer');
const pubkey = await signer?.getPublicKey();
const pubkey = await user?.signer.getPublicKey();
const filters: NostrFilter[] = [
{ kinds: [30382], authors: [conf.pubkey], '#n': ['suggested'], limit },
{ kinds: [1985], '#L': ['pub.ditto.trends'], '#l': [`#p`], authors: [conf.pubkey], limit: 1 },
{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#n': ['suggested'], limit },
{ kinds: [1985], '#L': ['pub.ditto.trends'], '#l': [`#p`], authors: [await conf.signer.getPublicKey()], limit: 1 },
];
if (pubkey) {
@ -40,14 +38,21 @@ async function renderV2Suggestions(c: AppContext, params: { offset: number; limi
filters.push({ kinds: [10000], authors: [pubkey], limit: 1 });
}
const events = await store.query(filters, { signal });
const events = await relay.query(filters, { signal });
const adminPubkey = await conf.signer.getPublicKey();
const [userEvents, followsEvent, mutesEvent, trendingEvent] = [
events.filter((event) => matchFilter({ kinds: [30382], authors: [conf.pubkey], '#n': ['suggested'] }, event)),
events.filter((event) => matchFilter({ kinds: [30382], authors: [adminPubkey], '#n': ['suggested'] }, event)),
pubkey ? events.find((event) => matchFilter({ kinds: [3], authors: [pubkey] }, event)) : undefined,
pubkey ? events.find((event) => matchFilter({ kinds: [10000], authors: [pubkey] }, event)) : undefined,
events.find((event) =>
matchFilter({ kinds: [1985], '#L': ['pub.ditto.trends'], '#l': [`#p`], authors: [conf.pubkey], limit: 1 }, event)
matchFilter({
kinds: [1985],
'#L': ['pub.ditto.trends'],
'#l': [`#p`],
authors: [adminPubkey],
limit: 1,
}, event)
),
];
@ -72,11 +77,11 @@ async function renderV2Suggestions(c: AppContext, params: { offset: number; limi
const authors = [...pubkeys].slice(offset, offset + limit);
const profiles = await store.query(
const profiles = await relay.query(
[{ kinds: [0], authors, limit: authors.length }],
{ signal },
)
.then((events) => hydrateEvents({ events, store, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
return Promise.all(authors.map(async (pubkey) => {
const profile = profiles.find((event) => event.pubkey === pubkey);
@ -89,13 +94,10 @@ async function renderV2Suggestions(c: AppContext, params: { offset: number; limi
}
export const localSuggestionsController: AppController = async (c) => {
const { conf } = c.var;
const signal = c.req.raw.signal;
const params = c.get('pagination');
const store = c.get('store');
const { conf, relay, pagination, signal } = c.var;
const grants = await store.query(
[{ kinds: [30360], authors: [conf.pubkey], ...params }],
const grants = await relay.query(
[{ kinds: [30360], authors: [await conf.signer.getPublicKey()], ...pagination }],
{ signal },
);
@ -108,11 +110,11 @@ export const localSuggestionsController: AppController = async (c) => {
}
}
const profiles = await store.query(
[{ kinds: [0], authors: [...pubkeys], search: `domain:${conf.url.host}`, ...params }],
const profiles = await relay.query(
[{ kinds: [0], authors: [...pubkeys], search: `domain:${conf.url.host}`, ...pagination }],
{ signal },
)
.then((events) => hydrateEvents({ store, events, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
const suggestions = [...pubkeys].map((pubkey) => {
const profile = profiles.find((event) => event.pubkey === pubkey);

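Because the admin pubkey is now awaited from conf.signer, it is resolved once and reused in the matchFilter calls that split the combined query result. A small sketch of that partitioning, using matchFilter from nostr-tools as the file already does (the function name is illustrative):

import { NostrEvent } from '@nostrify/nostrify';
import { matchFilter } from 'nostr-tools';

function exampleSplit(events: NostrEvent[], adminPubkey: string, pubkey?: string) {
  const suggested = events.filter((event) =>
    matchFilter({ kinds: [30382], authors: [adminPubkey], '#n': ['suggested'] }, event)
  );
  const follows = pubkey ? events.find((event) => matchFilter({ kinds: [3], authors: [pubkey] }, event)) : undefined;
  return { suggested, follows };
}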
View file

@ -1,3 +1,4 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrFilter } from '@nostrify/nostrify';
import { z } from 'zod';
@ -5,7 +6,6 @@ import { type AppContext, type AppController } from '@/app.ts';
import { getFeedPubkeys } from '@/queries.ts';
import { booleanParamSchema, languageSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { paginated } from '@/utils/api.ts';
import { getTagSet } from '@/utils/tags.ts';
import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts';
@ -15,8 +15,8 @@ const homeQuerySchema = z.object({
});
const homeTimelineController: AppController = async (c) => {
const params = c.get('pagination');
const pubkey = await c.get('signer')?.getPublicKey()!;
const { relay, user, pagination } = c.var;
const pubkey = await user?.signer.getPublicKey()!;
const result = homeQuerySchema.safeParse(c.req.query());
if (!result.success) {
@ -25,8 +25,8 @@ const homeTimelineController: AppController = async (c) => {
const { exclude_replies, only_media } = result.data;
const authors = [...await getFeedPubkeys(pubkey)];
const filter: NostrFilter = { authors, kinds: [1, 6, 20], ...params };
const authors = [...await getFeedPubkeys(relay, pubkey)];
const filter: NostrFilter = { authors, kinds: [1, 6, 20], ...pagination };
const search: string[] = [];
@ -90,41 +90,39 @@ const hashtagTimelineController: AppController = (c) => {
};
const suggestedTimelineController: AppController = async (c) => {
const { conf } = c.var;
const store = c.get('store');
const params = c.get('pagination');
const { conf, relay, pagination } = c.var;
const [follows] = await store.query(
[{ kinds: [3], authors: [conf.pubkey], limit: 1 }],
const [follows] = await relay.query(
[{ kinds: [3], authors: [await conf.signer.getPublicKey()], limit: 1 }],
);
const authors = [...getTagSet(follows?.tags ?? [], 'p')];
return renderStatuses(c, [{ authors, kinds: [1, 20], ...params }]);
return renderStatuses(c, [{ authors, kinds: [1, 20], ...pagination }]);
};
/** Render statuses for timelines. */
async function renderStatuses(c: AppContext, filters: NostrFilter[]) {
const { conf } = c.var;
const { signal } = c.req.raw;
const store = c.get('store');
const { conf, user, signal } = c.var;
const relay = user?.relay ?? c.var.relay;
const opts = { signal, timeout: conf.db.timeouts.timelines };
const events = await store
const events = await relay
.query(filters, opts)
.then((events) => hydrateEvents({ events, store, signal }));
.then((events) => hydrateEvents({ ...c.var, events }));
if (!events.length) {
return c.json([]);
}
const viewerPubkey = await c.get('signer')?.getPublicKey();
const viewerPubkey = await user?.signer.getPublicKey();
const statuses = (await Promise.all(events.map((event) => {
if (event.kind === 6) {
return renderReblog(event, { viewerPubkey });
return renderReblog(relay, event, { viewerPubkey });
}
return renderStatus(event, { viewerPubkey });
return renderStatus(relay, event, { viewerPubkey });
}))).filter(Boolean);
if (!statuses.length) {

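The home timeline builds its filter from getFeedPubkeys(relay, pubkey) and the pagination vars, and renderStatuses prefers a per-user relay when the session carries one (user?.relay ?? c.var.relay). A reduced sketch of the filter construction under those assumptions, treating the route as authenticated:

async function exampleHomeFilter(c: AppContext): Promise<NostrFilter> {
  const { relay, user, pagination } = c.var;
  const pubkey = await user!.signer.getPublicKey();        // middleware is assumed to guarantee a user here
  const authors = [...await getFeedPubkeys(relay, pubkey)]; // getFeedPubkeys now takes the relay
  return { authors, kinds: [1, 6, 20], ...pagination };
}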
View file

@ -1,4 +1,5 @@
import { cachedTranslationsSizeGauge } from '@ditto/metrics';
import { logi } from '@soapbox/logi';
import { LanguageCode } from 'iso-639-1';
import { z } from 'zod';
@ -9,14 +10,16 @@ import { getEvent } from '@/queries.ts';
import { localeSchema } from '@/schema.ts';
import { parseBody } from '@/utils/api.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
import { errorJson } from '@/utils/log.ts';
const translateSchema = z.object({
lang: localeSchema,
});
const translateController: AppController = async (c) => {
const { relay, user, signal } = c.var;
const result = translateSchema.safeParse(await parseBody(c.req.raw));
const { signal } = c.req.raw;
if (!result.success) {
return c.json({ error: 'Bad request.', schema: result.error }, 422);
@ -31,18 +34,18 @@ const translateController: AppController = async (c) => {
const id = c.req.param('id');
const event = await getEvent(id, { signal });
const event = await getEvent(id, c.var);
if (!event) {
return c.json({ error: 'Record not found' }, 400);
}
const viewerPubkey = await c.get('signer')?.getPublicKey();
const viewerPubkey = await user?.signer.getPublicKey();
if (lang.toLowerCase() === event?.language?.toLowerCase()) {
return c.json({ error: 'Source and target languages are the same. No translation needed.' }, 400);
}
const status = await renderStatus(event, { viewerPubkey });
const status = await renderStatus(relay, event, { viewerPubkey });
if (!status?.content) {
return c.json({ error: 'Bad request.', schema: result.error }, 400);
}
@ -130,7 +133,7 @@ const translateController: AppController = async (c) => {
}
}
mastodonTranslation.detected_source_language = data.source_lang;
mastodonTranslation.detected_source_language = data.sourceLang;
translationCache.set(cacheKey, mastodonTranslation);
cachedTranslationsSizeGauge.set(translationCache.size);
@ -140,6 +143,7 @@ const translateController: AppController = async (c) => {
if (e instanceof Error && e.message.includes('not supported')) {
return c.json({ error: `Translation of source language '${event.language}' not supported` }, 422);
}
logi({ level: 'error', ns: 'ditto.translate', error: errorJson(e) });
return c.json({ error: 'Service Unavailable' }, 503);
}
};

View file

@ -1,34 +1,44 @@
import { type DittoConf } from '@ditto/conf';
import { paginated, paginationSchema } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { z } from 'zod';
import { AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
import { paginationSchema } from '@/schemas/pagination.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { Storages } from '@/storages.ts';
import { generateDateRange, Time } from '@/utils/time.ts';
import { unfurlCardCached } from '@/utils/unfurl.ts';
import { paginated } from '@/utils/api.ts';
import { PreviewCard, unfurlCardCached } from '@/utils/unfurl.ts';
import { errorJson } from '@/utils/log.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
let trendingHashtagsCache = getTrendingHashtags(Conf).catch((e: unknown) => {
logi({
level: 'error',
ns: 'ditto.trends.api',
type: 'tags',
msg: 'Failed to get trending hashtags',
error: errorJson(e),
});
return Promise.resolve([]);
interface TrendHistory {
day: string;
accounts: string;
uses: string;
}
interface TrendingHashtag {
name: string;
url: string;
history: TrendHistory[];
}
interface TrendingLink extends PreviewCard {
history: TrendHistory[];
}
const trendingTagsQuerySchema = z.object({
limit: z.coerce.number().catch(10).transform((value) => Math.min(Math.max(value, 0), 20)),
offset: z.number().nonnegative().catch(0),
});
Deno.cron('update trending hashtags cache', '35 * * * *', async () => {
const trendingTagsController: AppController = async (c) => {
const { conf, relay } = c.var;
const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
try {
const trends = await getTrendingHashtags(Conf);
trendingHashtagsCache = Promise.resolve(trends);
const trends = await getTrendingHashtags(conf, relay);
return c.json(trends.slice(offset, offset + limit));
} catch (e) {
logi({
level: 'error',
@ -37,23 +47,12 @@ Deno.cron('update trending hashtags cache', '35 * * * *', async () => {
msg: 'Failed to get trending hashtags',
error: errorJson(e),
});
return c.json([]);
}
});
const trendingTagsQuerySchema = z.object({
limit: z.coerce.number().catch(10).transform((value) => Math.min(Math.max(value, 0), 20)),
offset: z.number().nonnegative().catch(0),
});
const trendingTagsController: AppController = async (c) => {
const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
const trends = await trendingHashtagsCache;
return c.json(trends.slice(offset, offset + limit));
};
async function getTrendingHashtags(conf: DittoConf) {
const store = await Storages.db();
const trends = await getTrendingTags(store, 't', conf.pubkey);
async function getTrendingHashtags(conf: DittoConf, relay: NStore): Promise<TrendingHashtag[]> {
const trends = await getTrendingTags(relay, 't', await conf.signer.getPublicKey());
return trends.map((trend) => {
const hashtag = trend.value;
@ -72,21 +71,12 @@ async function getTrendingHashtags(conf: DittoConf) {
});
}
let trendingLinksCache = getTrendingLinks(Conf).catch((e: unknown) => {
logi({
level: 'error',
ns: 'ditto.trends.api',
type: 'links',
msg: 'Failed to get trending links',
error: errorJson(e),
});
return Promise.resolve([]);
});
Deno.cron('update trending links cache', '50 * * * *', async () => {
const trendingLinksController: AppController = async (c) => {
const { conf, relay } = c.var;
const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
try {
const trends = await getTrendingLinks(Conf);
trendingLinksCache = Promise.resolve(trends);
const trends = await getTrendingLinks(conf, relay);
return c.json(trends.slice(offset, offset + limit));
} catch (e) {
logi({
level: 'error',
@ -95,18 +85,12 @@ Deno.cron('update trending links cache', '50 * * * *', async () => {
msg: 'Failed to get trending links',
error: errorJson(e),
});
return c.json([]);
}
});
const trendingLinksController: AppController = async (c) => {
const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
const trends = await trendingLinksCache;
return c.json(trends.slice(offset, offset + limit));
};
async function getTrendingLinks(conf: DittoConf) {
const store = await Storages.db();
const trends = await getTrendingTags(store, 'r', conf.pubkey);
async function getTrendingLinks(conf: DittoConf, relay: NStore): Promise<TrendingLink[]> {
const trends = await getTrendingTags(relay, 'r', await conf.signer.getPublicKey());
return Promise.all(trends.map(async (trend) => {
const link = trend.value;
@ -140,15 +124,14 @@ async function getTrendingLinks(conf: DittoConf) {
}
const trendingStatusesController: AppController = async (c) => {
const { conf } = c.var;
const store = await Storages.db();
const { conf, relay } = c.var;
const { limit, offset, until } = paginationSchema.parse(c.req.query());
const [label] = await store.query([{
const [label] = await relay.query([{
kinds: [1985],
'#L': ['pub.ditto.trends'],
'#l': ['#e'],
authors: [conf.pubkey],
authors: [await conf.signer.getPublicKey()],
until,
limit: 1,
}]);
@ -162,8 +145,8 @@ const trendingStatusesController: AppController = async (c) => {
return c.json([]);
}
const results = await store.query([{ kinds: [1, 20], ids }])
.then((events) => hydrateEvents({ events, store }));
const results = await relay.query([{ kinds: [1, 20], ids }])
.then((events) => hydrateEvents({ ...c.var, events }));
// Sort events in the order they appear in the label.
const events = ids
@ -171,7 +154,7 @@ const trendingStatusesController: AppController = async (c) => {
.filter((event): event is NostrEvent => !!event);
const statuses = await Promise.all(
events.map((event) => renderStatus(event, {})),
events.map((event) => renderStatus(relay, event, {})),
);
return paginated(c, results, statuses);

View file
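The hourly Deno.cron caches are removed; each request now computes the trend list itself and degrades to an empty array when the query fails. Because the old and new code interleave above, here is the resulting request-time shape in one piece (a sketch; names follow the hunk):

const exampleTrendingTagsController: AppController = async (c) => {
  const { conf, relay } = c.var;
  const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query());
  try {
    const trends = await getTrendingHashtags(conf, relay);
    return c.json(trends.slice(offset, offset + limit));  // pagination is a plain slice over the computed list
  } catch (e) {
    logi({ level: 'error', ns: 'ditto.trends.api', type: 'tags', msg: 'Failed to get trending hashtags', error: errorJson(e) });
    return c.json([]);
  }
};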

@ -5,6 +5,9 @@ import { logi } from '@soapbox/logi';
import { errorJson } from '@/utils/log.ts';
export const errorHandler: ErrorHandler = (err, c) => {
const { method } = c.req;
const { pathname } = new URL(c.req.url);
c.header('Cache-Control', 'no-store');
if (err instanceof HTTPException) {
@ -19,7 +22,7 @@ export const errorHandler: ErrorHandler = (err, c) => {
return c.json({ error: 'The server was unable to respond in a timely manner' }, 500);
}
logi({ level: 'error', ns: 'ditto.http', msg: 'Unhandled error', error: errorJson(err) });
logi({ level: 'error', ns: 'ditto.http', msg: 'Unhandled error', method, pathname, error: errorJson(err) });
return c.json({ error: 'Something went wrong' }, 500);
};

View file

@ -1,7 +1,6 @@
import { logi } from '@soapbox/logi';
import { AppMiddleware } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { AppContext, AppMiddleware } from '@/app.ts';
import { getPathParams, MetadataEntities } from '@/utils/og-metadata.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
import { errorJson } from '@/utils/log.ts';
@ -23,7 +22,7 @@ export const frontendController: AppMiddleware = async (c) => {
if (content.includes(META_PLACEHOLDER)) {
const params = getPathParams(c.req.path);
try {
const entities = await getEntities(params ?? {});
const entities = await getEntities(c, params ?? {});
const meta = renderMetadata(c.req.url, entities);
return c.html(content.replace(META_PLACEHOLDER, meta));
} catch (e) {
@ -37,27 +36,27 @@ export const frontendController: AppMiddleware = async (c) => {
}
};
async function getEntities(params: { acct?: string; statusId?: string }): Promise<MetadataEntities> {
const store = await Storages.db();
async function getEntities(c: AppContext, params: { acct?: string; statusId?: string }): Promise<MetadataEntities> {
const { relay } = c.var;
const entities: MetadataEntities = {
instance: await getInstanceMetadata(store),
instance: await getInstanceMetadata(relay),
};
if (params.statusId) {
const event = await getEvent(params.statusId, { kind: 1 });
const event = await getEvent(params.statusId, c.var);
if (event) {
entities.status = await renderStatus(event, {});
entities.status = await renderStatus(relay, event, {});
entities.account = entities.status?.account;
}
return entities;
}
if (params.acct) {
const pubkey = await lookupPubkey(params.acct.replace(/^@/, ''));
const event = pubkey ? await getAuthor(pubkey) : undefined;
const pubkey = await lookupPubkey(params.acct.replace(/^@/, ''), c.var);
const event = pubkey ? await getAuthor(pubkey, c.var) : undefined;
if (event) {
entities.account = await renderAccount(event);
entities.account = renderAccount(event);
}
}
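getEntities now receives the request context instead of reaching for a global store, so the metadata, status, and account lookups all share the per-request relay. A reconstruction of the post-change function for readability, using only helpers named in the hunks above:

    // Sketch: build OpenGraph metadata entities from the request context.
    async function getEntities(c: AppContext, params: { acct?: string; statusId?: string }): Promise<MetadataEntities> {
      const { relay } = c.var;
      const entities: MetadataEntities = { instance: await getInstanceMetadata(relay) };

      if (params.statusId) {
        const event = await getEvent(params.statusId, c.var); // dependencies passed explicitly
        if (event) {
          entities.status = await renderStatus(relay, event, {});
          entities.account = entities.status?.account;
        }
        return entities;
      }

      if (params.acct) {
        const pubkey = await lookupPubkey(params.acct.replace(/^@/, ''), c.var);
        const event = pubkey ? await getAuthor(pubkey, c.var) : undefined;
        if (event) entities.account = renderAccount(event); // renderAccount is now synchronous
      }

      return entities;
    }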

View file

@ -1,10 +1,11 @@
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { WebManifestCombined } from '@/types/webmanifest.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
export const manifestController: AppController = async (c) => {
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
const { relay, signal } = c.var;
const meta = await getInstanceMetadata(relay, signal);
const manifest: WebManifestCombined = {
description: meta.about,

View file

@ -1,31 +1,16 @@
import {
dbAvailableConnectionsGauge,
dbPoolSizeGauge,
relayPoolRelaysSizeGauge,
relayPoolSubscriptionsSizeGauge,
} from '@ditto/metrics';
import { dbAvailableConnectionsGauge, dbPoolSizeGauge } from '@ditto/metrics';
import { register } from 'prom-client';
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
/** Prometheus/OpenMetrics controller. */
export const metricsController: AppController = async (c) => {
const db = await Storages.database();
const pool = await Storages.client();
const { db } = c.var;
// Update some metrics at request time.
dbPoolSizeGauge.set(db.poolSize);
dbAvailableConnectionsGauge.set(db.availableConnections);
relayPoolRelaysSizeGauge.reset();
relayPoolSubscriptionsSizeGauge.reset();
for (const relay of pool.relays.values()) {
relayPoolRelaysSizeGauge.inc({ ready_state: relay.socket.readyState });
relayPoolSubscriptionsSizeGauge.inc(relay.subscriptions.length);
}
// Serve the metrics.
const metrics = await register.metrics();
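With the relay-pool gauges removed, the controller only refreshes the database gauges from the injected db handle before serving the registry. A short sketch; the final response line is an assumption, since the hunk ends before the return statement:

    // Sketch: refresh request-time gauges, then serve the Prometheus registry.
    export const metricsController: AppController = async (c) => {
      const { db } = c.var;

      dbPoolSizeGauge.set(db.poolSize);
      dbAvailableConnectionsGauge.set(db.availableConnections);

      const metrics = await register.metrics();
      return c.text(metrics, 200, { 'content-type': register.contentType }); // assumed response shape
    };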

View file

@ -1,20 +1,19 @@
import denoJson from 'deno.json' with { type: 'json' };
import { AppController } from '@/app.ts';
import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
const relayInfoController: AppController = async (c) => {
const { conf } = c.var;
const store = await Storages.db();
const meta = await getInstanceMetadata(store, c.req.raw.signal);
const { conf, relay, signal } = c.var;
const meta = await getInstanceMetadata(relay, signal);
c.res.headers.set('access-control-allow-origin', '*');
return c.json({
name: meta.name,
description: meta.about,
pubkey: conf.pubkey,
pubkey: await conf.signer.getPublicKey(),
contact: meta.email,
supported_nips: [1, 5, 9, 11, 16, 45, 50, 46, 98],
software: 'Ditto',

View file

@ -1,5 +1,6 @@
import { type DittoConf } from '@ditto/conf';
import { relayConnectionsGauge, relayEventsCounter, relayMessagesCounter } from '@ditto/metrics';
import { MemoryRateLimiter, MultiRateLimiter, type RateLimiter } from '@ditto/ratelimiter';
import { logi } from '@soapbox/logi';
import { JsonValue } from '@std/json';
import {
@ -15,19 +16,12 @@ import {
import { AppController } from '@/app.ts';
import { relayInfoController } from '@/controllers/nostr/relay-info.ts';
import * as pipeline from '@/pipeline.ts';
import { RelayError } from '@/RelayError.ts';
import { Storages } from '@/storages.ts';
import { type DittoPgStore } from '@/storages/DittoPgStore.ts';
import { errorJson } from '@/utils/log.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { MemoryRateLimiter } from '@/utils/ratelimiter/MemoryRateLimiter.ts';
import { MultiRateLimiter } from '@/utils/ratelimiter/MultiRateLimiter.ts';
import { RateLimiter } from '@/utils/ratelimiter/types.ts';
import { Time } from '@/utils/time.ts';
/** Limit of initial events returned for a subscription. */
const FILTER_LIMIT = 100;
const limiters = {
msg: new MemoryRateLimiter({ limit: 300, window: Time.minutes(1) }),
req: new MultiRateLimiter([
@ -47,9 +41,20 @@ const limiters = {
const connections = new Set<WebSocket>();
/** Set up the Websocket connection. */
function connectStream(socket: WebSocket, ip: string | undefined, conf: DittoConf) {
function connectStream(conf: DittoConf, relay: DittoPgStore, socket: WebSocket, ip: string | undefined) {
const controllers = new Map<string, AbortController>();
if (ip) {
const remaining = Object
.values(limiters)
.reduce((acc, limiter) => Math.min(acc, limiter.client(ip).remaining), Infinity);
if (remaining < 0) {
socket.close(1008, 'Rate limit exceeded');
return;
}
}
socket.onopen = () => {
connections.add(socket);
relayConnectionsGauge.set(connections.size);
@ -127,12 +132,9 @@ function connectStream(socket: WebSocket, ip: string | undefined, conf: DittoCon
controllers.get(subId)?.abort();
controllers.set(subId, controller);
const store = await Storages.db();
const pubsub = await Storages.pubsub();
try {
for (const event of await store.query(filters, { limit: FILTER_LIMIT, timeout: conf.db.timeouts.relay })) {
send(['EVENT', subId, purifyEvent(event)]);
for await (const [verb, , ...rest] of relay.req(filters, { limit: 100, timeout: conf.db.timeouts.relay })) {
send([verb, subId, ...rest] as NostrRelayMsg);
}
} catch (e) {
if (e instanceof RelayError) {
@ -145,18 +147,6 @@ function connectStream(socket: WebSocket, ip: string | undefined, conf: DittoCon
controllers.delete(subId);
return;
}
send(['EOSE', subId]);
try {
for await (const msg of pubsub.req(filters, { signal: controller.signal })) {
if (msg[0] === 'EVENT') {
send(['EVENT', subId, msg[2]]);
}
}
} catch {
controllers.delete(subId);
}
}
/** Handle EVENT. Store the event. */
@ -168,7 +158,7 @@ function connectStream(socket: WebSocket, ip: string | undefined, conf: DittoCon
try {
// This will store it (if eligible) and run other side-effects.
await pipeline.handleEvent(purifyEvent(event), { source: 'relay', signal: AbortSignal.timeout(1000) });
await relay.event(purifyEvent(event), { signal: AbortSignal.timeout(1000) });
send(['OK', event.id, true, '']);
} catch (e) {
if (e instanceof RelayError) {
@ -192,8 +182,7 @@ function connectStream(socket: WebSocket, ip: string | undefined, conf: DittoCon
/** Handle COUNT. Return the number of events matching the filters. */
async function handleCount([_, subId, ...filters]: NostrClientCOUNT): Promise<void> {
if (rateLimited(limiters.req)) return;
const store = await Storages.db();
const { count } = await store.count(filters, { timeout: conf.db.timeouts.relay });
const { count } = await relay.count(filters, { timeout: conf.db.timeouts.relay });
send(['COUNT', subId, { count, approximate: false }]);
}
@ -206,7 +195,7 @@ function connectStream(socket: WebSocket, ip: string | undefined, conf: DittoCon
}
const relayController: AppController = (c, next) => {
const { conf } = c.var;
const { conf, relay } = c.var;
const upgrade = c.req.header('upgrade');
// NIP-11: https://github.com/nostr-protocol/nips/blob/master/11.md
@ -224,18 +213,8 @@ const relayController: AppController = (c, next) => {
ip = undefined;
}
if (ip) {
const remaining = Object
.values(limiters)
.reduce((acc, limiter) => Math.min(acc, limiter.client(ip).remaining), Infinity);
if (remaining < 0) {
return c.json({ error: 'Rate limit exceeded' }, 429);
}
}
const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { idleTimeout: 30 });
connectStream(socket, ip, conf);
const { socket, response } = Deno.upgradeWebSocket(c.req.raw);
connectStream(conf, relay as DittoPgStore, socket, ip);
return response;
};
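Two behavioral notes fall out of this hunk: the rate-limit check moved from the HTTP handler into connectStream, so over-limit clients are now refused with a WebSocket close instead of a 429, and the upgrade no longer sets an idleTimeout. A trimmed sketch of the connection gate, assuming the limiters map defined earlier in the file:

    // Sketch: refuse the connection immediately when the client exceeds any limiter.
    function connectStream(conf: DittoConf, relay: DittoPgStore, socket: WebSocket, ip: string | undefined) {
      if (ip) {
        const remaining = Object
          .values(limiters)
          .reduce((acc, limiter) => Math.min(acc, limiter.client(ip).remaining), Infinity);

        if (remaining < 0) {
          socket.close(1008, 'Rate limit exceeded'); // 1008 = policy violation
          return;
        }
      }
      // ...socket.onopen/onmessage handlers follow as in the hunks above.
    }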

View file

@ -18,11 +18,9 @@ const nostrController: AppController = async (c) => {
return c.json(emptyResult);
}
const store = c.get('store');
const result = nameSchema.safeParse(c.req.query('name'));
const name = result.success ? result.data : undefined;
const pointer = name ? await localNip05Lookup(store, name) : undefined;
const pointer = name ? await localNip05Lookup(name, c.var) : undefined;
if (!name || !pointer) {
// Not found, cache for 5 minutes.

View file

@ -1,7 +1,7 @@
import { sql } from 'kysely';
import { Storages } from '@/storages.ts';
import {
type TrendsCtx,
updateTrendingEvents,
updateTrendingHashtags,
updateTrendingLinks,
@ -10,15 +10,15 @@ import {
} from '@/trends.ts';
/** Start cron jobs for the application. */
export function cron() {
Deno.cron('update trending pubkeys', '0 * * * *', updateTrendingPubkeys);
Deno.cron('update trending zapped events', '7 * * * *', updateTrendingZappedEvents);
Deno.cron('update trending events', '15 * * * *', updateTrendingEvents);
Deno.cron('update trending hashtags', '30 * * * *', updateTrendingHashtags);
Deno.cron('update trending links', '45 * * * *', updateTrendingLinks);
export function cron(ctx: TrendsCtx) {
Deno.cron('update trending pubkeys', '0 * * * *', () => updateTrendingPubkeys(ctx));
Deno.cron('update trending zapped events', '7 * * * *', () => updateTrendingZappedEvents(ctx));
Deno.cron('update trending events', '15 * * * *', () => updateTrendingEvents(ctx));
Deno.cron('update trending hashtags', '30 * * * *', () => updateTrendingHashtags(ctx));
Deno.cron('update trending links', '45 * * * *', () => updateTrendingLinks(ctx));
Deno.cron('refresh top authors', '20 * * * *', async () => {
const kysely = await Storages.kysely();
const { kysely } = ctx.db;
await sql`refresh materialized view top_authors`.execute(kysely);
});
}
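cron no longer builds its own storage; the caller hands it a TrendsCtx so every job shares one set of handles. A usage sketch for server startup; aside from ctx.db.kysely (used above), the exact shape of TrendsCtx is defined in '@/trends.ts' and the field names here are assumptions:

    // Sketch: wire the cron jobs once during startup.
    import { cron } from '@/cron.ts';
    import type { TrendsCtx } from '@/trends.ts';

    const ctx = { conf, db, relay } as TrendsCtx; // placeholder fields aside from db
    cron(ctx);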

View file

@ -1,46 +0,0 @@
import { assertEquals } from '@std/assert';
import event0 from '~/fixtures/events/event-0.json' with { type: 'json' };
import event1 from '~/fixtures/events/event-1.json' with { type: 'json' };
import { eventToMicroFilter, getFilterId, getFilterLimit, getMicroFilters, isMicrofilter } from './filter.ts';
Deno.test('getMicroFilters', () => {
const event = event0;
const microfilters = getMicroFilters(event);
assertEquals(microfilters.length, 2);
assertEquals(microfilters[0], { authors: [event.pubkey], kinds: [0] });
assertEquals(microfilters[1], { ids: [event.id] });
});
Deno.test('eventToMicroFilter', () => {
assertEquals(eventToMicroFilter(event0), { authors: [event0.pubkey], kinds: [0] });
assertEquals(eventToMicroFilter(event1), { ids: [event1.id] });
});
Deno.test('isMicrofilter', () => {
assertEquals(isMicrofilter({ ids: [event0.id] }), true);
assertEquals(isMicrofilter({ authors: [event0.pubkey], kinds: [0] }), true);
assertEquals(isMicrofilter({ ids: [event0.id], authors: [event0.pubkey], kinds: [0] }), false);
});
Deno.test('getFilterId', () => {
assertEquals(
getFilterId({ ids: [event0.id] }),
'{"ids":["63d38c9b483d2d98a46382eadefd272e0e4bdb106a5b6eddb400c4e76f693d35"]}',
);
assertEquals(
getFilterId({ authors: [event0.pubkey], kinds: [0] }),
'{"authors":["79c2cae114ea28a981e7559b4fe7854a473521a8d22a66bbab9fa248eb820ff6"],"kinds":[0]}',
);
});
Deno.test('getFilterLimit', () => {
assertEquals(getFilterLimit({ ids: [event0.id] }), 1);
assertEquals(getFilterLimit({ ids: [event0.id], limit: 2 }), 1);
assertEquals(getFilterLimit({ ids: [event0.id], limit: 0 }), 0);
assertEquals(getFilterLimit({ ids: [event0.id], limit: -1 }), 0);
assertEquals(getFilterLimit({ kinds: [0], authors: [event0.pubkey] }), 1);
assertEquals(getFilterLimit({ kinds: [1], authors: [event0.pubkey] }), Infinity);
assertEquals(getFilterLimit({}), Infinity);
});

View file

@ -1,97 +0,0 @@
import { NKinds, NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import stringifyStable from 'fast-stable-stringify';
import { z } from 'zod';
/** Microfilter to get one specific event by ID. */
type IdMicrofilter = { ids: [NostrEvent['id']] };
/** Microfilter to get an author. */
type AuthorMicrofilter = { kinds: [0]; authors: [NostrEvent['pubkey']] };
/** Filter to get one specific event. */
type MicroFilter = IdMicrofilter | AuthorMicrofilter;
/** Get deterministic ID for a microfilter. */
function getFilterId(filter: MicroFilter): string {
if ('ids' in filter) {
return stringifyStable({ ids: [filter.ids[0]] });
} else {
return stringifyStable({
kinds: [filter.kinds[0]],
authors: [filter.authors[0]],
});
}
}
/** Get a microfilter from a Nostr event. */
function eventToMicroFilter(event: NostrEvent): MicroFilter {
const [microfilter] = getMicroFilters(event);
return microfilter;
}
/** Get all the microfilters for an event, in order of priority. */
function getMicroFilters(event: NostrEvent): MicroFilter[] {
const microfilters: MicroFilter[] = [];
if (event.kind === 0) {
microfilters.push({ kinds: [0], authors: [event.pubkey] });
}
microfilters.push({ ids: [event.id] });
return microfilters;
}
/** Microfilter schema. */
const microFilterSchema = z.union([
z.object({ ids: z.tuple([n.id()]) }).strict(),
z.object({ kinds: z.tuple([z.literal(0)]), authors: z.tuple([n.id()]) }).strict(),
]);
/** Checks whether the filter is a microfilter. */
function isMicrofilter(filter: NostrFilter): filter is MicroFilter {
return microFilterSchema.safeParse(filter).success;
}
/** Returns true if the filter could potentially return any stored events at all. */
function canFilter(filter: NostrFilter): boolean {
return getFilterLimit(filter) > 0;
}
/** Normalize the `limit` of each filter, and remove filters that can't produce any events. */
function normalizeFilters<F extends NostrFilter>(filters: F[]): F[] {
return filters.reduce<F[]>((acc, filter) => {
const limit = getFilterLimit(filter);
if (limit > 0) {
acc.push(limit === Infinity ? filter : { ...filter, limit });
}
return acc;
}, []);
}
/** Calculate the intrinsic limit of a filter. This function may return `Infinity`. */
function getFilterLimit(filter: NostrFilter): number {
if (filter.ids && !filter.ids.length) return 0;
if (filter.kinds && !filter.kinds.length) return 0;
if (filter.authors && !filter.authors.length) return 0;
for (const [key, value] of Object.entries(filter)) {
if (key[0] === '#' && Array.isArray(value) && !value.length) return 0;
}
return Math.min(
Math.max(0, filter.limit ?? Infinity),
filter.ids?.length ?? Infinity,
filter.authors?.length && filter.kinds?.every((kind) => NKinds.replaceable(kind))
? filter.authors.length * filter.kinds.length
: Infinity,
);
}
export {
type AuthorMicrofilter,
canFilter,
eventToMicroFilter,
getFilterId,
getFilterLimit,
getMicroFilters,
type IdMicrofilter,
isMicrofilter,
type MicroFilter,
normalizeFilters,
};

View file

@ -1,32 +1,38 @@
import { firehoseEventsCounter } from '@ditto/metrics';
import { Semaphore } from '@core/asyncutil';
import { NRelay, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { nostrNow } from '@/utils.ts';
import * as pipeline from '@/pipeline.ts';
const sem = new Semaphore(Conf.firehoseConcurrency);
interface FirehoseOpts {
pool: NRelay;
relay: NStore;
concurrency: number;
kinds: number[];
timeout?: number;
}
/**
* This function watches events on all known relays and performs
* side-effects based on them, such as trending hashtag tracking
* and storing events for notifications and the home feed.
*/
export async function startFirehose(): Promise<void> {
const store = await Storages.client();
export async function startFirehose(opts: FirehoseOpts): Promise<void> {
const { pool, relay, kinds, concurrency, timeout = 5000 } = opts;
for await (const msg of store.req([{ kinds: Conf.firehoseKinds, limit: 0, since: nostrNow() }])) {
const sem = new Semaphore(concurrency);
for await (const msg of pool.req([{ kinds, limit: 0, since: nostrNow() }])) {
if (msg[0] === 'EVENT') {
const event = msg[2];
logi({ level: 'debug', ns: 'ditto.event', source: 'firehose', id: event.id, kind: event.kind });
firehoseEventsCounter.inc({ kind: event.kind });
sem.lock(async () => {
try {
await pipeline.handleEvent(event, { source: 'firehose', signal: AbortSignal.timeout(5000) });
await relay.event(event, { signal: AbortSignal.timeout(timeout) });
} catch {
// Ignore
}
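startFirehose is now fully parameterized: the caller chooses the upstream pool, the store that ingests events, the kinds to watch, and the concurrency. A usage sketch; the kind list and concurrency are placeholders for whatever the configuration supplies:

    // Sketch: start the firehose with explicit dependencies at boot time.
    import { startFirehose } from '@/firehose.ts';

    startFirehose({
      pool,                // NRelay pool of upstream relays to watch
      relay,               // NStore that ingests the matching events
      kinds: [0, 1, 6, 7], // placeholder kind list
      concurrency: 5,      // placeholder: events processed in parallel
      timeout: 5_000,      // per-event abort timeout in milliseconds
    });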

View file

@ -1,5 +0,0 @@
import { NostrEvent } from '@nostrify/nostrify';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
/** Additional properties that may be added by Ditto to events. */
export type DittoRelation = Exclude<keyof DittoEvent, keyof NostrEvent>;

View file

@ -1,116 +0,0 @@
import { HTTPException } from '@hono/hono/http-exception';
import { NostrEvent } from '@nostrify/nostrify';
import { type AppContext, type AppMiddleware } from '@/app.ts';
import { ReadOnlySigner } from '@/signers/ReadOnlySigner.ts';
import { Storages } from '@/storages.ts';
import { localRequest } from '@/utils/api.ts';
import {
buildAuthEventTemplate,
parseAuthRequest,
type ParseAuthRequestOpts,
validateAuthEvent,
} from '@/utils/nip98.ts';
/**
* NIP-98 auth.
* https://github.com/nostr-protocol/nips/blob/master/98.md
*/
function auth98Middleware(opts: ParseAuthRequestOpts = {}): AppMiddleware {
return async (c, next) => {
const req = localRequest(c);
const result = await parseAuthRequest(req, opts);
if (result.success) {
c.set('signer', new ReadOnlySigner(result.data.pubkey));
c.set('proof', result.data);
}
await next();
};
}
type UserRole = 'user' | 'admin';
/** Require the user to prove their role before invoking the controller. */
function requireRole(role: UserRole, opts?: ParseAuthRequestOpts): AppMiddleware {
return withProof(async (c, proof, next) => {
const { conf } = c.var;
const store = await Storages.db();
const [user] = await store.query([{
kinds: [30382],
authors: [conf.pubkey],
'#d': [proof.pubkey],
limit: 1,
}]);
if (user && matchesRole(user, role)) {
await next();
} else {
throw new HTTPException(401);
}
}, opts);
}
/** Require the user to demonstrate they own the pubkey by signing an event. */
function requireProof(opts?: ParseAuthRequestOpts): AppMiddleware {
return withProof(async (_c, _proof, next) => {
await next();
}, opts);
}
/** Check whether the user fulfills the role. */
function matchesRole(user: NostrEvent, role: UserRole): boolean {
return user.tags.some(([tag, value]) => tag === 'n' && value === role);
}
/** HOC to obtain proof in middleware. */
function withProof(
handler: (c: AppContext, proof: NostrEvent, next: () => Promise<void>) => Promise<void>,
opts?: ParseAuthRequestOpts,
): AppMiddleware {
return async (c, next) => {
const signer = c.get('signer');
const pubkey = await signer?.getPublicKey();
const proof = c.get('proof') || await obtainProof(c, opts);
// Prevent people from accidentally using the wrong account. This has no other security implications.
if (proof && pubkey && pubkey !== proof.pubkey) {
throw new HTTPException(401, { message: 'Pubkey mismatch' });
}
if (proof) {
c.set('proof', proof);
if (!signer) {
c.set('signer', new ReadOnlySigner(proof.pubkey));
}
await handler(c, proof, next);
} else {
throw new HTTPException(401, { message: 'No proof' });
}
};
}
/** Get the proof over Nostr Connect. */
async function obtainProof(c: AppContext, opts?: ParseAuthRequestOpts) {
const signer = c.get('signer');
if (!signer) {
throw new HTTPException(401, {
res: c.json({ error: 'No way to sign Nostr event' }, 401),
});
}
const req = localRequest(c);
const reqEvent = await buildAuthEventTemplate(req, opts);
const resEvent = await signer.signEvent(reqEvent);
const result = await validateAuthEvent(req, resEvent, opts);
if (result.success) {
return result.data;
}
}
export { auth98Middleware, requireProof, requireRole };

View file

@ -1,17 +1,15 @@
import { AppMiddleware } from '@/app.ts';
import { PleromaConfigDB } from '@/utils/PleromaConfigDB.ts';
import { Storages } from '@/storages.ts';
import { getPleromaConfigs } from '@/utils/pleroma.ts';
let configDBCache: Promise<PleromaConfigDB> | undefined;
export const cspMiddleware = (): AppMiddleware => {
let configDBCache: Promise<PleromaConfigDB> | undefined;
return async (c, next) => {
const { conf } = c.var;
const store = await Storages.db();
const { conf, relay } = c.var;
if (!configDBCache) {
configDBCache = getPleromaConfigs(store);
configDBCache = getPleromaConfigs(relay);
}
const { host, protocol, origin } = conf.url;

View file

@ -12,8 +12,8 @@ export const logiMiddleware: MiddlewareHandler = async (c, next) => {
await next();
const end = new Date();
const delta = (end.getTime() - start.getTime()) / 1000;
const duration = (end.getTime() - start.getTime()) / 1000;
const level = c.res.status >= 500 ? 'error' : 'info';
logi({ level, ns: 'ditto.http.response', method, pathname, status: c.res.status, delta });
logi({ level, ns: 'ditto.http.response', method, pathname, status: c.res.status, duration });
};

View file

@ -1,49 +0,0 @@
import { AppMiddleware } from '@/app.ts';
import { paginationSchema } from '@/schemas/pagination.ts';
import { Storages } from '@/storages.ts';
/** Fixes compatibility with Mastodon apps that don't use `Link` headers. */
export const paginationMiddleware: AppMiddleware = async (c, next) => {
const pagination = paginationSchema.parse(c.req.query());
const {
max_id: maxId,
min_id: minId,
since,
until,
} = pagination;
if ((maxId && !until) || (minId && !since)) {
const ids: string[] = [];
if (maxId) ids.push(maxId);
if (minId) ids.push(minId);
if (ids.length) {
const store = await Storages.db();
const events = await store.query(
[{ ids, limit: ids.length }],
{ signal: c.req.raw.signal },
);
for (const event of events) {
if (!until && maxId === event.id) pagination.until = event.created_at;
if (!since && minId === event.id) pagination.since = event.created_at;
}
}
}
c.set('pagination', {
since: pagination.since,
until: pagination.until,
limit: pagination.limit,
});
c.set('listPagination', {
limit: pagination.limit,
offset: pagination.offset,
});
await next();
};

View file

@ -1,29 +0,0 @@
import { MiddlewareHandler } from '@hono/hono';
import { HTTPException } from '@hono/hono/http-exception';
import { NostrSigner } from '@nostrify/nostrify';
import { SetRequired } from 'type-fest';
/** Throw a 401 if a signer isn't set. */
export const requireSigner: MiddlewareHandler<{ Variables: { signer: NostrSigner } }> = async (c, next) => {
if (!c.get('signer')) {
throw new HTTPException(401, { message: 'No pubkey provided' });
}
await next();
};
/** Throw a 401 if a NIP-44 signer isn't set. */
export const requireNip44Signer: MiddlewareHandler<{ Variables: { signer: SetRequired<NostrSigner, 'nip44'> } }> =
async (c, next) => {
const signer = c.get('signer');
if (!signer) {
throw new HTTPException(401, { message: 'No pubkey provided' });
}
if (!signer.nip44) {
throw new HTTPException(401, { message: 'No NIP-44 signer provided' });
}
await next();
};

View file

@ -1,75 +0,0 @@
import { type DittoConf } from '@ditto/conf';
import { MiddlewareHandler } from '@hono/hono';
import { HTTPException } from '@hono/hono/http-exception';
import { NostrSigner, NSecSigner } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { ConnectSigner } from '@/signers/ConnectSigner.ts';
import { ReadOnlySigner } from '@/signers/ReadOnlySigner.ts';
import { Storages } from '@/storages.ts';
import { aesDecrypt } from '@/utils/aes.ts';
import { getTokenHash } from '@/utils/auth.ts';
/** We only accept "Bearer" type. */
const BEARER_REGEX = new RegExp(`^Bearer (${nip19.BECH32_REGEX.source})$`);
/** Make a `signer` object available to all controllers, or unset if the user isn't logged in. */
export const signerMiddleware: MiddlewareHandler<{ Variables: { signer: NostrSigner; conf: DittoConf } }> = async (
c,
next,
) => {
const { conf } = c.var;
const header = c.req.header('authorization');
const match = header?.match(BEARER_REGEX);
if (match) {
const [_, bech32] = match;
if (bech32.startsWith('token1')) {
try {
const kysely = await Storages.kysely();
const tokenHash = await getTokenHash(bech32 as `token1${string}`);
const { pubkey: userPubkey, bunker_pubkey: bunkerPubkey, nip46_sk_enc, nip46_relays } = await kysely
.selectFrom('auth_tokens')
.select(['pubkey', 'bunker_pubkey', 'nip46_sk_enc', 'nip46_relays'])
.where('token_hash', '=', tokenHash)
.executeTakeFirstOrThrow();
const nep46Seckey = await aesDecrypt(conf.seckey, nip46_sk_enc);
c.set(
'signer',
new ConnectSigner({
bunkerPubkey,
userPubkey,
signer: new NSecSigner(nep46Seckey),
relays: nip46_relays,
}),
);
} catch {
throw new HTTPException(401);
}
} else {
try {
const decoded = nip19.decode(bech32!);
switch (decoded.type) {
case 'npub':
c.set('signer', new ReadOnlySigner(decoded.data));
break;
case 'nprofile':
c.set('signer', new ReadOnlySigner(decoded.data.pubkey));
break;
case 'nsec':
c.set('signer', new NSecSigner(decoded.data));
break;
}
} catch {
throw new HTTPException(401);
}
}
}
await next();
};

View file

@ -1,28 +0,0 @@
import { MiddlewareHandler } from '@hono/hono';
import { NostrSigner, NStore } from '@nostrify/nostrify';
import { UserStore } from '@/storages/UserStore.ts';
import { Storages } from '@/storages.ts';
export const requireStore: MiddlewareHandler<{ Variables: { store: NStore } }> = async (c, next) => {
if (!c.get('store')) {
throw new Error('Store is required');
}
await next();
};
/** Store middleware. */
export const storeMiddleware: MiddlewareHandler<{ Variables: { signer?: NostrSigner; store: NStore } }> = async (
c,
next,
) => {
const pubkey = await c.get('signer')?.getPublicKey();
if (pubkey) {
const store = new UserStore(pubkey, await Storages.admin());
c.set('store', store);
} else {
c.set('store', await Storages.admin());
}
await next();
};

View file

@ -1,43 +1,36 @@
import { CashuMint, CashuWallet, getEncodedToken, type Proof } from '@cashu/cashu-ts';
import { type DittoConf } from '@ditto/conf';
import { MiddlewareHandler } from '@hono/hono';
import { HTTPException } from '@hono/hono/http-exception';
import { NostrEvent, NostrFilter, NostrSigner, NSchema as n, NStore } from '@nostrify/nostrify';
import { SetRequired } from 'type-fest';
import { NostrEvent, NostrFilter, NSchema as n, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { errorJson } from '@/utils/log.ts';
import { createEvent } from '@/utils/api.ts';
import { validateAndParseWallet } from '@/utils/cashu.ts';
import { proofSchema } from '@/schemas/cashu.ts';
import { MiddlewareHandler } from '@hono/hono/types';
/**
* Swap nutzaps into the wallet (create new events) if the user has a wallet; otherwise, just fall through.
* Errors are only thrown if the 'signer' and 'store' middlewares are not set.
*/
export const swapNutzapsMiddleware: MiddlewareHandler<
{ Variables: { signer: SetRequired<NostrSigner, 'nip44'>; store: NStore; conf: DittoConf } }
> = async (c, next) => {
const { conf } = c.var;
const signer = c.get('signer');
const store = c.get('store');
export const swapNutzapsMiddleware: MiddlewareHandler = async (c, next) => {
const { conf, relay, user, signal } = c.var;
if (!signer) {
if (!user) {
throw new HTTPException(401, { message: 'No pubkey provided' });
}
if (!signer.nip44) {
if (!user.signer.nip44) {
throw new HTTPException(401, { message: 'No NIP-44 signer provided' });
}
if (!store) {
if (!relay) {
throw new HTTPException(401, { message: 'No store provided' });
}
const { signal } = c.req.raw;
const pubkey = await signer.getPublicKey();
const pubkey = await user.signer.getPublicKey();
const { data, error } = await validateAndParseWallet(store, signer, pubkey, { signal });
const { data, error } = await validateAndParseWallet(relay, user.signer, pubkey, { signal });
if (error && error.code === 'wallet-not-found') {
await next();
@ -52,12 +45,12 @@ export const swapNutzapsMiddleware: MiddlewareHandler<
const nutzapsFilter: NostrFilter = { kinds: [9321], '#p': [pubkey], '#u': mints };
const lastRedeemedNutzap = await getLastRedeemedNutzap(store, pubkey, { signal });
const lastRedeemedNutzap = await getLastRedeemedNutzap(relay, pubkey, { signal });
if (lastRedeemedNutzap) {
nutzapsFilter.since = lastRedeemedNutzap.created_at;
}
const mintsToProofs = await getMintsToProofs(store, nutzapsFilter, conf.relay, { signal });
const mintsToProofs = await getMintsToProofs(relay, nutzapsFilter, conf.relay, { signal });
for (const mint of Object.keys(mintsToProofs)) {
try {
@ -68,7 +61,7 @@ export const swapNutzapsMiddleware: MiddlewareHandler<
const unspentProofs = await createEvent({
kind: 7375,
content: await signer.nip44.encrypt(
content: await user.signer.nip44.encrypt(
pubkey,
JSON.stringify({
mint,
@ -83,7 +76,7 @@ export const swapNutzapsMiddleware: MiddlewareHandler<
await createEvent({
kind: 7376,
content: await signer.nip44.encrypt(
content: await user.signer.nip44.encrypt(
pubkey,
JSON.stringify([
['direction', 'in'],

View file

@ -1,8 +1,7 @@
import { DeepLTranslator, LibreTranslateTranslator } from '@ditto/translators';
import { safeFetch } from '@soapbox/safe-fetch';
import { AppMiddleware } from '@/app.ts';
import { DeepLTranslator } from '@/translators/DeepLTranslator.ts';
import { LibreTranslateTranslator } from '@/translators/LibreTranslateTranslator.ts';
/** Set the translator used for translating posts. */
export const translatorMiddleware: AppMiddleware = async (c, next) => {

View file

@ -1,14 +1,13 @@
import { DenoUploader, IPFSUploader, S3Uploader } from '@ditto/uploaders';
import { BlossomUploader, NostrBuildUploader } from '@nostrify/nostrify/uploaders';
import { safeFetch } from '@soapbox/safe-fetch';
import { AppMiddleware } from '@/app.ts';
import { DenoUploader } from '@/uploaders/DenoUploader.ts';
import { IPFSUploader } from '@/uploaders/IPFSUploader.ts';
import { S3Uploader } from '@/uploaders/S3Uploader.ts';
/** Set an uploader for the user. */
export const uploaderMiddleware: AppMiddleware = async (c, next) => {
const { signer, conf } = c.var;
const { user, conf } = c.var;
const signer = user?.signer;
switch (conf.uploader) {
case 's3':

View file

@ -1,38 +0,0 @@
import { Semaphore } from '@core/asyncutil';
import { pipelineEncounters } from '@/caches/pipelineEncounters.ts';
import { Conf } from '@/config.ts';
import * as pipeline from '@/pipeline.ts';
import { Storages } from '@/storages.ts';
import { logi } from '@soapbox/logi';
const sem = new Semaphore(1);
export async function startNotify(): Promise<void> {
const { listen } = await Storages.database();
const store = await Storages.db();
listen('nostr_event', (id) => {
if (pipelineEncounters.has(id)) {
logi({ level: 'debug', ns: 'ditto.notify', id, skipped: true });
return;
}
logi({ level: 'debug', ns: 'ditto.notify', id, skipped: false });
sem.lock(async () => {
try {
const signal = AbortSignal.timeout(Conf.db.timeouts.default);
const [event] = await store.query([{ ids: [id], limit: 1 }], { signal });
if (event) {
logi({ level: 'debug', ns: 'ditto.event', source: 'notify', id: event.id, kind: event.kind });
await pipeline.handleEvent(event, { source: 'notify', signal });
}
} catch {
// Ignore
}
});
});
}

View file

@ -1,401 +0,0 @@
import { DittoTables } from '@ditto/db';
import { pipelineEventsCounter, policyEventsCounter, webPushNotificationsCounter } from '@ditto/metrics';
import { NKinds, NostrEvent, NSchema as n } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { Kysely, UpdateObject } from 'kysely';
import tldts from 'tldts';
import { z } from 'zod';
import { pipelineEncounters } from '@/caches/pipelineEncounters.ts';
import { Conf } from '@/config.ts';
import { DittoPush } from '@/DittoPush.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { RelayError } from '@/RelayError.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { Storages } from '@/storages.ts';
import { eventAge, Time } from '@/utils.ts';
import { getAmount } from '@/utils/bolt11.ts';
import { faviconCache } from '@/utils/favicon.ts';
import { errorJson } from '@/utils/log.ts';
import { nip05Cache } from '@/utils/nip05.ts';
import { parseNoteContent, stripimeta } from '@/utils/note.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { updateStats } from '@/utils/stats.ts';
import { getTagSet } from '@/utils/tags.ts';
import { unfurlCardCached } from '@/utils/unfurl.ts';
import { renderWebPushNotification } from '@/views/mastodon/push.ts';
import { policyWorker } from '@/workers/policy.ts';
import { verifyEventWorker } from '@/workers/verify.ts';
interface PipelineOpts {
signal: AbortSignal;
source: 'relay' | 'api' | 'firehose' | 'pipeline' | 'notify' | 'internal';
}
/**
* Common pipeline function to process (and maybe store) events.
* It is idempotent, so it can be called multiple times for the same event.
*/
async function handleEvent(event: DittoEvent, opts: PipelineOpts): Promise<void> {
// Skip events that have already been encountered.
if (pipelineEncounters.get(event.id)) {
throw new RelayError('duplicate', 'already have this event');
}
// Reject events that are too far in the future.
if (eventAge(event) < -Time.minutes(1)) {
throw new RelayError('invalid', 'event too far in the future');
}
// Integer max value for Postgres.
if (event.kind >= 2_147_483_647) {
throw new RelayError('invalid', 'event kind too large');
}
// The only point of ephemeral events is to stream them,
// so throw an error if we're not even going to do that.
if (NKinds.ephemeral(event.kind) && !isFresh(event)) {
throw new RelayError('invalid', 'event too old');
}
// Block NIP-70 events, because we have no way to `AUTH`.
if (isProtectedEvent(event)) {
throw new RelayError('invalid', 'protected event');
}
// Validate the event's signature.
if (!(await verifyEventWorker(event))) {
throw new RelayError('invalid', 'invalid signature');
}
// Recheck encountered after async ops.
if (pipelineEncounters.has(event.id)) {
throw new RelayError('duplicate', 'already have this event');
}
// Set the event as encountered after verifying the signature.
pipelineEncounters.set(event.id, true);
// Log the event.
logi({ level: 'debug', ns: 'ditto.event', source: 'pipeline', id: event.id, kind: event.kind });
pipelineEventsCounter.inc({ kind: event.kind });
// NIP-46 events get special treatment.
// They are exempt from policies and other side-effects, and should be streamed out immediately.
// If streaming fails, an error should be returned.
if (event.kind === 24133) {
await streamOut(event);
return;
}
// Ensure the event doesn't violate the policy.
if (event.pubkey !== Conf.pubkey) {
await policyFilter(event, opts.signal);
}
// Prepare the event for additional checks.
// FIXME: This is kind of hacky. Should be reorganized to fetch only what's needed for each stage.
await hydrateEvent(event, opts.signal);
// Ensure that the author is not banned.
const n = getTagSet(event.user?.tags ?? [], 'n');
if (n.has('disabled')) {
throw new RelayError('blocked', 'author is blocked');
}
// Ephemeral events must throw if they are not streamed out.
if (NKinds.ephemeral(event.kind)) {
await Promise.all([
streamOut(event),
webPush(event),
]);
return;
}
// Events received through notify are thought to already be in the database, so they only need to be streamed.
if (opts.source === 'notify') {
await Promise.all([
streamOut(event),
webPush(event),
]);
return;
}
const kysely = await Storages.kysely();
try {
await storeEvent(purifyEvent(event), opts.signal);
} finally {
// This needs to run in steps, and should not block the API from responding.
Promise.allSettled([
handleZaps(kysely, event),
updateAuthorData(event, opts.signal),
prewarmLinkPreview(event, opts.signal),
generateSetEvents(event),
])
.then(() =>
Promise.allSettled([
streamOut(event),
webPush(event),
])
);
}
}
async function policyFilter(event: NostrEvent, signal: AbortSignal): Promise<void> {
try {
const result = await policyWorker.call(event, signal);
const [, , ok, reason] = result;
logi({ level: 'debug', ns: 'ditto.policy', id: event.id, kind: event.kind, ok, reason });
policyEventsCounter.inc({ ok: String(ok) });
RelayError.assert(result);
} catch (e) {
if (e instanceof RelayError) {
throw e;
} else {
logi({ level: 'error', ns: 'ditto.policy', id: event.id, kind: event.kind, error: errorJson(e) });
throw new RelayError('blocked', 'policy error');
}
}
}
/** Check whether the event has a NIP-70 `-` tag. */
function isProtectedEvent(event: NostrEvent): boolean {
return event.tags.some(([name]) => name === '-');
}
/** Hydrate the event with the user, if applicable. */
async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise<void> {
await hydrateEvents({ events: [event], store: await Storages.db(), signal });
}
/** Maybe store the event, if eligible. */
async function storeEvent(event: NostrEvent, signal?: AbortSignal): Promise<undefined> {
if (NKinds.ephemeral(event.kind)) return;
const store = await Storages.db();
try {
await store.transaction(async (store, kysely) => {
await updateStats({ event, store, kysely });
await store.event(event, { signal });
});
} catch (e) {
// If the failure is only because of updateStats (which runs first), insert the event anyway.
// We can't catch this in the transaction because the error aborts the transaction on the Postgres side.
if (e instanceof Error && e.message.includes('event_stats' satisfies keyof DittoTables)) {
await store.event(event, { signal });
} else {
throw e;
}
}
}
/** Parse kind 0 metadata and track indexes in the database. */
async function updateAuthorData(event: NostrEvent, signal: AbortSignal): Promise<void> {
if (event.kind !== 0) return;
// Parse metadata.
const metadata = n.json().pipe(n.metadata()).catch({}).safeParse(event.content);
if (!metadata.success) return;
const { name, nip05 } = metadata.data;
const kysely = await Storages.kysely();
const updates: UpdateObject<DittoTables, 'author_stats'> = {};
const authorStats = await kysely
.selectFrom('author_stats')
.selectAll()
.where('pubkey', '=', event.pubkey)
.executeTakeFirst();
const lastVerified = authorStats?.nip05_last_verified_at;
const eventNewer = !lastVerified || event.created_at > lastVerified;
try {
if (nip05 !== authorStats?.nip05 && eventNewer || !lastVerified) {
if (nip05) {
const tld = tldts.parse(nip05);
if (tld.isIcann && !tld.isIp && !tld.isPrivate) {
const pointer = await nip05Cache.fetch(nip05.toLowerCase(), { signal });
if (pointer.pubkey === event.pubkey) {
updates.nip05 = nip05;
updates.nip05_domain = tld.domain;
updates.nip05_hostname = tld.hostname;
updates.nip05_last_verified_at = event.created_at;
}
}
} else {
updates.nip05 = null;
updates.nip05_domain = null;
updates.nip05_hostname = null;
updates.nip05_last_verified_at = event.created_at;
}
}
} catch {
// Fallthrough.
}
// Fetch favicon.
const domain = nip05?.split('@')[1]?.toLowerCase();
if (domain) {
try {
await faviconCache.fetch(domain, { signal });
} catch {
// Fallthrough.
}
}
const search = [name, nip05].filter(Boolean).join(' ').trim();
if (search !== authorStats?.search) {
updates.search = search;
}
if (Object.keys(updates).length) {
await kysely.insertInto('author_stats')
.values({
pubkey: event.pubkey,
followers_count: 0,
following_count: 0,
notes_count: 0,
search,
...updates,
})
.onConflict((oc) => oc.column('pubkey').doUpdateSet(updates))
.execute();
}
}
async function prewarmLinkPreview(event: NostrEvent, signal: AbortSignal): Promise<void> {
const { firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), []);
if (firstUrl) {
await unfurlCardCached(firstUrl, signal);
}
}
/** Determine if the event is being received in a timely manner. */
function isFresh(event: NostrEvent): boolean {
return eventAge(event) < Time.minutes(1);
}
/** Distribute the event through active subscriptions. */
async function streamOut(event: NostrEvent): Promise<void> {
if (!isFresh(event)) {
throw new RelayError('invalid', 'event too old');
}
const pubsub = await Storages.pubsub();
await pubsub.event(event);
}
async function webPush(event: NostrEvent): Promise<void> {
if (!isFresh(event)) {
throw new RelayError('invalid', 'event too old');
}
const kysely = await Storages.kysely();
const pubkeys = getTagSet(event.tags, 'p');
if (!pubkeys.size) {
return;
}
const rows = await kysely
.selectFrom('push_subscriptions')
.selectAll()
.where('pubkey', 'in', [...pubkeys])
.execute();
for (const row of rows) {
const viewerPubkey = row.pubkey;
if (viewerPubkey === event.pubkey) {
continue; // Don't notify authors about their own events.
}
const message = await renderWebPushNotification(event, viewerPubkey);
if (!message) {
continue;
}
const subscription = {
endpoint: row.endpoint,
keys: {
auth: row.auth,
p256dh: row.p256dh,
},
};
await DittoPush.push(subscription, message);
webPushNotificationsCounter.inc({ type: message.notification_type });
}
}
async function generateSetEvents(event: NostrEvent): Promise<void> {
const tagsAdmin = event.tags.some(([name, value]) => ['p', 'P'].includes(name) && value === Conf.pubkey);
if (event.kind === 1984 && tagsAdmin) {
const signer = new AdminSigner();
const rel = await signer.signEvent({
kind: 30383,
content: '',
tags: [
['d', event.id],
['p', event.pubkey],
['k', '1984'],
['n', 'open'],
...[...getTagSet(event.tags, 'p')].map((pubkey) => ['P', pubkey]),
...[...getTagSet(event.tags, 'e')].map((pubkey) => ['e', pubkey]),
],
created_at: Math.floor(Date.now() / 1000),
});
await handleEvent(rel, { source: 'pipeline', signal: AbortSignal.timeout(1000) });
}
if (event.kind === 3036 && tagsAdmin) {
const signer = new AdminSigner();
const rel = await signer.signEvent({
kind: 30383,
content: '',
tags: [
['d', event.id],
['p', event.pubkey],
['k', '3036'],
['n', 'pending'],
],
created_at: Math.floor(Date.now() / 1000),
});
await handleEvent(rel, { source: 'pipeline', signal: AbortSignal.timeout(1000) });
}
}
/** Stores the event in the 'event_zaps' table */
async function handleZaps(kysely: Kysely<DittoTables>, event: NostrEvent) {
if (event.kind !== 9735) return;
const zapRequestString = event?.tags?.find(([name]) => name === 'description')?.[1];
if (!zapRequestString) return;
const zapRequest = n.json().pipe(n.event()).optional().catch(undefined).parse(zapRequestString);
if (!zapRequest) return;
const amountSchema = z.coerce.number().int().nonnegative().catch(0);
const amount_millisats = amountSchema.parse(getAmount(event?.tags.find(([name]) => name === 'bolt11')?.[1]));
if (!amount_millisats || amount_millisats < 1) return;
const zappedEventId = zapRequest.tags.find(([name]) => name === 'e')?.[1];
if (!zappedEventId) return;
try {
await kysely.insertInto('event_zaps').values({
receipt_id: event.id,
target_event_id: zappedEventId,
sender_pubkey: zapRequest.pubkey,
amount_millisats,
comment: zapRequest.content,
}).execute();
} catch {
// receipt_id is unique, do nothing
}
}
export { handleEvent, handleZaps, updateAuthorData };

View file

@ -1,76 +1,55 @@
import { DittoDB } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { type DittoRelation } from '@/interfaces/DittoFilter.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { fallbackAuthor } from '@/utils.ts';
import { findReplyTag, getTagSet } from '@/utils/tags.ts';
interface GetEventOpts {
/** Signal to abort the request. */
db: DittoDB;
conf: DittoConf;
relay: NStore;
signal?: AbortSignal;
/** Event kind. */
kind?: number;
/** @deprecated Relations to include on the event. */
relations?: DittoRelation[];
}
/**
* Get a Nostr event by its ID.
* @deprecated Use `store.query` directly.
* @deprecated Use `relay.query` directly.
*/
const getEvent = async (
id: string,
opts: GetEventOpts = {},
): Promise<DittoEvent | undefined> => {
const store = await Storages.db();
const { kind, signal = AbortSignal.timeout(1000) } = opts;
async function getEvent(id: string, opts: GetEventOpts): Promise<DittoEvent | undefined> {
const filter: NostrFilter = { ids: [id], limit: 1 };
if (kind) {
filter.kinds = [kind];
}
return await store.query([filter], { limit: 1, signal })
.then((events) => hydrateEvents({ events, store, signal }))
.then(([event]) => event);
};
const events = await opts.relay.query([filter], opts);
const [event] = await hydrateEvents({ ...opts, events });
return event;
}
/**
* Get a Nostr `set_metadata` event for a user's pubkey.
* @deprecated Use `store.query` directly.
* @deprecated Use `relay.query` directly.
*/
async function getAuthor(pubkey: string, opts: GetEventOpts = {}): Promise<NostrEvent | undefined> {
const store = await Storages.db();
const { signal = AbortSignal.timeout(1000) } = opts;
const events = await store.query([{ authors: [pubkey], kinds: [0], limit: 1 }], { limit: 1, signal });
const event = events[0] ?? fallbackAuthor(pubkey);
await hydrateEvents({ events: [event], store, signal });
async function getAuthor(pubkey: string, opts: GetEventOpts): Promise<NostrEvent | undefined> {
const events = await opts.relay.query([{ authors: [pubkey], kinds: [0], limit: 1 }], opts);
const [event] = await hydrateEvents({ ...opts, events });
return event;
}
/** Get users the given pubkey follows. */
const getFollows = async (pubkey: string, signal?: AbortSignal): Promise<NostrEvent | undefined> => {
const store = await Storages.db();
const [event] = await store.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { limit: 1, signal });
const getFollows = async (relay: NStore, pubkey: string, signal?: AbortSignal): Promise<NostrEvent | undefined> => {
const [event] = await relay.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { signal });
return event;
};
/** Get pubkeys the user follows. */
async function getFollowedPubkeys(pubkey: string, signal?: AbortSignal): Promise<Set<string>> {
const event = await getFollows(pubkey, signal);
async function getFollowedPubkeys(relay: NStore, pubkey: string, signal?: AbortSignal): Promise<Set<string>> {
const event = await getFollows(relay, pubkey, signal);
if (!event) return new Set();
return getTagSet(event.tags, 'p');
}
/** Get pubkeys the user follows, including the user's own pubkey. */
async function getFeedPubkeys(pubkey: string): Promise<Set<string>> {
const authors = await getFollowedPubkeys(pubkey);
async function getFeedPubkeys(relay: NStore, pubkey: string): Promise<Set<string>> {
const authors = await getFollowedPubkeys(relay, pubkey);
return authors.add(pubkey);
}
@ -95,34 +74,11 @@ async function getAncestors(store: NStore, event: NostrEvent, result: NostrEvent
async function getDescendants(
store: NStore,
event: NostrEvent,
signal = AbortSignal.timeout(2000),
signal?: AbortSignal,
): Promise<NostrEvent[]> {
return await store
.query([{ kinds: [1], '#e': [event.id], since: event.created_at, limit: 200 }], { signal })
.then((events) => events.filter(({ tags }) => findReplyTag(tags)?.[1] === event.id));
}
/** Returns whether the pubkey is followed by a local user. */
async function isLocallyFollowed(pubkey: string): Promise<boolean> {
const { host } = Conf.url;
const store = await Storages.db();
const [event] = await store.query(
[{ kinds: [3], '#p': [pubkey], search: `domain:${host}`, limit: 1 }],
{ limit: 1 },
);
return Boolean(event);
}
export {
getAncestors,
getAuthor,
getDescendants,
getEvent,
getFeedPubkeys,
getFollowedPubkeys,
getFollows,
isLocallyFollowed,
};
export { getAncestors, getAuthor, getDescendants, getEvent, getFeedPubkeys, getFollowedPubkeys, getFollows };
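Each remaining helper takes its dependencies explicitly, which inside a controller usually means spreading c.var. A short usage sketch in a hypothetical controller; the route parameter name is illustrative only:

    // Sketch: calling the refactored query helpers from a controller.
    const exampleController: AppController = async (c) => {
      const id = c.req.param('id'); // hypothetical route param
      const { relay } = c.var;

      const event = await getEvent(id, c.var); // c.var supplies db, conf, relay, signal
      const follows = event ? await getFollowedPubkeys(relay, event.pubkey) : new Set<string>();

      return c.json({ found: Boolean(event), follows: follows.size });
    };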

View file

@ -13,18 +13,6 @@ function filteredArray<T extends z.ZodTypeAny>(schema: T) {
));
}
/** https://developer.mozilla.org/en-US/docs/Glossary/Base64#the_unicode_problem */
const decode64Schema = z.string().transform((value, ctx) => {
try {
const binString = atob(value);
const bytes = Uint8Array.from(binString, (m) => m.codePointAt(0)!);
return new TextDecoder().decode(bytes);
} catch (_e) {
ctx.addIssue({ code: z.ZodIssueCode.custom, message: 'Invalid base64', fatal: true });
return z.NEVER;
}
});
/** Parses a hashtag, eg `#yolo`. */
const hashtagSchema = z.string().regex(/^\w{1,30}$/);
@ -34,16 +22,6 @@ const hashtagSchema = z.string().regex(/^\w{1,30}$/);
*/
const safeUrlSchema = z.string().max(2048).url();
/** WebSocket URL. */
const wsUrlSchema = z.string().refine((val) => {
try {
const { protocol } = new URL(val);
return protocol === 'wss:' || protocol === 'ws:';
} catch {
return false;
}
}, 'Invalid WebSocket URL');
/** https://github.com/colinhacks/zod/issues/1630#issuecomment-1365983831 */
const booleanParamSchema = z.enum(['true', 'false']).transform((value) => value === 'true');
@ -96,7 +74,6 @@ const walletSchema = z.object({
export {
booleanParamSchema,
decode64Schema,
fileSchema,
filteredArray,
hashtagSchema,
@ -106,5 +83,4 @@ export {
safeUrlSchema,
sizesSchema,
walletSchema,
wsUrlSchema,
};

View file

@ -1,14 +1,8 @@
import { NSchema as n } from '@nostrify/nostrify';
import { getEventHash, verifyEvent } from 'nostr-tools';
import { z } from 'zod';
import { safeUrlSchema, sizesSchema } from '@/schema.ts';
/** Nostr event schema that also verifies the event's signature. */
const signedEventSchema = n.event()
.refine((event) => event.id === getEventHash(event), 'Event ID does not match hash')
.refine(verifyEvent, 'Event signature is invalid');
/** Kind 0 standardized fields extended with Ditto custom fields. */
const metadataSchema = n.metadata().and(z.object({
fields: z.tuple([z.string(), z.string()]).array().optional().catch(undefined),
@ -68,12 +62,4 @@ const emojiTagSchema = z.tuple([z.literal('emoji'), z.string(), z.string().url()
/** NIP-30 custom emoji tag. */
type EmojiTag = z.infer<typeof emojiTagSchema>;
export {
type EmojiTag,
emojiTagSchema,
metadataSchema,
relayInfoDocSchema,
screenshotsSchema,
serverMetaSchema,
signedEventSchema,
};
export { type EmojiTag, emojiTagSchema, metadataSchema, relayInfoDocSchema, screenshotsSchema, serverMetaSchema };

View file

@ -1,9 +0,0 @@
import { NSecSigner } from '@nostrify/nostrify';
import { Conf } from '@/config.ts';
/** Sign events as the Ditto server. */
export class AdminSigner extends NSecSigner {
constructor() {
super(Conf.seckey);
}
}

View file

@ -1,13 +1,12 @@
// deno-lint-ignore-file require-await
import { HTTPException } from '@hono/hono/http-exception';
import { NConnectSigner, NostrEvent, NostrSigner } from '@nostrify/nostrify';
import { Storages } from '@/storages.ts';
import { NConnectSigner, NostrEvent, NostrSigner, NRelay } from '@nostrify/nostrify';
interface ConnectSignerOpts {
bunkerPubkey: string;
userPubkey: string;
signer: NostrSigner;
relay: NRelay;
relays?: string[];
}
@ -17,27 +16,23 @@ interface ConnectSignerOpts {
* Simple extension of nostrify's `NConnectSigner`, with our options to keep it DRY.
*/
export class ConnectSigner implements NostrSigner {
private signer: Promise<NConnectSigner>;
private signer: NConnectSigner;
constructor(private opts: ConnectSignerOpts) {
this.signer = this.init(opts.signer);
}
const { relay, signer } = this.opts;
async init(signer: NostrSigner): Promise<NConnectSigner> {
return new NConnectSigner({
this.signer = new NConnectSigner({
encryption: 'nip44',
pubkey: this.opts.bunkerPubkey,
// TODO: use a remote relay for `nprofile` signing (if present and `Conf.relay` isn't already in the list)
relay: await Storages.pubsub(),
relay,
signer,
timeout: 60_000,
});
}
async signEvent(event: Omit<NostrEvent, 'id' | 'pubkey' | 'sig'>): Promise<NostrEvent> {
const signer = await this.signer;
try {
return await signer.signEvent(event);
return await this.signer.signEvent(event);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, { message: 'The event was not signed quickly enough' });
@ -49,9 +44,8 @@ export class ConnectSigner implements NostrSigner {
readonly nip04 = {
encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
const signer = await this.signer;
try {
return await signer.nip04.encrypt(pubkey, plaintext);
return await this.signer.nip04.encrypt(pubkey, plaintext);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, {
@ -64,9 +58,8 @@ export class ConnectSigner implements NostrSigner {
},
decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
const signer = await this.signer;
try {
return await signer.nip04.decrypt(pubkey, ciphertext);
return await this.signer.nip04.decrypt(pubkey, ciphertext);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, {
@ -81,9 +74,8 @@ export class ConnectSigner implements NostrSigner {
readonly nip44 = {
encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
const signer = await this.signer;
try {
return await signer.nip44.encrypt(pubkey, plaintext);
return await this.signer.nip44.encrypt(pubkey, plaintext);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, {
@ -96,9 +88,8 @@ export class ConnectSigner implements NostrSigner {
},
decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
const signer = await this.signer;
try {
return await signer.nip44.decrypt(pubkey, ciphertext);
return await this.signer.nip44.decrypt(pubkey, ciphertext);
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
throw new HTTPException(408, {

View file

@ -1,17 +0,0 @@
// Starts background services that must be running before the HTTP server comes up.
import { Conf } from '@/config.ts';
import { cron } from '@/cron.ts';
import { startFirehose } from '@/firehose.ts';
import { startNotify } from '@/notify.ts';
if (Conf.firehoseEnabled) {
startFirehose();
}
if (Conf.notifyEnabled) {
startNotify();
}
if (Conf.cronEnabled) {
cron();
}

View file

@ -1,140 +0,0 @@
// deno-lint-ignore-file require-await
import { type DittoDatabase, DittoDB } from '@ditto/db';
import { internalSubscriptionsSizeGauge } from '@ditto/metrics';
import { logi } from '@soapbox/logi';
import { Conf } from '@/config.ts';
import { wsUrlSchema } from '@/schema.ts';
import { AdminStore } from '@/storages/AdminStore.ts';
import { EventsDB } from '@/storages/EventsDB.ts';
import { SearchStore } from '@/storages/search-store.ts';
import { InternalRelay } from '@/storages/InternalRelay.ts';
import { NPool, NRelay1 } from '@nostrify/nostrify';
import { getRelays } from '@/utils/outbox.ts';
import { seedZapSplits } from '@/utils/zap-split.ts';
export class Storages {
private static _db: Promise<EventsDB> | undefined;
private static _database: Promise<DittoDatabase> | undefined;
private static _admin: Promise<AdminStore> | undefined;
private static _client: Promise<NPool<NRelay1>> | undefined;
private static _pubsub: Promise<InternalRelay> | undefined;
private static _search: Promise<SearchStore> | undefined;
public static async database(): Promise<DittoDatabase> {
if (!this._database) {
this._database = (async () => {
const db = DittoDB.create(Conf.databaseUrl, {
poolSize: Conf.pg.poolSize,
debug: Conf.pgliteDebug,
});
await DittoDB.migrate(db.kysely);
return db;
})();
}
return this._database;
}
public static async kysely(): Promise<DittoDatabase['kysely']> {
const { kysely } = await this.database();
return kysely;
}
/** SQL database to store events this Ditto server cares about. */
public static async db(): Promise<EventsDB> {
if (!this._db) {
this._db = (async () => {
const kysely = await this.kysely();
const store = new EventsDB({ kysely, pubkey: Conf.pubkey, timeout: Conf.db.timeouts.default });
await seedZapSplits(store);
return store;
})();
}
return this._db;
}
/** Admin user storage. */
public static async admin(): Promise<AdminStore> {
if (!this._admin) {
this._admin = Promise.resolve(new AdminStore(await this.db()));
}
return this._admin;
}
/** Internal pubsub relay between controllers and the pipeline. */
public static async pubsub(): Promise<InternalRelay> {
if (!this._pubsub) {
this._pubsub = Promise.resolve(new InternalRelay({ gauge: internalSubscriptionsSizeGauge }));
}
return this._pubsub;
}
/** Relay pool storage. */
public static async client(): Promise<NPool<NRelay1>> {
if (!this._client) {
this._client = (async () => {
const db = await this.db();
const [relayList] = await db.query([
{ kinds: [10002], authors: [Conf.pubkey], limit: 1 },
]);
const tags = relayList?.tags ?? [];
const activeRelays = tags.reduce((acc, [name, url, marker]) => {
const valid = wsUrlSchema.safeParse(url).success;
if (valid && name === 'r' && (!marker || marker === 'write')) {
acc.push(url);
}
return acc;
}, []);
logi({
level: 'info',
ns: 'ditto.pool',
msg: `connecting to ${activeRelays.length} relays`,
relays: activeRelays,
});
return new NPool({
open(url) {
return new NRelay1(url, {
// Skip event verification (it's done in the pipeline).
verifyEvent: () => true,
log(log) {
logi(log);
},
});
},
reqRouter: async (filters) => {
return new Map(activeRelays.map((relay) => {
return [relay, filters];
}));
},
eventRouter: async (event) => {
const relaySet = await getRelays(await Storages.db(), event.pubkey);
relaySet.delete(Conf.relay);
const relays = [...relaySet].slice(0, 4);
return relays;
},
});
})();
}
return this._client;
}
/** Storage to use for remote search. */
public static async search(): Promise<SearchStore> {
if (!this._search) {
this._search = Promise.resolve(
new SearchStore({
relay: Conf.searchRelay,
fallback: await this.db(),
}),
);
}
return this._search;
}
}
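
For reference, the removed Storages class memoizes promises so each backend is initialized exactly once even when several callers race. A minimal generic sketch of that pattern, reduced to a helper function (the `openDatabase` call below is hypothetical):

function lazy<T>(init: () => Promise<T>): () => Promise<T> {
  let promise: Promise<T> | undefined;
  return () => {
    // Memoize the in-flight promise, not the resolved value, so concurrent
    // callers share a single initialization.
    promise ??= init();
    return promise;
  };
}

// Usage (assumed): all callers get the same instance.
// const getDb = lazy(() => openDatabase());
// const [a, b] = await Promise.all([getDb(), getDb()]); // same instance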

View file

@ -1,41 +0,0 @@
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { Conf } from '@/config.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getTagSet } from '@/utils/tags.ts';
/** A store that prevents banned users from being displayed. */
export class AdminStore implements NStore {
constructor(private store: NStore) {}
async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise<void> {
return await this.store.event(event, opts);
}
async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise<DittoEvent[]> {
const events = await this.store.query(filters, opts);
const pubkeys = new Set(events.map((event) => event.pubkey));
const users = await this.store.query([{
kinds: [30382],
authors: [Conf.pubkey],
'#d': [...pubkeys],
limit: pubkeys.size,
}]);
return events.filter((event) => {
const user = users.find(
({ kind, pubkey, tags }) =>
kind === 30382 && pubkey === Conf.pubkey && tags.find(([name]) => name === 'd')?.[1] === event.pubkey,
);
const n = getTagSet(user?.tags ?? [], 'n');
if (n.has('disabled')) {
return false;
}
return true;
});
}
}
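
A hedged sketch of how this removed AdminStore was typically used: wrap any NStore, and events from authors the admin flagged as disabled (via `n` tags on kind 30382 records) drop out of query results. MockRelay comes from @nostrify/nostrify/test; the AdminStore import path reflects the pre-change layout.

import { MockRelay } from '@nostrify/nostrify/test';
import { AdminStore } from '@/storages/AdminStore.ts';

const relay = new MockRelay();
const store = new AdminStore(relay);

// Events pass through unchanged unless the admin's kind 30382 record for the
// author carries an ['n', 'disabled'] tag.
const visible = await store.query([{ kinds: [1], limit: 20 }]);
console.log(visible.length);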

View file

@ -0,0 +1,60 @@
import { logi } from '@soapbox/logi';
import { NostrEvent, NostrFilter, NostrRelayCLOSED, NostrRelayEOSE, NostrRelayEVENT, NRelay } from '@nostrify/nostrify';
import { errorJson } from '@/utils/log.ts';
import { purifyEvent } from '@/utils/purify.ts';
interface DittoAPIStoreOpts {
pool: NRelay;
relay: NRelay;
}
/**
* Store used by Ditto's Mastodon API implementation.
 * It wraps the local relay store and additionally publishes events to the wider Nostr network through the pool.
*/
export class DittoAPIStore implements NRelay {
private ns = 'ditto.api.store';
constructor(private opts: DittoAPIStoreOpts) {}
req(
filters: NostrFilter[],
opts?: { signal?: AbortSignal },
): AsyncIterable<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED> {
const { relay } = this.opts;
return relay.req(filters, opts);
}
query(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise<NostrEvent[]> {
const { relay } = this.opts;
return relay.query(filters, opts);
}
async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise<void> {
const { pool, relay } = this.opts;
const { id, kind } = event;
await relay.event(event, opts);
(async () => {
try {
// `purifyEvent` strips Ditto's hydrated fields so only a plain Nostr event reaches outside relays.
await pool.event(purifyEvent(event), opts);
} catch (e) {
logi({ level: 'error', ns: this.ns, source: 'publish', id, kind, error: errorJson(e) });
}
})();
}
async close(): Promise<void> {
const { pool, relay } = this.opts;
await pool.close();
await relay.close();
}
[Symbol.asyncDispose](): Promise<void> {
return this.close();
}
}
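
A rough usage sketch of the new DittoAPIStore. The import path, `localRelay`, `pool`, and `signedEvent` are assumptions; in practice the relay would be something like a DittoPgStore and the pool a DittoPool.

import type { NostrEvent, NRelay } from '@nostrify/nostrify';
import { DittoAPIStore } from '@/storages/DittoAPIStore.ts';

async function example(localRelay: NRelay, pool: NRelay, signedEvent: NostrEvent) {
  const api = new DittoAPIStore({ relay: localRelay, pool });

  // Writes are stored locally first, then re-published (purified) to the wider
  // network without blocking the caller; reads are served by the local relay.
  await api.event(signedEvent);
  return await api.query([{ kinds: [1], limit: 10 }]);
}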

View file

@ -1,12 +1,41 @@
import { assertEquals, assertRejects } from '@std/assert';
import { NostrRelayMsg } from '@nostrify/nostrify';
import { genEvent } from '@nostrify/nostrify/test';
import { generateSecretKey } from 'nostr-tools';
import { RelayError } from '@/RelayError.ts';
import { eventFixture, genEvent } from '@/test.ts';
import { eventFixture } from '@/test.ts';
import { Conf } from '@/config.ts';
import { EventsDB } from '@/storages/EventsDB.ts';
import { DittoPgStore } from '@/storages/DittoPgStore.ts';
import { createTestDB } from '@/test.ts';
Deno.test('req streaming', async () => {
await using db = await createTestDB({ pure: true });
const { store: relay } = db;
const msgs: NostrRelayMsg[] = [];
const controller = new AbortController();
const promise = (async () => {
for await (const msg of relay.req([{ since: 0 }], { signal: controller.signal })) {
msgs.push(msg);
}
})();
const event = genEvent({ created_at: Math.floor(Date.now() / 1000) });
await relay.event(event);
controller.abort();
await promise;
const verbs = msgs.map(([verb]) => verb);
assertEquals(verbs, ['EOSE', 'EVENT', 'CLOSED']);
assertEquals(msgs[1][2], event);
assertEquals(relay.subs.size, 0); // cleanup
});
Deno.test('count filters', async () => {
await using db = await createTestDB({ pure: true });
const { store } = db;
@ -47,8 +76,8 @@ Deno.test('query events with domain search filter', async () => {
assertEquals(await store.query([{ search: '' }]), [event1]);
await kysely
.insertInto('author_stats')
.values({
.updateTable('author_stats')
.set({
pubkey: event1.pubkey,
nip05_domain: 'gleasonator.dev',
nip05_last_verified_at: event1.created_at,
@ -176,11 +205,21 @@ Deno.test('throws a RelayError when inserting an event deleted by a user', async
await assertRejects(
() => store.event(event),
RelayError,
// RelayError,
'event deleted by user',
);
});
Deno.test('inserting the same event twice', async () => {
await using db = await createTestDB({ pure: true });
const { store } = db;
const event = genEvent({ kind: 1 });
await store.event(event);
await store.event(event);
});
Deno.test('inserting replaceable events', async () => {
await using db = await createTestDB({ pure: true });
const { store } = db;
@ -196,6 +235,8 @@ Deno.test('inserting replaceable events', async () => {
const newerEvent = genEvent({ kind: 0, created_at: 999 }, sk);
await store.event(newerEvent);
assertEquals(await store.query([{ kinds: [0] }]), [newerEvent]);
await store.event(olderEvent); // doesn't throw
});
Deno.test("throws a RelayError when querying an event with a large 'since'", async () => {
@ -254,7 +295,7 @@ Deno.test('NPostgres.query with search', async (t) => {
});
});
Deno.test('EventsDB.indexTags indexes only the final `e` and `p` tag of kind 7 events', () => {
Deno.test('DittoPgStore.indexTags indexes only the final `e` and `p` tag of kind 7 events', () => {
const event = {
kind: 7,
id: 'a92549a442d306b32273aa9456ba48e3851a4e6203af3f567543298ab964b35b',
@ -285,7 +326,7 @@ Deno.test('EventsDB.indexTags indexes only the final `e` and `p` tag of kind 7 e
'44639d039a7f7fb8772fcfa13d134d3cda684ec34b6a777ead589676f9e8d81b08a24234066dcde1aacfbe193224940fba7586e7197c159757d3caf8f2b57e1b',
};
const tags = EventsDB.indexTags(event);
const tags = DittoPgStore.indexTags(event);
assertEquals(tags, [
['e', 'e3653ae41ffb510e5fc071555ecfbc94d2fc31e355d61d941e39a97ac6acb15b'],

View file

@ -1,15 +1,27 @@
// deno-lint-ignore-file require-await
import { DittoTables } from '@ditto/db';
import { type DittoDB, type DittoTables } from '@ditto/db';
import { detectLanguage } from '@ditto/lang';
import { NPostgres, NPostgresSchema } from '@nostrify/db';
import { dbEventsCounter } from '@ditto/metrics';
import { NIP50, NKinds, NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { dbEventsCounter, internalSubscriptionsSizeGauge } from '@ditto/metrics';
import {
NIP50,
NKinds,
NostrEvent,
NostrFilter,
NostrRelayCLOSED,
NostrRelayEOSE,
NostrRelayEVENT,
NSchema as n,
} from '@nostrify/nostrify';
import { Machina } from '@nostrify/nostrify/utils';
import { logi } from '@soapbox/logi';
import { JsonValue } from '@std/json';
import { LanguageCode } from 'iso-639-1';
import { Kysely } from 'kysely';
import linkify from 'linkifyjs';
import { nip27 } from 'nostr-tools';
import { LRUCache } from 'lru-cache';
import { matchFilter, nip27 } from 'nostr-tools';
import tldts from 'tldts';
import { z } from 'zod';
@ -18,8 +30,8 @@ import { isNostrId } from '@/utils.ts';
import { abortError } from '@/utils/abort.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { detectLanguage } from '@/utils/language.ts';
import { getMediaLinks } from '@/utils/note.ts';
import { updateStats } from '@/utils/stats.ts';
/** Function to decide whether or not to index a tag. */
type TagCondition = (opts: TagConditionOpts) => boolean;
@ -37,30 +49,47 @@ interface TagConditionOpts {
}
/** Options for the EventsDB store. */
interface EventsDBOpts {
interface DittoPgStoreOpts {
/** Kysely instance to use. */
kysely: Kysely<DittoTables>;
db: DittoDB;
/** Pubkey of the admin account. */
pubkey: string;
/** Timeout in milliseconds for database queries. */
timeout: number;
timeout?: number;
/** Whether the event returned should be a Nostr event or a Ditto event. Defaults to false. */
pure?: boolean;
/** Chunk size for streaming events. Defaults to 20. */
chunkSize?: number;
/** Batch size for fulfilling subscriptions. Defaults to 500. */
batchSize?: number;
/** Maximum age (in **seconds**) an event may have and still be fulfilled to realtime subscribers. */
maxAge?: number;
/** Whether to listen for events from the database with NOTIFY. */
notify?: boolean;
}
/** Realtime subscription. */
interface Subscription {
filters: NostrFilter[];
machina: Machina<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED>;
}
/** SQL database storage adapter for Nostr events. */
class EventsDB extends NPostgres {
export class DittoPgStore extends NPostgres {
readonly subs = new Map<string, Subscription>();
readonly encounters = new LRUCache<string, boolean>({ max: 1000 });
/** Conditions for when to index certain tags. */
static tagConditions: Record<string, TagCondition> = {
'a': ({ count }) => count < 15,
'd': ({ event, count }) => count === 0 && NKinds.parameterizedReplaceable(event.kind),
'e': EventsDB.eTagCondition,
'e': DittoPgStore.eTagCondition,
'k': ({ count, value }) => count === 0 && Number.isInteger(Number(value)),
'L': ({ event, count }) => event.kind === 1985 || count === 0,
'l': ({ event, count }) => event.kind === 1985 || count === 0,
'n': ({ count, value }) => count < 50 && value.length < 50,
'P': ({ count, value }) => count === 0 && isNostrId(value),
'p': EventsDB.pTagCondition,
'p': DittoPgStore.pTagCondition,
'proxy': ({ count, value }) => count === 0 && value.length < 256,
'q': ({ event, count, value }) => count === 0 && event.kind === 1 && isNostrId(value),
'r': ({ event, count }) => (event.kind === 1985 ? count < 20 : count < 3),
@ -72,67 +101,43 @@ class EventsDB extends NPostgres {
},
};
static indexExtensions(event: NostrEvent): Record<string, string> {
const ext: Record<string, string> = {};
if (event.kind === 1) {
ext.reply = event.tags.some(([name]) => name === 'e').toString();
} else if (event.kind === 1111) {
ext.reply = event.tags.some(([name]) => ['e', 'E'].includes(name)).toString();
} else if (event.kind === 6) {
ext.reply = 'false';
}
if ([1, 20, 30023].includes(event.kind)) {
const language = detectLanguage(event.content, 0.90);
if (language) {
ext.language = language;
}
}
const imeta: string[][][] = event.tags
.filter(([name]) => name === 'imeta')
.map(([_, ...entries]) =>
entries.map((entry) => {
const split = entry.split(' ');
return [split[0], split.splice(1).join(' ')];
})
);
// quirks mode
if (!imeta.length && event.kind === 1) {
const links = linkify.find(event.content).filter(({ type }) => type === 'url');
imeta.push(...getMediaLinks(links));
}
if (imeta.length) {
ext.media = 'true';
if (imeta.every((tags) => tags.some(([name, value]) => name === 'm' && value.startsWith('video/')))) {
ext.video = 'true';
}
}
ext.protocol = event.tags.find(([name]) => name === 'proxy')?.[2] ?? 'nostr';
return ext;
}
constructor(private opts: EventsDBOpts) {
super(opts.kysely, {
indexTags: EventsDB.indexTags,
indexSearch: EventsDB.searchText,
indexExtensions: EventsDB.indexExtensions,
constructor(private opts: DittoPgStoreOpts) {
super(opts.db.kysely, {
indexTags: DittoPgStore.indexTags,
indexSearch: DittoPgStore.searchText,
indexExtensions: DittoPgStore.indexExtensions,
chunkSize: opts.chunkSize,
});
if (opts.notify) {
opts.db.listen('nostr_event', async (id) => {
if (this.encounters.has(id)) return;
this.encounters.set(id, true);
const [event] = await this.query([{ ids: [id] }]);
if (event) {
await this.fulfill(event);
}
});
}
}
/** Insert an event (and its tags) into the database. */
override async event(event: NostrEvent, opts: { signal?: AbortSignal; timeout?: number } = {}): Promise<void> {
event = purifyEvent(event);
logi({ level: 'debug', ns: 'ditto.event', source: 'db', id: event.id, kind: event.kind });
dbEventsCounter.inc({ kind: event.kind });
if (NKinds.ephemeral(event.kind)) {
return await this.fulfill(event);
}
if (this.opts.notify) {
this.encounters.set(event.id, true);
}
if (await this.isDeletedAdmin(event)) {
throw new RelayError('blocked', 'event deleted by admin');
}
@ -140,18 +145,88 @@ class EventsDB extends NPostgres {
await this.deleteEventsAdmin(event);
try {
await super.event(event, { ...opts, timeout: opts.timeout ?? this.opts.timeout });
await this.storeEvent(event, { ...opts, timeout: opts.timeout ?? this.opts.timeout });
this.fulfill(event); // don't await or catch (should never reject)
} catch (e) {
if (e instanceof Error && e.message === 'Cannot add a deleted event') {
throw new RelayError('blocked', 'event deleted by user');
} else if (e instanceof Error && e.message === 'Cannot replace an event with an older event') {
return;
if (e instanceof Error) {
switch (e.message) {
case 'duplicate key value violates unique constraint "nostr_events_pkey"':
case 'duplicate key value violates unique constraint "author_stats_pkey"':
return;
case 'canceling statement due to statement timeout':
throw new RelayError('error', 'the event could not be added fast enough');
default:
throw e;
}
} else {
throw e;
}
}
}
/** Maybe store the event, if eligible. */
private async storeEvent(
event: NostrEvent,
opts: { signal?: AbortSignal; timeout?: number } = {},
): Promise<undefined> {
try {
await super.transaction(async (relay, kysely) => {
await updateStats({ event, relay, kysely: kysely as unknown as Kysely<DittoTables> });
await relay.event(event, opts);
});
} catch (e) {
// If the failure is only because of updateStats (which runs first), insert the event anyway.
// We can't catch this in the transaction because the error aborts the transaction on the Postgres side.
if (e instanceof Error && e.message.includes('event_stats' satisfies keyof DittoTables)) {
await super.event(event, opts);
} else {
throw e;
}
}
}
/** Fulfill active subscriptions with this event. */
protected async fulfill(event: NostrEvent): Promise<void> {
const { maxAge = 60, batchSize = 500 } = this.opts;
const now = Math.floor(Date.now() / 1000);
const age = now - event.created_at;
if (age > maxAge) {
// Ephemeral events must be fulfilled; if they are too old, throw so the client gets an error.
if (NKinds.ephemeral(event.kind)) {
throw new RelayError('invalid', 'event too old');
} else {
// Silently ignore old events.
return;
}
}
let count = 0;
for (const [subId, { filters, machina }] of this.subs.entries()) {
for (const filter of filters) {
count++;
if (this.matchesFilter(event, filter)) {
machina.push(['EVENT', subId, event]);
break;
}
// Yield to event loop.
if (count % batchSize === 0) {
await new Promise((resolve) => setTimeout(resolve, 0));
}
}
}
}
/** Check if the event fulfills the filter, according to Ditto criteria. */
protected matchesFilter(event: NostrEvent, filter: NostrFilter): boolean {
// TODO: support streaming by search.
return typeof filter.search !== 'string' && matchFilter(filter, event);
}
/** Check if an event has been deleted by the admin. */
private async isDeletedAdmin(event: NostrEvent): Promise<boolean> {
const filters: NostrFilter[] = [
@ -213,27 +288,89 @@ class EventsDB extends NPostgres {
}
}
override async *req(
filters: NostrFilter[],
opts: { timeout?: number; signal?: AbortSignal; limit?: number } = {},
): AsyncIterable<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED> {
const { db, chunkSize = 20 } = this.opts;
const { limit, timeout = this.opts.timeout, signal } = opts;
filters = await this.expandFilters(filters);
const subId = crypto.randomUUID();
const normalFilters = this.normalizeFilters(filters);
const machina = new Machina<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED>(signal);
if (normalFilters.length && limit !== 0) {
this.withTimeout(db.kysely as unknown as Kysely<NPostgresSchema>, timeout, async (trx) => {
let query = this.getEventsQuery(trx, normalFilters);
if (typeof opts.limit === 'number') {
query = query.limit(opts.limit);
}
for await (const row of query.stream(chunkSize)) {
const event = this.parseEventRow(row);
machina.push(['EVENT', subId, event]);
}
machina.push(['EOSE', subId]);
}).catch((error) => {
if (error instanceof Error && error.message.includes('timeout')) {
machina.push(['CLOSED', subId, 'error: the relay could not respond fast enough']);
} else {
machina.push(['CLOSED', subId, 'error: something went wrong']);
}
});
try {
for await (const msg of machina) {
const [verb] = msg;
yield msg;
if (verb === 'EOSE') {
break;
}
if (verb === 'CLOSED') {
return;
}
}
} catch {
yield ['CLOSED', subId, 'error: the relay could not respond fast enough'];
return;
}
} else {
yield ['EOSE', subId];
}
this.subs.set(subId, { filters, machina });
internalSubscriptionsSizeGauge.set(this.subs.size);
try {
for await (const msg of machina) {
yield msg;
}
} catch (e) {
if (e instanceof Error && e.name === 'AbortError') {
yield ['CLOSED', subId, 'error: the relay could not respond fast enough'];
} else {
yield ['CLOSED', subId, 'error: something went wrong'];
}
} finally {
this.subs.delete(subId);
internalSubscriptionsSizeGauge.set(this.subs.size);
}
}
/** Get events for filters from the database. */
override async query(
filters: NostrFilter[],
opts: { signal?: AbortSignal; timeout?: number; limit?: number } = {},
opts: { signal?: AbortSignal; pure?: boolean; timeout?: number; limit?: number } = {},
): Promise<DittoEvent[]> {
filters = await this.expandFilters(filters);
for (const filter of filters) {
if (filter.since && filter.since >= 2_147_483_647) {
throw new RelayError('invalid', 'since filter too far into the future');
}
if (filter.until && filter.until >= 2_147_483_647) {
throw new RelayError('invalid', 'until filter too far into the future');
}
for (const kind of filter.kinds ?? []) {
if (kind >= 2_147_483_647) {
throw new RelayError('invalid', 'kind filter too far into the future');
}
}
}
if (opts.signal?.aborted) return Promise.resolve([]);
logi({ level: 'debug', ns: 'ditto.req', source: 'db', filters: filters as JsonValue });
@ -323,7 +460,7 @@ class EventsDB extends NPostgres {
return event.tags.reduce<string[][]>((results, tag, index) => {
const [name, value] = tag;
const condition = EventsDB.tagConditions[name] as TagCondition | undefined;
const condition = DittoPgStore.tagConditions[name] as TagCondition | undefined;
if (value && condition && value.length < 200 && checkCondition(name, value, condition, index)) {
results.push(tag);
@ -334,16 +471,63 @@ class EventsDB extends NPostgres {
}, []);
}
static indexExtensions(event: NostrEvent): Record<string, string> {
const ext: Record<string, string> = {};
if (event.kind === 1) {
ext.reply = event.tags.some(([name]) => name === 'e').toString();
} else if (event.kind === 1111) {
ext.reply = event.tags.some(([name]) => ['e', 'E'].includes(name)).toString();
} else if (event.kind === 6) {
ext.reply = 'false';
}
if ([1, 20, 30023].includes(event.kind)) {
const language = detectLanguage(event.content, 0.90);
if (language) {
ext.language = language;
}
}
const imeta: string[][][] = event.tags
.filter(([name]) => name === 'imeta')
.map(([_, ...entries]) =>
entries.map((entry) => {
const split = entry.split(' ');
return [split[0], split.splice(1).join(' ')];
})
);
// quirks mode
if (!imeta.length && event.kind === 1) {
const links = linkify.find(event.content).filter(({ type }) => type === 'url');
imeta.push(...getMediaLinks(links));
}
if (imeta.length) {
ext.media = 'true';
if (imeta.every((tags) => tags.some(([name, value]) => name === 'm' && value.startsWith('video/')))) {
ext.video = 'true';
}
}
ext.protocol = event.tags.find(([name]) => name === 'proxy')?.[2] ?? 'nostr';
return ext;
}
/** Build a search index from the event. */
static searchText(event: NostrEvent): string {
switch (event.kind) {
case 0:
return EventsDB.buildUserSearchContent(event);
return DittoPgStore.buildUserSearchContent(event);
case 1:
case 20:
return nip27.replaceAll(event.content, () => '');
case 30009:
return EventsDB.buildTagsSearchContent(event.tags.filter(([t]) => t !== 'alt'));
return DittoPgStore.buildTagsSearchContent(event.tags.filter(([t]) => t !== 'alt'));
case 30360:
return event.tags.find(([name]) => name === 'd')?.[1] || '';
default:
@ -367,6 +551,18 @@ class EventsDB extends NPostgres {
filters = structuredClone(filters);
for (const filter of filters) {
if (filter.since && filter.since >= 2_147_483_647) {
throw new RelayError('invalid', 'since filter too far into the future');
}
if (filter.until && filter.until >= 2_147_483_647) {
throw new RelayError('invalid', 'until filter too far into the future');
}
for (const kind of filter.kinds ?? []) {
if (kind >= 2_147_483_647) {
throw new RelayError('invalid', 'kind filter too far into the future');
}
}
if (filter.search) {
const tokens = NIP50.parseInput(filter.search);
@ -385,7 +581,7 @@ class EventsDB extends NPostgres {
}
if (domains.size || hostnames.size) {
let query = this.opts.kysely
let query = this.opts.db.kysely
.selectFrom('author_stats')
.select('pubkey')
.where((eb) => {
@ -417,21 +613,33 @@ class EventsDB extends NPostgres {
.map((t) => typeof t === 'object' ? `${t.key}:${t.value}` : t)
.join(' ');
}
if (filter.kinds) {
// Ephemeral events are not stored, so don't bother querying for them.
// If this results in an empty kinds array, NDatabase will remove the filter before querying and return no results.
filter.kinds = filter.kinds.filter((kind) => !NKinds.ephemeral(kind));
}
}
return filters;
}
// deno-lint-ignore no-explicit-any
override async transaction(callback: (store: NPostgres, kysely: Kysely<any>) => Promise<void>): Promise<void> {
return super.transaction((store, kysely) => callback(store, kysely as unknown as Kysely<DittoTables>));
/** Execute the callback in a new transaction, unless the Kysely instance is already a transaction. */
private static override async trx<T = unknown>(
db: Kysely<DittoTables>,
callback: (trx: Kysely<DittoTables>) => Promise<T>,
): Promise<T> {
if (db.isTransaction) {
return await callback(db);
} else {
return await db.transaction().execute((trx) => callback(trx));
}
}
/** Execute NPostgres functions in a transaction. */
// @ts-ignore -- callback is intentionally narrowed from the NPostgres.transaction signature
override async transaction(
callback: (store: DittoPgStore, kysely: Kysely<DittoTables>) => Promise<void>,
): Promise<void> {
const { db } = this.opts;
await DittoPgStore.trx(db.kysely, async (trx) => {
const store = new DittoPgStore({ ...this.opts, db: { ...db, kysely: trx }, notify: false });
await callback(store, trx);
});
}
}
export { EventsDB };
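
A sketch of constructing the renamed DittoPgStore with the options introduced here. `db`, `adminPubkey`, the abort signal, and the import path are assumptions supplied by the caller.

import type { DittoDB } from '@ditto/db';
import { DittoPgStore } from '@/storages/DittoPgStore.ts';

async function example(db: DittoDB, adminPubkey: string, signal: AbortSignal) {
  const store = new DittoPgStore({
    db,
    pubkey: adminPubkey,
    timeout: 5_000, // per-query timeout (ms)
    notify: true,   // fulfill realtime subs from Postgres NOTIFY
    maxAge: 60,     // only stream events younger than 60 seconds
  });

  // Unlike the old EventsDB, `req` stays open after EOSE and streams matching
  // inserts until the signal aborts.
  for await (const msg of store.req([{ kinds: [1] }], { signal })) {
    if (msg[0] === 'EVENT') console.log(msg[2].id);
  }
}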

View file

@ -0,0 +1,91 @@
// deno-lint-ignore-file require-await
import { DittoConf } from '@ditto/conf';
import { NostrEvent, NostrFilter, NPool, type NRelay, NRelay1 } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
interface DittoPoolOpts {
conf: DittoConf;
relay: NRelay;
maxEventRelays?: number;
}
export class DittoPool extends NPool<NRelay1> {
private _opts: DittoPoolOpts;
constructor(opts: DittoPoolOpts) {
super({
open(url) {
return new NRelay1(url, {
// Skip event verification (it's done in the pipeline).
verifyEvent: () => true,
log: logi,
});
},
reqRouter: (filters) => {
return this.reqRouter(filters);
},
eventRouter: async (event) => {
return this.eventRouter(event);
},
});
this._opts = opts;
}
private async reqRouter(filters: NostrFilter[]): Promise<Map<string, NostrFilter[]>> {
const routes = new Map<string, NostrFilter[]>();
for (const relayUrl of await this.getRelayUrls({ marker: 'read' })) {
routes.set(relayUrl, filters);
}
return routes;
}
private async eventRouter(event: NostrEvent): Promise<string[]> {
const { conf, maxEventRelays = 4 } = this._opts;
const { pubkey } = event;
const relaySet = await this.getRelayUrls({ pubkey, marker: 'write' });
relaySet.delete(conf.relay);
return [...relaySet].slice(0, maxEventRelays);
}
private async getRelayUrls(opts: { pubkey?: string; marker?: 'read' | 'write' } = {}): Promise<Set<string>> {
const { conf, relay } = this._opts;
const relays = new Set<`wss://${string}`>();
const authors = new Set<string>([await conf.signer.getPublicKey()]);
if (opts.pubkey) {
authors.add(opts.pubkey);
}
const events = await relay.query([
{ kinds: [10002], authors: [...authors] },
]);
// Ensure user's own relay list is counted first.
if (opts.pubkey) {
events.sort((a) => a.pubkey === opts.pubkey ? -1 : 1);
}
for (const event of events) {
for (const [name, relayUrl, marker] of event.tags) {
if (name === 'r' && (!marker || !opts.marker || marker === opts.marker)) {
try {
const url = new URL(relayUrl);
if (url.protocol === 'wss:') {
relays.add(url.toString() as `wss://${string}`);
}
} catch {
// fallthrough
}
}
}
}
return relays;
}
}
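
A hedged sketch of wiring the new DittoPool: it derives routing from kind 10002 relay lists found in the given relay store. `conf`, `relay`, the event, and the import path are assumptions.

import type { DittoConf } from '@ditto/conf';
import type { NostrEvent, NRelay } from '@nostrify/nostrify';
import { DittoPool } from '@/storages/DittoPool.ts';

async function example(conf: DittoConf, relay: NRelay, event: NostrEvent) {
  const pool = new DittoPool({ conf, relay, maxEventRelays: 4 });

  // REQs fan out to the admin's "read" relays; EVENTs go to up to four of the
  // author's "write" relays, excluding this instance's own relay URL.
  await pool.event(event);
}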

View file

@ -0,0 +1,69 @@
import { DittoPolyPg } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { genEvent, MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { generateSecretKey, getPublicKey } from 'nostr-tools';
import { DittoRelayStore } from './DittoRelayStore.ts';
import type { NostrMetadata } from '@nostrify/types';
Deno.test('updateAuthorData sets nip05', async () => {
const alex = generateSecretKey();
await using test = setupTest((req) => {
switch (req.url) {
case 'https://gleasonator.dev/.well-known/nostr.json?name=alex':
return jsonResponse({ names: { alex: getPublicKey(alex) } });
default:
return new Response('Not found', { status: 404 });
}
});
const { db, store } = test;
const metadata: NostrMetadata = { nip05: 'alex@gleasonator.dev' };
const event = genEvent({ kind: 0, content: JSON.stringify(metadata) }, alex);
await store.updateAuthorData(event);
const row = await db.kysely
.selectFrom('author_stats')
.selectAll()
.where('pubkey', '=', getPublicKey(alex))
.executeTakeFirst();
assertEquals(row?.nip05, 'alex@gleasonator.dev');
assertEquals(row?.nip05_domain, 'gleasonator.dev');
assertEquals(row?.nip05_hostname, 'gleasonator.dev');
});
function setupTest(cb: (req: Request) => Response | Promise<Response>) {
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new MockRelay();
const mockFetch: typeof fetch = async (input, init) => {
const req = new Request(input, init);
return await cb(req);
};
const store = new DittoRelayStore({ conf, db, relay, fetch: mockFetch });
return {
db,
store,
[Symbol.asyncDispose]: async () => {
await store[Symbol.asyncDispose]();
await db[Symbol.asyncDispose]();
},
};
}
function jsonResponse(body: unknown): Response {
return new Response(JSON.stringify(body), {
headers: {
'Content-Type': 'application/json',
},
});
}

View file

@ -0,0 +1,470 @@
import { DittoConf } from '@ditto/conf';
import { DittoDB, DittoTables } from '@ditto/db';
import {
cachedFaviconsSizeGauge,
cachedNip05sSizeGauge,
pipelineEventsCounter,
policyEventsCounter,
webPushNotificationsCounter,
} from '@ditto/metrics';
import {
NKinds,
NostrEvent,
NostrFilter,
NostrRelayCLOSED,
NostrRelayCOUNT,
NostrRelayEOSE,
NostrRelayEVENT,
NRelay,
NSchema as n,
} from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { UpdateObject } from 'kysely';
import { LRUCache } from 'lru-cache';
import tldts from 'tldts';
import { z } from 'zod';
import { DittoPush } from '@/DittoPush.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { RelayError } from '@/RelayError.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { eventAge, nostrNow, Time } from '@/utils.ts';
import { getAmount } from '@/utils/bolt11.ts';
import { errorJson } from '@/utils/log.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { getTagSet } from '@/utils/tags.ts';
import { PolicyWorker } from '@/workers/policy.ts';
import { verifyEventWorker } from '@/workers/verify.ts';
import { fetchFavicon, insertFavicon, queryFavicon } from '@/utils/favicon.ts';
import { lookupNip05 } from '@/utils/nip05.ts';
import { parseNoteContent, stripimeta } from '@/utils/note.ts';
import { SimpleLRU } from '@/utils/SimpleLRU.ts';
import { unfurlCardCached } from '@/utils/unfurl.ts';
import { renderWebPushNotification } from '@/views/mastodon/push.ts';
import { nip19 } from 'nostr-tools';
interface DittoRelayStoreOpts {
db: DittoDB;
conf: DittoConf;
relay: NRelay;
fetch?: typeof fetch;
}
/** Backing storage class for Ditto relay implementation at `/relay`. */
export class DittoRelayStore implements NRelay {
private push: DittoPush;
private encounters = new LRUCache<string, true>({ max: 5000 });
private controller = new AbortController();
private policyWorker: PolicyWorker;
private faviconCache: SimpleLRU<string, URL>;
private nip05Cache: SimpleLRU<string, nip19.ProfilePointer>;
private ns = 'ditto.relay.store';
constructor(private opts: DittoRelayStoreOpts) {
const { conf, db } = this.opts;
this.push = new DittoPush(opts);
this.policyWorker = new PolicyWorker(conf);
this.listen().catch((e: unknown) => {
logi({ level: 'error', ns: this.ns, source: 'listen', error: errorJson(e) });
});
this.faviconCache = new SimpleLRU<string, URL>(
async (domain, { signal }) => {
const row = await queryFavicon(db.kysely, domain);
if (row && (nostrNow() - row.last_updated_at) < (conf.caches.favicon.ttl / 1000)) {
return new URL(row.favicon);
}
const url = await fetchFavicon(domain, signal);
await insertFavicon(db.kysely, domain, url.href);
return url;
},
{ ...conf.caches.favicon, gauge: cachedFaviconsSizeGauge },
);
this.nip05Cache = new SimpleLRU<string, nip19.ProfilePointer>(
(nip05, { signal }) => {
return lookupNip05(nip05, { ...this.opts, signal });
},
{ ...conf.caches.nip05, gauge: cachedNip05sSizeGauge },
);
}
/** Open a firehose to the relay. */
private async listen(): Promise<void> {
const { relay } = this.opts;
const { signal } = this.controller;
for await (const msg of relay.req([{ limit: 0 }], { signal })) {
if (msg[0] === 'EVENT') {
const [, , event] = msg;
await this.event(event, { signal });
}
}
}
req(
filters: NostrFilter[],
opts?: { signal?: AbortSignal },
): AsyncIterable<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED> {
const { relay } = this.opts;
return relay.req(filters, opts);
}
/**
* Common pipeline function to process (and maybe store) events.
* It is idempotent, so it can be called multiple times for the same event.
*/
async event(event: DittoEvent, opts: { publish?: boolean; signal?: AbortSignal } = {}): Promise<void> {
const { conf, relay } = this.opts;
const { signal } = opts;
// Skip events that have already been encountered.
if (this.encounters.get(event.id)) {
throw new RelayError('duplicate', 'already have this event');
}
// Reject events that are too far in the future.
if (eventAge(event) < -Time.minutes(1)) {
throw new RelayError('invalid', 'event too far in the future');
}
// Integer max value for Postgres.
if (event.kind >= 2_147_483_647) {
throw new RelayError('invalid', 'event kind too large');
}
// The only point of ephemeral events is to stream them,
// so throw an error if we're not even going to do that.
if (NKinds.ephemeral(event.kind) && !this.isFresh(event)) {
throw new RelayError('invalid', 'event too old');
}
// Block NIP-70 events, because we have no way to `AUTH`.
if (event.tags.some(([name]) => name === '-')) {
throw new RelayError('invalid', 'protected event');
}
// Validate the event's signature.
if (!(await verifyEventWorker(event))) {
throw new RelayError('invalid', 'invalid signature');
}
// Recheck encountered after async ops.
if (this.encounters.has(event.id)) {
throw new RelayError('duplicate', 'already have this event');
}
// Set the event as encountered after verifying the signature.
this.encounters.set(event.id, true);
// Log the event.
logi({ level: 'debug', ns: 'ditto.event', source: 'pipeline', id: event.id, kind: event.kind });
pipelineEventsCounter.inc({ kind: event.kind });
// NIP-46 events get special treatment.
// They are exempt from policies and other side-effects, and should be streamed out immediately.
// If streaming fails, an error should be returned.
if (event.kind === 24133) {
await relay.event(event, { signal });
}
// Ensure the event doesn't violate the policy.
if (event.pubkey !== await conf.signer.getPublicKey()) {
await this.policyFilter(event, signal);
}
// Prepare the event for additional checks.
// FIXME: This is kind of hacky. Should be reorganized to fetch only what's needed for each stage.
await this.hydrateEvent(event, signal);
// Ensure that the author is not banned.
const n = getTagSet(event.user?.tags ?? [], 'n');
if (n.has('disabled')) {
throw new RelayError('blocked', 'author is blocked');
}
try {
await relay.event(purifyEvent(event), { signal });
} finally {
// This needs to run in steps, and should not block the API from responding.
Promise.allSettled([
this.handleZaps(event),
this.updateAuthorData(event, signal),
this.prewarmLinkPreview(event, signal),
this.generateSetEvents(event),
])
.then(() => this.webPush(event))
.catch(() => {});
}
}
private async policyFilter(event: NostrEvent, signal?: AbortSignal): Promise<void> {
try {
const result = await this.policyWorker.call(event, signal);
const [, , ok, reason] = result;
logi({ level: 'debug', ns: 'ditto.policy', id: event.id, kind: event.kind, ok, reason });
policyEventsCounter.inc({ ok: String(ok) });
RelayError.assert(result);
} catch (e) {
if (e instanceof RelayError) {
throw e;
} else {
logi({ level: 'error', ns: 'ditto.policy', id: event.id, kind: event.kind, error: errorJson(e) });
throw new RelayError('blocked', 'policy error');
}
}
}
/** Stores the event in the 'event_zaps' table */
private async handleZaps(event: NostrEvent) {
if (event.kind !== 9735) return;
const { db } = this.opts;
const zapRequestString = event?.tags?.find(([name]) => name === 'description')?.[1];
if (!zapRequestString) return;
const zapRequest = n.json().pipe(n.event()).optional().catch(undefined).parse(zapRequestString);
if (!zapRequest) return;
const amountSchema = z.coerce.number().int().nonnegative().catch(0);
const amount_millisats = amountSchema.parse(getAmount(event?.tags.find(([name]) => name === 'bolt11')?.[1]));
if (!amount_millisats || amount_millisats < 1) return;
const zappedEventId = zapRequest.tags.find(([name]) => name === 'e')?.[1];
if (!zappedEventId) return;
try {
await db.kysely.insertInto('event_zaps').values({
receipt_id: event.id,
target_event_id: zappedEventId,
sender_pubkey: zapRequest.pubkey,
amount_millisats,
comment: zapRequest.content,
}).execute();
} catch {
// receipt_id is unique, do nothing
}
}
/** Parse kind 0 metadata and track indexes in the database. */
async updateAuthorData(event: NostrEvent, signal?: AbortSignal): Promise<void> {
if (event.kind !== 0) return;
const { db } = this.opts;
// Parse metadata.
const metadata = n.json().pipe(n.metadata()).catch({}).safeParse(event.content);
if (!metadata.success) return;
const { name, nip05 } = metadata.data;
const updates: UpdateObject<DittoTables, 'author_stats'> = {};
const authorStats = await db.kysely
.selectFrom('author_stats')
.selectAll()
.where('pubkey', '=', event.pubkey)
.executeTakeFirst();
const lastVerified = authorStats?.nip05_last_verified_at;
const eventNewer = !lastVerified || event.created_at > lastVerified;
try {
if (nip05 !== authorStats?.nip05 && eventNewer || !lastVerified) {
if (nip05) {
const tld = tldts.parse(nip05);
if (tld.isIcann && !tld.isIp && !tld.isPrivate) {
const pointer = await this.nip05Cache.fetch(nip05, { signal });
if (pointer.pubkey === event.pubkey) {
updates.nip05 = nip05;
updates.nip05_domain = tld.domain;
updates.nip05_hostname = tld.hostname;
updates.nip05_last_verified_at = event.created_at;
}
}
} else {
updates.nip05 = null;
updates.nip05_domain = null;
updates.nip05_hostname = null;
updates.nip05_last_verified_at = event.created_at;
}
}
} catch {
// Fallthrough.
}
// Fetch favicon.
const domain = nip05?.split('@')[1]?.toLowerCase();
if (domain) {
try {
await this.faviconCache.fetch(domain, { signal });
} catch {
// Fallthrough.
}
}
const search = [name, nip05].filter(Boolean).join(' ').trim();
if (search !== authorStats?.search) {
updates.search = search;
}
if (Object.keys(updates).length) {
await db.kysely.insertInto('author_stats')
.values({
pubkey: event.pubkey,
followers_count: 0,
following_count: 0,
notes_count: 0,
search,
...updates,
})
.onConflict((oc) => oc.column('pubkey').doUpdateSet(updates))
.execute();
}
}
private async prewarmLinkPreview(event: NostrEvent, signal?: AbortSignal): Promise<void> {
const { firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), []);
if (firstUrl) {
await unfurlCardCached(firstUrl, signal);
}
}
private async generateSetEvents(event: NostrEvent): Promise<void> {
const { conf } = this.opts;
const signer = conf.signer;
const pubkey = await signer.getPublicKey();
const tagsAdmin = event.tags.some(([name, value]) => ['p', 'P'].includes(name) && value === pubkey);
if (event.kind === 1984 && tagsAdmin) {
const rel = await signer.signEvent({
kind: 30383,
content: '',
tags: [
['d', event.id],
['p', event.pubkey],
['k', '1984'],
['n', 'open'],
...[...getTagSet(event.tags, 'p')].map((value) => ['P', value]),
...[...getTagSet(event.tags, 'e')].map((value) => ['e', value]),
],
created_at: Math.floor(Date.now() / 1000),
});
await this.event(rel, { signal: AbortSignal.timeout(1000) });
}
if (event.kind === 3036 && tagsAdmin) {
const rel = await signer.signEvent({
kind: 30383,
content: '',
tags: [
['d', event.id],
['p', event.pubkey],
['k', '3036'],
['n', 'pending'],
],
created_at: Math.floor(Date.now() / 1000),
});
await this.event(rel, { signal: AbortSignal.timeout(1000) });
}
}
private async webPush(event: NostrEvent): Promise<void> {
if (!this.isFresh(event)) {
throw new RelayError('invalid', 'event too old');
}
const { db, relay } = this.opts;
const pubkeys = getTagSet(event.tags, 'p');
if (!pubkeys.size) {
return;
}
const rows = await db.kysely
.selectFrom('push_subscriptions')
.selectAll()
.where('pubkey', 'in', [...pubkeys])
.execute();
for (const row of rows) {
const viewerPubkey = row.pubkey;
if (viewerPubkey === event.pubkey) {
continue; // Don't notify authors about their own events.
}
const message = await renderWebPushNotification(relay, event, viewerPubkey);
if (!message) {
continue;
}
const subscription = {
endpoint: row.endpoint,
keys: {
auth: row.auth,
p256dh: row.p256dh,
},
};
await this.push.push(subscription, message);
webPushNotificationsCounter.inc({ type: message.notification_type });
}
}
/** Hydrate the event with the user, if applicable. */
private async hydrateEvent(event: NostrEvent, signal?: AbortSignal): Promise<DittoEvent> {
const [hydrated] = await hydrateEvents({ ...this.opts, events: [event], signal });
return hydrated;
}
/** Determine if the event is being received in a timely manner. */
private isFresh(event: NostrEvent): boolean {
return eventAge(event) < Time.minutes(1);
}
async query(filters: NostrFilter[], opts: { pure?: boolean; signal?: AbortSignal } = {}): Promise<DittoEvent[]> {
const { relay } = this.opts;
const { pure = true, signal } = opts; // TODO: make pure `false` by default
const events = await relay.query(filters, opts);
if (!pure) {
return hydrateEvents({ ...this.opts, events, signal });
}
return events;
}
count(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise<NostrRelayCOUNT[2]> {
const { relay } = this.opts;
if (!relay.count) {
return Promise.reject(new Error('Method not implemented.'));
}
return relay.count(filters, opts);
}
remove(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise<void> {
const { relay } = this.opts;
if (!relay.remove) {
return Promise.reject(new Error('Method not implemented.'));
}
return relay.remove(filters, opts);
}
async close(): Promise<void> {
const { relay } = this.opts;
this.controller.abort();
await relay.close();
}
[Symbol.asyncDispose](): Promise<void> {
return this.close();
}
}
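
A sketch of feeding an event through the pipeline entry point above. `conf`, `db`, the backing relay (for example a DittoPgStore), the incoming event, and the import path are assumptions.

import type { DittoConf } from '@ditto/conf';
import type { DittoDB } from '@ditto/db';
import type { NostrEvent, NRelay } from '@nostrify/nostrify';
import { DittoRelayStore } from '@/storages/DittoRelayStore.ts';

async function example(conf: DittoConf, db: DittoDB, relay: NRelay, incoming: NostrEvent) {
  const pipeline = new DittoRelayStore({ conf, db, relay });

  try {
    await pipeline.event(incoming, { signal: AbortSignal.timeout(5_000) });
  } catch (e) {
    // RelayError reasons used above include 'duplicate', 'invalid' and 'blocked'.
    console.error(e);
  }
}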

View file

@ -1,23 +0,0 @@
import { assertEquals } from '@std/assert';
import { eventFixture } from '@/test.ts';
import { InternalRelay } from './InternalRelay.ts';
Deno.test('InternalRelay', async () => {
const relay = new InternalRelay();
const event1 = await eventFixture('event-1');
const promise = new Promise((resolve) => setTimeout(() => resolve(relay.event(event1)), 0));
for await (const msg of relay.req([{}])) {
if (msg[0] === 'EVENT') {
assertEquals(relay.subs.size, 1);
assertEquals(msg[2], event1);
break;
}
}
await promise;
assertEquals(relay.subs.size, 0); // cleanup
});

View file

@ -1,86 +0,0 @@
// deno-lint-ignore-file require-await
import {
NIP50,
NostrEvent,
NostrFilter,
NostrRelayCLOSED,
NostrRelayEOSE,
NostrRelayEVENT,
NRelay,
} from '@nostrify/nostrify';
import { Machina } from '@nostrify/nostrify/utils';
import { matchFilter } from 'nostr-tools';
import { Gauge } from 'prom-client';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { purifyEvent } from '@/utils/purify.ts';
interface InternalRelayOpts {
gauge?: Gauge;
}
/**
* PubSub event store for streaming events within the application.
* The pipeline should push events to it, then anything in the application can subscribe to it.
*/
export class InternalRelay implements NRelay {
readonly subs = new Map<string, { filters: NostrFilter[]; machina: Machina<NostrEvent> }>();
constructor(private opts: InternalRelayOpts = {}) {}
async *req(
filters: NostrFilter[],
opts?: { signal?: AbortSignal },
): AsyncGenerator<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED> {
const id = crypto.randomUUID();
const machina = new Machina<NostrEvent>(opts?.signal);
yield ['EOSE', id];
this.subs.set(id, { filters, machina });
this.opts.gauge?.set(this.subs.size);
try {
for await (const event of machina) {
yield ['EVENT', id, event];
}
} finally {
this.subs.delete(id);
this.opts.gauge?.set(this.subs.size);
}
}
async event(event: DittoEvent): Promise<void> {
for (const { filters, machina } of this.subs.values()) {
for (const filter of filters) {
if (matchFilter(filter, event)) {
if (filter.search) {
const tokens = NIP50.parseInput(filter.search);
const domain = (tokens.find((t) =>
typeof t === 'object' && t.key === 'domain'
) as { key: 'domain'; value: string } | undefined)?.value;
if (domain === event.author_stats?.nip05_hostname) {
machina.push(purifyEvent(event));
break;
}
} else {
machina.push(purifyEvent(event));
break;
}
}
}
}
return Promise.resolve();
}
async query(): Promise<NostrEvent[]> {
return [];
}
async close(): Promise<void> {
return Promise.resolve();
}
}
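
For reference, a sketch of how a subscriber consumed this removed InternalRelay, including the NIP-50 `domain:` token it special-cased. The import path and the domain value are placeholders.

import { InternalRelay } from '@/storages/InternalRelay.ts';

const internal = new InternalRelay();
const controller = new AbortController();

// Plain filters match with matchFilter; a `domain:` search token only matches
// events whose hydrated author_stats.nip05_hostname equals that domain.
(async () => {
  for await (
    const msg of internal.req([{ kinds: [1], search: 'domain:gleasonator.dev' }], {
      signal: controller.signal,
    })
  ) {
    if (msg[0] === 'EVENT') console.log('streamed:', msg[2].id);
  }
})().catch(() => {/* aborted */});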

View file

@ -1,43 +0,0 @@
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getTagSet } from '@/utils/tags.ts';
export class UserStore implements NStore {
private promise: Promise<DittoEvent[]> | undefined;
constructor(private pubkey: string, private store: NStore) {}
async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise<void> {
return await this.store.event(event, opts);
}
/**
* Query events that `pubkey` did not mute
* https://github.com/nostr-protocol/nips/blob/master/51.md#standard-lists
*/
async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise<DittoEvent[]> {
const events = await this.store.query(filters, opts);
const pubkeys = await this.getMutedPubkeys();
return events.filter((event) => {
return event.kind === 0 || !pubkeys.has(event.pubkey);
});
}
private async getMuteList(): Promise<DittoEvent | undefined> {
if (!this.promise) {
this.promise = this.store.query([{ authors: [this.pubkey], kinds: [10000], limit: 1 }]);
}
const [muteList] = await this.promise;
return muteList;
}
private async getMutedPubkeys(): Promise<Set<string>> {
const mutedPubkeysEvent = await this.getMuteList();
if (!mutedPubkeysEvent) {
return new Set();
}
return getTagSet(mutedPubkeysEvent.tags, 'p');
}
}
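
A short sketch of the removed UserStore's contract: wrap another store with a viewer pubkey, and posts by authors on the viewer's kind 10000 mute list drop out of results while kind 0 profiles still come through. The viewer pubkey, backing store, and import path are assumptions.

import type { NStore } from '@nostrify/nostrify';
import { UserStore } from '@/storages/UserStore.ts';

async function example(viewerPubkey: string, backing: NStore) {
  const store = new UserStore(viewerPubkey, backing);
  // Muted authors are filtered out; profiles (kind 0) are always returned.
  return await store.query([{ kinds: [1], limit: 20 }]);
}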

View file

@ -1,5 +1,6 @@
import { jsonlEvents } from '@nostrify/nostrify/test';
import { assembleEvents } from '@/storages/hydrate.ts';
import { jsonlEvents } from '@/test.ts';
const testEvents = await jsonlEvents('fixtures/hydrated.jsonl');
const testStats = JSON.parse(await Deno.readTextFile('fixtures/stats.json'));
@ -9,5 +10,5 @@ const testStats = JSON.parse(await Deno.readTextFile('fixtures/stats.json'));
const events = testEvents.slice(0, 20);
Deno.bench('assembleEvents with home feed', () => {
assembleEvents(events, testEvents, testStats);
assembleEvents('', events, testEvents, testStats);
});

View file

@ -1,13 +1,16 @@
import { DittoConf } from '@ditto/conf';
import { DummyDB } from '@ditto/db';
import { MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { generateSecretKey, nip19 } from 'nostr-tools';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { createTestDB, eventFixture } from '@/test.ts';
import { eventFixture } from '@/test.ts';
Deno.test('hydrateEvents(): author --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const event0 = await eventFixture('event-0');
const event1 = await eventFixture('event-1');
@ -16,19 +19,15 @@ Deno.test('hydrateEvents(): author --- WITHOUT stats', async () => {
await relay.event(event0);
await relay.event(event1);
await hydrateEvents({
events: [event1],
store: relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [event1] });
const expectedEvent = { ...event1, author: event0 };
assertEquals(event1, expectedEvent);
});
Deno.test('hydrateEvents(): repost --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const event0madePost = await eventFixture('event-0-the-one-who-post-and-users-repost');
const event0madeRepost = await eventFixture('event-0-the-one-who-repost');
@ -41,23 +40,20 @@ Deno.test('hydrateEvents(): repost --- WITHOUT stats', async () => {
await relay.event(event1reposted);
await relay.event(event6);
await hydrateEvents({
events: [event6],
store: relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [event6] });
const expectedEvent6 = {
...event6,
author: event0madeRepost,
repost: { ...event1reposted, author: event0madePost },
};
assertEquals(event6, expectedEvent6);
});
Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const event0madeQuoteRepost = await eventFixture('event-0-the-one-who-quote-repost');
const event0 = await eventFixture('event-0');
@ -70,11 +66,7 @@ Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => {
await relay.event(event1quoteRepost);
await relay.event(event1willBeQuoteReposted);
await hydrateEvents({
events: [event1quoteRepost],
store: relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [event1quoteRepost] });
const expectedEvent1quoteRepost = {
...event1quoteRepost,
@ -86,8 +78,8 @@ Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => {
});
Deno.test('hydrateEvents(): repost of quote repost --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const author = await eventFixture('event-0-makes-repost-with-quote-repost');
const event1 = await eventFixture('event-1-will-be-reposted-with-quote-repost');
@ -100,23 +92,20 @@ Deno.test('hydrateEvents(): repost of quote repost --- WITHOUT stats', async ()
await relay.event(event1quote);
await relay.event(event6);
await hydrateEvents({
events: [event6],
store: relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [event6] });
const expectedEvent6 = {
...event6,
author,
repost: { ...event1quote, author, quote: { author, ...event1 } },
};
assertEquals(event6, expectedEvent6);
});
Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const authorDictator = await eventFixture('kind-0-dictator');
const authorVictim = await eventFixture('kind-0-george-orwell');
@ -129,11 +118,7 @@ Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stat
await relay.event(reportEvent);
await relay.event(event1);
await hydrateEvents({
events: [reportEvent],
store: relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [reportEvent] });
const expectedEvent: DittoEvent = {
...reportEvent,
@ -141,12 +126,13 @@ Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stat
reported_notes: [event1],
reported_profile: authorVictim,
};
assertEquals(reportEvent, expectedEvent);
});
Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 --- WITHOUT stats', async () => {
const relay = new MockRelay();
await using db = await createTestDB();
const opts = setupTest();
const { relay } = opts;
const zapSender = await eventFixture('kind-0-jack');
const zapReceipt = await eventFixture('kind-9735-jack-zap-patrick');
@ -159,11 +145,7 @@ Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 ---
await relay.event(zappedPost);
await relay.event(zapReceiver);
await hydrateEvents({
events: [zapReceipt],
store: relay,
kysely: db.kysely,
});
await hydrateEvents({ ...opts, events: [zapReceipt] });
const expectedEvent: DittoEvent = {
...zapReceipt,
@ -175,5 +157,14 @@ Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 ---
zap_amount: 5225000, // millisats
zap_message: '🫂',
};
assertEquals(zapReceipt, expectedEvent);
});
function setupTest() {
const db = new DummyDB();
const conf = new DittoConf(new Map([['DITTO_NSEC', nip19.nsecEncode(generateSecretKey())]]));
const relay = new MockRelay();
return { conf, db, relay };
}

View file

@ -1,28 +1,28 @@
import { DittoTables } from '@ditto/db';
import { DittoDB, DittoTables } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { NStore } from '@nostrify/nostrify';
import { Kysely } from 'kysely';
import { matchFilter } from 'nostr-tools';
import { NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';
import { Conf } from '@/config.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { fallbackAuthor } from '@/utils.ts';
import { findQuoteTag } from '@/utils/tags.ts';
import { findQuoteInContent } from '@/utils/note.ts';
import { getAmount } from '@/utils/bolt11.ts';
import { Storages } from '@/storages.ts';
interface HydrateOpts {
db: DittoDB;
conf: DittoConf;
relay: NStore;
events: DittoEvent[];
store: NStore;
signal?: AbortSignal;
kysely?: Kysely<DittoTables>;
}
/** Hydrate events using the provided storage. */
async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
const { events, store, signal, kysely = await Storages.kysely() } = opts;
const { conf, db, events } = opts;
if (!events.length) {
return events;
@ -30,28 +30,28 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
const cache = [...events];
for (const event of await gatherRelatedEvents({ events: cache, store, signal })) {
for (const event of await gatherRelatedEvents({ ...opts, events: cache })) {
cache.push(event);
}
for (const event of await gatherQuotes({ events: cache, store, signal })) {
for (const event of await gatherQuotes({ ...opts, events: cache })) {
cache.push(event);
}
for (const event of await gatherProfiles({ events: cache, store, signal })) {
for (const event of await gatherProfiles({ ...opts, events: cache })) {
cache.push(event);
}
for (const event of await gatherUsers({ events: cache, store, signal })) {
for (const event of await gatherUsers({ ...opts, events: cache })) {
cache.push(event);
}
for (const event of await gatherInfo({ events: cache, store, signal })) {
for (const event of await gatherInfo({ ...opts, events: cache })) {
cache.push(event);
}
const authorStats = await gatherAuthorStats(cache, kysely as Kysely<DittoTables>);
const eventStats = await gatherEventStats(cache, kysely as Kysely<DittoTables>);
const authorStats = await gatherAuthorStats(cache, db.kysely);
const eventStats = await gatherEventStats(cache, db.kysely);
const domains = authorStats.reduce((result, { nip05_hostname }) => {
if (nip05_hostname) result.add(nip05_hostname);
@ -59,7 +59,7 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
}, new Set<string>());
const favicons = (
await kysely
await db.kysely
.selectFrom('domain_favicons')
.select(['domain', 'favicon'])
.where('domain', 'in', [...domains])
@ -79,15 +79,18 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
// Dedupe events.
const results = [...new Map(cache.map((event) => [event.id, event])).values()];
const admin = await conf.signer.getPublicKey();
// First connect all the events to each other, then connect the connected events to the original list.
assembleEvents(results, results, stats);
assembleEvents(events, results, stats);
assembleEvents(admin, results, results, stats);
assembleEvents(admin, events, results, stats);
return events;
}
/** Connect the events in list `b` to the DittoEvent fields in list `a`. */
export function assembleEvents(
admin: string,
a: DittoEvent[],
b: DittoEvent[],
stats: {
@ -96,8 +99,6 @@ export function assembleEvents(
favicons: Record<string, string>;
},
): DittoEvent[] {
const admin = Conf.pubkey;
const authorStats = stats.authors.reduce((result, { pubkey, ...stat }) => {
result[pubkey] = {
...stat,
@ -198,7 +199,7 @@ export function assembleEvents(
}
/** Collect event targets (eg reposts, quote posts, reacted posts, etc.) */
function gatherRelatedEvents({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherRelatedEvents({ events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
const ids = new Set<string>();
for (const event of events) {
@ -233,14 +234,14 @@ function gatherRelatedEvents({ events, store, signal }: HydrateOpts): Promise<Di
}
}
return store.query(
return relay.query(
[{ ids: [...ids], limit: ids.size }],
{ signal },
);
}
/** Collect quotes from the events. */
function gatherQuotes({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherQuotes({ events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
const ids = new Set<string>();
for (const event of events) {
@ -252,14 +253,14 @@ function gatherQuotes({ events, store, signal }: HydrateOpts): Promise<DittoEven
}
}
return store.query(
return relay.query(
[{ ids: [...ids], limit: ids.size }],
{ signal },
);
}
/** Collect profiles from the events. */
async function gatherProfiles({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
async function gatherProfiles({ events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
const pubkeys = new Set<string>();
for (const event of events) {
@ -299,7 +300,7 @@ async function gatherProfiles({ events, store, signal }: HydrateOpts): Promise<D
}
}
const authors = await store.query(
const authors = await relay.query(
[{ kinds: [0], authors: [...pubkeys], limit: pubkeys.size }],
{ signal },
);
@@ -316,21 +317,21 @@ async function gatherProfiles({ events, store, signal }: HydrateOpts): Promise<D
}
/** Collect users from the events. */
function gatherUsers({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
async function gatherUsers({ conf, events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
const pubkeys = new Set(events.map((event) => event.pubkey));
if (!pubkeys.size) {
return Promise.resolve([]);
}
return store.query(
[{ kinds: [30382], authors: [Conf.pubkey], '#d': [...pubkeys], limit: pubkeys.size }],
return relay.query(
[{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [...pubkeys], limit: pubkeys.size }],
{ signal },
);
}
/** Collect info events from the events. */
function gatherInfo({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
async function gatherInfo({ conf, events, relay, signal }: HydrateOpts): Promise<DittoEvent[]> {
const ids = new Set<string>();
for (const event of events) {
@@ -343,8 +344,8 @@ function gatherInfo({ events, store, signal }: HydrateOpts): Promise<DittoEvent[
return Promise.resolve([]);
}
return store.query(
[{ kinds: [30383], authors: [Conf.pubkey], '#d': [...ids], limit: ids.size }],
return relay.query(
[{ kinds: [30383], authors: [await conf.signer.getPublicKey()], '#d': [...ids], limit: ids.size }],
{ signal },
);
}
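For orientation while reading the hunks above: hydrateEvents now takes a single options bag whose fields (conf, db, relay, events, signal) are the ones destructured throughout this file, replacing the old store/Conf.pubkey pair. The sketch below is illustrative only — the handler name and the kind-1 filter are invented, and it assumes HydrateOpts keeps the shape implied by this diff.

import { hydrateEvents } from '@/storages/hydrate.ts';

// Sketch only: `opts` mirrors the HydrateOpts bag used above (conf, db, relay, signal),
// minus `events`, which is fetched here. The kind-1 filter is illustrative.
async function loadTimeline(opts: Omit<Parameters<typeof hydrateEvents>[0], 'events'>) {
  const { relay, signal } = opts;

  // Fetch something to hydrate; any filter works.
  const events = await relay.query([{ kinds: [1], limit: 20 }], { signal });

  // hydrateEvents attaches authors, stats, favicons, etc. and returns the same list.
  return hydrateEvents({ ...opts, events });
}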

View file

@@ -1,60 +0,0 @@
import { NostrEvent, NostrFilter, NRelay1, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';
import { JsonValue } from '@std/json';
import { normalizeFilters } from '@/filter.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { abortError } from '@/utils/abort.ts';
interface SearchStoreOpts {
relay: string | undefined;
fallback: NStore;
hydrator?: NStore;
}
class SearchStore implements NStore {
#fallback: NStore;
#hydrator: NStore;
#relay: NRelay1 | undefined;
constructor(opts: SearchStoreOpts) {
this.#fallback = opts.fallback;
this.#hydrator = opts.hydrator ?? this;
if (opts.relay) {
this.#relay = new NRelay1(opts.relay);
}
}
event(_event: NostrEvent, _opts?: { signal?: AbortSignal }): Promise<void> {
return Promise.reject(new Error('EVENT not implemented.'));
}
async query(filters: NostrFilter[], opts?: { signal?: AbortSignal; limit?: number }): Promise<DittoEvent[]> {
filters = normalizeFilters(filters);
if (opts?.signal?.aborted) return Promise.reject(abortError());
if (!filters.length) return Promise.resolve([]);
logi({ level: 'debug', ns: 'ditto.req', source: 'search', filters: filters as JsonValue });
const query = filters[0]?.search;
if (this.#relay && this.#relay.socket.readyState === WebSocket.OPEN) {
logi({ level: 'debug', ns: 'ditto.search', query, source: 'relay', relay: this.#relay.socket.url });
const events = await this.#relay.query(filters, opts);
return hydrateEvents({
events,
store: this.#hydrator,
signal: opts?.signal,
});
} else {
logi({ level: 'debug', ns: 'ditto.search', query, source: 'db' });
return this.#fallback.query(filters, opts);
}
}
}
export { SearchStore };

View file

@@ -1,12 +1,8 @@
import { DittoDB } from '@ditto/db';
import ISO6391, { LanguageCode } from 'iso-639-1';
import lande from 'lande';
import { DittoPolyPg } from '@ditto/db';
import { NostrEvent } from '@nostrify/nostrify';
import { finalizeEvent, generateSecretKey } from 'nostr-tools';
import { Conf } from '@/config.ts';
import { EventsDB } from '@/storages/EventsDB.ts';
import { purifyEvent } from '@/utils/purify.ts';
import { DittoPgStore } from '@/storages/DittoPgStore.ts';
import { sql } from 'kysely';
/** Import an event fixture by name in tests. */
@@ -15,52 +11,35 @@ export async function eventFixture(name: string): Promise<NostrEvent> {
return structuredClone(result.default);
}
/** Import a JSONL fixture by name in tests. */
export async function jsonlEvents(path: string): Promise<NostrEvent[]> {
const data = await Deno.readTextFile(path);
return data.split('\n').map((line) => JSON.parse(line));
}
/** Generate an event for use in tests. */
export function genEvent(t: Partial<NostrEvent> = {}, sk: Uint8Array = generateSecretKey()): NostrEvent {
const event = finalizeEvent({
kind: 255,
created_at: 0,
content: '',
tags: [],
...t,
}, sk);
return purifyEvent(event);
}
/** Create a database for testing. It uses `DATABASE_URL`, or creates an in-memory database by default. */
export async function createTestDB(opts?: { pure?: boolean }) {
const { kysely } = DittoDB.create(Conf.databaseUrl, { poolSize: 1 });
const db = new DittoPolyPg(Conf.databaseUrl, { poolSize: 1 });
await db.migrate();
await DittoDB.migrate(kysely);
const store = new EventsDB({
kysely,
const store = new DittoPgStore({
db,
timeout: Conf.db.timeouts.default,
pubkey: Conf.pubkey,
pubkey: await Conf.signer.getPublicKey(),
pure: opts?.pure ?? false,
notify: true,
});
return {
db,
...db,
store,
kysely,
kysely: db.kysely,
[Symbol.asyncDispose]: async () => {
const { rows } = await sql<
{ tablename: string }
>`select tablename from pg_tables where schemaname = current_schema()`.execute(kysely);
>`select tablename from pg_tables where schemaname = current_schema()`.execute(db.kysely);
for (const { tablename } of rows) {
if (tablename.startsWith('kysely_')) continue;
await sql`truncate table ${sql.ref(tablename)} cascade`.execute(kysely);
await sql`truncate table ${sql.ref(tablename)} cascade`.execute(db.kysely);
}
await kysely.destroy();
await db[Symbol.asyncDispose]();
},
};
}
@@ -68,15 +47,3 @@ export async function createTestDB(opts?: { pure?: boolean }) {
export function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
export function getLanguage(text: string): LanguageCode | undefined {
const [topResult] = lande(text);
if (topResult) {
const [iso6393] = topResult;
const locale = new Intl.Locale(iso6393);
if (ISO6391.validate(locale.language)) {
return locale.language;
}
}
return;
}
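For context on the createTestDB rework above, here is a sketch of how a test might consume it. This is not a test from the repo: the test name, event kind, and assertions are invented, and it assumes DittoPgStore exposes the usual event()/query() store methods that EventsDB did.

import { assertEquals } from '@std/assert';
import { createTestDB, genEvent } from '@/test.ts';

// Sketch only. `await using` leans on the helper's Symbol.asyncDispose (see the hunk
// above) to truncate the tables and dispose the database when the test body finishes.
Deno.test('DittoPgStore round-trips an event', async () => {
  await using test = await createTestDB();

  const event = genEvent({ kind: 1, content: 'hello ditto' });
  await test.store.event(event);

  const [stored] = await test.store.query([{ ids: [event.id] }]);
  assertEquals(stored.id, event.id);
});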

View file

@@ -1,74 +0,0 @@
import { assert, assertEquals } from '@std/assert';
import { Conf } from '@/config.ts';
import { DeepLTranslator } from '@/translators/DeepLTranslator.ts';
import { getLanguage } from '@/test.ts';
const {
deeplBaseUrl: baseUrl,
deeplApiKey: apiKey,
translationProvider,
} = Conf;
const deepl = 'deepl';
Deno.test('DeepL translation with source language omitted', {
ignore: !(translationProvider === deepl && apiKey),
}, async () => {
const translator = new DeepLTranslator({ fetch: fetch, baseUrl, apiKey: apiKey! });
const data = await translator.translate(
[
'Bom dia amigos',
'Meu nome é Patrick',
'Eu irei morar na America, eu prometo. Mas antes, eu devo mencionar que o lande está interpretando este texto como italiano, que estranho.',
],
undefined,
'en',
);
assertEquals(data.source_lang, 'pt');
assertEquals(getLanguage(data.results[0]), 'en');
assertEquals(getLanguage(data.results[1]), 'en');
assertEquals(getLanguage(data.results[2]), 'en');
});
Deno.test('DeepL translation with source language set', {
ignore: !(translationProvider === deepl && apiKey),
}, async () => {
const translator = new DeepLTranslator({ fetch: fetch, baseUrl, apiKey: apiKey as string });
const data = await translator.translate(
[
'Bom dia amigos',
'Meu nome é Patrick',
'Eu irei morar na America, eu prometo. Mas antes, eu devo mencionar que o lande está interpretando este texto como italiano, que estranho.',
],
'pt',
'en',
);
assertEquals(data.source_lang, 'pt');
assertEquals(getLanguage(data.results[0]), 'en');
assertEquals(getLanguage(data.results[1]), 'en');
assertEquals(getLanguage(data.results[2]), 'en');
});
Deno.test("DeepL translation doesn't alter Nostr URIs", {
ignore: !(translationProvider === deepl && apiKey),
}, async () => {
const translator = new DeepLTranslator({ fetch: fetch, baseUrl, apiKey: apiKey as string });
const patrick =
'nostr:nprofile1qy2hwumn8ghj7erfw36x7tnsw43z7un9d3shjqpqgujeqakgt7fyp6zjggxhyy7ft623qtcaay5lkc8n8gkry4cvnrzqep59se';
const danidfra =
'nostr:nprofile1qy2hwumn8ghj7erfw36x7tnsw43z7un9d3shjqpqe6tnvlr46lv3lwdu80r07kanhk6jcxy5r07w9umgv9kuhu9dl5hsz44l8s';
const input =
`Thanks to work by ${patrick} and ${danidfra} , it's now possible to filter the global feed by language on #Ditto!`;
const { results: [output] } = await translator.translate([input], 'en', 'pt');
assert(output.includes(patrick));
assert(output.includes(danidfra));
});

View file

@@ -1,55 +0,0 @@
import { assertEquals } from '@std/assert';
import { Conf } from '@/config.ts';
import { LibreTranslateTranslator } from '@/translators/LibreTranslateTranslator.ts';
import { getLanguage } from '@/test.ts';
const {
libretranslateBaseUrl: baseUrl,
libretranslateApiKey: apiKey,
translationProvider,
} = Conf;
const libretranslate = 'libretranslate';
Deno.test('LibreTranslate translation with source language omitted', {
ignore: !(translationProvider === libretranslate && apiKey),
}, async () => {
const translator = new LibreTranslateTranslator({ fetch: fetch, baseUrl, apiKey: apiKey! });
const data = await translator.translate(
[
'Bom dia amigos',
'Meu nome é Patrick, um nome belo ou feio? A questão é mais profunda do que parece.',
'A respiração é mais importante do que comer e tomar agua.',
],
undefined,
'ca',
);
assertEquals(data.source_lang, 'pt');
assertEquals(getLanguage(data.results[0]), 'ca');
assertEquals(getLanguage(data.results[1]), 'ca');
assertEquals(getLanguage(data.results[2]), 'ca');
});
Deno.test('LibreTranslate translation with source language set', {
ignore: !(translationProvider === libretranslate && apiKey),
}, async () => {
const translator = new LibreTranslateTranslator({ fetch: fetch, baseUrl, apiKey: apiKey! });
const data = await translator.translate(
[
'Bom dia amigos',
'Meu nome é Patrick, um nome belo ou feio? A questão é mais profunda do que parece.',
'A respiração é mais importante do que comer e tomar agua.',
],
'pt',
'ca',
);
assertEquals(data.source_lang, 'pt');
assertEquals(getLanguage(data.results[0]), 'ca');
assertEquals(getLanguage(data.results[1]), 'ca');
assertEquals(getLanguage(data.results[2]), 'ca');
});

Some files were not shown because too many files have changed in this diff.