Merge branch 'main' into mint-cashu

@@ -8,11 +8,12 @@ stages:
test:
  stage: test
  timeout: 2 minutes
  script:
    - deno fmt --check
    - deno task lint
    - deno task check
    - deno task test --coverage=cov_profile
    - deno task test --ignore=packages/transcode --coverage=cov_profile
    - deno coverage cov_profile
  coverage: /All files[^\|]*\|[^\|]*\s+([\d\.]+)/
  services:
@@ -11,6 +11,7 @@
    "./packages/nip98",
    "./packages/policies",
    "./packages/ratelimiter",
    "./packages/transcode",
    "./packages/translators",
    "./packages/uploaders"
  ],

@@ -73,6 +74,7 @@
    "@soapbox/logi": "jsr:@soapbox/logi@^0.3.0",
    "@soapbox/safe-fetch": "jsr:@soapbox/safe-fetch@^2.0.0",
    "@std/assert": "jsr:@std/assert@^0.225.1",
    "@std/async": "jsr:@std/async@^1.0.10",
    "@std/cli": "jsr:@std/cli@^0.223.0",
    "@std/crypto": "jsr:@std/crypto@^0.224.0",
    "@std/encoding": "jsr:@std/encoding@^0.224.0",
deno.lock (generated, 13 added lines)

@@ -28,6 +28,7 @@
    "jsr:@gleasonator/policy@0.9.2": "0.9.2",
    "jsr:@gleasonator/policy@0.9.3": "0.9.3",
    "jsr:@gleasonator/policy@0.9.4": "0.9.4",
    "jsr:@gleasonator/policy@0.9.5": "0.9.5",
    "jsr:@hono/hono@^4.4.6": "4.6.15",
    "jsr:@negrel/http-ece@0.6.0": "0.6.0",
    "jsr:@negrel/webpush@0.3": "0.3.0",

@@ -58,6 +59,7 @@
    "jsr:@std/assert@^1.0.10": "1.0.11",
    "jsr:@std/assert@~0.213.1": "0.213.1",
    "jsr:@std/assert@~0.225.1": "0.225.3",
    "jsr:@std/async@^1.0.10": "1.0.10",
    "jsr:@std/bytes@0.223": "0.223.0",
    "jsr:@std/bytes@0.224": "0.224.0",
    "jsr:@std/bytes@0.224.0": "0.224.0",

@@ -317,6 +319,13 @@
      "jsr:@nostrify/policies@~0.36.1"
    ]
  },
  "@gleasonator/policy@0.9.5": {
    "integrity": "8ce76ad719b5d002bb1799c60f2deb4d450b32d590e0f4c211919aa68f1ea963",
    "dependencies": [
      "jsr:@nostrify/nostrify@0.36",
      "jsr:@nostrify/policies@~0.36.1"
    ]
  },
  "@hono/hono@4.4.6": {
    "integrity": "aa557ca9930787ee86b9ca1730691f1ce1c379174c2cb244d5934db2b6314453"
  },

@@ -604,6 +613,9 @@
      "jsr:@std/internal@^1.0.5"
    ]
  },
  "@std/async@1.0.10": {
    "integrity": "2ff1b1c7d33d1416159989b0f69e59ec7ee8cb58510df01e454def2108b3dbec"
  },
  "@std/bytes@0.223.0": {
    "integrity": "84b75052cd8680942c397c2631318772b295019098f40aac5c36cead4cba51a8"
  },

@@ -2489,6 +2501,7 @@
      "jsr:@soapbox/logi@0.3",
      "jsr:@soapbox/safe-fetch@2",
      "jsr:@std/assert@~0.225.1",
      "jsr:@std/async@^1.0.10",
      "jsr:@std/cli@0.223",
      "jsr:@std/crypto@0.224",
      "jsr:@std/encoding@0.224",

[Image diff: 14 image files changed; file sizes unchanged before and after (9.5 KiB–32 KiB).]
@@ -1,6 +1,6 @@
{
  "name": "@ditto/captcha",
  "version": "1.0.0",
  "version": "0.1.0",
  "exports": {
    ".": "./mod.ts"
  }
@@ -279,6 +279,11 @@ export class DittoConf {
    return optionalBooleanSchema.parse(this.env.get('MEDIA_ANALYZE')) ?? false;
  }

  /** Whether to transcode uploaded video files with ffmpeg. */
  get mediaTranscode(): boolean {
    return optionalBooleanSchema.parse(this.env.get('MEDIA_TRANSCODE')) ?? false;
  }

  /** Max upload size for files in number of bytes. Default 100MiB. */
  get maxUploadSize(): number {
    return Number(this.env.get('MAX_UPLOAD_SIZE') || 100 * 1024 * 1024);

@@ -417,7 +422,6 @@ export class DittoConf {
  get caches(): {
    nip05: { max: number; ttl: number };
    favicon: { max: number; ttl: number };
    linkPreview: { max: number; ttl: number };
    translation: { max: number; ttl: number };
  } {
    const env = this.env;

@@ -437,13 +441,6 @@ export class DittoConf {
        ttl: Number(env.get('DITTO_CACHE_FAVICON_TTL') || 1 * 60 * 60 * 1000),
      };
    },
    /** Link preview cache settings. */
    get linkPreview(): { max: number; ttl: number } {
      return {
        max: Number(env.get('DITTO_CACHE_LINK_PREVIEW_MAX') || 3000),
        ttl: Number(env.get('DITTO_CACHE_LINK_PREVIEW_TTL') || 12 * 60 * 60 * 1000),
      };
    },
    /** Translation cache settings. */
    get translation(): { max: number; ttl: number } {
      return {

@@ -480,4 +477,14 @@ export class DittoConf {
  get precheck(): boolean {
    return optionalBooleanSchema.parse(this.env.get('DITTO_PRECHECK')) ?? true;
  }

  /** Path to `ffmpeg` executable. */
  get ffmpegPath(): string {
    return this.env.get('FFMPEG_PATH') || 'ffmpeg';
  }

  /** Path to `ffprobe` executable. */
  get ffprobePath(): string {
    return this.env.get('FFPROBE_PATH') || 'ffprobe';
  }
}
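
For orientation, a minimal sketch of exercising the new transcode-related getters above. The env variable names (MEDIA_TRANSCODE, FFMPEG_PATH) come from the diff; the Map-based construction mirrors the tests elsewhere in this MR, while the ffmpeg probe itself is an illustrative assumption:

// Illustrative only: reading the getters added to DittoConf above.
import { DittoConf } from '@ditto/conf';

const conf = new DittoConf(
  new Map([
    ['MEDIA_TRANSCODE', 'true'],        // parsed by optionalBooleanSchema; defaults to false
    ['FFMPEG_PATH', '/usr/bin/ffmpeg'], // falls back to 'ffmpeg' when unset
  ]),
);

if (conf.mediaTranscode) {
  // Hypothetical probe of the configured binary using standard Deno APIs.
  const { success } = await new Deno.Command(conf.ffmpegPath, { args: ['-version'] }).output();
  console.log('ffmpeg available:', success);
}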

@@ -1,6 +1,6 @@
{
  "name": "@ditto/conf",
  "version": "1.1.0",
  "version": "0.1.0",
  "exports": {
    ".": "./mod.ts"
  }

@@ -1,6 +1,8 @@
import type { NPostgresSchema } from '@nostrify/db';
import type { Generated } from 'kysely';

import type { MastodonPreviewCard } from '@ditto/mastoapi/types';

export interface DittoTables extends NPostgresSchema {
  auth_tokens: AuthTokenRow;
  author_stats: AuthorStatsRow;

@@ -34,6 +36,7 @@ interface EventStatsRow {
  quotes_count: number;
  reactions: string;
  zaps_amount: number;
  link_preview?: MastodonPreviewCard;
}

interface AuthTokenRow {

@@ -1,14 +1,22 @@
import { assertEquals } from '@std/assert';
import { assertEquals, assertRejects } from '@std/assert';

import { DittoPglite } from './DittoPglite.ts';

Deno.test('DittoPglite', async () => {
  const db = new DittoPglite('memory://');
  await using db = new DittoPglite('memory://');
  await db.migrate();

  assertEquals(db.poolSize, 1);
  assertEquals(db.availableConnections, 1);

  await db.kysely.destroy();
  await new Promise((resolve) => setTimeout(resolve, 100));
});

Deno.test('DittoPglite query after closing', async () => {
  const db = new DittoPglite('memory://');
  await db[Symbol.asyncDispose]();

  await assertRejects(
    () => db.kysely.selectFrom('nostr_events').selectAll().execute(),
    Error,
    'PGlite is closed',
  );
});

@@ -47,6 +47,16 @@ export class DittoPglite implements DittoDB {
  }

  async [Symbol.asyncDispose](): Promise<void> {
    await this.kysely.destroy();
    try {
      // FIXME: `kysely.destroy()` calls `pglite.close()` internally, but it doesn't work.
      await this.pglite.close();
      await this.kysely.destroy();
    } catch (e) {
      if (e instanceof Error && e.message === 'PGlite is closed') {
        // Make dispose idempotent.
      } else {
        throw e;
      }
    }
  }
}
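
The `await using` declarations in the tests above rely on TypeScript's explicit resource management. A self-contained sketch of the pattern, with an illustrative class that is not part of the codebase:

// Illustrative: Symbol.asyncDispose runs automatically when an `await using` binding leaves scope.
class FakeResource implements AsyncDisposable {
  closed = false;

  async [Symbol.asyncDispose](): Promise<void> {
    this.closed = true; // close connections, flush buffers, etc.
  }
}

Deno.test('await using disposes at end of scope', async () => {
  const resource = new FakeResource();
  {
    await using scoped = resource;
    // work with `scoped` here
  }
  if (!resource.closed) throw new Error('resource was not disposed');
});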
packages/db/adapters/DittoPostgres.test.ts (new file, 22 lines)

@@ -0,0 +1,22 @@
import { DittoConf } from '@ditto/conf';

import { DittoPostgres } from './DittoPostgres.ts';

const conf = new DittoConf(Deno.env);
const isPostgres = /^postgres(?:ql)?:/.test(conf.databaseUrl);

Deno.test('DittoPostgres', { ignore: !isPostgres }, async () => {
  await using db = new DittoPostgres(conf.databaseUrl);
  await db.migrate();
});

// FIXME: There is a problem with postgres-js where queries just hang after the database is closed.

// Deno.test('DittoPostgres query after closing', { ignore: !isPostgres }, async () => {
//   const db = new DittoPostgres(conf.databaseUrl);
//   await db[Symbol.asyncDispose]();
//
//   await assertRejects(
//     () => db.kysely.selectFrom('nostr_events').selectAll().execute(),
//   );
// });

@@ -58,7 +58,7 @@ export class DittoPostgres implements DittoDB {
  }

  async [Symbol.asyncDispose](): Promise<void> {
    await this.pg.end();
    await this.pg.end({ timeout: 0 }); // force-close the connections
    await this.kysely.destroy();
  }
}
packages/db/adapters/TestDB.test.ts (new file, 25 lines)

@@ -0,0 +1,25 @@
import { DittoConf } from '@ditto/conf';
import { NPostgres } from '@nostrify/db';
import { genEvent } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';

import { DittoPolyPg } from './DittoPolyPg.ts';
import { TestDB } from './TestDB.ts';

Deno.test('TestDB', async () => {
  const conf = new DittoConf(Deno.env);
  const orig = new DittoPolyPg(conf.databaseUrl);

  await using db = new TestDB(orig);
  await db.migrate();
  await db.clear();

  const store = new NPostgres(orig.kysely);
  await store.event(genEvent());

  assertEquals((await store.count([{}])).count, 1);

  await db.clear();

  assertEquals((await store.count([{}])).count, 0);
});
packages/db/adapters/TestDB.ts (new file, 49 lines)

@@ -0,0 +1,49 @@
import { type Kysely, sql } from 'kysely';

import type { DittoDB } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';

/** Wraps another DittoDB implementation to clear all data when disposed. */
export class TestDB implements DittoDB {
  constructor(private db: DittoDB) {}

  get kysely(): Kysely<DittoTables> {
    return this.db.kysely;
  }

  get poolSize(): number {
    return this.db.poolSize;
  }

  get availableConnections(): number {
    return this.db.availableConnections;
  }

  migrate(): Promise<void> {
    return this.db.migrate();
  }

  listen(channel: string, callback: (payload: string) => void): void {
    return this.db.listen(channel, callback);
  }

  /** Truncate all tables. */
  async clear(): Promise<void> {
    const query = sql<{ tablename: string }>`select tablename from pg_tables where schemaname = current_schema()`;

    const { rows } = await query.execute(this.db.kysely);

    for (const { tablename } of rows) {
      if (tablename.startsWith('kysely_')) {
        continue; // Skip Kysely's internal tables
      } else {
        await sql`truncate table ${sql.ref(tablename)} cascade`.execute(this.db.kysely);
      }
    }
  }

  async [Symbol.asyncDispose](): Promise<void> {
    await this.clear();
    await this.db[Symbol.asyncDispose]();
  }
}

@@ -1,5 +1,6 @@
{
  "name": "@ditto/db",
  "version": "0.1.0",
  "exports": {
    ".": "./mod.ts"
  }
packages/db/migrations/053_link_preview.ts (new file, 9 lines)

@@ -0,0 +1,9 @@
import type { Kysely } from 'kysely';

export async function up(db: Kysely<unknown>): Promise<void> {
  await db.schema.alterTable('event_stats').addColumn('link_preview', 'jsonb').execute();
}

export async function down(db: Kysely<unknown>): Promise<void> {
  await db.schema.alterTable('event_stats').dropColumn('link_preview').execute();
}
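
A hedged sketch of reading the new `link_preview` jsonb column back through Kysely. The table and column come from the migration above; the `event_id` key column and the helper name are assumptions:

// Illustrative: fetching the stored preview card for one event.
import type { Kysely } from 'kysely';
import type { DittoTables } from '../DittoTables.ts';

async function getLinkPreview(db: Kysely<DittoTables>, eventId: string) {
  const row = await db
    .selectFrom('event_stats')
    .select('link_preview')
    .where('event_id', '=', eventId) // assumes event_stats is keyed by event_id
    .executeTakeFirst();

  return row?.link_preview; // MastodonPreviewCard | undefined, per DittoTables above
}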
@@ -2,6 +2,7 @@ export { DittoPglite } from './adapters/DittoPglite.ts';
export { DittoPolyPg } from './adapters/DittoPolyPg.ts';
export { DittoPostgres } from './adapters/DittoPostgres.ts';
export { DummyDB } from './adapters/DummyDB.ts';
export { TestDB } from './adapters/TestDB.ts';

export type { DittoDB } from './DittoDB.ts';
export type { DittoTables } from './DittoTables.ts';

@@ -8,7 +8,7 @@ import { type Context, Handler, Input as HonoInput, MiddlewareHandler } from '@h
import { every } from '@hono/hono/combine';
import { cors } from '@hono/hono/cors';
import { serveStatic } from '@hono/hono/deno';
import { NostrEvent, NostrSigner, NRelay, NUploader } from '@nostrify/nostrify';
import { NostrEvent, NostrSigner, NPool, NRelay, NUploader } from '@nostrify/nostrify';

import { cron } from '@/cron.ts';
import { startFirehose } from '@/firehose.ts';

@@ -55,8 +55,6 @@ import {
  adminSetRelaysController,
  deleteZapSplitsController,
  getZapSplitsController,
  nameRequestController,
  nameRequestsController,
  statusZapSplitsController,
  updateInstanceController,
  updateZapSplitsController,

@@ -149,6 +147,8 @@ import { rateLimitMiddleware } from '@/middleware/rateLimitMiddleware.ts';
import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts';
import { translatorMiddleware } from '@/middleware/translatorMiddleware.ts';
import { logiMiddleware } from '@/middleware/logiMiddleware.ts';
import dittoNamesRoute from '@/routes/dittoNamesRoute.ts';
import pleromaAdminPermissionGroupsRoute from '@/routes/pleromaAdminPermissionGroupsRoute.ts';
import { DittoRelayStore } from '@/storages/DittoRelayStore.ts';

export interface AppEnv extends DittoEnv {

@@ -167,6 +167,7 @@ export interface AppEnv extends DittoEnv {
      /** User's relay. Might filter out unwanted content. */
      relay: NRelay;
    };
    pool?: NPool<NRelay>;
  };
}

@@ -194,7 +195,7 @@ const pgstore = new DittoPgStore({
});

const pool = new DittoPool({ conf, relay: pgstore });
const relay = new DittoRelayStore({ db, conf, relay: pgstore });
const relay = new DittoRelayStore({ db, conf, pool, relay: pgstore });

await seedZapSplits({ conf, relay });

@@ -234,8 +235,9 @@ const socketTokenMiddleware = tokenMiddleware((c) => {

app.use(
  '/api/*',
  (c, next) => {
  (c: Context<DittoEnv & { Variables: { pool: NPool<NRelay> } }>, next) => {
    c.set('relay', new DittoAPIStore({ relay, pool }));
    c.set('pool', pool);
    return next();
  },
  metricsMiddleware,

@@ -440,14 +442,14 @@ app.delete('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', userMi
app.get('/api/v1/pleroma/admin/config', userMiddleware({ role: 'admin' }), configController);
app.post('/api/v1/pleroma/admin/config', userMiddleware({ role: 'admin' }), updateConfigController);
app.delete('/api/v1/pleroma/admin/statuses/:id', userMiddleware({ role: 'admin' }), pleromaAdminDeleteStatusController);
app.route('/api/v1/pleroma/admin/users/permission_group', pleromaAdminPermissionGroupsRoute);

app.get('/api/v1/admin/ditto/relays', userMiddleware({ role: 'admin' }), adminRelaysController);
app.put('/api/v1/admin/ditto/relays', userMiddleware({ role: 'admin' }), adminSetRelaysController);

app.put('/api/v1/admin/ditto/instance', userMiddleware({ role: 'admin' }), updateInstanceController);

app.post('/api/v1/ditto/names', userMiddleware(), nameRequestController);
app.get('/api/v1/ditto/names', userMiddleware(), nameRequestsController);
app.route('/api/v1/ditto/names', dittoNamesRoute);

app.get('/api/v1/ditto/captcha', rateLimitMiddleware(3, Time.minutes(1)), captchaController);
app.post(
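
Since app.ts now places the relay pool on the Hono context (`c.set('pool', pool)`), a controller can read it back from `c.var`. A minimal illustrative sketch; the controller itself is hypothetical:

// Illustrative: consuming the pool that app.ts sets on the request context.
import type { AppController } from '@/app.ts';

const poolStatusController: AppController = async (c) => {
  const { pool, signal } = c.var;

  if (!pool) {
    return c.json({ error: 'Ditto pool not available' }, 500);
  }

  // NPool exposes the same query interface as other relay stores.
  const events = await pool.query([{ kinds: [1], limit: 5 }], { signal });
  return c.json({ sampled: events.length });
};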

@@ -1,3 +0,0 @@
import { LRUCache } from 'lru-cache';

export const pipelineEncounters = new LRUCache<string, true>({ max: 5000 });
@@ -128,7 +128,7 @@ const adminAccountActionSchema = z.object({
});

const adminActionController: AppController = async (c) => {
  const { conf, relay, requestId } = c.var;
  const { conf, relay, requestId, signal } = c.var;

  const body = await parseBody(c.req.raw);
  const result = adminAccountActionSchema.safeParse(body);

@@ -161,7 +161,23 @@ const adminActionController: AppController = async (c) => {
  if (data.type === 'revoke_name') {
    n.revoke_name = true;
    try {
      await relay.remove!([{ kinds: [30360], authors: [await conf.signer.getPublicKey()], '#p': [authorId] }]);
      const [event] = await relay.query([{
        kinds: [30360],
        authors: [await conf.signer.getPublicKey()],
        '#p': [authorId],
      }], { signal });

      if (event) {
        await createAdminEvent({
          kind: 5,
          tags: [
            ['e', event.id],
            ['k', '30360'],
          ],
        }, c);
      } else {
        return c.json({ error: 'Name grant not found' }, 404);
      }
    } catch (e) {
      logi({ level: 'error', ns: 'ditto.api.admin.account.action', type: data.type, requestId, error: errorJson(e) });
      return c.json({ error: 'Unexpected runtime error' }, 500);

@@ -1,19 +1,17 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';

import { AppController } from '@/app.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getAuthor } from '@/queries.ts';
import { addTag } from '@/utils/tags.ts';
import { createEvent, parseBody, updateAdminEvent } from '@/utils/api.ts';
import { parseBody, updateAdminEvent } from '@/utils/api.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
import { deleteTag } from '@/utils/tags.ts';
import { DittoZapSplits, getZapSplits } from '@/utils/zap-split.ts';
import { screenshotsSchema } from '@/schemas/nostr.ts';
import { booleanParamSchema, percentageSchema } from '@/schema.ts';
import { percentageSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { renderNameRequest } from '@/views/ditto.ts';
import { accountFromPubkey } from '@/views/mastodon/accounts.ts';
import { renderAccount } from '@/views/mastodon/accounts.ts';
import { updateListAdminEvent } from '@/utils/api.ts';

@@ -81,102 +79,6 @@ function renderRelays(event: NostrEvent): RelayEntity[] {
  }, [] as RelayEntity[]);
}

const nameRequestSchema = z.object({
  name: z.string().email(),
  reason: z.string().max(500).optional(),
});

export const nameRequestController: AppController = async (c) => {
  const { conf, relay, user } = c.var;

  const pubkey = await user!.signer.getPublicKey();
  const result = nameRequestSchema.safeParse(await c.req.json());

  if (!result.success) {
    return c.json({ error: 'Invalid username', schema: result.error }, 400);
  }

  const { name, reason } = result.data;

  const [existing] = await relay.query([{ kinds: [3036], authors: [pubkey], '#r': [name.toLowerCase()], limit: 1 }]);
  if (existing) {
    return c.json({ error: 'Name request already exists' }, 400);
  }

  const r: string[][] = [['r', name]];

  if (name !== name.toLowerCase()) {
    r.push(['r', name.toLowerCase()]);
  }

  const event = await createEvent({
    kind: 3036,
    content: reason,
    tags: [
      ...r,
      ['L', 'nip05.domain'],
      ['l', name.split('@')[1], 'nip05.domain'],
      ['p', await conf.signer.getPublicKey()],
    ],
  }, c);

  await hydrateEvents({ ...c.var, events: [event] });

  const nameRequest = await renderNameRequest(event);
  return c.json(nameRequest);
};

const nameRequestsSchema = z.object({
  approved: booleanParamSchema.optional(),
  rejected: booleanParamSchema.optional(),
});

export const nameRequestsController: AppController = async (c) => {
  const { conf, relay, user } = c.var;
  const pubkey = await user!.signer.getPublicKey();

  const params = c.get('pagination');
  const { approved, rejected } = nameRequestsSchema.parse(c.req.query());

  const filter: NostrFilter = {
    kinds: [30383],
    authors: [await conf.signer.getPublicKey()],
    '#k': ['3036'],
    '#p': [pubkey],
    ...params,
  };

  if (approved) {
    filter['#n'] = ['approved'];
  }
  if (rejected) {
    filter['#n'] = ['rejected'];
  }

  const orig = await relay.query([filter]);
  const ids = new Set<string>();

  for (const event of orig) {
    const d = event.tags.find(([name]) => name === 'd')?.[1];
    if (d) {
      ids.add(d);
    }
  }

  if (!ids.size) {
    return c.json([]);
  }

  const events = await relay.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }])
    .then((events) => hydrateEvents({ ...c.var, events }));

  const nameRequests = await Promise.all(
    events.map((event) => renderNameRequest(event)),
  );

  return paginated(c, orig, nameRequests);
};

const zapSplitSchema = z.record(
  n.id(),
  z.object({
@@ -1,10 +1,10 @@
import { z } from 'zod';

import { type AppController } from '@/app.ts';
import { configSchema, elixirTupleSchema } from '@/schemas/pleroma-api.ts';
import { createAdminEvent, updateAdminEvent, updateUser } from '@/utils/api.ts';
import { lookupPubkey } from '@/utils/lookup.ts';
import { getPleromaConfigs } from '@/utils/pleroma.ts';
import { configSchema, elixirTupleSchema } from '@/schemas/pleroma-api.ts';

const frontendConfigController: AppController = async (c) => {
  const configDB = await getPleromaConfigs(c.var);
@@ -1,13 +1,11 @@
import { paginated, paginatedList } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { nip19 } from 'nostr-tools';
import { z } from 'zod';

import { AppContext, AppController } from '@/app.ts';
import { booleanParamSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { extractIdentifier, lookupPubkey } from '@/utils/lookup.ts';
import { lookupNip05 } from '@/utils/nip05.ts';
import { extractIdentifier, lookupEvent, lookupPubkey } from '@/utils/lookup.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
import { getFollowedPubkeys } from '@/queries.ts';

@@ -34,7 +32,11 @@ const searchController: AppController = async (c) => {
    return c.json({ error: 'Bad request', schema: result.error }, 422);
  }

  const event = await lookupEvent(c, { ...result.data, ...pagination });
  if (!c.var.pool) {
    throw new Error('Ditto pool not available');
  }

  const event = await lookupEvent(result.data.q, { ...c.var, pool: c.var.pool });
  const lookup = extractIdentifier(result.data.q);

  // Render account from pubkey.

@@ -50,7 +52,7 @@ const searchController: AppController = async (c) => {
  let events: NostrEvent[] = [];

  if (event) {
    events = [event];
    events = await hydrateEvents({ ...c.var, events: [event] });
  }

  events.push(...(await searchEvents(c, { ...result.data, ...pagination, viewerPubkey }, signal)));

@@ -145,67 +147,4 @@ function typeToKinds(type: SearchQuery['type']): number[] {
  }
}

/** Resolve a searched value into an event, if applicable. */
async function lookupEvent(c: AppContext, query: SearchQuery): Promise<NostrEvent | undefined> {
  const { relay, signal } = c.var;
  const filters = await getLookupFilters(c, query);

  return relay.query(filters, { signal })
    .then((events) => hydrateEvents({ ...c.var, events }))
    .then(([event]) => event);
}

/** Get filters to lookup the input value. */
async function getLookupFilters(c: AppContext, { q, type, resolve }: SearchQuery): Promise<NostrFilter[]> {
  const accounts = !type || type === 'accounts';
  const statuses = !type || type === 'statuses';

  if (!resolve || type === 'hashtags') {
    return [];
  }

  if (n.id().safeParse(q).success) {
    const filters: NostrFilter[] = [];
    if (accounts) filters.push({ kinds: [0], authors: [q] });
    if (statuses) filters.push({ kinds: [1, 20], ids: [q] });
    return filters;
  }

  const lookup = extractIdentifier(q);
  if (!lookup) return [];

  try {
    const result = nip19.decode(lookup);
    const filters: NostrFilter[] = [];
    switch (result.type) {
      case 'npub':
        if (accounts) filters.push({ kinds: [0], authors: [result.data] });
        break;
      case 'nprofile':
        if (accounts) filters.push({ kinds: [0], authors: [result.data.pubkey] });
        break;
      case 'note':
        if (statuses) filters.push({ kinds: [1, 20], ids: [result.data] });
        break;
      case 'nevent':
        if (statuses) filters.push({ kinds: [1, 20], ids: [result.data.id] });
        break;
    }
    return filters;
  } catch {
    // fall through
  }

  try {
    const { pubkey } = await lookupNip05(lookup, c.var);
    if (pubkey) {
      return [{ kinds: [0], authors: [pubkey] }];
    }
  } catch {
    // fall through
  }

  return [];
}

export { searchController };
@@ -7,7 +7,6 @@ import { z } from 'zod';
import { AppController } from '@/app.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { generateDateRange, Time } from '@/utils/time.ts';
import { unfurlCardCached } from '@/utils/unfurl.ts';
import { errorJson } from '@/utils/log.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';

@@ -94,9 +93,8 @@ const trendingLinksController: AppController = async (c) => {
async function getTrendingLinks(conf: DittoConf, relay: NStore): Promise<TrendingLink[]> {
  const trends = await getTrendingTags(relay, 'r', await conf.signer.getPublicKey());

  return Promise.all(trends.map(async (trend) => {
  return Promise.all(trends.map((trend) => {
    const link = trend.value;
    const card = await unfurlCardCached(link);

    const history = trend.history.map(({ day, authors, uses }) => ({
      day: String(day),

@@ -119,7 +117,6 @@ async function getTrendingLinks(conf: DittoConf, relay: NStore): Promise<Trendin
      image: null,
      embed_url: '',
      blurhash: null,
      ...card,
      history,
    };
  }));
@@ -60,7 +60,7 @@ function connectStream(socket: WebSocket, ip: string | undefined, opts: ConnectS
    .reduce((acc, limiter) => Math.min(acc, limiter.client(ip).remaining), Infinity);

  if (remaining < 0) {
    socket.close(1008, 'Rate limit exceeded');
    closeSocket(1008, 'Rate limit exceeded');
    return;
  }
}

@@ -74,7 +74,7 @@ function connectStream(socket: WebSocket, ip: string | undefined, opts: ConnectS
  if (rateLimited(limiters.msg)) return;

  if (typeof e.data !== 'string') {
    socket.close(1003, 'Invalid message');
    closeSocket(1003, 'Invalid message');
    return;
  }

@@ -95,13 +95,28 @@ function connectStream(socket: WebSocket, ip: string | undefined, opts: ConnectS
  };

  socket.onclose = () => {
    handleSocketClose();
  };

  // HACK: Due to a bug in Deno, we need to call the close handler manually.
  // https://github.com/denoland/deno/issues/27924
  function closeSocket(code?: number, reason?: string): void {
    for (const controller of controllers.values()) {
      controller.abort();
    }
    send(['NOTICE', `closed: ${reason} (${code})`]);
    socket.close(code, reason);
    handleSocketClose();
  }

  function handleSocketClose() {
    connections.delete(socket);
    relayConnectionsGauge.set(connections.size);

    for (const controller of controllers.values()) {
      controller.abort();
    }
  };
}

@@ -109,7 +124,7 @@ function connectStream(socket: WebSocket, ip: string | undefined, opts: ConnectS
  try {
    client.hit();
  } catch {
    socket.close(1008, 'Rate limit exceeded');
    closeSocket(1008, 'Rate limit exceeded');
    return true;
  }
}

@@ -141,10 +156,17 @@ function connectStream(socket: WebSocket, ip: string | undefined, opts: ConnectS
  const controller = new AbortController();
  controllers.get(subId)?.abort();
  controllers.set(subId, controller);
  const signal = controller.signal;

  try {
    for await (const [verb, , ...rest] of relay.req(filters, { limit: 100, timeout: conf.db.timeouts.relay })) {
      send([verb, subId, ...rest] as NostrRelayMsg);
    for await (const msg of relay.req(filters, { limit: 100, signal, timeout: conf.db.timeouts.relay })) {
      if (msg[0] === 'EVENT') {
        const [, , event] = msg;
        send(['EVENT', subId, purifyEvent(event)]);
      } else {
        const [verb, , ...rest] = msg;
        send([verb, subId, ...rest] as NostrRelayMsg);
      }
    }
  } catch (e) {
    if (e instanceof RelayError) {

@@ -154,8 +176,8 @@ function connectStream(socket: WebSocket, ip: string | undefined, opts: ConnectS
    } else {
      send(['CLOSED', subId, 'error: something went wrong']);
    }
  } finally {
    controllers.delete(subId);
    return;
  }
}
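
For reference, a sketch of the relay-to-client message shapes the loop above forwards over the websocket. The type alias is illustrative and not part of the codebase:

// Illustrative NIP-01 relay-to-client message shapes handled above.
import type { NostrEvent } from '@nostrify/nostrify';

type RelayToClientMsg =
  | ['EVENT', string, NostrEvent] // event payloads are passed through purifyEvent() first
  | ['EOSE', string]              // end of stored events for a subscription id
  | ['CLOSED', string, string]    // subscription closed with a human-readable reason
  | ['NOTICE', string];           // e.g. the "closed: ..." notice sent by closeSocket()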
@@ -5,27 +5,23 @@ import { AppController } from '@/app.ts';
import { localNip05Lookup } from '@/utils/nip05.ts';

const nameSchema = z.string().min(1).regex(/^[\w.-]+$/);
const emptyResult: NostrJson = { names: {}, relays: {} };

/**
 * Serves NIP-05's nostr.json.
 * https://github.com/nostr-protocol/nips/blob/master/05.md
 */
const nostrController: AppController = async (c) => {
  // If there are no query parameters, this will always return an empty result.
  if (!Object.entries(c.req.queries()).length) {
    c.header('Cache-Control', 'max-age=31536000, public, immutable, stale-while-revalidate=86400');
    return c.json(emptyResult);
  const result = nameSchema.safeParse(c.req.query('name'));

  if (!result.success) {
    return c.json({ error: 'Invalid name parameter' }, { status: 422 });
  }

  const result = nameSchema.safeParse(c.req.query('name'));
  const name = result.success ? result.data : undefined;
  const name = result.data;
  const pointer = name ? await localNip05Lookup(name, c.var) : undefined;

  if (!name || !pointer) {
    // Not found, cache for 5 minutes.
    c.header('Cache-Control', 'max-age=300, public, stale-while-revalidate=30');
    return c.json(emptyResult);
  if (!pointer) {
    return c.json({ names: {}, relays: {} } satisfies NostrJson, { status: 404 });
  }

  const { pubkey, relays = [] } = pointer;
@@ -1,5 +1,6 @@
{
  "name": "@ditto/ditto",
  "version": "1.1.0",
  "exports": {},
  "imports": {
    "@/": "./",
@@ -1,6 +1,8 @@
import { NostrEvent } from '@nostrify/nostrify';
import { LanguageCode } from 'iso-639-1';

import type { MastodonPreviewCard } from '@ditto/mastoapi/types';

/** Ditto internal stats for the event's author. */
export interface AuthorStats {
  followers_count: number;

@@ -22,6 +24,7 @@ export interface EventStats {
  quotes_count: number;
  reactions: Record<string, number>;
  zaps_amount: number;
  link_preview?: MastodonPreviewCard;
}

/** Internal Event representation used by Ditto, including extra keys. */
packages/ditto/routes/dittoNamesRoute.test.ts (new file, 59 lines)

@@ -0,0 +1,59 @@
import { TestApp } from '@ditto/mastoapi/test';
import { assertEquals } from '@std/assert';

import route from './dittoNamesRoute.ts';

Deno.test('POST / creates a name request event', async () => {
  await using app = new TestApp(route);
  const { conf, relay } = app.var;

  const user = app.user();

  const response = await app.api.post('/', { name: 'Alex@Ditto.pub', reason: 'for testing' });

  assertEquals(response.status, 200);

  const [event] = await relay.query([{ kinds: [3036], authors: [await user.signer.getPublicKey()] }]);

  assertEquals(event?.tags, [
    ['r', 'Alex@Ditto.pub'],
    ['r', 'alex@ditto.pub'],
    ['L', 'nip05.domain'],
    ['l', 'ditto.pub', 'nip05.domain'],
    ['p', await conf.signer.getPublicKey()],
  ]);

  assertEquals(event?.content, 'for testing');
});

Deno.test('POST / can be called multiple times with the same name', async () => {
  await using app = new TestApp(route);

  app.user();

  const response1 = await app.api.post('/', { name: 'alex@ditto.pub' });
  const response2 = await app.api.post('/', { name: 'alex@ditto.pub' });

  assertEquals(response1.status, 200);
  assertEquals(response2.status, 200);
});

Deno.test('POST / returns 400 if the name has already been granted', async () => {
  await using app = new TestApp(route);
  const { conf, relay } = app.var;

  app.user();

  const grant = await conf.signer.signEvent({
    kind: 30360,
    tags: [['d', 'alex@ditto.pub']],
    content: '',
    created_at: 0,
  });

  await relay.event(grant);

  const response = await app.api.post('/', { name: 'alex@ditto.pub' });

  assertEquals(response.status, 400);
});
packages/ditto/routes/dittoNamesRoute.ts (new file, 130 lines)

@@ -0,0 +1,130 @@
import { paginationMiddleware, userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoRoute } from '@ditto/mastoapi/router';
import { z } from 'zod';

import { createEvent } from '@/utils/api.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { renderNameRequest } from '@/views/ditto.ts';
import { booleanParamSchema } from '@/schema.ts';
import { NostrFilter } from '@nostrify/nostrify';

const nameRequestSchema = z.object({
  name: z.string().email(),
  reason: z.string().max(500).optional(),
});

const route = new DittoRoute();

route.post('/', userMiddleware(), async (c) => {
  const { conf, relay, user } = c.var;

  const result = nameRequestSchema.safeParse(await c.req.json());

  if (!result.success) {
    return c.json({ error: 'Invalid username', schema: result.error }, 422);
  }

  const pubkey = await user.signer.getPublicKey();
  const adminPubkey = await conf.signer.getPublicKey();

  const { name, reason } = result.data;
  const [_localpart, domain] = name.split('@');

  if (domain.toLowerCase() !== conf.url.host.toLowerCase()) {
    return c.json({ error: 'Unsupported domain' }, 422);
  }

  const d = name.toLowerCase();

  const [grant] = await relay.query([{ kinds: [30360], authors: [adminPubkey], '#d': [d] }]);
  if (grant) {
    return c.json({ error: 'Name has already been granted' }, 400);
  }

  const [pending] = await relay.query([{
    kinds: [30383],
    authors: [adminPubkey],
    '#p': [pubkey],
    '#k': ['3036'],
    '#r': [d],
    '#n': ['pending'],
    limit: 1,
  }]);
  if (pending) {
    return c.json({ error: 'You have already requested that name, and it is pending approval by staff' }, 400);
  }

  const tags: string[][] = [['r', name]];

  if (name !== name.toLowerCase()) {
    tags.push(['r', name.toLowerCase()]);
  }

  const event = await createEvent({
    kind: 3036,
    content: reason,
    tags: [
      ...tags,
      ['L', 'nip05.domain'],
      ['l', domain.toLowerCase(), 'nip05.domain'],
      ['p', await conf.signer.getPublicKey()],
    ],
  }, c);

  await hydrateEvents({ ...c.var, events: [event] });

  const nameRequest = await renderNameRequest(event);
  return c.json(nameRequest);
});

const nameRequestsSchema = z.object({
  approved: booleanParamSchema.optional(),
  rejected: booleanParamSchema.optional(),
});

route.get('/', paginationMiddleware(), userMiddleware(), async (c) => {
  const { conf, relay, user, pagination } = c.var;
  const pubkey = await user!.signer.getPublicKey();

  const { approved, rejected } = nameRequestsSchema.parse(c.req.query());

  const filter: NostrFilter = {
    kinds: [30383],
    authors: [await conf.signer.getPublicKey()],
    '#k': ['3036'],
    '#p': [pubkey],
    ...pagination,
  };

  if (approved) {
    filter['#n'] = ['approved'];
  }
  if (rejected) {
    filter['#n'] = ['rejected'];
  }

  const orig = await relay.query([filter]);
  const ids = new Set<string>();

  for (const event of orig) {
    const d = event.tags.find(([name]) => name === 'd')?.[1];
    if (d) {
      ids.add(d);
    }
  }

  if (!ids.size) {
    return c.json([]);
  }

  const events = await relay.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }])
    .then((events) => hydrateEvents({ ...c.var, events }));

  const nameRequests = await Promise.all(
    events.map((event) => renderNameRequest(event)),
  );

  return c.var.paginate(orig, nameRequests);
});

export default route;
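
A hedged usage sketch of this route once mounted at /api/v1/ditto/names in app.ts; the host and bearer token below are placeholders:

// Illustrative client call against the new names route.
const res = await fetch('https://ditto.example/api/v1/ditto/names', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: 'Bearer <token>', // placeholder credential
  },
  body: JSON.stringify({ name: 'alex@ditto.example', reason: 'claiming my handle' }),
});

console.log(res.status); // 200 with a rendered name request; 400/422 on duplicates or bad input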
@@ -0,0 +1,68 @@
import { TestApp } from '@ditto/mastoapi/test';
import { assertEquals } from '@std/assert';
import { nip19 } from 'nostr-tools';

import route from './pleromaAdminPermissionGroupsRoute.ts';

Deno.test('POST /admin returns 403 if user is not an admin', async () => {
  await using app = new TestApp(route);

  app.user();

  const response = await app.api.post('/admin', { nicknames: ['alex@ditto.pub'] });

  assertEquals(response.status, 403);
});

Deno.test('POST /admin promotes to admin', async () => {
  await using app = new TestApp(route);
  const { conf, relay } = app.var;

  await app.admin();

  const pawn = app.createUser();
  const pubkey = await pawn.signer.getPublicKey();

  const response = await app.api.post('/admin', { nicknames: [nip19.npubEncode(pubkey)] });
  const json = await response.json();

  assertEquals(response.status, 200);
  assertEquals(json, { is_admin: true });

  const [event] = await relay.query([{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey] }]);

  assertEquals(event.tags, [['d', pubkey], ['n', 'admin']]);
});

Deno.test('POST /moderator promotes to moderator', async () => {
  await using app = new TestApp(route);
  const { conf, relay } = app.var;

  await app.admin();

  const pawn = app.createUser();
  const pubkey = await pawn.signer.getPublicKey();

  const response = await app.api.post('/moderator', { nicknames: [nip19.npubEncode(pubkey)] });
  const json = await response.json();

  assertEquals(response.status, 200);
  assertEquals(json, { is_moderator: true });

  const [event] = await relay.query([{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey] }]);

  assertEquals(event.tags, [['d', pubkey], ['n', 'moderator']]);
});

Deno.test('POST /:group with an invalid group returns 422', async () => {
  await using app = new TestApp(route);

  await app.admin();

  const pawn = app.createUser();
  const pubkey = await pawn.signer.getPublicKey();

  const response = await app.api.post('/yolo', { nicknames: [nip19.npubEncode(pubkey)] });

  assertEquals(response.status, 422);
});
packages/ditto/routes/pleromaAdminPermissionGroupsRoute.ts (new file, 40 lines)

@@ -0,0 +1,40 @@
import { userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoRoute } from '@ditto/mastoapi/router';
import { z } from 'zod';

import { parseBody, updateUser } from '@/utils/api.ts';
import { lookupPubkey } from '@/utils/lookup.ts';

const route = new DittoRoute();

const pleromaPromoteAdminSchema = z.object({
  nicknames: z.string().array(),
});

route.post('/:group', userMiddleware({ role: 'admin' }), async (c) => {
  const body = await parseBody(c.req.raw);
  const result = pleromaPromoteAdminSchema.safeParse(body);
  const group = c.req.param('group');

  if (!result.success) {
    return c.json({ error: 'Bad request', schema: result.error }, 422);
  }

  if (!['admin', 'moderator'].includes(group)) {
    return c.json({ error: 'Bad request', schema: 'Invalid group' }, 422);
  }

  const { data } = result;
  const { nicknames } = data;

  for (const nickname of nicknames) {
    const pubkey = await lookupPubkey(nickname, c.var);
    if (pubkey) {
      await updateUser(pubkey, { [group]: true }, c);
    }
  }

  return c.json({ [`is_${group}`]: true }, 200);
});

export default route;
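
Likewise, an illustrative call to this Pleroma-compatible route once mounted at /api/v1/pleroma/admin/users/permission_group in app.ts; the host, token and npub are placeholders:

// Illustrative admin call promoting a user to moderator.
const res = await fetch(
  'https://ditto.example/api/v1/pleroma/admin/users/permission_group/moderator',
  {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: 'Bearer <admin-token>', // placeholder credential
    },
    body: JSON.stringify({ nicknames: ['npub1...'] }),
  },
);

console.log(await res.json()); // { is_moderator: true } on success; 422 for an unknown group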
@@ -118,7 +118,7 @@ export class DittoPgStore extends NPostgres {
      const [event] = await this.query([{ ids: [id] }]);

      if (event) {
        await this.fulfill(event);
        await this.fulfill(purifyEvent(event));
      }
    });
  }

@@ -375,7 +375,7 @@ export class DittoPgStore extends NPostgres {
  /** Get events for filters from the database. */
  override async query(
    filters: NostrFilter[],
    opts: { signal?: AbortSignal; pure?: boolean; timeout?: number; limit?: number } = {},
    opts: { signal?: AbortSignal; timeout?: number; limit?: number } = {},
  ): Promise<DittoEvent[]> {
    filters = await this.expandFilters(filters);
packages/ditto/storages/DittoPool.test.ts (new file, 77 lines)

@@ -0,0 +1,77 @@
import { DittoConf } from '@ditto/conf';
import { genEvent, MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { generateSecretKey, getPublicKey, nip19 } from 'nostr-tools';

import { DittoPool } from './DittoPool.ts';

Deno.test('DittoPool.reqRouter', async (t) => {
  const nsec = generateSecretKey();
  const conf = new DittoConf(new Map([['DITTO_NSEC', nip19.nsecEncode(nsec)]]));
  const relay = new MockRelay();

  const pool = new DittoPool({ conf, relay });

  const [alex, mk] = [
    generateKeypair(),
    generateKeypair(),
  ];

  const [ditto, henhouse, gleasonator] = [
    'wss://ditto.pub/relay',
    'wss://henhouse.social/relay',
    'wss://gleasonator.dev/relay',
  ];

  const events = [
    genEvent({ kind: 10002, tags: [['r', gleasonator], ['r', ditto]] }, alex.sk),
    genEvent({ kind: 10002, tags: [['r', henhouse], ['r', ditto]] }, mk.sk),
  ];

  for (const event of events) {
    await relay.event(event);
  }

  await t.step('no authors', async () => {
    const reqRoutes = await pool.reqRouter([{ kinds: [1] }]);
    assertEquals(reqRoutes, new Map());
  });

  await t.step('single author', async () => {
    const reqRoutes = await pool.reqRouter([{ kinds: [10002], authors: [alex.pk] }]);

    const expected = new Map([
      [ditto, [{ kinds: [10002], authors: [alex.pk] }]],
      [gleasonator, [{ kinds: [10002], authors: [alex.pk] }]],
    ]);

    assertEquals(reqRoutes, expected);
  });

  await t.step('multiple authors', async () => {
    const reqRoutes = await pool.reqRouter([{ kinds: [10002], authors: [alex.pk, mk.pk] }]);

    const expected = new Map([
      [ditto, [{ kinds: [10002], authors: [alex.pk, mk.pk] }]],
      [henhouse, [{ kinds: [10002], authors: [mk.pk] }]],
      [gleasonator, [{ kinds: [10002], authors: [alex.pk] }]],
    ]);

    assertEquals(reqRoutes, expected);
  });

  await t.step('no authors with fallback', async () => {
    const fallback = genEvent({ kind: 10002, tags: [['r', ditto]] }, nsec);
    await relay.event(fallback);

    const reqRoutes = await pool.reqRouter([{ kinds: [1] }]);
    const expected = new Map([[ditto, [{ kinds: [1] }]]]);

    assertEquals(reqRoutes, expected);
  });
});

function generateKeypair(): { pk: string; sk: Uint8Array } {
  const sk = generateSecretKey();
  return { pk: getPublicKey(sk), sk };
}
@@ -6,6 +6,7 @@ import { logi } from '@soapbox/logi';
interface DittoPoolOpts {
  conf: DittoConf;
  relay: NRelay;
  maxReqRelays?: number;
  maxEventRelays?: number;
}

@@ -32,18 +33,62 @@ export class DittoPool extends NPool<NRelay1> {
    this._opts = opts;
  }

  private async reqRouter(filters: NostrFilter[]): Promise<Map<string, NostrFilter[]>> {
    const routes = new Map<string, NostrFilter[]>();
  async reqRouter(filters: NostrFilter[]): Promise<Map<string, NostrFilter[]>> {
    const { conf, relay, maxReqRelays = 5 } = this._opts;

    for (const relayUrl of await this.getRelayUrls({ marker: 'read' })) {
      routes.set(relayUrl, filters);
    const routes = new Map<string, NostrFilter[]>();
    const authors = new Set<string>();

    for (const filter of filters) {
      if (filter.authors) {
        for (const author of filter.authors) {
          authors.add(author);
        }
      }
    }

    const pubkey = await conf.signer.getPublicKey();
    const map = new Map<string, NostrEvent>();

    for (const event of await relay.query([{ kinds: [10002], authors: [pubkey, ...authors] }])) {
      map.set(event.pubkey, event);
    }

    for (const filter of filters) {
      if (filter.authors) {
        const relayAuthors = new Map<`wss://${string}`, Set<string>>();

        for (const author of filter.authors) {
          const event = map.get(author) ?? map.get(pubkey);
          if (event) {
            for (const relayUrl of [...this.getEventRelayUrls(event, 'write')].slice(0, maxReqRelays)) {
              const value = relayAuthors.get(relayUrl);
              relayAuthors.set(relayUrl, value ? new Set([...value, author]) : new Set([author]));
            }
          }
        }

        for (const [relayUrl, authors] of relayAuthors) {
          const value = routes.get(relayUrl);
          const _filter = { ...filter, authors: [...authors] };
          routes.set(relayUrl, value ? [...value, _filter] : [_filter]);
        }
      } else {
        const event = map.get(pubkey);
        if (event) {
          for (const relayUrl of [...this.getEventRelayUrls(event, 'read')].slice(0, maxReqRelays)) {
            const value = routes.get(relayUrl);
            routes.set(relayUrl, value ? [...value, filter] : [filter]);
          }
        }
      }
    }

    return routes;
  }

  private async eventRouter(event: NostrEvent): Promise<string[]> {
    const { conf, maxEventRelays = 4 } = this._opts;
  async eventRouter(event: NostrEvent): Promise<string[]> {
    const { conf, maxEventRelays = 10 } = this._opts;
    const { pubkey } = event;

    const relaySet = await this.getRelayUrls({ pubkey, marker: 'write' });

@@ -72,16 +117,26 @@ export class DittoPool extends NPool<NRelay1> {
    }

    for (const event of events) {
      for (const [name, relayUrl, marker] of event.tags) {
        if (name === 'r' && (!marker || !opts.marker || marker === opts.marker)) {
          try {
            const url = new URL(relayUrl);
            if (url.protocol === 'wss:') {
              relays.add(url.toString() as `wss://${string}`);
            }
          } catch {
            // fallthrough
      for (const relayUrl of this.getEventRelayUrls(event, opts.marker)) {
        relays.add(relayUrl);
      }
    }

    return relays;
  }

  private getEventRelayUrls(event: NostrEvent, marker?: 'read' | 'write'): Set<`wss://${string}`> {
    const relays = new Set<`wss://${string}`>();

    for (const [name, relayUrl, _marker] of event.tags) {
      if (name === 'r' && (!marker || !_marker || marker === _marker)) {
        try {
          const url = new URL(relayUrl);
          if (url.protocol === 'wss:') {
            relays.add(url.toString() as `wss://${string}`);
          }
        } catch {
          // fallthrough
        }
      }
    }
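
A short sketch of how the routers above are exercised through NPool's streaming interface. The construction mirrors DittoPool.test.ts earlier in this diff; the filter and the use of MockRelay as the kind-10002 relay-list source are assumptions, and DITTO_NSEC is expected in the environment for conf.signer:

// Illustrative: requests are fanned out to the relays chosen by reqRouter.
import { DittoConf } from '@ditto/conf';
import { MockRelay } from '@nostrify/nostrify/test';
import { DittoPool } from './DittoPool.ts';

const conf = new DittoConf(Deno.env); // assumes DITTO_NSEC is set
const relay = new MockRelay();        // stands in for the kind-10002 relay-list store
const pool = new DittoPool({ conf, relay, maxReqRelays: 5 });

for await (const msg of pool.req([{ kinds: [1], limit: 10 }])) {
  if (msg[0] === 'EVENT') console.log('got event', msg[2].id);
  if (msg[0] === 'EOSE') break; // stop after stored events from the routed relays
}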
@@ -2,11 +2,39 @@ import { DittoPolyPg } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { genEvent, MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { waitFor } from '@std/async/unstable-wait-for';
import { generateSecretKey, getPublicKey } from 'nostr-tools';

import { DittoRelayStore } from './DittoRelayStore.ts';
import { DittoRelayStore } from '@/storages/DittoRelayStore.ts';

import type { NostrMetadata } from '@nostrify/types';
import { nostrNow } from '@/utils.ts';

Deno.test('generates set event for nip05 request', async () => {
  await using test = setupTest();

  const admin = await test.conf.signer.getPublicKey();
  const event = genEvent({ kind: 3036, tags: [['r', 'alex@gleasonator.dev'], ['p', admin]] });

  await test.store.event(event);

  const filter = { kinds: [30383], authors: [admin], '#d': [event.id] };

  await waitFor(async () => {
    const { count } = await test.store.count([filter]);
    return count > 0;
  }, 3000);

  const [result] = await test.store.query([filter]);

  assertEquals(result?.tags, [
    ['d', event.id],
    ['p', event.pubkey],
    ['k', '3036'],
    ['r', 'alex@gleasonator.dev'],
    ['n', 'pending'],
  ]);
});

Deno.test('updateAuthorData sets nip05', async () => {
  const alex = generateSecretKey();

@@ -38,20 +66,136 @@ Deno.test('updateAuthorData sets nip05', async () => {
  assertEquals(row?.nip05_hostname, 'gleasonator.dev');
});

function setupTest(cb: (req: Request) => Response | Promise<Response>) {
Deno.test('Admin revokes nip05 grant and nip05 column gets null', async () => {
  const alex = generateSecretKey();

  await using test = setupTest((req) => {
    switch (req.url) {
      case 'https://gleasonator.dev/.well-known/nostr.json?name=alex':
        return jsonResponse({ names: { alex: getPublicKey(alex) } });
      default:
        return new Response('Not found', { status: 404 });
    }
  });

  const { db, store, conf } = test;

  const metadata: NostrMetadata = { nip05: 'alex@gleasonator.dev' };
  const event = genEvent({ kind: 0, content: JSON.stringify(metadata) }, alex);

  await store.event(event);

  await waitFor(async () => {
    const row = await db.kysely
      .selectFrom('author_stats')
      .selectAll()
      .where('pubkey', '=', getPublicKey(alex))
      .executeTakeFirst();

    assertEquals(row?.nip05, 'alex@gleasonator.dev');
    assertEquals(row?.nip05_domain, 'gleasonator.dev');
    assertEquals(row?.nip05_hostname, 'gleasonator.dev');

    return true;
  }, 3000);

  const grant = await conf.signer.signEvent({
    kind: 30360,
    tags: [
      ['d', 'alex@gleasonator.dev'],
      ['r', 'alex@gleasonator.dev'],
      ['L', 'nip05.domain'],
      ['l', 'gleasonator.dev', 'nip05.domain'],
      ['p', event.pubkey],
      ['e', 'whatever'],
    ],
    created_at: nostrNow(),
    content: '',
  });

  await store.event(grant);

  const adminDeletion = await conf.signer.signEvent({
    kind: 5,
    tags: [
      ['k', '30360'],
      ['e', grant.id],
    ],
    created_at: nostrNow(),
    content: '',
  });

  await store.event(adminDeletion);

  const nullRow = await db.kysely
    .selectFrom('author_stats')
    .selectAll()
    .where('pubkey', '=', getPublicKey(alex))
    .executeTakeFirst();

  assertEquals(nullRow?.nip05, null);
  assertEquals(nullRow?.nip05_domain, null);
  assertEquals(nullRow?.nip05_hostname, null);
});

Deno.test('fetchRelated', async () => {
  await using test = setupTest();
  const { pool, store } = test;

  const post = genEvent({ kind: 1, content: 'hi' });
  const reply = genEvent({ kind: 1, content: 'wussup?', tags: [['e', post.id], ['p', post.pubkey]] });

  await pool.event(post);
  await pool.event(reply);

  await store.event(reply);

  await waitFor(async () => {
    const { count } = await test.store.count([{ ids: [post.id] }]);
    return count > 0;
  }, 3000);
});

Deno.test('event author is fetched', async () => {
  await using test = setupTest();
  const { pool, store } = test;

  const sk = generateSecretKey();
  const pubkey = getPublicKey(sk);

  const post = genEvent({ kind: 1 }, sk);
  const author = genEvent({ kind: 0 }, sk);

  await pool.event(author);
  await store.event(post);

  const [result] = await store.query([{ kinds: [0], authors: [pubkey] }]);

  assertEquals(result?.id, author.id);
});

function setupTest(cb?: (req: Request) => Response | Promise<Response>) {
  const conf = new DittoConf(Deno.env);
  const db = new DittoPolyPg(conf.databaseUrl);

  const pool = new MockRelay();
  const relay = new MockRelay();

  const mockFetch: typeof fetch = async (input, init) => {
    const req = new Request(input, init);
    return await cb(req);
    if (cb) {
      return await cb(req);
    } else {
      return new Response('Not mocked', { status: 404 });
    }
  };

  const store = new DittoRelayStore({ conf, db, relay, fetch: mockFetch });
  const store = new DittoRelayStore({ conf, db, pool, relay, fetch: mockFetch });

  return {
    db,
    conf,
    pool,
    store,
    [Symbol.asyncDispose]: async () => {
      await store[Symbol.asyncDispose]();
@ -1,6 +1,7 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
import { DittoDB, DittoTables } from '@ditto/db';
|
||||
import {
|
||||
activeAuthorSubscriptionsGauge,
|
||||
cachedFaviconsSizeGauge,
|
||||
cachedNip05sSizeGauge,
|
||||
pipelineEventsCounter,
|
||||
|
|
@ -18,6 +19,7 @@ import {
|
|||
NRelay,
|
||||
NSchema as n,
|
||||
} from '@nostrify/nostrify';
|
||||
import { nip19 } from 'nostr-tools';
|
||||
import { logi } from '@soapbox/logi';
|
||||
import { UpdateObject } from 'kysely';
|
||||
import { LRUCache } from 'lru-cache';
|
||||
|
|
@ -28,7 +30,7 @@ import { DittoPush } from '@/DittoPush.ts';
|
|||
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
|
||||
import { RelayError } from '@/RelayError.ts';
|
||||
import { hydrateEvents } from '@/storages/hydrate.ts';
|
||||
import { eventAge, nostrNow, Time } from '@/utils.ts';
|
||||
import { eventAge, isNostrId, nostrNow, Time } from '@/utils.ts';
|
||||
import { getAmount } from '@/utils/bolt11.ts';
|
||||
import { errorJson } from '@/utils/log.ts';
|
||||
import { purifyEvent } from '@/utils/purify.ts';
|
||||
|
|
@ -39,13 +41,13 @@ import { fetchFavicon, insertFavicon, queryFavicon } from '@/utils/favicon.ts';
|
|||
import { lookupNip05 } from '@/utils/nip05.ts';
|
||||
import { parseNoteContent, stripimeta } from '@/utils/note.ts';
|
||||
import { SimpleLRU } from '@/utils/SimpleLRU.ts';
|
||||
import { unfurlCardCached } from '@/utils/unfurl.ts';
|
||||
import { unfurlCard } from '@/utils/unfurl.ts';
|
||||
import { renderWebPushNotification } from '@/views/mastodon/push.ts';
|
||||
import { nip19 } from 'nostr-tools';
|
||||
|
||||
interface DittoRelayStoreOpts {
|
||||
db: DittoDB;
|
||||
conf: DittoConf;
|
||||
pool: NRelay;
|
||||
relay: NRelay;
|
||||
fetch?: typeof fetch;
|
||||
}
|
||||
|
|
@ -54,6 +56,7 @@ interface DittoRelayStoreOpts {
|
|||
export class DittoRelayStore implements NRelay {
|
||||
private push: DittoPush;
|
||||
private encounters = new LRUCache<string, true>({ max: 5000 });
|
||||
private authorEncounters = new LRUCache<string, true>({ max: 5000, ttl: Time.hours(4) });
|
||||
private controller = new AbortController();
|
||||
private policyWorker: PolicyWorker;
|
||||
|
||||
|
|
@ -68,10 +71,6 @@ export class DittoRelayStore implements NRelay {
|
|||
this.push = new DittoPush(opts);
|
||||
this.policyWorker = new PolicyWorker(conf);
|
||||
|
||||
this.listen().catch((e: unknown) => {
|
||||
logi({ level: 'error', ns: this.ns, source: 'listen', error: errorJson(e) });
|
||||
});
|
||||
|
||||
this.faviconCache = new SimpleLRU<string, URL>(
|
||||
async (domain, { signal }) => {
|
||||
const row = await queryFavicon(db.kysely, domain);
|
||||
|
|
@ -93,17 +92,30 @@ export class DittoRelayStore implements NRelay {
|
|||
},
|
||||
{ ...conf.caches.nip05, gauge: cachedNip05sSizeGauge },
|
||||
);
|
||||
|
||||
this.listen().catch((e: unknown) => {
|
||||
if (e instanceof Error && e.name === 'AbortError') {
|
||||
return; // `this.close()` was called. This is expected.
|
||||
}
|
||||
|
||||
throw e;
|
||||
});
|
||||
}
|
||||
|
||||
/** Open a firehose to the relay. */
|
||||
private async listen(): Promise<void> {
|
||||
const { relay } = this.opts;
|
||||
const { signal } = this.controller;
|
||||
const { signal } = this.controller; // this controller only aborts when `this.close()` is called
|
||||
|
||||
for await (const msg of relay.req([{ limit: 0 }], { signal })) {
|
||||
if (msg[0] === 'EVENT') {
|
||||
const [, , event] = msg;
|
||||
await this.event(event, { signal });
|
||||
const { id, kind } = event;
|
||||
try {
|
||||
await this.event(event, { signal });
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: this.ns, id, kind, source: 'listen', error: errorJson(e) });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
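`listen()` now opens a single REQ with `limit: 0`, so the relay only streams events created after the subscription starts, and a failure while processing one event no longer tears down the whole loop. A stripped-down sketch of the same firehose pattern against any Nostrify `NRelay`; the logging is illustrative, not taken from the diff.

import type { NostrEvent, NRelay } from '@nostrify/nostrify';

/** Consume a live firehose: `limit: 0` skips stored events; per-event errors are isolated. */
async function firehose(
  relay: NRelay,
  onEvent: (event: NostrEvent) => Promise<void>,
  signal: AbortSignal,
): Promise<void> {
  for await (const msg of relay.req([{ limit: 0 }], { signal })) {
    if (msg[0] === 'EVENT') {
      const [, , event] = msg;
      try {
        await onEvent(event);
      } catch (e) {
        console.error('firehose handler failed', event.id, e);
      }
    }
  }
}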
|
|
@ -120,13 +132,13 @@ export class DittoRelayStore implements NRelay {
|
|||
* Common pipeline function to process (and maybe store) events.
|
||||
* It is idempotent, so it can be called multiple times for the same event.
|
||||
*/
|
||||
async event(event: DittoEvent, opts: { publish?: boolean; signal?: AbortSignal } = {}): Promise<void> {
|
||||
const { conf, relay } = this.opts;
|
||||
async event(event: DittoEvent, opts: { signal?: AbortSignal } = {}): Promise<void> {
|
||||
const { conf, relay, pool } = this.opts;
|
||||
const { signal } = opts;
|
||||
|
||||
// Skip events that have already been encountered.
|
||||
if (this.encounters.get(event.id)) {
|
||||
throw new RelayError('duplicate', 'already have this event');
|
||||
return; // NIP-01: duplicate events should have ok `true`
|
||||
}
|
||||
// Reject events that are too far in the future.
|
||||
if (eventAge(event) < -Time.minutes(1)) {
|
||||
|
|
@ -151,7 +163,7 @@ export class DittoRelayStore implements NRelay {
|
|||
}
|
||||
// Recheck encountered after async ops.
|
||||
if (this.encounters.has(event.id)) {
|
||||
throw new RelayError('duplicate', 'already have this event');
|
||||
return;
|
||||
}
|
||||
// Set the event as encountered after verifying the signature.
|
||||
this.encounters.set(event.id, true);
|
||||
|
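The duplicate handling above consults the `encounters` LRU twice: once up front for a cheap skip, and once more after the async checks, only marking the id once the signature has been verified. A minimal sketch of the same idempotency guard in isolation; the names here are illustrative, not part of the diff.

import { LRUCache } from 'lru-cache';

const seen = new LRUCache<string, true>({ max: 5000 });

/** Process an id at most once: skip if already seen, mark it only after `verify` succeeds. */
async function once(id: string, verify: () => Promise<boolean>): Promise<boolean> {
  if (seen.has(id)) return false; // fast path: already encountered
  if (!(await verify())) return false; // e.g. signature check
  if (seen.has(id)) return false; // recheck after the async gap
  seen.set(id, true);
  return true;
}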
|
@ -167,15 +179,33 @@ export class DittoRelayStore implements NRelay {
|
|||
await relay.event(event, { signal });
|
||||
}
|
||||
|
||||
// Ensure the event doesn't violate the policy.
|
||||
if (event.pubkey !== await conf.signer.getPublicKey()) {
|
||||
await this.policyFilter(event, signal);
|
||||
}
|
||||
|
||||
// Prepare the event for additional checks.
|
||||
// FIXME: This is kind of hacky. Should be reorganized to fetch only what's needed for each stage.
|
||||
await this.hydrateEvent(event, signal);
|
||||
|
||||
// Try to fetch a kind 0 for the user if we don't have one yet.
|
||||
// TODO: Create a more elaborate system to refresh all replaceable events by addr.
|
||||
if (event.kind !== 0 && !event.author?.sig && !this.authorEncounters.get(event.pubkey)) {
|
||||
activeAuthorSubscriptionsGauge.inc();
|
||||
this.authorEncounters.set(event.pubkey, true);
|
||||
|
||||
const [author] = await pool.query(
|
||||
[{ kinds: [0], authors: [event.pubkey], limit: 1 }],
|
||||
{ signal: AbortSignal.timeout(1000) },
|
||||
);
|
||||
|
||||
if (author) {
|
||||
// await because it's important to have the kind 0 before the policy filter.
|
||||
await this.event(author, { signal });
|
||||
}
|
||||
activeAuthorSubscriptionsGauge.dec();
|
||||
}
|
||||
|
||||
// Ensure the event doesn't violate the policy.
|
||||
if (event.pubkey !== await conf.signer.getPublicKey()) {
|
||||
await this.policyFilter(purifyEvent(event), signal);
|
||||
}
|
||||
|
||||
// Ensure that the author is not banned.
|
||||
const n = getTagSet(event.user?.tags ?? [], 'n');
|
||||
if (n.has('disabled')) {
|
||||
|
|
@ -183,16 +213,23 @@ export class DittoRelayStore implements NRelay {
|
|||
}
|
||||
|
||||
try {
|
||||
await this.handleRevokeNip05(event, signal);
|
||||
await relay.event(purifyEvent(event), { signal });
|
||||
} finally {
|
||||
// This needs to run in steps, and should not block the API from responding.
|
||||
const signal = AbortSignal.timeout(5000);
|
||||
Promise.allSettled([
|
||||
this.handleZaps(event),
|
||||
this.updateAuthorData(event, signal),
|
||||
this.prewarmLinkPreview(event, signal),
|
||||
this.warmLinkPreview(event, signal),
|
||||
this.generateSetEvents(event),
|
||||
])
|
||||
.then(() => this.webPush(event))
|
||||
.then(() =>
|
||||
Promise.allSettled([
|
||||
this.webPush(event),
|
||||
this.fetchRelated(event),
|
||||
])
|
||||
)
|
||||
.catch(() => {});
|
||||
}
|
||||
}
|
||||
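The post-processing fan-out runs in two stages: the first `Promise.allSettled` batch (zaps, author data, link preview, set events) settles before the second (web push, related-event fetch), and the whole chain is detached from the caller with its own 5-second signal. A generic sketch of the same staging; the stage callbacks are placeholders.

/** Run two batches of detached side effects; stage 2 only starts after stage 1 settles. */
function runStages(stage1: Array<() => Promise<void>>, stage2: Array<() => Promise<void>>): void {
  Promise.allSettled(stage1.map((fn) => fn()))
    .then(() => Promise.allSettled(stage2.map((fn) => fn())))
    .catch(() => {}); // never reject into the caller
}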
|
|
@ -245,6 +282,42 @@ export class DittoRelayStore implements NRelay {
|
|||
}
|
||||
}
|
||||
|
||||
/** Sets the nip05 column to null if the event is a revocation of a nip05 */
|
||||
private async handleRevokeNip05(event: NostrEvent, signal?: AbortSignal): Promise<void> {
|
||||
const { conf, relay, db } = this.opts;
|
||||
|
||||
if (event.kind !== 5 || await conf.signer.getPublicKey() !== event.pubkey) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!event.tags.some(([name, value]) => name === 'k' && value === '30360')) {
|
||||
return;
|
||||
}
|
||||
|
||||
const eventId = event.tags.find(([name]) => name === 'e')?.[1];
|
||||
if (!eventId || !isNostrId(eventId)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const [grant] = await relay.query([{ kinds: [30360], ids: [eventId] }], { signal });
|
||||
if (!grant) {
|
||||
return;
|
||||
}
|
||||
|
||||
const authorId = grant.tags.find(([name]) => name === 'p')?.[1];
|
||||
if (!authorId || !isNostrId(authorId)) {
|
||||
return;
|
||||
}
|
||||
|
||||
await db.kysely.updateTable('author_stats').set({
|
||||
nip05: null,
|
||||
nip05_domain: null,
|
||||
nip05_hostname: null,
|
||||
nip05_last_verified_at: null,
|
||||
}).where('pubkey', '=', authorId)
|
||||
.execute();
|
||||
}
|
||||
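`handleRevokeNip05` only reacts to a very specific deletion: a kind 5 signed by the instance key, carrying `['k', '30360']`, whose `e` tag points at a stored kind 30360 grant that in turn names the affected pubkey in a `p` tag. A sketch of an event shape that would pass those guards; the ids and pubkeys are placeholders, not values from the diff.

import type { NostrEvent } from '@nostrify/nostrify';

// Hypothetical revocation satisfying handleRevokeNip05's guards (before signing).
const revocation: Omit<NostrEvent, 'id' | 'sig'> = {
  kind: 5,
  pubkey: '<instance pubkey, i.e. conf.signer.getPublicKey()>',
  tags: [
    ['k', '30360'], // must name the grant kind
    ['e', '<id of the stored kind 30360 grant>'], // must point at an existing grant
  ],
  content: '',
  created_at: Math.floor(Date.now() / 1000),
};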
|
||||
/** Parse kind 0 metadata and track indexes in the database. */
|
||||
async updateAuthorData(event: NostrEvent, signal?: AbortSignal): Promise<void> {
|
||||
if (event.kind !== 0) return;
|
||||
|
|
@ -323,11 +396,68 @@ export class DittoRelayStore implements NRelay {
|
|||
}
|
||||
}
|
||||
|
||||
private async prewarmLinkPreview(event: NostrEvent, signal?: AbortSignal): Promise<void> {
|
||||
const { firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), [], this.opts);
|
||||
private async fetchRelated(event: NostrEvent): Promise<void> {
|
||||
const ids = new Set<string>();
|
||||
|
||||
if (firstUrl) {
|
||||
await unfurlCardCached(firstUrl, signal);
|
||||
for (const tag of event.tags) {
|
||||
const [name, value] = tag;
|
||||
|
||||
if ((name === 'e' || name === 'q') && isNostrId(value) && !this.encounters.has(value)) {
|
||||
ids.add(value);
|
||||
}
|
||||
}
|
||||
|
||||
const { db, pool } = this.opts;
|
||||
|
||||
if (ids.size) {
|
||||
const query = db.kysely
|
||||
.selectFrom('nostr_events')
|
||||
.select('id')
|
||||
.where('id', 'in', [...ids]);
|
||||
|
||||
for (const row of await query.execute().catch(() => [])) {
|
||||
ids.delete(row.id);
|
||||
}
|
||||
}
|
||||
|
||||
if (ids.size) {
|
||||
const signal = AbortSignal.timeout(1000);
|
||||
|
||||
for (const event of await pool.query([{ ids: [...ids] }], { signal }).catch(() => [])) {
|
||||
await this.event(event).catch(() => {});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async warmLinkPreview(event: NostrEvent, signal?: AbortSignal): Promise<void> {
|
||||
const { db, conf } = this.opts;
|
||||
|
||||
if (event.kind === 1) {
|
||||
const { firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), [], this.opts);
|
||||
|
||||
console.log({ firstUrl });
|
||||
|
||||
if (firstUrl) {
|
||||
const linkPreview = await unfurlCard(firstUrl, { conf, signal });
|
||||
|
||||
console.log(linkPreview);
|
||||
|
||||
if (linkPreview) {
|
||||
await db.kysely.insertInto('event_stats')
|
||||
.values({
|
||||
event_id: event.id,
|
||||
replies_count: 0,
|
||||
reposts_count: 0,
|
||||
reactions_count: 0,
|
||||
quotes_count: 0,
|
||||
reactions: '{}',
|
||||
zaps_amount: 0,
|
||||
link_preview: linkPreview,
|
||||
})
|
||||
.onConflict((oc) => oc.column('event_id').doUpdateSet({ link_preview: linkPreview }))
|
||||
.execute();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -358,19 +488,24 @@ export class DittoRelayStore implements NRelay {
|
|||
}
|
||||
|
||||
if (event.kind === 3036 && tagsAdmin) {
|
||||
const rel = await signer.signEvent({
|
||||
kind: 30383,
|
||||
content: '',
|
||||
tags: [
|
||||
['d', event.id],
|
||||
['p', event.pubkey],
|
||||
['k', '3036'],
|
||||
['n', 'pending'],
|
||||
],
|
||||
created_at: Math.floor(Date.now() / 1000),
|
||||
});
|
||||
const r = event.tags.find(([name]) => name === 'r')?.[1];
|
||||
|
||||
await this.event(rel, { signal: AbortSignal.timeout(1000) });
|
||||
if (r) {
|
||||
const rel = await signer.signEvent({
|
||||
kind: 30383,
|
||||
content: '',
|
||||
tags: [
|
||||
['d', event.id],
|
||||
['p', event.pubkey],
|
||||
['k', '3036'],
|
||||
['r', r.toLowerCase()],
|
||||
['n', 'pending'],
|
||||
],
|
||||
created_at: Math.floor(Date.now() / 1000),
|
||||
});
|
||||
|
||||
await this.event(rel, { signal: AbortSignal.timeout(1000) });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -58,17 +58,19 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
|
|||
return result;
|
||||
}, new Set<string>());
|
||||
|
||||
const favicons = (
|
||||
await db.kysely
|
||||
.selectFrom('domain_favicons')
|
||||
.select(['domain', 'favicon'])
|
||||
.where('domain', 'in', [...domains])
|
||||
.execute()
|
||||
)
|
||||
.reduce((result, { domain, favicon }) => {
|
||||
result[domain] = favicon;
|
||||
return result;
|
||||
}, {} as Record<string, string>);
|
||||
const favicons: Record<string, string> = domains.size
|
||||
? (
|
||||
await db.kysely
|
||||
.selectFrom('domain_favicons')
|
||||
.select(['domain', 'favicon'])
|
||||
.where('domain', 'in', [...domains])
|
||||
.execute()
|
||||
)
|
||||
.reduce((result, { domain, favicon }) => {
|
||||
result[domain] = favicon;
|
||||
return result;
|
||||
}, {} as Record<string, string>)
|
||||
: {};
|
||||
|
||||
const stats = {
|
||||
authors: authorStats,
|
||||
|
|
@ -409,6 +411,7 @@ async function gatherEventStats(
|
|||
quotes_count: Math.max(0, row.quotes_count),
|
||||
reactions: row.reactions,
|
||||
zaps_amount: Math.max(0, row.zaps_amount),
|
||||
link_preview: row.link_preview,
|
||||
}));
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -27,10 +27,10 @@ async function createEvent<E extends (DittoEnv & { Variables: { user?: User } })
|
|||
}
|
||||
|
||||
const event = await user.signer.signEvent({
|
||||
content: '',
|
||||
created_at: nostrNow(),
|
||||
tags: [],
|
||||
...t,
|
||||
content: t.content ?? '',
|
||||
created_at: t.created_at ?? nostrNow(),
|
||||
tags: t.tags ?? [],
|
||||
});
|
||||
|
||||
await relay.event(event, { signal, publish: true });
|
||||
|
|
@ -118,7 +118,7 @@ async function updateAdminEvent<E extends EventStub>(
|
|||
return createAdminEvent(fn(prev), c);
|
||||
}
|
||||
|
||||
function updateUser(pubkey: string, n: Record<string, boolean>, c: AppContext): Promise<NostrEvent> {
|
||||
function updateUser(pubkey: string, n: Record<string, boolean>, c: Context): Promise<NostrEvent> {
|
||||
return updateNames(30382, pubkey, n, c);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { NostrEvent, NSchema as n, NStore } from '@nostrify/nostrify';
|
||||
import { nip19 } from 'nostr-tools';
|
||||
import { NKinds, NostrEvent, NostrFilter, NPool, NRelay, NSchema as n, NStore } from '@nostrify/nostrify';
|
||||
import { nip19, sortEvents } from 'nostr-tools';
|
||||
import { match } from 'path-to-regexp';
|
||||
import tldts from 'tldts';
|
||||
|
||||
|
|
@ -85,13 +85,167 @@ export function extractIdentifier(value: string): string | undefined {
|
|||
|
||||
value = value.replace(/^@/, '');
|
||||
|
||||
if (n.bech32().safeParse(value).success) {
|
||||
if (isBech32(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
const { isIcann, domain } = tldts.parse(value);
|
||||
|
||||
if (isIcann && domain) {
|
||||
if (isUsername(value)) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
interface LookupEventsOpts {
|
||||
db: DittoDB;
|
||||
conf: DittoConf;
|
||||
pool: NPool<NRelay>;
|
||||
relay: NStore;
|
||||
signal?: AbortSignal;
|
||||
}
|
||||
|
||||
export async function lookupEvent(value: string, opts: LookupEventsOpts): Promise<NostrEvent | undefined> {
|
||||
const { pool, relay, signal } = opts;
|
||||
|
||||
const identifier = extractIdentifier(value);
|
||||
if (!identifier) return;
|
||||
|
||||
let result: DittoPointer;
|
||||
|
||||
if (isBech32(identifier)) {
|
||||
result = bech32ToPointer(identifier);
|
||||
} else if (isUsername(identifier)) {
|
||||
result = { type: 'address', pointer: { kind: 0, identifier: '', ...await lookupNip05(identifier, opts) } };
|
||||
} else {
|
||||
throw new Error('Unsupported identifier: neither bech32 nor username');
|
||||
}
|
||||
|
||||
const filter = pointerToFilter(result);
|
||||
const relayUrls = new Set<string>(result.pointer.relays ?? []);
|
||||
|
||||
const [event] = await relay.query([filter], { signal });
|
||||
|
||||
if (event) {
|
||||
return event;
|
||||
}
|
||||
|
||||
let pubkey: string | undefined;
|
||||
|
||||
if (result.type === 'address') {
|
||||
pubkey = result.pointer.pubkey;
|
||||
} else if (result.type === 'event') {
|
||||
pubkey = result.pointer.author;
|
||||
}
|
||||
|
||||
if (pubkey) {
|
||||
let [relayList] = await relay.query([{ kinds: [10002], authors: [pubkey] }], { signal });
|
||||
|
||||
if (!relayList) {
|
||||
[relayList] = await pool.query([{ kinds: [10002], authors: [pubkey] }], { signal });
|
||||
if (relayList) {
|
||||
await relay.event(relayList);
|
||||
}
|
||||
}
|
||||
|
||||
if (relayList) {
|
||||
for (const relayUrl of getEventRelayUrls(relayList)) {
|
||||
relayUrls.add(relayUrl);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const urls = [...relayUrls].slice(0, 5);
|
||||
|
||||
if (result.type === 'address') {
|
||||
const results = await Promise.all(urls.map((relayUrl) => pool.relay(relayUrl).query([filter], { signal })));
|
||||
const [event] = sortEvents(results.flat());
|
||||
if (event) {
|
||||
await relay.event(event, { signal });
|
||||
return event;
|
||||
}
|
||||
}
|
||||
|
||||
if (result.type === 'event') {
|
||||
const [event] = await Promise.any(urls.map((relayUrl) => pool.relay(relayUrl).query([filter], { signal })));
|
||||
if (event) {
|
||||
await relay.event(event, { signal });
|
||||
return event;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type DittoPointer = { type: 'event'; pointer: nip19.EventPointer } | { type: 'address'; pointer: nip19.AddressPointer };
|
||||
|
||||
function bech32ToPointer(bech32: string): DittoPointer {
|
||||
const decoded = nip19.decode(bech32);
|
||||
|
||||
switch (decoded.type) {
|
||||
case 'note':
|
||||
return { type: 'event', pointer: { id: decoded.data } };
|
||||
case 'nevent':
|
||||
return { type: 'event', pointer: decoded.data };
|
||||
case 'npub':
|
||||
return { type: 'address', pointer: { kind: 0, identifier: '', pubkey: decoded.data } };
|
||||
case 'nprofile':
|
||||
return { type: 'address', pointer: { kind: 0, identifier: '', ...decoded.data } };
|
||||
case 'naddr':
|
||||
return { type: 'address', pointer: decoded.data };
|
||||
}
|
||||
|
||||
throw new Error('Invalid bech32 pointer');
|
||||
}
|
||||
|
||||
function pointerToFilter(pointer: DittoPointer): NostrFilter {
|
||||
switch (pointer.type) {
|
||||
case 'event': {
|
||||
const { id, kind, author } = pointer.pointer;
|
||||
const filter: NostrFilter = { ids: [id] };
|
||||
|
||||
if (kind) {
|
||||
filter.kinds = [kind];
|
||||
}
|
||||
|
||||
if (author) {
|
||||
filter.authors = [author];
|
||||
}
|
||||
|
||||
return filter;
|
||||
}
|
||||
case 'address': {
|
||||
const { kind, identifier, pubkey } = pointer.pointer;
|
||||
const filter: NostrFilter = { kinds: [kind], authors: [pubkey] };
|
||||
|
||||
if (NKinds.replaceable(kind)) {
|
||||
filter['#d'] = [identifier];
|
||||
}
|
||||
|
||||
return filter;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function isUsername(value: string): boolean {
|
||||
const { isIcann, domain } = tldts.parse(value);
|
||||
return Boolean(isIcann && domain);
|
||||
}
|
||||
|
||||
function isBech32(value: string): value is `${string}1${string}` {
|
||||
return n.bech32().safeParse(value).success;
|
||||
}
|
||||
|
||||
function getEventRelayUrls(event: NostrEvent, marker?: 'read' | 'write'): Set<`wss://${string}`> {
|
||||
const relays = new Set<`wss://${string}`>();
|
||||
|
||||
for (const [name, relayUrl, _marker] of event.tags) {
|
||||
if (name === 'r' && (!marker || !_marker || marker === _marker)) {
|
||||
try {
|
||||
const url = new URL(relayUrl);
|
||||
if (url.protocol === 'wss:') {
|
||||
relays.add(url.toString() as `wss://${string}`);
|
||||
}
|
||||
} catch {
|
||||
// fallthrough
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return relays;
|
||||
}
|
||||
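`lookupEvent` first normalises its input into a pointer (bech32 or NIP-05 username) and only then into a filter. The bech32 half can be exercised on its own with `nostr-tools`; a small sketch of the kind 0 case, mirroring what `bech32ToPointer` plus `pointerToFilter` produce (the function name is illustrative).

import { nip19 } from 'nostr-tools';
import type { NostrFilter } from '@nostrify/nostrify';

/** Build a kind 0 filter from an npub/nprofile, the same way the lookup helpers above do. */
function npubToProfileFilter(bech32: string): NostrFilter {
  const decoded = nip19.decode(bech32);
  switch (decoded.type) {
    case 'npub':
      return { kinds: [0], authors: [decoded.data] };
    case 'nprofile':
      return { kinds: [0], authors: [decoded.data.pubkey] };
    default:
      throw new Error(`Unsupported bech32 type: ${decoded.type}`);
  }
}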
|
|
|
|||
|
|
@ -1,23 +1,27 @@
|
|||
import { cachedLinkPreviewSizeGauge } from '@ditto/metrics';
|
||||
import TTLCache from '@isaacs/ttlcache';
|
||||
import { logi } from '@soapbox/logi';
|
||||
import { safeFetch } from '@soapbox/safe-fetch';
|
||||
import DOMPurify from 'isomorphic-dompurify';
|
||||
import { unfurl } from 'unfurl.js';
|
||||
|
||||
import { Conf } from '@/config.ts';
|
||||
import { errorJson } from '@/utils/log.ts';
|
||||
|
||||
import type { DittoConf } from '@ditto/conf';
|
||||
import type { MastodonPreviewCard } from '@ditto/mastoapi/types';
|
||||
|
||||
async function unfurlCard(url: string, signal: AbortSignal): Promise<MastodonPreviewCard | null> {
|
||||
interface UnfurlCardOpts {
|
||||
conf: DittoConf;
|
||||
signal?: AbortSignal;
|
||||
}
|
||||
|
||||
export async function unfurlCard(url: string, opts: UnfurlCardOpts): Promise<MastodonPreviewCard | null> {
|
||||
const { conf, signal } = opts;
|
||||
try {
|
||||
const result = await unfurl(url, {
|
||||
fetch: (url) =>
|
||||
safeFetch(url, {
|
||||
headers: {
|
||||
'Accept': 'text/html, application/xhtml+xml',
|
||||
'User-Agent': Conf.fetchUserAgent,
|
||||
'User-Agent': conf.fetchUserAgent,
|
||||
},
|
||||
signal,
|
||||
}),
|
||||
|
|
@ -54,19 +58,3 @@ async function unfurlCard(url: string, signal: AbortSignal): Promise<MastodonPre
|
|||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/** TTL cache for preview cards. */
|
||||
const previewCardCache = new TTLCache<string, Promise<MastodonPreviewCard | null>>(Conf.caches.linkPreview);
|
||||
|
||||
/** Unfurl card from cache if available, otherwise fetch it. */
|
||||
export function unfurlCardCached(url: string, signal = AbortSignal.timeout(1000)): Promise<MastodonPreviewCard | null> {
|
||||
const cached = previewCardCache.get(url);
|
||||
if (cached !== undefined) {
|
||||
return cached;
|
||||
} else {
|
||||
const card = unfurlCard(url, signal);
|
||||
previewCardCache.set(url, card);
|
||||
cachedLinkPreviewSizeGauge.set(previewCardCache.size);
|
||||
return card;
|
||||
}
|
||||
}
|
||||
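`unfurlCard` now takes its configuration explicitly instead of reading the global `Conf`, and the TTL cache wrapper above is gone because the pipeline persists the card on `event_stats.link_preview` instead. A hedged call-site sketch, assuming an env-based `DittoConf` and the repo's `@/` import alias.

import { DittoConf } from '@ditto/conf';
import { unfurlCard } from '@/utils/unfurl.ts';

const conf = new DittoConf(Deno.env); // assumption: env-based config, as used elsewhere in this repo

const card = await unfurlCard('https://example.com/article', {
  conf,
  signal: AbortSignal.timeout(1000),
});

if (card) {
  console.log(card.title);
}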
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import { analyzeFile, extractVideoFrame, transcodeVideo } from '@ditto/transcode';
|
||||
import { ScopedPerformance } from '@esroyo/scoped-performance';
|
||||
import { HTTPException } from '@hono/hono/http-exception';
|
||||
import { logi } from '@soapbox/logi';
|
||||
import { crypto } from '@std/crypto';
|
||||
|
|
@ -21,7 +23,11 @@ export async function uploadFile(
|
|||
meta: FileMeta,
|
||||
signal?: AbortSignal,
|
||||
): Promise<DittoUpload> {
|
||||
using perf = new ScopedPerformance();
|
||||
perf.mark('start');
|
||||
|
||||
const { conf, uploader } = c.var;
|
||||
const { ffmpegPath, ffprobePath, mediaAnalyze, mediaTranscode } = conf;
|
||||
|
||||
if (!uploader) {
|
||||
throw new HTTPException(500, {
|
||||
|
|
@ -35,7 +41,43 @@ export async function uploadFile(
|
|||
throw new Error('File size is too large.');
|
||||
}
|
||||
|
||||
const [baseType] = file.type.split('/');
|
||||
|
||||
perf.mark('probe-start');
|
||||
const probe = mediaTranscode ? await analyzeFile(file.stream(), { ffprobePath }).catch(() => null) : null;
|
||||
const video = probe?.streams.find((stream) => stream.codec_type === 'video');
|
||||
perf.mark('probe-end');
|
||||
|
||||
perf.mark('transcode-start');
|
||||
if (baseType === 'video' && mediaTranscode) {
|
||||
let needsTranscode = false;
|
||||
|
||||
for (const stream of probe?.streams ?? []) {
|
||||
if (stream.codec_type === 'video' && stream.codec_name !== 'h264') {
|
||||
needsTranscode = true;
|
||||
break;
|
||||
}
|
||||
if (stream.codec_type === 'audio' && stream.codec_name !== 'aac') {
|
||||
needsTranscode = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (needsTranscode) {
|
||||
const tmp = new URL('file://' + await Deno.makeTempFile());
|
||||
await Deno.writeFile(tmp, file.stream());
|
||||
const stream = transcodeVideo(tmp, { ffmpegPath });
|
||||
const transcoded = await new Response(stream).bytes();
|
||||
file = new File([transcoded], file.name, { type: 'video/mp4' });
|
||||
await Deno.remove(tmp);
|
||||
}
|
||||
}
|
||||
perf.mark('transcode-end');
|
||||
|
||||
perf.mark('upload-start');
|
||||
const tags = await uploader.upload(file, { signal });
|
||||
perf.mark('upload-end');
|
||||
|
||||
const url = tags[0][1];
|
||||
|
||||
if (description) {
|
||||
|
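The transcode gate above only re-encodes when ffprobe reports a video stream that is not H.264 or an audio stream that is not AAC; files already in that shape are uploaded untouched. The same check, pulled out as a standalone predicate over the probe result returned by `analyzeFile` (the helper name is illustrative).

import { analyzeFile } from '@ditto/transcode';

/** True if any stream would need re-encoding to end up as H.264 video + AAC audio. */
function needsTranscode(probe: Awaited<ReturnType<typeof analyzeFile>>): boolean {
  return probe.streams.some((stream) =>
    (stream.codec_type === 'video' && stream.codec_name !== 'h264') ||
    (stream.codec_type === 'audio' && stream.codec_name !== 'aac')
  );
}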
|
@ -46,6 +88,8 @@ export async function uploadFile(
|
|||
const m = tags.find(([key]) => key === 'm')?.[1];
|
||||
const dim = tags.find(([key]) => key === 'dim')?.[1];
|
||||
const size = tags.find(([key]) => key === 'size')?.[1];
|
||||
const image = tags.find(([key]) => key === 'image')?.[1];
|
||||
const thumb = tags.find(([key]) => key === 'thumb')?.[1];
|
||||
const blurhash = tags.find(([key]) => key === 'blurhash')?.[1];
|
||||
|
||||
if (!x) {
|
||||
|
|
@ -61,34 +105,50 @@ export async function uploadFile(
|
|||
tags.push(['size', file.size.toString()]);
|
||||
}
|
||||
|
||||
// If the uploader didn't already, try to get a blurhash and media dimensions.
|
||||
// This requires `MEDIA_ANALYZE=true` to be configured because it comes with security tradeoffs.
|
||||
if (conf.mediaAnalyze && (!blurhash || !dim)) {
|
||||
perf.mark('analyze-start');
|
||||
|
||||
if (baseType === 'video' && mediaAnalyze && mediaTranscode && video && (!image || !thumb)) {
|
||||
try {
|
||||
const bytes = await new Response(file.stream()).bytes();
|
||||
const img = sharp(bytes);
|
||||
const tmp = new URL('file://' + await Deno.makeTempFile());
|
||||
await Deno.writeFile(tmp, file.stream());
|
||||
const frame = await extractVideoFrame(tmp, '00:00:01', { ffmpegPath });
|
||||
await Deno.remove(tmp);
|
||||
const [[, url]] = await uploader.upload(new File([frame], 'thumb.jpg', { type: 'image/jpeg' }), { signal });
|
||||
|
||||
const { width, height } = await img.metadata();
|
||||
|
||||
if (!dim && (width && height)) {
|
||||
tags.push(['dim', `${width}x${height}`]);
|
||||
if (!image) {
|
||||
tags.push(['image', url]);
|
||||
}
|
||||
|
||||
if (!blurhash && (width && height)) {
|
||||
const pixels = await img
|
||||
.raw()
|
||||
.ensureAlpha()
|
||||
.toBuffer({ resolveWithObject: false })
|
||||
.then((buffer) => new Uint8ClampedArray(buffer));
|
||||
if (!dim) {
|
||||
tags.push(['dim', await getImageDim(frame)]);
|
||||
}
|
||||
|
||||
const blurhash = encode(pixels, width, height, 4, 4);
|
||||
tags.push(['blurhash', blurhash]);
|
||||
if (!blurhash) {
|
||||
tags.push(['blurhash', await getBlurhash(frame)]);
|
||||
}
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: 'ditto.upload.analyze', error: errorJson(e) });
|
||||
}
|
||||
}
|
||||
|
||||
if (baseType === 'image' && mediaAnalyze && (!blurhash || !dim)) {
|
||||
try {
|
||||
const bytes = await new Response(file.stream()).bytes();
|
||||
|
||||
if (!dim) {
|
||||
tags.push(['dim', await getImageDim(bytes)]);
|
||||
}
|
||||
|
||||
if (!blurhash) {
|
||||
tags.push(['blurhash', await getBlurhash(bytes)]);
|
||||
}
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: 'ditto.upload.analyze', error: errorJson(e) });
|
||||
}
|
||||
}
|
||||
|
||||
perf.mark('analyze-end');
|
||||
|
||||
const upload = {
|
||||
id: crypto.randomUUID(),
|
||||
url,
|
||||
|
|
@ -99,5 +159,62 @@ export async function uploadFile(
|
|||
|
||||
dittoUploads.set(upload.id, upload);
|
||||
|
||||
const timing = [
|
||||
perf.measure('probe', 'probe-start', 'probe-end'),
|
||||
perf.measure('transcode', 'transcode-start', 'transcode-end'),
|
||||
perf.measure('upload', 'upload-start', 'upload-end'),
|
||||
perf.measure('analyze', 'analyze-start', 'analyze-end'),
|
||||
].reduce<Record<string, number>>((acc, m) => {
|
||||
const name = m.name.split('::')[1]; // ScopedPerformance uses `::` to separate the name.
|
||||
acc[name] = m.duration / 1000; // Convert to seconds for logging.
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
perf.mark('end');
|
||||
|
||||
logi({
|
||||
level: 'info',
|
||||
ns: 'ditto.upload',
|
||||
upload: { ...upload, uploadedAt: upload.uploadedAt.toISOString() },
|
||||
timing,
|
||||
duration: perf.measure('total', 'start', 'end').duration / 1000,
|
||||
});
|
||||
|
||||
return upload;
|
||||
}
|
||||
|
||||
async function getImageDim(bytes: Uint8Array): Promise<`${number}x${number}`> {
|
||||
const img = sharp(bytes);
|
||||
const { width, height } = await img.metadata();
|
||||
|
||||
if (!width || !height) {
|
||||
throw new Error('Image metadata is missing.');
|
||||
}
|
||||
|
||||
return `${width}x${height}`;
|
||||
}
|
||||
|
||||
/** Get a blurhash from an image file. */
|
||||
async function getBlurhash(bytes: Uint8Array, maxDim = 64): Promise<string> {
|
||||
const img = sharp(bytes);
|
||||
|
||||
const { width, height } = await img.metadata();
|
||||
|
||||
if (!width || !height) {
|
||||
throw new Error('Image metadata is missing.');
|
||||
}
|
||||
|
||||
const { data, info } = await img
|
||||
.raw()
|
||||
.ensureAlpha()
|
||||
.resize({
|
||||
width: width > height ? undefined : maxDim,
|
||||
height: height > width ? undefined : maxDim,
|
||||
fit: 'inside',
|
||||
})
|
||||
.toBuffer({ resolveWithObject: true });
|
||||
|
||||
const pixels = new Uint8ClampedArray(data);
|
||||
|
||||
return encode(pixels, info.width, info.height, 4, 4);
|
||||
}
|
||||
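`getImageDim` and `getBlurhash` are thin wrappers over sharp: one reads metadata, the other downsamples to at most 64px on the long edge before encoding so the hash stays cheap. They are private to this module, so the sketch below shows the equivalent standalone usage of `sharp` and `blurhash` directly; the import specifiers and input path assume the repo's existing import map and are illustrative.

import sharp from 'sharp';
import { encode } from 'blurhash';

const bytes = await Deno.readFile('photo.jpg'); // illustrative input

const img = sharp(bytes);
const { width, height } = await img.metadata();
console.log(`dim: ${width}x${height}`);

const { data, info } = await img
  .raw()
  .ensureAlpha()
  .resize({ width: 64, height: 64, fit: 'inside' })
  .toBuffer({ resolveWithObject: true });

const blurhash = encode(new Uint8ClampedArray(data), info.width, info.height, 4, 4);
console.log(`blurhash: ${blurhash}`);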
|
|
|
|||
|
|
@ -14,6 +14,8 @@ function renderAttachment(
|
|||
const alt = tags.find(([name]) => name === 'alt')?.[1];
|
||||
const cid = tags.find(([name]) => name === 'cid')?.[1];
|
||||
const dim = tags.find(([name]) => name === 'dim')?.[1];
|
||||
const image = tags.find(([key]) => key === 'image')?.[1];
|
||||
const thumb = tags.find(([key]) => key === 'thumb')?.[1];
|
||||
const blurhash = tags.find(([name]) => name === 'blurhash')?.[1];
|
||||
|
||||
if (!url) return;
|
||||
|
|
@ -34,7 +36,7 @@ function renderAttachment(
|
|||
id: id ?? url,
|
||||
type: getAttachmentType(m ?? ''),
|
||||
url,
|
||||
preview_url: url,
|
||||
preview_url: image ?? thumb ?? url,
|
||||
remote_url: null,
|
||||
description: alt ?? '',
|
||||
blurhash: blurhash || null,
|
||||
|
|
|
|||
|
|
@ -6,7 +6,6 @@ import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
|
|||
import { nostrDate } from '@/utils.ts';
|
||||
import { getMediaLinks, parseNoteContent, stripimeta } from '@/utils/note.ts';
|
||||
import { findReplyTag } from '@/utils/tags.ts';
|
||||
import { unfurlCardCached } from '@/utils/unfurl.ts';
|
||||
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
|
||||
import { renderAttachment } from '@/views/mastodon/attachments.ts';
|
||||
import { renderEmojis } from '@/views/mastodon/emojis.ts';
|
||||
|
|
@ -42,21 +41,17 @@ async function renderStatus(
|
|||
|
||||
const mentions = event.mentions?.map((event) => renderMention(event)) ?? [];
|
||||
|
||||
const { html, links, firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), mentions, { conf: Conf });
|
||||
const { html, links } = parseNoteContent(stripimeta(event.content, event.tags), mentions, { conf: Conf });
|
||||
|
||||
const [card, relatedEvents] = await Promise
|
||||
.all([
|
||||
firstUrl ? unfurlCardCached(firstUrl, AbortSignal.timeout(500)) : null,
|
||||
viewerPubkey
|
||||
? await store.query([
|
||||
{ kinds: [6], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
{ kinds: [7], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
{ kinds: [9734], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
{ kinds: [10001], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
{ kinds: [10003], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
])
|
||||
: [],
|
||||
]);
|
||||
const relatedEvents = viewerPubkey
|
||||
? await store.query([
|
||||
{ kinds: [6], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
{ kinds: [7], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
{ kinds: [9734], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
{ kinds: [10001], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
{ kinds: [10003], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
|
||||
])
|
||||
: [];
|
||||
|
||||
const reactionEvent = relatedEvents.find((event) => event.kind === 7);
|
||||
const repostEvent = relatedEvents.find((event) => event.kind === 6);
|
||||
|
|
@ -96,7 +91,7 @@ async function renderStatus(
|
|||
return {
|
||||
id: event.id,
|
||||
account,
|
||||
card,
|
||||
card: event.event_stats?.link_preview ?? null,
|
||||
content: compatMentions + html,
|
||||
created_at: nostrDate(event.created_at).toISOString(),
|
||||
in_reply_to_id: replyId ?? null,
|
||||
|
|
|
|||
|
|
@ -49,7 +49,7 @@ export class CustomPolicy implements NPolicy {
|
|||
timeout: 5_000,
|
||||
});
|
||||
|
||||
this.policy = new Policy({ store, pubkey });
|
||||
this.policy = new Policy({ db, store, pubkey });
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@ditto/lang",
|
||||
"version": "1.1.0",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./language.ts"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@ditto/mastoapi",
|
||||
"version": "1.1.0",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
"./middleware": "./middleware/mod.ts",
|
||||
"./pagination": "./pagination/mod.ts",
|
||||
|
|
|
|||
|
|
@ -1,21 +1,26 @@
|
|||
import { setUser, testApp } from '@ditto/mastoapi/test';
|
||||
import { TestApp } from '@ditto/mastoapi/test';
|
||||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { userMiddleware } from './userMiddleware.ts';
|
||||
import { ReadOnlySigner } from '../signers/ReadOnlySigner.ts';
|
||||
|
||||
Deno.test('no user 401', async () => {
|
||||
const { app } = testApp();
|
||||
await using app = new TestApp();
|
||||
const response = await app.use(userMiddleware()).request('/');
|
||||
assertEquals(response.status, 401);
|
||||
});
|
||||
|
||||
Deno.test('unsupported signer 400', async () => {
|
||||
const { app, relay } = testApp();
|
||||
const signer = new ReadOnlySigner('0461fcbecc4c3374439932d6b8f11269ccdb7cc973ad7a50ae362db135a474dd');
|
||||
await using app = new TestApp();
|
||||
|
||||
const user = {
|
||||
signer: new ReadOnlySigner('0461fcbecc4c3374439932d6b8f11269ccdb7cc973ad7a50ae362db135a474dd'),
|
||||
relay: app.var.relay,
|
||||
};
|
||||
|
||||
app.user(user);
|
||||
|
||||
const response = await app
|
||||
.use(setUser({ signer, relay }))
|
||||
.use(userMiddleware({ enc: 'nip44' }))
|
||||
.use((c, next) => {
|
||||
c.var.user.signer.nip44.encrypt; // test that the type is set
|
||||
|
|
@ -27,10 +32,11 @@ Deno.test('unsupported signer 400', async () => {
|
|||
});
|
||||
|
||||
Deno.test('with user 200', async () => {
|
||||
const { app, user } = testApp();
|
||||
await using app = new TestApp();
|
||||
|
||||
app.user();
|
||||
|
||||
const response = await app
|
||||
.use(setUser(user))
|
||||
.use(userMiddleware())
|
||||
.get('/', (c) => c.text('ok'))
|
||||
.request('/');
|
||||
|
|
@ -39,10 +45,11 @@ Deno.test('with user 200', async () => {
|
|||
});
|
||||
|
||||
Deno.test('user and role 403', async () => {
|
||||
const { app, user } = testApp();
|
||||
await using app = new TestApp();
|
||||
|
||||
app.user();
|
||||
|
||||
const response = await app
|
||||
.use(setUser(user))
|
||||
.use(userMiddleware({ role: 'admin' }))
|
||||
.request('/');
|
||||
|
||||
|
|
@ -50,7 +57,10 @@ Deno.test('user and role 403', async () => {
|
|||
});
|
||||
|
||||
Deno.test('admin role 200', async () => {
|
||||
const { conf, app, user, relay } = testApp();
|
||||
await using app = new TestApp();
|
||||
const { conf, relay } = app.var;
|
||||
|
||||
const user = app.user();
|
||||
|
||||
const event = await conf.signer.signEvent({
|
||||
kind: 30382,
|
||||
|
|
@ -65,7 +75,6 @@ Deno.test('admin role 200', async () => {
|
|||
await relay.event(event);
|
||||
|
||||
const response = await app
|
||||
.use(setUser(user))
|
||||
.use(userMiddleware({ role: 'admin' }))
|
||||
.get('/', (c) => c.text('ok'))
|
||||
.request('/');
|
||||
|
|
|
|||
|
|
@ -1,13 +1,14 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
import { DittoPolyPg } from '@ditto/db';
|
||||
import { DummyDB } from '@ditto/db';
|
||||
import { Hono } from '@hono/hono';
|
||||
import { MockRelay } from '@nostrify/nostrify/test';
|
||||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { DittoApp } from './DittoApp.ts';
|
||||
import { DittoRoute } from './DittoRoute.ts';
|
||||
|
||||
Deno.test('DittoApp', async () => {
|
||||
await using db = new DittoPolyPg('memory://');
|
||||
await using db = new DummyDB();
|
||||
const conf = new DittoConf(new Map());
|
||||
const relay = new MockRelay();
|
||||
|
||||
|
|
@ -20,4 +21,11 @@ Deno.test('DittoApp', async () => {
|
|||
|
||||
// @ts-expect-error Passing a non-DittoRoute to route.
|
||||
app.route('/', hono);
|
||||
|
||||
app.get('/error', () => {
|
||||
throw new Error('test error');
|
||||
});
|
||||
|
||||
const response = await app.request('/error');
|
||||
assertEquals(response.status, 500);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -3,11 +3,13 @@ import { Hono } from '@hono/hono';
|
|||
import type { HonoOptions } from '@hono/hono/hono-base';
|
||||
import type { DittoEnv } from './DittoEnv.ts';
|
||||
|
||||
export type DittoAppOpts = Omit<DittoEnv['Variables'], 'signal' | 'requestId'> & HonoOptions<DittoEnv>;
|
||||
|
||||
export class DittoApp extends Hono<DittoEnv> {
|
||||
// @ts-ignore Require a DittoRoute for type safety.
|
||||
declare route: (path: string, app: Hono<DittoEnv>) => Hono<DittoEnv>;
|
||||
|
||||
constructor(opts: Omit<DittoEnv['Variables'], 'signal' | 'requestId'> & HonoOptions<DittoEnv>) {
|
||||
constructor(protected opts: DittoAppOpts) {
|
||||
super(opts);
|
||||
|
||||
this.use((c, next) => {
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
import { assertEquals } from '@std/assert';
|
||||
import { assertRejects } from '@std/assert';
|
||||
|
||||
import { DittoRoute } from './DittoRoute.ts';
|
||||
|
||||
Deno.test('DittoRoute', async () => {
|
||||
const route = new DittoRoute();
|
||||
const response = await route.request('/');
|
||||
const body = await response.json();
|
||||
|
||||
assertEquals(response.status, 500);
|
||||
assertEquals(body, { error: 'Missing required variable: db' });
|
||||
await assertRejects(
|
||||
async () => {
|
||||
await route.request('/');
|
||||
},
|
||||
Error,
|
||||
'Missing required variable: db',
|
||||
);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ export class DittoRoute extends Hono<DittoEnv> {
|
|||
}
|
||||
|
||||
private throwMissingVar(name: string): never {
|
||||
throw new HTTPException(500, { message: `Missing required variable: ${name}` });
|
||||
throw new Error(`Missing required variable: ${name}`);
|
||||
}
|
||||
|
||||
private _errorHandler: ErrorHandler = (error, c) => {
|
||||
|
|
@ -50,6 +50,6 @@ export class DittoRoute extends Hono<DittoEnv> {
|
|||
}
|
||||
}
|
||||
|
||||
return c.json({ error: 'Something went wrong' }, 500);
|
||||
throw error;
|
||||
};
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,41 +1 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
import { type DittoDB, DummyDB } from '@ditto/db';
|
||||
import { DittoApp, type DittoMiddleware } from '@ditto/mastoapi/router';
|
||||
import { type NostrSigner, type NRelay, NSecSigner } from '@nostrify/nostrify';
|
||||
import { MockRelay } from '@nostrify/nostrify/test';
|
||||
import { generateSecretKey, nip19 } from 'nostr-tools';
|
||||
|
||||
import type { User } from '@ditto/mastoapi/middleware';
|
||||
|
||||
export function testApp(): {
|
||||
app: DittoApp;
|
||||
relay: NRelay;
|
||||
conf: DittoConf;
|
||||
db: DittoDB;
|
||||
user: {
|
||||
signer: NostrSigner;
|
||||
relay: NRelay;
|
||||
};
|
||||
} {
|
||||
const db = new DummyDB();
|
||||
|
||||
const nsec = nip19.nsecEncode(generateSecretKey());
|
||||
const conf = new DittoConf(new Map([['DITTO_NSEC', nsec]]));
|
||||
|
||||
const relay = new MockRelay();
|
||||
const app = new DittoApp({ conf, relay, db });
|
||||
|
||||
const user = {
|
||||
signer: new NSecSigner(generateSecretKey()),
|
||||
relay,
|
||||
};
|
||||
|
||||
return { app, relay, conf, db, user };
|
||||
}
|
||||
|
||||
export function setUser<S extends NostrSigner>(user: User<S>): DittoMiddleware<{ user: User<S> }> {
|
||||
return async (c, next) => {
|
||||
c.set('user', user);
|
||||
await next();
|
||||
};
|
||||
}
|
||||
export { TestApp } from './test/TestApp.ts';
|
||||
|
|
|
|||
121
packages/mastoapi/test/TestApp.ts
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
import { type DittoDB, DummyDB } from '@ditto/db';
|
||||
import { HTTPException } from '@hono/hono/http-exception';
|
||||
import { type NRelay, NSecSigner } from '@nostrify/nostrify';
|
||||
import { MockRelay } from '@nostrify/nostrify/test';
|
||||
import { generateSecretKey, nip19 } from 'nostr-tools';
|
||||
|
||||
import { DittoApp, type DittoAppOpts } from '../router/DittoApp.ts';
|
||||
|
||||
import type { Context } from '@hono/hono';
|
||||
import type { User } from '../middleware/User.ts';
|
||||
import type { DittoRoute } from '../router/DittoRoute.ts';
|
||||
|
||||
interface DittoVars {
|
||||
db: DittoDB;
|
||||
conf: DittoConf;
|
||||
relay: NRelay;
|
||||
}
|
||||
|
||||
export class TestApp extends DittoApp implements AsyncDisposable {
|
||||
private _user?: User;
|
||||
|
||||
constructor(route?: DittoRoute, opts?: Partial<DittoAppOpts>) {
|
||||
const nsec = nip19.nsecEncode(generateSecretKey());
|
||||
|
||||
const conf = opts?.conf ?? new DittoConf(
|
||||
new Map([
|
||||
['DITTO_NSEC', nsec],
|
||||
['LOCAL_DOMAIN', 'https://ditto.pub'],
|
||||
]),
|
||||
);
|
||||
|
||||
const db = opts?.db ?? new DummyDB();
|
||||
const relay = opts?.relay ?? new MockRelay();
|
||||
|
||||
super({
|
||||
db,
|
||||
conf,
|
||||
relay,
|
||||
...opts,
|
||||
});
|
||||
|
||||
this.use(async (c: Context<{ Variables: { user?: User } }>, next) => {
|
||||
c.set('user', this._user);
|
||||
await next();
|
||||
});
|
||||
|
||||
if (route) {
|
||||
this.route('/', route);
|
||||
}
|
||||
|
||||
this.onError((err, c) => {
|
||||
if (err instanceof HTTPException) {
|
||||
if (err.res) {
|
||||
return err.res;
|
||||
} else {
|
||||
return c.json({ error: err.message }, err.status);
|
||||
}
|
||||
}
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
get var(): DittoVars {
|
||||
return {
|
||||
db: this.opts.db,
|
||||
conf: this.opts.conf,
|
||||
relay: this.opts.relay,
|
||||
};
|
||||
}
|
||||
|
||||
async admin(user?: User): Promise<User> {
|
||||
const { conf, relay } = this.opts;
|
||||
user ??= this.createUser();
|
||||
|
||||
const event = await conf.signer.signEvent({
|
||||
kind: 30382,
|
||||
content: '',
|
||||
tags: [
|
||||
['d', await user.signer.getPublicKey()],
|
||||
['n', 'admin'],
|
||||
],
|
||||
created_at: Math.floor(Date.now() / 1000),
|
||||
});
|
||||
|
||||
await relay.event(event);
|
||||
|
||||
return this.user(user);
|
||||
}
|
||||
|
||||
user(user?: User): User {
|
||||
user ??= this.createUser();
|
||||
this._user = user;
|
||||
return user;
|
||||
}
|
||||
|
||||
createUser(sk?: Uint8Array): User {
|
||||
return {
|
||||
relay: this.opts.relay,
|
||||
signer: new NSecSigner(sk ?? generateSecretKey()),
|
||||
};
|
||||
}
|
||||
|
||||
api = {
|
||||
get: async (path: string): Promise<Response> => {
|
||||
return await this.request(path);
|
||||
},
|
||||
post: async (path: string, body: unknown): Promise<Response> => {
|
||||
return await this.request(path, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
async [Symbol.asyncDispose](): Promise<void> {
|
||||
await this.opts.db[Symbol.asyncDispose]();
|
||||
}
|
||||
}
|
||||
|
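`TestApp` replaces the old `testApp()` factory: it is itself a `DittoApp` backed by a `DummyDB`, `MockRelay` and generated `DittoConf`, injects a user via `app.user()` or `app.admin()`, and disposes its database through `await using`. A minimal route test in the new style, following the userMiddleware tests earlier in this diff; the route itself is illustrative.

import { TestApp } from '@ditto/mastoapi/test';
import { assertEquals } from '@std/assert';

Deno.test('example route with TestApp', async () => {
  await using app = new TestApp();

  app.user(); // attach a freshly generated user to every request

  const response = await app
    .get('/hello', (c) => c.text('ok'))
    .request('/hello');

  assertEquals(response.status, 200);
});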
|
@ -1,5 +1,6 @@
|
|||
{
|
||||
"name": "@ditto/metrics",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./metrics.ts"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -149,3 +149,8 @@ export const webPushNotificationsCounter: Counter<'type'> = new Counter({
|
|||
help: 'Total number of Web Push notifications sent',
|
||||
labelNames: ['type'],
|
||||
});
|
||||
|
||||
export const activeAuthorSubscriptionsGauge: Gauge = new Gauge({
|
||||
name: `${prefix}_active_author_subscriptions`,
|
||||
help: "Number of active REQ's to find kind 0 events from the pool",
|
||||
});
|
||||
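The new gauge is incremented and decremented around the ad-hoc author lookups in `DittoRelayStore.event()`. A small wrapper sketch of the same pattern; the `try/finally` (which the pipeline itself does not use) is an assumption that keeps the gauge from leaking if the lookup throws.

import { activeAuthorSubscriptionsGauge } from '@ditto/metrics';

/** Track an in-flight author lookup on the gauge, even if `fn` rejects. */
async function withAuthorSubscriptionGauge<T>(fn: () => Promise<T>): Promise<T> {
  activeAuthorSubscriptionsGauge.inc();
  try {
    return await fn();
  } finally {
    activeAuthorSubscriptionsGauge.dec();
  }
}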
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@ditto/nip98",
|
||||
"version": "1.0.0",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./nip98.ts"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@ditto/policies",
|
||||
"version": "1.1.0",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@ditto/ratelimiter",
|
||||
"version": "1.1.0",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
|
|
|
|||
1
packages/transcode/.gitignore
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
tmp/
|
||||
13
packages/transcode/analyze.test.ts
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
import { assertObjectMatch } from '@std/assert';
|
||||
|
||||
import { analyzeFile } from './analyze.ts';
|
||||
|
||||
Deno.test('analyzeFile', async () => {
|
||||
const uri = new URL('./buckbunny.mp4', import.meta.url);
|
||||
|
||||
const { streams } = await analyzeFile(uri);
|
||||
|
||||
const videoStream = streams.find((stream) => stream.codec_type === 'video')!;
|
||||
|
||||
assertObjectMatch(videoStream, { width: 1920, height: 1080 });
|
||||
});
|
||||
102
packages/transcode/analyze.ts
Normal file
|
|
@ -0,0 +1,102 @@
|
|||
import { ffprobe } from './ffprobe.ts';
|
||||
|
||||
interface AnalyzeResult {
|
||||
streams: Stream[];
|
||||
format: Format;
|
||||
}
|
||||
|
||||
interface Stream {
|
||||
index: number;
|
||||
codec_tag_string: string;
|
||||
codec_tag: string;
|
||||
codec_name?: string;
|
||||
codec_long_name?: string;
|
||||
profile?: string;
|
||||
codec_type?: string;
|
||||
width?: number;
|
||||
height?: number;
|
||||
coded_width?: number;
|
||||
coded_height?: number;
|
||||
closed_captions?: number;
|
||||
has_b_frames?: number;
|
||||
sample_aspect_ratio?: string;
|
||||
display_aspect_ratio?: string;
|
||||
pix_fmt?: string;
|
||||
level?: number;
|
||||
color_range?: string;
|
||||
color_space?: string;
|
||||
color_transfer?: string;
|
||||
color_primaries?: string;
|
||||
chroma_location?: string;
|
||||
field_order?: string;
|
||||
refs?: number;
|
||||
sample_fmt?: string;
|
||||
sample_rate?: string;
|
||||
channels?: number;
|
||||
channel_layout?: string;
|
||||
bits_per_sample?: number;
|
||||
id?: string;
|
||||
r_frame_rate?: string;
|
||||
avg_frame_rate?: string;
|
||||
time_base?: string;
|
||||
start_pts?: number;
|
||||
start_time?: string;
|
||||
duration_ts?: number;
|
||||
duration?: string;
|
||||
bit_rate?: string;
|
||||
max_bit_rate?: string;
|
||||
bits_per_raw_sample?: string;
|
||||
nb_frames?: string;
|
||||
nb_read_frames?: string;
|
||||
nb_read_packets?: string;
|
||||
disposition?: Disposition;
|
||||
tags?: Record<string, string>;
|
||||
}
|
||||
|
||||
interface Format {
|
||||
filename: string;
|
||||
nb_streams: number;
|
||||
nb_programs: number;
|
||||
format_name: string;
|
||||
probe_score: number;
|
||||
format_long_name?: string;
|
||||
start_time?: string;
|
||||
duration?: string;
|
||||
size?: string;
|
||||
bit_rate?: string;
|
||||
tags?: Record<string, string>;
|
||||
}
|
||||
|
||||
interface Disposition {
|
||||
default: number;
|
||||
dub: number;
|
||||
original: number;
|
||||
comment: number;
|
||||
lyrics: number;
|
||||
karaoke: number;
|
||||
forced: number;
|
||||
hearing_impaired: number;
|
||||
visual_impaired: number;
|
||||
clean_effects: number;
|
||||
attached_pic: number;
|
||||
timed_thumbnails: number;
|
||||
captions: number;
|
||||
descriptions: number;
|
||||
metadata: number;
|
||||
dependent: number;
|
||||
still_image: number;
|
||||
}
|
||||
|
||||
export function analyzeFile(
|
||||
input: URL | ReadableStream<Uint8Array>,
|
||||
opts?: { ffprobePath?: string | URL },
|
||||
): Promise<AnalyzeResult> {
|
||||
const stream = ffprobe(input, {
|
||||
'loglevel': 'fatal',
|
||||
'show_streams': '',
|
||||
'show_format': '',
|
||||
'of': 'json',
|
||||
}, opts);
|
||||
|
||||
return new Response(stream).json();
|
||||
}
|
||||
BIN
packages/transcode/buckbunny.mp4
Normal file
7
packages/transcode/deno.json
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"name": "@ditto/transcode",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
}
|
||||
31
packages/transcode/ffmpeg.test.ts
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
import { ffmpeg } from './ffmpeg.ts';
|
||||
|
||||
const uri = new URL('./buckbunny.mp4', import.meta.url);
|
||||
|
||||
Deno.test('ffmpeg', async () => {
|
||||
await using file = await Deno.open(uri);
|
||||
|
||||
const output = ffmpeg(file.readable, {
|
||||
'c:v': 'libx264',
|
||||
'preset': 'veryfast',
|
||||
'loglevel': 'fatal',
|
||||
'movflags': 'frag_keyframe+empty_moov',
|
||||
'f': 'mp4',
|
||||
});
|
||||
|
||||
await Deno.mkdir(new URL('./tmp', import.meta.url), { recursive: true });
|
||||
await Deno.writeFile(new URL('./tmp/transcoded-1.mp4', import.meta.url), output);
|
||||
});
|
||||
|
||||
Deno.test('ffmpeg from file URI', async () => {
|
||||
const output = ffmpeg(uri, {
|
||||
'c:v': 'libx264',
|
||||
'preset': 'veryfast',
|
||||
'loglevel': 'fatal',
|
||||
'movflags': 'frag_keyframe+empty_moov',
|
||||
'f': 'mp4',
|
||||
});
|
||||
|
||||
await Deno.mkdir(new URL('./tmp', import.meta.url), { recursive: true });
|
||||
await Deno.writeFile(new URL('./tmp/transcoded-2.mp4', import.meta.url), output);
|
||||
});
|
||||
58
packages/transcode/ffmpeg.ts
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
export interface FFmpegFlags {
|
||||
'safe'?: string;
|
||||
'nostdin'?: string;
|
||||
'c:v'?: string;
|
||||
'preset'?: string;
|
||||
'loglevel'?: string;
|
||||
'crf'?: string;
|
||||
'c:a'?: string;
|
||||
'b:a'?: string;
|
||||
'movflags'?: string;
|
||||
'f'?: string;
|
||||
[key: string]: string | undefined;
|
||||
}
|
||||
|
||||
export function ffmpeg(
|
||||
input: URL | ReadableStream<Uint8Array>,
|
||||
flags: FFmpegFlags,
|
||||
opts?: { ffmpegPath?: string | URL },
|
||||
): ReadableStream<Uint8Array> {
|
||||
const { ffmpegPath = 'ffmpeg' } = opts ?? {};
|
||||
|
||||
const args = ['-i', input instanceof URL ? input.href : 'pipe:0'];
|
||||
|
||||
for (const [key, value] of Object.entries(flags)) {
|
||||
if (typeof value === 'string') {
|
||||
if (value) {
|
||||
args.push(`-${key}`, value);
|
||||
} else {
|
||||
args.push(`-${key}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
args.push('pipe:1'); // Output to stdout
|
||||
|
||||
// Spawn the FFmpeg process
|
||||
const command = new Deno.Command(ffmpegPath, {
|
||||
args,
|
||||
stdin: input instanceof ReadableStream ? 'piped' : 'null',
|
||||
stdout: 'piped',
|
||||
});
|
||||
|
||||
const child = command.spawn();
|
||||
|
||||
// Pipe the input stream into FFmpeg stdin and ensure completion
|
||||
if (input instanceof ReadableStream) {
|
||||
input.pipeTo(child.stdin).catch((e: unknown) => {
|
||||
if (e instanceof Error && e.name === 'BrokenPipe') {
|
||||
// Ignore. ffmpeg may close its stdin early once it has read enough of the input.
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Return the FFmpeg stdout stream
|
||||
return child.stdout;
|
||||
}
|
||||
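Flags map one-to-one onto CLI switches: a non-empty value becomes `-key value`, an empty string becomes a bare `-key`, and input/output are pinned to `-i …` and `pipe:1`. A short usage sketch (paths are illustrative):

import { ffmpeg } from '@ditto/transcode';

const out = ffmpeg(new URL('file:///tmp/in.webm'), {
  'nostdin': '', // bare flag
  'c:v': 'libx264',
  'f': 'mp4',
  'movflags': 'frag_keyframe+empty_moov',
});

// Spawns roughly: ffmpeg -i file:///tmp/in.webm -nostdin -c:v libx264 -f mp4 -movflags frag_keyframe+empty_moov pipe:1
await Deno.writeFile('/tmp/out.mp4', out);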
33
packages/transcode/ffprobe.test.ts
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
import { assertObjectMatch } from '@std/assert';
|
||||
|
||||
import { ffprobe } from './ffprobe.ts';
|
||||
|
||||
const uri = new URL('./buckbunny.mp4', import.meta.url);
|
||||
|
||||
Deno.test('ffprobe from ReadableStream', async () => {
|
||||
await using file = await Deno.open(uri);
|
||||
|
||||
const stream = ffprobe(file.readable, {
|
||||
'v': 'error',
|
||||
'select_streams': 'v:0',
|
||||
'show_entries': 'stream=width,height',
|
||||
'of': 'json',
|
||||
});
|
||||
|
||||
const { streams: [dimensions] } = await new Response(stream).json();
|
||||
|
||||
assertObjectMatch(dimensions, { width: 1920, height: 1080 });
|
||||
});
|
||||
|
||||
Deno.test('ffprobe from file URI', async () => {
|
||||
const stream = ffprobe(uri, {
|
||||
'v': 'error',
|
||||
'select_streams': 'v:0',
|
||||
'show_entries': 'stream=width,height',
|
||||
'of': 'json',
|
||||
});
|
||||
|
||||
const { streams: [dimensions] } = await new Response(stream).json();
|
||||
|
||||
assertObjectMatch(dimensions, { width: 1920, height: 1080 });
|
||||
});
|
||||
56
packages/transcode/ffprobe.ts
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
export interface FFprobeFlags {
|
||||
'v'?: string;
|
||||
'select_streams'?: string;
|
||||
'show_entries'?: string;
|
||||
'of'?: string;
|
||||
[key: string]: string | undefined;
|
||||
}
|
||||
|
||||
export function ffprobe(
|
||||
input: URL | ReadableStream<Uint8Array>,
|
||||
flags: FFprobeFlags,
|
||||
opts?: { ffprobePath?: string | URL },
|
||||
): ReadableStream<Uint8Array> {
|
||||
const { ffprobePath = 'ffprobe' } = opts ?? {};
|
||||
|
||||
const args = [];
|
||||
|
||||
for (const [key, value] of Object.entries(flags)) {
|
||||
if (typeof value === 'string') {
|
||||
if (value) {
|
||||
args.push(`-${key}`, value);
|
||||
} else {
|
||||
args.push(`-${key}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (input instanceof URL) {
|
||||
args.push('-i', input.href);
|
||||
} else {
|
||||
args.push('-i', 'pipe:0');
|
||||
}
|
||||
|
||||
// Spawn the FFprobe process
|
||||
const command = new Deno.Command(ffprobePath, {
|
||||
args,
|
||||
stdin: input instanceof ReadableStream ? 'piped' : 'null',
|
||||
stdout: 'piped',
|
||||
});
|
||||
|
||||
const child = command.spawn();
|
||||
|
||||
// Pipe the input stream into ffprobe stdin and ensure completion
|
||||
if (input instanceof ReadableStream) {
|
||||
input.pipeTo(child.stdin).catch((e: unknown) => {
|
||||
if (e instanceof Error && e.name === 'BrokenPipe') {
|
||||
// Ignore. ffprobe closes the pipe once it has read the metadata.
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Return the ffprobe stdout stream
|
||||
return child.stdout;
|
||||
}
|
||||
12
packages/transcode/frame.test.ts
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
import { extractVideoFrame } from './frame.ts';
|
||||
|
||||
const uri = new URL('./buckbunny.mp4', import.meta.url);
|
||||
|
||||
Deno.test('extractVideoFrame', async () => {
|
||||
await using file = await Deno.open(uri);
|
||||
|
||||
const result = await extractVideoFrame(file.readable);
|
||||
|
||||
await Deno.mkdir(new URL('./tmp', import.meta.url), { recursive: true });
|
||||
await Deno.writeFile(new URL('./tmp/poster.jpg', import.meta.url), result);
|
||||
});
|
||||
17
packages/transcode/frame.ts
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import { ffmpeg } from './ffmpeg.ts';
|
||||
|
||||
export function extractVideoFrame(
|
||||
input: URL | ReadableStream<Uint8Array>,
|
||||
ss: string = '00:00:01',
|
||||
opts?: { ffmpegPath?: string | URL },
|
||||
): Promise<Uint8Array> {
|
||||
const output = ffmpeg(input, {
|
||||
'ss': ss, // Seek to timestamp
|
||||
'frames:v': '1', // Extract only 1 frame
|
||||
'q:v': '2', // High-quality JPEG (lower = better quality)
|
||||
'f': 'image2', // Force image format
|
||||
'loglevel': 'fatal',
|
||||
}, opts);
|
||||
|
||||
return new Response(output).bytes();
|
||||
}
|
||||
5
packages/transcode/mod.ts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
export { analyzeFile } from './analyze.ts';
|
||||
export { ffmpeg, type FFmpegFlags } from './ffmpeg.ts';
|
||||
export { ffprobe, type FFprobeFlags } from './ffprobe.ts';
|
||||
export { extractVideoFrame } from './frame.ts';
|
||||
export { transcodeVideo } from './transcode.ts';
|
||||
9
packages/transcode/transcode.test.ts
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
import { transcodeVideo } from './transcode.ts';
|
||||
|
||||
Deno.test('transcodeVideo', async () => {
|
||||
await using file = await Deno.open(new URL('./buckbunny.mp4', import.meta.url));
|
||||
const output = transcodeVideo(file.readable);
|
||||
|
||||
await Deno.mkdir(new URL('./tmp', import.meta.url), { recursive: true });
|
||||
await Deno.writeFile(new URL('./tmp/buckbunny-transcoded.mp4', import.meta.url), output);
|
||||
});
|
||||
19
packages/transcode/transcode.ts
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
import { ffmpeg } from './ffmpeg.ts';
|
||||
|
||||
export function transcodeVideo(
|
||||
input: URL | ReadableStream<Uint8Array>,
|
||||
opts?: { ffmpegPath?: string | URL },
|
||||
): ReadableStream<Uint8Array> {
|
||||
return ffmpeg(input, {
|
||||
'safe': '1', // Safe mode
|
||||
'nostdin': '', // Disable stdin
|
||||
'c:v': 'libx264', // Convert to H.264
|
||||
'preset': 'veryfast', // Encoding speed
|
||||
'loglevel': 'fatal', // Suppress logs
|
||||
'crf': '23', // Compression level (lower = better quality)
|
||||
'c:a': 'aac', // Convert to AAC audio
|
||||
'b:a': '128k', // Audio bitrate
|
||||
'movflags': 'frag_keyframe+empty_moov', // Ensures MP4 streaming compatibility
|
||||
'f': 'mp4', // Force MP4 format
|
||||
}, opts);
|
||||
}
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@ditto/translators",
|
||||
"version": "1.1.0",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@ditto/uploaders",
|
||||
"version": "1.1.0",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import { Semaphore } from '@core/asyncutil';
|
||||
import { NostrEvent } from '@nostrify/nostrify';
|
||||
import { MockRelay } from '@nostrify/nostrify/test';
|
||||
|
||||
import { DittoConf } from '@ditto/conf';
|
||||
import { DittoPolyPg } from '@ditto/db';
|
||||
|
|
@ -11,7 +12,7 @@ const conf = new DittoConf(Deno.env);
|
|||
const db = new DittoPolyPg(conf.databaseUrl);
|
||||
|
||||
const pgstore = new DittoPgStore({ db, conf });
|
||||
const relaystore = new DittoRelayStore({ conf, db, relay: pgstore });
|
||||
const relaystore = new DittoRelayStore({ conf, db, pool: new MockRelay(), relay: pgstore });
|
||||
|
||||
const sem = new Semaphore(5);
|
||||
|
||||
|
|
|
|||