diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 88567db9..b754ff1e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: denoland/deno:2.1.1 +image: denoland/deno:2.2.0 default: interruptible: true @@ -10,7 +10,7 @@ test: stage: test script: - deno fmt --check - - deno lint + - deno task lint - deno task check - deno task test --coverage=cov_profile - deno coverage cov_profile diff --git a/.tool-versions b/.tool-versions index 821ce0ce..f9adf79b 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1 +1 @@ -deno 2.1.1 \ No newline at end of file +deno 2.2.0 \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index 298c3be5..35b505d2 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -8,7 +8,7 @@ "request": "launch", "name": "Launch Program", "type": "node", - "program": "${workspaceFolder}/src/server.ts", + "program": "${workspaceFolder}/packages/ditto/server.ts", "cwd": "${workspaceFolder}", "runtimeExecutable": "deno", "runtimeArgs": [ diff --git a/Dockerfile b/Dockerfile index f1644334..0b8724a0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,10 @@ -FROM denoland/deno:2.1.1 +FROM denoland/deno:2.2.0 ENV PORT 5000 WORKDIR /app RUN mkdir -p data && chown -R deno data COPY . . 
-RUN deno cache --allow-import src/server.ts +RUN deno cache --allow-import packages/ditto/server.ts RUN apt-get update && apt-get install -y unzip curl RUN deno task soapbox CMD deno task start diff --git a/ansible/playbook.yml b/ansible/playbook.yml deleted file mode 100644 index 59c2c8ea..00000000 --- a/ansible/playbook.yml +++ /dev/null @@ -1,30 +0,0 @@ ---- -- name: Update Ditto - hosts: all - become: true - tasks: - - name: Update Deno - shell: - cmd: curl -fsSL https://deno.land/x/install/install.sh | sh - environment: - DENO_INSTALL: /usr/local - become_user: root - - - name: Update Soapbox - shell: - cmd: deno task soapbox - chdir: /opt/ditto - become_user: ditto - - - name: Update ditto from the main branch - git: - repo: 'https://gitlab.com/soapbox-pub/ditto.git' - dest: '/opt/ditto' - version: main - become_user: ditto - - - name: Restart ditto service - systemd: - name: ditto - state: restarted - become_user: root diff --git a/deno.json b/deno.json index b37f7e8c..75f94cdd 100644 --- a/deno.json +++ b/deno.json @@ -1,17 +1,31 @@ { "version": "1.1.0", + "workspace": [ + "./packages/conf", + "./packages/db", + "./packages/ditto", + "./packages/lang", + "./packages/mastoapi", + "./packages/metrics", + "./packages/nip98", + "./packages/policies", + "./packages/ratelimiter", + "./packages/translators", + "./packages/uploaders" + ], "tasks": { - "start": "deno run -A --env-file --deny-read=.env src/server.ts", - "dev": "deno run -A --env-file --deny-read=.env --watch src/server.ts", + "start": "deno run -A --env-file --deny-read=.env packages/ditto/server.ts", + "dev": "deno run -A --env-file --deny-read=.env --watch packages/ditto/server.ts", "hook": "deno run --allow-read --allow-run --allow-write https://deno.land/x/deno_hooks@0.1.1/mod.ts", "db:export": "deno run -A --env-file --deny-read=.env scripts/db-export.ts", "db:import": "deno run -A --env-file --deny-read=.env scripts/db-import.ts", "db:cleanup": "deno run -A --env-file --deny-read=.env 
scripts/db-policy.ts", "db:migrate": "deno run -A --env-file --deny-read=.env scripts/db-migrate.ts", "nostr:pull": "deno run -A --env-file --deny-read=.env scripts/nostr-pull.ts", - "debug": "deno run -A --env-file --deny-read=.env --inspect src/server.ts", + "debug": "deno run -A --env-file --deny-read=.env --inspect packages/ditto/server.ts", "test": "deno test -A --env-file=.env.test --deny-read=.env --junit-path=./deno-test.xml", "check": "deno check --allow-import .", + "lint": "deno lint --allow-import", "nsec": "deno run scripts/nsec.ts", "admin:event": "deno run -A --env-file --deny-read=.env scripts/admin-event.ts", "admin:role": "deno run -A --env-file --deny-read=.env scripts/admin-role.ts", @@ -20,8 +34,11 @@ "stats:recompute": "deno run -A --env-file --deny-read=.env scripts/stats-recompute.ts", "soapbox": "curl -O https://dl.soapbox.pub/main/soapbox.zip && mkdir -p public && mv soapbox.zip public/ && cd public/ && unzip -o soapbox.zip && rm soapbox.zip", "trends": "deno run -A --env-file --deny-read=.env scripts/trends.ts", - "clean:deps": "deno cache --reload src/app.ts", + "clean:deps": "deno cache --reload packages/ditto/app.ts", + "db:populate:nip05": "deno run -A --env-file --deny-read=.env scripts/db-populate-nip05.ts", "db:populate-search": "deno run -A --env-file --deny-read=.env scripts/db-populate-search.ts", + "db:populate-extensions": "deno run -A --env-file --deny-read=.env scripts/db-populate-extensions.ts", + "db:streak:recompute": "deno run -A --env-file --deny-read=.env scripts/db-streak-recompute.ts", "vapid": "deno run scripts/vapid.ts" }, "unstable": [ @@ -34,25 +51,26 @@ "./public" ], "imports": { - "@/": "./src/", "@b-fuze/deno-dom": "jsr:@b-fuze/deno-dom@^0.1.47", "@bradenmacdonald/s3-lite-client": "jsr:@bradenmacdonald/s3-lite-client@^0.7.4", + "@cashu/cashu-ts": "npm:@cashu/cashu-ts@^2.2.0", + "@core/asyncutil": "jsr:@core/asyncutil@^1.2.0", "@electric-sql/pglite": "npm:@electric-sql/pglite@^0.2.8", 
"@esroyo/scoped-performance": "jsr:@esroyo/scoped-performance@^3.1.0", "@gfx/canvas-wasm": "jsr:@gfx/canvas-wasm@^0.4.2", "@hono/hono": "jsr:@hono/hono@^4.4.6", "@isaacs/ttlcache": "npm:@isaacs/ttlcache@^1.4.1", - "@lambdalisue/async": "jsr:@lambdalisue/async@^2.1.1", "@negrel/webpush": "jsr:@negrel/webpush@^0.3.0", "@noble/secp256k1": "npm:@noble/secp256k1@^2.0.0", - "@nostrify/db": "jsr:@nostrify/db@^0.36.1", - "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.36.0", - "@nostrify/policies": "jsr:@nostrify/policies@^0.35.0", + "@nostrify/db": "jsr:@nostrify/db@^0.39.4", + "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.39.1", + "@nostrify/policies": "jsr:@nostrify/policies@^0.36.1", + "@nostrify/types": "jsr:@nostrify/types@^0.36.0", "@scure/base": "npm:@scure/base@^1.1.6", "@sentry/deno": "https://deno.land/x/sentry@7.112.2/index.mjs", "@soapbox/kysely-pglite": "jsr:@soapbox/kysely-pglite@^1.0.0", + "@soapbox/logi": "jsr:@soapbox/logi@^0.3.0", "@soapbox/safe-fetch": "jsr:@soapbox/safe-fetch@^2.0.0", - "@soapbox/stickynotes": "jsr:@soapbox/stickynotes@^0.4.0", "@std/assert": "jsr:@std/assert@^0.225.1", "@std/cli": "jsr:@std/cli@^0.223.0", "@std/crypto": "jsr:@std/crypto@^0.224.0", @@ -61,16 +79,16 @@ "@std/json": "jsr:@std/json@^0.223.0", "@std/media-types": "jsr:@std/media-types@^0.224.1", "@std/streams": "jsr:@std/streams@^0.223.0", + "@std/testing": "jsr:@std/testing@^1.0.9", "blurhash": "npm:blurhash@2.0.5", "comlink": "npm:comlink@^4.4.1", "comlink-async-generator": "npm:comlink-async-generator@^0.0.1", "commander": "npm:commander@12.1.0", - "deno.json": "./deno.json", "entities": "npm:entities@^4.5.0", "fast-stable-stringify": "npm:fast-stable-stringify@^1.0.0", "formdata-helper": "npm:formdata-helper@^0.3.0", "hono-rate-limiter": "npm:hono-rate-limiter@^0.3.0", - "iso-639-1": "npm:iso-639-1@2.1.15", + "iso-639-1": "npm:iso-639-1@^3.1.5", "isomorphic-dompurify": "npm:isomorphic-dompurify@^2.16.0", "kysely": "npm:kysely@^0.27.4", "kysely-postgres-js": 
"npm:kysely-postgres-js@2.0.0", @@ -83,7 +101,6 @@ "nostr-tools": "npm:nostr-tools@2.5.1", "nostr-wasm": "npm:nostr-wasm@^0.1.0", "path-to-regexp": "npm:path-to-regexp@^7.1.0", - "png-to-ico": "npm:png-to-ico@^2.1.8", "postgres": "https://gitlab.com/soapbox-pub/postgres.js/-/raw/e79d7d2039446fbf7a37d4eca0d17e94a94b8b53/deno/mod.js", "prom-client": "npm:prom-client@^15.1.2", "question-deno": "https://raw.githubusercontent.com/ocpu/question-deno/10022b8e52555335aa510adb08b0a300df3cf904/mod.ts", @@ -95,16 +112,6 @@ "zod": "npm:zod@^3.23.8", "~/fixtures/": "./fixtures/" }, - "lint": { - "rules": { - "tags": [ - "recommended" - ], - "exclude": [ - "no-explicit-any" - ] - } - }, "fmt": { "useTabs": false, "lineWidth": 120, diff --git a/deno.lock b/deno.lock index f9169200..1f039c17 100644 --- a/deno.lock +++ b/deno.lock @@ -3,6 +3,7 @@ "specifiers": { "jsr:@b-fuze/deno-dom@~0.1.47": "0.1.48", "jsr:@bradenmacdonald/s3-lite-client@~0.7.4": "0.7.6", + "jsr:@core/asyncutil@^1.2.0": "1.2.0", "jsr:@denosaurs/plug@1.0.3": "1.0.3", "jsr:@esroyo/scoped-performance@^3.1.0": "3.1.0", "jsr:@gfx/canvas-wasm@~0.4.2": "0.4.2", @@ -26,33 +27,35 @@ "jsr:@gleasonator/policy@0.9.1": "0.9.1", "jsr:@gleasonator/policy@0.9.2": "0.9.2", "jsr:@gleasonator/policy@0.9.3": "0.9.3", - "jsr:@hono/hono@^4.4.6": "4.6.2", - "jsr:@lambdalisue/async@^2.1.1": "2.1.1", + "jsr:@gleasonator/policy@0.9.4": "0.9.4", + "jsr:@hono/hono@^4.4.6": "4.6.15", "jsr:@negrel/http-ece@0.6.0": "0.6.0", "jsr:@negrel/webpush@0.3": "0.3.0", - "jsr:@nostrify/db@~0.36.1": "0.36.1", + "jsr:@nostrify/db@~0.39.4": "0.39.4", "jsr:@nostrify/nostrify@0.31": "0.31.0", "jsr:@nostrify/nostrify@0.32": "0.32.0", - "jsr:@nostrify/nostrify@0.35": "0.35.0", - "jsr:@nostrify/nostrify@0.36": "0.36.0", + "jsr:@nostrify/nostrify@0.36": "0.36.2", + "jsr:@nostrify/nostrify@0.39": "0.39.1", "jsr:@nostrify/nostrify@~0.22.1": "0.22.5", "jsr:@nostrify/nostrify@~0.22.4": "0.22.4", "jsr:@nostrify/nostrify@~0.22.5": "0.22.5", + 
"jsr:@nostrify/nostrify@~0.39.1": "0.39.1", "jsr:@nostrify/policies@0.33": "0.33.0", "jsr:@nostrify/policies@0.33.1": "0.33.1", "jsr:@nostrify/policies@0.34": "0.34.0", - "jsr:@nostrify/policies@0.35": "0.35.0", "jsr:@nostrify/policies@0.36": "0.36.0", "jsr:@nostrify/policies@~0.33.1": "0.33.1", "jsr:@nostrify/policies@~0.36.1": "0.36.1", "jsr:@nostrify/types@0.30": "0.30.1", "jsr:@nostrify/types@0.35": "0.35.0", + "jsr:@nostrify/types@0.36": "0.36.0", "jsr:@nostrify/types@~0.30.1": "0.30.1", "jsr:@soapbox/kysely-pglite@1": "1.0.0", + "jsr:@soapbox/logi@0.3": "0.3.0", "jsr:@soapbox/safe-fetch@2": "2.0.0", - "jsr:@soapbox/stickynotes@0.4": "0.4.0", "jsr:@std/assert@0.223": "0.223.0", "jsr:@std/assert@0.224": "0.224.0", + "jsr:@std/assert@^1.0.10": "1.0.11", "jsr:@std/assert@~0.213.1": "0.213.1", "jsr:@std/assert@~0.225.1": "0.225.3", "jsr:@std/bytes@0.223": "0.223.0", @@ -60,10 +63,11 @@ "jsr:@std/bytes@0.224.0": "0.224.0", "jsr:@std/bytes@^1.0.0-rc.3": "1.0.0", "jsr:@std/bytes@^1.0.1-rc.3": "1.0.2", - "jsr:@std/bytes@^1.0.2": "1.0.2", + "jsr:@std/bytes@^1.0.2": "1.0.4", "jsr:@std/bytes@^1.0.2-rc.3": "1.0.2", "jsr:@std/cli@0.223": "0.223.0", "jsr:@std/crypto@0.224": "0.224.0", + "jsr:@std/data-structures@^1.0.6": "1.0.6", "jsr:@std/encoding@0.213.1": "0.213.1", "jsr:@std/encoding@0.224": "0.224.3", "jsr:@std/encoding@0.224.0": "0.224.0", @@ -71,18 +75,23 @@ "jsr:@std/encoding@~0.224.1": "0.224.3", "jsr:@std/fmt@0.213.1": "0.213.1", "jsr:@std/fs@0.213.1": "0.213.1", + "jsr:@std/fs@^1.0.9": "1.0.11", "jsr:@std/fs@~0.229.3": "0.229.3", - "jsr:@std/internal@1": "1.0.4", + "jsr:@std/internal@1": "1.0.5", + "jsr:@std/internal@^1.0.5": "1.0.5", "jsr:@std/io@0.223": "0.223.0", - "jsr:@std/io@0.224": "0.224.8", + "jsr:@std/io@0.224": "0.224.9", "jsr:@std/json@0.223": "0.223.0", "jsr:@std/media-types@0.224.0": "0.224.0", "jsr:@std/media-types@~0.224.1": "0.224.1", "jsr:@std/path@0.213.1": "0.213.1", "jsr:@std/path@0.224.0": "0.224.0", "jsr:@std/path@1.0.0-rc.1": "1.0.0-rc.1", 
+ "jsr:@std/path@^1.0.8": "1.0.8", "jsr:@std/path@~0.213.1": "0.213.1", "jsr:@std/streams@0.223": "0.223.0", + "jsr:@std/testing@^1.0.9": "1.0.9", + "npm:@cashu/cashu-ts@^2.2.0": "2.2.0", "npm:@electric-sql/pglite@~0.2.8": "0.2.8", "npm:@isaacs/ttlcache@^1.4.1": "1.4.1", "npm:@noble/hashes@^1.4.0": "1.4.0", @@ -90,7 +99,7 @@ "npm:@scure/base@^1.1.6": "1.1.6", "npm:@scure/bip32@^1.4.0": "1.4.0", "npm:@scure/bip39@^1.3.0": "1.3.0", - "npm:@types/node@*": "18.16.19", + "npm:@types/node@*": "22.5.4", "npm:blurhash@2.0.5": "2.0.5", "npm:comlink-async-generator@*": "0.0.1", "npm:comlink-async-generator@^0.0.1": "0.0.1", @@ -100,7 +109,7 @@ "npm:fast-stable-stringify@1": "1.0.0", "npm:formdata-helper@0.3": "0.3.0", "npm:hono-rate-limiter@0.3": "0.3.0_hono@4.2.5", - "npm:iso-639-1@2.1.15": "2.1.15", + "npm:iso-639-1@^3.1.5": "3.1.5", "npm:isomorphic-dompurify@^2.16.0": "2.16.0", "npm:kysely-postgres-js@2.0.0": "2.0.0_kysely@0.27.3_postgres@3.4.4", "npm:kysely@~0.27.2": "0.27.4", @@ -115,11 +124,11 @@ "npm:lru-cache@^10.2.0": "10.2.2", "npm:lru-cache@^10.2.2": "10.2.2", "npm:nostr-tools@2.5.1": "2.5.1", + "npm:nostr-tools@^2.10.4": "2.10.4", "npm:nostr-tools@^2.5.0": "2.5.1", "npm:nostr-tools@^2.7.0": "2.7.0", "npm:nostr-wasm@0.1": "0.1.0", "npm:path-to-regexp@^7.1.0": "7.1.0", - "npm:png-to-ico@^2.1.8": "2.1.8", "npm:postgres@3.4.4": "3.4.4", "npm:prom-client@^15.1.2": "15.1.2", "npm:sharp@~0.33.5": "0.33.5", @@ -129,6 +138,7 @@ "npm:type-fest@^4.3.0": "4.18.2", "npm:unfurl.js@^6.4.0": "6.4.0", "npm:websocket-ts@^2.1.5": "2.1.5", + "npm:websocket-ts@^2.2.1": "2.2.1", "npm:zod@^3.23.8": "3.23.8" }, "jsr": { @@ -139,7 +149,10 @@ ] }, "@b-fuze/deno-dom@0.1.48": { - "integrity": "bf5b591aef2e9e9c59adfcbb93a9ecd45bab5b7c8263625beafa5c8f1662e7da" + "integrity": "bf5b591aef2e9e9c59adfcbb93a9ecd45bab5b7c8263625beafa5c8f1662e7da", + "dependencies": [ + "jsr:@denosaurs/plug" + ] }, "@bradenmacdonald/s3-lite-client@0.7.6": { "integrity": 
"2b5976dca95d207dc88e23f9807e3eecbc441b0cf547dcda5784afe6668404d1", @@ -147,6 +160,9 @@ "jsr:@std/io@0.224" ] }, + "@core/asyncutil@1.2.0": { + "integrity": "9967f15190c60df032c13f72ce5ac73d185c34f31c53dc918d8800025854c118" + }, "@denosaurs/plug@1.0.3": { "integrity": "b010544e386bea0ff3a1d05e0c88f704ea28cbd4d753439c2f1ee021a85d4640", "dependencies": [ @@ -294,6 +310,13 @@ "jsr:@nostrify/policies@~0.36.1" ] }, + "@gleasonator/policy@0.9.4": { + "integrity": "5d5b8a585b8e3cd6e6b7daed2cfa61cd1a3e5945691f092eb98f8671384c3657", + "dependencies": [ + "jsr:@nostrify/nostrify@0.36", + "jsr:@nostrify/policies@~0.36.1" + ] + }, "@hono/hono@4.4.6": { "integrity": "aa557ca9930787ee86b9ca1730691f1ce1c379174c2cb244d5934db2b6314453" }, @@ -321,8 +344,8 @@ "@hono/hono@4.6.2": { "integrity": "35fcf3be4687825080b01bed7bbe2ac66f8d8b8939f0bad459661bf3b46d916f" }, - "@lambdalisue/async@2.1.1": { - "integrity": "1fc9bc6f4ed50215cd2f7217842b18cea80f81c25744f88f8c5eb4be5a1c9ab4" + "@hono/hono@4.6.15": { + "integrity": "935b3b12e98e4b22bcd1aa4dbe6587321e431c79829eba61f535b4ede39fd8b1" }, "@negrel/http-ece@0.6.0": { "integrity": "7afdd81b86ea5b21a9677b323c01c3338705e11cc2bfed250870f5349d8f86f7", @@ -341,13 +364,13 @@ "jsr:@std/path@0.224.0" ] }, - "@nostrify/db@0.36.1": { - "integrity": "b65b89ca6fe98d9dbcc0402b5c9c07b8430c2c91f84ba4128ff2eeed70c3d49f", + "@nostrify/db@0.39.4": { + "integrity": "53fecea3b67394cf4f52795f89d1d065bdeb0627b8655cc7fc3a89d6b21adf01", "dependencies": [ - "jsr:@nostrify/nostrify@0.36", - "jsr:@nostrify/types@0.35", + "jsr:@nostrify/nostrify@0.39", + "jsr:@nostrify/types@0.36", "npm:kysely@~0.27.3", - "npm:nostr-tools@^2.7.0" + "npm:nostr-tools@^2.10.4" ] }, "@nostrify/nostrify@0.22.4": { @@ -361,7 +384,7 @@ "npm:kysely@~0.27.3", "npm:lru-cache@^10.2.0", "npm:nostr-tools@^2.5.0", - "npm:websocket-ts", + "npm:websocket-ts@^2.1.5", "npm:zod" ] }, @@ -375,7 +398,7 @@ "npm:kysely@~0.27.3", "npm:lru-cache@^10.2.0", "npm:nostr-tools@^2.7.0", - "npm:websocket-ts", + 
"npm:websocket-ts@^2.1.5", "npm:zod" ] }, @@ -390,7 +413,7 @@ "npm:@scure/bip39", "npm:lru-cache@^10.2.0", "npm:nostr-tools@^2.7.0", - "npm:websocket-ts", + "npm:websocket-ts@^2.1.5", "npm:zod" ] }, @@ -403,7 +426,7 @@ "npm:@scure/bip39", "npm:lru-cache@^10.2.0", "npm:nostr-tools@^2.7.0", - "npm:websocket-ts", + "npm:websocket-ts@^2.1.5", "npm:zod" ] }, @@ -416,20 +439,7 @@ "npm:@scure/bip39", "npm:lru-cache@^10.2.0", "npm:nostr-tools@^2.7.0", - "npm:websocket-ts", - "npm:zod" - ] - }, - "@nostrify/nostrify@0.35.0": { - "integrity": "9bfef4883838b8b4cb2e2b28a60b72de95391ca5b789bc7206a2baea054dea55", - "dependencies": [ - "jsr:@nostrify/types@0.35", - "jsr:@std/encoding@~0.224.1", - "npm:@scure/bip32", - "npm:@scure/bip39", - "npm:lru-cache@^10.2.0", - "npm:nostr-tools@^2.7.0", - "npm:websocket-ts", + "npm:websocket-ts@^2.1.5", "npm:zod" ] }, @@ -444,7 +454,67 @@ "npm:@scure/bip39", "npm:lru-cache@^10.2.0", "npm:nostr-tools@^2.7.0", - "npm:websocket-ts", + "npm:websocket-ts@^2.1.5", + "npm:zod" + ] + }, + "@nostrify/nostrify@0.36.2": { + "integrity": "cc4787ca170b623a2e5dfed1baa4426077daa6143af728ea7dd325d58f4d04d6", + "dependencies": [ + "jsr:@nostrify/types@0.35", + "jsr:@std/encoding@~0.224.1", + "npm:@scure/bip32", + "npm:@scure/bip39", + "npm:lru-cache@^10.2.0", + "npm:nostr-tools@^2.7.0", + "npm:websocket-ts@^2.1.5", + "npm:zod" + ] + }, + "@nostrify/nostrify@0.38.0": { + "integrity": "9ec7920057ee3a4dcbaef7e706dedea622bfdfdf0f6aac11047443f88d953deb", + "dependencies": [ + "jsr:@nostrify/types@0.36", + "jsr:@std/crypto", + "jsr:@std/encoding@~0.224.1", + "npm:@scure/base", + "npm:@scure/bip32", + "npm:@scure/bip39", + "npm:lru-cache@^10.2.0", + "npm:nostr-tools@^2.10.4", + "npm:websocket-ts@^2.1.5", + "npm:zod" + ] + }, + "@nostrify/nostrify@0.39.0": { + "integrity": "f7e052c32b8b9bafe0f2517dcf090e7d3df5aed38452a0cf61ade817d34067ee", + "dependencies": [ + "jsr:@nostrify/nostrify@0.39", + "jsr:@nostrify/types@0.36", + "jsr:@std/crypto", + 
"jsr:@std/encoding@~0.224.1", + "npm:@scure/base", + "npm:@scure/bip32", + "npm:@scure/bip39", + "npm:lru-cache@^10.2.0", + "npm:nostr-tools@^2.10.4", + "npm:websocket-ts@^2.2.1", + "npm:zod" + ] + }, + "@nostrify/nostrify@0.39.1": { + "integrity": "84f98c815a07f4151bd02188a3525e438c416e9de632c79c9da9edbfca580d7f", + "dependencies": [ + "jsr:@nostrify/nostrify@~0.39.1", + "jsr:@nostrify/types@0.36", + "jsr:@std/crypto", + "jsr:@std/encoding@~0.224.1", + "npm:@scure/base", + "npm:@scure/bip32", + "npm:@scure/bip39", + "npm:lru-cache@^10.2.0", + "npm:nostr-tools@^2.10.4", + "npm:websocket-ts@^2.2.1", "npm:zod" ] }, @@ -470,14 +540,6 @@ "npm:nostr-tools@^2.7.0" ] }, - "@nostrify/policies@0.35.0": { - "integrity": "b828fac9f253e460a9587c05588b7dae6a0a32c5a9c9083e449219887b9e8e20", - "dependencies": [ - "jsr:@nostrify/nostrify@0.35", - "jsr:@nostrify/types@0.35", - "npm:nostr-tools@^2.7.0" - ] - }, "@nostrify/policies@0.36.0": { "integrity": "ad1930de48ce03cdf34da456af1563b487581d1d86683cd416ad760ae40b1fb3", "dependencies": [ @@ -503,21 +565,24 @@ "@nostrify/types@0.35.0": { "integrity": "b8d515563d467072694557d5626fa1600f74e83197eef45dd86a9a99c64f7fe6" }, + "@nostrify/types@0.36.0": { + "integrity": "b3413467debcbd298d217483df4e2aae6c335a34765c90ac7811cf7c637600e7" + }, "@soapbox/kysely-pglite@1.0.0": { "integrity": "0954b1bf3deab051c479cba966b1e6ed5a0a966aa21d1f40143ec8f5efcd475d", "dependencies": [ "npm:kysely@~0.27.4" ] }, + "@soapbox/logi@0.3.0": { + "integrity": "5aa5121e82422b0a1b5ec81790f75407c16c788d10af629cecef9a35d1b4c290" + }, "@soapbox/safe-fetch@2.0.0": { "integrity": "f451d686501c76a0faa058fe9d2073676282a8a42c3b93c59159eb9191f11b5f", "dependencies": [ "npm:tldts@^6.1.61" ] }, - "@soapbox/stickynotes@0.4.0": { - "integrity": "60bfe61ab3d7e04bf708273b1e2d391a59534bdf29e54160e98d7afd328ca1ec" - }, "@std/assert@0.213.1": { "integrity": "24c28178b30c8e0782c18e8e94ea72b16282207569cdd10ffb9d1d26f2edebfe" }, @@ -530,7 +595,13 @@ "@std/assert@0.225.3": { 
"integrity": "b3c2847aecf6955b50644cdb9cf072004ea3d1998dd7579fc0acb99dbb23bd4f", "dependencies": [ - "jsr:@std/internal" + "jsr:@std/internal@1" + ] + }, + "@std/assert@1.0.11": { + "integrity": "2461ef3c368fe88bc60e186e7744a93112f16fd110022e113a0849e94d1c83c1", + "dependencies": [ + "jsr:@std/internal@^1.0.5" ] }, "@std/bytes@0.223.0": { @@ -545,6 +616,9 @@ "@std/bytes@1.0.2": { "integrity": "fbdee322bbd8c599a6af186a1603b3355e59a5fb1baa139f8f4c3c9a1b3e3d57" }, + "@std/bytes@1.0.4": { + "integrity": "11a0debe522707c95c7b7ef89b478c13fb1583a7cfb9a85674cd2cc2e3a28abc" + }, "@std/cli@0.223.0": { "integrity": "2feb7970f2028904c3edc22ea916ce9538113dfc170844f3eae03578c333c356", "dependencies": [ @@ -558,6 +632,9 @@ "jsr:@std/encoding@0.224" ] }, + "@std/data-structures@1.0.6": { + "integrity": "76a7fd8080c66604c0496220a791860492ab21a04a63a969c0b9a0609bbbb760" + }, "@std/dotenv@0.224.0": { "integrity": "d9234cdf551507dcda60abb6c474289843741d8c07ee8ce540c60f5c1b220a1d" }, @@ -589,6 +666,12 @@ "jsr:@std/path@1.0.0-rc.1" ] }, + "@std/fs@1.0.11": { + "integrity": "ba674672693340c5ebdd018b4fe1af46cb08741f42b4c538154e97d217b55bdd", + "dependencies": [ + "jsr:@std/path@^1.0.8" + ] + }, "@std/internal@1.0.0": { "integrity": "ac6a6dfebf838582c4b4f61a6907374e27e05bedb6ce276e0f1608fe84e7cd9a" }, @@ -601,6 +684,9 @@ "@std/internal@1.0.4": { "integrity": "62e8e4911527e5e4f307741a795c0b0a9e6958d0b3790716ae71ce085f755422" }, + "@std/internal@1.0.5": { + "integrity": "54a546004f769c1ac9e025abd15a76b6671ddc9687e2313b67376125650dc7ba" + }, "@std/io@0.223.0": { "integrity": "2d8c3c2ab3a515619b90da2c6ff5ea7b75a94383259ef4d02116b228393f84f1", "dependencies": [ @@ -650,6 +736,12 @@ "jsr:@std/bytes@^1.0.2" ] }, + "@std/io@0.224.9": { + "integrity": "4414664b6926f665102e73c969cfda06d2c4c59bd5d0c603fd4f1b1c840d6ee3", + "dependencies": [ + "jsr:@std/bytes@^1.0.2" + ] + }, "@std/json@0.223.0": { "integrity": "9a4a255931dd0397924c6b10bb6a72fe3e28ddd876b981ada2e3b8dd0764163f", "dependencies": [ @@ 
-677,6 +769,9 @@ "@std/path@1.0.0-rc.1": { "integrity": "b8c00ae2f19106a6bb7cbf1ab9be52aa70de1605daeb2dbdc4f87a7cbaf10ff6" }, + "@std/path@1.0.8": { + "integrity": "548fa456bb6a04d3c1a1e7477986b6cffbce95102d0bb447c67c4ee70e0364be" + }, "@std/streams@0.223.0": { "integrity": "d6b28e498ced3960b04dc5d251f2dcfc1df244b5ec5a48dc23a8f9b490be3b99", "dependencies": [ @@ -684,9 +779,38 @@ "jsr:@std/bytes@0.223", "jsr:@std/io@0.223" ] + }, + "@std/testing@1.0.9": { + "integrity": "9bdd4ac07cb13e7594ac30e90f6ceef7254ac83a9aeaa089be0008f33aab5cd4", + "dependencies": [ + "jsr:@std/assert@^1.0.10", + "jsr:@std/data-structures", + "jsr:@std/fs@^1.0.9", + "jsr:@std/internal@^1.0.5", + "jsr:@std/path@^1.0.8" + ] } }, "npm": { + "@cashu/cashu-ts@2.2.0": { + "integrity": "sha512-7b6pGyjjpm3uAJvmOL+ztpRxqp1qnmzGpydp+Pu30pOjxj93EhejPTJVrZMDJ0P35y6u5+5jIjHF4k0fpovvmg==", + "dependencies": [ + "@cashu/crypto", + "@noble/curves@1.4.0", + "@noble/hashes@1.4.0", + "buffer" + ] + }, + "@cashu/crypto@0.3.4": { + "integrity": "sha512-mfv1Pj4iL1PXzUj9NKIJbmncCLMqYfnEDqh/OPxAX0nNBt6BOnVJJLjLWFlQeYxlnEfWABSNkrqPje1t5zcyhA==", + "dependencies": [ + "@noble/curves@1.8.1", + "@noble/hashes@1.7.1", + "@scure/bip32@1.6.2", + "@scure/bip39@1.5.4", + "buffer" + ] + }, "@electric-sql/pglite@0.2.8": { "integrity": "sha512-0wSmQu22euBRzR5ghqyIHnBH4MfwlkL5WstOrrA3KOsjEWEglvoL/gH92JajEUA6Ufei/+qbkB2hVloC/K/RxQ==" }, @@ -804,6 +928,12 @@ "@noble/hashes@1.4.0" ] }, + "@noble/curves@1.8.1": { + "integrity": "sha512-warwspo+UYUPep0Q+vtdVB4Ugn8GGQj8iyB3gnRWsztmUHTI3S1nhdiWNsPUGL0vud7JlRRk1XEu7Lq1KGTnMQ==", + "dependencies": [ + "@noble/hashes@1.7.1" + ] + }, "@noble/hashes@1.3.1": { "integrity": "sha512-EbqwksQwz9xDRGfDST86whPBgM65E0OH/pCgqW0GBVzO22bNE+NuIbeTb714+IfSjU3aRk47EUvXIb5bTsenKA==" }, @@ -813,6 +943,9 @@ "@noble/hashes@1.4.0": { "integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==" }, + "@noble/hashes@1.7.1": { + "integrity": 
"sha512-B8XBPsn4vT/KJAGqDzbwztd+6Yte3P4V7iafm24bxgDe/mlRuK6xmWPuCNrKt2vDafZ8MfJLlchDG/vYafQEjQ==" + }, "@noble/secp256k1@2.1.0": { "integrity": "sha512-XLEQQNdablO0XZOIniFQimiXsZDNwaYgL96dZwC54Q30imSbAOFf3NKtepc+cXyuZf5Q1HCgbqgZ2UFFuHVcEw==" }, @@ -825,6 +958,9 @@ "@scure/base@1.1.6": { "integrity": "sha512-ok9AWwhcgYuGG3Zfhyqg+zwl+Wn5uE+dwC0NV/2qQkx4dABbb/bx96vWu8NSj+BNjjSjno+JRYRjle1jV08k3g==" }, + "@scure/base@1.2.4": { + "integrity": "sha512-5Yy9czTO47mqz+/J8GM6GIId4umdCk1wc1q8rKERQulIoc8VP9pzDcghv10Tl2E7R96ZUx/PhND3ESYUQX8NuQ==" + }, "@scure/bip32@1.3.1": { "integrity": "sha512-osvveYtyzdEVbt3OfwwXFr4P2iVBL5u1Q3q4ONBfDY/UpOuXmOlbgwc1xECEboY8wIays8Yt6onaWMUdUbfl0A==", "dependencies": [ @@ -841,6 +977,14 @@ "@scure/base@1.1.6" ] }, + "@scure/bip32@1.6.2": { + "integrity": "sha512-t96EPDMbtGgtb7onKKqxRLfE5g05k7uHnHRM2xdE6BP/ZmxaLtPek4J4KfVn/90IQNrU1IOAqMgiDtUdtbe3nw==", + "dependencies": [ + "@noble/curves@1.8.1", + "@noble/hashes@1.7.1", + "@scure/base@1.2.4" + ] + }, "@scure/bip39@1.2.1": { "integrity": "sha512-Z3/Fsz1yr904dduJD0NpiyRHhRYHdcnyh73FZWiV+/qhWi83wNJ3NWolYqCEN+ZWsUz2TWwajJggcRE9r1zUYg==", "dependencies": [ @@ -855,17 +999,24 @@ "@scure/base@1.1.6" ] }, + "@scure/bip39@1.5.4": { + "integrity": "sha512-TFM4ni0vKvCfBpohoh+/lY05i9gRbSwXWngAsF4CABQxoaOHijxuaZ2R6cStDQ5CHtHO9aGJTr4ksVJASRRyMA==", + "dependencies": [ + "@noble/hashes@1.7.1", + "@scure/base@1.2.4" + ] + }, "@types/dompurify@3.0.5": { "integrity": "sha512-1Wg0g3BtQF7sSb27fJQAKck1HECM6zV1EB66j8JH9i3LCjYabJa0FSdiSgsD5K/RbrsR0SiraKacLB+T8ZVYAg==", "dependencies": [ "@types/trusted-types" ] }, - "@types/node@17.0.45": { - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==" - }, - "@types/node@18.16.19": { - "integrity": "sha512-IXl7o+R9iti9eBW4Wg2hx1xQDig183jj7YLn8F7udNceyfkbn1ZxmzZXuak20gR40D7pIkIY1kYGx5VIGbaHKA==" + "@types/node@22.5.4": { + "integrity": 
"sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==", + "dependencies": [ + "undici-types" + ] }, "@types/trusted-types@2.0.7": { "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==" @@ -891,6 +1042,9 @@ "asynckit@0.4.0": { "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, + "base64-js@1.5.1": { + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" + }, "bintrees@1.0.2": { "integrity": "sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==" }, @@ -903,6 +1057,13 @@ "fill-range" ] }, + "buffer@6.0.3": { + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "dependencies": [ + "base64-js", + "ieee754" + ] + }, "chalk@5.3.0": { "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==" }, @@ -1141,6 +1302,9 @@ "safer-buffer" ] }, + "ieee754@1.2.1": { + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" + }, "image-size@1.1.1": { "integrity": "sha512-541xKlUw6jr/6gGuk92F+mYM5zaFAc5ahphvkqvNe2bQ6gVBkd6bfrmVJ2t4KDAfikAYZyIqTnktX3i6/aQDrQ==", "dependencies": [ @@ -1174,8 +1338,8 @@ "isexe@2.0.0": { "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, - "iso-639-1@2.1.15": { - "integrity": "sha512-7c7mBznZu2ktfvyT582E2msM+Udc1EjOyhVRE/0ZsjD9LBtWSm23h3PtiRh2a35XoUsTQQjJXaJzuLjXsOdFDg==" + "iso-639-1@3.1.5": { + "integrity": "sha512-gXkz5+KN7HrG0Q5UGqSMO2qB9AsbEeyLP54kF1YrMsIxmu+g4BdB7rflReZTSTZGpfj8wywu6pfPBCylPIzGQA==" }, "isomorphic-dompurify@2.16.0": { "integrity": "sha512-cXhX2owp8rPxafCr0ywqy2CGI/4ceLNgWkWBEvUz64KTbtg3oRL2ZRqq/zW0pzt4YtDjkHLbwcp/lozpKzAQjg==", @@ -1333,6 
+1497,18 @@ "whatwg-url@5.0.0" ] }, + "nostr-tools@2.10.4": { + "integrity": "sha512-biU7sk+jxHgVASfobg2T5ttxOGGSt69wEVBC51sHHOEaKAAdzHBLV/I2l9Rf61UzClhliZwNouYhqIso4a3HYg==", + "dependencies": [ + "@noble/ciphers", + "@noble/curves@1.2.0", + "@noble/hashes@1.3.1", + "@scure/base@1.1.1", + "@scure/bip32@1.3.1", + "@scure/bip39@1.2.1", + "nostr-wasm" + ] + }, "nostr-tools@2.5.1": { "integrity": "sha512-bpkhGGAhdiCN0irfV+xoH3YP5CQeOXyXzUq7SYeM6D56xwTXZCPEmBlUGqFVfQidvRsoVeVxeAiOXW2c2HxoRQ==", "dependencies": [ @@ -1402,14 +1578,6 @@ "pidtree@0.6.0": { "integrity": "sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==" }, - "png-to-ico@2.1.8": { - "integrity": "sha512-Nf+IIn/cZ/DIZVdGveJp86NG5uNib1ZXMiDd/8x32HCTeKSvgpyg6D/6tUBn1QO/zybzoMK0/mc3QRgAyXdv9w==", - "dependencies": [ - "@types/node@17.0.45", - "minimist", - "pngjs" - ] - }, "pngjs@6.0.0": { "integrity": "sha512-TRzzuFRRmEoSW/p1KVAmiOgPco2Irlah+bGFCeNfJXxxYGwSw7YwAOAcd7X28K/m5bjBWKsC29KyoMfHbypayg==" }, @@ -1611,6 +1779,9 @@ "type-fest@4.18.2": { "integrity": "sha512-+suCYpfJLAe4OXS6+PPXjW3urOS4IoP9waSiLuXfLgqZODKw/aWwASvzqE886wA0kQgGy0mIWyhd87VpqIy6Xg==" }, + "undici-types@6.19.8": { + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + }, "unfurl.js@6.4.0": { "integrity": "sha512-DogJFWPkOWMcu2xPdpmbcsL+diOOJInD3/jXOv6saX1upnWmMK8ndAtDWUfJkuInqNI9yzADud4ID9T+9UeWCw==", "dependencies": [ @@ -1636,6 +1807,9 @@ "websocket-ts@2.1.5": { "integrity": "sha512-rCNl9w6Hsir1azFm/pbjBEFzLD/gi7Th5ZgOxMifB6STUfTSovYAzryWw0TRvSZ1+Qu1Z5Plw4z42UfTNA9idA==" }, + "websocket-ts@2.2.1": { + "integrity": "sha512-YKPDfxlK5qOheLZ2bTIiktZO1bpfGdNCPJmTEaPW7G9UXI1GKjDdeacOrsULUS000OPNxDVOyAuKLuIWPqWM0Q==" + }, "whatwg-encoding@3.1.1": { "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", "dependencies": [ @@ -2302,17 +2476,18 @@ "dependencies": [ 
"jsr:@b-fuze/deno-dom@~0.1.47", "jsr:@bradenmacdonald/s3-lite-client@~0.7.4", + "jsr:@core/asyncutil@^1.2.0", "jsr:@esroyo/scoped-performance@^3.1.0", "jsr:@gfx/canvas-wasm@~0.4.2", "jsr:@hono/hono@^4.4.6", - "jsr:@lambdalisue/async@^2.1.1", "jsr:@negrel/webpush@0.3", - "jsr:@nostrify/db@~0.36.1", - "jsr:@nostrify/nostrify@0.36", - "jsr:@nostrify/policies@0.35", + "jsr:@nostrify/db@~0.39.4", + "jsr:@nostrify/nostrify@~0.39.1", + "jsr:@nostrify/policies@~0.36.1", + "jsr:@nostrify/types@0.36", "jsr:@soapbox/kysely-pglite@1", + "jsr:@soapbox/logi@0.3", "jsr:@soapbox/safe-fetch@2", - "jsr:@soapbox/stickynotes@0.4", "jsr:@std/assert@~0.225.1", "jsr:@std/cli@0.223", "jsr:@std/crypto@0.224", @@ -2321,6 +2496,8 @@ "jsr:@std/json@0.223", "jsr:@std/media-types@~0.224.1", "jsr:@std/streams@0.223", + "jsr:@std/testing@^1.0.9", + "npm:@cashu/cashu-ts@^2.2.0", "npm:@electric-sql/pglite@~0.2.8", "npm:@isaacs/ttlcache@^1.4.1", "npm:@noble/secp256k1@2", @@ -2333,7 +2510,7 @@ "npm:fast-stable-stringify@1", "npm:formdata-helper@0.3", "npm:hono-rate-limiter@0.3", - "npm:iso-639-1@2.1.15", + "npm:iso-639-1@^3.1.5", "npm:isomorphic-dompurify@^2.16.0", "npm:kysely-postgres-js@2.0.0", "npm:kysely@~0.27.4", @@ -2346,7 +2523,6 @@ "npm:nostr-tools@2.5.1", "npm:nostr-wasm@0.1", "npm:path-to-regexp@^7.1.0", - "npm:png-to-ico@^2.1.8", "npm:prom-client@^15.1.2", "npm:sharp@~0.33.5", "npm:tldts@^6.0.14", diff --git a/docs/auth.md b/docs/auth.md deleted file mode 100644 index 119a7dc2..00000000 --- a/docs/auth.md +++ /dev/null @@ -1,23 +0,0 @@ -# Authentication in Ditto - -One of the main benefits of Nostr is that users control their keys. Instead of a username and password, the user has a public key (`npub` or `pubkey`) and private key (`nsec`). The public key is a globally-unique identifier for the user, and the private key can be used to sign events, producing a signature that only the pubkey could have produced. - -With keys, users have full control over their identity. 
They can move between servers freely, and post to multiple servers at once. But with such power comes great responsibilities. Users cannot lose control of their key, or they'll lose control over their account forever. - -## Managing Keys - -There are several ways to manage keys in Nostr, and they all come with trade-offs. It's new territory, and people are still coming up with new ideas. - -The main concerns are how to **conveniently log in on multiple devices**, and **who/what to trust with your key.** - -### Current Solutions - -1. **Private key text.** Users copy their key between devices/apps, giving apps full control over their key. Users might email the key to themselves, or better yet use a password manager, or apps might even provide a QR code for other apps to scan. This method is convenient, but it's not secure. Keys can get compromised in transit, or by a malicious or vulnerable app. - -2. **Browser extension.** For web clients, an extension can expose `getPublicKey` and `signEvent` functions to web-pages without exposing the private key directly. This option is secure, but it only works well for laptop/desktop devices. On mobile, only FireFox can do it, with no support from Safari or Chrome. It also offers no way to share a key across devices on its own. - -3. **Remote signer**. Users can run a remote signer program and then connect apps to it. The signer should be running 24/7, so it's best suited for running on a server. This idea has evolved into the creation of "bunker" services. Bunkers allow users to have a traditional username and password and login from anywhere. This method solves a lot of problems, but it also creates some problems. Users have to create an account on a separate website before they can log into your website. This makes it an option for more advanced users. Also, it's concerning that the administrator of the bunker server has full control over your keys. 
None of this is a problem if you run your own remote signer, but it's not a mainstream option. - -4. **Custodial**. Apps which make you log you in with a username/password, and then keep Nostr keys for each user in their database. You might not even be able to export your keys. This option may be easier for users at first, but it puts a whole lot of liability on the server, since leaks can cause permanent damage. It also gives up a lot of the benefits of Nostr. - -Each of these ideas could be improved upon greatly with new experiments and technical progress. But to Ditto, user freedom matters the most, so we're focusing on non-custodial solution. Even though there are security risks to copying around keys, the onus is on the user. The user may fall victim to a targeted attack (or make a stupid mistake), whereas custodial servers have the ability to wipe out entire demographics of users at once. Therefore we believe that custodial solutions are actually _less_ secure than users copying around keys. Users must take precautions about which apps to trust with their private key until we improve upon the area to make it more secure (likely with better support of browser extensions, OS key management, and more). diff --git a/docs/debugging.md b/docs/debugging.md deleted file mode 100644 index 879f36cd..00000000 --- a/docs/debugging.md +++ /dev/null @@ -1,27 +0,0 @@ -# Debugging Ditto - -Running the command `deno task debug` will start the Ditto server in debug mode, making it possible to inspect with Chromium-based browsers by visiting `chrome://inspect`. - -From there, go to the "Performance" tab and click "Start profiling". Perform the actions you want to profile, then click "Stop profiling". You can then inspect the call stack and see where the time is being spent. - -## Remote debugging - -If the Ditto server is on a separate machine, you will first need to put it into debug mode. 
Edit its systemd file (usually located at `/etc/systemd/system/ditto.service`) and change `deno task start` to `deno task debug` in the `ExecStart` line. Then run `systemctl daemon-reload` and `systemctl restart ditto`. - -To access the debugger remotely, you can use SSH port forwarding. Run this command on your local machine, replacing `@` with the SSH login for the remote machine: - -```sh -ssh -L 9229:localhost:9229 @ -``` - -Then, in Chromium, go to `chrome://inspect` and the Ditto server should be available. - -## SQL performance - -To track slow queries, first set `DEBUG=ditto:sql` in the environment so only SQL logs are shown. - -Then, grep for any logs above 0.001s: - -```sh -journalctl -fu ditto | grep -v '(0.00s)' -``` diff --git a/docs/installation.md b/docs/installation.md deleted file mode 100644 index 9077a7cb..00000000 --- a/docs/installation.md +++ /dev/null @@ -1,15 +0,0 @@ -# Installing Ditto - -First, install Deno: - -```sh -curl -fsSL https://deno.land/x/install/install.sh | sudo DENO_INSTALL=/usr/local sh -``` - -Now, run Ditto: - -```sh -deno run -A https://gitlab.com/soapbox-pub/ditto/-/raw/main/src/server.ts -``` - -That's it! Ditto is now running on your machine. diff --git a/docs/mastodon-api.md b/docs/mastodon-api.md deleted file mode 100644 index 48684b6f..00000000 --- a/docs/mastodon-api.md +++ /dev/null @@ -1,9 +0,0 @@ -# Mastodon API - -Ditto implements Mastodon's client-server API, a REST API used by Mastodon mobile apps and frontends to interact with Mastodon servers. While it was originally designed for Mastodon, it has been adopted by other ActivityPub servers such as Pleroma, Mitra, Friendica, and many others. - -Note that Mastodon API is **not** ActivityPub. It is not the API used to federate between servers. Instead, it enables user interfaces, mobile apps, bots, and other clients to interact with Mastodon servers. - -Mastodon is built in Ruby on Rails, and its API is inspired by Twitter's legacy REST API. 
Rails, being an MVC framework, has "models", which it maps directly to "Entities" in its API. - -Endpoints return either a single Entity, or an array of Entities. Entities Entities are JSON objects with a specific structure, and are documented in the [Mastodon API documentation](https://docs.joinmastodon.org/api/). diff --git a/installation/Caddyfile b/installation/Caddyfile new file mode 100644 index 00000000..191031d4 --- /dev/null +++ b/installation/Caddyfile @@ -0,0 +1,34 @@ +# Cloudflare real IP configuration for rate-limiting +# { +# servers { +# # https://www.cloudflare.com/ips/ +# trusted_proxies static 173.245.48.0/20 103.21.244.0/22 103.22.200.0/22 103.31.4.0/22 141.101.64.0/18 108.162.192.0/18 190.93.240.0/20 188.114.96.0/20 197.234.240.0/22 198.41.128.0/17 162.158.0.0/15 104.16.0.0/13 104.24.0.0/14 172.64.0.0/13 131.0.72.0/22 2400:cb00::/32 2606:4700::/32 2803:f800::/32 2405:b500::/32 2405:8100::/32 2a06:98c0::/29 2c0f:f248::/32 +# trusted_proxies_strict +# } +# } + +example.com { + log + request_header X-Real-IP {client_ip} + + @public path /packs/* /instance/* /images/* /favicon.ico /sw.js /sw.js.map + + handle /packs/* { + root * /opt/ditto/public + header Cache-Control "max-age=31536000, public, immutable" + file_server + } + + handle @public { + root * /opt/ditto/public + file_server + } + + handle /metrics { + respond "Access denied" 403 + } + + handle { + reverse_proxy :4036 + } +} \ No newline at end of file diff --git a/installation/ditto.service b/installation/ditto.service index eb6b3425..0423b0fa 100644 --- a/installation/ditto.service +++ b/installation/ditto.service @@ -6,6 +6,7 @@ After=network-online.target [Service] Type=simple User=ditto +SyslogIdentifier=ditto WorkingDirectory=/opt/ditto ExecStart=/usr/local/bin/deno task start Restart=on-failure diff --git a/packages/conf/DittoConf.test.ts b/packages/conf/DittoConf.test.ts new file mode 100644 index 00000000..b6c2b707 --- /dev/null +++ b/packages/conf/DittoConf.test.ts @@ -0,0 +1,31 
@@ +import { assertEquals, assertThrows } from '@std/assert'; + +import { DittoConf } from './DittoConf.ts'; + +Deno.test('DittoConfig', async (t) => { + const env = new Map([ + ['DITTO_NSEC', 'nsec19shyxpuzd0cq2p5078fwnws7tyykypud6z205fzhlmlrs2vpz6hs83zwkw'], + ]); + + const config = new DittoConf(env); + + await t.step('signer', async () => { + assertEquals( + await config.signer.getPublicKey(), + '1ba0c5ed1bbbf3b7eb0d7843ba16836a0201ea68a76bafcba507358c45911ff6', + ); + }); +}); + +Deno.test('DittoConfig defaults', async (t) => { + const env = new Map(); + const config = new DittoConf(env); + + await t.step('signer throws', () => { + assertThrows(() => config.signer); + }); + + await t.step('port', () => { + assertEquals(config.port, 4036); + }); +}); diff --git a/packages/conf/DittoConf.ts b/packages/conf/DittoConf.ts new file mode 100644 index 00000000..b7f5be79 --- /dev/null +++ b/packages/conf/DittoConf.ts @@ -0,0 +1,468 @@ +import os from 'node:os'; +import path from 'node:path'; + +import { NSecSigner } from '@nostrify/nostrify'; +import { decodeBase64 } from '@std/encoding/base64'; +import { encodeBase64Url } from '@std/encoding/base64url'; +import ISO6391, { type LanguageCode } from 'iso-639-1'; +import { nip19 } from 'nostr-tools'; + +import { getEcdsaPublicKey } from './utils/crypto.ts'; +import { optionalBooleanSchema, optionalNumberSchema } from './utils/schema.ts'; +import { mergeURLPath } from './utils/url.ts'; + +/** Ditto application-wide configuration. */ +export class DittoConf { + constructor(private env: { get(key: string): string | undefined }) {} + + /** Cached parsed admin signer. */ + private _signer: NSecSigner | undefined; + + /** Cached parsed VAPID public key value. */ + private _vapidPublicKey: Promise | undefined; + + /** + * Ditto admin secret key in hex format. + * @deprecated Use `signer` instead. TODO: handle auth tokens. 
+ */ + get seckey(): Uint8Array { + const nsec = this.env.get('DITTO_NSEC'); + + if (!nsec) { + throw new Error('Missing DITTO_NSEC'); + } + + if (!nsec.startsWith('nsec1')) { + throw new Error('Invalid DITTO_NSEC'); + } + + return nip19.decode(nsec as `nsec1${string}`).data; + } + + /** Ditto admin signer. */ + get signer(): NSecSigner { + if (!this._signer) { + this._signer = new NSecSigner(this.seckey); + } + return this._signer; + } + + /** Port to use when serving the HTTP server. */ + get port(): number { + return parseInt(this.env.get('PORT') || '4036'); + } + + /** IP addresses not affected by rate limiting. */ + get ipWhitelist(): string[] { + return this.env.get('IP_WHITELIST')?.split(',') || []; + } + + /** Relay URL to the Ditto server's relay. */ + get relay(): `wss://${string}` | `ws://${string}` { + const { protocol, host } = this.url; + return `${protocol === 'https:' ? 'wss:' : 'ws:'}//${host}/relay`; + } + + /** Relay to use for NIP-50 `search` queries. */ + get searchRelay(): string | undefined { + return this.env.get('SEARCH_RELAY'); + } + + /** Origin of the Ditto server, including the protocol and port. */ + get localDomain(): string { + return this.env.get('LOCAL_DOMAIN') || `http://localhost:${this.port}`; + } + + /** Link to an external nostr viewer. */ + get externalDomain(): string { + return this.env.get('NOSTR_EXTERNAL') || 'https://njump.me'; + } + + /** Get a link to a nip19-encoded entity in the configured external viewer. */ + external(path: string): string { + return new URL(path, this.externalDomain).toString(); + } + + /** + * Heroku-style database URL. This is used in production to connect to the + * database. + * + * Follows the format: + * + * ```txt + * protocol://username:password@host:port/database_name + * ``` + */ + get databaseUrl(): string { + return this.env.get('DATABASE_URL') ?? 'file://data/pgdata'; + } + + /** PGlite debug level. 0 disables logging. 
*/ + get pgliteDebug(): 0 | 1 | 2 | 3 | 4 | 5 { + return Number(this.env.get('PGLITE_DEBUG') || 0) as 0 | 1 | 2 | 3 | 4 | 5; + } + + get vapidPublicKey(): Promise<string | undefined> { + if (!this._vapidPublicKey) { + this._vapidPublicKey = (async () => { + const keys = await this.vapidKeys; + if (keys) { + const { publicKey } = keys; + const bytes = await crypto.subtle.exportKey('raw', publicKey); + return encodeBase64Url(bytes); + } + })(); + } + + return this._vapidPublicKey; + } + + get vapidKeys(): Promise<CryptoKeyPair | undefined> { + return (async () => { + const encoded = this.env.get('VAPID_PRIVATE_KEY'); + + if (!encoded) { + return; + } + + const keyData = decodeBase64(encoded); + + const privateKey = await crypto.subtle.importKey( + 'pkcs8', + keyData, + { name: 'ECDSA', namedCurve: 'P-256' }, + true, + ['sign'], + ); + const publicKey = await getEcdsaPublicKey(privateKey, true); + + return { privateKey, publicKey }; + })(); + } + + get db(): { timeouts: { default: number; relay: number; timelines: number } } { + const env = this.env; + return { + /** Database query timeout configurations. */ + timeouts: { + /** Default query timeout when another setting isn't more specific. */ + get default(): number { + return Number(env.get('DB_TIMEOUT_DEFAULT') || 5_000); + }, + /** Timeout used for queries made through the Nostr relay. */ + get relay(): number { + return Number(env.get('DB_TIMEOUT_RELAY') || 1_000); + }, + /** Timeout used for timelines such as home, notifications, hashtag, etc. */ + get timelines(): number { + return Number(env.get('DB_TIMEOUT_TIMELINES') || 15_000); + }, + }, + }; + } + + /** Time-to-live for captchas in milliseconds. */ + get captchaTTL(): number { + return Number(this.env.get('CAPTCHA_TTL') || 5 * 60 * 1000); + } + + /** Character limit to enforce for posts made through Mastodon API. */ + get postCharLimit(): number { + return Number(this.env.get('POST_CHAR_LIMIT') || 5000); + } + + /** S3 media storage configuration. 
*/ + get s3(): { + endPoint?: string; + region?: string; + accessKey?: string; + secretKey?: string; + bucket?: string; + pathStyle?: boolean; + port?: number; + sessionToken?: string; + useSSL?: boolean; + } { + const env = this.env; + + return { + get endPoint(): string | undefined { + return env.get('S3_ENDPOINT'); + }, + get region(): string | undefined { + return env.get('S3_REGION'); + }, + get accessKey(): string | undefined { + return env.get('S3_ACCESS_KEY'); + }, + get secretKey(): string | undefined { + return env.get('S3_SECRET_KEY'); + }, + get bucket(): string | undefined { + return env.get('S3_BUCKET'); + }, + get pathStyle(): boolean | undefined { + return optionalBooleanSchema.parse(env.get('S3_PATH_STYLE')); + }, + get port(): number | undefined { + return optionalNumberSchema.parse(env.get('S3_PORT')); + }, + get sessionToken(): string | undefined { + return env.get('S3_SESSION_TOKEN'); + }, + get useSSL(): boolean | undefined { + return optionalBooleanSchema.parse(env.get('S3_USE_SSL')); + }, + }; + } + + /** IPFS uploader configuration. */ + get ipfs(): { apiUrl: string } { + const env = this.env; + + return { + /** Base URL for private IPFS API calls. */ + get apiUrl(): string { + return env.get('IPFS_API_URL') || 'http://localhost:5001'; + }, + }; + } + + /** nostr.build API endpoint when the `nostrbuild` uploader is used. */ + get nostrbuildEndpoint(): string { + return this.env.get('NOSTRBUILD_ENDPOINT') || 'https://nostr.build/api/v2/upload/files'; + } + + /** Default Blossom servers to use when the `blossom` uploader is set. */ + get blossomServers(): string[] { + return this.env.get('BLOSSOM_SERVERS')?.split(',') || ['https://blossom.primal.net/']; + } + + /** Module to upload files with. */ + get uploader(): string | undefined { + return this.env.get('DITTO_UPLOADER'); + } + + /** Location to use for local uploads. 
*/ + get uploadsDir(): string { + return this.env.get('UPLOADS_DIR') || 'data/uploads'; + } + + /** Media base URL for uploads. */ + get mediaDomain(): string { + const value = this.env.get('MEDIA_DOMAIN'); + + if (!value) { + const url = this.url; + url.host = `media.${url.host}`; + return url.toString(); + } + + return value; + } + + /** + * Whether to analyze media metadata with [blurhash](https://www.npmjs.com/package/blurhash) and [sharp](https://www.npmjs.com/package/sharp). + * This is prone to security vulnerabilities, which is why it's not enabled by default. + */ + get mediaAnalyze(): boolean { + return optionalBooleanSchema.parse(this.env.get('MEDIA_ANALYZE')) ?? false; + } + + /** Max upload size for files in number of bytes. Default 100MiB. */ + get maxUploadSize(): number { + return Number(this.env.get('MAX_UPLOAD_SIZE') || 100 * 1024 * 1024); + } + + /** Usernames that regular users cannot sign up with. */ + get forbiddenUsernames(): string[] { + return this.env.get('FORBIDDEN_USERNAMES')?.split(',') || [ + '_', + 'admin', + 'administrator', + 'root', + 'sysadmin', + 'system', + ]; + } + + /** Domain of the Ditto server as a `URL` object, for easily grabbing the `hostname`, etc. */ + get url(): URL { + return new URL(this.localDomain); + } + + /** Merges the path with the localDomain. */ + local(path: string): string { + return mergeURLPath(this.localDomain, path); + } + + /** URL to send Sentry errors to. */ + get sentryDsn(): string | undefined { + return this.env.get('SENTRY_DSN'); + } + + /** Postgres settings. */ + get pg(): { poolSize: number } { + const env = this.env; + + return { + /** Number of connections to use in the pool. */ + get poolSize(): number { + return Number(env.get('PG_POOL_SIZE') ?? 20); + }, + }; + } + + /** Whether to enable requesting events from known relays. */ + get firehoseEnabled(): boolean { + return optionalBooleanSchema.parse(this.env.get('FIREHOSE_ENABLED')) ?? 
true; + } + + /** Number of events the firehose is allowed to process at one time before they have to wait in a queue. */ + get firehoseConcurrency(): number { + return Math.ceil(Number(this.env.get('FIREHOSE_CONCURRENCY') ?? 1)); + } + + /** Nostr event kinds of events to listen for on the firehose. */ + get firehoseKinds(): number[] { + return (this.env.get('FIREHOSE_KINDS') ?? '0, 1, 3, 5, 6, 7, 20, 9735, 10002') + .split(/[, ]+/g) + .map(Number); + } + + /** + * Whether Ditto should subscribe to Nostr events from the Postgres database itself. + * This would make Nostr events inserted directly into Postgres available to the streaming API and relay. + */ + get notifyEnabled(): boolean { + return optionalBooleanSchema.parse(this.env.get('NOTIFY_ENABLED')) ?? true; + } + + /** Whether to enable Ditto cron jobs. */ + get cronEnabled(): boolean { + return optionalBooleanSchema.parse(this.env.get('CRON_ENABLED')) ?? true; + } + + /** User-Agent to use when fetching link previews. Pretend to be Facebook by default. */ + get fetchUserAgent(): string { + return this.env.get('DITTO_FETCH_USER_AGENT') ?? 'facebookexternalhit'; + } + + /** Path to the custom policy module. Must be an absolute path, https:, npm:, or jsr: URI. */ + get policy(): string { + return this.env.get('DITTO_POLICY') || path.join(this.dataDir, 'policy.ts'); + } + + /** Absolute path to the data directory used by Ditto. */ + get dataDir(): string { + return this.env.get('DITTO_DATA_DIR') || path.join(Deno.cwd(), 'data'); + } + + /** Absolute path of the Deno directory. */ + get denoDir(): string { + return this.env.get('DENO_DIR') || `${os.userInfo().homedir}/.cache/deno`; + } + + /** Whether zap splits should be enabled. */ + get zapSplitsEnabled(): boolean { + return optionalBooleanSchema.parse(this.env.get('ZAP_SPLITS_ENABLED')) ?? false; + } + + /** Languages this server wishes to highlight. 
Used when querying trends.*/ + get preferredLanguages(): LanguageCode[] | undefined { + return this.env.get('DITTO_LANGUAGES')?.split(',')?.filter(ISO6391.validate); + } + + /** Mints to be displayed in the UI when the user decides to create a wallet.*/ + get cashuMints(): string[] { + return this.env.get('CASHU_MINTS')?.split(',') ?? []; + } + + /** Translation provider used to translate posts. */ + get translationProvider(): string | undefined { + return this.env.get('TRANSLATION_PROVIDER'); + } + + /** DeepL URL endpoint. */ + get deeplBaseUrl(): string | undefined { + return this.env.get('DEEPL_BASE_URL'); + } + + /** DeepL API KEY. */ + get deeplApiKey(): string | undefined { + return this.env.get('DEEPL_API_KEY'); + } + + /** LibreTranslate URL endpoint. */ + get libretranslateBaseUrl(): string | undefined { + return this.env.get('LIBRETRANSLATE_BASE_URL'); + } + + /** LibreTranslate API KEY. */ + get libretranslateApiKey(): string | undefined { + return this.env.get('LIBRETRANSLATE_API_KEY'); + } + + /** Cache settings. */ + get caches(): { + nip05: { max: number; ttl: number }; + favicon: { max: number; ttl: number }; + linkPreview: { max: number; ttl: number }; + translation: { max: number; ttl: number }; + } { + const env = this.env; + + return { + /** NIP-05 cache settings. */ + get nip05(): { max: number; ttl: number } { + return { + max: Number(env.get('DITTO_CACHE_NIP05_MAX') || 3000), + ttl: Number(env.get('DITTO_CACHE_NIP05_TTL') || 1 * 60 * 60 * 1000), + }; + }, + /** Favicon cache settings. */ + get favicon(): { max: number; ttl: number } { + return { + max: Number(env.get('DITTO_CACHE_FAVICON_MAX') || 500), + ttl: Number(env.get('DITTO_CACHE_FAVICON_TTL') || 1 * 60 * 60 * 1000), + }; + }, + /** Link preview cache settings. 
*/ + get linkPreview(): { max: number; ttl: number } { + return { + max: Number(env.get('DITTO_CACHE_LINK_PREVIEW_MAX') || 3000), + ttl: Number(env.get('DITTO_CACHE_LINK_PREVIEW_TTL') || 12 * 60 * 60 * 1000), + }; + }, + /** Translation cache settings. */ + get translation(): { max: number; ttl: number } { + return { + max: Number(env.get('DITTO_CACHE_TRANSLATION_MAX') || 1000), + ttl: Number(env.get('DITTO_CACHE_TRANSLATION_TTL') || 6 * 60 * 60 * 1000), + }; + }, + }; + } + + /** Custom profile fields configuration. */ + get profileFields(): { maxFields: number; nameLength: number; valueLength: number } { + const env = this.env; + + return { + get maxFields(): number { + return Number(env.get('PROFILE_FIELDS_MAX_FIELDS') || 10); + }, + get nameLength(): number { + return Number(env.get('PROFILE_FIELDS_NAME_LENGTH') || 255); + }, + get valueLength(): number { + return Number(env.get('PROFILE_FIELDS_VALUE_LENGTH') || 2047); + }, + }; + } + + /** Maximum time between events before a streak is broken, *in seconds*. 
*/ + get streakWindow(): number { + return Number(this.env.get('STREAK_WINDOW') || 129600); + } +} diff --git a/packages/conf/deno.json b/packages/conf/deno.json new file mode 100644 index 00000000..7ba0a49a --- /dev/null +++ b/packages/conf/deno.json @@ -0,0 +1,7 @@ +{ + "name": "@ditto/conf", + "version": "1.1.0", + "exports": { + ".": "./mod.ts" + } +} diff --git a/packages/conf/mod.ts b/packages/conf/mod.ts new file mode 100644 index 00000000..4d7ef2b7 --- /dev/null +++ b/packages/conf/mod.ts @@ -0,0 +1 @@ +export { DittoConf } from './DittoConf.ts'; diff --git a/src/utils/crypto.test.ts b/packages/conf/utils/crypto.test.ts similarity index 92% rename from src/utils/crypto.test.ts rename to packages/conf/utils/crypto.test.ts index d2b444a1..b3f758eb 100644 --- a/src/utils/crypto.test.ts +++ b/packages/conf/utils/crypto.test.ts @@ -1,6 +1,6 @@ import { assertEquals } from '@std/assert'; -import { getEcdsaPublicKey } from '@/utils/crypto.ts'; +import { getEcdsaPublicKey } from './crypto.ts'; Deno.test('getEcdsaPublicKey', async () => { const { publicKey, privateKey } = await crypto.subtle.generateKey( diff --git a/src/utils/crypto.ts b/packages/conf/utils/crypto.ts similarity index 100% rename from src/utils/crypto.ts rename to packages/conf/utils/crypto.ts diff --git a/packages/conf/utils/schema.test.ts b/packages/conf/utils/schema.test.ts new file mode 100644 index 00000000..9a52efe0 --- /dev/null +++ b/packages/conf/utils/schema.test.ts @@ -0,0 +1,17 @@ +import { assertEquals, assertThrows } from '@std/assert'; + +import { optionalBooleanSchema, optionalNumberSchema } from './schema.ts'; + +Deno.test('optionalBooleanSchema', () => { + assertEquals(optionalBooleanSchema.parse('true'), true); + assertEquals(optionalBooleanSchema.parse('false'), false); + assertEquals(optionalBooleanSchema.parse(undefined), undefined); + + assertThrows(() => optionalBooleanSchema.parse('invalid')); +}); + +Deno.test('optionalNumberSchema', () => { + 
assertEquals(optionalNumberSchema.parse('123'), 123); + assertEquals(optionalNumberSchema.parse('invalid'), NaN); // maybe this should throw? + assertEquals(optionalNumberSchema.parse(undefined), undefined); +}); diff --git a/packages/conf/utils/schema.ts b/packages/conf/utils/schema.ts new file mode 100644 index 00000000..dcd1f85e --- /dev/null +++ b/packages/conf/utils/schema.ts @@ -0,0 +1,11 @@ +import { z } from 'zod'; + +export const optionalBooleanSchema = z + .enum(['true', 'false']) + .optional() + .transform((value) => value !== undefined ? value === 'true' : undefined); + +export const optionalNumberSchema = z + .string() + .optional() + .transform((value) => value !== undefined ? Number(value) : undefined); diff --git a/packages/conf/utils/url.test.ts b/packages/conf/utils/url.test.ts new file mode 100644 index 00000000..1da9773c --- /dev/null +++ b/packages/conf/utils/url.test.ts @@ -0,0 +1,9 @@ +import { assertEquals } from '@std/assert'; + +import { mergeURLPath } from './url.ts'; + +Deno.test('mergeURLPath', () => { + assertEquals(mergeURLPath('https://mario.com', '/path'), 'https://mario.com/path'); + assertEquals(mergeURLPath('https://mario.com', 'https://luigi.com/path'), 'https://mario.com/path'); + assertEquals(mergeURLPath('https://mario.com', 'https://luigi.com/path?q=1'), 'https://mario.com/path?q=1'); +}); diff --git a/packages/conf/utils/url.ts b/packages/conf/utils/url.ts new file mode 100644 index 00000000..f7287bab --- /dev/null +++ b/packages/conf/utils/url.ts @@ -0,0 +1,23 @@ +/** + * Produce a URL whose origin is guaranteed to be the same as the base URL. + * The path is either an absolute path (starting with `/`), or a full URL. In either case, only its path is used. + */ +export function mergeURLPath( + /** Base URL. Result is guaranteed to use this URL's origin. */ + base: string, + /** Either an absolute path (starting with `/`), or a full URL. 
If a full URL, its path */ + path: string, +): string { + const url = new URL( + path.startsWith('/') ? path : new URL(path).pathname, + base, + ); + + if (!path.startsWith('/')) { + // Copy query parameters from the original URL to the new URL + const originalUrl = new URL(path); + url.search = originalUrl.search; + } + + return url.toString(); +} diff --git a/src/db/DittoDatabase.ts b/packages/db/DittoDB.ts similarity index 54% rename from src/db/DittoDatabase.ts rename to packages/db/DittoDB.ts index 3979cd12..0afbddfd 100644 --- a/src/db/DittoDatabase.ts +++ b/packages/db/DittoDB.ts @@ -1,15 +1,16 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -import { DittoTables } from '@/db/DittoTables.ts'; +import type { DittoTables } from './DittoTables.ts'; -export interface DittoDatabase { +export interface DittoDB extends AsyncDisposable { readonly kysely: Kysely; readonly poolSize: number; readonly availableConnections: number; + migrate(): Promise; listen(channel: string, callback: (payload: string) => void): void; } -export interface DittoDatabaseOpts { +export interface DittoDBOpts { poolSize?: number; debug?: 0 | 1 | 2 | 3 | 4 | 5; } diff --git a/packages/db/DittoPgMigrator.ts b/packages/db/DittoPgMigrator.ts new file mode 100644 index 00000000..45407fe4 --- /dev/null +++ b/packages/db/DittoPgMigrator.ts @@ -0,0 +1,52 @@ +import fs from 'node:fs/promises'; +import path from 'node:path'; + +import { logi } from '@soapbox/logi'; +import { FileMigrationProvider, type Kysely, Migrator } from 'kysely'; + +import type { JsonValue } from '@std/json'; + +export class DittoPgMigrator { + private migrator: Migrator; + + // deno-lint-ignore no-explicit-any + constructor(private kysely: Kysely) { + this.migrator = new Migrator({ + db: this.kysely, + provider: new FileMigrationProvider({ + fs, + path, + migrationFolder: new URL(import.meta.resolve('./migrations')).pathname, + }), + }); + } + + async migrate(): Promise { + logi({ level: 'info', ns: 
'ditto.db.migration', msg: 'Running migrations...', state: 'started' }); + const { results, error } = await this.migrator.migrateToLatest(); + + if (error) { + logi({ + level: 'fatal', + ns: 'ditto.db.migration', + msg: 'Migration failed.', + state: 'failed', + results: results as unknown as JsonValue, + error: error instanceof Error ? error : null, + }); + throw new Error('Migration failed.'); + } else { + if (!results?.length) { + logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Everything up-to-date.', state: 'skipped' }); + } else { + logi({ + level: 'info', + ns: 'ditto.db.migration', + msg: 'Migrations finished!', + state: 'migrated', + results: results as unknown as JsonValue, + }); + } + } + } +} diff --git a/src/db/DittoTables.ts b/packages/db/DittoTables.ts similarity index 73% rename from src/db/DittoTables.ts rename to packages/db/DittoTables.ts index ec21170e..92226a84 100644 --- a/src/db/DittoTables.ts +++ b/packages/db/DittoTables.ts @@ -1,27 +1,29 @@ -import { Generated } from 'kysely'; - -import { NPostgresSchema } from '@nostrify/db'; +import type { NPostgresSchema } from '@nostrify/db'; +import type { Generated } from 'kysely'; export interface DittoTables extends NPostgresSchema { - nostr_events: NostrEventsRow; auth_tokens: AuthTokenRow; author_stats: AuthorStatsRow; + domain_favicons: DomainFaviconRow; event_stats: EventStatsRow; - pubkey_domains: PubkeyDomainRow; event_zaps: EventZapRow; push_subscriptions: PushSubscriptionRow; + /** This is a materialized view of `author_stats` pre-sorted by followers_count. 
*/ + top_authors: Pick; } -type NostrEventsRow = NPostgresSchema['nostr_events'] & { - language: string | null; -}; - interface AuthorStatsRow { pubkey: string; followers_count: number; following_count: number; notes_count: number; search: string; + streak_start: number | null; + streak_end: number | null; + nip05: string | null; + nip05_domain: string | null; + nip05_hostname: string | null; + nip05_last_verified_at: number | null; } interface EventStatsRow { @@ -43,9 +45,9 @@ interface AuthTokenRow { created_at: Date; } -interface PubkeyDomainRow { - pubkey: string; +interface DomainFaviconRow { domain: string; + favicon: string; last_updated_at: number; } diff --git a/packages/db/KyselyLogger.ts b/packages/db/KyselyLogger.ts new file mode 100644 index 00000000..333e4285 --- /dev/null +++ b/packages/db/KyselyLogger.ts @@ -0,0 +1,30 @@ +import { dbQueriesCounter, dbQueryDurationHistogram } from '@ditto/metrics'; +import { logi, type LogiValue } from '@soapbox/logi'; + +import type { Logger } from 'kysely'; + +/** Log the SQL for queries. */ +export const KyselyLogger: Logger = (event) => { + const { query, queryDurationMillis } = event; + const { parameters, sql } = query; + + const duration = queryDurationMillis / 1000; + + dbQueriesCounter.inc(); + dbQueryDurationHistogram.observe(duration); + + if (event.level === 'query') { + logi({ level: 'debug', ns: 'ditto.sql', sql, parameters: parameters as LogiValue, duration }); + } + + if (event.level === 'error') { + logi({ + level: 'error', + ns: 'ditto.sql', + sql, + parameters: parameters as LogiValue, + error: event.error instanceof Error ? 
event.error : null, + duration, + }); + } +}; diff --git a/packages/db/adapters/DittoPglite.test.ts b/packages/db/adapters/DittoPglite.test.ts new file mode 100644 index 00000000..b0d9f4d1 --- /dev/null +++ b/packages/db/adapters/DittoPglite.test.ts @@ -0,0 +1,14 @@ +import { assertEquals } from '@std/assert'; + +import { DittoPglite } from './DittoPglite.ts'; + +Deno.test('DittoPglite', async () => { + const db = new DittoPglite('memory://'); + await db.migrate(); + + assertEquals(db.poolSize, 1); + assertEquals(db.availableConnections, 1); + + await db.kysely.destroy(); + await new Promise((resolve) => setTimeout(resolve, 100)); +}); diff --git a/packages/db/adapters/DittoPglite.ts b/packages/db/adapters/DittoPglite.ts new file mode 100644 index 00000000..7fcd5bab --- /dev/null +++ b/packages/db/adapters/DittoPglite.ts @@ -0,0 +1,52 @@ +import { PGlite } from '@electric-sql/pglite'; +import { pg_trgm } from '@electric-sql/pglite/contrib/pg_trgm'; +import { PgliteDialect } from '@soapbox/kysely-pglite'; +import { Kysely } from 'kysely'; + +import { KyselyLogger } from '../KyselyLogger.ts'; +import { DittoPgMigrator } from '../DittoPgMigrator.ts'; +import { isWorker } from '../utils/worker.ts'; + +import type { DittoDB, DittoDBOpts } from '../DittoDB.ts'; +import type { DittoTables } from '../DittoTables.ts'; + +export class DittoPglite implements DittoDB { + readonly poolSize = 1; + readonly availableConnections = 1; + readonly kysely: Kysely<DittoTables>; + + private pglite: PGlite; + private migrator: DittoPgMigrator; + + constructor(databaseUrl: string, opts?: DittoDBOpts) { + const url = new URL(databaseUrl); + + if (url.protocol === 'file:' && isWorker()) { + throw new Error('PGlite is not supported in worker threads.'); + } + + this.pglite = new PGlite(databaseUrl, { + extensions: { pg_trgm }, + debug: opts?.debug, + }); + + this.kysely = new Kysely<DittoTables>({ + dialect: new PgliteDialect({ database: this.pglite }), + log: KyselyLogger, + }); + + this.migrator = new 
DittoPgMigrator(this.kysely); + } + + listen(channel: string, callback: (payload: string) => void): void { + this.pglite.listen(channel, callback); + } + + async migrate(): Promise { + await this.migrator.migrate(); + } + + async [Symbol.asyncDispose](): Promise { + await this.kysely.destroy(); + } +} diff --git a/packages/db/adapters/DittoPolyPg.test.ts b/packages/db/adapters/DittoPolyPg.test.ts new file mode 100644 index 00000000..d38d8eb1 --- /dev/null +++ b/packages/db/adapters/DittoPolyPg.test.ts @@ -0,0 +1,6 @@ +import { DittoPolyPg } from './DittoPolyPg.ts'; + +Deno.test('DittoPolyPg', async () => { + const db = new DittoPolyPg('memory://'); + await db.migrate(); +}); diff --git a/packages/db/adapters/DittoPolyPg.ts b/packages/db/adapters/DittoPolyPg.ts new file mode 100644 index 00000000..2d9358cd --- /dev/null +++ b/packages/db/adapters/DittoPolyPg.ts @@ -0,0 +1,53 @@ +import { DittoPglite } from './DittoPglite.ts'; +import { DittoPostgres } from './DittoPostgres.ts'; + +import type { Kysely } from 'kysely'; +import type { DittoDB, DittoDBOpts } from '../DittoDB.ts'; +import type { DittoTables } from '../DittoTables.ts'; + +/** Creates either a PGlite or Postgres connection depending on the databaseUrl. */ +export class DittoPolyPg implements DittoDB { + private adapter: DittoDB; + + /** Open a new database connection. 
*/ + constructor(databaseUrl: string, opts?: DittoDBOpts) { + const { protocol } = new URL(databaseUrl); + + switch (protocol) { + case 'file:': + case 'memory:': + this.adapter = new DittoPglite(databaseUrl, opts); + break; + case 'postgres:': + case 'postgresql:': + this.adapter = new DittoPostgres(databaseUrl, opts); + break; + default: + throw new Error('Unsupported database URL.'); + } + } + + get kysely(): Kysely { + return this.adapter.kysely; + } + + async migrate(): Promise { + await this.adapter.migrate(); + } + + listen(channel: string, callback: (payload: string) => void): void { + this.adapter.listen(channel, callback); + } + + get poolSize(): number { + return this.adapter.poolSize; + } + + get availableConnections(): number { + return this.adapter.availableConnections; + } + + async [Symbol.asyncDispose](): Promise { + await this.adapter[Symbol.asyncDispose](); + } +} diff --git a/packages/db/adapters/DittoPostgres.ts b/packages/db/adapters/DittoPostgres.ts new file mode 100644 index 00000000..ba16b09e --- /dev/null +++ b/packages/db/adapters/DittoPostgres.ts @@ -0,0 +1,79 @@ +import { + type BinaryOperationNode, + FunctionNode, + Kysely, + OperatorNode, + PostgresAdapter, + PostgresIntrospector, + PostgresQueryCompiler, + PrimitiveValueListNode, + ValueNode, +} from 'kysely'; +import { type PostgresJSDialectConfig, PostgresJSDriver } from 'kysely-postgres-js'; +import postgres from 'postgres'; + +import { DittoPgMigrator } from '../DittoPgMigrator.ts'; +import { KyselyLogger } from '../KyselyLogger.ts'; + +import type { DittoDB, DittoDBOpts } from '../DittoDB.ts'; +import type { DittoTables } from '../DittoTables.ts'; + +export class DittoPostgres implements DittoDB { + private pg: ReturnType; + private migrator: DittoPgMigrator; + + readonly kysely: Kysely; + + constructor(databaseUrl: string, opts?: DittoDBOpts) { + this.pg = postgres(databaseUrl, { max: opts?.poolSize }); + + this.kysely = new Kysely({ + dialect: { + createAdapter: () => new 
PostgresAdapter(), + createDriver: () => + new PostgresJSDriver({ postgres: this.pg as unknown as PostgresJSDialectConfig['postgres'] }), + createIntrospector: (db) => new PostgresIntrospector(db), + createQueryCompiler: () => new DittoPostgresQueryCompiler(), + }, + log: KyselyLogger, + }); + + this.migrator = new DittoPgMigrator(this.kysely); + } + + listen(channel: string, callback: (payload: string) => void): void { + this.pg.listen(channel, callback); + } + + async migrate(): Promise { + await this.migrator.migrate(); + } + + get poolSize(): number { + return this.pg.connections.open; + } + + get availableConnections(): number { + return this.pg.connections.idle; + } + + async [Symbol.asyncDispose](): Promise { + await this.pg.end(); + await this.kysely.destroy(); + } +} + +/** Converts `in` queries to `any` to improve prepared statements on Postgres. */ +class DittoPostgresQueryCompiler extends PostgresQueryCompiler { + protected override visitBinaryOperation(node: BinaryOperationNode): void { + if ( + OperatorNode.is(node.operator) && node.operator.operator === 'in' && PrimitiveValueListNode.is(node.rightOperand) + ) { + this.visitNode(node.leftOperand); + this.append(' = '); + this.visitNode(FunctionNode.create('any', [ValueNode.create(node.rightOperand.values)])); + } else { + super.visitBinaryOperation(node); + } + } +} diff --git a/packages/db/adapters/DummyDB.test.ts b/packages/db/adapters/DummyDB.test.ts new file mode 100644 index 00000000..a58ddcb0 --- /dev/null +++ b/packages/db/adapters/DummyDB.test.ts @@ -0,0 +1,11 @@ +import { assertEquals } from '@std/assert'; +import { DummyDB } from './DummyDB.ts'; + +Deno.test('DummyDB', async () => { + const db = new DummyDB(); + await db.migrate(); + + const rows = await db.kysely.selectFrom('nostr_events').selectAll().execute(); + + assertEquals(rows, []); +}); diff --git a/packages/db/adapters/DummyDB.ts b/packages/db/adapters/DummyDB.ts new file mode 100644 index 00000000..669b679d --- /dev/null +++ 
b/packages/db/adapters/DummyDB.ts @@ -0,0 +1,33 @@ +import { DummyDriver, Kysely, PostgresAdapter, PostgresIntrospector, PostgresQueryCompiler } from 'kysely'; + +import type { DittoDB } from '../DittoDB.ts'; +import type { DittoTables } from '../DittoTables.ts'; + +export class DummyDB implements DittoDB { + readonly kysely: Kysely; + readonly poolSize = 0; + readonly availableConnections = 0; + + constructor() { + this.kysely = new Kysely({ + dialect: { + createAdapter: () => new PostgresAdapter(), + createDriver: () => new DummyDriver(), + createIntrospector: (db) => new PostgresIntrospector(db), + createQueryCompiler: () => new PostgresQueryCompiler(), + }, + }); + } + + listen(): void { + // noop + } + + migrate(): Promise { + return Promise.resolve(); + } + + [Symbol.asyncDispose](): Promise { + return Promise.resolve(); + } +} diff --git a/packages/db/deno.json b/packages/db/deno.json new file mode 100644 index 00000000..51570d2f --- /dev/null +++ b/packages/db/deno.json @@ -0,0 +1,6 @@ +{ + "name": "@ditto/db", + "exports": { + ".": "./mod.ts" + } +} diff --git a/src/db/migrations/000_create_events.ts b/packages/db/migrations/000_create_events.ts similarity index 89% rename from src/db/migrations/000_create_events.ts rename to packages/db/migrations/000_create_events.ts index f08a614e..e5f27f5d 100644 --- a/src/db/migrations/000_create_events.ts +++ b/packages/db/migrations/000_create_events.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('events') .addColumn('id', 'text', (col) => col.primaryKey()) @@ -52,7 +52,7 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('events').execute(); await db.schema.dropTable('tags').execute(); await 
db.schema.dropTable('users').execute(); diff --git a/src/db/migrations/001_add_relays.ts b/packages/db/migrations/001_add_relays.ts similarity index 63% rename from src/db/migrations/001_add_relays.ts rename to packages/db/migrations/001_add_relays.ts index 11c68844..4d286fcb 100644 --- a/src/db/migrations/001_add_relays.ts +++ b/packages/db/migrations/001_add_relays.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('relays') .addColumn('url', 'text', (col) => col.primaryKey()) @@ -9,6 +9,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('relays').execute(); } diff --git a/packages/db/migrations/002_events_fts.ts b/packages/db/migrations/002_events_fts.ts new file mode 100644 index 00000000..d88d0e7f --- /dev/null +++ b/packages/db/migrations/002_events_fts.ts @@ -0,0 +1,8 @@ +import type { Kysely } from 'kysely'; + +export async function up(_db: Kysely): Promise { + // This migration used to create an FTS table for SQLite, but SQLite support was removed. 
+} + +export async function down(_db: Kysely): Promise { +} diff --git a/packages/db/migrations/003_events_admin.ts b/packages/db/migrations/003_events_admin.ts new file mode 100644 index 00000000..9d555c2d --- /dev/null +++ b/packages/db/migrations/003_events_admin.ts @@ -0,0 +1,8 @@ +import type { Kysely } from 'kysely'; + +export async function up(_db: Kysely): Promise { +} + +export async function down(db: Kysely): Promise { + await db.schema.alterTable('users').dropColumn('admin').execute(); +} diff --git a/packages/db/migrations/004_add_user_indexes.ts b/packages/db/migrations/004_add_user_indexes.ts new file mode 100644 index 00000000..77fe1d31 --- /dev/null +++ b/packages/db/migrations/004_add_user_indexes.ts @@ -0,0 +1,9 @@ +import type { Kysely } from 'kysely'; + +export async function up(_db: Kysely): Promise { +} + +export async function down(db: Kysely): Promise { + await db.schema.dropIndex('idx_users_pubkey').execute(); + await db.schema.dropIndex('idx_users_username').execute(); +} diff --git a/src/db/migrations/005_rework_tags.ts b/packages/db/migrations/005_rework_tags.ts similarity index 89% rename from src/db/migrations/005_rework_tags.ts rename to packages/db/migrations/005_rework_tags.ts index 1f95810e..9d0cfcd2 100644 --- a/src/db/migrations/005_rework_tags.ts +++ b/packages/db/migrations/005_rework_tags.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('tags_new') .addColumn('tag', 'text', (col) => col.notNull()) @@ -42,7 +42,7 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('tags').execute(); await db.schema diff --git a/packages/db/migrations/006_pragma.ts b/packages/db/migrations/006_pragma.ts new file mode 100644 index 
00000000..d56df6db --- /dev/null +++ b/packages/db/migrations/006_pragma.ts @@ -0,0 +1,7 @@ +import type { Kysely } from 'kysely'; + +export async function up(_db: Kysely): Promise { +} + +export async function down(_db: Kysely): Promise { +} diff --git a/src/db/migrations/007_unattached_media.ts b/packages/db/migrations/007_unattached_media.ts similarity index 82% rename from src/db/migrations/007_unattached_media.ts rename to packages/db/migrations/007_unattached_media.ts index a36c5d35..1a7a2b18 100644 --- a/src/db/migrations/007_unattached_media.ts +++ b/packages/db/migrations/007_unattached_media.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('unattached_media') .addColumn('id', 'text', (c) => c.primaryKey()) @@ -29,6 +29,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('unattached_media').execute(); } diff --git a/packages/db/migrations/008_wal.ts b/packages/db/migrations/008_wal.ts new file mode 100644 index 00000000..d56df6db --- /dev/null +++ b/packages/db/migrations/008_wal.ts @@ -0,0 +1,7 @@ +import type { Kysely } from 'kysely'; + +export async function up(_db: Kysely): Promise { +} + +export async function down(_db: Kysely): Promise { +} diff --git a/src/db/migrations/009_add_stats.ts b/packages/db/migrations/009_add_stats.ts similarity index 84% rename from src/db/migrations/009_add_stats.ts rename to packages/db/migrations/009_add_stats.ts index ef1c4438..a25ee09f 100644 --- a/src/db/migrations/009_add_stats.ts +++ b/packages/db/migrations/009_add_stats.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise 
{ await db.schema .createTable('author_stats') .addColumn('pubkey', 'text', (col) => col.primaryKey()) @@ -18,7 +18,7 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('author_stats').execute(); await db.schema.dropTable('event_stats').execute(); } diff --git a/packages/db/migrations/010_drop_users.ts b/packages/db/migrations/010_drop_users.ts new file mode 100644 index 00000000..3175cb04 --- /dev/null +++ b/packages/db/migrations/010_drop_users.ts @@ -0,0 +1,8 @@ +import type { Kysely } from 'kysely'; + +export async function up(db: Kysely): Promise { + await db.schema.dropTable('users').ifExists().execute(); +} + +export async function down(_db: Kysely): Promise { +} diff --git a/src/db/migrations/011_kind_author_index.ts b/packages/db/migrations/011_kind_author_index.ts similarity index 59% rename from src/db/migrations/011_kind_author_index.ts rename to packages/db/migrations/011_kind_author_index.ts index c41910b4..03da79ab 100644 --- a/src/db/migrations/011_kind_author_index.ts +++ b/packages/db/migrations/011_kind_author_index.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createIndex('idx_events_kind_pubkey_created_at') .on('events') @@ -8,6 +8,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('idx_events_kind_pubkey_created_at').execute(); } diff --git a/src/db/migrations/012_tags_composite_index.ts b/packages/db/migrations/012_tags_composite_index.ts similarity index 75% rename from src/db/migrations/012_tags_composite_index.ts rename to packages/db/migrations/012_tags_composite_index.ts index 412fa599..9cca3cc1 
100644 --- a/src/db/migrations/012_tags_composite_index.ts +++ b/packages/db/migrations/012_tags_composite_index.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema.dropIndex('idx_tags_tag').execute(); await db.schema.dropIndex('idx_tags_value').execute(); @@ -11,7 +11,7 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('idx_tags_tag_value').execute(); await db.schema diff --git a/src/db/migrations/013_soft_deletion.ts b/packages/db/migrations/013_soft_deletion.ts similarity index 50% rename from src/db/migrations/013_soft_deletion.ts rename to packages/db/migrations/013_soft_deletion.ts index df19da50..7b336635 100644 --- a/src/db/migrations/013_soft_deletion.ts +++ b/packages/db/migrations/013_soft_deletion.ts @@ -1,9 +1,9 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema.alterTable('events').addColumn('deleted_at', 'integer').execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.alterTable('events').dropColumn('deleted_at').execute(); } diff --git a/src/db/migrations/014_stats_indexes.ts.ts b/packages/db/migrations/014_stats_indexes.ts.ts similarity index 69% rename from src/db/migrations/014_stats_indexes.ts.ts rename to packages/db/migrations/014_stats_indexes.ts.ts index 0f27a7fa..7f8db099 100644 --- a/src/db/migrations/014_stats_indexes.ts.ts +++ b/packages/db/migrations/014_stats_indexes.ts.ts @@ -1,11 +1,11 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function 
up(db: Kysely): Promise { await db.schema.createIndex('idx_author_stats_pubkey').on('author_stats').column('pubkey').execute(); await db.schema.createIndex('idx_event_stats_event_id').on('event_stats').column('event_id').execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('idx_author_stats_pubkey').on('author_stats').execute(); await db.schema.dropIndex('idx_event_stats_event_id').on('event_stats').execute(); } diff --git a/src/db/migrations/015_add_pubkey_domains.ts b/packages/db/migrations/015_add_pubkey_domains.ts similarity index 71% rename from src/db/migrations/015_add_pubkey_domains.ts rename to packages/db/migrations/015_add_pubkey_domains.ts index 4b7e23c4..625de519 100644 --- a/src/db/migrations/015_add_pubkey_domains.ts +++ b/packages/db/migrations/015_add_pubkey_domains.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('pubkey_domains') .ifNotExists() @@ -16,6 +16,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('pubkey_domains').execute(); } diff --git a/src/db/migrations/016_pubkey_domains_updated_at.ts b/packages/db/migrations/016_pubkey_domains_updated_at.ts similarity index 59% rename from src/db/migrations/016_pubkey_domains_updated_at.ts rename to packages/db/migrations/016_pubkey_domains_updated_at.ts index 8b1f75d0..8343d036 100644 --- a/src/db/migrations/016_pubkey_domains_updated_at.ts +++ b/packages/db/migrations/016_pubkey_domains_updated_at.ts @@ -1,12 +1,12 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await 
db.schema .alterTable('pubkey_domains') .addColumn('last_updated_at', 'integer', (col) => col.notNull().defaultTo(0)) .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.alterTable('pubkey_domains').dropColumn('last_updated_at').execute(); } diff --git a/src/db/migrations/017_rm_relays.ts b/packages/db/migrations/017_rm_relays.ts similarity index 63% rename from src/db/migrations/017_rm_relays.ts rename to packages/db/migrations/017_rm_relays.ts index 70a274d0..ccf53e67 100644 --- a/src/db/migrations/017_rm_relays.ts +++ b/packages/db/migrations/017_rm_relays.ts @@ -1,10 +1,10 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema.dropTable('relays').execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema .createTable('relays') .addColumn('url', 'text', (col) => col.primaryKey()) diff --git a/src/db/migrations/018_events_created_at_kind_index.ts b/packages/db/migrations/018_events_created_at_kind_index.ts similarity index 59% rename from src/db/migrations/018_events_created_at_kind_index.ts rename to packages/db/migrations/018_events_created_at_kind_index.ts index 8e6c67c0..d6a9dcc1 100644 --- a/src/db/migrations/018_events_created_at_kind_index.ts +++ b/packages/db/migrations/018_events_created_at_kind_index.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createIndex('idx_events_created_at_kind') .on('events') @@ -9,6 +9,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await 
db.schema.dropIndex('idx_events_created_at_kind').ifExists().execute(); } diff --git a/src/db/migrations/019_ndatabase_schema.ts b/packages/db/migrations/019_ndatabase_schema.ts similarity index 73% rename from src/db/migrations/019_ndatabase_schema.ts rename to packages/db/migrations/019_ndatabase_schema.ts index 79d8cbc9..736cd0bc 100644 --- a/src/db/migrations/019_ndatabase_schema.ts +++ b/packages/db/migrations/019_ndatabase_schema.ts @@ -1,12 +1,12 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema.alterTable('events').renameTo('nostr_events').execute(); await db.schema.alterTable('tags').renameTo('nostr_tags').execute(); await db.schema.alterTable('nostr_tags').renameColumn('tag', 'name').execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.alterTable('nostr_events').renameTo('events').execute(); await db.schema.alterTable('nostr_tags').renameTo('tags').execute(); await db.schema.alterTable('tags').renameColumn('name', 'tag').execute(); diff --git a/src/db/migrations/020_drop_deleted_at.ts b/packages/db/migrations/020_drop_deleted_at.ts similarity index 69% rename from src/db/migrations/020_drop_deleted_at.ts rename to packages/db/migrations/020_drop_deleted_at.ts index 4894b9f5..6ba81031 100644 --- a/src/db/migrations/020_drop_deleted_at.ts +++ b/packages/db/migrations/020_drop_deleted_at.ts @@ -1,10 +1,11 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; +// deno-lint-ignore no-explicit-any export async function up(db: Kysely): Promise { await db.deleteFrom('nostr_events').where('deleted_at', 'is not', null).execute(); await db.schema.alterTable('nostr_events').dropColumn('deleted_at').execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await 
db.schema.alterTable('nostr_events').addColumn('deleted_at', 'integer').execute(); } diff --git a/src/db/migrations/020_pgfts.ts b/packages/db/migrations/020_pgfts.ts similarity index 65% rename from src/db/migrations/020_pgfts.ts rename to packages/db/migrations/020_pgfts.ts index 26e320ec..e69bd508 100644 --- a/src/db/migrations/020_pgfts.ts +++ b/packages/db/migrations/020_pgfts.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema.createTable('nostr_pgfts') .ifNotExists() .addColumn('event_id', 'text', (c) => c.primaryKey().references('nostr_events.id').onDelete('cascade')) @@ -8,6 +8,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('nostr_pgfts').ifExists().execute(); } diff --git a/src/db/migrations/021_pgfts_index.ts b/packages/db/migrations/021_pgfts_index.ts similarity index 60% rename from src/db/migrations/021_pgfts_index.ts rename to packages/db/migrations/021_pgfts_index.ts index 7ad24546..38e80aed 100644 --- a/src/db/migrations/021_pgfts_index.ts +++ b/packages/db/migrations/021_pgfts_index.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createIndex('nostr_pgfts_gin_search_vec') .ifNotExists() @@ -10,6 +10,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('nostr_pgfts_gin_search_vec').ifExists().execute(); } diff --git a/src/db/migrations/022_event_stats_reactions.ts b/packages/db/migrations/022_event_stats_reactions.ts similarity index 56% rename 
from src/db/migrations/022_event_stats_reactions.ts rename to packages/db/migrations/022_event_stats_reactions.ts index 0bc69147..45c780b6 100644 --- a/src/db/migrations/022_event_stats_reactions.ts +++ b/packages/db/migrations/022_event_stats_reactions.ts @@ -1,12 +1,12 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .alterTable('event_stats') .addColumn('reactions', 'text', (col) => col.defaultTo('{}')) .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.alterTable('event_stats').dropColumn('reactions').execute(); } diff --git a/src/db/migrations/023_add_nip46_tokens.ts b/packages/db/migrations/023_add_nip46_tokens.ts similarity index 76% rename from src/db/migrations/023_add_nip46_tokens.ts rename to packages/db/migrations/023_add_nip46_tokens.ts index 01d71640..45c1522c 100644 --- a/src/db/migrations/023_add_nip46_tokens.ts +++ b/packages/db/migrations/023_add_nip46_tokens.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('nip46_tokens') .addColumn('api_token', 'text', (col) => col.primaryKey().notNull()) @@ -12,6 +12,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('nip46_tokens').execute(); } diff --git a/src/db/migrations/024_event_stats_quotes_count.ts b/packages/db/migrations/024_event_stats_quotes_count.ts similarity index 58% rename from src/db/migrations/024_event_stats_quotes_count.ts rename to packages/db/migrations/024_event_stats_quotes_count.ts index f62baf57..b9808bd2 100644 --- 
a/src/db/migrations/024_event_stats_quotes_count.ts +++ b/packages/db/migrations/024_event_stats_quotes_count.ts @@ -1,12 +1,12 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .alterTable('event_stats') .addColumn('quotes_count', 'integer', (col) => col.notNull().defaultTo(0)) .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.alterTable('event_stats').dropColumn('quotes_count').execute(); } diff --git a/src/db/migrations/025_event_stats_add_zap_count.ts b/packages/db/migrations/025_event_stats_add_zap_count.ts similarity index 58% rename from src/db/migrations/025_event_stats_add_zap_count.ts rename to packages/db/migrations/025_event_stats_add_zap_count.ts index 91479907..0507fd18 100644 --- a/src/db/migrations/025_event_stats_add_zap_count.ts +++ b/packages/db/migrations/025_event_stats_add_zap_count.ts @@ -1,12 +1,12 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .alterTable('event_stats') .addColumn('zaps_amount', 'integer', (col) => col.notNull().defaultTo(0)) .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.alterTable('event_stats').dropColumn('zaps_amount').execute(); } diff --git a/src/db/migrations/026_tags_name_index.ts b/packages/db/migrations/026_tags_name_index.ts similarity index 54% rename from src/db/migrations/026_tags_name_index.ts rename to packages/db/migrations/026_tags_name_index.ts index a15587fb..3703953b 100644 --- a/src/db/migrations/026_tags_name_index.ts +++ b/packages/db/migrations/026_tags_name_index.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; 
-export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createIndex('idx_tags_name') .on('nostr_tags') @@ -9,6 +9,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('idx_tags_name').ifExists().execute(); } diff --git a/src/db/migrations/027_add_zap_events.ts b/packages/db/migrations/027_add_zap_events.ts similarity index 84% rename from src/db/migrations/027_add_zap_events.ts rename to packages/db/migrations/027_add_zap_events.ts index 2fcc101c..8ccb8158 100644 --- a/src/db/migrations/027_add_zap_events.ts +++ b/packages/db/migrations/027_add_zap_events.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('event_zaps') .addColumn('receipt_id', 'text', (col) => col.primaryKey()) @@ -25,7 +25,7 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('idx_event_zaps_amount_millisats').ifExists().execute(); await db.schema.dropIndex('idx_event_zaps_target_event_id').ifExists().execute(); await db.schema.dropTable('event_zaps').execute(); diff --git a/src/db/migrations/028_stable_sort.ts b/packages/db/migrations/028_stable_sort.ts similarity index 86% rename from src/db/migrations/028_stable_sort.ts rename to packages/db/migrations/028_stable_sort.ts index 191f32ca..76d771f5 100644 --- a/src/db/migrations/028_stable_sort.ts +++ b/packages/db/migrations/028_stable_sort.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema 
.createIndex('nostr_events_created_at_kind') .on('nostr_events') @@ -19,7 +19,7 @@ export async function up(db: Kysely): Promise { await db.schema.dropIndex('idx_events_kind_pubkey_created_at').execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('nostr_events_created_at_kind').execute(); await db.schema.dropIndex('nostr_events_kind_pubkey_created_at').execute(); diff --git a/src/db/migrations/029_tag_queries.ts b/packages/db/migrations/029_tag_queries.ts similarity index 95% rename from src/db/migrations/029_tag_queries.ts rename to packages/db/migrations/029_tag_queries.ts index 5a27d720..9a2fd2b3 100644 --- a/src/db/migrations/029_tag_queries.ts +++ b/packages/db/migrations/029_tag_queries.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('nostr_tags_new') .addColumn('event_id', 'text', (col) => col.notNull().references('nostr_events.id').onDelete('cascade')) @@ -66,7 +66,7 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema .createTable('nostr_tags_old') .addColumn('event_id', 'text', (col) => col.references('nostr_events.id').onDelete('cascade')) diff --git a/src/db/migrations/030_pg_events_jsonb.ts b/packages/db/migrations/030_pg_events_jsonb.ts similarity index 96% rename from src/db/migrations/030_pg_events_jsonb.ts rename to packages/db/migrations/030_pg_events_jsonb.ts index dcd6ad85..b6a6328b 100644 --- a/src/db/migrations/030_pg_events_jsonb.ts +++ b/packages/db/migrations/030_pg_events_jsonb.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function 
up(db: Kysely): Promise { // Create new table and indexes. await db.schema .createTable('nostr_events_new') @@ -132,6 +132,6 @@ If you don't want to wait, you can create a fresh database and then import your await db.schema.alterTable('nostr_events_new').renameTo('nostr_events').execute(); } -export function down(_db: Kysely): Promise { +export function down(_db: Kysely): Promise { throw new Error("Sorry, you can't migrate back from here."); } diff --git a/src/db/migrations/031_rm_unattached_media.ts b/packages/db/migrations/031_rm_unattached_media.ts similarity index 82% rename from src/db/migrations/031_rm_unattached_media.ts rename to packages/db/migrations/031_rm_unattached_media.ts index febd85e1..48e9e97b 100644 --- a/src/db/migrations/031_rm_unattached_media.ts +++ b/packages/db/migrations/031_rm_unattached_media.ts @@ -1,10 +1,10 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema.dropTable('unattached_media').execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema .createTable('unattached_media') .addColumn('id', 'text', (c) => c.primaryKey()) diff --git a/src/db/migrations/032_add_author_search.ts b/packages/db/migrations/032_add_author_search.ts similarity index 75% rename from src/db/migrations/032_add_author_search.ts rename to packages/db/migrations/032_add_author_search.ts index 4323c252..d5c45c1b 100644 --- a/src/db/migrations/032_add_author_search.ts +++ b/packages/db/migrations/032_add_author_search.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('author_search') .addColumn('pubkey', 'char(64)', (col) => col.primaryKey()) @@ -12,7 +12,7 @@ export async function 
up(db: Kysely): Promise { await sql`CREATE INDEX author_search_search_idx ON author_search USING GIN (search gin_trgm_ops)`.execute(db); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('author_search_search_idx').ifExists().execute(); await db.schema.dropTable('author_search').execute(); } diff --git a/src/db/migrations/033_add_language.ts b/packages/db/migrations/033_add_language.ts similarity index 71% rename from src/db/migrations/033_add_language.ts rename to packages/db/migrations/033_add_language.ts index 77bfc37e..9b680ee9 100644 --- a/src/db/migrations/033_add_language.ts +++ b/packages/db/migrations/033_add_language.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema.alterTable('nostr_events').addColumn('language', 'char(2)').execute(); await db.schema.createIndex('nostr_events_language_created_idx') @@ -9,7 +9,7 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.alterTable('nostr_events').dropColumn('language').execute(); await db.schema.dropIndex('nostr_events_language_created_idx').execute(); } diff --git a/src/db/migrations/034_move_author_search_to_author_stats.ts b/packages/db/migrations/034_move_author_search_to_author_stats.ts similarity index 86% rename from src/db/migrations/034_move_author_search_to_author_stats.ts rename to packages/db/migrations/034_move_author_search_to_author_stats.ts index 6d21ca39..8c57c639 100644 --- a/src/db/migrations/034_move_author_search_to_author_stats.ts +++ b/packages/db/migrations/034_move_author_search_to_author_stats.ts @@ -1,5 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; +// deno-lint-ignore 
no-explicit-any export async function up(db: Kysely): Promise { await db.schema .alterTable('author_stats') @@ -26,7 +27,7 @@ export async function up(db: Kysely): Promise { await db.schema.dropTable('author_search').execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('author_stats_search_idx').ifExists().execute(); await db.schema.alterTable('author_stats').dropColumn('search').execute(); } diff --git a/src/db/migrations/035_author_stats_followers_index.ts b/packages/db/migrations/035_author_stats_followers_index.ts similarity index 71% rename from src/db/migrations/035_author_stats_followers_index.ts rename to packages/db/migrations/035_author_stats_followers_index.ts index 0509d403..d6b12f87 100644 --- a/src/db/migrations/035_author_stats_followers_index.ts +++ b/packages/db/migrations/035_author_stats_followers_index.ts @@ -1,6 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createIndex('author_stats_followers_count_idx') .ifNotExists() @@ -12,6 +12,6 @@ export async function up(db: Kysely): Promise { await db.schema.dropIndex('idx_author_stats_pubkey').ifExists().execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropIndex('author_stats_followers_count_idx').ifExists().execute(); } diff --git a/src/db/migrations/036_stats64.ts b/packages/db/migrations/036_stats64.ts similarity index 83% rename from src/db/migrations/036_stats64.ts rename to packages/db/migrations/036_stats64.ts index fa9d357e..f9c4eabc 100644 --- a/src/db/migrations/036_stats64.ts +++ b/packages/db/migrations/036_stats64.ts @@ -1,5 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; +// deno-lint-ignore no-explicit-any export async function 
up(db: Kysely): Promise { await db.deleteFrom('event_stats').where(sql`length(event_id)`, '>', 64).execute(); await db.deleteFrom('author_stats').where(sql`length(pubkey)`, '>', 64).execute(); @@ -8,7 +9,7 @@ export async function up(db: Kysely): Promise { await db.schema.alterTable('author_stats').alterColumn('pubkey', (col) => col.setDataType('char(64)')).execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.alterTable('event_stats').alterColumn('event_id', (col) => col.setDataType('text')).execute(); await db.schema.alterTable('author_stats').alterColumn('pubkey', (col) => col.setDataType('text')).execute(); } diff --git a/src/db/migrations/037_auth_tokens.ts b/packages/db/migrations/037_auth_tokens.ts similarity index 68% rename from src/db/migrations/037_auth_tokens.ts rename to packages/db/migrations/037_auth_tokens.ts index 2f6d1890..f7ac340c 100644 --- a/src/db/migrations/037_auth_tokens.ts +++ b/packages/db/migrations/037_auth_tokens.ts @@ -1,8 +1,4 @@ -import { Kysely, sql } from 'kysely'; - -import { Conf } from '@/config.ts'; -import { aesEncrypt } from '@/utils/aes.ts'; -import { getTokenHash } from '@/utils/auth.ts'; +import { type Kysely, sql } from 'kysely'; interface DB { nip46_tokens: { @@ -32,19 +28,6 @@ export async function up(db: Kysely): Promise { .addColumn('created_at', 'timestamp', (col) => col.defaultTo(sql`CURRENT_TIMESTAMP`)) .execute(); - // There are probably not that many tokens in the database yet, so this should be fine. 
- const tokens = await db.selectFrom('nip46_tokens').selectAll().execute(); - - for (const token of tokens) { - await db.insertInto('auth_tokens').values({ - token_hash: await getTokenHash(token.api_token), - pubkey: token.user_pubkey, - nip46_sk_enc: await aesEncrypt(Conf.seckey, token.server_seckey), - nip46_relays: JSON.parse(token.relays), - created_at: token.connected_at, - }).execute(); - } - await db.schema.dropTable('nip46_tokens').execute(); } diff --git a/src/db/migrations/038_push_subscriptions.ts b/packages/db/migrations/038_push_subscriptions.ts similarity index 84% rename from src/db/migrations/038_push_subscriptions.ts rename to packages/db/migrations/038_push_subscriptions.ts index ecce1b1f..b06e82d5 100644 --- a/src/db/migrations/038_push_subscriptions.ts +++ b/packages/db/migrations/038_push_subscriptions.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await db.schema .createTable('push_subscriptions') .addColumn('id', 'bigserial', (c) => c.primaryKey()) @@ -22,6 +22,6 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema.dropTable('push_subscriptions').execute(); } diff --git a/src/db/migrations/039_pg_notify.ts b/packages/db/migrations/039_pg_notify.ts similarity index 83% rename from src/db/migrations/039_pg_notify.ts rename to packages/db/migrations/039_pg_notify.ts index 6d75844d..2a91b6cf 100644 --- a/src/db/migrations/039_pg_notify.ts +++ b/packages/db/migrations/039_pg_notify.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await sql` CREATE OR REPLACE FUNCTION notify_nostr_event() RETURNS TRIGGER AS $$ @@ -31,7 
+31,7 @@ export async function up(db: Kysely): Promise { `.execute(db); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await sql`DROP TRIGGER nostr_event_trigger ON nostr_events`.execute(db); await sql`DROP FUNCTION notify_nostr_event()`.execute(db); } diff --git a/src/db/migrations/040_add_bunker_pubkey.ts b/packages/db/migrations/040_add_bunker_pubkey.ts similarity index 78% rename from src/db/migrations/040_add_bunker_pubkey.ts rename to packages/db/migrations/040_add_bunker_pubkey.ts index 58ab0a5e..60b5b942 100644 --- a/src/db/migrations/040_add_bunker_pubkey.ts +++ b/packages/db/migrations/040_add_bunker_pubkey.ts @@ -1,5 +1,6 @@ -import { Kysely } from 'kysely'; +import type { Kysely } from 'kysely'; +// deno-lint-ignore no-explicit-any export async function up(db: Kysely): Promise { await db.schema .alterTable('auth_tokens') @@ -14,7 +15,7 @@ export async function up(db: Kysely): Promise { .execute(); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): Promise { await db.schema .alterTable('auth_tokens') .dropColumn('bunker_pubkey') diff --git a/src/db/migrations/041_pg_notify_id_only.ts b/packages/db/migrations/041_pg_notify_id_only.ts similarity index 78% rename from src/db/migrations/041_pg_notify_id_only.ts rename to packages/db/migrations/041_pg_notify_id_only.ts index 192dd42f..ca14802a 100644 --- a/src/db/migrations/041_pg_notify_id_only.ts +++ b/packages/db/migrations/041_pg_notify_id_only.ts @@ -1,6 +1,6 @@ -import { Kysely, sql } from 'kysely'; +import { type Kysely, sql } from 'kysely'; -export async function up(db: Kysely): Promise { +export async function up(db: Kysely): Promise { await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db); await sql` @@ -21,7 +21,7 @@ export async function up(db: Kysely): Promise { `.execute(db); } -export async function down(db: Kysely): Promise { +export async function down(db: Kysely): 
Promise { await sql`DROP TRIGGER nostr_event_trigger ON nostr_events`.execute(db); await sql`DROP FUNCTION notify_nostr_event()`.execute(db); } diff --git a/packages/db/migrations/042_add_search_ext.ts b/packages/db/migrations/042_add_search_ext.ts new file mode 100644 index 00000000..11e2c3c0 --- /dev/null +++ b/packages/db/migrations/042_add_search_ext.ts @@ -0,0 +1,38 @@ +import { type Kysely, sql } from 'kysely'; + +export async function up(db: Kysely): Promise { + await db.schema + .alterTable('nostr_events') + .addColumn('search_ext', 'jsonb', (col) => col.notNull().defaultTo(sql`'{}'::jsonb`)) + .execute(); + + await db.schema + .alterTable('nostr_events') + .addCheckConstraint('nostr_events_search_ext_chk', sql`jsonb_typeof(search_ext) = 'object'`) + .execute(); + + await db.schema + .createIndex('nostr_events_search_ext_idx').using('gin') + .on('nostr_events') + .column('search_ext') + .ifNotExists() + .execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema + .dropIndex('nostr_events_search_ext_idx') + .on('nostr_events') + .ifExists() + .execute(); + + await db.schema + .alterTable('nostr_events') + .dropConstraint('nostr_events_search_ext_chk') + .execute(); + + await db.schema + .alterTable('nostr_events') + .dropColumn('search_ext') + .execute(); +} diff --git a/packages/db/migrations/043_rm_language.ts b/packages/db/migrations/043_rm_language.ts new file mode 100644 index 00000000..eb69aca0 --- /dev/null +++ b/packages/db/migrations/043_rm_language.ts @@ -0,0 +1,14 @@ +import type { Kysely } from 'kysely'; + +export async function up(db: Kysely): Promise { + await db.schema.alterTable('nostr_events').dropColumn('language').execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema.alterTable('nostr_events').addColumn('language', 'char(2)').execute(); + + await db.schema.createIndex('nostr_events_language_created_idx') + .on('nostr_events') + .columns(['language', 'created_at desc', 'id asc', 
'kind']) + .execute(); +} diff --git a/packages/db/migrations/044_search_ext_drop_default.ts b/packages/db/migrations/044_search_ext_drop_default.ts new file mode 100644 index 00000000..e714bd62 --- /dev/null +++ b/packages/db/migrations/044_search_ext_drop_default.ts @@ -0,0 +1,12 @@ +import type { Kysely } from 'kysely'; + +export async function up(db: Kysely): Promise { + await db.schema.alterTable('nostr_events').alterColumn('search_ext', (col) => col.dropDefault()).execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema + .alterTable('nostr_events') + .alterColumn('search_ext', (col) => col.setDefault("'{}'::jsonb")) + .execute(); +} diff --git a/packages/db/migrations/045_streaks.ts b/packages/db/migrations/045_streaks.ts new file mode 100644 index 00000000..e08727a8 --- /dev/null +++ b/packages/db/migrations/045_streaks.ts @@ -0,0 +1,17 @@ +import type { Kysely } from 'kysely'; + +export async function up(db: Kysely): Promise { + await db.schema + .alterTable('author_stats') + .addColumn('streak_start', 'integer') + .addColumn('streak_end', 'integer') + .execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema + .alterTable('author_stats') + .dropColumn('streak_start') + .dropColumn('streak_end') + .execute(); +} diff --git a/packages/db/migrations/046_author_stats_nip05.ts b/packages/db/migrations/046_author_stats_nip05.ts new file mode 100644 index 00000000..9cb5299a --- /dev/null +++ b/packages/db/migrations/046_author_stats_nip05.ts @@ -0,0 +1,48 @@ +import { type Kysely, sql } from 'kysely'; + +export async function up(db: Kysely): Promise { + await db.schema + .alterTable('author_stats') + .addColumn('nip05', 'varchar(320)') + .addColumn('nip05_domain', 'varchar(253)') + .addColumn('nip05_hostname', 'varchar(253)') + .addColumn('nip05_last_verified_at', 'integer') + .execute(); + + await db.schema + .alterTable('author_stats') + .addCheckConstraint('author_stats_nip05_domain_lowercase_chk', 
sql`nip05_domain = lower(nip05_domain)`) + .execute(); + + await db.schema + .alterTable('author_stats') + .addCheckConstraint('author_stats_nip05_hostname_lowercase_chk', sql`nip05_hostname = lower(nip05_hostname)`) + .execute(); + + await db.schema + .alterTable('author_stats') + .addCheckConstraint('author_stats_nip05_hostname_domain_chk', sql`nip05_hostname like '%' || nip05_domain`) + .execute(); + + await db.schema + .createIndex('author_stats_nip05_domain_idx') + .on('author_stats') + .column('nip05_domain') + .execute(); + + await db.schema + .createIndex('author_stats_nip05_hostname_idx') + .on('author_stats') + .column('nip05_hostname') + .execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema + .alterTable('author_stats') + .dropColumn('nip05') + .dropColumn('nip05_domain') + .dropColumn('nip05_hostname') + .dropColumn('nip05_last_verified_at') + .execute(); +} diff --git a/packages/db/migrations/047_add_domain_favicons.ts b/packages/db/migrations/047_add_domain_favicons.ts new file mode 100644 index 00000000..1086d157 --- /dev/null +++ b/packages/db/migrations/047_add_domain_favicons.ts @@ -0,0 +1,15 @@ +import { type Kysely, sql } from 'kysely'; + +export async function up(db: Kysely): Promise { + await db.schema + .createTable('domain_favicons') + .addColumn('domain', 'varchar(253)', (col) => col.primaryKey()) + .addColumn('favicon', 'varchar(2048)', (col) => col.notNull()) + .addColumn('last_updated_at', 'integer', (col) => col.notNull()) + .addCheckConstraint('domain_favicons_https_chk', sql`favicon ~* '^https:\\/\\/'`) + .execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema.dropTable('domain_favicons').execute(); +} diff --git a/packages/db/migrations/048_rm_pubkey_domains.ts b/packages/db/migrations/048_rm_pubkey_domains.ts new file mode 100644 index 00000000..d88681fd --- /dev/null +++ b/packages/db/migrations/048_rm_pubkey_domains.ts @@ -0,0 +1,22 @@ +import type { Kysely } from 
'kysely'; + +export async function up(db: Kysely): Promise { + await db.schema.dropTable('pubkey_domains').execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema + .createTable('pubkey_domains') + .ifNotExists() + .addColumn('pubkey', 'text', (col) => col.primaryKey()) + .addColumn('domain', 'text', (col) => col.notNull()) + .addColumn('last_updated_at', 'integer', (col) => col.notNull().defaultTo(0)) + .execute(); + + await db.schema + .createIndex('pubkey_domains_domain_index') + .on('pubkey_domains') + .column('domain') + .ifNotExists() + .execute(); +} diff --git a/packages/db/migrations/049_author_stats_sorted.ts b/packages/db/migrations/049_author_stats_sorted.ts new file mode 100644 index 00000000..0f18864e --- /dev/null +++ b/packages/db/migrations/049_author_stats_sorted.ts @@ -0,0 +1,21 @@ +import { type Kysely, sql } from 'kysely'; + +// deno-lint-ignore no-explicit-any +export async function up(db: Kysely): Promise { + await db.schema + .createView('top_authors') + .materialized() + .as(db.selectFrom('author_stats').select(['pubkey', 'followers_count', 'search']).orderBy('followers_count desc')) + .execute(); + + await sql`CREATE INDEX top_authors_search_idx ON top_authors USING GIN (search gin_trgm_ops)`.execute(db); + + await db.schema.createIndex('top_authors_pubkey_idx').on('top_authors').column('pubkey').execute(); + + await db.schema.dropIndex('author_stats_search_idx').execute(); +} + +export async function down(db: Kysely): Promise { + await db.schema.dropView('top_authors').execute(); + await sql`CREATE INDEX author_stats_search_idx ON author_stats USING GIN (search gin_trgm_ops)`.execute(db); +} diff --git a/packages/db/migrations/050_notify_only_insert.ts b/packages/db/migrations/050_notify_only_insert.ts new file mode 100644 index 00000000..6cbf2dde --- /dev/null +++ b/packages/db/migrations/050_notify_only_insert.ts @@ -0,0 +1,21 @@ +import { type Kysely, sql } from 'kysely'; + +export async function up(db: 
Kysely): Promise { + await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db); + + await sql` + CREATE TRIGGER nostr_event_trigger + AFTER INSERT ON nostr_events + FOR EACH ROW EXECUTE FUNCTION notify_nostr_event() + `.execute(db); +} + +export async function down(db: Kysely): Promise { + await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db); + + await sql` + CREATE TRIGGER nostr_event_trigger + AFTER INSERT OR UPDATE ON nostr_events + FOR EACH ROW EXECUTE FUNCTION notify_nostr_event() + `.execute(db); +} diff --git a/packages/db/migrations/051_notify_replaceable.ts b/packages/db/migrations/051_notify_replaceable.ts new file mode 100644 index 00000000..b4c91787 --- /dev/null +++ b/packages/db/migrations/051_notify_replaceable.ts @@ -0,0 +1,45 @@ +import { type Kysely, sql } from 'kysely'; + +export async function up(db: Kysely): Promise { + await sql` + CREATE OR REPLACE FUNCTION notify_nostr_event() + RETURNS TRIGGER AS $$ + BEGIN + IF OLD.id IS DISTINCT FROM NEW.id THEN + PERFORM pg_notify('nostr_event', NEW.id::text); + END IF; + + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + `.execute(db); + + await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db); + + await sql` + CREATE TRIGGER nostr_event_trigger + AFTER INSERT OR UPDATE ON nostr_events + FOR EACH ROW EXECUTE FUNCTION notify_nostr_event() + `.execute(db); +} + +export async function down(db: Kysely): Promise { + await sql` + CREATE OR REPLACE FUNCTION notify_nostr_event() + RETURNS TRIGGER AS $$ + BEGIN + PERFORM pg_notify('nostr_event', NEW.id::text); + + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + `.execute(db); + + await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db); + + await sql` + CREATE TRIGGER nostr_event_trigger + AFTER INSERT ON nostr_events + FOR EACH ROW EXECUTE FUNCTION notify_nostr_event() + `.execute(db); +} diff --git a/packages/db/migrations/052_rename_pkey.ts 
b/packages/db/migrations/052_rename_pkey.ts new file mode 100644 index 00000000..cf2bedf8 --- /dev/null +++ b/packages/db/migrations/052_rename_pkey.ts @@ -0,0 +1,16 @@ +import { type Kysely, sql } from 'kysely'; + +export async function up(db: Kysely): Promise { + const result = await sql<{ count: number }>` + SELECT COUNT(*) as count + FROM pg_indexes + WHERE indexname = 'nostr_events_new_pkey' + `.execute(db); + + if (result.rows[0].count > 0) { + await sql`ALTER INDEX nostr_events_new_pkey RENAME TO nostr_events_pkey;`.execute(db); + } +} + +export async function down(_db: Kysely): Promise { +} diff --git a/packages/db/mod.ts b/packages/db/mod.ts new file mode 100644 index 00000000..2766e524 --- /dev/null +++ b/packages/db/mod.ts @@ -0,0 +1,7 @@ +export { DittoPglite } from './adapters/DittoPglite.ts'; +export { DittoPolyPg } from './adapters/DittoPolyPg.ts'; +export { DittoPostgres } from './adapters/DittoPostgres.ts'; +export { DummyDB } from './adapters/DummyDB.ts'; + +export type { DittoDB } from './DittoDB.ts'; +export type { DittoTables } from './DittoTables.ts'; diff --git a/src/utils/worker.test.ts b/packages/db/utils/worker.test.ts similarity index 81% rename from src/utils/worker.test.ts rename to packages/db/utils/worker.test.ts index 89845e2b..73a90b87 100644 --- a/src/utils/worker.test.ts +++ b/packages/db/utils/worker.test.ts @@ -1,14 +1,16 @@ import { assertEquals } from '@std/assert'; -import { isWorker } from '@/utils/worker.ts'; +import { isWorker } from './worker.ts'; Deno.test('isWorker from the main thread returns false', () => { assertEquals(isWorker(), false); }); Deno.test('isWorker from a worker thread returns true', async () => { + const url = new URL('./worker.ts', import.meta.url); + const script = ` - import { isWorker } from '@/utils/worker.ts'; + import { isWorker } from '${url.href}'; postMessage(isWorker()); self.close(); `; diff --git a/src/utils/worker.ts b/packages/db/utils/worker.ts similarity index 100% rename from 
src/utils/worker.ts rename to packages/db/utils/worker.ts diff --git a/packages/ditto/DittoPush.ts b/packages/ditto/DittoPush.ts new file mode 100644 index 00000000..3a378300 --- /dev/null +++ b/packages/ditto/DittoPush.ts @@ -0,0 +1,53 @@ +import { DittoConf } from '@ditto/conf'; +import { ApplicationServer, PushMessageOptions, PushSubscriber, PushSubscription } from '@negrel/webpush'; +import { NStore } from '@nostrify/types'; +import { logi } from '@soapbox/logi'; + +import { getInstanceMetadata } from '@/utils/instance.ts'; + +interface DittoPushOpts { + conf: DittoConf; + relay: NStore; +} + +export class DittoPush { + private server: Promise; + + constructor(opts: DittoPushOpts) { + const { conf, relay } = opts; + + this.server = (async () => { + const meta = await getInstanceMetadata(relay); + const keys = await conf.vapidKeys; + + if (keys) { + return await ApplicationServer.new({ + contactInformation: `mailto:${meta.email}`, + vapidKeys: keys, + }); + } else { + logi({ + level: 'warn', + ns: 'ditto.push', + msg: 'VAPID keys are not set. 
Push notifications will be disabled.', + }); + } + })(); + } + + async push( + subscription: PushSubscription, + json: object, + opts: PushMessageOptions = {}, + ): Promise { + const server = await this.server; + + if (!server) { + return; + } + + const subscriber = new PushSubscriber(server, subscription); + const text = JSON.stringify(json); + return subscriber.pushTextMessage(text, opts); + } +} diff --git a/src/DittoUploads.ts b/packages/ditto/DittoUploads.ts similarity index 100% rename from src/DittoUploads.ts rename to packages/ditto/DittoUploads.ts diff --git a/src/RelayError.test.ts b/packages/ditto/RelayError.test.ts similarity index 100% rename from src/RelayError.test.ts rename to packages/ditto/RelayError.test.ts diff --git a/src/RelayError.ts b/packages/ditto/RelayError.ts similarity index 100% rename from src/RelayError.ts rename to packages/ditto/RelayError.ts diff --git a/packages/ditto/app.ts b/packages/ditto/app.ts new file mode 100644 index 00000000..5a84a80d --- /dev/null +++ b/packages/ditto/app.ts @@ -0,0 +1,586 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoDB, DittoPolyPg } from '@ditto/db'; +import { paginationMiddleware, tokenMiddleware, userMiddleware } from '@ditto/mastoapi/middleware'; +import { DittoApp, type DittoEnv } from '@ditto/mastoapi/router'; +import { relayPoolRelaysSizeGauge, relayPoolSubscriptionsSizeGauge } from '@ditto/metrics'; +import { type DittoTranslator } from '@ditto/translators'; +import { type Context, Handler, Input as HonoInput, MiddlewareHandler } from '@hono/hono'; +import { every } from '@hono/hono/combine'; +import { cors } from '@hono/hono/cors'; +import { serveStatic } from '@hono/hono/deno'; +import { NostrEvent, NostrSigner, NRelay, NUploader } from '@nostrify/nostrify'; + +import { cron } from '@/cron.ts'; +import { startFirehose } from '@/firehose.ts'; +import { DittoAPIStore } from '@/storages/DittoAPIStore.ts'; +import { DittoPgStore } from '@/storages/DittoPgStore.ts'; +import { 
DittoPool } from '@/storages/DittoPool.ts'; +import { Time } from '@/utils/time.ts'; +import { seedZapSplits } from '@/utils/zap-split.ts'; + +import { + accountController, + accountLookupController, + accountSearchController, + accountStatusesController, + blockController, + createAccountController, + familiarFollowersController, + favouritesController, + followController, + followersController, + followingController, + muteController, + relationshipsController, + unblockController, + unfollowController, + unmuteController, + updateCredentialsController, + verifyCredentialsController, +} from '@/controllers/api/accounts.ts'; +import { + adminAccountsController, + adminActionController, + adminApproveController, + adminRejectController, +} from '@/controllers/api/admin.ts'; +import { appCredentialsController, createAppController } from '@/controllers/api/apps.ts'; +import { blocksController } from '@/controllers/api/blocks.ts'; +import { bookmarksController } from '@/controllers/api/bookmarks.ts'; +import cashuApp from '@/controllers/api/cashu.ts'; +import { captchaController, captchaVerifyController } from '@/controllers/api/captcha.ts'; +import { + adminRelaysController, + adminSetRelaysController, + deleteZapSplitsController, + getZapSplitsController, + nameRequestController, + nameRequestsController, + statusZapSplitsController, + updateInstanceController, + updateZapSplitsController, +} from '@/controllers/api/ditto.ts'; +import { emptyArrayController, notImplementedController } from '@/controllers/api/fallback.ts'; +import { + instanceDescriptionController, + instanceV1Controller, + instanceV2Controller, +} from '@/controllers/api/instance.ts'; +import { markersController, updateMarkersController } from '@/controllers/api/markers.ts'; +import { mediaController, updateMediaController } from '@/controllers/api/media.ts'; +import { mutesController } from '@/controllers/api/mutes.ts'; +import { notificationController, notificationsController } from 
'@/controllers/api/notifications.ts'; +import { + createTokenController, + oauthAuthorizeController, + oauthController, + revokeTokenController, +} from '@/controllers/api/oauth.ts'; +import { + configController, + frontendConfigController, + pleromaAdminDeleteStatusController, + pleromaAdminSuggestController, + pleromaAdminTagController, + pleromaAdminUnsuggestController, + pleromaAdminUntagController, + updateConfigController, +} from '@/controllers/api/pleroma.ts'; +import { preferencesController } from '@/controllers/api/preferences.ts'; +import { getSubscriptionController, pushSubscribeController } from '@/controllers/api/push.ts'; +import { deleteReactionController, reactionController, reactionsController } from '@/controllers/api/reactions.ts'; +import { relayController } from '@/controllers/nostr/relay.ts'; +import { + adminReportController, + adminReportReopenController, + adminReportResolveController, + adminReportsController, + reportController, +} from '@/controllers/api/reports.ts'; +import { searchController } from '@/controllers/api/search.ts'; +import { + bookmarkController, + contextController, + createStatusController, + deleteStatusController, + favouriteController, + favouritedByController, + pinController, + quotesController, + rebloggedByController, + reblogStatusController, + statusController, + unbookmarkController, + unpinController, + unreblogStatusController, + zapController, + zappedByController, +} from '@/controllers/api/statuses.ts'; +import { streamingController } from '@/controllers/api/streaming.ts'; +import { + localSuggestionsController, + suggestionsV1Controller, + suggestionsV2Controller, +} from '@/controllers/api/suggestions.ts'; +import { + hashtagTimelineController, + homeTimelineController, + publicTimelineController, + suggestedTimelineController, +} from '@/controllers/api/timelines.ts'; +import { + trendingLinksController, + trendingStatusesController, + trendingTagsController, +} from '@/controllers/api/trends.ts'; 
+import { translateController } from '@/controllers/api/translate.ts'; +import { errorHandler } from '@/controllers/error.ts'; +import { frontendController } from '@/controllers/frontend.ts'; +import { metricsController } from '@/controllers/metrics.ts'; +import { manifestController } from '@/controllers/manifest.ts'; +import { nodeInfoController, nodeInfoSchemaController } from '@/controllers/well-known/nodeinfo.ts'; +import { nostrController } from '@/controllers/well-known/nostr.ts'; +import { cacheControlMiddleware } from '@/middleware/cacheControlMiddleware.ts'; +import { cspMiddleware } from '@/middleware/cspMiddleware.ts'; +import { metricsMiddleware } from '@/middleware/metricsMiddleware.ts'; +import { notActivitypubMiddleware } from '@/middleware/notActivitypubMiddleware.ts'; +import { rateLimitMiddleware } from '@/middleware/rateLimitMiddleware.ts'; +import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts'; +import { translatorMiddleware } from '@/middleware/translatorMiddleware.ts'; +import { logiMiddleware } from '@/middleware/logiMiddleware.ts'; +import { DittoRelayStore } from '@/storages/DittoRelayStore.ts'; + +export interface AppEnv extends DittoEnv { + Variables: { + conf: DittoConf; + /** Uploader for the user to upload files. */ + uploader?: NUploader; + /** NIP-98 signed event proving the pubkey is owned by the user. */ + proof?: NostrEvent; + /** Kysely instance for the database. */ + db: DittoDB; + /** Base database store. No content filtering. */ + relay: NRelay; + /** Normalized pagination params. */ + pagination: { since?: number; until?: number; limit: number }; + /** Translation service. */ + translator?: DittoTranslator; + signal: AbortSignal; + user?: { + /** Signer to get the logged-in user's pubkey, relays, and to sign events, or `undefined` if the user isn't logged in. */ + signer: NostrSigner; + /** User's relay. Might filter out unwanted content. 
*/ + relay: NRelay; + }; + }; +} + +type AppContext = Context; +type AppMiddleware = MiddlewareHandler; +// deno-lint-ignore no-explicit-any +type AppController

= Handler>; + +const conf = new DittoConf(Deno.env); + +const db = new DittoPolyPg(conf.databaseUrl, { + poolSize: conf.pg.poolSize, + debug: conf.pgliteDebug, +}); + +await db.migrate(); + +const pgstore = new DittoPgStore({ + db, + pubkey: await conf.signer.getPublicKey(), + timeout: conf.db.timeouts.default, + notify: conf.notifyEnabled, +}); + +const pool = new DittoPool({ conf, relay: pgstore }); +const relay = new DittoRelayStore({ db, conf, relay: pgstore }); + +await seedZapSplits(relay); + +if (conf.firehoseEnabled) { + startFirehose({ + pool, + relay, + concurrency: conf.firehoseConcurrency, + kinds: conf.firehoseKinds, + }); +} + +if (conf.cronEnabled) { + cron({ conf, db, relay }); +} + +const app = new DittoApp({ conf, db, relay }, { strict: false }); + +/** User-provided files in the gitignored `public/` directory. */ +const publicFiles = serveStatic({ root: './public/' }); +/** Static files provided by the Ditto repo, checked into git. */ +const staticFiles = serveStatic({ root: new URL('./static/', import.meta.url).pathname }); + +app.use(cacheControlMiddleware({ noStore: true })); + +const ratelimit = every( + rateLimitMiddleware(30, Time.seconds(5), false), + rateLimitMiddleware(300, Time.minutes(5), false), +); + +const socketTokenMiddleware = tokenMiddleware((c) => { + const token = c.req.header('sec-websocket-protocol'); + if (token) { + return `Bearer ${token}`; + } +}); + +app.use( + '/api/*', + (c, next) => { + c.set('relay', new DittoAPIStore({ relay, pool })); + return next(); + }, + metricsMiddleware, + ratelimit, + paginationMiddleware(), + logiMiddleware, +); + +app.use('/.well-known/*', metricsMiddleware, ratelimit, logiMiddleware); +app.use('/nodeinfo/*', metricsMiddleware, ratelimit, logiMiddleware); +app.use('/oauth/*', metricsMiddleware, ratelimit, logiMiddleware); + +app.get('/api/v1/streaming', socketTokenMiddleware, metricsMiddleware, ratelimit, streamingController); +app.get('/relay', metricsMiddleware, ratelimit, 
relayController); + +app.use( + cspMiddleware(), + cors({ origin: '*', exposeHeaders: ['link'] }), + tokenMiddleware(), + uploaderMiddleware, +); + +app.get('/metrics', async (_c, next) => { + relayPoolRelaysSizeGauge.reset(); + relayPoolSubscriptionsSizeGauge.reset(); + + for (const relay of pool.relays.values()) { + relayPoolRelaysSizeGauge.inc({ ready_state: relay.socket.readyState }); + relayPoolSubscriptionsSizeGauge.inc(relay.subscriptions.length); + } + + await next(); +}, metricsController); + +app.get( + '/.well-known/nodeinfo', + cacheControlMiddleware({ maxAge: 300, staleWhileRevalidate: 300, staleIfError: 21600, public: true }), + nodeInfoController, +); +app.get('/.well-known/nostr.json', nostrController); + +app.get( + '/nodeinfo/:version', + cacheControlMiddleware({ maxAge: 300, staleWhileRevalidate: 300, staleIfError: 21600, public: true }), + nodeInfoSchemaController, +); +app.get( + '/manifest.webmanifest', + cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, staleIfError: 21600, public: true }), + manifestController, +); + +app.get( + '/api/v1/instance', + cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, staleIfError: 21600, public: true }), + instanceV1Controller, +); +app.get( + '/api/v2/instance', + cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, staleIfError: 21600, public: true }), + instanceV2Controller, +); +app.get( + '/api/v1/instance/extended_description', + cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, staleIfError: 21600, public: true }), + instanceDescriptionController, +); + +app.get('/api/v1/apps/verify_credentials', appCredentialsController); +app.post('/api/v1/apps', createAppController); + +app.post('/oauth/token', createTokenController); +app.post('/oauth/revoke', revokeTokenController); +app.post('/oauth/authorize', oauthAuthorizeController); +app.get('/oauth/authorize', oauthController); + +app.post('/api/v1/accounts', userMiddleware({ verify: true }), 
createAccountController); +app.get('/api/v1/accounts/verify_credentials', userMiddleware(), verifyCredentialsController); +app.patch('/api/v1/accounts/update_credentials', userMiddleware(), updateCredentialsController); +app.get('/api/v1/accounts/search', accountSearchController); +app.get('/api/v1/accounts/lookup', accountLookupController); +app.get('/api/v1/accounts/relationships', userMiddleware(), relationshipsController); +app.get('/api/v1/accounts/familiar_followers', userMiddleware(), familiarFollowersController); +app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/block', userMiddleware(), blockController); +app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unblock', userMiddleware(), unblockController); +app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/mute', userMiddleware(), muteController); +app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unmute', userMiddleware(), unmuteController); +app.post( + '/api/v1/accounts/:pubkey{[0-9a-f]{64}}/follow', + rateLimitMiddleware(2, Time.seconds(1)), + userMiddleware(), + followController, +); +app.post( + '/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unfollow', + rateLimitMiddleware(2, Time.seconds(1)), + userMiddleware(), + unfollowController, +); +app.get( + '/api/v1/accounts/:pubkey{[0-9a-f]{64}}/followers', + rateLimitMiddleware(8, Time.seconds(30)), + followersController, +); +app.get( + '/api/v1/accounts/:pubkey{[0-9a-f]{64}}/following', + rateLimitMiddleware(8, Time.seconds(30)), + followingController, +); +app.get( + '/api/v1/accounts/:pubkey{[0-9a-f]{64}}/statuses', + rateLimitMiddleware(12, Time.seconds(30)), + accountStatusesController, +); +app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}', accountController); + +app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/favourited_by', favouritedByController); +app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/reblogged_by', rebloggedByController); +app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/context', contextController); +app.get('/api/v1/statuses/:id{[0-9a-f]{64}}', 
statusController); +app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/favourite', userMiddleware(), favouriteController); +app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/bookmark', userMiddleware(), bookmarkController); +app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unbookmark', userMiddleware(), unbookmarkController); +app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/pin', userMiddleware(), pinController); +app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unpin', userMiddleware(), unpinController); +app.post( + '/api/v1/statuses/:id{[0-9a-f]{64}}/translate', + userMiddleware(), + rateLimitMiddleware(15, Time.minutes(1)), + translatorMiddleware, + translateController, +); +app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/reblog', userMiddleware(), reblogStatusController); +app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unreblog', userMiddleware(), unreblogStatusController); +app.post('/api/v1/statuses', userMiddleware(), createStatusController); +app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}', userMiddleware(), deleteStatusController); + +app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/quotes', quotesController); + +app.post('/api/v1/media', mediaController); +app.put( + '/api/v1/media/:id{[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}}', + updateMediaController, +); +app.post('/api/v2/media', mediaController); + +app.get('/api/v1/timelines/home', rateLimitMiddleware(8, Time.seconds(30)), userMiddleware(), homeTimelineController); +app.get('/api/v1/timelines/public', rateLimitMiddleware(8, Time.seconds(30)), publicTimelineController); +app.get('/api/v1/timelines/tag/:hashtag', rateLimitMiddleware(8, Time.seconds(30)), hashtagTimelineController); +app.get('/api/v1/timelines/suggested', rateLimitMiddleware(8, Time.seconds(30)), suggestedTimelineController); + +app.get('/api/v1/preferences', preferencesController); +app.get('/api/v1/search', searchController); +app.get('/api/v2/search', searchController); + +app.get( + '/api/pleroma/frontend_configurations', + 
cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, staleIfError: 21600, public: true }), + frontendConfigController, +); + +app.get('/api/v1/trends/statuses', rateLimitMiddleware(8, Time.seconds(30)), trendingStatusesController); +app.get( + '/api/v1/trends/links', + cacheControlMiddleware({ maxAge: 300, staleWhileRevalidate: 300, staleIfError: 21600, public: true }), + trendingLinksController, +); +app.get( + '/api/v1/trends/tags', + cacheControlMiddleware({ maxAge: 300, staleWhileRevalidate: 300, staleIfError: 21600, public: true }), + trendingTagsController, +); +app.get( + '/api/v1/trends', + cacheControlMiddleware({ maxAge: 300, staleWhileRevalidate: 300, staleIfError: 21600, public: true }), + trendingTagsController, +); + +app.get('/api/v1/suggestions', suggestionsV1Controller); +app.get('/api/v2/suggestions', suggestionsV2Controller); +app.get('/api/v2/ditto/suggestions/local', localSuggestionsController); + +app.get('/api/v1/notifications', rateLimitMiddleware(8, Time.seconds(30)), userMiddleware(), notificationsController); +app.get('/api/v1/notifications/:id', userMiddleware(), notificationController); + +app.get('/api/v1/favourites', userMiddleware(), favouritesController); +app.get('/api/v1/bookmarks', userMiddleware(), bookmarksController); +app.get('/api/v1/blocks', userMiddleware(), blocksController); +app.get('/api/v1/mutes', userMiddleware(), mutesController); + +app.get('/api/v1/markers', userMiddleware({ verify: true }), markersController); +app.post('/api/v1/markers', userMiddleware({ verify: true }), updateMarkersController); + +app.get('/api/v1/push/subscription', userMiddleware(), getSubscriptionController); +app.post('/api/v1/push/subscription', userMiddleware({ verify: true }), pushSubscribeController); + +app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions', reactionsController); +app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', reactionsController); 
+app.put('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', userMiddleware(), reactionController); +app.delete('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', userMiddleware(), deleteReactionController); + +app.get('/api/v1/pleroma/admin/config', userMiddleware({ role: 'admin' }), configController); +app.post('/api/v1/pleroma/admin/config', userMiddleware({ role: 'admin' }), updateConfigController); +app.delete('/api/v1/pleroma/admin/statuses/:id', userMiddleware({ role: 'admin' }), pleromaAdminDeleteStatusController); + +app.get('/api/v1/admin/ditto/relays', userMiddleware({ role: 'admin' }), adminRelaysController); +app.put('/api/v1/admin/ditto/relays', userMiddleware({ role: 'admin' }), adminSetRelaysController); + +app.put('/api/v1/admin/ditto/instance', userMiddleware({ role: 'admin' }), updateInstanceController); + +app.post('/api/v1/ditto/names', userMiddleware(), nameRequestController); +app.get('/api/v1/ditto/names', userMiddleware(), nameRequestsController); + +app.get('/api/v1/ditto/captcha', rateLimitMiddleware(3, Time.minutes(1)), captchaController); +app.post( + '/api/v1/ditto/captcha/:id/verify', + rateLimitMiddleware(8, Time.minutes(1)), + userMiddleware({ verify: true }), + captchaVerifyController, +); + +app.get( + '/api/v1/ditto/zap_splits', + cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, public: true }), + getZapSplitsController, +); +app.get('/api/v1/ditto/:id{[0-9a-f]{64}}/zap_splits', statusZapSplitsController); + +app.put('/api/v1/admin/ditto/zap_splits', userMiddleware({ role: 'admin' }), updateZapSplitsController); +app.delete('/api/v1/admin/ditto/zap_splits', userMiddleware({ role: 'admin' }), deleteZapSplitsController); + +app.post('/api/v1/ditto/zap', userMiddleware(), zapController); +app.get('/api/v1/ditto/statuses/:id{[0-9a-f]{64}}/zapped_by', zappedByController); + +app.route('/api/v1/ditto/cashu', cashuApp); + +app.post('/api/v1/reports', userMiddleware(), reportController); 
+app.get('/api/v1/admin/reports', userMiddleware(), userMiddleware({ role: 'admin' }), adminReportsController); +app.get( + '/api/v1/admin/reports/:id{[0-9a-f]{64}}', + userMiddleware(), + userMiddleware({ role: 'admin' }), + adminReportController, +); +app.post( + '/api/v1/admin/reports/:id{[0-9a-f]{64}}/resolve', + userMiddleware(), + userMiddleware({ role: 'admin' }), + adminReportResolveController, +); +app.post( + '/api/v1/admin/reports/:id{[0-9a-f]{64}}/reopen', + userMiddleware(), + userMiddleware({ role: 'admin' }), + adminReportReopenController, +); + +app.get('/api/v1/admin/accounts', userMiddleware({ role: 'admin' }), adminAccountsController); +app.post( + '/api/v1/admin/accounts/:id{[0-9a-f]{64}}/action', + userMiddleware(), + userMiddleware({ role: 'admin' }), + adminActionController, +); +app.post( + '/api/v1/admin/accounts/:id{[0-9a-f]{64}}/approve', + userMiddleware(), + userMiddleware({ role: 'admin' }), + adminApproveController, +); +app.post( + '/api/v1/admin/accounts/:id{[0-9a-f]{64}}/reject', + userMiddleware(), + userMiddleware({ role: 'admin' }), + adminRejectController, +); + +app.put('/api/v1/pleroma/admin/users/tag', userMiddleware({ role: 'admin' }), pleromaAdminTagController); +app.delete('/api/v1/pleroma/admin/users/tag', userMiddleware({ role: 'admin' }), pleromaAdminUntagController); +app.patch('/api/v1/pleroma/admin/users/suggest', userMiddleware({ role: 'admin' }), pleromaAdminSuggestController); +app.patch('/api/v1/pleroma/admin/users/unsuggest', userMiddleware({ role: 'admin' }), pleromaAdminUnsuggestController); + +// Not (yet) implemented. 
+app.get('/api/v1/custom_emojis', emptyArrayController); +app.get('/api/v1/filters', emptyArrayController); +app.get('/api/v1/domain_blocks', emptyArrayController); +app.get('/api/v1/conversations', emptyArrayController); +app.get('/api/v1/lists', emptyArrayController); + +app.use('/api/*', notImplementedController); +app.use('/.well-known/*', publicFiles, notImplementedController); +app.use('/nodeinfo/*', notImplementedController); +app.use('/oauth/*', notImplementedController); + +// Known frontend routes +app.get('/:acct{@.*}', frontendController); +app.get('/:acct{@.*}/*', frontendController); +app.get('/:bech32{^[\x21-\x7E]{1,83}1[023456789acdefghjklmnpqrstuvwxyz]{6,}$}', frontendController); +app.get('/users/*', notActivitypubMiddleware, frontendController); +app.get('/tags/*', frontendController); +app.get('/statuses/*', frontendController); +app.get('/notice/*', frontendController); +app.get('/timeline/*', frontendController); + +// Known static file routes +app.get('/sw.js', publicFiles); +app.get( + '/favicon.ico', + cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, staleIfError: 21600, public: true }), + publicFiles, + staticFiles, +); +app.get( + '/images/*', + cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, staleIfError: 21600, public: true }), + publicFiles, + staticFiles, +); +app.get( + '/instance/*', + cacheControlMiddleware({ maxAge: 5, staleWhileRevalidate: 5, staleIfError: 21600, public: true }), + publicFiles, +); + +// Packs contains immutable static files +app.get( + '/packs/*', + cacheControlMiddleware({ + maxAge: 31536000, + staleWhileRevalidate: 86400, + staleIfError: 21600, + public: true, + immutable: true, + }), + publicFiles, +); + +app.get('/', ratelimit, frontendController); +app.get('*', publicFiles, staticFiles, ratelimit, frontendController); + +app.onError(errorHandler); + +export default app; + +export type { AppContext, AppController, AppMiddleware }; diff --git a/src/assets/captcha/bg/A Large Body of 
Water Surrounded By Mountains.jpg b/packages/ditto/assets/captcha/bg/A Large Body of Water Surrounded By Mountains.jpg similarity index 100% rename from src/assets/captcha/bg/A Large Body of Water Surrounded By Mountains.jpg rename to packages/ditto/assets/captcha/bg/A Large Body of Water Surrounded By Mountains.jpg diff --git a/src/assets/captcha/bg/A Trail of Footprints In The Sand.jpg b/packages/ditto/assets/captcha/bg/A Trail of Footprints In The Sand.jpg similarity index 100% rename from src/assets/captcha/bg/A Trail of Footprints In The Sand.jpg rename to packages/ditto/assets/captcha/bg/A Trail of Footprints In The Sand.jpg diff --git a/src/assets/captcha/bg/Ashim DSilva.jpg b/packages/ditto/assets/captcha/bg/Ashim DSilva.jpg similarity index 100% rename from src/assets/captcha/bg/Ashim DSilva.jpg rename to packages/ditto/assets/captcha/bg/Ashim DSilva.jpg diff --git a/src/assets/captcha/bg/Canazei Granite Ridges.jpg b/packages/ditto/assets/captcha/bg/Canazei Granite Ridges.jpg similarity index 100% rename from src/assets/captcha/bg/Canazei Granite Ridges.jpg rename to packages/ditto/assets/captcha/bg/Canazei Granite Ridges.jpg diff --git a/src/assets/captcha/bg/Martin Adams.jpg b/packages/ditto/assets/captcha/bg/Martin Adams.jpg similarity index 100% rename from src/assets/captcha/bg/Martin Adams.jpg rename to packages/ditto/assets/captcha/bg/Martin Adams.jpg diff --git a/src/assets/captcha/bg/Morskie Oko.jpg b/packages/ditto/assets/captcha/bg/Morskie Oko.jpg similarity index 100% rename from src/assets/captcha/bg/Morskie Oko.jpg rename to packages/ditto/assets/captcha/bg/Morskie Oko.jpg diff --git a/src/assets/captcha/bg/Mr. Lee.jpg b/packages/ditto/assets/captcha/bg/Mr. Lee.jpg similarity index 100% rename from src/assets/captcha/bg/Mr. Lee.jpg rename to packages/ditto/assets/captcha/bg/Mr. 
Lee.jpg diff --git a/src/assets/captcha/bg/Nattu Adnan.jpg b/packages/ditto/assets/captcha/bg/Nattu Adnan.jpg similarity index 100% rename from src/assets/captcha/bg/Nattu Adnan.jpg rename to packages/ditto/assets/captcha/bg/Nattu Adnan.jpg diff --git a/src/assets/captcha/bg/Photo by SpaceX.jpg b/packages/ditto/assets/captcha/bg/Photo by SpaceX.jpg similarity index 100% rename from src/assets/captcha/bg/Photo by SpaceX.jpg rename to packages/ditto/assets/captcha/bg/Photo by SpaceX.jpg diff --git a/src/assets/captcha/bg/Photo of Valley.jpg b/packages/ditto/assets/captcha/bg/Photo of Valley.jpg similarity index 100% rename from src/assets/captcha/bg/Photo of Valley.jpg rename to packages/ditto/assets/captcha/bg/Photo of Valley.jpg diff --git a/src/assets/captcha/bg/Snow-Capped Mountain.jpg b/packages/ditto/assets/captcha/bg/Snow-Capped Mountain.jpg similarity index 100% rename from src/assets/captcha/bg/Snow-Capped Mountain.jpg rename to packages/ditto/assets/captcha/bg/Snow-Capped Mountain.jpg diff --git a/src/assets/captcha/bg/Sunset by the Pier.jpg b/packages/ditto/assets/captcha/bg/Sunset by the Pier.jpg similarity index 100% rename from src/assets/captcha/bg/Sunset by the Pier.jpg rename to packages/ditto/assets/captcha/bg/Sunset by the Pier.jpg diff --git a/src/assets/captcha/bg/Tj Holowaychuk.jpg b/packages/ditto/assets/captcha/bg/Tj Holowaychuk.jpg similarity index 100% rename from src/assets/captcha/bg/Tj Holowaychuk.jpg rename to packages/ditto/assets/captcha/bg/Tj Holowaychuk.jpg diff --git a/src/assets/captcha/bg/Viktor Forgacs.jpg b/packages/ditto/assets/captcha/bg/Viktor Forgacs.jpg similarity index 100% rename from src/assets/captcha/bg/Viktor Forgacs.jpg rename to packages/ditto/assets/captcha/bg/Viktor Forgacs.jpg diff --git a/src/assets/captcha/bg/copyright.txt b/packages/ditto/assets/captcha/bg/copyright.txt similarity index 100% rename from src/assets/captcha/bg/copyright.txt rename to packages/ditto/assets/captcha/bg/copyright.txt diff --git 
a/src/assets/captcha/puzzle-hole.png b/packages/ditto/assets/captcha/puzzle-hole.png similarity index 100% rename from src/assets/captcha/puzzle-hole.png rename to packages/ditto/assets/captcha/puzzle-hole.png diff --git a/src/assets/captcha/puzzle-hole.svg b/packages/ditto/assets/captcha/puzzle-hole.svg similarity index 100% rename from src/assets/captcha/puzzle-hole.svg rename to packages/ditto/assets/captcha/puzzle-hole.svg diff --git a/src/assets/captcha/puzzle-mask.png b/packages/ditto/assets/captcha/puzzle-mask.png similarity index 100% rename from src/assets/captcha/puzzle-mask.png rename to packages/ditto/assets/captcha/puzzle-mask.png diff --git a/src/assets/captcha/puzzle-mask.svg b/packages/ditto/assets/captcha/puzzle-mask.svg similarity index 100% rename from src/assets/captcha/puzzle-mask.svg rename to packages/ditto/assets/captcha/puzzle-mask.svg diff --git a/packages/ditto/caches/pipelineEncounters.ts b/packages/ditto/caches/pipelineEncounters.ts new file mode 100644 index 00000000..491a416f --- /dev/null +++ b/packages/ditto/caches/pipelineEncounters.ts @@ -0,0 +1,3 @@ +import { LRUCache } from 'lru-cache'; + +export const pipelineEncounters = new LRUCache({ max: 5000 }); diff --git a/src/caches/translationCache.ts b/packages/ditto/caches/translationCache.ts similarity index 100% rename from src/caches/translationCache.ts rename to packages/ditto/caches/translationCache.ts diff --git a/packages/ditto/config.ts b/packages/ditto/config.ts new file mode 100644 index 00000000..59554920 --- /dev/null +++ b/packages/ditto/config.ts @@ -0,0 +1,4 @@ +import { DittoConf } from '@ditto/conf'; + +/** @deprecated Use middleware to set/get the config instead. 
*/ +export const Conf = new DittoConf(Deno.env); diff --git a/src/controllers/api/accounts.ts b/packages/ditto/controllers/api/accounts.ts similarity index 69% rename from src/controllers/api/accounts.ts rename to packages/ditto/controllers/api/accounts.ts index 03284a15..ad9dde19 100644 --- a/src/controllers/api/accounts.ts +++ b/packages/ditto/controllers/api/accounts.ts @@ -1,15 +1,14 @@ -import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify'; +import { paginated } from '@ditto/mastoapi/pagination'; +import { NostrEvent, NostrFilter, NSchema as n, NStore } from '@nostrify/nostrify'; import { nip19 } from 'nostr-tools'; import { z } from 'zod'; import { type AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { getAuthor, getFollowedPubkeys } from '@/queries.ts'; import { booleanParamSchema, fileSchema } from '@/schema.ts'; -import { Storages } from '@/storages.ts'; import { uploadFile } from '@/utils/upload.ts'; import { nostrNow } from '@/utils.ts'; -import { assertAuthenticated, createEvent, paginated, parseBody, updateEvent, updateListEvent } from '@/utils/api.ts'; +import { assertAuthenticated, createEvent, parseBody, updateEvent, updateListEvent } from '@/utils/api.ts'; import { extractIdentifier, lookupAccount, lookupPubkey } from '@/utils/lookup.ts'; import { renderAccounts, renderEventAccounts, renderStatuses } from '@/views.ts'; import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts'; @@ -22,23 +21,24 @@ import { addTag, deleteTag, findReplyTag, getTagSet } from '@/utils/tags.ts'; import { getPubkeysBySearch } from '@/utils/search.ts'; import { MastodonAccount } from '@/entities/MastodonAccount.ts'; -const usernameSchema = z - .string().min(1).max(30) - .regex(/^[a-z0-9_]+$/i) - .refine((username) => !Conf.forbiddenUsernames.includes(username), 'Username is reserved.'); - const createAccountSchema = z.object({ - username: usernameSchema, + username: 
z.string().min(1).max(30).regex(/^[a-z0-9_]+$/i), }); const createAccountController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { user } = c.var; + + const pubkey = await user!.signer.getPublicKey(); const result = createAccountSchema.safeParse(await c.req.json()); if (!result.success) { return c.json({ error: 'Bad request', schema: result.error }, 400); } + if (c.var.conf.forbiddenUsernames.includes(result.data.username)) { + return c.json({ error: 'Username is reserved.' }, 422); + } + return c.json({ access_token: nip19.npubEncode(pubkey), token_type: 'Bearer', @@ -48,15 +48,15 @@ const createAccountController: AppController = async (c) => { }; const verifyCredentialsController: AppController = async (c) => { - const signer = c.get('signer')!; + const { relay, user } = c.var; + + const signer = user!.signer; const pubkey = await signer.getPublicKey(); - const store = await Storages.db(); - const [author, [settingsEvent]] = await Promise.all([ - getAuthor(pubkey, { signal: AbortSignal.timeout(5000) }), + getAuthor(pubkey, c.var), - store.query([{ + relay.query([{ kinds: [30078], authors: [pubkey], '#d': ['pub.ditto.pleroma_settings_store'], @@ -72,8 +72,8 @@ const verifyCredentialsController: AppController = async (c) => { } const account = author - ? await renderAccount(author, { withSource: true, settingsStore }) - : await accountFromPubkey(pubkey, { withSource: true, settingsStore }); + ? 
renderAccount(author, { withSource: true, settingsStore }) + : accountFromPubkey(pubkey, { withSource: true, settingsStore }); return c.json(account); }; @@ -81,7 +81,7 @@ const verifyCredentialsController: AppController = async (c) => { const accountController: AppController = async (c) => { const pubkey = c.req.param('pubkey'); - const event = await getAuthor(pubkey); + const event = await getAuthor(pubkey, c.var); if (event) { assertAuthenticated(c, event); return c.json(await renderAccount(event)); @@ -97,14 +97,14 @@ const accountLookupController: AppController = async (c) => { return c.json({ error: 'Missing `acct` query parameter.' }, 422); } - const event = await lookupAccount(decodeURIComponent(acct)); + const event = await lookupAccount(decodeURIComponent(acct), c.var); if (event) { assertAuthenticated(c, event); - return c.json(await renderAccount(event)); + return c.json(renderAccount(event)); } try { const pubkey = bech32ToPubkey(decodeURIComponent(acct)); - return c.json(await accountFromPubkey(pubkey!)); + return c.json(accountFromPubkey(pubkey!)); } catch { return c.json({ error: 'Could not find user.' 
}, 404); } @@ -112,15 +112,15 @@ const accountLookupController: AppController = async (c) => { const accountSearchQuerySchema = z.object({ q: z.string().transform(decodeURIComponent), - resolve: booleanParamSchema.optional().transform(Boolean), + resolve: booleanParamSchema.optional(), following: z.boolean().default(false), }); const accountSearchController: AppController = async (c) => { - const { signal } = c.req.raw; - const { limit } = c.get('pagination'); - const kysely = await Storages.kysely(); - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const { db, relay, user, pagination, signal } = c.var; + const { limit } = pagination; + + const viewerPubkey = await user?.signer.getPublicKey(); const result = accountSearchQuerySchema.safeParse(c.req.query()); @@ -129,51 +129,51 @@ const accountSearchController: AppController = async (c) => { } const query = decodeURIComponent(result.data.q); - const store = await Storages.search(); const lookup = extractIdentifier(query); - const event = await lookupAccount(lookup ?? query); + const event = await lookupAccount(lookup ?? query, c.var); if (!event && lookup) { - const pubkey = await lookupPubkey(lookup); - return c.json(pubkey ? [await accountFromPubkey(pubkey)] : []); + const pubkey = await lookupPubkey(lookup, c.var); + return c.json(pubkey ? [accountFromPubkey(pubkey)] : []); } - const followedPubkeys: Set = viewerPubkey ? await getFollowedPubkeys(viewerPubkey) : new Set(); - const pubkeys = Array.from(await getPubkeysBySearch(kysely, { q: query, limit, offset: 0, followedPubkeys })); + const events: NostrEvent[] = []; - let events = event ? [event] : await store.query([{ kinds: [0], authors: pubkeys, limit }], { - signal, - }); + if (event) { + events.push(event); + } else { + const following = viewerPubkey ? 
await getFollowedPubkeys(relay, viewerPubkey, signal) : new Set(); + const authors = [...await getPubkeysBySearch(db.kysely, { q: query, limit, offset: 0, following })]; + const profiles = await relay.query([{ kinds: [0], authors, limit }], { signal }); - if (!event) { - events = pubkeys - .map((pubkey) => events.find((event) => event.pubkey === pubkey)) - .filter((event) => !!event); + for (const pubkey of authors) { + const profile = profiles.find((event) => event.pubkey === pubkey); + if (profile) { + events.push(profile); + } + } } - const accounts = await hydrateEvents({ events, store, signal }).then( - (events) => - Promise.all( - events.map((event) => renderAccount(event)), - ), - ); + + const accounts = await hydrateEvents({ ...c.var, events }) + .then((events) => events.map((event) => renderAccount(event))); return c.json(accounts); }; const relationshipsController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { relay, user } = c.var; + + const pubkey = await user!.signer.getPublicKey(); const ids = z.array(z.string()).safeParse(c.req.queries('id[]')); if (!ids.success) { return c.json({ error: 'Missing `id[]` query parameters.' 
}, 422); } - const db = await Storages.db(); - const [sourceEvents, targetEvents] = await Promise.all([ - db.query([{ kinds: [3, 10000], authors: [pubkey] }]), - db.query([{ kinds: [3], authors: ids.data }]), + relay.query([{ kinds: [3, 10000], authors: [pubkey] }]), + relay.query([{ kinds: [3], authors: ids.data }]), ]); const event3 = sourceEvents.find((event) => event.kind === 3 && event.pubkey === pubkey); @@ -197,33 +197,37 @@ const accountStatusesQuerySchema = z.object({ limit: z.coerce.number().nonnegative().transform((v) => Math.min(v, 40)).catch(20), exclude_replies: booleanParamSchema.optional(), tagged: z.string().optional(), + only_media: booleanParamSchema.optional(), }); const accountStatusesController: AppController = async (c) => { + const { conf, user, signal } = c.var; + const pubkey = c.req.param('pubkey'); - const { since, until } = c.get('pagination'); - const { pinned, limit, exclude_replies, tagged } = accountStatusesQuerySchema.parse(c.req.query()); - const { signal } = c.req.raw; + const { since, until } = c.var.pagination; + const { pinned, limit, exclude_replies, tagged, only_media } = accountStatusesQuerySchema.parse(c.req.query()); - const store = await Storages.db(); + const { relay } = c.var; - const [[author], [user]] = await Promise.all([ - store.query([{ kinds: [0], authors: [pubkey], limit: 1 }], { signal }), - store.query([{ kinds: [30382], authors: [Conf.pubkey], '#d': [pubkey], limit: 1 }], { signal }), + const [[author], [userEvent]] = await Promise.all([ + relay.query([{ kinds: [0], authors: [pubkey], limit: 1 }], { signal }), + relay.query([{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey], limit: 1 }], { + signal, + }), ]); if (author) { assertAuthenticated(c, author); } - const names = getTagSet(user?.tags ?? [], 'n'); + const names = getTagSet(userEvent?.tags ?? 
[], 'n'); if (names.has('disabled')) { return c.json([]); } if (pinned) { - const [pinEvent] = await store.query([{ kinds: [10001], authors: [pubkey], limit: 1 }], { signal }); + const [pinEvent] = await relay.query([{ kinds: [10001], authors: [pubkey], limit: 1 }], { signal }); if (pinEvent) { const pinnedEventIds = getTagSet(pinEvent.tags, 'e'); return renderStatuses(c, [...pinnedEventIds].reverse()); @@ -234,20 +238,34 @@ const accountStatusesController: AppController = async (c) => { const filter: NostrFilter = { authors: [pubkey], - kinds: [1, 6], + kinds: [1, 6, 20], since, until, limit, }; + const search: string[] = []; + + if (only_media) { + search.push('media:true'); + } + + if (exclude_replies) { + search.push('reply:false'); + } + if (tagged) { filter['#t'] = [tagged]; } - const opts = { signal, limit, timeout: Conf.db.timeouts.timelines }; + if (search.length) { + filter.search = search.join(' '); + } - const events = await store.query([filter], opts) - .then((events) => hydrateEvents({ events, store, signal })) + const opts = { signal, limit, timeout: conf.db.timeouts.timelines }; + + const events = await relay.query([filter], opts) + .then((events) => hydrateEvents({ ...c.var, events })) .then((events) => { if (exclude_replies) { return events.filter((event) => { @@ -258,12 +276,12 @@ const accountStatusesController: AppController = async (c) => { return events; }); - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const viewerPubkey = await user?.signer.getPublicKey(); const statuses = await Promise.all( events.map((event) => { - if (event.kind === 6) return renderReblog(event, { viewerPubkey }); - return renderStatus(event, { viewerPubkey }); + if (event.kind === 6) return renderReblog(relay, event, { viewerPubkey }); + return renderStatus(relay, event, { viewerPubkey }); }), ); return paginated(c, events, statuses); @@ -285,12 +303,11 @@ const updateCredentialsSchema = z.object({ }); const updateCredentialsController: AppController = 
async (c) => { - const signer = c.get('signer')!; - const pubkey = await signer.getPublicKey(); + const { relay, user } = c.var; + + const pubkey = await user!.signer.getPublicKey(); const body = await parseBody(c.req.raw); const result = updateCredentialsSchema.safeParse(body); - const store = await Storages.db(); - const signal = c.req.raw.signal; if (!result.success) { return c.json(result.error, 422); @@ -300,7 +317,7 @@ const updateCredentialsController: AppController = async (c) => { let event: NostrEvent | undefined; if (keys.length === 1 && keys[0] === 'pleroma_settings_store') { - event = (await store.query([{ kinds: [0], authors: [pubkey] }]))[0]; + event = (await relay.query([{ kinds: [0], authors: [pubkey] }]))[0]; } else { event = await updateEvent( { kinds: [0], authors: [pubkey], limit: 1 }, @@ -356,7 +373,7 @@ const updateCredentialsController: AppController = async (c) => { let account: MastodonAccount; if (event) { - await hydrateEvents({ events: [event], store, signal }); + await hydrateEvents({ ...c.var, events: [event] }); account = await renderAccount(event, { withSource: true, settingsStore }); } else { account = await accountFromPubkey(pubkey, { withSource: true, settingsStore }); @@ -375,7 +392,9 @@ const updateCredentialsController: AppController = async (c) => { /** https://docs.joinmastodon.org/methods/accounts/#follow */ const followController: AppController = async (c) => { - const sourcePubkey = await c.get('signer')?.getPublicKey()!; + const { relay, user } = c.var; + + const sourcePubkey = await user!.signer.getPublicKey(); const targetPubkey = c.req.param('pubkey'); await updateListEvent( @@ -384,7 +403,7 @@ const followController: AppController = async (c) => { c, ); - const relationship = await getRelationship(sourcePubkey, targetPubkey); + const relationship = await getRelationship(relay, sourcePubkey, targetPubkey); relationship.following = true; return c.json(relationship); @@ -392,7 +411,9 @@ const followController: 
AppController = async (c) => { /** https://docs.joinmastodon.org/methods/accounts/#unfollow */ const unfollowController: AppController = async (c) => { - const sourcePubkey = await c.get('signer')?.getPublicKey()!; + const { relay, user } = c.var; + + const sourcePubkey = await user!.signer.getPublicKey(); const targetPubkey = c.req.param('pubkey'); await updateListEvent( @@ -401,7 +422,7 @@ const unfollowController: AppController = async (c) => { c, ); - const relationship = await getRelationship(sourcePubkey, targetPubkey); + const relationship = await getRelationship(relay, sourcePubkey, targetPubkey); return c.json(relationship); }; @@ -412,8 +433,9 @@ const followersController: AppController = (c) => { }; const followingController: AppController = async (c) => { + const { relay, signal } = c.var; const pubkey = c.req.param('pubkey'); - const pubkeys = await getFollowedPubkeys(pubkey); + const pubkeys = await getFollowedPubkeys(relay, pubkey, signal); return renderAccounts(c, [...pubkeys]); }; @@ -429,7 +451,9 @@ const unblockController: AppController = (c) => { /** https://docs.joinmastodon.org/methods/accounts/#mute */ const muteController: AppController = async (c) => { - const sourcePubkey = await c.get('signer')?.getPublicKey()!; + const { relay, user } = c.var; + + const sourcePubkey = await user!.signer.getPublicKey(); const targetPubkey = c.req.param('pubkey'); await updateListEvent( @@ -438,13 +462,15 @@ const muteController: AppController = async (c) => { c, ); - const relationship = await getRelationship(sourcePubkey, targetPubkey); + const relationship = await getRelationship(relay, sourcePubkey, targetPubkey); return c.json(relationship); }; /** https://docs.joinmastodon.org/methods/accounts/#unmute */ const unmuteController: AppController = async (c) => { - const sourcePubkey = await c.get('signer')?.getPublicKey()!; + const { relay, user } = c.var; + + const sourcePubkey = await user!.signer.getPublicKey(); const targetPubkey = 
c.req.param('pubkey'); await updateListEvent( @@ -453,19 +479,17 @@ const unmuteController: AppController = async (c) => { c, ); - const relationship = await getRelationship(sourcePubkey, targetPubkey); + const relationship = await getRelationship(relay, sourcePubkey, targetPubkey); return c.json(relationship); }; const favouritesController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; - const params = c.get('pagination'); - const { signal } = c.req.raw; + const { relay, user, pagination, signal } = c.var; - const store = await Storages.db(); + const pubkey = await user!.signer.getPublicKey(); - const events7 = await store.query( - [{ kinds: [7], authors: [pubkey], ...params }], + const events7 = await relay.query( + [{ kinds: [7], authors: [pubkey], ...pagination }], { signal }, ); @@ -473,28 +497,27 @@ const favouritesController: AppController = async (c) => { .map((event) => event.tags.find((tag) => tag[0] === 'e')?.[1]) .filter((id): id is string => !!id); - const events1 = await store.query([{ kinds: [1], ids }], { signal }) - .then((events) => hydrateEvents({ events, store, signal })); + const events1 = await relay.query([{ kinds: [1, 20], ids }], { signal }) + .then((events) => hydrateEvents({ ...c.var, events })); - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const viewerPubkey = await user?.signer.getPublicKey(); const statuses = await Promise.all( - events1.map((event) => renderStatus(event, { viewerPubkey })), + events1.map((event) => renderStatus(relay, event, { viewerPubkey })), ); return paginated(c, events1, statuses); }; const familiarFollowersController: AppController = async (c) => { - const store = await Storages.db(); - const signer = c.get('signer')!; - const pubkey = await signer.getPublicKey(); + const { relay, user, signal } = c.var; + const pubkey = await user!.signer.getPublicKey(); const ids = z.array(z.string()).parse(c.req.queries('id[]')); - const follows = await 
getFollowedPubkeys(pubkey); + const follows = await getFollowedPubkeys(relay, pubkey, signal); const results = await Promise.all(ids.map(async (id) => { - const followLists = await store.query([{ kinds: [3], authors: [...follows], '#p': [id] }]) - .then((events) => hydrateEvents({ events, store })); + const followLists = await relay.query([{ kinds: [3], authors: [...follows], '#p': [id] }]) + .then((events) => hydrateEvents({ ...c.var, events })); const accounts = await Promise.all( followLists.map((event) => event.author ? renderAccount(event.author) : accountFromPubkey(event.pubkey)), @@ -506,12 +529,10 @@ const familiarFollowersController: AppController = async (c) => { return c.json(results); }; -async function getRelationship(sourcePubkey: string, targetPubkey: string) { - const db = await Storages.db(); - +async function getRelationship(relay: NStore, sourcePubkey: string, targetPubkey: string) { const [sourceEvents, targetEvents] = await Promise.all([ - db.query([{ kinds: [3, 10000], authors: [sourcePubkey] }]), - db.query([{ kinds: [3], authors: [targetPubkey] }]), + relay.query([{ kinds: [3, 10000], authors: [sourcePubkey] }]), + relay.query([{ kinds: [3], authors: [targetPubkey] }]), ]); return renderRelationship({ diff --git a/src/controllers/api/admin.ts b/packages/ditto/controllers/api/admin.ts similarity index 70% rename from src/controllers/api/admin.ts rename to packages/ditto/controllers/api/admin.ts index 2a9dae1f..411aa841 100644 --- a/src/controllers/api/admin.ts +++ b/packages/ditto/controllers/api/admin.ts @@ -1,14 +1,15 @@ +import { paginated } from '@ditto/mastoapi/pagination'; import { NostrFilter } from '@nostrify/nostrify'; +import { logi } from '@soapbox/logi'; import { z } from 'zod'; import { type AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { booleanParamSchema } from '@/schema.ts'; -import { Storages } from '@/storages.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { 
createAdminEvent, paginated, parseBody, updateEventInfo, updateUser } from '@/utils/api.ts'; +import { createAdminEvent, parseBody, updateEventInfo, updateUser } from '@/utils/api.ts'; import { renderNameRequest } from '@/views/ditto.ts'; import { renderAdminAccount, renderAdminAccountFromPubkey } from '@/views/mastodon/admin-accounts.ts'; +import { errorJson } from '@/utils/log.ts'; const adminAccountQuerySchema = z.object({ local: booleanParamSchema.optional(), @@ -28,9 +29,8 @@ const adminAccountQuerySchema = z.object({ }); const adminAccountsController: AppController = async (c) => { - const store = await Storages.db(); - const params = c.get('pagination'); - const { signal } = c.req.raw; + const { conf, relay, signal, pagination } = c.var; + const { local, pending, @@ -41,13 +41,15 @@ const adminAccountsController: AppController = async (c) => { staff, } = adminAccountQuerySchema.parse(c.req.query()); + const adminPubkey = await conf.signer.getPublicKey(); + if (pending) { if (disabled || silenced || suspended || sensitized) { return c.json([]); } - const orig = await store.query( - [{ kinds: [30383], authors: [Conf.pubkey], '#k': ['3036'], '#n': ['pending'], ...params }], + const orig = await relay.query( + [{ kinds: [30383], authors: [adminPubkey], '#k': ['3036'], '#n': ['pending'], ...pagination }], { signal }, ); @@ -57,8 +59,8 @@ const adminAccountsController: AppController = async (c) => { .filter((id): id is string => !!id), ); - const events = await store.query([{ kinds: [3036], ids: [...ids] }]) - .then((events) => hydrateEvents({ store, events, signal })); + const events = await relay.query([{ kinds: [3036], ids: [...ids] }]) + .then((events) => hydrateEvents({ ...c.var, events })); const nameRequests = await Promise.all(events.map(renderNameRequest)); return paginated(c, orig, nameRequests); @@ -84,7 +86,10 @@ const adminAccountsController: AppController = async (c) => { n.push('moderator'); } - const events = await store.query([{ kinds: [30382], 
authors: [Conf.pubkey], '#n': n, ...params }], { signal }); + const events = await relay.query( + [{ kinds: [30382], authors: [adminPubkey], '#n': n, ...pagination }], + { signal }, + ); const pubkeys = new Set( events @@ -92,8 +97,8 @@ const adminAccountsController: AppController = async (c) => { .filter((pubkey): pubkey is string => !!pubkey), ); - const authors = await store.query([{ kinds: [0], authors: [...pubkeys] }]) - .then((events) => hydrateEvents({ store, events, signal })); + const authors = await relay.query([{ kinds: [0], authors: [...pubkeys] }]) + .then((events) => hydrateEvents({ ...c.var, events })); const accounts = await Promise.all( [...pubkeys].map((pubkey) => { @@ -105,14 +110,14 @@ const adminAccountsController: AppController = async (c) => { return paginated(c, events, accounts); } - const filter: NostrFilter = { kinds: [0], ...params }; + const filter: NostrFilter = { kinds: [0], ...pagination }; if (local) { - filter.search = `domain:${Conf.url.host}`; + filter.search = `domain:${conf.url.host}`; } - const events = await store.query([filter], { signal }) - .then((events) => hydrateEvents({ store, events, signal })); + const events = await relay.query([filter], { signal }) + .then((events) => hydrateEvents({ ...c.var, events })); const accounts = await Promise.all(events.map(renderAdminAccount)); return paginated(c, events, accounts); @@ -123,8 +128,9 @@ const adminAccountActionSchema = z.object({ }); const adminActionController: AppController = async (c) => { + const { conf, relay } = c.var; + const body = await parseBody(c.req.raw); - const store = await Storages.db(); const result = adminAccountActionSchema.safeParse(body); const authorId = c.req.param('id'); @@ -148,11 +154,17 @@ const adminActionController: AppController = async (c) => { if (data.type === 'suspend') { n.disabled = true; n.suspended = true; - store.remove([{ authors: [authorId] }]).catch(console.warn); + relay.remove!([{ authors: [authorId] }]).catch((e: unknown) => { 
+ logi({ level: 'error', ns: 'ditto.api.admin.account.action', type: data.type, error: errorJson(e) }); + }); } if (data.type === 'revoke_name') { n.revoke_name = true; - store.remove([{ kinds: [30360], authors: [Conf.pubkey], '#p': [authorId] }]).catch(console.warn); + relay.remove!([{ kinds: [30360], authors: [await conf.signer.getPublicKey()], '#p': [authorId] }]).catch( + (e: unknown) => { + logi({ level: 'error', ns: 'ditto.api.admin.account.action', type: data.type, error: errorJson(e) }); + }, + ); } await updateUser(authorId, n, c); @@ -161,10 +173,11 @@ const adminActionController: AppController = async (c) => { }; const adminApproveController: AppController = async (c) => { + const { conf } = c.var; const eventId = c.req.param('id'); - const store = await Storages.db(); + const { relay } = c.var; - const [event] = await store.query([{ kinds: [3036], ids: [eventId] }]); + const [event] = await relay.query([{ kinds: [3036], ids: [eventId] }]); if (!event) { return c.json({ error: 'Event not found' }, 404); } @@ -177,7 +190,10 @@ const adminApproveController: AppController = async (c) => { return c.json({ error: 'Invalid NIP-05' }, 400); } - const [existing] = await store.query([{ kinds: [30360], authors: [Conf.pubkey], '#d': [r], limit: 1 }]); + const [existing] = await relay.query([ + { kinds: [30360], authors: [await conf.signer.getPublicKey()], '#d': [r.toLowerCase()], limit: 1 }, + ]); + if (existing) { return c.json({ error: 'NIP-05 already granted to another user' }, 400); } @@ -185,7 +201,8 @@ const adminApproveController: AppController = async (c) => { await createAdminEvent({ kind: 30360, tags: [ - ['d', r], + ['d', r.toLowerCase()], + ['r', r], ['L', 'nip05.domain'], ['l', r.split('@')[1], 'nip05.domain'], ['p', event.pubkey], @@ -194,7 +211,7 @@ const adminApproveController: AppController = async (c) => { }, c); await updateEventInfo(eventId, { pending: false, approved: true, rejected: false }, c); - await hydrateEvents({ events: [event], store 
}); + await hydrateEvents({ ...c.var, events: [event] }); const nameRequest = await renderNameRequest(event); return c.json(nameRequest); @@ -202,15 +219,15 @@ const adminApproveController: AppController = async (c) => { const adminRejectController: AppController = async (c) => { const eventId = c.req.param('id'); - const store = await Storages.db(); + const { relay } = c.var; - const [event] = await store.query([{ kinds: [3036], ids: [eventId] }]); + const [event] = await relay.query([{ kinds: [3036], ids: [eventId] }]); if (!event) { return c.json({ error: 'Event not found' }, 404); } await updateEventInfo(eventId, { pending: false, approved: false, rejected: true }, c); - await hydrateEvents({ events: [event], store }); + await hydrateEvents({ ...c.var, events: [event] }); const nameRequest = await renderNameRequest(event); return c.json(nameRequest); diff --git a/src/controllers/api/apps.ts b/packages/ditto/controllers/api/apps.ts similarity index 100% rename from src/controllers/api/apps.ts rename to packages/ditto/controllers/api/apps.ts diff --git a/src/controllers/api/blocks.ts b/packages/ditto/controllers/api/blocks.ts similarity index 100% rename from src/controllers/api/blocks.ts rename to packages/ditto/controllers/api/blocks.ts diff --git a/src/controllers/api/bookmarks.ts b/packages/ditto/controllers/api/bookmarks.ts similarity index 71% rename from src/controllers/api/bookmarks.ts rename to packages/ditto/controllers/api/bookmarks.ts index 6d80b500..e5253986 100644 --- a/src/controllers/api/bookmarks.ts +++ b/packages/ditto/controllers/api/bookmarks.ts @@ -1,15 +1,14 @@ import { type AppController } from '@/app.ts'; -import { Storages } from '@/storages.ts'; import { getTagSet } from '@/utils/tags.ts'; import { renderStatuses } from '@/views.ts'; /** https://docs.joinmastodon.org/methods/bookmarks/#get */ const bookmarksController: AppController = async (c) => { - const store = await Storages.db(); - const pubkey = await 
c.get('signer')?.getPublicKey()!; - const { signal } = c.req.raw; + const { relay, user, signal } = c.var; - const [event10003] = await store.query( + const pubkey = await user!.signer.getPublicKey(); + + const [event10003] = await relay.query( [{ kinds: [10003], authors: [pubkey], limit: 1 }], { signal }, ); diff --git a/src/controllers/api/captcha.ts b/packages/ditto/controllers/api/captcha.ts similarity index 96% rename from src/controllers/api/captcha.ts rename to packages/ditto/controllers/api/captcha.ts index 1bb92118..790913af 100644 --- a/src/controllers/api/captcha.ts +++ b/packages/ditto/controllers/api/captcha.ts @@ -3,7 +3,6 @@ import TTLCache from '@isaacs/ttlcache'; import { z } from 'zod'; import { AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { updateUser } from '@/utils/api.ts'; interface Point { @@ -24,6 +23,8 @@ const PUZZLE_SIZE = { w: 65, h: 65 }; /** Puzzle captcha controller. */ export const captchaController: AppController = async (c) => { + const { conf } = c.var; + const { bg, puzzle, solution } = generateCaptcha( await imagesAsync, BG_SIZE, @@ -32,7 +33,7 @@ export const captchaController: AppController = async (c) => { const id = crypto.randomUUID(); const now = new Date(); - const ttl = Conf.captchaTTL; + const ttl = conf.captchaTTL; captchas.set(id, solution, { ttl }); @@ -151,9 +152,11 @@ const pointSchema = z.object({ /** Verify the captcha solution and sign an event in the database. 
*/ export const captchaVerifyController: AppController = async (c) => { + const { user } = c.var; + const id = c.req.param('id'); const result = pointSchema.safeParse(await c.req.json()); - const pubkey = await c.get('signer')!.getPublicKey(); + const pubkey = await user!.signer.getPublicKey(); if (!result.success) { return c.json({ error: 'Invalid input' }, { status: 422 }); @@ -170,7 +173,7 @@ export const captchaVerifyController: AppController = async (c) => { if (solved) { captchas.delete(id); await updateUser(pubkey, { captcha_solved: true }, c); - return new Response(null, { status: 204 }); + return c.newResponse(null, { status: 204 }); } return c.json({ error: 'Incorrect solution' }, { status: 400 }); diff --git a/packages/ditto/controllers/api/cashu.test.ts b/packages/ditto/controllers/api/cashu.test.ts new file mode 100644 index 00000000..75017b11 --- /dev/null +++ b/packages/ditto/controllers/api/cashu.test.ts @@ -0,0 +1,275 @@ +import { DittoConf } from '@ditto/conf'; +import { type User } from '@ditto/mastoapi/middleware'; +import { DittoApp, DittoMiddleware } from '@ditto/mastoapi/router'; +import { NSecSigner } from '@nostrify/nostrify'; +import { genEvent } from '@nostrify/nostrify/test'; +import { bytesToString, stringToBytes } from '@scure/base'; +import { stub } from '@std/testing/mock'; +import { assertEquals, assertExists, assertObjectMatch } from '@std/assert'; +import { generateSecretKey, getPublicKey, nip19 } from 'nostr-tools'; + +import { createTestDB } from '@/test.ts'; + +import cashuRoute from './cashu.ts'; +import { walletSchema } from '@/schema.ts'; + +Deno.test('PUT /wallet must be successful', async () => { + await using test = await createTestRoute(); + + const { route, signer, sk, relay } = test; + const nostrPrivateKey = bytesToString('hex', sk); + + const response = await route.request('/wallet', { + method: 'PUT', + headers: { + 'content-type': 'application/json', + }, + body: JSON.stringify({ + mints: [ + 
'https://houston.mint.com', + 'https://houston.mint.com', // duplicate on purpose + 'https://cuiaba.mint.com', + ], + }), + }); + + assertEquals(response.status, 200); + + const pubkey = await signer.getPublicKey(); + + const [wallet] = await relay.query([{ authors: [pubkey], kinds: [17375] }]); + + assertExists(wallet); + assertEquals(wallet.kind, 17375); + + const { data, success } = walletSchema.safeParse(await response.json()); + + assertEquals(success, true); + if (!data) return; // get rid of typescript error possibly undefined + + const decryptedContent: string[][] = JSON.parse(await signer.nip44.decrypt(pubkey, wallet.content)); + + const privkey = decryptedContent.find(([value]) => value === 'privkey')?.[1]!; + const p2pk = getPublicKey(stringToBytes('hex', privkey)); + + assertEquals(nostrPrivateKey !== privkey, true); + + assertEquals(data.pubkey_p2pk, p2pk); + assertEquals(data.mints, [ + 'https://houston.mint.com', + 'https://cuiaba.mint.com', + ]); + assertEquals(data.relays, [ + 'ws://localhost:4036/relay', + ]); + assertEquals(data.balance, 0); + + const [nutzap_info] = await relay.query([{ authors: [pubkey], kinds: [10019] }]); + + assertExists(nutzap_info); + assertEquals(nutzap_info.kind, 10019); + assertEquals(nutzap_info.tags.length, 4); + + const nutzap_p2pk = nutzap_info.tags.find(([value]) => value === 'pubkey')?.[1]!; + + assertEquals(nutzap_p2pk, p2pk); + assertEquals([nutzap_info.tags.find(([name]) => name === 'relay')?.[1]!], [ + 'ws://localhost:4036/relay', + ]); +}); + +Deno.test('PUT /wallet must NOT be successful: wrong request body/schema', async () => { + await using test = await createTestRoute(); + const { route } = test; + + const response = await route.request('/wallet', { + method: 'PUT', + headers: { + 'content-type': 'application/json', + }, + body: JSON.stringify({ + mints: [], // no mints should throw an error + }), + }); + + const body = await response.json(); + + assertEquals(response.status, 400); + 
assertObjectMatch(body, { error: 'Bad schema' }); +}); + +Deno.test('PUT /wallet must NOT be successful: wallet already exists', async () => { + await using test = await createTestRoute(); + const { route, sk, relay } = test; + + await relay.event(genEvent({ kind: 17375 }, sk)); + + const response = await route.request('/wallet', { + method: 'PUT', + headers: { + 'authorization': `Bearer ${nip19.nsecEncode(sk)}`, + 'content-type': 'application/json', + }, + body: JSON.stringify({ + mints: ['https://mint.heart.com'], + }), + }); + + const body2 = await response.json(); + + assertEquals(response.status, 400); + assertEquals(body2, { error: 'You already have a wallet 😏' }); +}); + +Deno.test('GET /wallet must be successful', async () => { + await using test = await createTestRoute(); + const { route, sk, relay, signer } = test; + + const pubkey = await signer.getPublicKey(); + const privkey = bytesToString('hex', sk); + const p2pk = getPublicKey(stringToBytes('hex', privkey)); + + // Wallet + await relay.event(genEvent({ + kind: 17375, + content: await signer.nip44.encrypt( + pubkey, + JSON.stringify([ + ['privkey', privkey], + ['mint', 'https://mint.soul.com'], + ]), + ), + }, sk)); + + // Nutzap information + await relay.event(genEvent({ + kind: 10019, + tags: [ + ['pubkey', p2pk], + ['mint', 'https://mint.soul.com'], + ], + }, sk)); + + // Unspent proofs + await relay.event(genEvent({ + kind: 7375, + content: await signer.nip44.encrypt( + pubkey, + JSON.stringify({ + mint: 'https://mint.soul.com', + proofs: [ + { + id: '005c2502034d4f12', + amount: 25, + secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=', + C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46', + }, + { + id: '005c2502034d4f12', + amount: 25, + secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=', + C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46', + }, + { + id: '005c2502034d4f12', + amount: 25, + secret: 
'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=', + C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46', + }, + { + id: '005c2502034d4f12', + amount: 25, + secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=', + C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46', + }, + ], + del: [], + }), + ), + }, sk)); + + // TODO: find a way to have a Mock mint so operations like 'swap', 'mint' and 'melt' can be tested (this will be a bit hard). + // Nutzap + const senderSk = generateSecretKey(); + + await relay.event(genEvent({ + kind: 9321, + content: 'Nice post!', + tags: [ + ['p', pubkey], + ['u', 'https://mint.soul.com'], + [ + 'proof', + '{"amount":1,"C":"02277c66191736eb72fce9d975d08e3191f8f96afb73ab1eec37e4465683066d3f","id":"000a93d6f8a1d2c4","secret":"[\\"P2PK\\",{\\"nonce\\":\\"b00bdd0467b0090a25bdf2d2f0d45ac4e355c482c1418350f273a04fedaaee83\\",\\"data\\":\\"02eaee8939e3565e48cc62967e2fde9d8e2a4b3ec0081f29eceff5c64ef10ac1ed\\"}]"}', + ], + ], + }, senderSk)); + + const response = await route.request('/wallet', { + method: 'GET', + }); + + const body = await response.json(); + + assertEquals(response.status, 200); + assertEquals(body, { + pubkey_p2pk: p2pk, + mints: ['https://mint.soul.com'], + relays: ['ws://localhost:4036/relay'], + balance: 100, + }); +}); + +Deno.test('GET /mints must be successful', async () => { + await using test = await createTestRoute(); + const { route } = test; + + const response = await route.request('/mints', { + method: 'GET', + }); + + const body = await response.json(); + + assertEquals(response.status, 200); + assertEquals(body, { mints: [] }); +}); + +async function createTestRoute() { + const conf = new DittoConf(new Map()); + + const db = await createTestDB(); + const relay = db.store; + + const sk = generateSecretKey(); + const signer = new NSecSigner(sk); + + const route = new DittoApp({ db: db.db, relay, conf }); + + route.use(testUserMiddleware({ signer, relay })); + route.route('/', 
cashuRoute); + + const mock = stub(globalThis, 'fetch', () => { + return Promise.resolve(new Response()); + }); + + return { + route, + db, + conf, + sk, + signer, + relay, + [Symbol.asyncDispose]: async () => { + mock.restore(); + await db[Symbol.asyncDispose](); + await relay[Symbol.asyncDispose](); + }, + }; +} + +function testUserMiddleware(user: User): DittoMiddleware<{ user: User }> { + return async (c, next) => { + c.set('user', user); + await next(); + }; +} diff --git a/packages/ditto/controllers/api/cashu.ts b/packages/ditto/controllers/api/cashu.ts new file mode 100644 index 00000000..4546dda3 --- /dev/null +++ b/packages/ditto/controllers/api/cashu.ts @@ -0,0 +1,167 @@ +import { Proof } from '@cashu/cashu-ts'; +import { userMiddleware } from '@ditto/mastoapi/middleware'; +import { DittoRoute } from '@ditto/mastoapi/router'; +import { generateSecretKey, getPublicKey } from 'nostr-tools'; +import { bytesToString, stringToBytes } from '@scure/base'; +import { z } from 'zod'; + +import { createEvent, parseBody } from '@/utils/api.ts'; +import { walletSchema } from '@/schema.ts'; +import { swapNutzapsMiddleware } from '@/middleware/swapNutzapsMiddleware.ts'; +import { isNostrId } from '@/utils.ts'; +import { logi } from '@soapbox/logi'; +import { errorJson } from '@/utils/log.ts'; + +type Wallet = z.infer; + +const route = new DittoRoute(); + +// app.delete('/wallet') -> 204 + +// app.post(swapMiddleware, '/nutzap'); + +/* GET /api/v1/ditto/cashu/wallet -> Wallet, 404 */ +/* PUT /api/v1/ditto/cashu/wallet -> Wallet */ +/* DELETE /api/v1/ditto/cashu/wallet -> 204 */ + +interface Nutzap { + amount: number; + event_id?: string; + mint: string; // mint the nutzap was created + recipient_pubkey: string; +} + +const createCashuWalletAndNutzapInfoSchema = z.object({ + mints: z.array(z.string().url()).nonempty().transform((val) => { + return [...new Set(val)]; + }), +}); + +/** + * Creates a replaceable Cashu wallet and a replaceable nutzap information event. 
+ * https://github.com/nostr-protocol/nips/blob/master/60.md + * https://github.com/nostr-protocol/nips/blob/master/61.md#nutzap-informational-event + */ +route.put('/wallet', userMiddleware({ enc: 'nip44' }), async (c) => { + const { conf, user, relay, signal } = c.var; + + const pubkey = await user.signer.getPublicKey(); + const body = await parseBody(c.req.raw); + const result = createCashuWalletAndNutzapInfoSchema.safeParse(body); + + if (!result.success) { + return c.json({ error: 'Bad schema', schema: result.error }, 400); + } + + const { mints } = result.data; + + const [event] = await relay.query([{ authors: [pubkey], kinds: [17375] }], { signal }); + if (event) { + return c.json({ error: 'You already have a wallet 😏' }, 400); + } + + const walletContentTags: string[][] = []; + + const sk = generateSecretKey(); + const privkey = bytesToString('hex', sk); + const p2pk = getPublicKey(stringToBytes('hex', privkey)); + + walletContentTags.push(['privkey', privkey]); + + for (const mint of mints) { + walletContentTags.push(['mint', mint]); + } + + const encryptedWalletContentTags = await user.signer.nip44.encrypt(pubkey, JSON.stringify(walletContentTags)); + + // Wallet + await createEvent({ + kind: 17375, + content: encryptedWalletContentTags, + }, c); + + // Nutzap information + await createEvent({ + kind: 10019, + tags: [ + ...mints.map((mint) => ['mint', mint, 'sat']), + ['relay', conf.relay], // TODO: add more relays once things get more stable + ['pubkey', p2pk], + ], + }, c); + + // TODO: hydrate wallet and add a 'balance' field when a 'renderWallet' view function is created + const walletEntity: Wallet = { + pubkey_p2pk: p2pk, + mints, + relays: [conf.relay], + balance: 0, // Newly created wallet, balance is zero. + }; + + return c.json(walletEntity, 200); +}); + +/** Gets a wallet, if it exists. 
*/ +route.get('/wallet', userMiddleware({ enc: 'nip44' }), swapNutzapsMiddleware, async (c) => { + const { conf, relay, user, signal } = c.var; + + const pubkey = await user.signer.getPublicKey(); + + const [event] = await relay.query([{ authors: [pubkey], kinds: [17375] }], { signal }); + if (!event) { + return c.json({ error: 'Wallet not found' }, 404); + } + + const decryptedContent: string[][] = JSON.parse(await user.signer.nip44.decrypt(pubkey, event.content)); + + const privkey = decryptedContent.find(([value]) => value === 'privkey')?.[1]; + if (!privkey || !isNostrId(privkey)) { + return c.json({ error: 'Wallet does not contain privkey or privkey is not a valid nostr id.' }, 422); + } + + const p2pk = getPublicKey(stringToBytes('hex', privkey)); + + let balance = 0; + const mints: string[] = []; + + const tokens = await relay.query([{ authors: [pubkey], kinds: [7375] }], { signal }); + for (const token of tokens) { + try { + const decryptedContent: { mint: string; proofs: Proof[] } = JSON.parse( + await user.signer.nip44.decrypt(pubkey, token.content), + ); + + if (!mints.includes(decryptedContent.mint)) { + mints.push(decryptedContent.mint); + } + + balance += decryptedContent.proofs.reduce((accumulator, current) => { + return accumulator + current.amount; + }, 0); + } catch (e) { + logi({ level: 'error', ns: 'ditto.api.cashu.wallet.swap', error: errorJson(e) }); + } + } + + // TODO: maybe change the 'Wallet' type data structure so each mint is a key and the value are the tokens associated with a given mint + const walletEntity: Wallet = { + pubkey_p2pk: p2pk, + mints, + relays: [conf.relay], + balance, + }; + + return c.json(walletEntity, 200); +}); + +/** Get mints set by the CASHU_MINTS environment variable. 
*/ +route.get('/mints', (c) => { + const { conf } = c.var; + + // TODO: Return full Mint information: https://github.com/cashubtc/nuts/blob/main/06.md + const mints = conf.cashuMints; + + return c.json({ mints }, 200); +}); + +export default route; diff --git a/src/controllers/api/ditto.ts b/packages/ditto/controllers/api/ditto.ts similarity index 70% rename from src/controllers/api/ditto.ts rename to packages/ditto/controllers/api/ditto.ts index 765862ec..38c72eb4 100644 --- a/src/controllers/api/ditto.ts +++ b/packages/ditto/controllers/api/ditto.ts @@ -1,27 +1,35 @@ +import { paginated } from '@ditto/mastoapi/pagination'; import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify'; import { z } from 'zod'; import { AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { addTag } from '@/utils/tags.ts'; +import { DittoEvent } from '@/interfaces/DittoEvent.ts'; import { getAuthor } from '@/queries.ts'; -import { createEvent, paginated, parseBody, updateAdminEvent } from '@/utils/api.ts'; +import { addTag } from '@/utils/tags.ts'; +import { createEvent, parseBody, updateAdminEvent } from '@/utils/api.ts'; import { getInstanceMetadata } from '@/utils/instance.ts'; import { deleteTag } from '@/utils/tags.ts'; -import { DittoEvent } from '@/interfaces/DittoEvent.ts'; import { DittoZapSplits, getZapSplits } from '@/utils/zap-split.ts'; -import { AdminSigner } from '@/signers/AdminSigner.ts'; import { screenshotsSchema } from '@/schemas/nostr.ts'; -import { booleanParamSchema, percentageSchema, wsUrlSchema } from '@/schema.ts'; +import { booleanParamSchema, percentageSchema } from '@/schema.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; import { renderNameRequest } from '@/views/ditto.ts'; import { accountFromPubkey } from '@/views/mastodon/accounts.ts'; import { renderAccount } from '@/views/mastodon/accounts.ts'; -import { Storages } from '@/storages.ts'; import { updateListAdminEvent } from '@/utils/api.ts'; const 
markerSchema = z.enum(['read', 'write']); +/** WebSocket URL. */ +const wsUrlSchema = z.string().refine((val): val is `wss://${string}` | `ws://${string}` => { + try { + const { protocol } = new URL(val); + return protocol === 'wss:' || protocol === 'ws:'; + } catch { + return false; + } +}, 'Invalid WebSocket URL'); + const relaySchema = z.object({ url: wsUrlSchema, marker: markerSchema.optional(), @@ -30,10 +38,10 @@ const relaySchema = z.object({ type RelayEntity = z.infer; export const adminRelaysController: AppController = async (c) => { - const store = await Storages.db(); + const { conf, relay } = c.var; - const [event] = await store.query([ - { kinds: [10002], authors: [Conf.pubkey], limit: 1 }, + const [event] = await relay.query([ + { kinds: [10002], authors: [await conf.signer.getPublicKey()], limit: 1 }, ]); if (!event) { @@ -44,17 +52,17 @@ export const adminRelaysController: AppController = async (c) => { }; export const adminSetRelaysController: AppController = async (c) => { - const store = await Storages.db(); + const { conf, relay } = c.var; const relays = relaySchema.array().parse(await c.req.json()); - const event = await new AdminSigner().signEvent({ + const event = await conf.signer.signEvent({ kind: 10002, tags: relays.map(({ url, marker }) => marker ? ['r', url, marker] : ['r', url]), content: '', created_at: Math.floor(Date.now() / 1000), }); - await store.event(event); + await relay.event(event); return c.json(renderRelays(event)); }; @@ -64,7 +72,7 @@ function renderRelays(event: NostrEvent): RelayEntity[] { return event.tags.reduce((acc, [name, url, marker]) => { if (name === 'r') { const relay: RelayEntity = { - url, + url: url as `wss://${string}`, marker: markerSchema.safeParse(marker).success ? 
marker as 'read' | 'write' : undefined, }; acc.push(relay); @@ -79,29 +87,40 @@ const nameRequestSchema = z.object({ }); export const nameRequestController: AppController = async (c) => { - const store = await Storages.db(); - const signer = c.get('signer')!; - const pubkey = await signer.getPublicKey(); + const { conf, relay, user } = c.var; - const { name, reason } = nameRequestSchema.parse(await c.req.json()); + const pubkey = await user!.signer.getPublicKey(); + const result = nameRequestSchema.safeParse(await c.req.json()); - const [existing] = await store.query([{ kinds: [3036], authors: [pubkey], '#r': [name], limit: 1 }]); + if (!result.success) { + return c.json({ error: 'Invalid username', schema: result.error }, 400); + } + + const { name, reason } = result.data; + + const [existing] = await relay.query([{ kinds: [3036], authors: [pubkey], '#r': [name.toLowerCase()], limit: 1 }]); if (existing) { return c.json({ error: 'Name request already exists' }, 400); } + const r: string[][] = [['r', name]]; + + if (name !== name.toLowerCase()) { + r.push(['r', name.toLowerCase()]); + } + const event = await createEvent({ kind: 3036, content: reason, tags: [ - ['r', name], + ...r, ['L', 'nip05.domain'], ['l', name.split('@')[1], 'nip05.domain'], - ['p', Conf.pubkey], + ['p', await conf.signer.getPublicKey()], ], }, c); - await hydrateEvents({ events: [event], store: await Storages.db() }); + await hydrateEvents({ ...c.var, events: [event] }); const nameRequest = await renderNameRequest(event); return c.json(nameRequest); @@ -113,16 +132,15 @@ const nameRequestsSchema = z.object({ }); export const nameRequestsController: AppController = async (c) => { - const store = await Storages.db(); - const signer = c.get('signer')!; - const pubkey = await signer.getPublicKey(); + const { conf, relay, user } = c.var; + const pubkey = await user!.signer.getPublicKey(); const params = c.get('pagination'); const { approved, rejected } = nameRequestsSchema.parse(c.req.query()); 
const filter: NostrFilter = { kinds: [30383], - authors: [Conf.pubkey], + authors: [await conf.signer.getPublicKey()], '#k': ['3036'], '#p': [pubkey], ...params, @@ -135,7 +153,7 @@ export const nameRequestsController: AppController = async (c) => { filter['#n'] = ['rejected']; } - const orig = await store.query([filter]); + const orig = await relay.query([filter]); const ids = new Set(); for (const event of orig) { @@ -149,8 +167,8 @@ export const nameRequestsController: AppController = async (c) => { return c.json([]); } - const events = await store.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }]) - .then((events) => hydrateEvents({ store, events: events, signal: c.req.raw.signal })); + const events = await relay.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }]) + .then((events) => hydrateEvents({ ...c.var, events })); const nameRequests = await Promise.all( events.map((event) => renderNameRequest(event)), @@ -168,15 +186,17 @@ const zapSplitSchema = z.record( ); export const updateZapSplitsController: AppController = async (c) => { + const { conf, relay } = c.var; const body = await parseBody(c.req.raw); const result = zapSplitSchema.safeParse(body); - const store = c.get('store'); if (!result.success) { return c.json({ error: result.error }, 400); } - const dittoZapSplit = await getZapSplits(store, Conf.pubkey); + const adminPubkey = await conf.signer.getPublicKey(); + + const dittoZapSplit = await getZapSplits(relay, adminPubkey); if (!dittoZapSplit) { return c.json({ error: 'Zap split not activated, restart the server.' 
}, 404); } @@ -185,11 +205,11 @@ export const updateZapSplitsController: AppController = async (c) => { const pubkeys = Object.keys(data); if (pubkeys.length < 1) { - return c.json(200); + return c.newResponse(null, { status: 204 }); } await updateListAdminEvent( - { kinds: [30078], authors: [Conf.pubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 }, + { kinds: [30078], authors: [adminPubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 }, (tags) => pubkeys.reduce((accumulator, pubkey) => { return addTag(accumulator, ['p', pubkey, data[pubkey].weight.toString(), data[pubkey].message]); @@ -197,21 +217,23 @@ export const updateZapSplitsController: AppController = async (c) => { c, ); - return c.json(200); + return c.newResponse(null, { status: 204 }); }; const deleteZapSplitSchema = z.array(n.id()).min(1); export const deleteZapSplitsController: AppController = async (c) => { + const { conf, relay } = c.var; const body = await parseBody(c.req.raw); const result = deleteZapSplitSchema.safeParse(body); - const store = c.get('store'); if (!result.success) { return c.json({ error: result.error }, 400); } - const dittoZapSplit = await getZapSplits(store, Conf.pubkey); + const adminPubkey = await conf.signer.getPublicKey(); + + const dittoZapSplit = await getZapSplits(relay, adminPubkey); if (!dittoZapSplit) { return c.json({ error: 'Zap split not activated, restart the server.' 
}, 404); } @@ -219,7 +241,7 @@ export const deleteZapSplitsController: AppController = async (c) => { const { data } = result; await updateListAdminEvent( - { kinds: [30078], authors: [Conf.pubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 }, + { kinds: [30078], authors: [adminPubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 }, (tags) => data.reduce((accumulator, currentValue) => { return deleteTag(accumulator, ['p', currentValue]); @@ -227,13 +249,13 @@ export const deleteZapSplitsController: AppController = async (c) => { c, ); - return c.json(200); + return c.newResponse(null, { status: 204 }); }; export const getZapSplitsController: AppController = async (c) => { - const store = c.get('store'); + const { conf, relay } = c.var; - const dittoZapSplit: DittoZapSplits | undefined = await getZapSplits(store, Conf.pubkey) ?? {}; + const dittoZapSplit: DittoZapSplits | undefined = await getZapSplits(relay, await conf.signer.getPublicKey()) ?? {}; if (!dittoZapSplit) { return c.json({ error: 'Zap split not activated, restart the server.' }, 404); } @@ -241,9 +263,9 @@ export const getZapSplitsController: AppController = async (c) => { const pubkeys = Object.keys(dittoZapSplit); const zapSplits = await Promise.all(pubkeys.map(async (pubkey) => { - const author = await getAuthor(pubkey); + const author = await getAuthor(pubkey, c.var); - const account = author ? await renderAccount(author) : await accountFromPubkey(pubkey); + const account = author ? 
renderAccount(author) : accountFromPubkey(pubkey); return { account, @@ -256,11 +278,11 @@ export const getZapSplitsController: AppController = async (c) => { }; export const statusZapSplitsController: AppController = async (c) => { - const store = c.get('store'); - const id = c.req.param('id'); - const { signal } = c.req.raw; + const { relay, signal } = c.var; - const [event] = await store.query([{ kinds: [1], ids: [id], limit: 1 }], { signal }); + const id = c.req.param('id'); + + const [event] = await relay.query([{ kinds: [1, 20], ids: [id], limit: 1 }], { signal }); if (!event) { return c.json({ error: 'Event not found' }, 404); } @@ -269,12 +291,12 @@ export const statusZapSplitsController: AppController = async (c) => { const pubkeys = zapsTag.map((name) => name[1]); - const users = await store.query([{ authors: pubkeys, kinds: [0], limit: pubkeys.length }], { signal }); - await hydrateEvents({ events: users, store, signal }); + const users = await relay.query([{ authors: pubkeys, kinds: [0], limit: pubkeys.length }], { signal }); + await hydrateEvents({ ...c.var, events: users }); - const zapSplits = (await Promise.all(pubkeys.map(async (pubkey) => { + const zapSplits = (await Promise.all(pubkeys.map((pubkey) => { const author = (users.find((event) => event.pubkey === pubkey) as DittoEvent | undefined)?.author; - const account = author ? await renderAccount(author) : await accountFromPubkey(pubkey); + const account = author ? renderAccount(author) : accountFromPubkey(pubkey); const weight = percentageSchema.catch(0).parse(zapsTag.find((name) => name[1] === pubkey)![3]) ?? 
0; @@ -303,15 +325,17 @@ const updateInstanceSchema = z.object({ }); export const updateInstanceController: AppController = async (c) => { + const { conf, relay, signal } = c.var; + const body = await parseBody(c.req.raw); const result = updateInstanceSchema.safeParse(body); - const pubkey = Conf.pubkey; + const pubkey = await conf.signer.getPublicKey(); if (!result.success) { return c.json(result.error, 422); } - const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal); + const meta = await getInstanceMetadata(relay, signal); await updateAdminEvent( { kinds: [0], authors: [pubkey], limit: 1 }, @@ -340,5 +364,5 @@ export const updateInstanceController: AppController = async (c) => { c, ); - return c.json(204); + return c.newResponse(null, { status: 204 }); }; diff --git a/packages/ditto/controllers/api/fallback.ts b/packages/ditto/controllers/api/fallback.ts new file mode 100644 index 00000000..5794c544 --- /dev/null +++ b/packages/ditto/controllers/api/fallback.ts @@ -0,0 +1,13 @@ +import { Handler } from '@hono/hono'; + +const emptyArrayController: Handler = (c) => { + c.header('Cache-Control', 'max-age=300, public, stale-while-revalidate=60'); + return c.json([]); +}; + +const notImplementedController: Handler = (c) => { + c.header('Cache-Control', 'max-age=300, public, stale-while-revalidate=60'); + return c.json({ error: 'Not implemented' }, 404); +}; + +export { emptyArrayController, notImplementedController }; diff --git a/src/controllers/api/instance.ts b/packages/ditto/controllers/api/instance.ts similarity index 76% rename from src/controllers/api/instance.ts rename to packages/ditto/controllers/api/instance.ts index 92e517c3..1fb742e5 100644 --- a/src/controllers/api/instance.ts +++ b/packages/ditto/controllers/api/instance.ts @@ -1,8 +1,6 @@ import denoJson from 'deno.json' with { type: 'json' }; import { AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { Storages } from '@/storages.ts'; import { 
getInstanceMetadata } from '@/utils/instance.ts'; const version = `3.0.0 (compatible; Ditto ${denoJson.version})`; @@ -17,8 +15,9 @@ const features = [ ]; const instanceV1Controller: AppController = async (c) => { - const { host, protocol } = Conf.url; - const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal); + const { conf, relay, signal } = c.var; + const { host, protocol } = conf.url; + const meta = await getInstanceMetadata(relay, signal); /** Protocol to use for WebSocket URLs, depending on the protocol of the `LOCAL_DOMAIN`. */ const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:'; @@ -29,7 +28,7 @@ const instanceV1Controller: AppController = async (c) => { description: meta.about, short_description: meta.tagline, registrations: true, - max_toot_chars: Conf.postCharLimit, + max_toot_chars: conf.postCharLimit, configuration: { media_attachments: { image_size_limit: 100000000, @@ -42,7 +41,7 @@ const instanceV1Controller: AppController = async (c) => { min_expiration: 0, }, statuses: { - max_characters: Conf.postCharLimit, + max_characters: conf.postCharLimit, max_media_attachments: 20, }, }, @@ -50,9 +49,9 @@ const instanceV1Controller: AppController = async (c) => { metadata: { features, fields_limits: { - max_fields: Conf.profileFields.maxFields, - name_length: Conf.profileFields.nameLength, - value_length: Conf.profileFields.valueLength, + max_fields: conf.profileFields.maxFields, + name_length: conf.profileFields.nameLength, + value_length: conf.profileFields.valueLength, }, }, }, @@ -68,7 +67,7 @@ const instanceV1Controller: AppController = async (c) => { version, email: meta.email, nostr: { - pubkey: Conf.pubkey, + pubkey: await conf.signer.getPublicKey(), relay: `${wsProtocol}//${host}/relay`, }, rules: [], @@ -76,8 +75,9 @@ const instanceV1Controller: AppController = async (c) => { }; const instanceV2Controller: AppController = async (c) => { - const { host, protocol } = Conf.url; - const meta = await getInstanceMetadata(await 
Storages.db(), c.req.raw.signal); + const { conf, relay, signal } = c.var; + const { host, protocol } = conf.url; + const meta = await getInstanceMetadata(relay, signal); /** Protocol to use for WebSocket URLs, depending on the protocol of the `LOCAL_DOMAIN`. */ const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:'; @@ -111,15 +111,15 @@ const instanceV2Controller: AppController = async (c) => { streaming: `${wsProtocol}//${host}`, }, vapid: { - public_key: await Conf.vapidPublicKey, + public_key: await conf.vapidPublicKey, }, accounts: { max_featured_tags: 10, max_pinned_statuses: 5, }, statuses: { - max_characters: Conf.postCharLimit, - max_media_attachments: 4, + max_characters: conf.postCharLimit, + max_media_attachments: 20, characters_reserved_per_url: 23, }, media_attachments: { @@ -136,20 +136,20 @@ const instanceV2Controller: AppController = async (c) => { max_expiration: 2629746, }, translation: { - enabled: Boolean(Conf.translationProvider), + enabled: Boolean(conf.translationProvider), }, }, nostr: { - pubkey: Conf.pubkey, + pubkey: await conf.signer.getPublicKey(), relay: `${wsProtocol}//${host}/relay`, }, pleroma: { metadata: { features, fields_limits: { - max_fields: Conf.profileFields.maxFields, - name_length: Conf.profileFields.nameLength, - value_length: Conf.profileFields.valueLength, + max_fields: conf.profileFields.maxFields, + name_length: conf.profileFields.nameLength, + value_length: conf.profileFields.valueLength, }, }, }, @@ -164,7 +164,9 @@ const instanceV2Controller: AppController = async (c) => { }; const instanceDescriptionController: AppController = async (c) => { - const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal); + const { relay, signal } = c.var; + + const meta = await getInstanceMetadata(relay, signal); return c.json({ content: meta.about, diff --git a/src/controllers/api/markers.ts b/packages/ditto/controllers/api/markers.ts similarity index 91% rename from src/controllers/api/markers.ts rename to 
packages/ditto/controllers/api/markers.ts index 005ebbe5..7e7cb8dd 100644 --- a/src/controllers/api/markers.ts +++ b/packages/ditto/controllers/api/markers.ts @@ -14,7 +14,9 @@ interface Marker { } export const markersController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { user } = c.var; + + const pubkey = await user!.signer.getPublicKey(); const timelines = c.req.queries('timeline[]') ?? []; const results = await kv.getMany( @@ -37,7 +39,9 @@ const markerDataSchema = z.object({ }); export const updateMarkersController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { user } = c.var; + + const pubkey = await user!.signer.getPublicKey(); const record = z.record(z.enum(['home', 'notifications']), markerDataSchema).parse(await parseBody(c.req.raw)); const timelines = Object.keys(record) as Timeline[]; diff --git a/src/controllers/api/media.ts b/packages/ditto/controllers/api/media.ts similarity index 87% rename from src/controllers/api/media.ts rename to packages/ditto/controllers/api/media.ts index 7dc398ca..c6c6b062 100644 --- a/src/controllers/api/media.ts +++ b/packages/ditto/controllers/api/media.ts @@ -1,11 +1,13 @@ +import { logi } from '@soapbox/logi'; import { z } from 'zod'; import { AppController } from '@/app.ts'; +import { dittoUploads } from '@/DittoUploads.ts'; import { fileSchema } from '@/schema.ts'; import { parseBody } from '@/utils/api.ts'; import { renderAttachment } from '@/views/mastodon/attachments.ts'; +import { errorJson } from '@/utils/log.ts'; import { uploadFile } from '@/utils/upload.ts'; -import { dittoUploads } from '@/DittoUploads.ts'; const mediaBodySchema = z.object({ file: fileSchema, @@ -19,9 +21,10 @@ const mediaUpdateSchema = z.object({ }); const mediaController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { user, signal } = c.var; + + const pubkey = await 
user!.signer.getPublicKey(); const result = mediaBodySchema.safeParse(await parseBody(c.req.raw)); - const { signal } = c.req.raw; if (!result.success) { return c.json({ error: 'Bad request.', schema: result.error }, 422); @@ -32,7 +35,7 @@ const mediaController: AppController = async (c) => { const media = await uploadFile(c, file, { pubkey, description }, signal); return c.json(renderAttachment(media)); } catch (e) { - console.error(e); + logi({ level: 'error', ns: 'ditto.api.media', error: errorJson(e) }); return c.json({ error: 'Failed to upload file.' }, 500); } }; diff --git a/src/controllers/api/mutes.ts b/packages/ditto/controllers/api/mutes.ts similarity index 70% rename from src/controllers/api/mutes.ts rename to packages/ditto/controllers/api/mutes.ts index 90b5f545..9ce9c5e9 100644 --- a/src/controllers/api/mutes.ts +++ b/packages/ditto/controllers/api/mutes.ts @@ -1,15 +1,14 @@ import { type AppController } from '@/app.ts'; -import { Storages } from '@/storages.ts'; import { getTagSet } from '@/utils/tags.ts'; import { renderAccounts } from '@/views.ts'; /** https://docs.joinmastodon.org/methods/mutes/#get */ const mutesController: AppController = async (c) => { - const store = await Storages.db(); - const pubkey = await c.get('signer')?.getPublicKey()!; - const { signal } = c.req.raw; + const { relay, user, signal } = c.var; - const [event10000] = await store.query( + const pubkey = await user!.signer.getPublicKey(); + + const [event10000] = await relay.query( [{ kinds: [10000], authors: [pubkey], limit: 1 }], { signal }, ); diff --git a/src/controllers/api/notifications.ts b/packages/ditto/controllers/api/notifications.ts similarity index 75% rename from src/controllers/api/notifications.ts rename to packages/ditto/controllers/api/notifications.ts index 1c251563..53edf354 100644 --- a/src/controllers/api/notifications.ts +++ b/packages/ditto/controllers/api/notifications.ts @@ -1,11 +1,10 @@ +import { paginated } from '@ditto/mastoapi/pagination'; 
import { NostrFilter, NSchema as n } from '@nostrify/nostrify'; import { z } from 'zod'; import { AppContext, AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { DittoPagination } from '@/interfaces/DittoPagination.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { paginated } from '@/utils/api.ts'; import { renderNotification } from '@/views/mastodon/notifications.ts'; /** Set of known notification types across backends. */ @@ -31,7 +30,9 @@ const notificationsSchema = z.object({ }); const notificationsController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { conf, user } = c.var; + + const pubkey = await user!.signer.getPublicKey(); const params = c.get('pagination'); const types = notificationTypes @@ -68,29 +69,30 @@ const notificationsController: AppController = async (c) => { } if (types.has('ditto:name_grant') && !account_id) { - filters.push({ kinds: [30360], authors: [Conf.pubkey], '#p': [pubkey], ...params }); + filters.push({ kinds: [30360], authors: [await conf.signer.getPublicKey()], '#p': [pubkey], ...params }); } return renderNotifications(filters, types, params, c); }; const notificationController: AppController = async (c) => { + const { relay, user } = c.var; + const id = c.req.param('id'); - const pubkey = await c.get('signer')?.getPublicKey()!; - const store = c.get('store'); + const pubkey = await user!.signer.getPublicKey(); // Remove the timestamp from the ID. 
const eventId = id.replace(/^\d+-/, ''); - const [event] = await store.query([{ ids: [eventId] }]); + const [event] = await relay.query([{ ids: [eventId] }]); if (!event) { return c.json({ error: 'Event not found' }, { status: 404 }); } - await hydrateEvents({ events: [event], store }); + await hydrateEvents({ ...c.var, events: [event] }); - const notification = await renderNotification(event, { viewerPubkey: pubkey }); + const notification = await renderNotification(relay, event, { viewerPubkey: pubkey }); if (!notification) { return c.json({ error: 'Notification not found' }, { status: 404 }); @@ -105,22 +107,23 @@ async function renderNotifications( params: DittoPagination, c: AppContext, ) { - const store = c.get('store'); - const pubkey = await c.get('signer')?.getPublicKey()!; - const { signal } = c.req.raw; - const opts = { signal, limit: params.limit, timeout: Conf.db.timeouts.timelines }; + const { conf, user, signal } = c.var; - const events = await store + const relay = user!.relay; + const pubkey = await user!.signer.getPublicKey(); + const opts = { signal, limit: params.limit, timeout: conf.db.timeouts.timelines }; + + const events = await relay .query(filters, opts) .then((events) => events.filter((event) => event.pubkey !== pubkey)) - .then((events) => hydrateEvents({ events, store, signal })); + .then((events) => hydrateEvents({ ...c.var, events })); if (!events.length) { return c.json([]); } const notifications = (await Promise.all(events.map((event) => { - return renderNotification(event, { viewerPubkey: pubkey }); + return renderNotification(relay, event, { viewerPubkey: pubkey }); }))) .filter((notification) => notification && types.has(notification.type)); diff --git a/src/controllers/api/oauth.ts b/packages/ditto/controllers/api/oauth.ts similarity index 92% rename from src/controllers/api/oauth.ts rename to packages/ditto/controllers/api/oauth.ts index 2804df60..aa4ed125 100644 --- a/src/controllers/api/oauth.ts +++ 
b/packages/ditto/controllers/api/oauth.ts @@ -3,9 +3,7 @@ import { escape } from 'entities'; import { generateSecretKey } from 'nostr-tools'; import { z } from 'zod'; -import { AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { Storages } from '@/storages.ts'; +import { AppContext, AppController } from '@/app.ts'; import { nostrNow } from '@/utils.ts'; import { parseBody } from '@/utils/api.ts'; import { aesEncrypt } from '@/utils/aes.ts'; @@ -40,6 +38,8 @@ const createTokenSchema = z.discriminatedUnion('grant_type', [ ]); const createTokenController: AppController = async (c) => { + const { conf } = c.var; + const body = await parseBody(c.req.raw); const result = createTokenSchema.safeParse(body); @@ -50,7 +50,7 @@ const createTokenController: AppController = async (c) => { switch (result.data.grant_type) { case 'nostr_bunker': return c.json({ - access_token: await getToken(result.data), + access_token: await getToken(c, result.data, conf.seckey), token_type: 'Bearer', scope: 'read write follow push', created_at: nostrNow(), @@ -90,6 +90,8 @@ const revokeTokenSchema = z.object({ * https://docs.joinmastodon.org/methods/oauth/#revoke */ const revokeTokenController: AppController = async (c) => { + const { db } = c.var; + const body = await parseBody(c.req.raw); const result = revokeTokenSchema.safeParse(body); @@ -99,10 +101,9 @@ const revokeTokenController: AppController = async (c) => { const { token } = result.data; - const kysely = await Storages.kysely(); const tokenHash = await getTokenHash(token as `token1${string}`); - await kysely + await db.kysely .deleteFrom('auth_tokens') .where('token_hash', '=', tokenHash) .execute(); @@ -111,9 +112,11 @@ const revokeTokenController: AppController = async (c) => { }; async function getToken( + c: AppContext, { pubkey: bunkerPubkey, secret, relays = [] }: { pubkey: string; secret?: string; relays?: string[] }, + dittoSeckey: Uint8Array, ): Promise<`token1${string}`> { - const kysely = await 
Storages.kysely(); + const { db, relay } = c.var; const { token, hash } = await generateToken(); const nip46Seckey = generateSecretKey(); @@ -122,18 +125,18 @@ async function getToken( encryption: 'nip44', pubkey: bunkerPubkey, signer: new NSecSigner(nip46Seckey), - relay: await Storages.pubsub(), // TODO: Use the relays from the request. + relay, timeout: 60_000, }); await signer.connect(secret); const userPubkey = await signer.getPublicKey(); - await kysely.insertInto('auth_tokens').values({ + await db.kysely.insertInto('auth_tokens').values({ token_hash: hash, pubkey: userPubkey, bunker_pubkey: bunkerPubkey, - nip46_sk_enc: await aesEncrypt(Conf.seckey, nip46Seckey), + nip46_sk_enc: await aesEncrypt(dittoSeckey, nip46Seckey), nip46_relays: relays, created_at: new Date(), }).execute(); @@ -143,6 +146,7 @@ async function getToken( /** Display the OAuth form. */ const oauthController: AppController = (c) => { + const { conf } = c.var; const encodedUri = c.req.query('redirect_uri'); if (!encodedUri) { return c.text('Missing `redirect_uri` query param.', 422); @@ -192,7 +196,7 @@ const oauthController: AppController = (c) => { -

Sign in with a Nostr bunker app. Please configure the app to use this relay: ${Conf.relay}

+

Sign in with a Nostr bunker app. Please configure the app to use this relay: ${conf.relay}

`); @@ -220,6 +224,8 @@ const oauthAuthorizeSchema = z.object({ /** Controller the OAuth form is POSTed to. */ const oauthAuthorizeController: AppController = async (c) => { + const { conf } = c.var; + /** FormData results in JSON. */ const result = oauthAuthorizeSchema.safeParse(await parseBody(c.req.raw)); @@ -232,11 +238,11 @@ const oauthAuthorizeController: AppController = async (c) => { const bunker = new URL(bunker_uri); - const token = await getToken({ + const token = await getToken(c, { pubkey: bunker.hostname, secret: bunker.searchParams.get('secret') || undefined, relays: bunker.searchParams.getAll('relay'), - }); + }, conf.seckey); if (redirectUri === 'urn:ietf:wg:oauth:2.0:oob') { return c.text(token); diff --git a/src/controllers/api/pleroma.ts b/packages/ditto/controllers/api/pleroma.ts similarity index 77% rename from src/controllers/api/pleroma.ts rename to packages/ditto/controllers/api/pleroma.ts index d9289df1..ef27696d 100644 --- a/src/controllers/api/pleroma.ts +++ b/packages/ditto/controllers/api/pleroma.ts @@ -1,17 +1,15 @@ import { z } from 'zod'; import { type AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { configSchema, elixirTupleSchema } from '@/schemas/pleroma-api.ts'; -import { AdminSigner } from '@/signers/AdminSigner.ts'; -import { Storages } from '@/storages.ts'; import { createAdminEvent, updateAdminEvent, updateUser } from '@/utils/api.ts'; import { lookupPubkey } from '@/utils/lookup.ts'; import { getPleromaConfigs } from '@/utils/pleroma.ts'; const frontendConfigController: AppController = async (c) => { - const store = await Storages.db(); - const configDB = await getPleromaConfigs(store, c.req.raw.signal); + const { relay, signal } = c.var; + + const configDB = await getPleromaConfigs(relay, signal); const frontendConfig = configDB.get(':pleroma', ':frontend_configurations'); if (frontendConfig) { @@ -27,24 +25,24 @@ const frontendConfigController: AppController = async (c) => { }; const 
configController: AppController = async (c) => { - const store = await Storages.db(); - const configs = await getPleromaConfigs(store, c.req.raw.signal); + const { relay, signal } = c.var; + + const configs = await getPleromaConfigs(relay, signal); return c.json({ configs, need_reboot: false }); }; /** Pleroma admin config controller. */ const updateConfigController: AppController = async (c) => { - const { pubkey } = Conf; + const { conf, relay, signal } = c.var; - const store = await Storages.db(); - const configs = await getPleromaConfigs(store, c.req.raw.signal); + const configs = await getPleromaConfigs(relay, signal); const { configs: newConfigs } = z.object({ configs: z.array(configSchema) }).parse(await c.req.json()); configs.merge(newConfigs); await createAdminEvent({ kind: 30078, - content: await new AdminSigner().nip44.encrypt(pubkey, JSON.stringify(configs)), + content: await conf.signer.nip44.encrypt(await conf.signer.getPublicKey(), JSON.stringify(configs)), tags: [ ['d', 'pub.ditto.pleroma.config'], ['encrypted', 'nip44'], @@ -69,14 +67,15 @@ const pleromaAdminTagSchema = z.object({ }); const pleromaAdminTagController: AppController = async (c) => { + const { conf } = c.var; const params = pleromaAdminTagSchema.parse(await c.req.json()); for (const nickname of params.nicknames) { - const pubkey = await lookupPubkey(nickname); + const pubkey = await lookupPubkey(nickname, c.var); if (!pubkey) continue; await updateAdminEvent( - { kinds: [30382], authors: [Conf.pubkey], '#d': [pubkey], limit: 1 }, + { kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey], limit: 1 }, (prev) => { const tags = prev?.tags ?? 
[['d', pubkey]]; @@ -97,18 +96,19 @@ const pleromaAdminTagController: AppController = async (c) => { ); } - return new Response(null, { status: 204 }); + return c.newResponse(null, { status: 204 }); }; const pleromaAdminUntagController: AppController = async (c) => { + const { conf } = c.var; const params = pleromaAdminTagSchema.parse(await c.req.json()); for (const nickname of params.nicknames) { - const pubkey = await lookupPubkey(nickname); + const pubkey = await lookupPubkey(nickname, c.var); if (!pubkey) continue; await updateAdminEvent( - { kinds: [30382], authors: [Conf.pubkey], '#d': [pubkey], limit: 1 }, + { kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey], limit: 1 }, (prev) => ({ kind: 30382, content: prev?.content ?? '', @@ -119,7 +119,7 @@ const pleromaAdminUntagController: AppController = async (c) => { ); } - return new Response(null, { status: 204 }); + return c.newResponse(null, { status: 204 }); }; const pleromaAdminSuggestSchema = z.object({ @@ -130,24 +130,24 @@ const pleromaAdminSuggestController: AppController = async (c) => { const { nicknames } = pleromaAdminSuggestSchema.parse(await c.req.json()); for (const nickname of nicknames) { - const pubkey = await lookupPubkey(nickname); + const pubkey = await lookupPubkey(nickname, c.var); if (!pubkey) continue; await updateUser(pubkey, { suggested: true }, c); } - return new Response(null, { status: 204 }); + return c.newResponse(null, { status: 204 }); }; const pleromaAdminUnsuggestController: AppController = async (c) => { const { nicknames } = pleromaAdminSuggestSchema.parse(await c.req.json()); for (const nickname of nicknames) { - const pubkey = await lookupPubkey(nickname); + const pubkey = await lookupPubkey(nickname, c.var); if (!pubkey) continue; await updateUser(pubkey, { suggested: false }, c); } - return new Response(null, { status: 204 }); + return c.newResponse(null, { status: 204 }); }; export { diff --git a/src/controllers/api/preferences.ts 
b/packages/ditto/controllers/api/preferences.ts similarity index 100% rename from src/controllers/api/preferences.ts rename to packages/ditto/controllers/api/preferences.ts diff --git a/src/controllers/api/push.ts b/packages/ditto/controllers/api/push.ts similarity index 90% rename from src/controllers/api/push.ts rename to packages/ditto/controllers/api/push.ts index 0fa7c107..c99963aa 100644 --- a/src/controllers/api/push.ts +++ b/packages/ditto/controllers/api/push.ts @@ -3,8 +3,6 @@ import { nip19 } from 'nostr-tools'; import { z } from 'zod'; import { AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { Storages } from '@/storages.ts'; import { parseBody } from '@/utils/api.ts'; import { getTokenHash } from '@/utils/auth.ts'; @@ -43,16 +41,15 @@ const pushSubscribeSchema = z.object({ }); export const pushSubscribeController: AppController = async (c) => { - const vapidPublicKey = await Conf.vapidPublicKey; + const { conf, db, user } = c.var; + const vapidPublicKey = await conf.vapidPublicKey; if (!vapidPublicKey) { return c.json({ error: 'The administrator of this server has not enabled Web Push notifications.' 
}, 404); } const accessToken = getAccessToken(c.req.raw); - - const kysely = await Storages.kysely(); - const signer = c.get('signer')!; + const signer = user!.signer; const result = pushSubscribeSchema.safeParse(await parseBody(c.req.raw)); @@ -65,7 +62,7 @@ export const pushSubscribeController: AppController = async (c) => { const pubkey = await signer.getPublicKey(); const tokenHash = await getTokenHash(accessToken); - const { id } = await kysely.transaction().execute(async (trx) => { + const { id } = await db.kysely.transaction().execute(async (trx) => { await trx .deleteFrom('push_subscriptions') .where('token_hash', '=', tokenHash) @@ -97,7 +94,8 @@ export const pushSubscribeController: AppController = async (c) => { }; export const getSubscriptionController: AppController = async (c) => { - const vapidPublicKey = await Conf.vapidPublicKey; + const { conf, db } = c.var; + const vapidPublicKey = await conf.vapidPublicKey; if (!vapidPublicKey) { return c.json({ error: 'The administrator of this server has not enabled Web Push notifications.' 
}, 404); @@ -105,10 +103,9 @@ export const getSubscriptionController: AppController = async (c) => { const accessToken = getAccessToken(c.req.raw); - const kysely = await Storages.kysely(); const tokenHash = await getTokenHash(accessToken); - const row = await kysely + const row = await db.kysely .selectFrom('push_subscriptions') .selectAll() .where('token_hash', '=', tokenHash) diff --git a/src/controllers/api/reactions.ts b/packages/ditto/controllers/api/reactions.ts similarity index 78% rename from src/controllers/api/reactions.ts rename to packages/ditto/controllers/api/reactions.ts index b7a18549..3f5fda42 100644 --- a/src/controllers/api/reactions.ts +++ b/packages/ditto/controllers/api/reactions.ts @@ -1,7 +1,6 @@ import { AppController } from '@/app.ts'; import { DittoEvent } from '@/interfaces/DittoEvent.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { Storages } from '@/storages.ts'; import { createEvent } from '@/utils/api.ts'; import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts'; import { renderStatus } from '@/views/mastodon/statuses.ts'; @@ -11,16 +10,15 @@ import { renderStatus } from '@/views/mastodon/statuses.ts'; * https://docs.pleroma.social/backend/development/API/pleroma_api/#put-apiv1pleromastatusesidreactionsemoji */ const reactionController: AppController = async (c) => { + const { relay, user } = c.var; const id = c.req.param('id'); const emoji = c.req.param('emoji'); - const signer = c.get('signer')!; if (!/^\p{RGI_Emoji}$/v.test(emoji)) { return c.json({ error: 'Invalid emoji' }, 400); } - const store = await Storages.db(); - const [event] = await store.query([{ kinds: [1], ids: [id], limit: 1 }]); + const [event] = await relay.query([{ kinds: [1, 20], ids: [id], limit: 1 }]); if (!event) { return c.json({ error: 'Status not found' }, 404); @@ -32,9 +30,9 @@ const reactionController: AppController = async (c) => { tags: [['e', id], ['p', event.pubkey]], }, c); - await hydrateEvents({ events: 
[event], store }); + await hydrateEvents({ ...c.var, events: [event] }); - const status = await renderStatus(event, { viewerPubkey: await signer.getPublicKey() }); + const status = await renderStatus(relay, event, { viewerPubkey: await user!.signer.getPublicKey() }); return c.json(status); }; @@ -44,25 +42,25 @@ const reactionController: AppController = async (c) => { * https://docs.pleroma.social/backend/development/API/pleroma_api/#delete-apiv1pleromastatusesidreactionsemoji */ const deleteReactionController: AppController = async (c) => { + const { relay, user } = c.var; + const id = c.req.param('id'); const emoji = c.req.param('emoji'); - const signer = c.get('signer')!; - const pubkey = await signer.getPublicKey(); - const store = await Storages.db(); + const pubkey = await user!.signer.getPublicKey(); if (!/^\p{RGI_Emoji}$/v.test(emoji)) { return c.json({ error: 'Invalid emoji' }, 400); } - const [event] = await store.query([ - { kinds: [1], ids: [id], limit: 1 }, + const [event] = await relay.query([ + { kinds: [1, 20], ids: [id], limit: 1 }, ]); if (!event) { return c.json({ error: 'Status not found' }, 404); } - const events = await store.query([ + const events = await relay.query([ { kinds: [7], authors: [pubkey], '#e': [id] }, ]); @@ -76,7 +74,7 @@ const deleteReactionController: AppController = async (c) => { tags, }, c); - const status = renderStatus(event, { viewerPubkey: pubkey }); + const status = renderStatus(relay, event, { viewerPubkey: pubkey }); return c.json(status); }; @@ -86,19 +84,20 @@ const deleteReactionController: AppController = async (c) => { * https://docs.pleroma.social/backend/development/API/pleroma_api/#get-apiv1pleromastatusesidreactions */ const reactionsController: AppController = async (c) => { + const { relay, user } = c.var; + const id = c.req.param('id'); - const store = await Storages.db(); - const pubkey = await c.get('signer')?.getPublicKey(); + const pubkey = await user?.signer.getPublicKey(); const emoji = 
c.req.param('emoji') as string | undefined; if (typeof emoji === 'string' && !/^\p{RGI_Emoji}$/v.test(emoji)) { return c.json({ error: 'Invalid emoji' }, 400); } - const events = await store.query([{ kinds: [7], '#e': [id], limit: 100 }]) + const events = await relay.query([{ kinds: [7], '#e': [id], limit: 100 }]) .then((events) => events.filter(({ content }) => /^\p{RGI_Emoji}$/v.test(content))) .then((events) => events.filter((event) => !emoji || event.content === emoji)) - .then((events) => hydrateEvents({ events, store })); + .then((events) => hydrateEvents({ ...c.var, events })); /** Events grouped by emoji. */ const byEmoji = events.reduce((acc, event) => { diff --git a/src/controllers/api/reports.ts b/packages/ditto/controllers/api/reports.ts similarity index 68% rename from src/controllers/api/reports.ts rename to packages/ditto/controllers/api/reports.ts index 97d08751..66dde2e2 100644 --- a/src/controllers/api/reports.ts +++ b/packages/ditto/controllers/api/reports.ts @@ -1,9 +1,9 @@ +import { paginated } from '@ditto/mastoapi/pagination'; import { NostrFilter, NSchema as n } from '@nostrify/nostrify'; import { z } from 'zod'; import { type AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { createEvent, paginated, parseBody, updateEventInfo } from '@/utils/api.ts'; +import { createEvent, parseBody, updateEventInfo } from '@/utils/api.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; import { renderAdminReport } from '@/views/mastodon/reports.ts'; import { renderReport } from '@/views/mastodon/reports.ts'; @@ -19,7 +19,8 @@ const reportSchema = z.object({ /** https://docs.joinmastodon.org/methods/reports/#post */ const reportController: AppController = async (c) => { - const store = c.get('store'); + const { conf } = c.var; + const body = await parseBody(c.req.raw); const result = reportSchema.safeParse(body); @@ -36,7 +37,7 @@ const reportController: AppController = async (c) => { const tags = [ ['p', account_id, 
category], - ['P', Conf.pubkey], + ['P', await conf.signer.getPublicKey()], ]; for (const status of status_ids) { @@ -49,7 +50,7 @@ const reportController: AppController = async (c) => { tags, }, c); - await hydrateEvents({ events: [event], store }); + await hydrateEvents({ ...c.var, events: [event] }); return c.json(await renderReport(event)); }; @@ -61,17 +62,16 @@ const adminReportsSchema = z.object({ /** https://docs.joinmastodon.org/methods/admin/reports/#get */ const adminReportsController: AppController = async (c) => { - const store = c.get('store'); - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const { conf, relay, user, pagination } = c.var; - const params = c.get('pagination'); + const viewerPubkey = await user?.signer.getPublicKey(); const { resolved, account_id, target_account_id } = adminReportsSchema.parse(c.req.query()); const filter: NostrFilter = { kinds: [30383], - authors: [Conf.pubkey], + authors: [await conf.signer.getPublicKey()], '#k': ['1984'], - ...params, + ...pagination, }; if (typeof resolved === 'boolean') { @@ -84,7 +84,7 @@ const adminReportsController: AppController = async (c) => { filter['#P'] = [target_account_id]; } - const orig = await store.query([filter]); + const orig = await relay.query([filter]); const ids = new Set(); for (const event of orig) { @@ -94,11 +94,11 @@ const adminReportsController: AppController = async (c) => { } } - const events = await store.query([{ kinds: [1984], ids: [...ids] }]) - .then((events) => hydrateEvents({ store, events: events, signal: c.req.raw.signal })); + const events = await relay.query([{ kinds: [1984], ids: [...ids] }]) + .then((events) => hydrateEvents({ ...c.var, events })); const reports = await Promise.all( - events.map((event) => renderAdminReport(event, { viewerPubkey })), + events.map((event) => renderAdminReport(relay, event, { viewerPubkey })), ); return paginated(c, orig, reports); @@ -106,12 +106,12 @@ const adminReportsController: AppController = async (c) 
=> { /** https://docs.joinmastodon.org/methods/admin/reports/#get-one */ const adminReportController: AppController = async (c) => { - const eventId = c.req.param('id'); - const { signal } = c.req.raw; - const store = c.get('store'); - const pubkey = await c.get('signer')?.getPublicKey(); + const { relay, user, signal } = c.var; - const [event] = await store.query([{ + const eventId = c.req.param('id'); + const pubkey = await user?.signer.getPublicKey(); + + const [event] = await relay.query([{ kinds: [1984], ids: [eventId], limit: 1, @@ -121,20 +121,20 @@ const adminReportController: AppController = async (c) => { return c.json({ error: 'Not found' }, 404); } - await hydrateEvents({ events: [event], store, signal }); + await hydrateEvents({ ...c.var, events: [event] }); - const report = await renderAdminReport(event, { viewerPubkey: pubkey }); + const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey }); return c.json(report); }; /** https://docs.joinmastodon.org/methods/admin/reports/#resolve */ const adminReportResolveController: AppController = async (c) => { - const eventId = c.req.param('id'); - const { signal } = c.req.raw; - const store = c.get('store'); - const pubkey = await c.get('signer')?.getPublicKey(); + const { relay, user, signal } = c.var; - const [event] = await store.query([{ + const eventId = c.req.param('id'); + const pubkey = await user?.signer.getPublicKey(); + + const [event] = await relay.query([{ kinds: [1984], ids: [eventId], limit: 1, @@ -145,19 +145,19 @@ const adminReportResolveController: AppController = async (c) => { } await updateEventInfo(eventId, { open: false, closed: true }, c); - await hydrateEvents({ events: [event], store, signal }); + await hydrateEvents({ ...c.var, events: [event] }); - const report = await renderAdminReport(event, { viewerPubkey: pubkey }); + const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey }); return c.json(report); }; const adminReportReopenController: 
AppController = async (c) => { - const eventId = c.req.param('id'); - const { signal } = c.req.raw; - const store = c.get('store'); - const pubkey = await c.get('signer')?.getPublicKey(); + const { relay, user, signal } = c.var; - const [event] = await store.query([{ + const eventId = c.req.param('id'); + const pubkey = await user?.signer.getPublicKey(); + + const [event] = await relay.query([{ kinds: [1984], ids: [eventId], limit: 1, @@ -168,9 +168,9 @@ const adminReportReopenController: AppController = async (c) => { } await updateEventInfo(eventId, { open: true, closed: false }, c); - await hydrateEvents({ events: [event], store, signal }); + await hydrateEvents({ ...c.var, events: [event] }); - const report = await renderAdminReport(event, { viewerPubkey: pubkey }); + const report = await renderAdminReport(relay, event, { viewerPubkey: pubkey }); return c.json(report); }; diff --git a/src/controllers/api/search.ts b/packages/ditto/controllers/api/search.ts similarity index 64% rename from src/controllers/api/search.ts rename to packages/ditto/controllers/api/search.ts index 4c3aa75f..964f0729 100644 --- a/src/controllers/api/search.ts +++ b/packages/ditto/controllers/api/search.ts @@ -1,17 +1,17 @@ +import { paginated, paginatedList } from '@ditto/mastoapi/pagination'; import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify'; import { nip19 } from 'nostr-tools'; import { z } from 'zod'; -import { AppController } from '@/app.ts'; +import { AppContext, AppController } from '@/app.ts'; import { booleanParamSchema } from '@/schema.ts'; -import { Storages } from '@/storages.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; import { extractIdentifier, lookupPubkey } from '@/utils/lookup.ts'; -import { nip05Cache } from '@/utils/nip05.ts'; +import { lookupNip05 } from '@/utils/nip05.ts'; import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts'; import { renderStatus } from '@/views/mastodon/statuses.ts'; import { 
getFollowedPubkeys } from '@/queries.ts'; -import { getIdsBySearch, getPubkeysBySearch } from '@/utils/search.ts'; +import { getPubkeysBySearch } from '@/utils/search.ts'; const searchQuerySchema = z.object({ q: z.string().transform(decodeURIComponent), @@ -19,29 +19,29 @@ const searchQuerySchema = z.object({ resolve: booleanParamSchema.optional().transform(Boolean), following: z.boolean().default(false), account_id: n.id().optional(), - limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)), offset: z.coerce.number().nonnegative().catch(0), }); -type SearchQuery = z.infer; +type SearchQuery = z.infer & { since?: number; until?: number; limit: number }; const searchController: AppController = async (c) => { + const { relay, user, pagination, signal } = c.var; + const result = searchQuerySchema.safeParse(c.req.query()); - const { signal } = c.req.raw; - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const viewerPubkey = await user?.signer.getPublicKey(); if (!result.success) { return c.json({ error: 'Bad request', schema: result.error }, 422); } - const event = await lookupEvent(result.data, signal); + const event = await lookupEvent(c, { ...result.data, ...pagination }); const lookup = extractIdentifier(result.data.q); // Render account from pubkey. if (!event && lookup) { - const pubkey = await lookupPubkey(lookup); + const pubkey = await lookupPubkey(lookup, c.var); return c.json({ - accounts: pubkey ? [await accountFromPubkey(pubkey)] : [], + accounts: pubkey ? 
[accountFromPubkey(pubkey)] : [], statuses: [], hashtags: [], }); @@ -52,7 +52,8 @@ const searchController: AppController = async (c) => { if (event) { events = [event]; } - events.push(...(await searchEvents({ ...result.data, viewerPubkey }, signal))); + + events.push(...(await searchEvents(c, { ...result.data, ...pagination, viewerPubkey }, signal))); const [accounts, statuses] = await Promise.all([ Promise.all( @@ -64,63 +65,63 @@ const searchController: AppController = async (c) => { Promise.all( events .filter((event) => event.kind === 1) - .map((event) => renderStatus(event, { viewerPubkey })) + .map((event) => renderStatus(relay, event, { viewerPubkey })) .filter(Boolean), ), ]); - return c.json({ + const body = { accounts, statuses, hashtags: [], - }); + }; + + if (result.data.type === 'accounts') { + return paginatedList(c, { ...result.data, ...pagination }, body); + } else { + return paginated(c, events, body); + } }; /** Get events for the search params. */ async function searchEvents( - { q, type, limit, offset, account_id, viewerPubkey }: SearchQuery & { viewerPubkey?: string }, + c: AppContext, + { q, type, since, until, limit, offset, account_id, viewerPubkey }: SearchQuery & { viewerPubkey?: string }, signal: AbortSignal, ): Promise { + const { relay, db } = c.var; + // Hashtag search is not supported. if (type === 'hashtags') { return Promise.resolve([]); } - const store = await Storages.search(); - const filter: NostrFilter = { kinds: typeToKinds(type), search: q, + since, + until, limit, }; - const kysely = await Storages.kysely(); - // For account search, use a special index, and prioritize followed accounts. if (type === 'accounts') { - const followedPubkeys = viewerPubkey ? await getFollowedPubkeys(viewerPubkey) : new Set(); - const searchPubkeys = await getPubkeysBySearch(kysely, { q, limit, offset, followedPubkeys }); + const following = viewerPubkey ? 
await getFollowedPubkeys(relay, viewerPubkey) : new Set(); + const searchPubkeys = await getPubkeysBySearch(db.kysely, { q, limit, offset, following }); filter.authors = [...searchPubkeys]; filter.search = undefined; } - // For status search, use a specific query so it supports offset and is open to customizations. - if (type === 'statuses') { - const ids = await getIdsBySearch(kysely, { q, limit, offset }); - filter.ids = [...ids]; - filter.search = undefined; - } - // Results should only be shown from one author. if (account_id) { filter.authors = [account_id]; } // Query the events. - let events = await store + let events = await relay .query([filter], { signal }) - .then((events) => hydrateEvents({ events, store, signal })); + .then((events) => hydrateEvents({ ...c.var, events })); // When using an authors filter, return the events in the same order as the filter. if (filter.authors) { @@ -145,17 +146,17 @@ function typeToKinds(type: SearchQuery['type']): number[] { } /** Resolve a searched value into an event, if applicable. */ -async function lookupEvent(query: SearchQuery, signal: AbortSignal): Promise { - const filters = await getLookupFilters(query, signal); - const store = await Storages.search(); +async function lookupEvent(c: AppContext, query: SearchQuery): Promise { + const { relay, signal } = c.var; + const filters = await getLookupFilters(c, query); - return store.query(filters, { limit: 1, signal }) - .then((events) => hydrateEvents({ events, store, signal })) + return relay.query(filters, { signal }) + .then((events) => hydrateEvents({ ...c.var, events })) .then(([event]) => event); } /** Get filters to lookup the input value. 
*/ -async function getLookupFilters({ q, type, resolve }: SearchQuery, signal: AbortSignal): Promise { +async function getLookupFilters(c: AppContext, { q, type, resolve }: SearchQuery): Promise { const accounts = !type || type === 'accounts'; const statuses = !type || type === 'statuses'; @@ -166,7 +167,7 @@ async function getLookupFilters({ q, type, resolve }: SearchQuery, signal: Abort if (n.id().safeParse(q).success) { const filters: NostrFilter[] = []; if (accounts) filters.push({ kinds: [0], authors: [q] }); - if (statuses) filters.push({ kinds: [1], ids: [q] }); + if (statuses) filters.push({ kinds: [1, 20], ids: [q] }); return filters; } @@ -184,10 +185,10 @@ async function getLookupFilters({ q, type, resolve }: SearchQuery, signal: Abort if (accounts) filters.push({ kinds: [0], authors: [result.data.pubkey] }); break; case 'note': - if (statuses) filters.push({ kinds: [1], ids: [result.data] }); + if (statuses) filters.push({ kinds: [1, 20], ids: [result.data] }); break; case 'nevent': - if (statuses) filters.push({ kinds: [1], ids: [result.data.id] }); + if (statuses) filters.push({ kinds: [1, 20], ids: [result.data.id] }); break; } return filters; @@ -196,7 +197,7 @@ async function getLookupFilters({ q, type, resolve }: SearchQuery, signal: Abort } try { - const { pubkey } = await nip05Cache.fetch(lookup, { signal }); + const { pubkey } = await lookupNip05(lookup, c.var); if (pubkey) { return [{ kinds: [0], authors: [pubkey] }]; } diff --git a/src/controllers/api/statuses.ts b/packages/ditto/controllers/api/statuses.ts similarity index 64% rename from src/controllers/api/statuses.ts rename to packages/ditto/controllers/api/statuses.ts index 7ea9fd60..8bc04151 100644 --- a/src/controllers/api/statuses.ts +++ b/packages/ditto/controllers/api/statuses.ts @@ -1,4 +1,5 @@ import { HTTPException } from '@hono/hono/http-exception'; +import { paginated, paginatedList, paginationSchema } from '@ditto/mastoapi/pagination'; import { NostrEvent, NSchema as n } from 
'@nostrify/nostrify'; import 'linkify-plugin-hashtag'; import linkify from 'linkifyjs'; @@ -6,7 +7,6 @@ import { nip19 } from 'nostr-tools'; import { z } from 'zod'; import { type AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { DittoUpload, dittoUploads } from '@/DittoUploads.ts'; import { DittoEvent } from '@/interfaces/DittoEvent.ts'; import { getAncestors, getAuthor, getDescendants, getEvent } from '@/queries.ts'; @@ -14,9 +14,8 @@ import { addTag, deleteTag } from '@/utils/tags.ts'; import { asyncReplaceAll } from '@/utils/text.ts'; import { lookupPubkey } from '@/utils/lookup.ts'; import { languageSchema } from '@/schema.ts'; -import { Storages } from '@/storages.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { assertAuthenticated, createEvent, paginated, paginatedList, parseBody, updateListEvent } from '@/utils/api.ts'; +import { assertAuthenticated, createEvent, parseBody, updateListEvent } from '@/utils/api.ts'; import { getInvoice, getLnurl } from '@/utils/lnurl.ts'; import { purifyEvent } from '@/utils/purify.ts'; import { getZapSplits } from '@/utils/zap-split.ts'; @@ -47,18 +46,18 @@ const createStatusSchema = z.object({ ); const statusController: AppController = async (c) => { - const id = c.req.param('id'); - const signal = AbortSignal.any([c.req.raw.signal, AbortSignal.timeout(1500)]); + const { relay, user } = c.var; - const event = await getEvent(id, { signal }); + const id = c.req.param('id'); + const event = await getEvent(id, c.var); if (event?.author) { assertAuthenticated(c, event.author); } if (event) { - const viewerPubkey = await c.get('signer')?.getPublicKey(); - const status = await renderStatus(event, { viewerPubkey }); + const viewerPubkey = await user?.signer.getPublicKey(); + const status = await renderStatus(relay, event, { viewerPubkey }); return c.json(status); } @@ -66,9 +65,10 @@ const statusController: AppController = async (c) => { }; const createStatusController: 
AppController = async (c) => { + const { conf, relay, user } = c.var; + const body = await parseBody(c.req.raw); const result = createStatusSchema.safeParse(body); - const store = c.get('store'); if (!result.success) { return c.json({ error: 'Bad request', schema: result.error }, 400); @@ -87,34 +87,34 @@ const createStatusController: AppController = async (c) => { const tags: string[][] = []; if (data.in_reply_to_id) { - const ancestor = await getEvent(data.in_reply_to_id); + const [ancestor] = await relay.query([{ ids: [data.in_reply_to_id] }]); if (!ancestor) { return c.json({ error: 'Original post not found.' }, 404); } const rootId = ancestor.tags.find((tag) => tag[0] === 'e' && tag[3] === 'root')?.[1] ?? ancestor.id; - const root = rootId === ancestor.id ? ancestor : await getEvent(rootId); + const root = rootId === ancestor.id ? ancestor : await relay.query([{ ids: [rootId] }]).then(([event]) => event); if (root) { - tags.push(['e', root.id, Conf.relay, 'root', root.pubkey]); + tags.push(['e', root.id, conf.relay, 'root', root.pubkey]); } else { - tags.push(['e', rootId, Conf.relay, 'root']); + tags.push(['e', rootId, conf.relay, 'root']); } - tags.push(['e', ancestor.id, Conf.relay, 'reply', ancestor.pubkey]); + tags.push(['e', ancestor.id, conf.relay, 'reply', ancestor.pubkey]); } let quoted: DittoEvent | undefined; if (data.quote_id) { - quoted = await getEvent(data.quote_id); + [quoted] = await relay.query([{ ids: [data.quote_id] }]); if (!quoted) { return c.json({ error: 'Quoted post not found.' }, 404); } - tags.push(['q', quoted.id, Conf.relay, quoted.pubkey]); + tags.push(['q', quoted.id, conf.relay, quoted.pubkey]); } if (data.sensitive && data.spoiler_text) { @@ -149,11 +149,11 @@ const createStatusController: AppController = async (c) => { const pubkeys = new Set(); - const content = await asyncReplaceAll( + let content = await asyncReplaceAll( data.status ?? '', - /(? 
{ - const pubkey = await lookupPubkey(username); + const pubkey = await lookupPubkey(username, c.var); if (!pubkey) return match; // Content addressing (default) @@ -162,7 +162,7 @@ const createStatusController: AppController = async (c) => { } try { - return `nostr:${nip19.nprofileEncode({ pubkey, relays: [Conf.relay] })}`; + return `nostr:${nip19.nprofileEncode({ pubkey, relays: [conf.relay] })}`; } catch { return match; } @@ -171,14 +171,14 @@ const createStatusController: AppController = async (c) => { // Explicit addressing for (const to of data.to ?? []) { - const pubkey = await lookupPubkey(to); + const pubkey = await lookupPubkey(to, c.var); if (pubkey) { pubkeys.add(pubkey); } } for (const pubkey of pubkeys) { - tags.push(['p', pubkey, Conf.relay]); + tags.push(['p', pubkey, conf.relay]); } for (const link of linkify.find(data.status ?? '')) { @@ -190,25 +190,13 @@ const createStatusController: AppController = async (c) => { } } - const mediaUrls: string[] = media - .map(({ url }) => url) - .filter((url): url is string => Boolean(url)); + const pubkey = await user!.signer.getPublicKey(); + const author = pubkey ? await getAuthor(pubkey, c.var) : undefined; - const quoteCompat = quoted - ? `\n\nnostr:${ - nip19.neventEncode({ id: quoted.id, kind: quoted.kind, author: quoted.pubkey, relays: [Conf.relay] }) - }` - : ''; - - const mediaCompat = mediaUrls.length ? `\n\n${mediaUrls.join('\n')}` : ''; - - const pubkey = await c.get('signer')?.getPublicKey()!; - const author = pubkey ? 
await getAuthor(pubkey) : undefined; - - if (Conf.zapSplitsEnabled) { + if (conf.zapSplitsEnabled) { const meta = n.json().pipe(n.metadata()).catch({}).parse(author?.content); const lnurl = getLnurl(meta); - const dittoZapSplit = await getZapSplits(store, Conf.pubkey); + const dittoZapSplit = await getZapSplits(relay, await conf.signer.getPublicKey()); if (lnurl && dittoZapSplit) { const totalSplit = Object.values(dittoZapSplit).reduce((total, { weight }) => total + weight, 0); for (const zapPubkey in dittoZapSplit) { @@ -216,7 +204,7 @@ const createStatusController: AppController = async (c) => { tags.push([ 'zap', zapPubkey, - Conf.relay, + conf.relay, (Math.max(0, 100 - totalSplit) + dittoZapSplit[zapPubkey].weight).toString(), ]); continue; @@ -224,49 +212,70 @@ const createStatusController: AppController = async (c) => { tags.push([ 'zap', zapPubkey, - Conf.relay, + conf.relay, dittoZapSplit[zapPubkey].weight.toString(), dittoZapSplit[zapPubkey].message, ]); } if (totalSplit && !dittoZapSplit[pubkey]) { - tags.push(['zap', pubkey, Conf.relay, Math.max(0, 100 - totalSplit).toString()]); + tags.push(['zap', pubkey, conf.relay, Math.max(0, 100 - totalSplit).toString()]); } } } + const mediaUrls: string[] = media + .map(({ url }) => url) + .filter((url): url is string => Boolean(url)); + + if (quoted) { + if (content) { + content += '\n\n'; + } + const nevent = nip19.neventEncode({ + id: quoted.id, + kind: quoted.kind, + author: quoted.pubkey, + relays: [conf.relay], + }); + content += `nostr:${nevent}`; + } + + if (mediaUrls.length) { + if (content) { + content += '\n\n'; + } + content += mediaUrls.join('\n'); + } + const event = await createEvent({ kind: 1, - content: content + quoteCompat + mediaCompat, + content, tags, }, c); if (data.quote_id) { - await hydrateEvents({ - events: [event], - store: await Storages.db(), - signal: c.req.raw.signal, - }); + await hydrateEvents({ ...c.var, events: [event] }); } - return c.json(await renderStatus({ ...event, author 
}, { viewerPubkey: author?.pubkey })); + return c.json(await renderStatus(relay, { ...event, author }, { viewerPubkey: author?.pubkey })); }; const deleteStatusController: AppController = async (c) => { - const id = c.req.param('id'); - const pubkey = await c.get('signer')?.getPublicKey(); + const { conf, relay, user } = c.var; - const event = await getEvent(id, { signal: c.req.raw.signal }); + const id = c.req.param('id'); + const pubkey = await user?.signer.getPublicKey(); + const event = await getEvent(id, c.var); if (event) { if (event.pubkey === pubkey) { await createEvent({ kind: 5, - tags: [['e', id, Conf.relay, '', pubkey]], + tags: [['e', id, conf.relay, '', pubkey]], }, c); - const author = await getAuthor(event.pubkey); - return c.json(await renderStatus({ ...event, author }, { viewerPubkey: pubkey })); + const author = await getAuthor(event.pubkey, c.var); + return c.json(await renderStatus(relay, { ...event, author }, { viewerPubkey: pubkey })); } else { return c.json({ error: 'Unauthorized' }, 403); } @@ -276,29 +285,26 @@ const deleteStatusController: AppController = async (c) => { }; const contextController: AppController = async (c) => { + const { relay, user } = c.var; + const id = c.req.param('id'); - const store = c.get('store'); - const event = await getEvent(id, { kind: 1, relations: ['author', 'event_stats', 'author_stats'] }); - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const [event] = await relay.query([{ kinds: [1, 20], ids: [id] }]); + const viewerPubkey = await user?.signer.getPublicKey(); async function renderStatuses(events: NostrEvent[]) { const statuses = await Promise.all( - events.map((event) => renderStatus(event, { viewerPubkey })), + events.map((event) => renderStatus(relay, event, { viewerPubkey })), ); return statuses.filter(Boolean); } if (event) { const [ancestorEvents, descendantEvents] = await Promise.all([ - getAncestors(store, event), - getDescendants(store, event), + getAncestors(relay, event), + 
getDescendants(relay, event), ]); - await hydrateEvents({ - events: [...ancestorEvents, ...descendantEvents], - signal: c.req.raw.signal, - store, - }); + await hydrateEvents({ ...c.var, events: [...ancestorEvents, ...descendantEvents] }); const [ancestors, descendants] = await Promise.all([ renderStatuses(ancestorEvents), @@ -312,11 +318,24 @@ const contextController: AppController = async (c) => { }; const favouriteController: AppController = async (c) => { + const { conf, relay, user } = c.var; + const id = c.req.param('id'); - const target = await getEvent(id, { kind: 1, relations: ['author', 'event_stats', 'author_stats'] }); + const [target] = await relay.query([{ ids: [id], kinds: [1, 20] }]); if (target) { - const status = await renderStatus(target, { viewerPubkey: await c.get('signer')?.getPublicKey() }); + await createEvent({ + kind: 7, + content: '+', + tags: [ + ['e', target.id, conf.relay, '', target.pubkey], + ['p', target.pubkey, conf.relay], + ], + }, c); + + await hydrateEvents({ ...c.var, events: [target] }); + + const status = await renderStatus(relay, target, { viewerPubkey: await user?.signer.getPublicKey() }); if (status) { status.favourited = true; @@ -329,44 +348,6 @@ const favouriteController: AppController = async (c) => { } }; -const unfavouriteController: AppController = async (c) => { - const id = c.req.param('id'); - const signer = c.get('signer')!; - const pubkey = await signer.getPublicKey(); - const store = await Storages.db(); - const { signal } = c.req.raw; - - const [event] = await store.query([{ ids: [id] }], { signal }); - if (!event) { - return c.json({ error: 'Record not found.' }, 404); - } - - const favouriteEvents = await store.query([ - { kinds: [7], authors: [pubkey], '#e': [id] }, - ]); - if (!favouriteEvents.length) { - return c.json({ error: 'Record not found.' 
}, 404); - } - - favouriteEvents.forEach(async (e) => { - if (e.content === '+') { - await createEvent({ - kind: 5, - tags: [ - ['e', e.id], - ], - content: 'unfavourite', - }, c); - } - }) - - await hydrateEvents({ events: [event], store, signal }) - - const status = await renderStatus(event, { viewerPubkey: pubkey }); - - return c.json(status); -}; - const favouritedByController: AppController = (c) => { const id = c.req.param('id'); const params = c.get('pagination'); @@ -378,12 +359,10 @@ const favouritedByController: AppController = (c) => { /** https://docs.joinmastodon.org/methods/statuses/#boost */ const reblogStatusController: AppController = async (c) => { - const eventId = c.req.param('id'); - const { signal } = c.req.raw; + const { conf, relay, user } = c.var; - const event = await getEvent(eventId, { - kind: 1, - }); + const eventId = c.req.param('id'); + const event = await getEvent(eventId, c.var); if (!event) { return c.json({ error: 'Event not found.' }, 404); @@ -392,34 +371,31 @@ const reblogStatusController: AppController = async (c) => { const reblogEvent = await createEvent({ kind: 6, tags: [ - ['e', event.id, Conf.relay, '', event.pubkey], - ['p', event.pubkey, Conf.relay], + ['e', event.id, conf.relay, '', event.pubkey], + ['p', event.pubkey, conf.relay], ], }, c); - await hydrateEvents({ - events: [reblogEvent], - store: await Storages.db(), - signal: signal, - }); + await hydrateEvents({ ...c.var, events: [reblogEvent] }); - const status = await renderReblog(reblogEvent, { viewerPubkey: await c.get('signer')?.getPublicKey() }); + const status = await renderReblog(relay, reblogEvent, { viewerPubkey: await user?.signer.getPublicKey() }); return c.json(status); }; /** https://docs.joinmastodon.org/methods/statuses/#unreblog */ const unreblogStatusController: AppController = async (c) => { - const eventId = c.req.param('id'); - const pubkey = await c.get('signer')?.getPublicKey()!; - const store = await Storages.db(); + const { conf, relay, 
user } = c.var; - const [event] = await store.query([{ ids: [eventId], kinds: [1] }]); + const eventId = c.req.param('id'); + const pubkey = await user!.signer.getPublicKey(); + + const [event] = await relay.query([{ ids: [eventId], kinds: [1, 20] }]); if (!event) { return c.json({ error: 'Record not found' }, 404); } - const [repostEvent] = await store.query( + const [repostEvent] = await relay.query( [{ kinds: [6], authors: [pubkey], '#e': [event.id], limit: 1 }], ); @@ -429,10 +405,10 @@ const unreblogStatusController: AppController = async (c) => { await createEvent({ kind: 5, - tags: [['e', repostEvent.id, Conf.relay, '', repostEvent.pubkey]], + tags: [['e', repostEvent.id, conf.relay, '', repostEvent.pubkey]], }, c); - return c.json(await renderStatus(event, { viewerPubkey: pubkey })); + return c.json(await renderStatus(relay, event, { viewerPubkey: pubkey })); }; const rebloggedByController: AppController = (c) => { @@ -442,23 +418,23 @@ const rebloggedByController: AppController = (c) => { }; const quotesController: AppController = async (c) => { - const id = c.req.param('id'); - const params = c.get('pagination'); - const store = await Storages.db(); + const { relay, user, pagination } = c.var; - const [event] = await store.query([{ ids: [id], kinds: [1] }]); + const id = c.req.param('id'); + + const [event] = await relay.query([{ ids: [id], kinds: [1, 20] }]); if (!event) { return c.json({ error: 'Event not found.' 
}, 404); } - const quotes = await store - .query([{ kinds: [1], '#q': [event.id], ...params }]) - .then((events) => hydrateEvents({ events, store })); + const quotes = await relay + .query([{ kinds: [1, 20], '#q': [event.id], ...pagination }]) + .then((events) => hydrateEvents({ ...c.var, events })); - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const viewerPubkey = await user?.signer.getPublicKey(); const statuses = await Promise.all( - quotes.map((event) => renderStatus(event, { viewerPubkey })), + quotes.map((event) => renderStatus(relay, event, { viewerPubkey })), ); if (!statuses.length) { @@ -470,22 +446,20 @@ const quotesController: AppController = async (c) => { /** https://docs.joinmastodon.org/methods/statuses/#bookmark */ const bookmarkController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { conf, relay, user } = c.var; + const pubkey = await user!.signer.getPublicKey(); const eventId = c.req.param('id'); - const event = await getEvent(eventId, { - kind: 1, - relations: ['author', 'event_stats', 'author_stats'], - }); + const event = await getEvent(eventId, c.var); if (event) { await updateListEvent( { kinds: [10003], authors: [pubkey], limit: 1 }, - (tags) => addTag(tags, ['e', event.id, Conf.relay, '', event.pubkey]), + (tags) => addTag(tags, ['e', event.id, conf.relay, '', event.pubkey]), c, ); - const status = await renderStatus(event, { viewerPubkey: pubkey }); + const status = await renderStatus(relay, event, { viewerPubkey: pubkey }); if (status) { status.bookmarked = true; } @@ -497,22 +471,21 @@ const bookmarkController: AppController = async (c) => { /** https://docs.joinmastodon.org/methods/statuses/#unbookmark */ const unbookmarkController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { conf, relay, user } = c.var; + + const pubkey = await user!.signer.getPublicKey(); const eventId = c.req.param('id'); - const event = 
await getEvent(eventId, { - kind: 1, - relations: ['author', 'event_stats', 'author_stats'], - }); + const event = await getEvent(eventId, c.var); if (event) { await updateListEvent( { kinds: [10003], authors: [pubkey], limit: 1 }, - (tags) => deleteTag(tags, ['e', event.id, Conf.relay, '', event.pubkey]), + (tags) => deleteTag(tags, ['e', event.id, conf.relay, '', event.pubkey]), c, ); - const status = await renderStatus(event, { viewerPubkey: pubkey }); + const status = await renderStatus(relay, event, { viewerPubkey: pubkey }); if (status) { status.bookmarked = false; } @@ -524,22 +497,21 @@ const unbookmarkController: AppController = async (c) => { /** https://docs.joinmastodon.org/methods/statuses/#pin */ const pinController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; + const { conf, relay, user } = c.var; + + const pubkey = await user!.signer.getPublicKey(); const eventId = c.req.param('id'); - const event = await getEvent(eventId, { - kind: 1, - relations: ['author', 'event_stats', 'author_stats'], - }); + const event = await getEvent(eventId, c.var); if (event) { await updateListEvent( { kinds: [10001], authors: [pubkey], limit: 1 }, - (tags) => addTag(tags, ['e', event.id, Conf.relay, '', event.pubkey]), + (tags) => addTag(tags, ['e', event.id, conf.relay, '', event.pubkey]), c, ); - const status = await renderStatus(event, { viewerPubkey: pubkey }); + const status = await renderStatus(relay, event, { viewerPubkey: pubkey }); if (status) { status.pinned = true; } @@ -551,24 +523,21 @@ const pinController: AppController = async (c) => { /** https://docs.joinmastodon.org/methods/statuses/#unpin */ const unpinController: AppController = async (c) => { - const pubkey = await c.get('signer')?.getPublicKey()!; - const eventId = c.req.param('id'); - const { signal } = c.req.raw; + const { conf, relay, user } = c.var; - const event = await getEvent(eventId, { - kind: 1, - relations: ['author', 'event_stats', 
'author_stats'], - signal, - }); + const pubkey = await user!.signer.getPublicKey(); + const eventId = c.req.param('id'); + + const event = await getEvent(eventId, c.var); if (event) { await updateListEvent( { kinds: [10001], authors: [pubkey], limit: 1 }, - (tags) => deleteTag(tags, ['e', event.id, Conf.relay, '', event.pubkey]), + (tags) => deleteTag(tags, ['e', event.id, conf.relay, '', event.pubkey]), c, ); - const status = await renderStatus(event, { viewerPubkey: pubkey }); + const status = await renderStatus(relay, event, { viewerPubkey: pubkey }); if (status) { status.pinned = false; } @@ -586,10 +555,10 @@ const zapSchema = z.object({ }); const zapController: AppController = async (c) => { + const { conf, relay, signal } = c.var; + const body = await parseBody(c.req.raw); const result = zapSchema.safeParse(body); - const { signal } = c.req.raw; - const store = c.get('store'); if (!result.success) { return c.json({ error: 'Bad request', schema: result.error }, 400); @@ -602,28 +571,28 @@ const zapController: AppController = async (c) => { let lnurl: undefined | string; if (status_id) { - target = await getEvent(status_id, { kind: 1, relations: ['author'], signal }); + target = await getEvent(status_id, c.var); const author = target?.author; const meta = n.json().pipe(n.metadata()).catch({}).parse(author?.content); lnurl = getLnurl(meta); if (target && lnurl) { tags.push( - ['e', target.id, Conf.relay], - ['p', target.pubkey, Conf.relay], + ['e', target.id, conf.relay], + ['p', target.pubkey, conf.relay], ['amount', amount.toString()], - ['relays', Conf.relay], + ['relays', conf.relay], ['lnurl', lnurl], ); } } else { - [target] = await store.query([{ authors: [account_id], kinds: [0], limit: 1 }]); + [target] = await relay.query([{ authors: [account_id], kinds: [0], limit: 1 }]); const meta = n.json().pipe(n.metadata()).catch({}).parse(target?.content); lnurl = getLnurl(meta); if (target && lnurl) { tags.push( - ['p', target.pubkey, Conf.relay], + ['p', 
target.pubkey, conf.relay], ['amount', amount.toString()], - ['relays', Conf.relay], + ['relays', conf.relay], ['lnurl', lnurl], ); } @@ -643,19 +612,19 @@ const zapController: AppController = async (c) => { }; const zappedByController: AppController = async (c) => { - const id = c.req.param('id'); - const params = c.get('listPagination'); - const store = await Storages.db(); - const kysely = await Storages.kysely(); + const { db, relay } = c.var; - const zaps = await kysely.selectFrom('event_zaps') + const id = c.req.param('id'); + const { offset, limit } = paginationSchema.parse(c.req.query()); + + const zaps = await db.kysely.selectFrom('event_zaps') .selectAll() .where('target_event_id', '=', id) .orderBy('amount_millisats', 'desc') - .limit(params.limit) - .offset(params.offset).execute(); + .limit(limit) + .offset(offset).execute(); - const authors = await store.query([{ kinds: [0], authors: zaps.map((zap) => zap.sender_pubkey) }]); + const authors = await relay.query([{ kinds: [0], authors: zaps.map((zap) => zap.sender_pubkey) }]); const results = (await Promise.all( zaps.map(async (zap) => { @@ -673,7 +642,7 @@ const zappedByController: AppController = async (c) => { }), )).filter(Boolean); - return paginatedList(c, params, results); + return paginatedList(c, { limit, offset }, results); }; export { @@ -689,7 +658,6 @@ export { reblogStatusController, statusController, unbookmarkController, - unfavouriteController, unpinController, unreblogStatusController, zapController, diff --git a/src/controllers/api/streaming.ts b/packages/ditto/controllers/api/streaming.ts similarity index 64% rename from src/controllers/api/streaming.ts rename to packages/ditto/controllers/api/streaming.ts index 5e90085d..e6924641 100644 --- a/src/controllers/api/streaming.ts +++ b/packages/ditto/controllers/api/streaming.ts @@ -1,26 +1,22 @@ -import TTLCache from '@isaacs/ttlcache'; -import { NostrEvent, NostrFilter } from '@nostrify/nostrify'; -import { Stickynotes } from 
'@soapbox/stickynotes'; -import { z } from 'zod'; - -import { type AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; +import { MuteListPolicy } from '@ditto/policies'; import { streamingClientMessagesCounter, streamingConnectionsGauge, streamingServerMessagesCounter, -} from '@/metrics.ts'; -import { MuteListPolicy } from '@/policies/MuteListPolicy.ts'; +} from '@ditto/metrics'; +import TTLCache from '@isaacs/ttlcache'; +import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify'; +import { logi } from '@soapbox/logi'; +import { z } from 'zod'; + +import { type AppController } from '@/app.ts'; import { getFeedPubkeys } from '@/queries.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { Storages } from '@/storages.ts'; -import { getTokenHash } from '@/utils/auth.ts'; -import { bech32ToPubkey, Time } from '@/utils.ts'; +import { errorJson } from '@/utils/log.ts'; +import { Time } from '@/utils.ts'; import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts'; import { renderNotification } from '@/views/mastodon/notifications.ts'; -const console = new Stickynotes('ditto:streaming'); - /** * Streaming timelines/categories. * https://docs.joinmastodon.org/methods/streaming/#streams @@ -69,6 +65,7 @@ const limiter = new TTLCache(); const connections = new Set(); const streamingController: AppController = async (c) => { + const { conf, relay, user } = c.var; const upgrade = c.req.header('upgrade'); const token = c.req.header('sec-websocket-protocol'); const stream = streamSchema.optional().catch(undefined).parse(c.req.query('stream')); @@ -78,11 +75,6 @@ const streamingController: AppController = async (c) => { return c.text('Please use websocket protocol', 400); } - const pubkey = token ? await getTokenPubkey(token) : undefined; - if (token && !pubkey) { - return c.json({ error: 'Invalid access token' }, 401); - } - const ip = c.req.header('x-real-ip'); if (ip) { const count = limiter.get(ip) ?? 
0; @@ -91,24 +83,25 @@ const streamingController: AppController = async (c) => { } } - const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { protocol: token, idleTimeout: 30 }); + const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { protocol: token }); - const store = await Storages.db(); - const pubsub = await Storages.pubsub(); - - const policy = pubkey ? new MuteListPolicy(pubkey, await Storages.admin()) : undefined; + const pubkey = await user?.signer.getPublicKey(); + const policy = pubkey ? new MuteListPolicy(pubkey, relay) : undefined; function send(e: StreamingEvent) { if (socket.readyState === WebSocket.OPEN) { - console.debug('send', e.event, e.payload); streamingServerMessagesCounter.inc(); socket.send(JSON.stringify(e)); } } - async function sub(filters: NostrFilter[], render: (event: NostrEvent) => Promise) { + async function sub( + filter: NostrFilter & { limit: 0 }, + render: (event: NostrEvent) => Promise, + ) { + const { signal } = controller; try { - for await (const msg of pubsub.req(filters, { signal: controller.signal })) { + for await (const msg of relay.req([filter], { signal })) { if (msg[0] === 'EVENT') { const event = msg[2]; @@ -119,7 +112,7 @@ const streamingController: AppController = async (c) => { } } - await hydrateEvents({ events: [event], store, signal: AbortSignal.timeout(1000) }); + await hydrateEvents({ ...c.var, events: [event], signal }); const result = await render(event); @@ -129,7 +122,7 @@ const streamingController: AppController = async (c) => { } } } catch (e) { - console.debug('streaming error:', e); + logi({ level: 'error', ns: 'ditto.streaming', msg: 'Error in streaming', error: errorJson(e) }); } } @@ -138,17 +131,17 @@ const streamingController: AppController = async (c) => { streamingConnectionsGauge.set(connections.size); if (!stream) return; - const topicFilter = await topicToFilter(stream, c.req.query(), pubkey); + const topicFilter = await topicToFilter(relay, stream, c.req.query(), pubkey, 
conf.url.host); if (topicFilter) { - sub([topicFilter], async (event) => { + sub(topicFilter, async (event) => { let payload: object | undefined; if (event.kind === 1) { - payload = await renderStatus(event, { viewerPubkey: pubkey }); + payload = await renderStatus(relay, event, { viewerPubkey: pubkey }); } if (event.kind === 6) { - payload = await renderReblog(event, { viewerPubkey: pubkey }); + payload = await renderReblog(relay, event, { viewerPubkey: pubkey }); } if (payload) { @@ -162,15 +155,15 @@ const streamingController: AppController = async (c) => { } if (['user', 'user:notification'].includes(stream) && pubkey) { - sub([{ '#p': [pubkey] }], async (event) => { + sub({ '#p': [pubkey], limit: 0 }, async (event) => { if (event.pubkey === pubkey) return; // skip own events - const payload = await renderNotification(event, { viewerPubkey: pubkey }); + const payload = await renderNotification(relay, event, { viewerPubkey: pubkey }); if (payload) { return { event: 'notification', payload: JSON.stringify(payload), stream: [stream], - }; + } satisfies StreamingEvent; } }); return; @@ -206,45 +199,28 @@ const streamingController: AppController = async (c) => { }; async function topicToFilter( + relay: NStore, topic: Stream, query: Record, pubkey: string | undefined, -): Promise { - const { host } = Conf.url; - + host: string, +): Promise<(NostrFilter & { limit: 0 }) | undefined> { switch (topic) { case 'public': - return { kinds: [1, 6] }; + return { kinds: [1, 6, 20], limit: 0 }; case 'public:local': - return { kinds: [1, 6], search: `domain:${host}` }; + return { kinds: [1, 6, 20], search: `domain:${host}`, limit: 0 }; case 'hashtag': - if (query.tag) return { kinds: [1, 6], '#t': [query.tag] }; + if (query.tag) return { kinds: [1, 6, 20], '#t': [query.tag], limit: 0 }; break; case 'hashtag:local': - if (query.tag) return { kinds: [1, 6], '#t': [query.tag], search: `domain:${host}` }; + if (query.tag) return { kinds: [1, 6, 20], '#t': [query.tag], search: 
`domain:${host}`, limit: 0 }; break; case 'user': // HACK: this puts the user's entire contacts list into RAM, // and then calls `matchFilters` over it. Refreshing the page // is required after following a new user. - return pubkey ? { kinds: [1, 6], authors: [...await getFeedPubkeys(pubkey)] } : undefined; - } -} - -async function getTokenPubkey(token: string): Promise { - if (token.startsWith('token1')) { - const kysely = await Storages.kysely(); - const tokenHash = await getTokenHash(token as `token1${string}`); - - const { pubkey } = await kysely - .selectFrom('auth_tokens') - .select('pubkey') - .where('token_hash', '=', tokenHash) - .executeTakeFirstOrThrow(); - - return pubkey; - } else { - return bech32ToPubkey(token); + return pubkey ? { kinds: [1, 6, 20], authors: [...await getFeedPubkeys(relay, pubkey)], limit: 0 } : undefined; } } diff --git a/src/controllers/api/suggestions.ts b/packages/ditto/controllers/api/suggestions.ts similarity index 50% rename from src/controllers/api/suggestions.ts rename to packages/ditto/controllers/api/suggestions.ts index c047c415..cb6a8206 100644 --- a/src/controllers/api/suggestions.ts +++ b/packages/ditto/controllers/api/suggestions.ts @@ -1,38 +1,36 @@ +import { paginated, paginatedList, paginationSchema } from '@ditto/mastoapi/pagination'; import { NostrFilter } from '@nostrify/nostrify'; import { matchFilter } from 'nostr-tools'; import { AppContext, AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { paginatedList } from '@/utils/api.ts'; import { getTagSet } from '@/utils/tags.ts'; import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts'; export const suggestionsV1Controller: AppController = async (c) => { - const signal = c.req.raw.signal; - const params = c.get('listPagination'); - const suggestions = await renderV2Suggestions(c, params, signal); + const { signal } = c.var; + const { offset, limit } = 
paginationSchema.parse(c.req.query()); + const suggestions = await renderV2Suggestions(c, { offset, limit }, signal); const accounts = suggestions.map(({ account }) => account); - return paginatedList(c, params, accounts); + return paginatedList(c, { offset, limit }, accounts); }; export const suggestionsV2Controller: AppController = async (c) => { - const signal = c.req.raw.signal; - const params = c.get('listPagination'); - const suggestions = await renderV2Suggestions(c, params, signal); - return paginatedList(c, params, suggestions); + const { signal } = c.var; + const { offset, limit } = paginationSchema.parse(c.req.query()); + const suggestions = await renderV2Suggestions(c, { offset, limit }, signal); + return paginatedList(c, { offset, limit }, suggestions); }; async function renderV2Suggestions(c: AppContext, params: { offset: number; limit: number }, signal?: AbortSignal) { + const { conf, relay, user } = c.var; const { offset, limit } = params; - const store = c.get('store'); - const signer = c.get('signer'); - const pubkey = await signer?.getPublicKey(); + const pubkey = await user?.signer.getPublicKey(); const filters: NostrFilter[] = [ - { kinds: [30382], authors: [Conf.pubkey], '#n': ['suggested'], limit }, - { kinds: [1985], '#L': ['pub.ditto.trends'], '#l': [`#p`], authors: [Conf.pubkey], limit: 1 }, + { kinds: [30382], authors: [await conf.signer.getPublicKey()], '#n': ['suggested'], limit }, + { kinds: [1985], '#L': ['pub.ditto.trends'], '#l': [`#p`], authors: [await conf.signer.getPublicKey()], limit: 1 }, ]; if (pubkey) { @@ -40,14 +38,21 @@ async function renderV2Suggestions(c: AppContext, params: { offset: number; limi filters.push({ kinds: [10000], authors: [pubkey], limit: 1 }); } - const events = await store.query(filters, { signal }); + const events = await relay.query(filters, { signal }); + const adminPubkey = await conf.signer.getPublicKey(); const [userEvents, followsEvent, mutesEvent, trendingEvent] = [ - events.filter((event) => 
matchFilter({ kinds: [30382], authors: [Conf.pubkey], '#n': ['suggested'] }, event)), + events.filter((event) => matchFilter({ kinds: [30382], authors: [adminPubkey], '#n': ['suggested'] }, event)), pubkey ? events.find((event) => matchFilter({ kinds: [3], authors: [pubkey] }, event)) : undefined, pubkey ? events.find((event) => matchFilter({ kinds: [10000], authors: [pubkey] }, event)) : undefined, events.find((event) => - matchFilter({ kinds: [1985], '#L': ['pub.ditto.trends'], '#l': [`#p`], authors: [Conf.pubkey], limit: 1 }, event) + matchFilter({ + kinds: [1985], + '#L': ['pub.ditto.trends'], + '#l': [`#p`], + authors: [adminPubkey], + limit: 1, + }, event) ), ]; @@ -72,11 +77,11 @@ async function renderV2Suggestions(c: AppContext, params: { offset: number; limi const authors = [...pubkeys].slice(offset, offset + limit); - const profiles = await store.query( + const profiles = await relay.query( [{ kinds: [0], authors, limit: authors.length }], { signal }, ) - .then((events) => hydrateEvents({ events, store, signal })); + .then((events) => hydrateEvents({ ...c.var, events })); return Promise.all(authors.map(async (pubkey) => { const profile = profiles.find((event) => event.pubkey === pubkey); @@ -87,3 +92,39 @@ async function renderV2Suggestions(c: AppContext, params: { offset: number; limi }; })); } + +export const localSuggestionsController: AppController = async (c) => { + const { conf, relay, pagination, signal } = c.var; + + const grants = await relay.query( + [{ kinds: [30360], authors: [await conf.signer.getPublicKey()], ...pagination }], + { signal }, + ); + + const pubkeys = new Set(); + + for (const grant of grants) { + const pubkey = grant.tags.find(([name]) => name === 'p')?.[1]; + if (pubkey) { + pubkeys.add(pubkey); + } + } + + const profiles = await relay.query( + [{ kinds: [0], authors: [...pubkeys], search: `domain:${conf.url.host}`, ...pagination }], + { signal }, + ) + .then((events) => hydrateEvents({ ...c.var, events })); + + const 
suggestions = [...pubkeys].map((pubkey) => { + const profile = profiles.find((event) => event.pubkey === pubkey); + if (!profile) return; + + return { + source: 'global', + account: renderAccount(profile), + }; + }).filter(Boolean); + + return paginated(c, grants, suggestions); +}; diff --git a/src/controllers/api/timelines.ts b/packages/ditto/controllers/api/timelines.ts similarity index 54% rename from src/controllers/api/timelines.ts rename to packages/ditto/controllers/api/timelines.ts index cd0d7ff1..820ebd75 100644 --- a/src/controllers/api/timelines.ts +++ b/packages/ditto/controllers/api/timelines.ts @@ -1,20 +1,48 @@ +import { paginated } from '@ditto/mastoapi/pagination'; import { NostrFilter } from '@nostrify/nostrify'; import { z } from 'zod'; import { type AppContext, type AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { getFeedPubkeys } from '@/queries.ts'; import { booleanParamSchema, languageSchema } from '@/schema.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { paginated } from '@/utils/api.ts'; import { getTagSet } from '@/utils/tags.ts'; import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts'; +const homeQuerySchema = z.object({ + exclude_replies: booleanParamSchema.optional(), + only_media: booleanParamSchema.optional(), +}); + const homeTimelineController: AppController = async (c) => { - const params = c.get('pagination'); - const pubkey = await c.get('signer')?.getPublicKey()!; - const authors = [...await getFeedPubkeys(pubkey)]; - return renderStatuses(c, [{ authors, kinds: [1, 6], ...params }]); + const { relay, user, pagination } = c.var; + const pubkey = await user?.signer.getPublicKey()!; + const result = homeQuerySchema.safeParse(c.req.query()); + + if (!result.success) { + return c.json({ error: 'Bad request', schema: result.error }, 400); + } + + const { exclude_replies, only_media } = result.data; + + const authors = [...await getFeedPubkeys(relay, pubkey)]; + 
const filter: NostrFilter = { authors, kinds: [1, 6, 20], ...pagination }; + + const search: string[] = []; + + if (only_media) { + search.push('media:true'); + } + + if (exclude_replies) { + search.push('reply:false'); + } + + if (search.length) { + filter.search = search.join(' '); + } + + return renderStatuses(c, [filter]); }; const publicQuerySchema = z.object({ @@ -24,6 +52,7 @@ const publicQuerySchema = z.object({ }); const publicTimelineController: AppController = (c) => { + const { conf } = c.var; const params = c.get('pagination'); const result = publicQuerySchema.safeParse(c.req.query()); @@ -33,12 +62,12 @@ const publicTimelineController: AppController = (c) => { const { local, instance, language } = result.data; - const filter: NostrFilter = { kinds: [1], ...params }; + const filter: NostrFilter = { kinds: [1, 20], ...params }; const search: `${string}:${string}`[] = []; if (local) { - search.push(`domain:${Conf.url.host}`); + search.push(`domain:${conf.url.host}`); } else if (instance) { search.push(`domain:${instance}`); } @@ -57,43 +86,43 @@ const publicTimelineController: AppController = (c) => { const hashtagTimelineController: AppController = (c) => { const hashtag = c.req.param('hashtag')!.toLowerCase(); const params = c.get('pagination'); - return renderStatuses(c, [{ kinds: [1], '#t': [hashtag], ...params }]); + return renderStatuses(c, [{ kinds: [1, 20], '#t': [hashtag], ...params }]); }; const suggestedTimelineController: AppController = async (c) => { - const store = c.get('store'); - const params = c.get('pagination'); + const { conf, relay, pagination } = c.var; - const [follows] = await store.query( - [{ kinds: [3], authors: [Conf.pubkey], limit: 1 }], + const [follows] = await relay.query( + [{ kinds: [3], authors: [await conf.signer.getPublicKey()], limit: 1 }], ); const authors = [...getTagSet(follows?.tags ?? 
[], 'p')]; - return renderStatuses(c, [{ authors, kinds: [1], ...params }]); + return renderStatuses(c, [{ authors, kinds: [1, 20], ...pagination }]); }; /** Render statuses for timelines. */ async function renderStatuses(c: AppContext, filters: NostrFilter[]) { - const { signal } = c.req.raw; - const store = c.get('store'); - const opts = { signal, timeout: Conf.db.timeouts.timelines }; + const { conf, user, signal } = c.var; - const events = await store + const relay = user?.relay ?? c.var.relay; + const opts = { signal, timeout: conf.db.timeouts.timelines }; + + const events = await relay .query(filters, opts) - .then((events) => hydrateEvents({ events, store, signal })); + .then((events) => hydrateEvents({ ...c.var, events })); if (!events.length) { return c.json([]); } - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const viewerPubkey = await user?.signer.getPublicKey(); const statuses = (await Promise.all(events.map((event) => { if (event.kind === 6) { - return renderReblog(event, { viewerPubkey }); + return renderReblog(relay, event, { viewerPubkey }); } - return renderStatus(event, { viewerPubkey }); + return renderStatus(relay, event, { viewerPubkey }); }))).filter(Boolean); if (!statuses.length) { diff --git a/src/controllers/api/translate.ts b/packages/ditto/controllers/api/translate.ts similarity index 89% rename from src/controllers/api/translate.ts rename to packages/ditto/controllers/api/translate.ts index d763c713..7a0f7731 100644 --- a/src/controllers/api/translate.ts +++ b/packages/ditto/controllers/api/translate.ts @@ -1,22 +1,25 @@ +import { cachedTranslationsSizeGauge } from '@ditto/metrics'; +import { logi } from '@soapbox/logi'; import { LanguageCode } from 'iso-639-1'; import { z } from 'zod'; import { AppController } from '@/app.ts'; import { translationCache } from '@/caches/translationCache.ts'; import { MastodonTranslation } from '@/entities/MastodonTranslation.ts'; -import { cachedTranslationsSizeGauge } from 
'@/metrics.ts'; import { getEvent } from '@/queries.ts'; import { localeSchema } from '@/schema.ts'; import { parseBody } from '@/utils/api.ts'; import { renderStatus } from '@/views/mastodon/statuses.ts'; +import { errorJson } from '@/utils/log.ts'; const translateSchema = z.object({ lang: localeSchema, }); const translateController: AppController = async (c) => { + const { relay, user, signal } = c.var; + const result = translateSchema.safeParse(await parseBody(c.req.raw)); - const { signal } = c.req.raw; if (!result.success) { return c.json({ error: 'Bad request.', schema: result.error }, 422); @@ -31,18 +34,18 @@ const translateController: AppController = async (c) => { const id = c.req.param('id'); - const event = await getEvent(id, { signal }); + const event = await getEvent(id, c.var); if (!event) { return c.json({ error: 'Record not found' }, 400); } - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const viewerPubkey = await user?.signer.getPublicKey(); if (lang.toLowerCase() === event?.language?.toLowerCase()) { return c.json({ error: 'Source and target languages are the same. No translation needed.' 
}, 400); } - const status = await renderStatus(event, { viewerPubkey }); + const status = await renderStatus(relay, event, { viewerPubkey }); if (!status?.content) { return c.json({ error: 'Bad request.', schema: result.error }, 400); } @@ -130,7 +133,7 @@ const translateController: AppController = async (c) => { } } - mastodonTranslation.detected_source_language = data.source_lang; + mastodonTranslation.detected_source_language = data.sourceLang; translationCache.set(cacheKey, mastodonTranslation); cachedTranslationsSizeGauge.set(translationCache.size); @@ -140,6 +143,7 @@ const translateController: AppController = async (c) => { if (e instanceof Error && e.message.includes('not supported')) { return c.json({ error: `Translation of source language '${event.language}' not supported` }, 422); } + logi({ level: 'error', ns: 'ditto.translate', error: errorJson(e) }); return c.json({ error: 'Service Unavailable' }, 503); } }; diff --git a/src/controllers/api/trends.ts b/packages/ditto/controllers/api/trends.ts similarity index 66% rename from src/controllers/api/trends.ts rename to packages/ditto/controllers/api/trends.ts index 45e2d117..a687c2cc 100644 --- a/src/controllers/api/trends.ts +++ b/packages/ditto/controllers/api/trends.ts @@ -1,29 +1,31 @@ +import { type DittoConf } from '@ditto/conf'; +import { paginated, paginationSchema } from '@ditto/mastoapi/pagination'; import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify'; +import { logi } from '@soapbox/logi'; import { z } from 'zod'; import { AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { paginationSchema } from '@/schemas/pagination.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { Storages } from '@/storages.ts'; import { generateDateRange, Time } from '@/utils/time.ts'; -import { unfurlCardCached } from '@/utils/unfurl.ts'; -import { paginated } from '@/utils/api.ts'; +import { PreviewCard, unfurlCardCached } from '@/utils/unfurl.ts'; +import { 
errorJson } from '@/utils/log.ts'; import { renderStatus } from '@/views/mastodon/statuses.ts'; -let trendingHashtagsCache = getTrendingHashtags().catch((e) => { - console.error(`Failed to get trending hashtags: ${e}`); - return Promise.resolve([]); -}); +interface TrendHistory { + day: string; + accounts: string; + uses: string; +} -Deno.cron('update trending hashtags cache', '35 * * * *', async () => { - try { - const trends = await getTrendingHashtags(); - trendingHashtagsCache = Promise.resolve(trends); - } catch (e) { - console.error(e); - } -}); +interface TrendingHashtag { + name: string; + url: string; + history: TrendHistory[]; +} + +interface TrendingLink extends PreviewCard { + history: TrendHistory[]; +} const trendingTagsQuerySchema = z.object({ limit: z.coerce.number().catch(10).transform((value) => Math.min(Math.max(value, 0), 20)), @@ -31,14 +33,26 @@ const trendingTagsQuerySchema = z.object({ }); const trendingTagsController: AppController = async (c) => { + const { conf, relay } = c.var; const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query()); - const trends = await trendingHashtagsCache; - return c.json(trends.slice(offset, offset + limit)); + + try { + const trends = await getTrendingHashtags(conf, relay); + return c.json(trends.slice(offset, offset + limit)); + } catch (e) { + logi({ + level: 'error', + ns: 'ditto.trends.api', + type: 'tags', + msg: 'Failed to get trending hashtags', + error: errorJson(e), + }); + return c.json([]); + } }; -async function getTrendingHashtags() { - const store = await Storages.db(); - const trends = await getTrendingTags(store, 't'); +async function getTrendingHashtags(conf: DittoConf, relay: NStore): Promise { + const trends = await getTrendingTags(relay, 't', await conf.signer.getPublicKey()); return trends.map((trend) => { const hashtag = trend.value; @@ -51,35 +65,32 @@ async function getTrendingHashtags() { return { name: hashtag, - url: Conf.local(`/tags/${hashtag}`), + url: 
conf.local(`/tags/${hashtag}`), history, }; }); } -let trendingLinksCache = getTrendingLinks().catch((e) => { - console.error(`Failed to get trending links: ${e}`); - return Promise.resolve([]); -}); - -Deno.cron('update trending links cache', '50 * * * *', async () => { - try { - const trends = await getTrendingLinks(); - trendingLinksCache = Promise.resolve(trends); - } catch (e) { - console.error(e); - } -}); - const trendingLinksController: AppController = async (c) => { + const { conf, relay } = c.var; const { limit, offset } = trendingTagsQuerySchema.parse(c.req.query()); - const trends = await trendingLinksCache; - return c.json(trends.slice(offset, offset + limit)); + try { + const trends = await getTrendingLinks(conf, relay); + return c.json(trends.slice(offset, offset + limit)); + } catch (e) { + logi({ + level: 'error', + ns: 'ditto.trends.api', + type: 'links', + msg: 'Failed to get trending links', + error: errorJson(e), + }); + return c.json([]); + } }; -async function getTrendingLinks() { - const store = await Storages.db(); - const trends = await getTrendingTags(store, 'r'); +async function getTrendingLinks(conf: DittoConf, relay: NStore): Promise { + const trends = await getTrendingTags(relay, 'r', await conf.signer.getPublicKey()); return Promise.all(trends.map(async (trend) => { const link = trend.value; @@ -113,14 +124,14 @@ async function getTrendingLinks() { } const trendingStatusesController: AppController = async (c) => { - const store = await Storages.db(); + const { conf, relay } = c.var; const { limit, offset, until } = paginationSchema.parse(c.req.query()); - const [label] = await store.query([{ + const [label] = await relay.query([{ kinds: [1985], '#L': ['pub.ditto.trends'], '#l': ['#e'], - authors: [Conf.pubkey], + authors: [await conf.signer.getPublicKey()], until, limit: 1, }]); @@ -134,8 +145,8 @@ const trendingStatusesController: AppController = async (c) => { return c.json([]); } - const results = await store.query([{ kinds: [1], 
ids }]) - .then((events) => hydrateEvents({ events, store })); + const results = await relay.query([{ kinds: [1, 20], ids }]) + .then((events) => hydrateEvents({ ...c.var, events })); // Sort events in the order they appear in the label. const events = ids @@ -143,7 +154,7 @@ const trendingStatusesController: AppController = async (c) => { .filter((event): event is NostrEvent => !!event); const statuses = await Promise.all( - events.map((event) => renderStatus(event, {})), + events.map((event) => renderStatus(relay, event, {})), ); return paginated(c, results, statuses); @@ -159,12 +170,12 @@ interface TrendingTag { }[]; } -export async function getTrendingTags(store: NStore, tagName: string): Promise { +export async function getTrendingTags(store: NStore, tagName: string, pubkey: string): Promise { const [label] = await store.query([{ kinds: [1985], '#L': ['pub.ditto.trends'], '#l': [`#${tagName}`], - authors: [Conf.pubkey], + authors: [pubkey], limit: 1, }]); @@ -187,7 +198,7 @@ export async function getTrendingTags(store: NStore, tagName: string): Promise { + const { method } = c.req; + const { pathname } = new URL(c.req.url); + + c.header('Cache-Control', 'no-store'); + if (err instanceof HTTPException) { if (err.res) { return err.res; @@ -14,7 +22,7 @@ export const errorHandler: ErrorHandler = (err, c) => { return c.json({ error: 'The server was unable to respond in a timely manner' }, 500); } - console.error(err); + logi({ level: 'error', ns: 'ditto.http', msg: 'Unhandled error', method, pathname, error: errorJson(err) }); return c.json({ error: 'Something went wrong' }, 500); }; diff --git a/src/controllers/frontend.ts b/packages/ditto/controllers/frontend.ts similarity index 50% rename from src/controllers/frontend.ts rename to packages/ditto/controllers/frontend.ts index b1a3bba4..ad98a9aa 100644 --- a/src/controllers/frontend.ts +++ b/packages/ditto/controllers/frontend.ts @@ -1,70 +1,62 @@ -import { AppMiddleware } from '@/app.ts'; -import { Conf } from 
'@/config.ts'; -import { Stickynotes } from '@soapbox/stickynotes'; -import { Storages } from '@/storages.ts'; +import { logi } from '@soapbox/logi'; + +import { AppContext, AppMiddleware } from '@/app.ts'; import { getPathParams, MetadataEntities } from '@/utils/og-metadata.ts'; import { getInstanceMetadata } from '@/utils/instance.ts'; +import { errorJson } from '@/utils/log.ts'; import { lookupPubkey } from '@/utils/lookup.ts'; import { renderMetadata } from '@/views/meta.ts'; import { getAuthor, getEvent } from '@/queries.ts'; import { renderStatus } from '@/views/mastodon/statuses.ts'; import { renderAccount } from '@/views/mastodon/accounts.ts'; -const console = new Stickynotes('ditto:frontend'); - /** Placeholder to find & replace with metadata. */ const META_PLACEHOLDER = '' as const; -export const frontendController: AppMiddleware = async (c, next) => { +export const frontendController: AppMiddleware = async (c) => { + c.header('Cache-Control', 'max-age=86400, s-maxage=30, public, stale-if-error=604800'); + try { - const content = await Deno.readTextFile(new URL('../../public/index.html', import.meta.url)); - - const ua = c.req.header('User-Agent'); - console.debug('ua', ua); - - if (!Conf.crawlerRegex.test(ua ?? '')) { - return c.html(content); - } + const content = await Deno.readTextFile(new URL('../../../public/index.html', import.meta.url)); if (content.includes(META_PLACEHOLDER)) { const params = getPathParams(c.req.path); try { - const entities = await getEntities(params ?? {}); + const entities = await getEntities(c, params ?? 
{}); const meta = renderMetadata(c.req.url, entities); return c.html(content.replace(META_PLACEHOLDER, meta)); } catch (e) { - console.log(`Error building meta tags: ${e}`); + logi({ level: 'error', ns: 'ditto.frontend', msg: 'Error building meta tags', error: errorJson(e) }); return c.html(content); } } return c.html(content); - } catch (e) { - console.log(e); - await next(); + } catch { + return c.notFound(); } }; -async function getEntities(params: { acct?: string; statusId?: string }): Promise { - const store = await Storages.db(); +async function getEntities(c: AppContext, params: { acct?: string; statusId?: string }): Promise { + const { relay } = c.var; const entities: MetadataEntities = { - instance: await getInstanceMetadata(store), + instance: await getInstanceMetadata(relay), }; if (params.statusId) { - const event = await getEvent(params.statusId, { kind: 1 }); + const event = await getEvent(params.statusId, c.var); if (event) { - entities.status = await renderStatus(event, {}); + entities.status = await renderStatus(relay, event, {}); entities.account = entities.status?.account; } return entities; } if (params.acct) { - const pubkey = await lookupPubkey(params.acct.replace(/^@/, '')); - const event = pubkey ? await getAuthor(pubkey) : undefined; + const pubkey = await lookupPubkey(params.acct.replace(/^@/, ''), c.var); + const event = pubkey ? 
await getAuthor(pubkey, c.var) : undefined; if (event) { - entities.account = await renderAccount(event); + entities.account = renderAccount(event); } } diff --git a/src/controllers/manifest.ts b/packages/ditto/controllers/manifest.ts similarity index 84% rename from src/controllers/manifest.ts rename to packages/ditto/controllers/manifest.ts index 2e75de04..70d42dea 100644 --- a/src/controllers/manifest.ts +++ b/packages/ditto/controllers/manifest.ts @@ -1,10 +1,11 @@ import { AppController } from '@/app.ts'; -import { Storages } from '@/storages.ts'; import { WebManifestCombined } from '@/types/webmanifest.ts'; import { getInstanceMetadata } from '@/utils/instance.ts'; export const manifestController: AppController = async (c) => { - const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal); + const { relay, signal } = c.var; + + const meta = await getInstanceMetadata(relay, signal); const manifest: WebManifestCombined = { description: meta.about, diff --git a/packages/ditto/controllers/metrics.ts b/packages/ditto/controllers/metrics.ts new file mode 100644 index 00000000..be3ef624 --- /dev/null +++ b/packages/ditto/controllers/metrics.ts @@ -0,0 +1,22 @@ +import { dbAvailableConnectionsGauge, dbPoolSizeGauge } from '@ditto/metrics'; +import { register } from 'prom-client'; + +import { AppController } from '@/app.ts'; + +/** Prometheus/OpenMetrics controller. */ +export const metricsController: AppController = async (c) => { + const { db } = c.var; + + // Update some metrics at request time. + dbPoolSizeGauge.set(db.poolSize); + dbAvailableConnectionsGauge.set(db.availableConnections); + + // Serve the metrics. 
+ const metrics = await register.metrics(); + + const headers: HeadersInit = { + 'Content-Type': register.contentType, + }; + + return c.text(metrics, 200, headers); +}; diff --git a/src/controllers/nostr/relay-info.ts b/packages/ditto/controllers/nostr/relay-info.ts similarity index 76% rename from src/controllers/nostr/relay-info.ts rename to packages/ditto/controllers/nostr/relay-info.ts index 9ee7babb..50702c23 100644 --- a/src/controllers/nostr/relay-info.ts +++ b/packages/ditto/controllers/nostr/relay-info.ts @@ -1,18 +1,19 @@ import denoJson from 'deno.json' with { type: 'json' }; import { AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { Storages } from '@/storages.ts'; import { getInstanceMetadata } from '@/utils/instance.ts'; const relayInfoController: AppController = async (c) => { - const store = await Storages.db(); - const meta = await getInstanceMetadata(store, c.req.raw.signal); + const { conf, relay, signal } = c.var; + + const meta = await getInstanceMetadata(relay, signal); + + c.res.headers.set('access-control-allow-origin', '*'); return c.json({ name: meta.name, description: meta.about, - pubkey: Conf.pubkey, + pubkey: await conf.signer.getPublicKey(), contact: meta.email, supported_nips: [1, 5, 9, 11, 16, 45, 50, 46, 98], software: 'Ditto', diff --git a/src/controllers/nostr/relay.ts b/packages/ditto/controllers/nostr/relay.ts similarity index 60% rename from src/controllers/nostr/relay.ts rename to packages/ditto/controllers/nostr/relay.ts index 71397ee8..f6641549 100644 --- a/src/controllers/nostr/relay.ts +++ b/packages/ditto/controllers/nostr/relay.ts @@ -1,6 +1,10 @@ -import { Stickynotes } from '@soapbox/stickynotes'; -import TTLCache from '@isaacs/ttlcache'; +import { type DittoConf } from '@ditto/conf'; +import { relayConnectionsGauge, relayEventsCounter, relayMessagesCounter } from '@ditto/metrics'; +import { MemoryRateLimiter, MultiRateLimiter, type RateLimiter } from '@ditto/ratelimiter'; +import { logi 
} from '@soapbox/logi'; +import { JsonValue } from '@std/json'; import { + NKinds, NostrClientCLOSE, NostrClientCOUNT, NostrClientEVENT, @@ -11,47 +15,53 @@ import { } from '@nostrify/nostrify'; import { AppController } from '@/app.ts'; -import { Conf } from '@/config.ts'; import { relayInfoController } from '@/controllers/nostr/relay-info.ts'; -import { relayConnectionsGauge, relayEventsCounter, relayMessagesCounter } from '@/metrics.ts'; -import * as pipeline from '@/pipeline.ts'; import { RelayError } from '@/RelayError.ts'; -import { Storages } from '@/storages.ts'; -import { Time } from '@/utils/time.ts'; +import { type DittoPgStore } from '@/storages/DittoPgStore.ts'; +import { errorJson } from '@/utils/log.ts'; import { purifyEvent } from '@/utils/purify.ts'; +import { Time } from '@/utils/time.ts'; -/** Limit of initial events returned for a subscription. */ -const FILTER_LIMIT = 100; - -const LIMITER_WINDOW = Time.minutes(1); -const LIMITER_LIMIT = 300; - -const limiter = new TTLCache(); +const limiters = { + msg: new MemoryRateLimiter({ limit: 300, window: Time.minutes(1) }), + req: new MultiRateLimiter([ + new MemoryRateLimiter({ limit: 15, window: Time.seconds(5) }), + new MemoryRateLimiter({ limit: 300, window: Time.minutes(5) }), + new MemoryRateLimiter({ limit: 1000, window: Time.hours(1) }), + ]), + event: new MultiRateLimiter([ + new MemoryRateLimiter({ limit: 10, window: Time.seconds(10) }), + new MemoryRateLimiter({ limit: 100, window: Time.hours(1) }), + new MemoryRateLimiter({ limit: 500, window: Time.days(1) }), + ]), + ephemeral: new MemoryRateLimiter({ limit: 30, window: Time.seconds(10) }), +}; /** Connections for metrics purposes. */ const connections = new Set(); -const console = new Stickynotes('ditto:relay'); - /** Set up the Websocket connection. 
*/ -function connectStream(socket: WebSocket, ip: string | undefined) { +function connectStream(conf: DittoConf, relay: DittoPgStore, socket: WebSocket, ip: string | undefined) { const controllers = new Map(); + if (ip) { + const remaining = Object + .values(limiters) + .reduce((acc, limiter) => Math.min(acc, limiter.client(ip).remaining), Infinity); + + if (remaining < 0) { + socket.close(1008, 'Rate limit exceeded'); + return; + } + } + socket.onopen = () => { connections.add(socket); relayConnectionsGauge.set(connections.size); }; socket.onmessage = (e) => { - if (ip) { - const count = limiter.get(ip) ?? 0; - limiter.set(ip, count + 1, { ttl: LIMITER_WINDOW }); - - if (count > LIMITER_LIMIT) { - socket.close(1008, 'Rate limit exceeded'); - return; - } - } + if (rateLimited(limiters.msg)) return; if (typeof e.data !== 'string') { socket.close(1003, 'Invalid message'); @@ -59,8 +69,14 @@ function connectStream(socket: WebSocket, ip: string | undefined) { } const result = n.json().pipe(n.clientMsg()).safeParse(e.data); + if (result.success) { - relayMessagesCounter.inc({ verb: result.data[0] }); + const msg = result.data; + const verb = msg[0]; + + logi({ level: 'trace', ns: 'ditto.relay.msg', verb, msg: msg as JsonValue, ip }); + relayMessagesCounter.inc({ verb }); + handleMsg(result.data); } else { relayMessagesCounter.inc(); @@ -77,6 +93,19 @@ function connectStream(socket: WebSocket, ip: string | undefined) { } }; + function rateLimited(limiter: Pick): boolean { + if (ip) { + const client = limiter.client(ip); + try { + client.hit(); + } catch { + socket.close(1008, 'Rate limit exceeded'); + return true; + } + } + return false; + } + /** Handle client message. */ function handleMsg(msg: NostrClientMsg) { switch (msg[0]) { @@ -97,16 +126,15 @@ function connectStream(socket: WebSocket, ip: string | undefined) { /** Handle REQ. Start a subscription. 
*/ async function handleReq([_, subId, ...filters]: NostrClientREQ): Promise { + if (rateLimited(limiters.req)) return; + const controller = new AbortController(); controllers.get(subId)?.abort(); controllers.set(subId, controller); - const store = await Storages.db(); - const pubsub = await Storages.pubsub(); - try { - for (const event of await store.query(filters, { limit: FILTER_LIMIT, timeout: Conf.db.timeouts.relay })) { - send(['EVENT', subId, purifyEvent(event)]); + for await (const [verb, , ...rest] of relay.req(filters, { limit: 100, timeout: conf.db.timeouts.relay })) { + send([verb, subId, ...rest] as NostrRelayMsg); } } catch (e) { if (e instanceof RelayError) { @@ -119,33 +147,25 @@ function connectStream(socket: WebSocket, ip: string | undefined) { controllers.delete(subId); return; } - - send(['EOSE', subId]); - - try { - for await (const msg of pubsub.req(filters, { signal: controller.signal })) { - if (msg[0] === 'EVENT') { - send(['EVENT', subId, msg[2]]); - } - } - } catch (_e) { - controllers.delete(subId); - } } /** Handle EVENT. Store the event. */ async function handleEvent([_, event]: NostrClientEVENT): Promise { relayEventsCounter.inc({ kind: event.kind.toString() }); + + const limiter = NKinds.ephemeral(event.kind) ? limiters.ephemeral : limiters.event; + if (rateLimited(limiter)) return; + try { // This will store it (if eligible) and run other side-effects. - await pipeline.handleEvent(purifyEvent(event), AbortSignal.timeout(1000)); + await relay.event(purifyEvent(event), { signal: AbortSignal.timeout(1000) }); send(['OK', event.id, true, '']); } catch (e) { if (e instanceof RelayError) { send(['OK', event.id, false, e.message]); } else { send(['OK', event.id, false, 'error: something went wrong']); - console.error(e); + logi({ level: 'error', ns: 'ditto.relay', msg: 'Error in relay', error: errorJson(e), ip }); } } } @@ -161,8 +181,8 @@ function connectStream(socket: WebSocket, ip: string | undefined) { /** Handle COUNT. 
Return the number of events matching the filters. */ async function handleCount([_, subId, ...filters]: NostrClientCOUNT): Promise { - const store = await Storages.db(); - const { count } = await store.count(filters, { timeout: Conf.db.timeouts.relay }); + if (rateLimited(limiters.req)) return; + const { count } = await relay.count(filters, { timeout: conf.db.timeouts.relay }); send(['COUNT', subId, { count, approximate: false }]); } @@ -175,6 +195,7 @@ function connectStream(socket: WebSocket, ip: string | undefined) { } const relayController: AppController = (c, next) => { + const { conf, relay } = c.var; const upgrade = c.req.header('upgrade'); // NIP-11: https://github.com/nostr-protocol/nips/blob/master/11.md @@ -186,16 +207,14 @@ const relayController: AppController = (c, next) => { return c.text('Please use a Nostr client to connect.', 400); } - const ip = c.req.header('x-real-ip'); - if (ip) { - const count = limiter.get(ip) ?? 0; - if (count > LIMITER_LIMIT) { - return c.json({ error: 'Rate limit exceeded' }, 429); - } + let ip = c.req.header('x-real-ip'); + + if (ip && conf.ipWhitelist.includes(ip)) { + ip = undefined; } - const { socket, response } = Deno.upgradeWebSocket(c.req.raw, { idleTimeout: 30 }); - connectStream(socket, ip); + const { socket, response } = Deno.upgradeWebSocket(c.req.raw); + connectStream(conf, relay as DittoPgStore, socket, ip); return response; }; diff --git a/src/controllers/well-known/nodeinfo.ts b/packages/ditto/controllers/well-known/nodeinfo.ts similarity index 89% rename from src/controllers/well-known/nodeinfo.ts rename to packages/ditto/controllers/well-known/nodeinfo.ts index 4f03f425..bd446ce9 100644 --- a/src/controllers/well-known/nodeinfo.ts +++ b/packages/ditto/controllers/well-known/nodeinfo.ts @@ -1,17 +1,17 @@ -import { Conf } from '@/config.ts'; - import type { AppController } from '@/app.ts'; const nodeInfoController: AppController = (c) => { + const { conf } = c.var; + return c.json({ links: [ { rel: 
'http://nodeinfo.diaspora.software/ns/schema/2.0', - href: Conf.local('/nodeinfo/2.0'), + href: conf.local('/nodeinfo/2.0'), }, { rel: 'http://nodeinfo.diaspora.software/ns/schema/2.1', - href: Conf.local('/nodeinfo/2.1'), + href: conf.local('/nodeinfo/2.1'), }, ], }); diff --git a/packages/ditto/controllers/well-known/nostr.ts b/packages/ditto/controllers/well-known/nostr.ts new file mode 100644 index 00000000..7c27aa70 --- /dev/null +++ b/packages/ditto/controllers/well-known/nostr.ts @@ -0,0 +1,48 @@ +import { NostrJson } from '@nostrify/nostrify'; +import { z } from 'zod'; + +import { AppController } from '@/app.ts'; +import { localNip05Lookup } from '@/utils/nip05.ts'; + +const nameSchema = z.string().min(1).regex(/^[\w.-]+$/); +const emptyResult: NostrJson = { names: {}, relays: {} }; + +/** + * Serves NIP-05's nostr.json. + * https://github.com/nostr-protocol/nips/blob/master/05.md + */ +const nostrController: AppController = async (c) => { + // If there are no query parameters, this will always return an empty result. + if (!Object.entries(c.req.queries()).length) { + c.header('Cache-Control', 'max-age=31536000, public, immutable, stale-while-revalidate=86400'); + return c.json(emptyResult); + } + + const result = nameSchema.safeParse(c.req.query('name')); + const name = result.success ? result.data : undefined; + const pointer = name ? await localNip05Lookup(name, c.var) : undefined; + + if (!name || !pointer) { + // Not found, cache for 5 minutes. + c.header('Cache-Control', 'max-age=300, public, stale-while-revalidate=30'); + return c.json(emptyResult); + } + + const { pubkey, relays = [] } = pointer; + + // It's found, so cache for 6 hours. 
+ c.header('Cache-Control', 'max-age=21600, public, stale-while-revalidate=3600'); + + return c.json( + { + names: { + [name]: pubkey, + }, + relays: { + [pubkey]: relays, + }, + } satisfies NostrJson, + ); +}; + +export { nostrController }; diff --git a/packages/ditto/cron.ts b/packages/ditto/cron.ts new file mode 100644 index 00000000..bcbbffb0 --- /dev/null +++ b/packages/ditto/cron.ts @@ -0,0 +1,24 @@ +import { sql } from 'kysely'; + +import { + type TrendsCtx, + updateTrendingEvents, + updateTrendingHashtags, + updateTrendingLinks, + updateTrendingPubkeys, + updateTrendingZappedEvents, +} from '@/trends.ts'; + +/** Start cron jobs for the application. */ +export function cron(ctx: TrendsCtx) { + Deno.cron('update trending pubkeys', '0 * * * *', () => updateTrendingPubkeys(ctx)); + Deno.cron('update trending zapped events', '7 * * * *', () => updateTrendingZappedEvents(ctx)); + Deno.cron('update trending events', '15 * * * *', () => updateTrendingEvents(ctx)); + Deno.cron('update trending hashtags', '30 * * * *', () => updateTrendingHashtags(ctx)); + Deno.cron('update trending links', '45 * * * *', () => updateTrendingLinks(ctx)); + + Deno.cron('refresh top authors', '20 * * * *', async () => { + const { kysely } = ctx.db; + await sql`refresh materialized view top_authors`.execute(kysely); + }); +} diff --git a/packages/ditto/deno.json b/packages/ditto/deno.json new file mode 100644 index 00000000..82d28139 --- /dev/null +++ b/packages/ditto/deno.json @@ -0,0 +1,13 @@ +{ + "name": "@ditto/ditto", + "exports": {}, + "imports": { + "@/": "./", + "deno.json": "../../deno.json" + }, + "lint": { + "rules": { + "exclude": ["verbatim-module-syntax"] + } + } +} diff --git a/src/entities/MastodonAccount.ts b/packages/ditto/entities/MastodonAccount.ts similarity index 91% rename from src/entities/MastodonAccount.ts rename to packages/ditto/entities/MastodonAccount.ts index 99409c6a..4ea6665b 100644 --- a/src/entities/MastodonAccount.ts +++ 
b/packages/ditto/entities/MastodonAccount.ts @@ -45,6 +45,12 @@ export interface MastodonAccount { ditto: { accepts_zaps: boolean; external_url: string; + streak: { + days: number; + start: string | null; + end: string | null; + expires: string | null; + }; }; domain?: string; pleroma: { diff --git a/src/entities/MastodonAttachment.ts b/packages/ditto/entities/MastodonAttachment.ts similarity index 100% rename from src/entities/MastodonAttachment.ts rename to packages/ditto/entities/MastodonAttachment.ts diff --git a/src/entities/MastodonMention.ts b/packages/ditto/entities/MastodonMention.ts similarity index 100% rename from src/entities/MastodonMention.ts rename to packages/ditto/entities/MastodonMention.ts diff --git a/src/entities/MastodonStatus.ts b/packages/ditto/entities/MastodonStatus.ts similarity index 100% rename from src/entities/MastodonStatus.ts rename to packages/ditto/entities/MastodonStatus.ts diff --git a/src/entities/MastodonTranslation.ts b/packages/ditto/entities/MastodonTranslation.ts similarity index 100% rename from src/entities/MastodonTranslation.ts rename to packages/ditto/entities/MastodonTranslation.ts diff --git a/src/entities/PreviewCard.ts b/packages/ditto/entities/PreviewCard.ts similarity index 100% rename from src/entities/PreviewCard.ts rename to packages/ditto/entities/PreviewCard.ts diff --git a/packages/ditto/firehose.ts b/packages/ditto/firehose.ts new file mode 100644 index 00000000..1daca562 --- /dev/null +++ b/packages/ditto/firehose.ts @@ -0,0 +1,42 @@ +import { firehoseEventsCounter } from '@ditto/metrics'; +import { Semaphore } from '@core/asyncutil'; +import { NRelay, NStore } from '@nostrify/nostrify'; +import { logi } from '@soapbox/logi'; + +import { nostrNow } from '@/utils.ts'; + +interface FirehoseOpts { + pool: NRelay; + relay: NStore; + concurrency: number; + kinds: number[]; + timeout?: number; +} + +/** + * This function watches events on all known relays and performs + * side-effects based on them, such as 
trending hashtag tracking + * and storing events for notifications and the home feed. + */ +export async function startFirehose(opts: FirehoseOpts): Promise { + const { pool, relay, kinds, concurrency, timeout = 5000 } = opts; + + const sem = new Semaphore(concurrency); + + for await (const msg of pool.req([{ kinds, limit: 0, since: nostrNow() }])) { + if (msg[0] === 'EVENT') { + const event = msg[2]; + + logi({ level: 'debug', ns: 'ditto.event', source: 'firehose', id: event.id, kind: event.kind }); + firehoseEventsCounter.inc({ kind: event.kind }); + + sem.lock(async () => { + try { + await relay.event(event, { signal: AbortSignal.timeout(timeout) }); + } catch { + // Ignore + } + }); + } + } +} diff --git a/src/interfaces/DittoEvent.ts b/packages/ditto/interfaces/DittoEvent.ts similarity index 87% rename from src/interfaces/DittoEvent.ts rename to packages/ditto/interfaces/DittoEvent.ts index cca7c0ca..d1b0c280 100644 --- a/src/interfaces/DittoEvent.ts +++ b/packages/ditto/interfaces/DittoEvent.ts @@ -6,6 +6,13 @@ export interface AuthorStats { followers_count: number; following_count: number; notes_count: number; + streak_start?: number; + streak_end?: number; + nip05?: string; + nip05_domain?: string; + nip05_hostname?: string; + nip05_last_verified_at?: number; + favicon?: string; } /** Ditto internal stats for the event. */ @@ -20,9 +27,9 @@ export interface EventStats { /** Internal Event representation used by Ditto, including extra keys. 
*/ export interface DittoEvent extends NostrEvent { author?: DittoEvent; - author_domain?: string; author_stats?: AuthorStats; event_stats?: EventStats; + mentions?: DittoEvent[]; user?: DittoEvent; repost?: DittoEvent; quote?: DittoEvent; diff --git a/src/interfaces/DittoPagination.ts b/packages/ditto/interfaces/DittoPagination.ts similarity index 100% rename from src/interfaces/DittoPagination.ts rename to packages/ditto/interfaces/DittoPagination.ts diff --git a/packages/ditto/middleware/cacheControlMiddleware.test.ts b/packages/ditto/middleware/cacheControlMiddleware.test.ts new file mode 100644 index 00000000..dd3e0acf --- /dev/null +++ b/packages/ditto/middleware/cacheControlMiddleware.test.ts @@ -0,0 +1,33 @@ +import { Hono } from '@hono/hono'; +import { assertEquals } from '@std/assert'; + +import { cacheControlMiddleware } from '@/middleware/cacheControlMiddleware.ts'; + +Deno.test('cacheControlMiddleware with multiple options', async () => { + const app = new Hono(); + + app.use(cacheControlMiddleware({ + maxAge: 31536000, + public: true, + immutable: true, + })); + + app.get('/', (c) => c.text('OK')); + + const response = await app.request('/'); + const cacheControl = response.headers.get('Cache-Control'); + + assertEquals(cacheControl, 'max-age=31536000, public, immutable'); +}); + +Deno.test('cacheControlMiddleware with no options does not add header', async () => { + const app = new Hono(); + + app.use(cacheControlMiddleware({})); + app.get('/', (c) => c.text('OK')); + + const response = await app.request('/'); + const cacheControl = response.headers.get('Cache-Control'); + + assertEquals(cacheControl, null); +}); diff --git a/packages/ditto/middleware/cacheControlMiddleware.ts b/packages/ditto/middleware/cacheControlMiddleware.ts new file mode 100644 index 00000000..59557e4f --- /dev/null +++ b/packages/ditto/middleware/cacheControlMiddleware.ts @@ -0,0 +1,102 @@ +import { MiddlewareHandler } from '@hono/hono'; + +/** + * Options for the 
`cacheControlMiddleware` middleware. + * + * NOTE: All numerical values are in **seconds**. + * + * See the definitions of [fresh](https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching#fresh_and_stale_based_on_age) and [stale](https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching#fresh_and_stale_based_on_age). + */ +export interface CacheControlMiddlewareOpts { + /** Indicates that the response remains fresh until _N_ seconds after the response is generated. */ + maxAge?: number; + /** Indicates how long the response remains fresh in a shared cache. */ + sMaxAge?: number; + /** Indicates that the response can be stored in caches, but the response must be validated with the origin server before each reuse, even when the cache is disconnected from the origin server. */ + noCache?: boolean; + /** Indicates that the response can be stored in caches and can be reused while fresh. */ + mustRevalidate?: boolean; + /** Equivalent of `must-revalidate`, but specifically for shared caches only. */ + proxyRevalidate?: boolean; + /** Indicates that any caches of any kind (private or shared) should not store this response. */ + noStore?: boolean; + /** Indicates that the response can be stored only in a private cache (e.g. local caches in browsers). */ + private?: boolean; + /** Indicates that the response can be stored in a shared cache. */ + public?: boolean; + /** Indicates that a cache should store the response only if it understands the requirements for caching based on status code. */ + mustUnderstand?: boolean; + /** Indicates that any intermediary (regardless of whether it implements a cache) shouldn't transform the response contents. */ + noTransform?: boolean; + /** Indicates that the response will not be updated while it's fresh. */ + immutable?: boolean; + /** Indicates that the cache could reuse a stale response while it revalidates it to a cache. 
*/ + staleWhileRevalidate?: number; + /** indicates that the cache can reuse a stale response when an upstream server generates an error, or when the error is generated locally. */ + staleIfError?: number; +} + +/** Adds a [`Cache-Control`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control) header to the response. */ +export function cacheControlMiddleware(opts: CacheControlMiddlewareOpts): MiddlewareHandler { + return async (c, next) => { + const directives: string[] = []; + + if (typeof opts.maxAge === 'number') { + directives.push(`max-age=${opts.maxAge}`); + } + + if (typeof opts.sMaxAge === 'number') { + directives.push(`s-maxage=${opts.sMaxAge}`); + } + + if (opts.noCache) { + directives.push('no-cache'); + } + + if (opts.mustRevalidate) { + directives.push('must-revalidate'); + } + + if (opts.proxyRevalidate) { + directives.push('proxy-revalidate'); + } + + if (opts.noStore) { + directives.push('no-store'); + } + + if (opts.private) { + directives.push('private'); + } + + if (opts.public) { + directives.push('public'); + } + + if (opts.mustUnderstand) { + directives.push('must-understand'); + } + + if (opts.noTransform) { + directives.push('no-transform'); + } + + if (opts.immutable) { + directives.push('immutable'); + } + + if (typeof opts.staleWhileRevalidate === 'number') { + directives.push(`stale-while-revalidate=${opts.staleWhileRevalidate}`); + } + + if (typeof opts.staleIfError === 'number') { + directives.push(`stale-if-error=${opts.staleIfError}`); + } + + if (directives.length) { + c.header('Cache-Control', directives.join(', ')); + } + + await next(); + }; +} diff --git a/src/middleware/cspMiddleware.ts b/packages/ditto/middleware/cspMiddleware.ts similarity index 82% rename from src/middleware/cspMiddleware.ts rename to packages/ditto/middleware/cspMiddleware.ts index 70c9316d..8e890101 100644 --- a/src/middleware/cspMiddleware.ts +++ b/packages/ditto/middleware/cspMiddleware.ts @@ -1,20 +1,18 @@ import { AppMiddleware } 
from '@/app.ts'; -import { Conf } from '@/config.ts'; import { PleromaConfigDB } from '@/utils/PleromaConfigDB.ts'; -import { Storages } from '@/storages.ts'; import { getPleromaConfigs } from '@/utils/pleroma.ts'; -let configDBCache: Promise | undefined; - export const cspMiddleware = (): AppMiddleware => { + let configDBCache: Promise | undefined; + return async (c, next) => { - const store = await Storages.db(); + const { conf, relay } = c.var; if (!configDBCache) { - configDBCache = getPleromaConfigs(store); + configDBCache = getPleromaConfigs(relay); } - const { host, protocol, origin } = Conf.url; + const { host, protocol, origin } = conf.url; const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:'; const configDB = await configDBCache; const sentryDsn = configDB.getIn(':pleroma', ':frontend_configurations', ':soapbox_fe', 'sentryDsn'); diff --git a/packages/ditto/middleware/logiMiddleware.ts b/packages/ditto/middleware/logiMiddleware.ts new file mode 100644 index 00000000..be17e3bb --- /dev/null +++ b/packages/ditto/middleware/logiMiddleware.ts @@ -0,0 +1,19 @@ +import { MiddlewareHandler } from '@hono/hono'; +import { logi } from '@soapbox/logi'; + +export const logiMiddleware: MiddlewareHandler = async (c, next) => { + const { method } = c.req; + const { pathname } = new URL(c.req.url); + + logi({ level: 'info', ns: 'ditto.http.request', method, pathname }); + + const start = new Date(); + + await next(); + + const end = new Date(); + const duration = (end.getTime() - start.getTime()) / 1000; + const level = c.res.status >= 500 ? 
'error' : 'info'; + + logi({ level, ns: 'ditto.http.response', method, pathname, status: c.res.status, duration }); +}; diff --git a/src/middleware/metricsMiddleware.ts b/packages/ditto/middleware/metricsMiddleware.ts similarity index 96% rename from src/middleware/metricsMiddleware.ts rename to packages/ditto/middleware/metricsMiddleware.ts index 0b213b82..91f2c422 100644 --- a/src/middleware/metricsMiddleware.ts +++ b/packages/ditto/middleware/metricsMiddleware.ts @@ -1,8 +1,7 @@ +import { httpRequestsCounter, httpResponseDurationHistogram, httpResponsesCounter } from '@ditto/metrics'; import { ScopedPerformance } from '@esroyo/scoped-performance'; import { MiddlewareHandler } from '@hono/hono'; -import { httpRequestsCounter, httpResponseDurationHistogram, httpResponsesCounter } from '@/metrics.ts'; - /** Prometheus metrics middleware that tracks HTTP requests by methods and responses by status code. */ export const metricsMiddleware: MiddlewareHandler = async (c, next) => { // Start a timer to measure the duration of the response. diff --git a/packages/ditto/middleware/notActivitypubMiddleware.ts b/packages/ditto/middleware/notActivitypubMiddleware.ts new file mode 100644 index 00000000..1cdb9cfb --- /dev/null +++ b/packages/ditto/middleware/notActivitypubMiddleware.ts @@ -0,0 +1,19 @@ +import { MiddlewareHandler } from '@hono/hono'; + +const ACTIVITYPUB_TYPES = [ + 'application/activity+json', + 'application/ld+json', + 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"', +]; + +/** Return 4xx errors on common (unsupported) ActivityPub routes to prevent AP traffic. */ +export const notActivitypubMiddleware: MiddlewareHandler = async (c, next) => { + const accept = c.req.header('accept'); + const types = accept?.split(',')?.map((type) => type.trim()) ?? 
[]; + + if (types.every((type) => ACTIVITYPUB_TYPES.includes(type))) { + return c.text('ActivityPub is not supported', 406); + } + + await next(); +}; diff --git a/packages/ditto/middleware/rateLimitMiddleware.ts b/packages/ditto/middleware/rateLimitMiddleware.ts new file mode 100644 index 00000000..651598b4 --- /dev/null +++ b/packages/ditto/middleware/rateLimitMiddleware.ts @@ -0,0 +1,25 @@ +import { type DittoConf } from '@ditto/conf'; +import { MiddlewareHandler } from '@hono/hono'; +import { rateLimiter } from 'hono-rate-limiter'; + +/** + * Rate limit middleware for Hono, based on [`hono-rate-limiter`](https://github.com/rhinobase/hono-rate-limiter). + */ +export function rateLimitMiddleware(limit: number, windowMs: number, includeHeaders?: boolean): MiddlewareHandler { + // @ts-ignore Mismatched hono versions. + return rateLimiter<{ Variables: { conf: DittoConf } }>({ + limit, + windowMs, + standardHeaders: includeHeaders, + handler: (c) => { + c.header('Cache-Control', 'no-store'); + return c.text('Too many requests, please try again later.', 429); + }, + skip: (c) => { + const { conf } = c.var; + const ip = c.req.header('x-real-ip'); + return !ip || conf.ipWhitelist.includes(ip); + }, + keyGenerator: (c) => c.req.header('x-real-ip')!, + }); +} diff --git a/packages/ditto/middleware/swapNutzapsMiddleware.ts b/packages/ditto/middleware/swapNutzapsMiddleware.ts new file mode 100644 index 00000000..79bdf01e --- /dev/null +++ b/packages/ditto/middleware/swapNutzapsMiddleware.ts @@ -0,0 +1,183 @@ +import { CashuMint, CashuWallet, getEncodedToken, type Proof } from '@cashu/cashu-ts'; +import { MiddlewareHandler } from '@hono/hono'; +import { HTTPException } from '@hono/hono/http-exception'; +import { getPublicKey } from 'nostr-tools'; +import { NostrFilter, NSchema as n } from '@nostrify/nostrify'; +import { stringToBytes } from '@scure/base'; +import { logi } from '@soapbox/logi'; + +import { AppEnv } from '@/app.ts'; +import { isNostrId } from '@/utils.ts'; 
+import { errorJson } from '@/utils/log.ts'; +import { createEvent } from '@/utils/api.ts'; +import { z } from 'zod'; + +/** + * Swap nutzaps into wallet (create new events) if the user has a wallet, otheriwse, just fallthrough. + * Errors are only thrown if 'signer' and 'store' middlewares are not set. + */ +export const swapNutzapsMiddleware: MiddlewareHandler = async (c, next) => { + const { conf, relay, user, signal } = c.var; + + if (!user) { + throw new HTTPException(401, { message: 'No pubkey provided' }); + } + + if (!user.signer.nip44) { + throw new HTTPException(401, { message: 'No NIP-44 signer provided' }); + } + + if (!relay) { + throw new HTTPException(401, { message: 'No store provided' }); + } + + const pubkey = await user.signer.getPublicKey(); + const [wallet] = await relay.query([{ authors: [pubkey], kinds: [17375] }], { signal }); + + if (wallet) { + let decryptedContent: string; + try { + decryptedContent = await user.signer.nip44.decrypt(pubkey, wallet.content); + } catch (e) { + logi({ + level: 'error', + ns: 'ditto.api.cashu.wallet.swap', + id: wallet.id, + kind: wallet.kind, + error: errorJson(e), + }); + return c.json({ error: 'Could not decrypt wallet content.' }, 400); + } + + let contentTags: string[][]; + try { + contentTags = JSON.parse(decryptedContent); + } catch { + return c.json({ error: 'Could not JSON parse the decrypted wallet content.' }, 400); + } + + const privkey = contentTags.find(([value]) => value === 'privkey')?.[1]; + if (!privkey || !isNostrId(privkey)) { + return c.json({ error: 'Wallet does not contain privkey or privkey is not a valid nostr id.' }, 400); + } + const p2pk = getPublicKey(stringToBytes('hex', privkey)); + + const [nutzapInformation] = await relay.query([{ authors: [pubkey], kinds: [10019] }], { signal }); + if (!nutzapInformation) { + return c.json({ error: 'You need to have a nutzap information event so we can get the mints.' 
}, 400); + } + + const nutzapInformationPubkey = nutzapInformation.tags.find(([name]) => name === 'pubkey')?.[1]; + if (!nutzapInformationPubkey || (nutzapInformationPubkey !== p2pk)) { + return c.json({ + error: + "You do not have a 'pubkey' tag in your nutzap information event or the one you have does not match the one derivated from the wallet.", + }, 400); + } + + const mints = [...new Set(nutzapInformation.tags.filter(([name]) => name === 'mint').map(([_, value]) => value))]; + if (mints.length < 1) { + return c.json({ error: 'You do not have any mints in your nutzap information event.' }, 400); + } + + const nutzapsFilter: NostrFilter = { kinds: [9321], '#p': [pubkey], '#u': mints }; + + const [nutzapHistory] = await relay.query([{ kinds: [7376], authors: [pubkey] }], { signal }); + if (nutzapHistory) { + nutzapsFilter.since = nutzapHistory.created_at; + } + + const mintsToProofs: { [key: string]: { proofs: Proof[]; redeemed: string[][] } } = {}; + + const nutzaps = await relay.query([nutzapsFilter], { signal }); + + for (const event of nutzaps) { + try { + const mint = event.tags.find(([name]) => name === 'u')?.[1]; + if (!mint) { + continue; + } + + const proof = event.tags.find(([name]) => name === 'proof')?.[1]; + if (!proof) { + continue; + } + + if (!mintsToProofs[mint]) { + mintsToProofs[mint] = { proofs: [], redeemed: [] }; + } + + const parsed = n.json().pipe( + z.object({ + id: z.string(), + amount: z.number(), + secret: z.string(), + C: z.string(), + dleq: z.object({ s: z.string(), e: z.string(), r: z.string().optional() }).optional(), + dleqValid: z.boolean().optional(), + }).array(), + ).safeParse(proof); + + if (!parsed.success) { + continue; + } + + mintsToProofs[mint].proofs = [...mintsToProofs[mint].proofs, ...parsed.data]; + mintsToProofs[mint].redeemed = [ + ...mintsToProofs[mint].redeemed, + [ + 'e', // nutzap event that has been redeemed + event.id, + conf.relay, + 'redeemed', + ], + ['p', event.pubkey], // pubkey of the author of the 
9321 event (nutzap sender) + ]; + } catch (e) { + logi({ level: 'error', ns: 'ditto.api.cashu.wallet.swap', error: errorJson(e) }); + } + } + + // TODO: throw error if mintsToProofs is an empty object? + for (const mint of Object.keys(mintsToProofs)) { + try { + const token = getEncodedToken({ mint, proofs: mintsToProofs[mint].proofs }); + + const cashuWallet = new CashuWallet(new CashuMint(mint)); + const receiveProofs = await cashuWallet.receive(token, { privkey }); + + const unspentProofs = await createEvent({ + kind: 7375, + content: await user.signer.nip44.encrypt( + pubkey, + JSON.stringify({ + mint, + proofs: receiveProofs, + }), + ), + }, c); + + const amount = receiveProofs.reduce((accumulator, current) => { + return accumulator + current.amount; + }, 0); + + await createEvent({ + kind: 7376, + content: await user.signer.nip44.encrypt( + pubkey, + JSON.stringify([ + ['direction', 'in'], + ['amount', amount], + ['e', unspentProofs.id, conf.relay, 'created'], + ]), + ), + tags: mintsToProofs[mint].redeemed, + }, c); + } catch (e) { + logi({ level: 'error', ns: 'ditto.api.cashu.wallet.swap', error: errorJson(e) }); + } + } + } + + await next(); +}; diff --git a/src/middleware/translatorMiddleware.ts b/packages/ditto/middleware/translatorMiddleware.ts similarity index 55% rename from src/middleware/translatorMiddleware.ts rename to packages/ditto/middleware/translatorMiddleware.ts index f5a6baa2..478c2fb9 100644 --- a/src/middleware/translatorMiddleware.ts +++ b/packages/ditto/middleware/translatorMiddleware.ts @@ -1,24 +1,25 @@ +import { DeepLTranslator, LibreTranslateTranslator } from '@ditto/translators'; +import { safeFetch } from '@soapbox/safe-fetch'; + import { AppMiddleware } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { fetchWorker } from '@/workers/fetch.ts'; -import { DeepLTranslator } from '@/translators/DeepLTranslator.ts'; -import { LibreTranslateTranslator } from '@/translators/LibreTranslateTranslator.ts'; /** Set the 
translator used for translating posts. */ export const translatorMiddleware: AppMiddleware = async (c, next) => { - switch (Conf.translationProvider) { + const { conf } = c.var; + + switch (conf.translationProvider) { case 'deepl': { - const { deeplApiKey: apiKey, deeplBaseUrl: baseUrl } = Conf; + const { deeplApiKey: apiKey, deeplBaseUrl: baseUrl } = conf; if (apiKey) { - c.set('translator', new DeepLTranslator({ baseUrl, apiKey, fetch: fetchWorker })); + c.set('translator', new DeepLTranslator({ baseUrl, apiKey, fetch: safeFetch })); } break; } case 'libretranslate': { - const { libretranslateApiKey: apiKey, libretranslateBaseUrl: baseUrl } = Conf; + const { libretranslateApiKey: apiKey, libretranslateBaseUrl: baseUrl } = conf; if (apiKey) { - c.set('translator', new LibreTranslateTranslator({ baseUrl, apiKey, fetch: fetchWorker })); + c.set('translator', new LibreTranslateTranslator({ baseUrl, apiKey, fetch: safeFetch })); } break; } diff --git a/packages/ditto/middleware/uploaderMiddleware.ts b/packages/ditto/middleware/uploaderMiddleware.ts new file mode 100644 index 00000000..2a3cffd3 --- /dev/null +++ b/packages/ditto/middleware/uploaderMiddleware.ts @@ -0,0 +1,47 @@ +import { DenoUploader, IPFSUploader, S3Uploader } from '@ditto/uploaders'; +import { BlossomUploader, NostrBuildUploader } from '@nostrify/nostrify/uploaders'; +import { safeFetch } from '@soapbox/safe-fetch'; + +import { AppMiddleware } from '@/app.ts'; + +/** Set an uploader for the user. 
*/ +export const uploaderMiddleware: AppMiddleware = async (c, next) => { + const { user, conf } = c.var; + const signer = user?.signer; + + switch (conf.uploader) { + case 's3': + c.set( + 'uploader', + new S3Uploader({ + accessKey: conf.s3.accessKey, + bucket: conf.s3.bucket, + endPoint: conf.s3.endPoint!, + pathStyle: conf.s3.pathStyle, + port: conf.s3.port, + region: conf.s3.region!, + secretKey: conf.s3.secretKey, + sessionToken: conf.s3.sessionToken, + useSSL: conf.s3.useSSL, + baseUrl: conf.mediaDomain, + }), + ); + break; + case 'ipfs': + c.set('uploader', new IPFSUploader({ baseUrl: conf.mediaDomain, apiUrl: conf.ipfs.apiUrl, fetch: safeFetch })); + break; + case 'local': + c.set('uploader', new DenoUploader({ baseUrl: conf.mediaDomain, dir: conf.uploadsDir })); + break; + case 'nostrbuild': + c.set('uploader', new NostrBuildUploader({ endpoint: conf.nostrbuildEndpoint, signer, fetch: safeFetch })); + break; + case 'blossom': + if (signer) { + c.set('uploader', new BlossomUploader({ servers: conf.blossomServers, signer, fetch: safeFetch })); + } + break; + } + + await next(); +}; diff --git a/src/nostr-wasm.ts b/packages/ditto/nostr-wasm.ts similarity index 100% rename from src/nostr-wasm.ts rename to packages/ditto/nostr-wasm.ts diff --git a/src/precheck.ts b/packages/ditto/precheck.ts similarity index 100% rename from src/precheck.ts rename to packages/ditto/precheck.ts diff --git a/packages/ditto/queries.ts b/packages/ditto/queries.ts new file mode 100644 index 00000000..dd1e54e1 --- /dev/null +++ b/packages/ditto/queries.ts @@ -0,0 +1,84 @@ +import { DittoDB } from '@ditto/db'; +import { DittoConf } from '@ditto/conf'; +import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify'; + +import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; +import { hydrateEvents } from '@/storages/hydrate.ts'; +import { findReplyTag, getTagSet } from '@/utils/tags.ts'; + +interface GetEventOpts { + db: DittoDB; + conf: DittoConf; + relay: NStore; + 
signal?: AbortSignal; +} + +/** + * Get a Nostr event by its ID. + * @deprecated Use `relay.query` directly. + */ +async function getEvent(id: string, opts: GetEventOpts): Promise { + const filter: NostrFilter = { ids: [id], limit: 1 }; + const events = await opts.relay.query([filter], opts); + const [event] = await hydrateEvents({ ...opts, events }); + return event; +} + +/** + * Get a Nostr `set_medatadata` event for a user's pubkey. + * @deprecated Use `relay.query` directly. + */ +async function getAuthor(pubkey: string, opts: GetEventOpts): Promise { + const events = await opts.relay.query([{ authors: [pubkey], kinds: [0], limit: 1 }], opts); + const [event] = await hydrateEvents({ ...opts, events }); + return event; +} + +/** Get users the given pubkey follows. */ +const getFollows = async (relay: NStore, pubkey: string, signal?: AbortSignal): Promise => { + const [event] = await relay.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { signal }); + return event; +}; + +/** Get pubkeys the user follows. */ +async function getFollowedPubkeys(relay: NStore, pubkey: string, signal?: AbortSignal): Promise> { + const event = await getFollows(relay, pubkey, signal); + if (!event) return new Set(); + return getTagSet(event.tags, 'p'); +} + +/** Get pubkeys the user follows, including the user's own pubkey. */ +async function getFeedPubkeys(relay: NStore, pubkey: string): Promise> { + const authors = await getFollowedPubkeys(relay, pubkey); + return authors.add(pubkey); +} + +async function getAncestors(store: NStore, event: NostrEvent, result: NostrEvent[] = []): Promise { + if (result.length < 100) { + const replyTag = findReplyTag(event.tags); + const inReplyTo = replyTag ? 
replyTag[1] : undefined; + + if (inReplyTo) { + const [parentEvent] = await store.query([{ ids: [inReplyTo], until: event.created_at, limit: 1 }]); + + if (parentEvent) { + result.push(parentEvent); + return getAncestors(store, parentEvent, result); + } + } + } + + return result.reverse(); +} + +async function getDescendants( + store: NStore, + event: NostrEvent, + signal?: AbortSignal, +): Promise { + return await store + .query([{ kinds: [1], '#e': [event.id], since: event.created_at, limit: 200 }], { signal }) + .then((events) => events.filter(({ tags }) => findReplyTag(tags)?.[1] === event.id)); +} + +export { getAncestors, getAuthor, getDescendants, getEvent, getFeedPubkeys, getFollowedPubkeys, getFollows }; diff --git a/src/schema.test.ts b/packages/ditto/schema.test.ts similarity index 100% rename from src/schema.test.ts rename to packages/ditto/schema.test.ts diff --git a/src/schema.ts b/packages/ditto/schema.ts similarity index 73% rename from src/schema.ts rename to packages/ditto/schema.ts index b55a1f9a..c67aa5f6 100644 --- a/src/schema.ts +++ b/packages/ditto/schema.ts @@ -1,4 +1,5 @@ import ISO6391, { LanguageCode } from 'iso-639-1'; +import { NSchema as n } from '@nostrify/nostrify'; import { z } from 'zod'; /** Validates individual items in an array, dropping any that aren't valid. */ @@ -12,18 +13,6 @@ function filteredArray(schema: T) { )); } -/** https://developer.mozilla.org/en-US/docs/Glossary/Base64#the_unicode_problem */ -const decode64Schema = z.string().transform((value, ctx) => { - try { - const binString = atob(value); - const bytes = Uint8Array.from(binString, (m) => m.codePointAt(0)!); - return new TextDecoder().decode(bytes); - } catch (_e) { - ctx.addIssue({ code: z.ZodIssueCode.custom, message: 'Invalid base64', fatal: true }); - return z.NEVER; - } -}); - /** Parses a hashtag, eg `#yolo`. 
*/ const hashtagSchema = z.string().regex(/^\w{1,30}$/); @@ -33,16 +22,6 @@ const hashtagSchema = z.string().regex(/^\w{1,30}$/); */ const safeUrlSchema = z.string().max(2048).url(); -/** WebSocket URL. */ -const wsUrlSchema = z.string().refine((val) => { - try { - const { protocol } = new URL(val); - return protocol === 'wss:' || protocol === 'ws:'; - } catch { - return false; - } -}, 'Invalid WebSocket URL'); - /** https://github.com/colinhacks/zod/issues/1630#issuecomment-1365983831 */ const booleanParamSchema = z.enum(['true', 'false']).transform((value) => value === 'true'); @@ -60,7 +39,7 @@ const languageSchema = z.string().transform((val, ctx) => { }); return z.NEVER; } - return val as LanguageCode; + return val; }); const localeSchema = z.string().transform((val, ctx) => { @@ -80,9 +59,21 @@ const sizesSchema = z.string().refine((value) => value.split(' ').every((v) => /^[1-9]\d{0,3}[xX][1-9]\d{0,3}$/.test(v)) ); +/** Ditto Cashu wallet */ +const walletSchema = z.object({ + pubkey_p2pk: n.id(), + mints: z.array(z.string().url()).nonempty().transform((val) => { + return [...new Set(val)]; + }), + relays: z.array(z.string()).nonempty().transform((val) => { + return [...new Set(val)]; + }), + /** Unit in sats */ + balance: z.number(), +}); + export { booleanParamSchema, - decode64Schema, fileSchema, filteredArray, hashtagSchema, @@ -91,5 +82,5 @@ export { percentageSchema, safeUrlSchema, sizesSchema, - wsUrlSchema, + walletSchema, }; diff --git a/src/schemas/mastodon.ts b/packages/ditto/schemas/mastodon.ts similarity index 100% rename from src/schemas/mastodon.ts rename to packages/ditto/schemas/mastodon.ts diff --git a/src/schemas/nostr.ts b/packages/ditto/schemas/nostr.ts similarity index 82% rename from src/schemas/nostr.ts rename to packages/ditto/schemas/nostr.ts index 05cd0f31..558e6c13 100644 --- a/src/schemas/nostr.ts +++ b/packages/ditto/schemas/nostr.ts @@ -1,14 +1,8 @@ import { NSchema as n } from '@nostrify/nostrify'; -import { getEventHash, 
verifyEvent } from 'nostr-tools'; import { z } from 'zod'; import { safeUrlSchema, sizesSchema } from '@/schema.ts'; -/** Nostr event schema that also verifies the event's signature. */ -const signedEventSchema = n.event() - .refine((event) => event.id === getEventHash(event), 'Event ID does not match hash') - .refine(verifyEvent, 'Event signature is invalid'); - /** Kind 0 standardized fields extended with Ditto custom fields. */ const metadataSchema = n.metadata().and(z.object({ fields: z.tuple([z.string(), z.string()]).array().optional().catch(undefined), @@ -68,12 +62,4 @@ const emojiTagSchema = z.tuple([z.literal('emoji'), z.string(), z.string().url() /** NIP-30 custom emoji tag. */ type EmojiTag = z.infer; -export { - type EmojiTag, - emojiTagSchema, - metadataSchema, - relayInfoDocSchema, - screenshotsSchema, - serverMetaSchema, - signedEventSchema, -}; +export { type EmojiTag, emojiTagSchema, metadataSchema, relayInfoDocSchema, screenshotsSchema, serverMetaSchema }; diff --git a/src/schemas/pleroma-api.ts b/packages/ditto/schemas/pleroma-api.ts similarity index 100% rename from src/schemas/pleroma-api.ts rename to packages/ditto/schemas/pleroma-api.ts diff --git a/packages/ditto/sentry.ts b/packages/ditto/sentry.ts new file mode 100644 index 00000000..4875a12e --- /dev/null +++ b/packages/ditto/sentry.ts @@ -0,0 +1,15 @@ +import * as Sentry from '@sentry/deno'; +import { logi } from '@soapbox/logi'; + +import { Conf } from '@/config.ts'; + +// Sentry +if (Conf.sentryDsn) { + logi({ level: 'info', ns: 'ditto.sentry', msg: 'Sentry enabled.', enabled: true }); + Sentry.init({ + dsn: Conf.sentryDsn, + tracesSampleRate: 1.0, + }); +} else { + logi({ level: 'info', ns: 'ditto.sentry', msg: 'Sentry not configured. 
Skipping.', enabled: false }); +} diff --git a/packages/ditto/server.ts b/packages/ditto/server.ts new file mode 100644 index 00000000..c5815537 --- /dev/null +++ b/packages/ditto/server.ts @@ -0,0 +1,14 @@ +import { logi } from '@soapbox/logi'; + +import '@/precheck.ts'; +import '@/sentry.ts'; +import '@/nostr-wasm.ts'; +import app from '@/app.ts'; +import { Conf } from '@/config.ts'; + +Deno.serve({ + port: Conf.port, + onListen({ hostname, port }): void { + logi({ level: 'info', ns: 'ditto.server', msg: `Listening on http://${hostname}:${port}`, hostname, port }); + }, +}, app.fetch); diff --git a/packages/ditto/signers/ConnectSigner.ts b/packages/ditto/signers/ConnectSigner.ts new file mode 100644 index 00000000..4f5a6f3e --- /dev/null +++ b/packages/ditto/signers/ConnectSigner.ts @@ -0,0 +1,117 @@ +// deno-lint-ignore-file require-await +import { HTTPException } from '@hono/hono/http-exception'; +import { NConnectSigner, NostrEvent, NostrSigner, NRelay } from '@nostrify/nostrify'; + +interface ConnectSignerOpts { + bunkerPubkey: string; + userPubkey: string; + signer: NostrSigner; + relay: NRelay; + relays?: string[]; +} + +/** + * NIP-46 signer. + * + * Simple extension of nostrify's `NConnectSigner`, with our options to keep it DRY. 
+ */ +export class ConnectSigner implements NostrSigner { + private signer: NConnectSigner; + + constructor(private opts: ConnectSignerOpts) { + const { relay, signer } = this.opts; + + this.signer = new NConnectSigner({ + encryption: 'nip44', + pubkey: this.opts.bunkerPubkey, + relay, + signer, + timeout: 60_000, + }); + } + + async signEvent(event: Omit): Promise { + try { + return await this.signer.signEvent(event); + } catch (e) { + if (e instanceof Error && e.name === 'AbortError') { + throw new HTTPException(408, { message: 'The event was not signed quickly enough' }); + } else { + throw e; + } + } + } + + readonly nip04 = { + encrypt: async (pubkey: string, plaintext: string): Promise => { + try { + return await this.signer.nip04.encrypt(pubkey, plaintext); + } catch (e) { + if (e instanceof Error && e.name === 'AbortError') { + throw new HTTPException(408, { + message: 'Text was not encrypted quickly enough', + }); + } else { + throw e; + } + } + }, + + decrypt: async (pubkey: string, ciphertext: string): Promise => { + try { + return await this.signer.nip04.decrypt(pubkey, ciphertext); + } catch (e) { + if (e instanceof Error && e.name === 'AbortError') { + throw new HTTPException(408, { + message: 'Text was not decrypted quickly enough', + }); + } else { + throw e; + } + } + }, + }; + + readonly nip44 = { + encrypt: async (pubkey: string, plaintext: string): Promise => { + try { + return await this.signer.nip44.encrypt(pubkey, plaintext); + } catch (e) { + if (e instanceof Error && e.name === 'AbortError') { + throw new HTTPException(408, { + message: 'Text was not encrypted quickly enough', + }); + } else { + throw e; + } + } + }, + + decrypt: async (pubkey: string, ciphertext: string): Promise => { + try { + return await this.signer.nip44.decrypt(pubkey, ciphertext); + } catch (e) { + if (e instanceof Error && e.name === 'AbortError') { + throw new HTTPException(408, { + message: 'Text was not decrypted quickly enough', + }); + } else { + throw e; + } + 
} + }, + }; + + // Prevent unnecessary NIP-46 round-trips. + async getPublicKey(): Promise { + return this.opts.userPubkey; + } + + /** Get the user's relays if they passed in an `nprofile` auth token. */ + async getRelays(): Promise> { + return this.opts.relays?.reduce>((acc, relay) => { + acc[relay] = { read: true, write: true }; + return acc; + }, {}) ?? {}; + } +} diff --git a/src/signers/ReadOnlySigner.ts b/packages/ditto/signers/ReadOnlySigner.ts similarity index 100% rename from src/signers/ReadOnlySigner.ts rename to packages/ditto/signers/ReadOnlySigner.ts diff --git a/static/favicon.ico b/packages/ditto/static/favicon.ico similarity index 100% rename from static/favicon.ico rename to packages/ditto/static/favicon.ico diff --git a/static/images/avi.png b/packages/ditto/static/images/avi.png similarity index 100% rename from static/images/avi.png rename to packages/ditto/static/images/avi.png diff --git a/static/images/banner.png b/packages/ditto/static/images/banner.png similarity index 100% rename from static/images/banner.png rename to packages/ditto/static/images/banner.png diff --git a/static/images/thumbnail.png b/packages/ditto/static/images/thumbnail.png similarity index 100% rename from static/images/thumbnail.png rename to packages/ditto/static/images/thumbnail.png diff --git a/packages/ditto/storages/DittoAPIStore.ts b/packages/ditto/storages/DittoAPIStore.ts new file mode 100644 index 00000000..6df5ebba --- /dev/null +++ b/packages/ditto/storages/DittoAPIStore.ts @@ -0,0 +1,60 @@ +import { logi } from '@soapbox/logi'; +import { NostrEvent, NostrFilter, NostrRelayCLOSED, NostrRelayEOSE, NostrRelayEVENT, NRelay } from '@nostrify/nostrify'; + +import { errorJson } from '@/utils/log.ts'; +import { purifyEvent } from '@/utils/purify.ts'; + +interface DittoAPIStoreOpts { + pool: NRelay; + relay: NRelay; +} + +/** + * Store used by Ditto's Mastodon API implementation. + * It extends the RelayStore to publish events to the wider Nostr network. 
+ */ +export class DittoAPIStore implements NRelay { + private ns = 'ditto.api.store'; + + constructor(private opts: DittoAPIStoreOpts) {} + + req( + filters: NostrFilter[], + opts?: { signal?: AbortSignal }, + ): AsyncIterable { + const { relay } = this.opts; + return relay.req(filters, opts); + } + + query(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise { + const { relay } = this.opts; + return relay.query(filters, opts); + } + + async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise { + const { pool, relay } = this.opts; + const { id, kind } = event; + + await relay.event(event, opts); + + (async () => { + try { + // `purifyEvent` is important, or you will suffer. + await pool.event(purifyEvent(event), opts); + } catch (e) { + logi({ level: 'error', ns: this.ns, source: 'publish', id, kind, error: errorJson(e) }); + } + })(); + } + + async close(): Promise { + const { pool, relay } = this.opts; + + await pool.close(); + await relay.close(); + } + + [Symbol.asyncDispose](): Promise { + return this.close(); + } +} diff --git a/src/storages/EventsDB.test.ts b/packages/ditto/storages/DittoPgStore.test.ts similarity index 63% rename from src/storages/EventsDB.test.ts rename to packages/ditto/storages/DittoPgStore.test.ts index 44937e41..405229dd 100644 --- a/src/storages/EventsDB.test.ts +++ b/packages/ditto/storages/DittoPgStore.test.ts @@ -1,11 +1,41 @@ import { assertEquals, assertRejects } from '@std/assert'; +import { NostrRelayMsg } from '@nostrify/nostrify'; +import { genEvent } from '@nostrify/nostrify/test'; import { generateSecretKey } from 'nostr-tools'; import { RelayError } from '@/RelayError.ts'; -import { eventFixture, genEvent } from '@/test.ts'; +import { eventFixture } from '@/test.ts'; import { Conf } from '@/config.ts'; +import { DittoPgStore } from '@/storages/DittoPgStore.ts'; import { createTestDB } from '@/test.ts'; +Deno.test('req streaming', async () => { + await using db = await createTestDB({ pure: true 
}); + const { store: relay } = db; + + const msgs: NostrRelayMsg[] = []; + const controller = new AbortController(); + + const promise = (async () => { + for await (const msg of relay.req([{ since: 0 }], { signal: controller.signal })) { + msgs.push(msg); + } + })(); + + const event = genEvent({ created_at: Math.floor(Date.now() / 1000) }); + await relay.event(event); + + controller.abort(); + + await promise; + + const verbs = msgs.map(([verb]) => verb); + + assertEquals(verbs, ['EOSE', 'EVENT', 'CLOSED']); + assertEquals(msgs[1][2], event); + assertEquals(relay.subs.size, 0); // cleanup +}); + Deno.test('count filters', async () => { await using db = await createTestDB({ pure: true }); const { store } = db; @@ -42,15 +72,23 @@ Deno.test('query events with domain search filter', async () => { await store.event(event1); assertEquals(await store.query([{}]), [event1]); - assertEquals(await store.query([{ search: 'domain:localhost:4036' }]), []); + assertEquals(await store.query([{ search: 'domain:gleasonator.dev' }]), []); assertEquals(await store.query([{ search: '' }]), [event1]); await kysely - .insertInto('pubkey_domains') - .values({ pubkey: event1.pubkey, domain: 'localhost:4036', last_updated_at: event1.created_at }) + .updateTable('author_stats') + .set({ + pubkey: event1.pubkey, + nip05_domain: 'gleasonator.dev', + nip05_last_verified_at: event1.created_at, + followers_count: 0, + following_count: 0, + notes_count: 0, + search: '', + }) .execute(); - assertEquals(await store.query([{ kinds: [1], search: 'domain:localhost:4036' }]), [event1]); + assertEquals(await store.query([{ kinds: [1], search: 'domain:gleasonator.dev' }]), [event1]); assertEquals(await store.query([{ kinds: [1], search: 'domain:example.com' }]), []); }); @@ -64,8 +102,8 @@ Deno.test('query events with language search filter', async () => { await store.event(en); await store.event(es); - await kysely.updateTable('nostr_events').set('language', 'en').where('id', '=', en.id).execute(); - 
await kysely.updateTable('nostr_events').set('language', 'es').where('id', '=', es.id).execute(); + await kysely.updateTable('nostr_events').set('search_ext', { language: 'en' }).where('id', '=', en.id).execute(); + await kysely.updateTable('nostr_events').set('search_ext', { language: 'es' }).where('id', '=', es.id).execute(); assertEquals(await store.query([{ search: 'language:en' }]), [en]); assertEquals(await store.query([{ search: 'language:es' }]), [es]); @@ -167,11 +205,21 @@ Deno.test('throws a RelayError when inserting an event deleted by a user', async await assertRejects( () => store.event(event), - RelayError, + // RelayError, 'event deleted by user', ); }); +Deno.test('inserting the same event twice', async () => { + await using db = await createTestDB({ pure: true }); + const { store } = db; + + const event = genEvent({ kind: 1 }); + + await store.event(event); + await store.event(event); +}); + Deno.test('inserting replaceable events', async () => { await using db = await createTestDB({ pure: true }); const { store } = db; @@ -187,6 +235,8 @@ Deno.test('inserting replaceable events', async () => { const newerEvent = genEvent({ kind: 0, created_at: 999 }, sk); await store.event(newerEvent); assertEquals(await store.query([{ kinds: [0] }]), [newerEvent]); + + await store.event(olderEvent); // doesn't throw }); Deno.test("throws a RelayError when querying an event with a large 'since'", async () => { @@ -244,3 +294,42 @@ Deno.test('NPostgres.query with search', async (t) => { assertEquals(await store.query([{ search: "this shouldn't match" }]), []); }); }); + +Deno.test('DittoPgStore.indexTags indexes only the final `e` and `p` tag of kind 7 events', () => { + const event = { + kind: 7, + id: 'a92549a442d306b32273aa9456ba48e3851a4e6203af3f567543298ab964b35b', + pubkey: 'f288a224a61b7361aa9dc41a90aba8a2dff4544db0bc386728e638b21da1792c', + created_at: 1737908284, + tags: [ + ['e', '2503cea56931fb25914866e12ffc739741539db4d6815220b9974ef0967fe3f9', '', 
'root'], + ['p', 'fad5c18326fb26d9019f1b2aa503802f0253494701bf311d7588a1e65cb8046b'], + ['p', '26d6a946675e603f8de4bf6f9cef442037b70c7eee170ff06ed7673fc34c98f1'], + ['p', '04c960497af618ae18f5147b3e5c309ef3d8a6251768a1c0820e02c93768cc3b'], + ['p', '0114bb11dd8eb89bfb40669509b2a5a473d27126e27acae58257f2fd7cd95776'], + ['p', '9fce3aea32b35637838fb45b75be32595742e16bb3e4742cc82bb3d50f9087e6'], + ['p', '26bd32c67232bdf16d05e763ec67d883015eb99fd1269025224c20c6cfdb0158'], + ['p', 'eab0e756d32b80bcd464f3d844b8040303075a13eabc3599a762c9ac7ab91f4f'], + ['p', 'edcd20558f17d99327d841e4582f9b006331ac4010806efa020ef0d40078e6da'], + ['p', 'bd1e19980e2c91e6dc657e92c25762ca882eb9272d2579e221f037f93788de91'], + ['p', 'bf2376e17ba4ec269d10fcc996a4746b451152be9031fa48e74553dde5526bce'], + ['p', '3878d95db7b854c3a0d3b2d6b7bf9bf28b36162be64326f5521ba71cf3b45a69'], + ['p', 'ede3866ddfc40aa4e458952c11c67e827e3cbb8a6a4f0a934c009aa2ed2fb477'], + ['p', 'f288a224a61b7361aa9dc41a90aba8a2dff4544db0bc386728e638b21da1792c'], + ['p', '9ce71f1506ccf4b99f234af49bd6202be883a80f95a155c6e9a1c36fd7e780c7', '', 'mention'], + ['p', '932614571afcbad4d17a191ee281e39eebbb41b93fac8fd87829622aeb112f4d', '', 'mention'], + ['e', 'e3653ae41ffb510e5fc071555ecfbc94d2fc31e355d61d941e39a97ac6acb15b'], + ['p', '4e088f3087f6a7e7097ce5fe7fd884ec04ddc69ed6cdd37c55e200f7744b1792'], + ], + content: '🤙', + sig: + '44639d039a7f7fb8772fcfa13d134d3cda684ec34b6a777ead589676f9e8d81b08a24234066dcde1aacfbe193224940fba7586e7197c159757d3caf8f2b57e1b', + }; + + const tags = DittoPgStore.indexTags(event); + + assertEquals(tags, [ + ['e', 'e3653ae41ffb510e5fc071555ecfbc94d2fc31e355d61d941e39a97ac6acb15b'], + ['p', '4e088f3087f6a7e7097ce5fe7fd884ec04ddc69ed6cdd37c55e200f7744b1792'], + ]); +}); diff --git a/packages/ditto/storages/DittoPgStore.ts b/packages/ditto/storages/DittoPgStore.ts new file mode 100644 index 00000000..ea3e864c --- /dev/null +++ b/packages/ditto/storages/DittoPgStore.ts @@ -0,0 +1,645 @@ +// deno-lint-ignore-file 
require-await + +import { type DittoDB, type DittoTables } from '@ditto/db'; +import { detectLanguage } from '@ditto/lang'; +import { NPostgres, NPostgresSchema } from '@nostrify/db'; +import { dbEventsCounter, internalSubscriptionsSizeGauge } from '@ditto/metrics'; +import { + NIP50, + NKinds, + NostrEvent, + NostrFilter, + NostrRelayCLOSED, + NostrRelayEOSE, + NostrRelayEVENT, + NSchema as n, +} from '@nostrify/nostrify'; +import { Machina } from '@nostrify/nostrify/utils'; +import { logi } from '@soapbox/logi'; +import { JsonValue } from '@std/json'; +import { LanguageCode } from 'iso-639-1'; +import { Kysely } from 'kysely'; +import linkify from 'linkifyjs'; +import { LRUCache } from 'lru-cache'; +import { matchFilter, nip27 } from 'nostr-tools'; +import tldts from 'tldts'; +import { z } from 'zod'; + +import { RelayError } from '@/RelayError.ts'; +import { isNostrId } from '@/utils.ts'; +import { abortError } from '@/utils/abort.ts'; +import { purifyEvent } from '@/utils/purify.ts'; +import { DittoEvent } from '@/interfaces/DittoEvent.ts'; +import { getMediaLinks } from '@/utils/note.ts'; +import { updateStats } from '@/utils/stats.ts'; + +/** Function to decide whether or not to index a tag. */ +type TagCondition = (opts: TagConditionOpts) => boolean; + +/** Options for the tag condition function. */ +interface TagConditionOpts { + /** Nostr event whose tags are being indexed. */ + event: NostrEvent; + /** Count of the current tag name so far. Each tag name has a separate counter starting at 0. */ + count: number; + /** Overall tag index. */ + index: number; + /** Current vag value. */ + value: string; +} + +/** Options for the EventsDB store. */ +interface DittoPgStoreOpts { + /** Kysely instance to use. */ + db: DittoDB; + /** Pubkey of the admin account. */ + pubkey: string; + /** Timeout in milliseconds for database queries. */ + timeout?: number; + /** Whether the event returned should be a Nostr event or a Ditto event. Defaults to false. 
*/ + pure?: boolean; + /** Chunk size for streaming events. Defaults to 20. */ + chunkSize?: number; + /** Batch size for fulfilling subscriptions. Defaults to 500. */ + batchSize?: number; + /** Max age (in **seconds**) an event can be to be fulfilled to realtime subscribers. */ + maxAge?: number; + /** Whether to listen for events from the database with NOTIFY. */ + notify?: boolean; +} + +/** Realtime subscription. */ +interface Subscription { + filters: NostrFilter[]; + machina: Machina; +} + +/** SQL database storage adapter for Nostr events. */ +export class DittoPgStore extends NPostgres { + readonly subs = new Map(); + readonly encounters = new LRUCache({ max: 1000 }); + + /** Conditions for when to index certain tags. */ + static tagConditions: Record = { + 'a': ({ count }) => count < 15, + 'd': ({ event, count }) => count === 0 && NKinds.parameterizedReplaceable(event.kind), + 'e': DittoPgStore.eTagCondition, + 'k': ({ count, value }) => count === 0 && Number.isInteger(Number(value)), + 'L': ({ event, count }) => event.kind === 1985 || count === 0, + 'l': ({ event, count }) => event.kind === 1985 || count === 0, + 'n': ({ count, value }) => count < 50 && value.length < 50, + 'P': ({ count, value }) => count === 0 && isNostrId(value), + 'p': DittoPgStore.pTagCondition, + 'proxy': ({ count, value }) => count === 0 && value.length < 256, + 'q': ({ event, count, value }) => count === 0 && event.kind === 1 && isNostrId(value), + 'r': ({ event, count }) => (event.kind === 1985 ? count < 20 : count < 3), + 't': ({ event, count, value }) => + (value === value.toLowerCase()) && (event.kind === 1985 ? 
count < 20 : count < 5) && value.length < 50, + 'u': ({ count, value }) => { + const { success } = z.string().url().safeParse(value); // TODO: maybe find a better library specific for validating web urls + return count < 15 && success; + }, + }; + + constructor(private opts: DittoPgStoreOpts) { + super(opts.db.kysely, { + indexTags: DittoPgStore.indexTags, + indexSearch: DittoPgStore.searchText, + indexExtensions: DittoPgStore.indexExtensions, + chunkSize: opts.chunkSize, + }); + + if (opts.notify) { + opts.db.listen('nostr_event', async (id) => { + if (this.encounters.has(id)) return; + this.encounters.set(id, true); + + const [event] = await this.query([{ ids: [id] }]); + + if (event) { + await this.fulfill(event); + } + }); + } + } + + /** Insert an event (and its tags) into the database. */ + override async event(event: NostrEvent, opts: { signal?: AbortSignal; timeout?: number } = {}): Promise { + event = purifyEvent(event); + + logi({ level: 'debug', ns: 'ditto.event', source: 'db', id: event.id, kind: event.kind }); + dbEventsCounter.inc({ kind: event.kind }); + + if (NKinds.ephemeral(event.kind)) { + return await this.fulfill(event); + } + + if (this.opts.notify) { + this.encounters.set(event.id, true); + } + + if (await this.isDeletedAdmin(event)) { + throw new RelayError('blocked', 'event deleted by admin'); + } + + await this.deleteEventsAdmin(event); + + try { + await this.storeEvent(event, { ...opts, timeout: opts.timeout ?? 
this.opts.timeout }); + this.fulfill(event); // don't await or catch (should never reject) + } catch (e) { + if (e instanceof Error) { + switch (e.message) { + case 'duplicate key value violates unique constraint "nostr_events_pkey"': + case 'duplicate key value violates unique constraint "author_stats_pkey"': + return; + case 'canceling statement due to statement timeout': + throw new RelayError('error', 'the event could not be added fast enough'); + default: + throw e; + } + } else { + throw e; + } + } + } + + /** Maybe store the event, if eligible. */ + private async storeEvent( + event: NostrEvent, + opts: { signal?: AbortSignal; timeout?: number } = {}, + ): Promise { + try { + await super.transaction(async (relay, kysely) => { + await updateStats({ event, relay, kysely: kysely as unknown as Kysely }); + await relay.event(event, opts); + }); + } catch (e) { + // If the failure is only because of updateStats (which runs first), insert the event anyway. + // We can't catch this in the transaction because the error aborts the transaction on the Postgres side. + if (e instanceof Error && e.message.includes('event_stats' satisfies keyof DittoTables)) { + await super.event(event, opts); + } else { + throw e; + } + } + } + + /** Fulfill active subscriptions with this event. */ + protected async fulfill(event: NostrEvent): Promise { + const { maxAge = 60, batchSize = 500 } = this.opts; + + const now = Math.floor(Date.now() / 1000); + const age = now - event.created_at; + + if (age > maxAge) { + // Ephemeral events must be fulfilled, or else return an error to the client. + if (NKinds.ephemeral(event.kind)) { + throw new RelayError('invalid', 'event too old'); + } else { + // Silently ignore old events. 
+ return; + } + } + + let count = 0; + + for (const [subId, { filters, machina }] of this.subs.entries()) { + for (const filter of filters) { + count++; + + if (this.matchesFilter(event, filter)) { + machina.push(['EVENT', subId, event]); + break; + } + + // Yield to event loop. + if (count % batchSize === 0) { + await new Promise((resolve) => setTimeout(resolve, 0)); + } + } + } + } + + /** Check if the event fulfills the filter, according to Ditto criteria. */ + protected matchesFilter(event: NostrEvent, filter: NostrFilter): boolean { + // TODO: support streaming by search. + return typeof filter.search !== 'string' && matchFilter(filter, event); + } + + /** Check if an event has been deleted by the admin. */ + private async isDeletedAdmin(event: NostrEvent): Promise { + const filters: NostrFilter[] = [ + { kinds: [5], authors: [this.opts.pubkey], '#e': [event.id], limit: 1 }, + ]; + + if (NKinds.replaceable(event.kind) || NKinds.parameterizedReplaceable(event.kind)) { + const d = event.tags.find(([tag]) => tag === 'd')?.[1] ?? ''; + + filters.push({ + kinds: [5], + authors: [this.opts.pubkey], + '#a': [`${event.kind}:${event.pubkey}:${d}`], + since: event.created_at, + limit: 1, + }); + } + + const events = await this.query(filters); + return events.length > 0; + } + + /** The DITTO_NSEC can delete any event from the database. NDatabase already handles user deletions. 
*/ + private async deleteEventsAdmin(event: NostrEvent): Promise { + if (event.kind === 5 && event.pubkey === this.opts.pubkey) { + const ids = new Set(event.tags.filter(([name]) => name === 'e').map(([_name, value]) => value)); + const addrs = new Set(event.tags.filter(([name]) => name === 'a').map(([_name, value]) => value)); + + const filters: NostrFilter[] = []; + + if (ids.size) { + filters.push({ ids: [...ids] }); + } + + for (const addr of addrs) { + const [k, pubkey, d] = addr.split(':'); + const kind = Number(k); + + if (!(Number.isInteger(kind) && kind >= 0)) continue; + if (!isNostrId(pubkey)) continue; + if (d === undefined) continue; + + const filter: NostrFilter = { + kinds: [kind], + authors: [pubkey], + until: event.created_at, + }; + + if (d) { + filter['#d'] = [d]; + } + + filters.push(filter); + } + + if (filters.length) { + await this.remove(filters); + } + } + } + + override async *req( + filters: NostrFilter[], + opts: { timeout?: number; signal?: AbortSignal; limit?: number } = {}, + ): AsyncIterable { + const { db, chunkSize = 20 } = this.opts; + const { limit, timeout = this.opts.timeout, signal } = opts; + + filters = await this.expandFilters(filters); + + const subId = crypto.randomUUID(); + const normalFilters = this.normalizeFilters(filters); + const machina = new Machina(signal); + + if (normalFilters.length && limit !== 0) { + this.withTimeout(db.kysely as unknown as Kysely, timeout, async (trx) => { + let query = this.getEventsQuery(trx, normalFilters); + + if (typeof opts.limit === 'number') { + query = query.limit(opts.limit); + } + + for await (const row of query.stream(chunkSize)) { + const event = this.parseEventRow(row); + machina.push(['EVENT', subId, event]); + } + + machina.push(['EOSE', subId]); + }).catch((error) => { + if (error instanceof Error && error.message.includes('timeout')) { + machina.push(['CLOSED', subId, 'error: the relay could not respond fast enough']); + } else { + machina.push(['CLOSED', subId, 'error: 
something went wrong']); + } + }); + + try { + for await (const msg of machina) { + const [verb] = msg; + + yield msg; + + if (verb === 'EOSE') { + break; + } + + if (verb === 'CLOSED') { + return; + } + } + } catch { + yield ['CLOSED', subId, 'error: the relay could not respond fast enough']; + return; + } + } else { + yield ['EOSE', subId]; + } + + this.subs.set(subId, { filters, machina }); + internalSubscriptionsSizeGauge.set(this.subs.size); + + try { + for await (const msg of machina) { + yield msg; + } + } catch (e) { + if (e instanceof Error && e.name === 'AbortError') { + yield ['CLOSED', subId, 'error: the relay could not respond fast enough']; + } else { + yield ['CLOSED', subId, 'error: something went wrong']; + } + } finally { + this.subs.delete(subId); + internalSubscriptionsSizeGauge.set(this.subs.size); + } + } + + /** Get events for filters from the database. */ + override async query( + filters: NostrFilter[], + opts: { signal?: AbortSignal; pure?: boolean; timeout?: number; limit?: number } = {}, + ): Promise { + filters = await this.expandFilters(filters); + + if (opts.signal?.aborted) return Promise.resolve([]); + + logi({ level: 'debug', ns: 'ditto.req', source: 'db', filters: filters as JsonValue }); + + return super.query(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout }); + } + + /** Parse an event row from the database. */ + protected override parseEventRow(row: NPostgresSchema['nostr_events']): DittoEvent { + const event: DittoEvent = { + id: row.id, + kind: row.kind, + pubkey: row.pubkey, + content: row.content, + created_at: Number(row.created_at), + tags: row.tags, + sig: row.sig, + }; + + if (!this.opts.pure) { + event.language = row.search_ext.language as LanguageCode | undefined; + } + + return event; + } + + /** Delete events based on filters from the database. 
*/ + override async remove(filters: NostrFilter[], opts: { signal?: AbortSignal; timeout?: number } = {}): Promise { + logi({ level: 'debug', ns: 'ditto.remove', source: 'db', filters: filters as JsonValue }); + return super.remove(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout }); + } + + /** Get number of events that would be returned by filters. */ + override async count( + filters: NostrFilter[], + opts: { signal?: AbortSignal; timeout?: number } = {}, + ): Promise<{ count: number; approximate: boolean }> { + if (opts.signal?.aborted) return Promise.reject(abortError()); + + logi({ level: 'debug', ns: 'ditto.count', source: 'db', filters: filters as JsonValue }); + + return super.count(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout }); + } + + /** Rule for indexing `e` tags. */ + private static eTagCondition({ event, count, value, index }: TagConditionOpts): boolean { + if (!isNostrId(value)) return false; + + if (event.kind === 7) { + return index === event.tags.findLastIndex(([name]) => name === 'e'); + } + + return event.kind === 10003 || count < 15; + } + + /** Rule for indexing `p` tags. */ + private static pTagCondition({ event, count, value, index }: TagConditionOpts): boolean { + if (!isNostrId(value)) return false; + + if (event.kind === 7) { + return index === event.tags.findLastIndex(([name]) => name === 'p'); + } + + return count < 15 || event.kind === 3; + } + + /** Return only the tags that should be indexed. 
*/ + static override indexTags(event: NostrEvent): string[][] { + const tagCounts: Record = {}; + + function getCount(name: string) { + return tagCounts[name] || 0; + } + + function incrementCount(name: string) { + tagCounts[name] = getCount(name) + 1; + } + + function checkCondition(name: string, value: string, condition: TagCondition, index: number): boolean { + return condition({ + event, + count: getCount(name), + value, + index, + }); + } + + return event.tags.reduce((results, tag, index) => { + const [name, value] = tag; + const condition = DittoPgStore.tagConditions[name] as TagCondition | undefined; + + if (value && condition && value.length < 200 && checkCondition(name, value, condition, index)) { + results.push(tag); + } + + incrementCount(name); + return results; + }, []); + } + + static indexExtensions(event: NostrEvent): Record { + const ext: Record = {}; + + if (event.kind === 1) { + ext.reply = event.tags.some(([name]) => name === 'e').toString(); + } else if (event.kind === 1111) { + ext.reply = event.tags.some(([name]) => ['e', 'E'].includes(name)).toString(); + } else if (event.kind === 6) { + ext.reply = 'false'; + } + + if ([1, 20, 30023].includes(event.kind)) { + const language = detectLanguage(event.content, 0.90); + + if (language) { + ext.language = language; + } + } + + const imeta: string[][][] = event.tags + .filter(([name]) => name === 'imeta') + .map(([_, ...entries]) => + entries.map((entry) => { + const split = entry.split(' '); + return [split[0], split.splice(1).join(' ')]; + }) + ); + + // quirks mode + if (!imeta.length && event.kind === 1) { + const links = linkify.find(event.content).filter(({ type }) => type === 'url'); + imeta.push(...getMediaLinks(links)); + } + + if (imeta.length) { + ext.media = 'true'; + + if (imeta.every((tags) => tags.some(([name, value]) => name === 'm' && value.startsWith('video/')))) { + ext.video = 'true'; + } + } + + ext.protocol = event.tags.find(([name]) => name === 'proxy')?.[2] ?? 
'nostr'; + + return ext; + } + + /** Build a search index from the event. */ + static searchText(event: NostrEvent): string { + switch (event.kind) { + case 0: + return DittoPgStore.buildUserSearchContent(event); + case 1: + case 20: + return nip27.replaceAll(event.content, () => ''); + case 30009: + return DittoPgStore.buildTagsSearchContent(event.tags.filter(([t]) => t !== 'alt')); + case 30360: + return event.tags.find(([name]) => name === 'd')?.[1] || ''; + default: + return ''; + } + } + + /** Build search content for a user. */ + static buildUserSearchContent(event: NostrEvent): string { + const { name, nip05 } = n.json().pipe(n.metadata()).catch({}).parse(event.content); + return [name, nip05].filter(Boolean).join('\n'); + } + + /** Build search content from tag values. */ + static buildTagsSearchContent(tags: string[][]): string { + return tags.map(([_tag, value]) => value).join('\n'); + } + + /** Converts filters to more performant, simpler filters. */ + async expandFilters(filters: NostrFilter[]): Promise { + filters = structuredClone(filters); + + for (const filter of filters) { + if (filter.since && filter.since >= 2_147_483_647) { + throw new RelayError('invalid', 'since filter too far into the future'); + } + if (filter.until && filter.until >= 2_147_483_647) { + throw new RelayError('invalid', 'until filter too far into the future'); + } + for (const kind of filter.kinds ?? 
[]) { + if (kind >= 2_147_483_647) { + throw new RelayError('invalid', 'kind filter too far into the future'); + } + } + + if (filter.search) { + const tokens = NIP50.parseInput(filter.search); + + const domains = new Set(); + const hostnames = new Set(); + + for (const token of tokens) { + if (typeof token === 'object' && token.key === 'domain') { + const { domain, hostname } = tldts.parse(token.value); + if (domain === hostname) { + domains.add(token.value); + } else { + hostnames.add(token.value); + } + } + } + + if (domains.size || hostnames.size) { + let query = this.opts.db.kysely + .selectFrom('author_stats') + .select('pubkey') + .where((eb) => { + const expr = []; + if (domains.size) { + expr.push(eb('nip05_domain', 'in', [...domains])); + } + if (hostnames.size) { + expr.push(eb('nip05_hostname', 'in', [...hostnames])); + } + if (expr.length === 1) { + return expr[0]; + } + return eb.or(expr); + }); + + if (filter.authors) { + query = query.where('pubkey', 'in', filter.authors); + } + + const pubkeys = await query.execute().then((rows) => rows.map((row) => row.pubkey)); + + filter.authors = pubkeys; + } + + // Re-serialize the search string without the domain key. :facepalm: + filter.search = tokens + .filter((t) => typeof t === 'string' || typeof t === 'object' && t.key !== 'domain') + .map((t) => typeof t === 'object' ? `${t.key}:${t.value}` : t) + .join(' '); + } + } + + return filters; + } + + /** Execute the callback in a new transaction, unless the Kysely instance is already a transaction. */ + private static override async trx( + db: Kysely, + callback: (trx: Kysely) => Promise, + ): Promise { + if (db.isTransaction) { + return await callback(db); + } else { + return await db.transaction().execute((trx) => callback(trx)); + } + } + + /** Execute NPostgres functions in a transaction. 
*/ + // @ts-ignore gg + override async transaction( + callback: (store: DittoPgStore, kysely: Kysely) => Promise, + ): Promise { + const { db } = this.opts; + + await DittoPgStore.trx(db.kysely, async (trx) => { + const store = new DittoPgStore({ ...this.opts, db: { ...db, kysely: trx }, notify: false }); + await callback(store, trx); + }); + } +} diff --git a/packages/ditto/storages/DittoPool.ts b/packages/ditto/storages/DittoPool.ts new file mode 100644 index 00000000..53545128 --- /dev/null +++ b/packages/ditto/storages/DittoPool.ts @@ -0,0 +1,91 @@ +// deno-lint-ignore-file require-await +import { DittoConf } from '@ditto/conf'; +import { NostrEvent, NostrFilter, NPool, type NRelay, NRelay1 } from '@nostrify/nostrify'; +import { logi } from '@soapbox/logi'; + +interface DittoPoolOpts { + conf: DittoConf; + relay: NRelay; + maxEventRelays?: number; +} + +export class DittoPool extends NPool { + private _opts: DittoPoolOpts; + + constructor(opts: DittoPoolOpts) { + super({ + open(url) { + return new NRelay1(url, { + // Skip event verification (it's done in the pipeline). 
+ verifyEvent: () => true, + log: logi, + }); + }, + reqRouter: (filters) => { + return this.reqRouter(filters); + }, + eventRouter: async (event) => { + return this.eventRouter(event); + }, + }); + + this._opts = opts; + } + + private async reqRouter(filters: NostrFilter[]): Promise> { + const routes = new Map(); + + for (const relayUrl of await this.getRelayUrls({ marker: 'read' })) { + routes.set(relayUrl, filters); + } + + return routes; + } + + private async eventRouter(event: NostrEvent): Promise { + const { conf, maxEventRelays = 4 } = this._opts; + const { pubkey } = event; + + const relaySet = await this.getRelayUrls({ pubkey, marker: 'write' }); + relaySet.delete(conf.relay); + + return [...relaySet].slice(0, maxEventRelays); + } + + private async getRelayUrls(opts: { pubkey?: string; marker?: 'read' | 'write' } = {}): Promise> { + const { conf, relay } = this._opts; + + const relays = new Set<`wss://${string}`>(); + const authors = new Set([await conf.signer.getPublicKey()]); + + if (opts.pubkey) { + authors.add(opts.pubkey); + } + + const events = await relay.query([ + { kinds: [10002], authors: [...authors] }, + ]); + + // Ensure user's own relay list is counted first. + if (opts.pubkey) { + events.sort((a) => a.pubkey === opts.pubkey ? 
-1 : 1); + } + + for (const event of events) { + for (const [name, relayUrl, marker] of event.tags) { + if (name === 'r' && (!marker || !opts.marker || marker === opts.marker)) { + try { + const url = new URL(relayUrl); + if (url.protocol === 'wss:') { + relays.add(url.toString() as `wss://${string}`); + } + } catch { + // fallthrough + } + } + } + } + + return relays; + } +} diff --git a/packages/ditto/storages/DittoRelayStore.test.ts b/packages/ditto/storages/DittoRelayStore.test.ts new file mode 100644 index 00000000..66690efa --- /dev/null +++ b/packages/ditto/storages/DittoRelayStore.test.ts @@ -0,0 +1,69 @@ +import { DittoPolyPg } from '@ditto/db'; +import { DittoConf } from '@ditto/conf'; +import { genEvent, MockRelay } from '@nostrify/nostrify/test'; +import { assertEquals } from '@std/assert'; +import { generateSecretKey, getPublicKey } from 'nostr-tools'; + +import { DittoRelayStore } from './DittoRelayStore.ts'; + +import type { NostrMetadata } from '@nostrify/types'; + +Deno.test('updateAuthorData sets nip05', async () => { + const alex = generateSecretKey(); + + await using test = setupTest((req) => { + switch (req.url) { + case 'https://gleasonator.dev/.well-known/nostr.json?name=alex': + return jsonResponse({ names: { alex: getPublicKey(alex) } }); + default: + return new Response('Not found', { status: 404 }); + } + }); + + const { db, store } = test; + + const metadata: NostrMetadata = { nip05: 'alex@gleasonator.dev' }; + const event = genEvent({ kind: 0, content: JSON.stringify(metadata) }, alex); + + await store.updateAuthorData(event); + + const row = await db.kysely + .selectFrom('author_stats') + .selectAll() + .where('pubkey', '=', getPublicKey(alex)) + .executeTakeFirst(); + + assertEquals(row?.nip05, 'alex@gleasonator.dev'); + assertEquals(row?.nip05_domain, 'gleasonator.dev'); + assertEquals(row?.nip05_hostname, 'gleasonator.dev'); +}); + +function setupTest(cb: (req: Request) => Response | Promise) { + const conf = new 
DittoConf(Deno.env); + const db = new DittoPolyPg(conf.databaseUrl); + const relay = new MockRelay(); + + const mockFetch: typeof fetch = async (input, init) => { + const req = new Request(input, init); + return await cb(req); + }; + + const store = new DittoRelayStore({ conf, db, relay, fetch: mockFetch }); + + return { + db, + store, + [Symbol.asyncDispose]: async () => { + await store[Symbol.asyncDispose](); + await db[Symbol.asyncDispose](); + }, + }; +} + +function jsonResponse(body: unknown): Response { + return new Response(JSON.stringify(body), { + headers: { + 'Content-Type': 'application/json', + }, + }); +} diff --git a/packages/ditto/storages/DittoRelayStore.ts b/packages/ditto/storages/DittoRelayStore.ts new file mode 100644 index 00000000..7b935d96 --- /dev/null +++ b/packages/ditto/storages/DittoRelayStore.ts @@ -0,0 +1,470 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoDB, DittoTables } from '@ditto/db'; +import { + cachedFaviconsSizeGauge, + cachedNip05sSizeGauge, + pipelineEventsCounter, + policyEventsCounter, + webPushNotificationsCounter, +} from '@ditto/metrics'; +import { + NKinds, + NostrEvent, + NostrFilter, + NostrRelayCLOSED, + NostrRelayCOUNT, + NostrRelayEOSE, + NostrRelayEVENT, + NRelay, + NSchema as n, +} from '@nostrify/nostrify'; +import { logi } from '@soapbox/logi'; +import { UpdateObject } from 'kysely'; +import { LRUCache } from 'lru-cache'; +import tldts from 'tldts'; +import { z } from 'zod'; + +import { DittoPush } from '@/DittoPush.ts'; +import { DittoEvent } from '@/interfaces/DittoEvent.ts'; +import { RelayError } from '@/RelayError.ts'; +import { hydrateEvents } from '@/storages/hydrate.ts'; +import { eventAge, nostrNow, Time } from '@/utils.ts'; +import { getAmount } from '@/utils/bolt11.ts'; +import { errorJson } from '@/utils/log.ts'; +import { purifyEvent } from '@/utils/purify.ts'; +import { getTagSet } from '@/utils/tags.ts'; +import { PolicyWorker } from '@/workers/policy.ts'; +import { 
verifyEventWorker } from '@/workers/verify.ts'; +import { fetchFavicon, insertFavicon, queryFavicon } from '@/utils/favicon.ts'; +import { lookupNip05 } from '@/utils/nip05.ts'; +import { parseNoteContent, stripimeta } from '@/utils/note.ts'; +import { SimpleLRU } from '@/utils/SimpleLRU.ts'; +import { unfurlCardCached } from '@/utils/unfurl.ts'; +import { renderWebPushNotification } from '@/views/mastodon/push.ts'; +import { nip19 } from 'nostr-tools'; + +interface DittoRelayStoreOpts { + db: DittoDB; + conf: DittoConf; + relay: NRelay; + fetch?: typeof fetch; +} + +/** Backing storage class for Ditto relay implementation at `/relay`. */ +export class DittoRelayStore implements NRelay { + private push: DittoPush; + private encounters = new LRUCache({ max: 5000 }); + private controller = new AbortController(); + private policyWorker: PolicyWorker; + + private faviconCache: SimpleLRU; + private nip05Cache: SimpleLRU; + + private ns = 'ditto.relay.store'; + + constructor(private opts: DittoRelayStoreOpts) { + const { conf, db } = this.opts; + + this.push = new DittoPush(opts); + this.policyWorker = new PolicyWorker(conf); + + this.listen().catch((e: unknown) => { + logi({ level: 'error', ns: this.ns, source: 'listen', error: errorJson(e) }); + }); + + this.faviconCache = new SimpleLRU( + async (domain, { signal }) => { + const row = await queryFavicon(db.kysely, domain); + + if (row && (nostrNow() - row.last_updated_at) < (conf.caches.favicon.ttl / 1000)) { + return new URL(row.favicon); + } + + const url = await fetchFavicon(domain, signal); + await insertFavicon(db.kysely, domain, url.href); + return url; + }, + { ...conf.caches.favicon, gauge: cachedFaviconsSizeGauge }, + ); + + this.nip05Cache = new SimpleLRU( + (nip05, { signal }) => { + return lookupNip05(nip05, { ...this.opts, signal }); + }, + { ...conf.caches.nip05, gauge: cachedNip05sSizeGauge }, + ); + } + + /** Open a firehose to the relay. 
*/ + private async listen(): Promise { + const { relay } = this.opts; + const { signal } = this.controller; + + for await (const msg of relay.req([{ limit: 0 }], { signal })) { + if (msg[0] === 'EVENT') { + const [, , event] = msg; + await this.event(event, { signal }); + } + } + } + + req( + filters: NostrFilter[], + opts?: { signal?: AbortSignal }, + ): AsyncIterable { + const { relay } = this.opts; + return relay.req(filters, opts); + } + + /** + * Common pipeline function to process (and maybe store) events. + * It is idempotent, so it can be called multiple times for the same event. + */ + async event(event: DittoEvent, opts: { publish?: boolean; signal?: AbortSignal } = {}): Promise { + const { conf, relay } = this.opts; + const { signal } = opts; + + // Skip events that have already been encountered. + if (this.encounters.get(event.id)) { + throw new RelayError('duplicate', 'already have this event'); + } + // Reject events that are too far in the future. + if (eventAge(event) < -Time.minutes(1)) { + throw new RelayError('invalid', 'event too far in the future'); + } + // Integer max value for Postgres. + if (event.kind >= 2_147_483_647) { + throw new RelayError('invalid', 'event kind too large'); + } + // The only point of ephemeral events is to stream them, + // so throw an error if we're not even going to do that. + if (NKinds.ephemeral(event.kind) && !this.isFresh(event)) { + throw new RelayError('invalid', 'event too old'); + } + // Block NIP-70 events, because we have no way to `AUTH`. + if (event.tags.some(([name]) => name === '-')) { + throw new RelayError('invalid', 'protected event'); + } + // Validate the event's signature. + if (!(await verifyEventWorker(event))) { + throw new RelayError('invalid', 'invalid signature'); + } + // Recheck encountered after async ops. + if (this.encounters.has(event.id)) { + throw new RelayError('duplicate', 'already have this event'); + } + // Set the event as encountered after verifying the signature. 
+ this.encounters.set(event.id, true); + + // Log the event. + logi({ level: 'debug', ns: 'ditto.event', source: 'pipeline', id: event.id, kind: event.kind }); + pipelineEventsCounter.inc({ kind: event.kind }); + + // NIP-46 events get special treatment. + // They are exempt from policies and other side-effects, and should be streamed out immediately. + // If streaming fails, an error should be returned. + if (event.kind === 24133) { + await relay.event(event, { signal }); + } + + // Ensure the event doesn't violate the policy. + if (event.pubkey !== await conf.signer.getPublicKey()) { + await this.policyFilter(event, signal); + } + + // Prepare the event for additional checks. + // FIXME: This is kind of hacky. Should be reorganized to fetch only what's needed for each stage. + await this.hydrateEvent(event, signal); + + // Ensure that the author is not banned. + const n = getTagSet(event.user?.tags ?? [], 'n'); + if (n.has('disabled')) { + throw new RelayError('blocked', 'author is blocked'); + } + + try { + await relay.event(purifyEvent(event), { signal }); + } finally { + // This needs to run in steps, and should not block the API from responding. 
+ Promise.allSettled([ + this.handleZaps(event), + this.updateAuthorData(event, signal), + this.prewarmLinkPreview(event, signal), + this.generateSetEvents(event), + ]) + .then(() => this.webPush(event)) + .catch(() => {}); + } + } + + private async policyFilter(event: NostrEvent, signal?: AbortSignal): Promise { + try { + const result = await this.policyWorker.call(event, signal); + const [, , ok, reason] = result; + logi({ level: 'debug', ns: 'ditto.policy', id: event.id, kind: event.kind, ok, reason }); + policyEventsCounter.inc({ ok: String(ok) }); + RelayError.assert(result); + } catch (e) { + if (e instanceof RelayError) { + throw e; + } else { + logi({ level: 'error', ns: 'ditto.policy', id: event.id, kind: event.kind, error: errorJson(e) }); + throw new RelayError('blocked', 'policy error'); + } + } + } + + /** Stores the event in the 'event_zaps' table */ + private async handleZaps(event: NostrEvent) { + if (event.kind !== 9735) return; + + const { db } = this.opts; + + const zapRequestString = event?.tags?.find(([name]) => name === 'description')?.[1]; + if (!zapRequestString) return; + const zapRequest = n.json().pipe(n.event()).optional().catch(undefined).parse(zapRequestString); + if (!zapRequest) return; + + const amountSchema = z.coerce.number().int().nonnegative().catch(0); + const amount_millisats = amountSchema.parse(getAmount(event?.tags.find(([name]) => name === 'bolt11')?.[1])); + if (!amount_millisats || amount_millisats < 1) return; + + const zappedEventId = zapRequest.tags.find(([name]) => name === 'e')?.[1]; + if (!zappedEventId) return; + + try { + await db.kysely.insertInto('event_zaps').values({ + receipt_id: event.id, + target_event_id: zappedEventId, + sender_pubkey: zapRequest.pubkey, + amount_millisats, + comment: zapRequest.content, + }).execute(); + } catch { + // receipt_id is unique, do nothing + } + } + + /** Parse kind 0 metadata and track indexes in the database. 
*/ + async updateAuthorData(event: NostrEvent, signal?: AbortSignal): Promise { + if (event.kind !== 0) return; + + const { db } = this.opts; + + // Parse metadata. + const metadata = n.json().pipe(n.metadata()).catch({}).safeParse(event.content); + if (!metadata.success) return; + + const { name, nip05 } = metadata.data; + + const updates: UpdateObject = {}; + + const authorStats = await db.kysely + .selectFrom('author_stats') + .selectAll() + .where('pubkey', '=', event.pubkey) + .executeTakeFirst(); + + const lastVerified = authorStats?.nip05_last_verified_at; + const eventNewer = !lastVerified || event.created_at > lastVerified; + + try { + if (nip05 !== authorStats?.nip05 && eventNewer || !lastVerified) { + if (nip05) { + const tld = tldts.parse(nip05); + if (tld.isIcann && !tld.isIp && !tld.isPrivate) { + const pointer = await this.nip05Cache.fetch(nip05, { signal }); + if (pointer.pubkey === event.pubkey) { + updates.nip05 = nip05; + updates.nip05_domain = tld.domain; + updates.nip05_hostname = tld.hostname; + updates.nip05_last_verified_at = event.created_at; + } + } + } else { + updates.nip05 = null; + updates.nip05_domain = null; + updates.nip05_hostname = null; + updates.nip05_last_verified_at = event.created_at; + } + } + } catch { + // Fallthrough. + } + + // Fetch favicon. + const domain = nip05?.split('@')[1].toLowerCase(); + if (domain) { + try { + await this.faviconCache.fetch(domain, { signal }); + } catch { + // Fallthrough. 
+ } + } + + const search = [name, nip05].filter(Boolean).join(' ').trim(); + + if (search !== authorStats?.search) { + updates.search = search; + } + + if (Object.keys(updates).length) { + await db.kysely.insertInto('author_stats') + .values({ + pubkey: event.pubkey, + followers_count: 0, + following_count: 0, + notes_count: 0, + search, + ...updates, + }) + .onConflict((oc) => oc.column('pubkey').doUpdateSet(updates)) + .execute(); + } + } + + private async prewarmLinkPreview(event: NostrEvent, signal?: AbortSignal): Promise { + const { firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), []); + if (firstUrl) { + await unfurlCardCached(firstUrl, signal); + } + } + + private async generateSetEvents(event: NostrEvent): Promise { + const { conf } = this.opts; + + const signer = conf.signer; + const pubkey = await signer.getPublicKey(); + + const tagsAdmin = event.tags.some(([name, value]) => ['p', 'P'].includes(name) && value === pubkey); + + if (event.kind === 1984 && tagsAdmin) { + const rel = await signer.signEvent({ + kind: 30383, + content: '', + tags: [ + ['d', event.id], + ['p', event.pubkey], + ['k', '1984'], + ['n', 'open'], + ...[...getTagSet(event.tags, 'p')].map((value) => ['P', value]), + ...[...getTagSet(event.tags, 'e')].map((value) => ['e', value]), + ], + created_at: Math.floor(Date.now() / 1000), + }); + + await this.event(rel, { signal: AbortSignal.timeout(1000) }); + } + + if (event.kind === 3036 && tagsAdmin) { + const rel = await signer.signEvent({ + kind: 30383, + content: '', + tags: [ + ['d', event.id], + ['p', event.pubkey], + ['k', '3036'], + ['n', 'pending'], + ], + created_at: Math.floor(Date.now() / 1000), + }); + + await this.event(rel, { signal: AbortSignal.timeout(1000) }); + } + } + + private async webPush(event: NostrEvent): Promise { + if (!this.isFresh(event)) { + throw new RelayError('invalid', 'event too old'); + } + + const { db, relay } = this.opts; + const pubkeys = getTagSet(event.tags, 'p'); + + if 
(!pubkeys.size) { + return; + } + + const rows = await db.kysely + .selectFrom('push_subscriptions') + .selectAll() + .where('pubkey', 'in', [...pubkeys]) + .execute(); + + for (const row of rows) { + const viewerPubkey = row.pubkey; + + if (viewerPubkey === event.pubkey) { + continue; // Don't notify authors about their own events. + } + + const message = await renderWebPushNotification(relay, event, viewerPubkey); + if (!message) { + continue; + } + + const subscription = { + endpoint: row.endpoint, + keys: { + auth: row.auth, + p256dh: row.p256dh, + }, + }; + + await this.push.push(subscription, message); + webPushNotificationsCounter.inc({ type: message.notification_type }); + } + } + + /** Hydrate the event with the user, if applicable. */ + private async hydrateEvent(event: NostrEvent, signal?: AbortSignal): Promise { + const [hydrated] = await hydrateEvents({ ...this.opts, events: [event], signal }); + return hydrated; + } + + /** Determine if the event is being received in a timely manner. 
*/ + private isFresh(event: NostrEvent): boolean { + return eventAge(event) < Time.minutes(1); + } + + async query(filters: NostrFilter[], opts: { pure?: boolean; signal?: AbortSignal } = {}): Promise { + const { relay } = this.opts; + const { pure = true, signal } = opts; // TODO: make pure `false` by default + + const events = await relay.query(filters, opts); + + if (!pure) { + return hydrateEvents({ ...this.opts, events, signal }); + } + + return events; + } + + count(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise { + const { relay } = this.opts; + if (!relay.count) { + return Promise.reject(new Error('Method not implemented.')); + } + return relay.count(filters, opts); + } + + remove(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise { + const { relay } = this.opts; + if (!relay.remove) { + return Promise.reject(new Error('Method not implemented.')); + } + return relay.remove(filters, opts); + } + + async close(): Promise { + const { relay } = this.opts; + + this.controller.abort(); + + await relay.close(); + } + + [Symbol.asyncDispose](): Promise { + return this.close(); + } +} diff --git a/src/storages/hydrate.bench.ts b/packages/ditto/storages/hydrate.bench.ts similarity index 79% rename from src/storages/hydrate.bench.ts rename to packages/ditto/storages/hydrate.bench.ts index eeacec50..4da8afbf 100644 --- a/src/storages/hydrate.bench.ts +++ b/packages/ditto/storages/hydrate.bench.ts @@ -1,5 +1,6 @@ +import { jsonlEvents } from '@nostrify/nostrify/test'; + import { assembleEvents } from '@/storages/hydrate.ts'; -import { jsonlEvents } from '@/test.ts'; const testEvents = await jsonlEvents('fixtures/hydrated.jsonl'); const testStats = JSON.parse(await Deno.readTextFile('fixtures/stats.json')); @@ -9,5 +10,5 @@ const testStats = JSON.parse(await Deno.readTextFile('fixtures/stats.json')); const events = testEvents.slice(0, 20); Deno.bench('assembleEvents with home feed', () => { - assembleEvents(events, testEvents, 
testStats); + assembleEvents('', events, testEvents, testStats); }); diff --git a/src/storages/hydrate.test.ts b/packages/ditto/storages/hydrate.test.ts similarity index 80% rename from src/storages/hydrate.test.ts rename to packages/ditto/storages/hydrate.test.ts index 1527f321..fa14d50d 100644 --- a/src/storages/hydrate.test.ts +++ b/packages/ditto/storages/hydrate.test.ts @@ -1,13 +1,16 @@ +import { DittoConf } from '@ditto/conf'; +import { DummyDB } from '@ditto/db'; import { MockRelay } from '@nostrify/nostrify/test'; import { assertEquals } from '@std/assert'; +import { generateSecretKey, nip19 } from 'nostr-tools'; import { DittoEvent } from '@/interfaces/DittoEvent.ts'; import { hydrateEvents } from '@/storages/hydrate.ts'; -import { createTestDB, eventFixture } from '@/test.ts'; +import { eventFixture } from '@/test.ts'; Deno.test('hydrateEvents(): author --- WITHOUT stats', async () => { - const relay = new MockRelay(); - await using db = await createTestDB(); + const opts = setupTest(); + const { relay } = opts; const event0 = await eventFixture('event-0'); const event1 = await eventFixture('event-1'); @@ -16,19 +19,15 @@ Deno.test('hydrateEvents(): author --- WITHOUT stats', async () => { await relay.event(event0); await relay.event(event1); - await hydrateEvents({ - events: [event1], - store: relay, - kysely: db.kysely, - }); + await hydrateEvents({ ...opts, events: [event1] }); const expectedEvent = { ...event1, author: event0 }; assertEquals(event1, expectedEvent); }); Deno.test('hydrateEvents(): repost --- WITHOUT stats', async () => { - const relay = new MockRelay(); - await using db = await createTestDB(); + const opts = setupTest(); + const { relay } = opts; const event0madePost = await eventFixture('event-0-the-one-who-post-and-users-repost'); const event0madeRepost = await eventFixture('event-0-the-one-who-repost'); @@ -41,23 +40,20 @@ Deno.test('hydrateEvents(): repost --- WITHOUT stats', async () => { await relay.event(event1reposted); await 
relay.event(event6); - await hydrateEvents({ - events: [event6], - store: relay, - kysely: db.kysely, - }); + await hydrateEvents({ ...opts, events: [event6] }); const expectedEvent6 = { ...event6, author: event0madeRepost, repost: { ...event1reposted, author: event0madePost }, }; + assertEquals(event6, expectedEvent6); }); Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => { - const relay = new MockRelay(); - await using db = await createTestDB(); + const opts = setupTest(); + const { relay } = opts; const event0madeQuoteRepost = await eventFixture('event-0-the-one-who-quote-repost'); const event0 = await eventFixture('event-0'); @@ -70,11 +66,7 @@ Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => { await relay.event(event1quoteRepost); await relay.event(event1willBeQuoteReposted); - await hydrateEvents({ - events: [event1quoteRepost], - store: relay, - kysely: db.kysely, - }); + await hydrateEvents({ ...opts, events: [event1quoteRepost] }); const expectedEvent1quoteRepost = { ...event1quoteRepost, @@ -86,8 +78,8 @@ Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => { }); Deno.test('hydrateEvents(): repost of quote repost --- WITHOUT stats', async () => { - const relay = new MockRelay(); - await using db = await createTestDB(); + const opts = setupTest(); + const { relay } = opts; const author = await eventFixture('event-0-makes-repost-with-quote-repost'); const event1 = await eventFixture('event-1-will-be-reposted-with-quote-repost'); @@ -100,23 +92,20 @@ Deno.test('hydrateEvents(): repost of quote repost --- WITHOUT stats', async () await relay.event(event1quote); await relay.event(event6); - await hydrateEvents({ - events: [event6], - store: relay, - kysely: db.kysely, - }); + await hydrateEvents({ ...opts, events: [event6] }); const expectedEvent6 = { ...event6, author, repost: { ...event1quote, author, quote: { author, ...event1 } }, }; + assertEquals(event6, expectedEvent6); }); 
Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stats', async () => { - const relay = new MockRelay(); - await using db = await createTestDB(); + const opts = setupTest(); + const { relay } = opts; const authorDictator = await eventFixture('kind-0-dictator'); const authorVictim = await eventFixture('kind-0-george-orwell'); @@ -129,11 +118,7 @@ Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stat await relay.event(reportEvent); await relay.event(event1); - await hydrateEvents({ - events: [reportEvent], - store: relay, - kysely: db.kysely, - }); + await hydrateEvents({ ...opts, events: [reportEvent] }); const expectedEvent: DittoEvent = { ...reportEvent, @@ -141,12 +126,13 @@ Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stat reported_notes: [event1], reported_profile: authorVictim, }; + assertEquals(reportEvent, expectedEvent); }); Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 --- WITHOUT stats', async () => { - const relay = new MockRelay(); - await using db = await createTestDB(); + const opts = setupTest(); + const { relay } = opts; const zapSender = await eventFixture('kind-0-jack'); const zapReceipt = await eventFixture('kind-9735-jack-zap-patrick'); @@ -159,11 +145,7 @@ Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 --- await relay.event(zappedPost); await relay.event(zapReceiver); - await hydrateEvents({ - events: [zapReceipt], - store: relay, - kysely: db.kysely, - }); + await hydrateEvents({ ...opts, events: [zapReceipt] }); const expectedEvent: DittoEvent = { ...zapReceipt, @@ -175,5 +157,14 @@ Deno.test('hydrateEvents(): zap sender, zap amount, zapped post // kind 9735 --- zap_amount: 5225000, // millisats zap_message: '🫂', }; + assertEquals(zapReceipt, expectedEvent); }); + +function setupTest() { + const db = new DummyDB(); + const conf = new DittoConf(new Map([['DITTO_NSEC', 
nip19.nsecEncode(generateSecretKey())]])); + const relay = new MockRelay(); + + return { conf, db, relay }; +} diff --git a/src/storages/hydrate.ts b/packages/ditto/storages/hydrate.ts similarity index 63% rename from src/storages/hydrate.ts rename to packages/ditto/storages/hydrate.ts index 7f5c8125..a4dfe7ab 100644 --- a/src/storages/hydrate.ts +++ b/packages/ditto/storages/hydrate.ts @@ -1,28 +1,28 @@ +import { DittoDB, DittoTables } from '@ditto/db'; +import { DittoConf } from '@ditto/conf'; import { NStore } from '@nostrify/nostrify'; import { Kysely } from 'kysely'; import { matchFilter } from 'nostr-tools'; import { NSchema as n } from '@nostrify/nostrify'; import { z } from 'zod'; -import { DittoTables } from '@/db/DittoTables.ts'; -import { Conf } from '@/config.ts'; import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; -import { fallbackAuthor } from '@/utils.ts'; +import { fallbackAuthor, isNostrId } from '@/utils.ts'; import { findQuoteTag } from '@/utils/tags.ts'; import { findQuoteInContent } from '@/utils/note.ts'; import { getAmount } from '@/utils/bolt11.ts'; -import { Storages } from '@/storages.ts'; interface HydrateOpts { + db: DittoDB; + conf: DittoConf; + relay: NStore; events: DittoEvent[]; - store: NStore; signal?: AbortSignal; - kysely?: Kysely; } /** Hydrate events using the provided storage. 
*/ async function hydrateEvents(opts: HydrateOpts): Promise { - const { events, store, signal, kysely = await Storages.kysely() } = opts; + const { conf, db, events } = opts; if (!events.length) { return events; @@ -30,69 +30,96 @@ async function hydrateEvents(opts: HydrateOpts): Promise { const cache = [...events]; - for (const event of await gatherReposts({ events: cache, store, signal })) { + for (const event of await gatherRelatedEvents({ ...opts, events: cache })) { cache.push(event); } - for (const event of await gatherReacted({ events: cache, store, signal })) { + for (const event of await gatherQuotes({ ...opts, events: cache })) { cache.push(event); } - for (const event of await gatherQuotes({ events: cache, store, signal })) { + for (const event of await gatherProfiles({ ...opts, events: cache })) { cache.push(event); } - for (const event of await gatherAuthors({ events: cache, store, signal })) { + for (const event of await gatherUsers({ ...opts, events: cache })) { cache.push(event); } - for (const event of await gatherUsers({ events: cache, store, signal })) { + for (const event of await gatherInfo({ ...opts, events: cache })) { cache.push(event); } - for (const event of await gatherInfo({ events: cache, store, signal })) { - cache.push(event); - } + const authorStats = await gatherAuthorStats(cache, db.kysely); + const eventStats = await gatherEventStats(cache, db.kysely); - for (const event of await gatherReportedProfiles({ events: cache, store, signal })) { - cache.push(event); - } + const domains = authorStats.reduce((result, { nip05_hostname }) => { + if (nip05_hostname) result.add(nip05_hostname); + return result; + }, new Set()); - for (const event of await gatherReportedNotes({ events: cache, store, signal })) { - cache.push(event); - } - - for (const event of await gatherZapped({ events: cache, store, signal })) { - cache.push(event); - } + const favicons = ( + await db.kysely + .selectFrom('domain_favicons') + .select(['domain', 'favicon']) + 
.where('domain', 'in', [...domains]) + .execute() + ) + .reduce((result, { domain, favicon }) => { + result[domain] = favicon; + return result; + }, {} as Record); const stats = { - authors: await gatherAuthorStats(cache, kysely as Kysely), - events: await gatherEventStats(cache, kysely as Kysely), + authors: authorStats, + events: eventStats, + favicons, }; // Dedupe events. const results = [...new Map(cache.map((event) => [event.id, event])).values()]; + const admin = await conf.signer.getPublicKey(); + // First connect all the events to each-other, then connect the connected events to the original list. - assembleEvents(results, results, stats); - assembleEvents(events, results, stats); + assembleEvents(admin, results, results, stats); + assembleEvents(admin, events, results, stats); return events; } /** Connect the events in list `b` to the DittoEvent fields in list `a`. */ export function assembleEvents( + admin: string, a: DittoEvent[], b: DittoEvent[], - stats: { authors: DittoTables['author_stats'][]; events: DittoTables['event_stats'][] }, + stats: { + authors: DittoTables['author_stats'][]; + events: DittoTables['event_stats'][]; + favicons: Record; + }, ): DittoEvent[] { - const admin = Conf.pubkey; + const authorStats = stats.authors.reduce((result, { pubkey, ...stat }) => { + result[pubkey] = { + ...stat, + streak_start: stat.streak_start ?? undefined, + streak_end: stat.streak_end ?? undefined, + nip05: stat.nip05 ?? undefined, + nip05_domain: stat.nip05_domain ?? undefined, + nip05_hostname: stat.nip05_hostname ?? undefined, + nip05_last_verified_at: stat.nip05_last_verified_at ?? 
undefined, + favicon: stats.favicons[stat.nip05_hostname!], + }; + return result; + }, {} as Record); - const eventStats = stats.events.map((stat) => ({ - ...stat, - reactions: JSON.parse(stat.reactions), - })); + const eventStats = stats.events.reduce((result, { event_id, ...stat }) => { + result[event_id] = { + ...stat, + reactions: JSON.parse(stat.reactions), + }; + return result; + }, {} as Record); for (const event of a) { event.author = b.find((e) => matchFilter({ kinds: [0], authors: [event.pubkey] }, e)); @@ -102,21 +129,25 @@ export function assembleEvents( if (event.kind === 1) { const id = findQuoteTag(event.tags)?.[1] || findQuoteInContent(event.content); if (id) { - event.quote = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e)); + event.quote = b.find((e) => matchFilter({ kinds: [1, 20], ids: [id] }, e)); } + + const pubkeys = event.tags.filter(([name, value]) => name === 'p' && isNostrId(value)) + .map(([_name, value]) => value); + event.mentions = b.filter((e) => matchFilter({ kinds: [0], authors: pubkeys }, e)); } if (event.kind === 6) { const id = event.tags.find(([name]) => name === 'e')?.[1]; if (id) { - event.repost = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e)); + event.repost = b.find((e) => matchFilter({ kinds: [1, 20], ids: [id] }, e)); } } if (event.kind === 7) { const id = event.tags.findLast(([name]) => name === 'e')?.[1]; if (id) { - event.reacted = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e)); + event.reacted = b.find((e) => matchFilter({ kinds: [1, 20], ids: [id] }, e)); } } @@ -130,7 +161,7 @@ export function assembleEvents( const ids = event.tags.filter(([name]) => name === 'e').map(([_name, value]) => value); for (const id of ids) { - const reported = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e)); + const reported = b.find((e) => matchFilter({ kinds: [1, 20], ids: [id] }, e)); if (reported) { reportedEvents.push(reported); } @@ -146,7 +177,7 @@ export function assembleEvents( const id = 
event.tags.find(([name]) => name === 'e')?.[1]; if (id) { - event.zapped = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e)); + event.zapped = b.find((e) => matchFilter({ kinds: [1, 20], ids: [id] }, e)); } const zapRequestString = event?.tags?.find(([name]) => name === 'description')?.[1]; @@ -161,53 +192,57 @@ export function assembleEvents( event.zap_message = zapRequest?.content ?? ''; } - event.author_stats = stats.authors.find((stats) => stats.pubkey === event.pubkey); - event.event_stats = eventStats.find((stats) => stats.event_id === event.id); + event.author_stats = authorStats[event.pubkey]; + event.event_stats = eventStats[event.id]; } return a; } -/** Collect reposts from the events. */ -function gatherReposts({ events, store, signal }: HydrateOpts): Promise { +/** Collect event targets (eg reposts, quote posts, reacted posts, etc.) */ +function gatherRelatedEvents({ events, relay, signal }: HydrateOpts): Promise { const ids = new Set(); for (const event of events) { + // Reposted events if (event.kind === 6) { const id = event.tags.find(([name]) => name === 'e')?.[1]; if (id) { ids.add(id); } } + // Reacted events + if (event.kind === 7) { + const id = event.tags.findLast(([name]) => name === 'e')?.[1]; + if (id) { + ids.add(id); + } + } + // Reported events + if (event.kind === 1984) { + for (const [name, value] of event.tags) { + if (name === 'e') { + ids.add(value); + } + } + } + // Zapped events + if (event.kind === 9735) { + const id = event.tags.find(([name]) => name === 'e')?.[1]; + if (id) { + ids.add(id); + } + } } - return store.query( - [{ ids: [...ids], limit: ids.size }], - { signal }, - ); -} - -/** Collect events being reacted to by the events. 
*/ -function gatherReacted({ events, store, signal }: HydrateOpts): Promise { - const ids = new Set(); - - for (const event of events) { - if (event.kind === 7) { - const id = event.tags.findLast(([name]) => name === 'e')?.[1]; - if (id) { - ids.add(id); - } - } - } - - return store.query( + return relay.query( [{ ids: [...ids], limit: ids.size }], { signal }, ); } /** Collect quotes from the events. */ -function gatherQuotes({ events, store, signal }: HydrateOpts): Promise { +function gatherQuotes({ events, relay, signal }: HydrateOpts): Promise { const ids = new Set(); for (const event of events) { @@ -219,17 +254,36 @@ function gatherQuotes({ events, store, signal }: HydrateOpts): Promise { +/** Collect profiles from the events. */ +async function gatherProfiles({ events, relay, signal }: HydrateOpts): Promise { const pubkeys = new Set(); for (const event of events) { + // Authors + pubkeys.add(event.pubkey); + + // Mentions + if (event.kind === 1) { + for (const [name, value] of event.tags) { + if (name === 'p') { + pubkeys.add(value); + } + } + } + // Reported profiles + if (event.kind === 1984) { + const pubkey = event.tags.find(([name]) => name === 'p')?.[1]; + if (pubkey) { + pubkeys.add(pubkey); + } + } + // Zap recipients if (event.kind === 9735) { const zapReceiver = event.tags.find(([name]) => name === 'p')?.[1]; if (zapReceiver) { @@ -245,17 +299,16 @@ async function gatherAuthors({ events, store, signal }: HydrateOpts): Promise matchFilter({ kinds: [0], authors: [pubkey] }, e)); - if (author) { + if (!author) { const fallback = fallbackAuthor(pubkey); authors.push(fallback); } @@ -265,21 +318,21 @@ async function gatherAuthors({ events, store, signal }: HydrateOpts): Promise { +async function gatherUsers({ conf, events, relay, signal }: HydrateOpts): Promise { const pubkeys = new Set(events.map((event) => event.pubkey)); if (!pubkeys.size) { return Promise.resolve([]); } - return store.query( - [{ kinds: [30382], authors: [Conf.pubkey], '#d': 
[...pubkeys], limit: pubkeys.size }], + return relay.query( + [{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [...pubkeys], limit: pubkeys.size }], { signal }, ); } /** Collect info events from the events. */ -function gatherInfo({ events, store, signal }: HydrateOpts): Promise { +async function gatherInfo({ conf, events, relay, signal }: HydrateOpts): Promise { const ids = new Set(); for (const event of events) { @@ -292,66 +345,8 @@ function gatherInfo({ events, store, signal }: HydrateOpts): Promise { - const ids = new Set(); - for (const event of events) { - if (event.kind === 1984) { - const status_ids = event.tags.filter(([name]) => name === 'e').map((tag) => tag[1]); - if (status_ids.length > 0) { - for (const id of status_ids) { - ids.add(id); - } - } - } - } - - return store.query( - [{ kinds: [1], ids: [...ids], limit: ids.size }], - { signal }, - ); -} - -/** Collect reported profiles from the events. */ -function gatherReportedProfiles({ events, store, signal }: HydrateOpts): Promise { - const pubkeys = new Set(); - - for (const event of events) { - if (event.kind === 1984) { - const pubkey = event.tags.find(([name]) => name === 'p')?.[1]; - if (pubkey) { - pubkeys.add(pubkey); - } - } - } - - return store.query( - [{ kinds: [0], authors: [...pubkeys], limit: pubkeys.size }], - { signal }, - ); -} - -/** Collect events being zapped. 
*/ -function gatherZapped({ events, store, signal }: HydrateOpts): Promise { - const ids = new Set(); - - for (const event of events) { - if (event.kind === 9735) { - const id = event.tags.find(([name]) => name === 'e')?.[1]; - if (id) { - ids.add(id); - } - } - } - - return store.query( - [{ ids: [...ids], limit: ids.size }], + return relay.query( + [{ kinds: [30383], authors: [await conf.signer.getPublicKey()], '#d': [...ids], limit: ids.size }], { signal }, ); } @@ -378,11 +373,10 @@ async function gatherAuthorStats( .execute(); return rows.map((row) => ({ - pubkey: row.pubkey, + ...row, followers_count: Math.max(0, row.followers_count), following_count: Math.max(0, row.following_count), notes_count: Math.max(0, row.notes_count), - search: row.search, })); } diff --git a/packages/ditto/test.ts b/packages/ditto/test.ts new file mode 100644 index 00000000..f8fd08d8 --- /dev/null +++ b/packages/ditto/test.ts @@ -0,0 +1,49 @@ +import { DittoPolyPg } from '@ditto/db'; +import { NostrEvent } from '@nostrify/nostrify'; + +import { Conf } from '@/config.ts'; +import { DittoPgStore } from '@/storages/DittoPgStore.ts'; +import { sql } from 'kysely'; + +/** Import an event fixture by name in tests. */ +export async function eventFixture(name: string): Promise { + const result = await import(`~/fixtures/events/${name}.json`, { with: { type: 'json' } }); + return structuredClone(result.default); +} + +/** Create a database for testing. It uses `DATABASE_URL`, or creates an in-memory database by default. */ +export async function createTestDB(opts?: { pure?: boolean }) { + const db = new DittoPolyPg(Conf.databaseUrl, { poolSize: 1 }); + await db.migrate(); + + const store = new DittoPgStore({ + db, + timeout: Conf.db.timeouts.default, + pubkey: await Conf.signer.getPublicKey(), + pure: opts?.pure ?? 
false, + notify: true, + }); + + return { + db, + ...db, + store, + kysely: db.kysely, + [Symbol.asyncDispose]: async () => { + const { rows } = await sql< + { tablename: string } + >`select tablename from pg_tables where schemaname = current_schema()`.execute(db.kysely); + + for (const { tablename } of rows) { + if (tablename.startsWith('kysely_')) continue; + await sql`truncate table ${sql.ref(tablename)} cascade`.execute(db.kysely); + } + + await db[Symbol.asyncDispose](); + }, + }; +} + +export function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} diff --git a/src/trends.test.ts b/packages/ditto/trends.test.ts similarity index 77% rename from src/trends.test.ts rename to packages/ditto/trends.test.ts index 66cae23b..a99b4eb4 100644 --- a/src/trends.test.ts +++ b/packages/ditto/trends.test.ts @@ -1,8 +1,9 @@ import { assertEquals } from '@std/assert'; +import { genEvent } from '@nostrify/nostrify/test'; import { generateSecretKey, NostrEvent } from 'nostr-tools'; import { getTrendingTagValues } from '@/trends.ts'; -import { createTestDB, genEvent } from '@/test.ts'; +import { createTestDB } from '@/test.ts'; Deno.test("getTrendingTagValues(): 'e' tag and WITHOUT language parameter", async () => { await using db = await createTestDB(); @@ -16,9 +17,11 @@ Deno.test("getTrendingTagValues(): 'e' tag and WITHOUT language parameter", asyn const post1uses = numberOfAuthorsWhoLikedPost1 * post1multiplier; for (let i = 0; i < numberOfAuthorsWhoLikedPost1; i++) { const sk = generateSecretKey(); - events.push( - genEvent({ kind: 7, content: '+', tags: Array(post1multiplier).fill([...['e', post1.id]]) }, sk), - ); + for (let j = 0; j < post1multiplier; j++) { + events.push( + genEvent({ kind: 7, content: '+', tags: [['e', post1.id, `${j}`]] }, sk), + ); + } } events.push(post1); @@ -29,9 +32,11 @@ Deno.test("getTrendingTagValues(): 'e' tag and WITHOUT language parameter", asyn const post2uses = numberOfAuthorsWhoLikedPost2 * 
post2multiplier; for (let i = 0; i < numberOfAuthorsWhoLikedPost2; i++) { const sk = generateSecretKey(); - events.push( - genEvent({ kind: 7, content: '+', tags: Array(post2multiplier).fill([...['e', post2.id]]) }, sk), - ); + for (let j = 0; j < post2multiplier; j++) { + events.push( + genEvent({ kind: 7, content: '+', tags: [['e', post2.id, `${j}`]] }, sk), + ); + } } events.push(post2); @@ -62,9 +67,11 @@ Deno.test("getTrendingTagValues(): 'e' tag and WITH language parameter", async ( const post1uses = numberOfAuthorsWhoLikedPost1 * post1multiplier; for (let i = 0; i < numberOfAuthorsWhoLikedPost1; i++) { const sk = generateSecretKey(); - events.push( - genEvent({ kind: 7, content: '+', tags: Array(post1multiplier).fill([...['e', post1.id]]) }, sk), - ); + for (let j = 0; j < post1multiplier; j++) { + events.push( + genEvent({ kind: 7, content: '+', tags: [['e', post1.id, `${j}`]] }, sk), + ); + } } events.push(post1); @@ -74,9 +81,11 @@ Deno.test("getTrendingTagValues(): 'e' tag and WITH language parameter", async ( const post2multiplier = 1; for (let i = 0; i < numberOfAuthorsWhoLikedPost2; i++) { const sk = generateSecretKey(); - events.push( - genEvent({ kind: 7, content: '+', tags: Array(post2multiplier).fill([...['e', post2.id]]) }, sk), - ); + for (let j = 0; j < post2multiplier; j++) { + events.push( + genEvent({ kind: 7, content: '+', tags: [['e', post2.id, `${j}`]] }, sk), + ); + } } events.push(post2); @@ -85,12 +94,12 @@ Deno.test("getTrendingTagValues(): 'e' tag and WITH language parameter", async ( } await db.kysely.updateTable('nostr_events') - .set('language', 'pt') + .set('search_ext', { language: 'pt' }) .where('id', '=', post1.id) .execute(); await db.kysely.updateTable('nostr_events') - .set('language', 'en') + .set('search_ext', { language: 'en' }) .where('id', '=', post2.id) .execute(); diff --git a/src/trends.ts b/packages/ditto/trends.ts similarity index 62% rename from src/trends.ts rename to packages/ditto/trends.ts index 
aced6800..47afdb9a 100644 --- a/src/trends.ts +++ b/packages/ditto/trends.ts @@ -1,16 +1,12 @@ -import { NostrFilter } from '@nostrify/nostrify'; -import { Stickynotes } from '@soapbox/stickynotes'; +import { DittoConf } from '@ditto/conf'; +import { DittoDB, DittoTables } from '@ditto/db'; +import { NostrFilter, NStore } from '@nostrify/nostrify'; +import { logi } from '@soapbox/logi'; import { Kysely, sql } from 'kysely'; -import { Conf } from '@/config.ts'; -import { DittoTables } from '@/db/DittoTables.ts'; -import { handleEvent } from '@/pipeline.ts'; -import { AdminSigner } from '@/signers/AdminSigner.ts'; -import { Storages } from '@/storages.ts'; +import { errorJson } from '@/utils/log.ts'; import { Time } from '@/utils/time.ts'; -const console = new Stickynotes('ditto:trends'); - /** Get trending tag values for a given tag in the given time frame. */ export async function getTrendingTagValues( /** Kysely instance to execute queries on. */ @@ -65,8 +61,15 @@ export async function getTrendingTagValues( })); } +export interface TrendsCtx { + conf: DittoConf; + db: DittoDB; + relay: NStore; +} + /** Get trending tags and publish an event with them. */ export async function updateTrendingTags( + ctx: TrendsCtx, l: string, tagName: string, kinds: number[], @@ -75,8 +78,11 @@ export async function updateTrendingTags( aliases?: string[], values?: string[], ) { - console.info(`Updating trending ${l}...`); - const kysely = await Storages.kysely(); + const { conf, db, relay } = ctx; + const params = { l, tagName, kinds, limit, extra, aliases, values }; + + logi({ level: 'info', ns: 'ditto.trends', msg: 'Updating trending', ...params }); + const signal = AbortSignal.timeout(1000); const yesterday = Math.floor((Date.now() - Time.days(1)) / 1000); @@ -85,20 +91,21 @@ export async function updateTrendingTags( const tagNames = aliases ? 
[tagName, ...aliases] : [tagName]; try { - const trends = await getTrendingTagValues(kysely, tagNames, { + const trends = await getTrendingTagValues(db.kysely, tagNames, { kinds, since: yesterday, until: now, limit, }, values); - console.log(trends); - if (!trends.length) { - console.info(`No trending ${l} found. Skipping.`); + if (trends.length) { + logi({ level: 'info', ns: 'ditto.trends', msg: 'Trends found', trends, ...params }); + } else { + logi({ level: 'info', ns: 'ditto.trends', msg: 'No trends found. Skipping.', ...params }); return; } - const signer = new AdminSigner(); + const signer = conf.signer; const label = await signer.signEvent({ kind: 1985, @@ -111,57 +118,59 @@ export async function updateTrendingTags( created_at: Math.floor(Date.now() / 1000), }); - await handleEvent(label, signal); - console.info(`Trending ${l} updated.`); + await relay.event(label, { signal }); + logi({ level: 'info', ns: 'ditto.trends', msg: 'Trends updated', ...params }); } catch (e) { - console.error(`Error updating trending ${l}: ${e instanceof Error ? e.message : e}`); + logi({ level: 'error', ns: 'ditto.trends', msg: 'Error updating trends', ...params, error: errorJson(e) }); } } /** Update trending pubkeys. */ -export function updateTrendingPubkeys(): Promise { - return updateTrendingTags('#p', 'p', [1, 3, 6, 7, 9735], 40, Conf.relay); +export function updateTrendingPubkeys(ctx: TrendsCtx): Promise { + return updateTrendingTags(ctx, '#p', 'p', [1, 3, 6, 7, 9735], 40, ctx.conf.relay); } /** Update trending zapped events. */ -export function updateTrendingZappedEvents(): Promise { - return updateTrendingTags('zapped', 'e', [9735], 40, Conf.relay, ['q']); +export function updateTrendingZappedEvents(ctx: TrendsCtx): Promise { + return updateTrendingTags(ctx, 'zapped', 'e', [9735], 40, ctx.conf.relay, ['q']); } /** Update trending events. 
*/ -export async function updateTrendingEvents(): Promise { +export async function updateTrendingEvents(ctx: TrendsCtx): Promise { + const { conf, db } = ctx; + const results: Promise[] = [ - updateTrendingTags('#e', 'e', [1, 6, 7, 9735], 40, Conf.relay, ['q']), + updateTrendingTags(ctx, '#e', 'e', [1, 6, 7, 9735], 40, ctx.conf.relay, ['q']), ]; - const kysely = await Storages.kysely(); - - for (const language of Conf.preferredLanguages ?? []) { + for (const language of conf.preferredLanguages ?? []) { const yesterday = Math.floor((Date.now() - Time.days(1)) / 1000); const now = Math.floor(Date.now() / 1000); - const rows = await kysely + const rows = await db.kysely .selectFrom('nostr_events') .select('nostr_events.id') - .where('nostr_events.language', '=', language) + .where(sql`nostr_events.search_ext->>'language'`, '=', language) .where('nostr_events.created_at', '>=', yesterday) .where('nostr_events.created_at', '<=', now) .execute(); const ids = rows.map((row) => row.id); - results.push(updateTrendingTags(`#e.${language}`, 'e', [1, 6, 7, 9735], 40, Conf.relay, ['q'], ids)); + results.push( + updateTrendingTags(ctx, `#e.${language}`, 'e', [1, 6, 7, 9735], 40, conf.relay, ['q'], ids), + ); } await Promise.allSettled(results); } /** Update trending hashtags. */ -export function updateTrendingHashtags(): Promise { - return updateTrendingTags('#t', 't', [1], 20); +export function updateTrendingHashtags(ctx: TrendsCtx): Promise { + return updateTrendingTags(ctx, '#t', 't', [1], 20); } /** Update trending links. 
*/ -export function updateTrendingLinks(): Promise { - return updateTrendingTags('#r', 'r', [1], 20); +export function updateTrendingLinks(ctx: TrendsCtx): Promise { + return updateTrendingTags(ctx, '#r', 'r', [1], 20); } diff --git a/src/types/MastodonPush.ts b/packages/ditto/types/MastodonPush.ts similarity index 100% rename from src/types/MastodonPush.ts rename to packages/ditto/types/MastodonPush.ts diff --git a/src/types/webmanifest.ts b/packages/ditto/types/webmanifest.ts similarity index 100% rename from src/types/webmanifest.ts rename to packages/ditto/types/webmanifest.ts diff --git a/src/utils.ts b/packages/ditto/utils.ts similarity index 100% rename from src/utils.ts rename to packages/ditto/utils.ts diff --git a/src/utils/PleromaConfigDB.test.ts b/packages/ditto/utils/PleromaConfigDB.test.ts similarity index 100% rename from src/utils/PleromaConfigDB.test.ts rename to packages/ditto/utils/PleromaConfigDB.test.ts diff --git a/src/utils/PleromaConfigDB.ts b/packages/ditto/utils/PleromaConfigDB.ts similarity index 100% rename from src/utils/PleromaConfigDB.ts rename to packages/ditto/utils/PleromaConfigDB.ts diff --git a/src/utils/SimpleLRU.test.ts b/packages/ditto/utils/SimpleLRU.test.ts similarity index 93% rename from src/utils/SimpleLRU.test.ts rename to packages/ditto/utils/SimpleLRU.test.ts index a73e4f36..03fbfe8a 100644 --- a/src/utils/SimpleLRU.test.ts +++ b/packages/ditto/utils/SimpleLRU.test.ts @@ -4,7 +4,7 @@ import { assertEquals, assertRejects } from '@std/assert'; Deno.test("SimpleLRU doesn't repeat failed calls", async () => { let calls = 0; - const cache = new SimpleLRU( + using cache = new SimpleLRU( // deno-lint-ignore require-await async () => { calls++; diff --git a/packages/ditto/utils/SimpleLRU.ts b/packages/ditto/utils/SimpleLRU.ts new file mode 100644 index 00000000..4d8780b7 --- /dev/null +++ b/packages/ditto/utils/SimpleLRU.ts @@ -0,0 +1,57 @@ +// deno-lint-ignore-file ban-types + +import { LRUCache } from 'lru-cache'; +import { 
type Gauge } from 'prom-client'; + +type FetchFn = (key: K, opts: { signal?: AbortSignal }) => Promise; + +type SimpleLRUOpts = LRUCache.Options & { + gauge?: Gauge; + errorRefresh?: number; +}; + +export class SimpleLRU< + K extends {}, + V extends {}, +> { + protected cache: LRUCache, void>; + private tids = new Set(); + + constructor(private fetchFn: FetchFn, private opts: SimpleLRUOpts>) { + this.cache = new LRUCache({ ...opts }); + } + + async fetch(key: K, opts?: { signal?: AbortSignal }): Promise { + if (opts?.signal?.aborted) { + throw new DOMException('The signal has been aborted', 'AbortError'); + } + + const cached = await this.cache.get(key); + + if (cached) { + return cached; + } + + const promise = this.fetchFn(key, { signal: opts?.signal }); + + this.cache.set(key, promise); + + promise.then(() => { + this.opts.gauge?.set(this.cache.size); + }).catch(() => { + const tid = setTimeout(() => { + this.cache.delete(key); + this.tids.delete(tid); + }, this.opts.errorRefresh ?? 10_000); + this.tids.add(tid); + }); + + return promise; + } + + [Symbol.dispose](): void { + for (const tid of this.tids) { + clearTimeout(tid); + } + } +} diff --git a/src/utils/abort.ts b/packages/ditto/utils/abort.ts similarity index 100% rename from src/utils/abort.ts rename to packages/ditto/utils/abort.ts diff --git a/src/utils/aes.bench.ts b/packages/ditto/utils/aes.bench.ts similarity index 100% rename from src/utils/aes.bench.ts rename to packages/ditto/utils/aes.bench.ts diff --git a/src/utils/aes.test.ts b/packages/ditto/utils/aes.test.ts similarity index 100% rename from src/utils/aes.test.ts rename to packages/ditto/utils/aes.test.ts diff --git a/src/utils/aes.ts b/packages/ditto/utils/aes.ts similarity index 100% rename from src/utils/aes.ts rename to packages/ditto/utils/aes.ts diff --git a/packages/ditto/utils/api.ts b/packages/ditto/utils/api.ts new file mode 100644 index 00000000..b5d4fc3b --- /dev/null +++ b/packages/ditto/utils/api.ts @@ -0,0 +1,201 @@ +import { 
User } from '@ditto/mastoapi/middleware'; +import { DittoEnv } from '@ditto/mastoapi/router'; +import { HTTPException } from '@hono/hono/http-exception'; +import { NostrEvent, NostrFilter } from '@nostrify/nostrify'; +import { EventTemplate } from 'nostr-tools'; +import * as TypeFest from 'type-fest'; + +import { type AppContext } from '@/app.ts'; +import { nostrNow } from '@/utils.ts'; +import { parseFormData } from '@/utils/formdata.ts'; +import { Context } from '@hono/hono'; + +/** EventTemplate with defaults. */ +type EventStub = TypeFest.SetOptional; + +/** Publish an event through the pipeline. */ +async function createEvent( + t: EventStub, + c: Context, +): Promise { + const { user, relay, signal } = c.var; + + if (!user) { + throw new HTTPException(401, { + res: c.json({ error: 'No way to sign Nostr event' }, 401), + }); + } + + const event = await user.signer.signEvent({ + content: '', + created_at: nostrNow(), + tags: [], + ...t, + }); + + await relay.event(event, { signal, publish: true }); + return event; +} + +/** Filter for fetching an existing event to update. */ +interface UpdateEventFilter extends NostrFilter { + kinds: [number]; + limit: 1; +} + +/** Update a replaceable event, or throw if no event exists yet. */ +async function updateEvent( + filter: UpdateEventFilter, + fn: (prev: NostrEvent) => E | Promise, + c: AppContext, +): Promise { + const { relay } = c.var; + + const [prev] = await relay.query( + [filter], + { signal: c.req.raw.signal }, + ); + + if (prev) { + return createEvent(await fn(prev), c); + } else { + throw new HTTPException(422, { + message: 'No event to update', + }); + } +} + +/** Update a replaceable list event, or throw if no event exists yet. 
*/ +function updateListEvent( + filter: UpdateEventFilter, + fn: (tags: string[][]) => string[][], + c: AppContext, +): Promise { + return updateEvent(filter, ({ content, tags }) => ({ + kind: filter.kinds[0], + content, + tags: fn(tags), + }), c); +} + +/** Publish an admin event through the pipeline. */ +async function createAdminEvent(t: EventStub, c: AppContext): Promise { + const { conf, relay, signal } = c.var; + + const event = await conf.signer.signEvent({ + content: '', + created_at: nostrNow(), + tags: [], + ...t, + }); + + // @ts-ignore `publish` is important for `DittoAPIStore`. + await relay.event(event, { signal, publish: true }); + return event; +} + +/** Fetch existing event, update its tags, then publish the new admin event. */ +function updateListAdminEvent( + filter: UpdateEventFilter, + fn: (tags: string[][]) => string[][], + c: AppContext, +): Promise { + return updateAdminEvent(filter, (prev) => ({ + kind: filter.kinds[0], + content: prev?.content ?? '', + tags: fn(prev?.tags ?? []), + }), c); +} + +/** Fetch existing event, update it, then publish the new admin event. 
*/ +async function updateAdminEvent( + filter: UpdateEventFilter, + fn: (prev: NostrEvent | undefined) => E, + c: AppContext, +): Promise { + const { relay, signal } = c.var; + const [prev] = await relay.query([filter], { signal }); + return createAdminEvent(fn(prev), c); +} + +function updateUser(pubkey: string, n: Record, c: AppContext): Promise { + return updateNames(30382, pubkey, n, c); +} + +function updateEventInfo(id: string, n: Record, c: AppContext): Promise { + return updateNames(30383, id, n, c); +} + +async function updateNames(k: number, d: string, n: Record, c: AppContext): Promise { + const { conf } = c.var; + const admin = await conf.signer.getPublicKey(); + + return updateAdminEvent( + { kinds: [k], authors: [admin], '#d': [d], limit: 1 }, + (prev) => { + const prevNames = prev?.tags.reduce((acc, [name, value]) => { + if (name === 'n') acc[value] = true; + return acc; + }, {} as Record); + + const names = { ...prevNames, ...n }; + const nTags = Object.entries(names).filter(([, value]) => value).map(([name]) => ['n', name]); + const other = prev?.tags.filter(([name]) => !['d', 'n'].includes(name)) ?? []; + + return { + kind: k, + content: prev?.content ?? '', + tags: [ + ['d', d], + ...nTags, + ...other, + ], + }; + }, + c, + ); +} + +/** Parse request body to JSON, depending on the content-type of the request. */ +async function parseBody(req: Request): Promise { + switch (req.headers.get('content-type')?.split(';')[0]) { + case 'multipart/form-data': + case 'application/x-www-form-urlencoded': + try { + return parseFormData(await req.formData()); + } catch { + throw new HTTPException(400, { message: 'Invalid form data' }); + } + case 'application/json': + return req.json(); + } +} + +type HeaderRecord = Record; + +/** Actors with Bluesky's `!no-unauthenticated` self-label should require authorization to view. 
*/ +function assertAuthenticated(c: AppContext, author: NostrEvent): void { + if ( + !c.var.user && author.tags.some(([name, value, ns]) => + name === 'l' && + value === '!no-unauthenticated' && + ns === 'com.atproto.label.defs#selfLabel' + ) + ) { + throw new HTTPException(401, { message: 'Sign-in required.' }); + } +} + +export { + assertAuthenticated, + createAdminEvent, + createEvent, + type EventStub, + parseBody, + updateAdminEvent, + updateEvent, + updateEventInfo, + updateListAdminEvent, + updateListEvent, + updateUser, +}; diff --git a/src/utils/auth.bench.ts b/packages/ditto/utils/auth.bench.ts similarity index 100% rename from src/utils/auth.bench.ts rename to packages/ditto/utils/auth.bench.ts diff --git a/src/utils/auth.test.ts b/packages/ditto/utils/auth.test.ts similarity index 100% rename from src/utils/auth.test.ts rename to packages/ditto/utils/auth.test.ts diff --git a/src/utils/auth.ts b/packages/ditto/utils/auth.ts similarity index 100% rename from src/utils/auth.ts rename to packages/ditto/utils/auth.ts diff --git a/src/utils/bolt11.test.ts b/packages/ditto/utils/bolt11.test.ts similarity index 100% rename from src/utils/bolt11.test.ts rename to packages/ditto/utils/bolt11.test.ts diff --git a/src/utils/bolt11.ts b/packages/ditto/utils/bolt11.ts similarity index 100% rename from src/utils/bolt11.ts rename to packages/ditto/utils/bolt11.ts diff --git a/packages/ditto/utils/favicon.ts b/packages/ditto/utils/favicon.ts new file mode 100644 index 00000000..448dfe0d --- /dev/null +++ b/packages/ditto/utils/favicon.ts @@ -0,0 +1,79 @@ +import { DOMParser } from '@b-fuze/deno-dom'; +import { DittoTables } from '@ditto/db'; +import { logi } from '@soapbox/logi'; +import { safeFetch } from '@soapbox/safe-fetch'; +import { Kysely } from 'kysely'; +import tldts from 'tldts'; + +import { nostrNow } from '@/utils.ts'; + +export async function queryFavicon( + kysely: Kysely, + domain: string, +): Promise { + return await kysely + 
.selectFrom('domain_favicons') + .selectAll() + .where('domain', '=', domain) + .executeTakeFirst(); +} + +export async function insertFavicon(kysely: Kysely, domain: string, favicon: string): Promise { + await kysely + .insertInto('domain_favicons') + .values({ domain, favicon, last_updated_at: nostrNow() }) + .onConflict((oc) => oc.column('domain').doUpdateSet({ favicon, last_updated_at: nostrNow() })) + .execute(); +} + +export async function fetchFavicon(domain: string, signal?: AbortSignal): Promise { + logi({ level: 'info', ns: 'ditto.favicon', domain, state: 'started' }); + const tld = tldts.parse(domain); + + if (!tld.isIcann || tld.isIp || tld.isPrivate) { + throw new Error(`Invalid favicon domain: ${domain}`); + } + + const rootUrl = new URL('/', `https://${domain}/`); + const response = await safeFetch(rootUrl, { signal }); + const html = await response.text(); + + const doc = new DOMParser().parseFromString(html, 'text/html'); + const link = doc.querySelector('link[rel="icon"], link[rel="shortcut icon"]'); + + if (link) { + const href = link.getAttribute('href'); + if (href) { + let url: URL | undefined; + + try { + url = new URL(href); + } catch { + try { + url = new URL(href, rootUrl); + } catch { + // fall through + } + } + + if (url) { + logi({ level: 'info', ns: 'ditto.favicon', domain, state: 'found', url }); + return url; + } + } + } + + // Fallback to checking `/favicon.ico` of the domain. 
+ const url = new URL('/favicon.ico', `https://${domain}/`); + const fallback = await safeFetch(url, { method: 'HEAD', signal }); + const contentType = fallback.headers.get('content-type'); + + if (fallback.ok && ['image/vnd.microsoft.icon', 'image/x-icon'].includes(contentType!)) { + logi({ level: 'info', ns: 'ditto.favicon', domain, state: 'found', url }); + return url; + } + + logi({ level: 'info', ns: 'ditto.favicon', domain, state: 'failed' }); + + throw new Error(`Favicon not found: ${domain}`); +} diff --git a/src/utils/formdata.test.ts b/packages/ditto/utils/formdata.test.ts similarity index 100% rename from src/utils/formdata.test.ts rename to packages/ditto/utils/formdata.test.ts diff --git a/src/utils/formdata.ts b/packages/ditto/utils/formdata.ts similarity index 94% rename from src/utils/formdata.ts rename to packages/ditto/utils/formdata.ts index 6d5d997b..47fffa04 100644 --- a/src/utils/formdata.ts +++ b/packages/ditto/utils/formdata.ts @@ -16,10 +16,12 @@ export function parseFormData(formData: FormData): unknown { /** Deeply sets a value in an object based on a Rails-style nested key. */ function deepSet( /** The target object to modify. */ + // deno-lint-ignore no-explicit-any target: Record, /** The Rails-style key (e.g., "fields_attributes[0][name]"). */ key: string, /** The value to set. 
*/ + // deno-lint-ignore no-explicit-any value: any, ): void { const keys = key.match(/[^[\]]+/g); // Extract keys like ["fields_attributes", "0", "name"] diff --git a/src/utils/html.ts b/packages/ditto/utils/html.ts similarity index 100% rename from src/utils/html.ts rename to packages/ditto/utils/html.ts diff --git a/src/utils/instance.ts b/packages/ditto/utils/instance.ts similarity index 94% rename from src/utils/instance.ts rename to packages/ditto/utils/instance.ts index c0b9c0d4..3f746e07 100644 --- a/src/utils/instance.ts +++ b/packages/ditto/utils/instance.ts @@ -18,7 +18,7 @@ export interface InstanceMetadata extends NostrMetadata { /** Get and parse instance metadata from the kind 0 of the admin user. */ export async function getInstanceMetadata(store: NStore, signal?: AbortSignal): Promise { const [event] = await store.query( - [{ kinds: [0], authors: [Conf.pubkey], limit: 1 }], + [{ kinds: [0], authors: [await Conf.signer.getPublicKey()], limit: 1 }], { signal }, ); diff --git a/src/utils/lnurl.ts b/packages/ditto/utils/lnurl.ts similarity index 70% rename from src/utils/lnurl.ts rename to packages/ditto/utils/lnurl.ts index 1dd99769..ad2fefa6 100644 --- a/src/utils/lnurl.ts +++ b/packages/ditto/utils/lnurl.ts @@ -1,23 +1,23 @@ -import { LNURL, LNURLDetails } from '@nostrify/nostrify/ln'; -import { Stickynotes } from '@soapbox/stickynotes'; - -import { cachedLnurlsSizeGauge } from '@/metrics.ts'; -import { SimpleLRU } from '@/utils/SimpleLRU.ts'; -import { Time } from '@/utils/time.ts'; -import { fetchWorker } from '@/workers/fetch.ts'; +import { cachedLnurlsSizeGauge } from '@ditto/metrics'; import { NostrEvent } from '@nostrify/nostrify'; +import { LNURL, LNURLDetails } from '@nostrify/nostrify/ln'; +import { logi } from '@soapbox/logi'; +import { safeFetch } from '@soapbox/safe-fetch'; +import { JsonValue } from '@std/json'; -const console = new Stickynotes('ditto:lnurl'); +import { SimpleLRU } from '@/utils/SimpleLRU.ts'; +import { errorJson } from 
'@/utils/log.ts'; +import { Time } from '@/utils/time.ts'; const lnurlCache = new SimpleLRU( async (lnurl, { signal }) => { - console.debug(`Lookup ${lnurl}`); + logi({ level: 'info', ns: 'ditto.lnurl', lnurl, state: 'started' }); try { - const result = await LNURL.lookup(lnurl, { fetch: fetchWorker, signal }); - console.debug(`Found: ${lnurl}`); - return result; + const details = await LNURL.lookup(lnurl, { fetch: safeFetch, signal }); + logi({ level: 'info', ns: 'ditto.lnurl', lnurl, state: 'found', details: details as unknown as JsonValue }); + return details; } catch (e) { - console.debug(`Not found: ${lnurl}`); + logi({ level: 'info', ns: 'ditto.lnurl', lnurl, state: 'failed', error: errorJson(e) }); throw e; } }, @@ -62,7 +62,7 @@ async function getInvoice(params: CallbackParams, signal?: AbortSignal): Promise const { pr } = await LNURL.callback( details.callback, params, - { fetch: fetchWorker, signal }, + { fetch: safeFetch, signal }, ); return pr; diff --git a/packages/ditto/utils/log.ts b/packages/ditto/utils/log.ts new file mode 100644 index 00000000..28fcbf0d --- /dev/null +++ b/packages/ditto/utils/log.ts @@ -0,0 +1,8 @@ +/** Serialize an error into JSON for JSON logging. 
*/ +export function errorJson(error: unknown): Error | null { + if (error instanceof Error) { + return error; + } else { + return null; + } +} diff --git a/src/utils/lookup.test.ts b/packages/ditto/utils/lookup.test.ts similarity index 100% rename from src/utils/lookup.test.ts rename to packages/ditto/utils/lookup.test.ts diff --git a/src/utils/lookup.ts b/packages/ditto/utils/lookup.ts similarity index 80% rename from src/utils/lookup.ts rename to packages/ditto/utils/lookup.ts index 48c6ba81..e0f10a0e 100644 --- a/src/utils/lookup.ts +++ b/packages/ditto/utils/lookup.ts @@ -1,38 +1,44 @@ -import { NostrEvent, NSchema as n } from '@nostrify/nostrify'; +import { NostrEvent, NSchema as n, NStore } from '@nostrify/nostrify'; import { nip19 } from 'nostr-tools'; import { match } from 'path-to-regexp'; import tldts from 'tldts'; import { getAuthor } from '@/queries.ts'; import { bech32ToPubkey } from '@/utils.ts'; -import { nip05Cache } from '@/utils/nip05.ts'; -import { Stickynotes } from '@soapbox/stickynotes'; +import { lookupNip05 } from '@/utils/nip05.ts'; + +import type { DittoConf } from '@ditto/conf'; +import type { DittoDB } from '@ditto/db'; + +interface LookupAccountOpts { + db: DittoDB; + conf: DittoConf; + relay: NStore; + signal?: AbortSignal; +} /** Resolve a bech32 or NIP-05 identifier to an account. */ export async function lookupAccount( value: string, - signal = AbortSignal.timeout(3000), + opts: LookupAccountOpts, ): Promise { - const pubkey = await lookupPubkey(value, signal); + const pubkey = await lookupPubkey(value, opts); if (pubkey) { - return getAuthor(pubkey); + return getAuthor(pubkey, opts); } } /** Resolve a bech32 or NIP-05 identifier to a pubkey. 
*/ -export async function lookupPubkey(value: string, signal?: AbortSignal): Promise { - const console = new Stickynotes('ditto:lookup'); - +export async function lookupPubkey(value: string, opts: LookupAccountOpts): Promise { if (n.bech32().safeParse(value).success) { return bech32ToPubkey(value); } try { - const { pubkey } = await nip05Cache.fetch(value, { signal }); + const { pubkey } = await lookupNip05(value, opts); return pubkey; - } catch (e) { - console.debug(e); + } catch { return; } } diff --git a/src/utils/media.test.ts b/packages/ditto/utils/media.test.ts similarity index 100% rename from src/utils/media.test.ts rename to packages/ditto/utils/media.test.ts diff --git a/src/utils/media.ts b/packages/ditto/utils/media.ts similarity index 100% rename from src/utils/media.ts rename to packages/ditto/utils/media.ts diff --git a/packages/ditto/utils/nip05.ts b/packages/ditto/utils/nip05.ts new file mode 100644 index 00000000..83ddc863 --- /dev/null +++ b/packages/ditto/utils/nip05.ts @@ -0,0 +1,69 @@ +import { DittoConf } from '@ditto/conf'; +import { NIP05, NStore } from '@nostrify/nostrify'; +import { logi } from '@soapbox/logi'; +import { safeFetch } from '@soapbox/safe-fetch'; +import { nip19 } from 'nostr-tools'; +import tldts from 'tldts'; + +import { errorJson } from '@/utils/log.ts'; + +interface GetNip05Opts { + conf: DittoConf; + relay: NStore; + signal?: AbortSignal; + fetch?: typeof fetch; +} + +export async function lookupNip05(nip05: string, opts: GetNip05Opts): Promise { + const { conf, signal } = opts; + const tld = tldts.parse(nip05); + + if (!tld.isIcann || tld.isIp || tld.isPrivate) { + throw new Error(`Invalid NIP-05: ${nip05}`); + } + + logi({ level: 'info', ns: 'ditto.nip05', nip05, state: 'started' }); + + const [name, domain] = nip05.split('@'); + + try { + if (domain === conf.url.host) { + const pointer = await localNip05Lookup(name, opts); + if (pointer) { + logi({ level: 'info', ns: 'ditto.nip05', nip05, state: 'found', source: 
'local', pubkey: pointer.pubkey }); + return pointer; + } else { + throw new Error(`Not found: ${nip05}`); + } + } else { + const pointer = await NIP05.lookup(nip05, { fetch: opts.fetch ?? safeFetch, signal }); + logi({ level: 'info', ns: 'ditto.nip05', nip05, state: 'found', source: 'fetch', pubkey: pointer.pubkey }); + return pointer; + } + } catch (e) { + logi({ level: 'info', ns: 'ditto.nip05', nip05, state: 'failed', error: errorJson(e) }); + throw e; + } +} + +export async function localNip05Lookup( + localpart: string, + opts: GetNip05Opts, +): Promise { + const { conf, relay, signal } = opts; + + const name = `${localpart}@${conf.url.host}`; + + const [grant] = await relay.query([{ + kinds: [30360], + '#d': [name, name.toLowerCase()], + authors: [await conf.signer.getPublicKey()], + limit: 1, + }], { signal }); + + const pubkey = grant?.tags.find(([name]) => name === 'p')?.[1]; + + if (pubkey) { + return { pubkey, relays: [conf.relay] }; + } +} diff --git a/src/utils/note.test.ts b/packages/ditto/utils/note.test.ts similarity index 100% rename from src/utils/note.test.ts rename to packages/ditto/utils/note.test.ts diff --git a/src/utils/note.ts b/packages/ditto/utils/note.ts similarity index 96% rename from src/utils/note.ts rename to packages/ditto/utils/note.ts index bae371ff..45fcf94a 100644 --- a/src/utils/note.ts +++ b/packages/ditto/utils/note.ts @@ -22,7 +22,7 @@ interface ParsedNoteContent { /** Convert Nostr content to Mastodon API HTML. Also return parsed data. 
*/ function parseNoteContent(content: string, mentions: MastodonMention[]): ParsedNoteContent { - const links = linkify.find(content).filter(isLinkURL); + const links = linkify.find(content).filter(({ type }) => type === 'url'); const firstUrl = links.find(isNonMediaLink)?.href; const result = linkifyStr(content, { @@ -123,11 +123,6 @@ function isNonMediaLink({ href }: Link): boolean { return /^https?:\/\//.test(href) && !getUrlMediaType(href); } -/** Ensures the Link is a URL so it can be parsed. */ -function isLinkURL(link: Link): boolean { - return link.type === 'url'; -} - /** Get pubkey from decoded bech32 entity, or undefined if not applicable. */ function getDecodedPubkey(decoded: nip19.DecodeResult): string | undefined { switch (decoded.type) { diff --git a/src/utils/og-metadata.ts b/packages/ditto/utils/og-metadata.ts similarity index 100% rename from src/utils/og-metadata.ts rename to packages/ditto/utils/og-metadata.ts diff --git a/src/utils/pleroma.ts b/packages/ditto/utils/pleroma.ts similarity index 80% rename from src/utils/pleroma.ts rename to packages/ditto/utils/pleroma.ts index 05c35b7c..db3ca6a1 100644 --- a/src/utils/pleroma.ts +++ b/packages/ditto/utils/pleroma.ts @@ -2,11 +2,11 @@ import { NSchema as n, NStore } from '@nostrify/nostrify'; import { Conf } from '@/config.ts'; import { configSchema } from '@/schemas/pleroma-api.ts'; -import { AdminSigner } from '@/signers/AdminSigner.ts'; import { PleromaConfigDB } from '@/utils/PleromaConfigDB.ts'; export async function getPleromaConfigs(store: NStore, signal?: AbortSignal): Promise { - const { pubkey } = Conf; + const signer = Conf.signer; + const pubkey = await signer.getPublicKey(); const [event] = await store.query([{ kinds: [30078], @@ -20,7 +20,7 @@ export async function getPleromaConfigs(store: NStore, signal?: AbortSignal): Pr } try { - const decrypted = await new AdminSigner().nip44.decrypt(Conf.pubkey, event.content); + const decrypted = await signer.nip44.decrypt(pubkey, 
event.content); const configs = n.json().pipe(configSchema.array()).catch([]).parse(decrypted); return new PleromaConfigDB(configs); } catch (_e) { diff --git a/src/utils/purify.ts b/packages/ditto/utils/purify.ts similarity index 100% rename from src/utils/purify.ts rename to packages/ditto/utils/purify.ts diff --git a/packages/ditto/utils/search.test.ts b/packages/ditto/utils/search.test.ts new file mode 100644 index 00000000..d3c92011 --- /dev/null +++ b/packages/ditto/utils/search.test.ts @@ -0,0 +1,55 @@ +import { assertEquals } from '@std/assert'; +import { sql } from 'kysely'; + +import { createTestDB } from '@/test.ts'; +import { getPubkeysBySearch } from '@/utils/search.ts'; + +Deno.test('fuzzy search works', async () => { + await using db = await createTestDB(); + + await db.kysely.insertInto('author_stats').values({ + pubkey: '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4', + search: 'patrickReiis patrickdosreis.com', + notes_count: 0, + followers_count: 0, + following_count: 0, + }).execute(); + + await sql`REFRESH MATERIALIZED VIEW top_authors`.execute(db.kysely); + + assertEquals( + await getPubkeysBySearch(db.kysely, { q: 'pat rick', limit: 1, offset: 0, following: new Set() }), + new Set(), + ); + assertEquals( + await getPubkeysBySearch(db.kysely, { q: 'patrick dosreis', limit: 1, offset: 0, following: new Set() }), + new Set([ + '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4', + ]), + ); + assertEquals( + await getPubkeysBySearch(db.kysely, { q: 'dosreis.com', limit: 1, offset: 0, following: new Set() }), + new Set([ + '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4', + ]), + ); +}); + +Deno.test('fuzzy search works with offset', async () => { + await using db = await createTestDB(); + + await db.kysely.insertInto('author_stats').values({ + pubkey: '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4', + search: 'abdcef patrickReiis patrickdosreis.com', + notes_count: 0, + 
followers_count: 0, + following_count: 0, + }).execute(); + + await sql`REFRESH MATERIALIZED VIEW top_authors`.execute(db.kysely); + + assertEquals( + await getPubkeysBySearch(db.kysely, { q: 'dosreis.com', limit: 1, offset: 1, following: new Set() }), + new Set(), + ); +}); diff --git a/packages/ditto/utils/search.ts b/packages/ditto/utils/search.ts new file mode 100644 index 00000000..7c6584c6 --- /dev/null +++ b/packages/ditto/utils/search.ts @@ -0,0 +1,37 @@ +import { DittoTables } from '@ditto/db'; +import { Kysely, sql } from 'kysely'; + +/** Get pubkeys whose name and NIP-05 is similar to 'q' */ +export async function getPubkeysBySearch( + kysely: Kysely, + opts: { q: string; limit: number; offset: number; following: Set }, +): Promise> { + const { q, limit, following, offset } = opts; + + const pubkeys = new Set(); + + const query = kysely + .selectFrom('top_authors') + .select('pubkey') + .where('search', sql`%>`, q) + .limit(limit) + .offset(offset); + + if (following.size) { + const authorsQuery = query.where('pubkey', 'in', [...following]); + + for (const { pubkey } of await authorsQuery.execute()) { + pubkeys.add(pubkey); + } + } + + if (pubkeys.size >= limit) { + return pubkeys; + } + + for (const { pubkey } of await query.limit(limit - pubkeys.size).execute()) { + pubkeys.add(pubkey); + } + + return pubkeys; +} diff --git a/packages/ditto/utils/stats.test.ts b/packages/ditto/utils/stats.test.ts new file mode 100644 index 00000000..043e6f13 --- /dev/null +++ b/packages/ditto/utils/stats.test.ts @@ -0,0 +1,220 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; +import { NPostgres } from '@nostrify/db'; +import { genEvent } from '@nostrify/nostrify/test'; +import { assertEquals } from '@std/assert'; +import { sql } from 'kysely'; +import { generateSecretKey, getPublicKey } from 'nostr-tools'; + +import { countAuthorStats, getAuthorStats, getEventStats, getFollowDiff, updateStats } from '@/utils/stats.ts'; + 
+Deno.test('updateStats with kind 1 increments notes count', async () => { + await using test = await setupTest(); + + const sk = generateSecretKey(); + const pubkey = getPublicKey(sk); + + await updateStats({ ...test, event: genEvent({ kind: 1 }, sk) }); + + const stats = await getAuthorStats(test.kysely, pubkey); + + assertEquals(stats!.notes_count, 1); +}); + +Deno.test('updateStats with kind 1 increments replies count', async () => { + await using test = await setupTest(); + const { relay, kysely } = test; + + const sk = generateSecretKey(); + + const note = genEvent({ kind: 1 }, sk); + await updateStats({ ...test, event: note }); + await relay.event(note); + + const reply = genEvent({ kind: 1, tags: [['e', note.id]] }, sk); + await updateStats({ ...test, event: reply }); + await relay.event(reply); + + const stats = await getEventStats(kysely, note.id); + + assertEquals(stats!.replies_count, 1); +}); + +Deno.test('updateStats with kind 5 decrements notes count', async () => { + await using test = await setupTest(); + const { relay, kysely } = test; + + const sk = generateSecretKey(); + const pubkey = getPublicKey(sk); + + const create = genEvent({ kind: 1 }, sk); + const remove = genEvent({ kind: 5, tags: [['e', create.id]] }, sk); + + await updateStats({ ...test, event: create }); + assertEquals((await getAuthorStats(kysely, pubkey))!.notes_count, 1); + await relay.event(create); + + await updateStats({ ...test, event: remove }); + assertEquals((await getAuthorStats(kysely, pubkey))!.notes_count, 0); + await relay.event(remove); +}); + +Deno.test('updateStats with kind 3 increments followers count', async () => { + await using test = await setupTest(); + const { kysely } = test; + + await updateStats({ ...test, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) }); + await updateStats({ ...test, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) }); + await updateStats({ ...test, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) }); + + const stats = 
await getAuthorStats(kysely, 'alex'); + + assertEquals(stats!.followers_count, 3); +}); + +Deno.test('updateStats with kind 3 decrements followers count', async () => { + await using test = await setupTest(); + const { relay, kysely } = test; + + const sk = generateSecretKey(); + const follow = genEvent({ kind: 3, tags: [['p', 'alex']], created_at: 0 }, sk); + const remove = genEvent({ kind: 3, tags: [], created_at: 1 }, sk); + + await updateStats({ ...test, event: follow }); + assertEquals((await getAuthorStats(kysely, 'alex'))!.followers_count, 1); + await relay.event(follow); + + await updateStats({ ...test, event: remove }); + assertEquals((await getAuthorStats(kysely, 'alex'))!.followers_count, 0); + await relay.event(remove); +}); + +Deno.test('getFollowDiff returns added and removed followers', () => { + const prev = genEvent({ tags: [['p', 'alex'], ['p', 'bob']] }); + const next = genEvent({ tags: [['p', 'alex'], ['p', 'carol']] }); + + const { added, removed } = getFollowDiff(next.tags, prev.tags); + + assertEquals(added, new Set(['carol'])); + assertEquals(removed, new Set(['bob'])); +}); + +Deno.test('updateStats with kind 6 increments reposts count', async () => { + await using test = await setupTest(); + const { relay, kysely } = test; + + const note = genEvent({ kind: 1 }); + await updateStats({ ...test, event: note }); + await relay.event(note); + + const repost = genEvent({ kind: 6, tags: [['e', note.id]] }); + await updateStats({ ...test, event: repost }); + await relay.event(repost); + + const stats = await getEventStats(kysely, note.id); + + assertEquals(stats!.reposts_count, 1); +}); + +Deno.test('updateStats with kind 5 decrements reposts count', async () => { + await using test = await setupTest(); + const { relay, kysely } = test; + + const note = genEvent({ kind: 1 }); + await updateStats({ ...test, event: note }); + await relay.event(note); + + const sk = generateSecretKey(); + const repost = genEvent({ kind: 6, tags: [['e', note.id]] }, 
sk); + await updateStats({ ...test, event: repost }); + await relay.event(repost); + + await updateStats({ ...test, event: genEvent({ kind: 5, tags: [['e', repost.id]] }, sk) }); + + const stats = await getEventStats(kysely, note.id); + + assertEquals(stats!.reposts_count, 0); +}); + +Deno.test('updateStats with kind 7 increments reactions count', async () => { + await using test = await setupTest(); + const { relay, kysely } = test; + + const note = genEvent({ kind: 1 }); + await updateStats({ ...test, event: note }); + await relay.event(note); + + await updateStats({ ...test, event: genEvent({ kind: 7, content: '+', tags: [['e', note.id]] }) }); + await updateStats({ ...test, event: genEvent({ kind: 7, content: '😂', tags: [['e', note.id]] }) }); + + const stats = await getEventStats(kysely, note.id); + + assertEquals(stats!.reactions, JSON.stringify({ '+': 1, '😂': 1 })); + assertEquals(stats!.reactions_count, 2); +}); + +Deno.test('updateStats with kind 5 decrements reactions count', async () => { + await using test = await setupTest(); + const { relay, kysely } = test; + + const note = genEvent({ kind: 1 }); + await updateStats({ ...test, event: note }); + await relay.event(note); + + const sk = generateSecretKey(); + const reaction = genEvent({ kind: 7, content: '+', tags: [['e', note.id]] }, sk); + await updateStats({ ...test, event: reaction }); + await relay.event(reaction); + + await updateStats({ ...test, event: genEvent({ kind: 5, tags: [['e', reaction.id]] }, sk) }); + + const stats = await getEventStats(kysely, note.id); + + assertEquals(stats!.reactions, JSON.stringify({})); +}); + +Deno.test('countAuthorStats counts author stats from the database', async () => { + await using test = await setupTest(); + const { relay } = test; + + const sk = generateSecretKey(); + const pubkey = getPublicKey(sk); + + await relay.event(genEvent({ kind: 1, content: 'hello' }, sk)); + await relay.event(genEvent({ kind: 1, content: 'yolo' }, sk)); + await 
relay.event(genEvent({ kind: 3, tags: [['p', pubkey]] })); + + await test.kysely.insertInto('author_stats').values({ + pubkey, + search: 'Yolo Lolo', + notes_count: 0, + followers_count: 0, + following_count: 0, + }).onConflict((oc) => oc.column('pubkey').doUpdateSet({ 'search': 'baka' })) + .execute(); + + const stats = await countAuthorStats({ ...test, pubkey }); + + assertEquals(stats!.notes_count, 2); + assertEquals(stats!.followers_count, 1); +}); + +async function setupTest() { + const conf = new DittoConf(Deno.env); + + const db = new DittoPolyPg(conf.databaseUrl); + await db.migrate(); + + const { kysely } = db; + const relay = new NPostgres(kysely); + + return { + relay, + kysely, + [Symbol.asyncDispose]: async () => { + await sql`truncate table event_stats cascade`.execute(kysely); + await sql`truncate table author_stats cascade`.execute(kysely); + await db[Symbol.asyncDispose](); + }, + }; +} diff --git a/src/utils/stats.ts b/packages/ditto/utils/stats.ts similarity index 79% rename from src/utils/stats.ts rename to packages/ditto/utils/stats.ts index 4573bb60..448ba241 100644 --- a/src/utils/stats.ts +++ b/packages/ditto/utils/stats.ts @@ -1,28 +1,32 @@ +import { DittoTables } from '@ditto/db'; import { NostrEvent, NSchema as n, NStore } from '@nostrify/nostrify'; -import { Kysely, UpdateObject } from 'kysely'; +import { Insertable, Kysely, UpdateObject } from 'kysely'; import { SetRequired } from 'type-fest'; import { z } from 'zod'; -import { DittoTables } from '@/db/DittoTables.ts'; +import { Conf } from '@/config.ts'; import { findQuoteTag, findReplyTag, getTagSet } from '@/utils/tags.ts'; interface UpdateStatsOpts { kysely: Kysely; - store: NStore; + relay: NStore; event: NostrEvent; x?: 1 | -1; } /** Handle one event at a time and update relevant stats for it. 
*/ // deno-lint-ignore require-await -export async function updateStats({ event, kysely, store, x = 1 }: UpdateStatsOpts): Promise { +export async function updateStats({ event, kysely, relay, x = 1 }: UpdateStatsOpts): Promise { switch (event.kind) { case 1: + case 20: + case 1111: + case 30023: return handleEvent1(kysely, event, x); case 3: - return handleEvent3(kysely, event, x, store); + return handleEvent3(kysely, event, x, relay); case 5: - return handleEvent5(kysely, event, -1, store); + return handleEvent5(kysely, event, -1, relay); case 6: return handleEvent6(kysely, event, x); case 7: @@ -34,7 +38,34 @@ export async function updateStats({ event, kysely, store, x = 1 }: UpdateStatsOp /** Update stats for kind 1 event. */ async function handleEvent1(kysely: Kysely, event: NostrEvent, x: number): Promise { - await updateAuthorStats(kysely, event.pubkey, ({ notes_count }) => ({ notes_count: Math.max(0, notes_count + x) })); + await updateAuthorStats(kysely, event.pubkey, (prev) => { + const now = event.created_at; + + let start = prev.streak_start; + let end = prev.streak_end; + + if (start && end) { // Streak exists. + if (now <= end) { + // Streak cannot go backwards in time. Skip it. + } else if (now - end > Conf.streakWindow) { + // Streak is broken. Start a new streak. + start = now; + end = now; + } else { + // Extend the streak. + end = now; + } + } else { // New streak. + start = now; + end = now; + } + + return { + notes_count: Math.max(0, prev.notes_count + x), + streak_start: start || null, + streak_end: end || null, + }; + }); const replyId = findReplyTag(event.tags)?.[1]; const quoteId = findQuoteTag(event.tags)?.[1]; @@ -57,12 +88,12 @@ async function handleEvent1(kysely: Kysely, event: NostrEvent, x: n } /** Update stats for kind 3 event. 
*/ -async function handleEvent3(kysely: Kysely, event: NostrEvent, x: number, store: NStore): Promise { +async function handleEvent3(kysely: Kysely, event: NostrEvent, x: number, relay: NStore): Promise { const following = getTagSet(event.tags, 'p'); await updateAuthorStats(kysely, event.pubkey, () => ({ following_count: following.size })); - const [prev] = await store.query([ + const [prev] = await relay.query([ { kinds: [3], authors: [event.pubkey], limit: 1 }, ]); @@ -86,12 +117,12 @@ async function handleEvent3(kysely: Kysely, event: NostrEvent, x: n } /** Update stats for kind 5 event. */ -async function handleEvent5(kysely: Kysely, event: NostrEvent, x: -1, store: NStore): Promise { +async function handleEvent5(kysely: Kysely, event: NostrEvent, x: -1, relay: NStore): Promise { const id = event.tags.find(([name]) => name === 'e')?.[1]; if (id) { - const [target] = await store.query([{ ids: [id], authors: [event.pubkey], limit: 1 }]); + const [target] = await relay.query([{ ids: [id], authors: [event.pubkey], limit: 1 }]); if (target) { - await updateStats({ event: target, kysely, store, x }); + await updateStats({ event: target, kysely, relay, x }); } } } @@ -187,9 +218,9 @@ export function getAuthorStats( export async function updateAuthorStats( kysely: Kysely, pubkey: string, - fn: (prev: DittoTables['author_stats']) => UpdateObject, + fn: (prev: Insertable) => UpdateObject, ): Promise { - const empty: DittoTables['author_stats'] = { + const empty: Insertable = { pubkey, followers_count: 0, following_count: 0, @@ -269,13 +300,13 @@ export async function updateEventStats( /** Calculate author stats from the database. 
*/ export async function countAuthorStats( - { pubkey, store }: RefreshAuthorStatsOpts, + { pubkey, relay }: RefreshAuthorStatsOpts, ): Promise { const [{ count: followers_count }, { count: notes_count }, [followList], [kind0]] = await Promise.all([ - store.count([{ kinds: [3], '#p': [pubkey] }]), - store.count([{ kinds: [1], authors: [pubkey] }]), - store.query([{ kinds: [3], authors: [pubkey], limit: 1 }]), - store.query([{ kinds: [0], authors: [pubkey], limit: 1 }]), + relay.count([{ kinds: [3], '#p': [pubkey] }]), + relay.count([{ kinds: [1, 20], authors: [pubkey] }]), + relay.query([{ kinds: [3], authors: [pubkey], limit: 1 }]), + relay.query([{ kinds: [0], authors: [pubkey], limit: 1 }]), ]); let search: string = ''; const metadata = n.json().pipe(n.metadata()).catch({}).safeParse(kind0?.content); @@ -290,20 +321,26 @@ export async function countAuthorStats( following_count: getTagSet(followList?.tags ?? [], 'p').size, notes_count, search, + streak_start: null, + streak_end: null, + nip05: null, + nip05_domain: null, + nip05_hostname: null, + nip05_last_verified_at: null, }; } export interface RefreshAuthorStatsOpts { pubkey: string; kysely: Kysely; - store: SetRequired; + relay: SetRequired; } /** Refresh the author's stats in the database. 
*/ export async function refreshAuthorStats( - { pubkey, kysely, store }: RefreshAuthorStatsOpts, + { pubkey, kysely, relay }: RefreshAuthorStatsOpts, ): Promise { - const stats = await countAuthorStats({ store, pubkey, kysely }); + const stats = await countAuthorStats({ relay, pubkey, kysely }); await kysely.insertInto('author_stats') .values(stats) diff --git a/src/utils/tags.test.ts b/packages/ditto/utils/tags.test.ts similarity index 100% rename from src/utils/tags.test.ts rename to packages/ditto/utils/tags.test.ts diff --git a/src/utils/tags.ts b/packages/ditto/utils/tags.ts similarity index 100% rename from src/utils/tags.ts rename to packages/ditto/utils/tags.ts diff --git a/src/utils/text.ts b/packages/ditto/utils/text.ts similarity index 100% rename from src/utils/text.ts rename to packages/ditto/utils/text.ts diff --git a/src/utils/time.test.ts b/packages/ditto/utils/time.test.ts similarity index 100% rename from src/utils/time.test.ts rename to packages/ditto/utils/time.test.ts diff --git a/src/utils/time.ts b/packages/ditto/utils/time.ts similarity index 100% rename from src/utils/time.ts rename to packages/ditto/utils/time.ts diff --git a/src/utils/unfurl.ts b/packages/ditto/utils/unfurl.ts similarity index 84% rename from src/utils/unfurl.ts rename to packages/ditto/utils/unfurl.ts index b5f5c4eb..e2d4f855 100644 --- a/src/utils/unfurl.ts +++ b/packages/ditto/utils/unfurl.ts @@ -1,21 +1,19 @@ +import { cachedLinkPreviewSizeGauge } from '@ditto/metrics'; import TTLCache from '@isaacs/ttlcache'; -import Debug from '@soapbox/stickynotes/debug'; +import { logi } from '@soapbox/logi'; +import { safeFetch } from '@soapbox/safe-fetch'; import DOMPurify from 'isomorphic-dompurify'; import { unfurl } from 'unfurl.js'; import { Conf } from '@/config.ts'; import { PreviewCard } from '@/entities/PreviewCard.ts'; -import { cachedLinkPreviewSizeGauge } from '@/metrics.ts'; -import { fetchWorker } from '@/workers/fetch.ts'; - -const debug = Debug('ditto:unfurl'); 
+import { errorJson } from '@/utils/log.ts'; async function unfurlCard(url: string, signal: AbortSignal): Promise { - debug(`Unfurling ${url}...`); try { const result = await unfurl(url, { fetch: (url) => - fetchWorker(url, { + safeFetch(url, { headers: { 'Accept': 'text/html, application/xhtml+xml', 'User-Agent': Conf.fetchUserAgent, @@ -26,7 +24,7 @@ async function unfurlCard(url: string, signal: AbortSignal): Promise Array.from(new Map(events.map((event) => [event.pubkey, event])).values())) - .then((events) => hydrateEvents({ events, store, signal })) + .then((events) => hydrateEvents({ ...c.var, events, relay, signal })) .then((events) => filterFn ? events.filter(filterFn) : events); const accounts = await Promise.all( @@ -43,14 +41,13 @@ async function renderEventAccounts(c: AppContext, filters: NostrFilter[], opts?: } async function renderAccounts(c: AppContext, pubkeys: string[]) { - const { offset, limit } = c.get('listPagination'); + const { offset, limit } = paginationSchema.parse(c.req.query()); const authors = pubkeys.reverse().slice(offset, offset + limit); - const store = await Storages.db(); - const signal = c.req.raw.signal; + const { relay, signal } = c.var; - const events = await store.query([{ kinds: [0], authors }], { signal }) - .then((events) => hydrateEvents({ events, store, signal })); + const events = await relay.query([{ kinds: [0], authors }], { signal }) + .then((events) => hydrateEvents({ ...c.var, events })); const accounts = await Promise.all( authors.map((pubkey) => { @@ -72,11 +69,11 @@ async function renderStatuses(c: AppContext, ids: string[], signal = AbortSignal return c.json([]); } - const store = await Storages.db(); - const { limit } = c.get('pagination'); + const { user, relay, pagination } = c.var; + const { limit } = pagination; - const events = await store.query([{ kinds: [1], ids, limit }], { signal }) - .then((events) => hydrateEvents({ events, store, signal })); + const events = await relay.query([{ kinds: [1, 20], 
ids, limit }], { signal }) + .then((events) => hydrateEvents({ ...c.var, events })); if (!events.length) { return c.json([]); @@ -84,10 +81,10 @@ async function renderStatuses(c: AppContext, ids: string[], signal = AbortSignal const sortedEvents = [...events].sort((a, b) => ids.indexOf(a.id) - ids.indexOf(b.id)); - const viewerPubkey = await c.get('signer')?.getPublicKey(); + const viewerPubkey = await user?.signer.getPublicKey(); const statuses = await Promise.all( - sortedEvents.map((event) => renderStatus(event, { viewerPubkey })), + sortedEvents.map((event) => renderStatus(relay, event, { viewerPubkey })), ); // TODO: pagination with min_id and max_id based on the order of `ids`. diff --git a/src/views/ditto.ts b/packages/ditto/views/ditto.ts similarity index 100% rename from src/views/ditto.ts rename to packages/ditto/views/ditto.ts diff --git a/src/views/mastodon/accounts.ts b/packages/ditto/views/mastodon/accounts.ts similarity index 74% rename from src/views/mastodon/accounts.ts rename to packages/ditto/views/mastodon/accounts.ts index 025737c3..d541e633 100644 --- a/src/views/mastodon/accounts.ts +++ b/packages/ditto/views/mastodon/accounts.ts @@ -6,11 +6,9 @@ import { MastodonAccount } from '@/entities/MastodonAccount.ts'; import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; import { metadataSchema } from '@/schemas/nostr.ts'; import { getLnurl } from '@/utils/lnurl.ts'; -import { parseAndVerifyNip05 } from '@/utils/nip05.ts'; import { parseNoteContent } from '@/utils/note.ts'; import { getTagSet } from '@/utils/tags.ts'; -import { faviconCache } from '@/utils/favicon.ts'; -import { nostrDate, nostrNow } from '@/utils.ts'; +import { nostrDate, nostrNow, parseNip05 } from '@/utils.ts'; import { renderEmojis } from '@/views/mastodon/emojis.ts'; type ToAccountOpts = { @@ -20,16 +18,14 @@ type ToAccountOpts = { withSource?: false; }; -async function renderAccount( - event: Omit, - opts: ToAccountOpts = {}, - signal = AbortSignal.timeout(3000), -): 
Promise { +function renderAccount(event: Omit, opts: ToAccountOpts = {}): MastodonAccount { const { pubkey } = event; + const stats = event.author_stats; const names = getTagSet(event.user?.tags ?? [], 'n'); + if (names.has('disabled')) { - const account = await accountFromPubkey(pubkey, opts); + const account = accountFromPubkey(pubkey, opts); account.pleroma.deactivated = true; return account; } @@ -48,17 +44,9 @@ async function renderAccount( const npub = nip19.npubEncode(pubkey); const nprofile = nip19.nprofileEncode({ pubkey, relays: [Conf.relay] }); - const parsed05 = await parseAndVerifyNip05(nip05, pubkey, signal); + const parsed05 = stats?.nip05 ? parseNip05(stats.nip05) : undefined; const acct = parsed05?.handle || npub; - let favicon: URL | undefined; - if (parsed05?.domain) { - try { - favicon = await faviconCache.fetch(parsed05.domain, { signal }); - } catch { - favicon = new URL('/favicon.ico', `https://${parsed05.domain}/`); - } - } const { html } = parseNoteContent(about || '', []); const fields = _fields @@ -69,6 +57,22 @@ async function renderAccount( verified_at: null, })) ?? []; + let streakDays = 0; + let streakStart = stats?.streak_start ?? null; + let streakEnd = stats?.streak_end ?? null; + const { streakWindow } = Conf; + + if (streakStart && streakEnd) { + const broken = nostrNow() - streakEnd > streakWindow; + if (broken) { + streakStart = null; + streakEnd = null; + } else { + const delta = streakEnd - streakStart; + streakDays = Math.max(Math.ceil(delta / 86400), 1); + } + } + return { id: pubkey, acct, @@ -81,8 +85,8 @@ async function renderAccount( emojis: renderEmojis(event), fields: fields.map((field) => ({ ...field, value: parseNoteContent(field.value, []).html })), follow_requests_count: 0, - followers_count: event.author_stats?.followers_count ?? 0, - following_count: event.author_stats?.following_count ?? 0, + followers_count: stats?.followers_count ?? 0, + following_count: stats?.following_count ?? 
0, fqn: parsed05?.handle || npub, header: banner, header_static: banner, @@ -106,13 +110,19 @@ async function renderAccount( }, } : undefined, - statuses_count: event.author_stats?.notes_count ?? 0, + statuses_count: stats?.notes_count ?? 0, uri: Conf.local(`/users/${acct}`), url: Conf.local(`/@${acct}`), username: parsed05?.nickname || npub.substring(0, 8), ditto: { accepts_zaps: Boolean(getLnurl({ lud06, lud16 })), external_url: Conf.external(nprofile), + streak: { + days: streakDays, + start: streakStart ? nostrDate(streakStart).toISOString() : null, + end: streakEnd ? nostrDate(streakEnd).toISOString() : null, + expires: streakEnd ? nostrDate(streakEnd + streakWindow).toISOString() : null, + }, }, domain: parsed05?.domain, pleroma: { @@ -123,7 +133,7 @@ async function renderAccount( is_local: parsed05?.domain === Conf.url.host, settings_store: opts.withSource ? opts.settingsStore : undefined, tags: [...getTagSet(event.user?.tags ?? [], 't')], - favicon: favicon?.toString(), + favicon: stats?.favicon, }, nostr: { pubkey, @@ -133,7 +143,7 @@ async function renderAccount( }; } -function accountFromPubkey(pubkey: string, opts: ToAccountOpts = {}): Promise { +function accountFromPubkey(pubkey: string, opts: ToAccountOpts = {}): MastodonAccount { const event: UnsignedEvent = { kind: 0, pubkey, diff --git a/src/views/mastodon/admin-accounts.ts b/packages/ditto/views/mastodon/admin-accounts.ts similarity index 100% rename from src/views/mastodon/admin-accounts.ts rename to packages/ditto/views/mastodon/admin-accounts.ts diff --git a/src/views/mastodon/attachments.ts b/packages/ditto/views/mastodon/attachments.ts similarity index 100% rename from src/views/mastodon/attachments.ts rename to packages/ditto/views/mastodon/attachments.ts diff --git a/src/views/mastodon/emojis.ts b/packages/ditto/views/mastodon/emojis.ts similarity index 100% rename from src/views/mastodon/emojis.ts rename to packages/ditto/views/mastodon/emojis.ts diff --git 
a/src/views/mastodon/notifications.ts b/packages/ditto/views/mastodon/notifications.ts similarity index 69% rename from src/views/mastodon/notifications.ts rename to packages/ditto/views/mastodon/notifications.ts index 4cf6eb5c..7f71c1ea 100644 --- a/src/views/mastodon/notifications.ts +++ b/packages/ditto/views/mastodon/notifications.ts @@ -1,4 +1,4 @@ -import { NostrEvent } from '@nostrify/nostrify'; +import { NostrEvent, NStore } from '@nostrify/nostrify'; import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts'; import { Conf } from '@/config.ts'; @@ -10,36 +10,36 @@ interface RenderNotificationOpts { viewerPubkey: string; } -function renderNotification(event: DittoEvent, opts: RenderNotificationOpts) { +async function renderNotification(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) { const mentioned = !!event.tags.find(([name, value]) => name === 'p' && value === opts.viewerPubkey); if (event.kind === 1 && mentioned) { - return renderMention(event, opts); + return renderMention(store, event, opts); } if (event.kind === 6) { - return renderReblog(event, opts); + return renderReblog(store, event, opts); } if (event.kind === 7 && event.content === '+') { - return renderFavourite(event, opts); + return renderFavourite(store, event, opts); } if (event.kind === 7) { - return renderReaction(event, opts); + return renderReaction(store, event, opts); } - if (event.kind === 30360 && event.pubkey === Conf.pubkey) { + if (event.kind === 30360 && event.pubkey === await Conf.signer.getPublicKey()) { return renderNameGrant(event); } if (event.kind === 9735) { - return renderZap(event, opts); + return renderZap(store, event, opts); } } -async function renderMention(event: DittoEvent, opts: RenderNotificationOpts) { - const status = await renderStatus(event, opts); +async function renderMention(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) { + const status = await renderStatus(store, event, opts); if (!status) return; 
return { @@ -51,9 +51,9 @@ async function renderMention(event: DittoEvent, opts: RenderNotificationOpts) { }; } -async function renderReblog(event: DittoEvent, opts: RenderNotificationOpts) { +async function renderReblog(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) { if (event.repost?.kind !== 1) return; - const status = await renderStatus(event.repost, opts); + const status = await renderStatus(store, event.repost, opts); if (!status) return; const account = event.author ? await renderAccount(event.author) : await accountFromPubkey(event.pubkey); @@ -66,9 +66,9 @@ async function renderReblog(event: DittoEvent, opts: RenderNotificationOpts) { }; } -async function renderFavourite(event: DittoEvent, opts: RenderNotificationOpts) { +async function renderFavourite(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) { if (event.reacted?.kind !== 1) return; - const status = await renderStatus(event.reacted, opts); + const status = await renderStatus(store, event.reacted, opts); if (!status) return; const account = event.author ? await renderAccount(event.author) : await accountFromPubkey(event.pubkey); @@ -81,9 +81,9 @@ async function renderFavourite(event: DittoEvent, opts: RenderNotificationOpts) }; } -async function renderReaction(event: DittoEvent, opts: RenderNotificationOpts) { +async function renderReaction(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) { if (event.reacted?.kind !== 1) return; - const status = await renderStatus(event.reacted, opts); + const status = await renderStatus(store, event.reacted, opts); if (!status) return; const account = event.author ? 
await renderAccount(event.author) : await accountFromPubkey(event.pubkey); @@ -99,21 +99,24 @@ async function renderReaction(event: DittoEvent, opts: RenderNotificationOpts) { } async function renderNameGrant(event: DittoEvent) { + const r = event.tags.find(([name]) => name === 'r')?.[1]; const d = event.tags.find(([name]) => name === 'd')?.[1]; - const account = event.author ? await renderAccount(event.author) : await accountFromPubkey(event.pubkey); + const name = r ?? d; - if (!d) return; + if (name) return; + + const account = event.author ? await renderAccount(event.author) : await accountFromPubkey(event.pubkey); return { id: notificationId(event), type: 'ditto:name_grant' as const, - name: d, + name, created_at: nostrDate(event.created_at).toISOString(), account, }; } -async function renderZap(event: DittoEvent, opts: RenderNotificationOpts) { +async function renderZap(store: NStore, event: DittoEvent, opts: RenderNotificationOpts) { if (!event.zap_sender) return; const { zap_amount = 0, zap_message = '' } = event; @@ -130,7 +133,7 @@ async function renderZap(event: DittoEvent, opts: RenderNotificationOpts) { message: zap_message, created_at: nostrDate(event.created_at).toISOString(), account, - ...(event.zapped ? { status: await renderStatus(event.zapped, opts) } : {}), + ...(event.zapped ? 
{ status: await renderStatus(store, event.zapped, opts) } : {}), }; } diff --git a/src/views/mastodon/push.ts b/packages/ditto/views/mastodon/push.ts similarity index 90% rename from src/views/mastodon/push.ts rename to packages/ditto/views/mastodon/push.ts index 0a13179b..eb2e064c 100644 --- a/src/views/mastodon/push.ts +++ b/packages/ditto/views/mastodon/push.ts @@ -1,4 +1,4 @@ -import type { NostrEvent } from '@nostrify/nostrify'; +import type { NostrEvent, NStore } from '@nostrify/nostrify'; import { nip19 } from 'nostr-tools'; import { MastodonPush } from '@/types/MastodonPush.ts'; @@ -9,10 +9,11 @@ import { renderNotification } from '@/views/mastodon/notifications.ts'; * Unlike other views, only one will be rendered at a time, so making use of async calls is okay. */ export async function renderWebPushNotification( + store: NStore, event: NostrEvent, viewerPubkey: string, ): Promise { - const notification = await renderNotification(event, { viewerPubkey }); + const notification = await renderNotification(store, event, { viewerPubkey }); if (!notification) { return; } diff --git a/src/views/mastodon/relationships.ts b/packages/ditto/views/mastodon/relationships.ts similarity index 100% rename from src/views/mastodon/relationships.ts rename to packages/ditto/views/mastodon/relationships.ts diff --git a/src/views/mastodon/reports.ts b/packages/ditto/views/mastodon/reports.ts similarity index 87% rename from src/views/mastodon/reports.ts rename to packages/ditto/views/mastodon/reports.ts index 48baa42f..a2ad8d62 100644 --- a/src/views/mastodon/reports.ts +++ b/packages/ditto/views/mastodon/reports.ts @@ -1,3 +1,5 @@ +import { NStore } from '@nostrify/nostrify'; + import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts'; import { nostrDate } from '@/utils.ts'; @@ -6,7 +8,7 @@ import { renderStatus } from '@/views/mastodon/statuses.ts'; import { getTagSet } from '@/utils/tags.ts'; 
/** Expects a `reportEvent` of kind 1984 and a `profile` of kind 0 of the person being reported */ -async function renderReport(event: DittoEvent) { +function renderReport(event: DittoEvent) { // The category is present in both the 'e' and 'p' tag, however, it is possible to report a user without reporting a note, so it's better to get the category from the 'p' tag const category = event.tags.find(([name]) => name === 'p')?.[2]; const statusIds = event.tags.filter(([name]) => name === 'e').map((tag) => tag[1]) ?? []; @@ -23,9 +25,7 @@ async function renderReport(event: DittoEvent) { created_at: nostrDate(event.created_at).toISOString(), status_ids: statusIds, rules_ids: null, - target_account: event.reported_profile - ? await renderAccount(event.reported_profile) - : await accountFromPubkey(reportedPubkey), + target_account: event.reported_profile ? renderAccount(event.reported_profile) : accountFromPubkey(reportedPubkey), }; } @@ -36,7 +36,7 @@ interface RenderAdminReportOpts { /** Admin-level information about a filed report. * Expects an event of kind 1984 fully hydrated. 
* https://docs.joinmastodon.org/entities/Admin_Report */ -async function renderAdminReport(event: DittoEvent, opts: RenderAdminReportOpts) { +async function renderAdminReport(store: NStore, event: DittoEvent, opts: RenderAdminReportOpts) { const { viewerPubkey } = opts; // The category is present in both the 'e' and 'p' tag, however, it is possible to report a user without reporting a note, so it's better to get the category from the 'p' tag @@ -45,7 +45,7 @@ async function renderAdminReport(event: DittoEvent, opts: RenderAdminReportOpts) const statuses = []; if (event.reported_notes) { for (const status of event.reported_notes) { - statuses.push(await renderStatus(status, { viewerPubkey })); + statuses.push(await renderStatus(store, status, { viewerPubkey })); } } diff --git a/src/views/mastodon/statuses.ts b/packages/ditto/views/mastodon/statuses.ts similarity index 79% rename from src/views/mastodon/statuses.ts rename to packages/ditto/views/mastodon/statuses.ts index 3f8f1c96..5957356e 100644 --- a/src/views/mastodon/statuses.ts +++ b/packages/ditto/views/mastodon/statuses.ts @@ -1,4 +1,4 @@ -import { NostrEvent } from '@nostrify/nostrify'; +import { NostrEvent, NStore } from '@nostrify/nostrify'; import { nip19 } from 'nostr-tools'; import { Conf } from '@/config.ts'; @@ -6,7 +6,6 @@ import { MastodonAttachment } from '@/entities/MastodonAttachment.ts'; import { MastodonMention } from '@/entities/MastodonMention.ts'; import { MastodonStatus } from '@/entities/MastodonStatus.ts'; import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; -import { Storages } from '@/storages.ts'; import { nostrDate } from '@/utils.ts'; import { getMediaLinks, parseNoteContent, stripimeta } from '@/utils/note.ts'; import { findReplyTag } from '@/utils/tags.ts'; @@ -20,7 +19,11 @@ interface RenderStatusOpts { depth?: number; } -async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise { +async function renderStatus( + store: NStore, + event: DittoEvent, + 
opts: RenderStatusOpts, +): Promise { const { viewerPubkey, depth = 1 } = opts; if (depth > 2 || depth < 0) return; @@ -33,34 +36,18 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise< }); const account = event.author - ? await renderAccount({ ...event.author, author_stats: event.author_stats }) - : await accountFromPubkey(event.pubkey); + ? renderAccount({ ...event.author, author_stats: event.author_stats }) + : accountFromPubkey(event.pubkey); const replyId = findReplyTag(event.tags)?.[1]; - const mentionedPubkeys = [ - ...new Set( - event.tags - .filter((tag) => tag[0] === 'p') - .map((tag) => tag[1]), - ), - ]; - - const store = await Storages.db(); - - const mentionedProfiles = await store.query( - [{ kinds: [0], authors: mentionedPubkeys, limit: mentionedPubkeys.length }], - ); - - const mentions = await Promise.all( - mentionedPubkeys.map((pubkey) => renderMention(pubkey, mentionedProfiles.find((event) => event.pubkey === pubkey))), - ); + const mentions = event.mentions?.map((event) => renderMention(event)) ?? []; const { html, links, firstUrl } = parseNoteContent(stripimeta(event.content, event.tags), mentions); const [card, relatedEvents] = await Promise .all([ - firstUrl ? unfurlCardCached(firstUrl) : null, + firstUrl ? unfurlCardCached(firstUrl, AbortSignal.timeout(500)) : null, viewerPubkey ? await store.query([ { kinds: [6], '#e': [event.id], authors: [viewerPubkey], limit: 1 }, @@ -137,7 +124,7 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise< tags: [], emojis: renderEmojis(event), poll: null, - quote: !event.quote ? null : await renderStatus(event.quote, { depth: depth + 1 }), + quote: !event.quote ? null : await renderStatus(store, event.quote, { depth: depth + 1 }), quote_id: event.quote?.id ?? 
null, uri: Conf.local(`/users/${account.acct}/statuses/${event.id}`), url: Conf.local(`/@${account.acct}/${event.id}`), @@ -153,14 +140,18 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise< }; } -async function renderReblog(event: DittoEvent, opts: RenderStatusOpts): Promise { +async function renderReblog( + store: NStore, + event: DittoEvent, + opts: RenderStatusOpts, +): Promise { const { viewerPubkey } = opts; if (!event.repost) return; - const status = await renderStatus(event, {}); // omit viewerPubkey intentionally + const status = await renderStatus(store, event, {}); // omit viewerPubkey intentionally if (!status) return; - const reblog = await renderStatus(event.repost, { viewerPubkey }) ?? null; + const reblog = await renderStatus(store, event.repost, { viewerPubkey }) ?? null; return { ...status, @@ -170,8 +161,8 @@ async function renderReblog(event: DittoEvent, opts: RenderStatusOpts): Promise< }; } -async function renderMention(pubkey: string, event?: NostrEvent): Promise { - const account = event ? 
await renderAccount(event) : await accountFromPubkey(pubkey); +function renderMention(event: NostrEvent): MastodonMention { + const account = renderAccount(event); return { id: account.id, acct: account.acct, diff --git a/src/views/meta.ts b/packages/ditto/views/meta.ts similarity index 100% rename from src/views/meta.ts rename to packages/ditto/views/meta.ts diff --git a/src/workers/policy.ts b/packages/ditto/workers/policy.ts similarity index 56% rename from src/workers/policy.ts rename to packages/ditto/workers/policy.ts index 4124feb9..e2617f72 100644 --- a/src/workers/policy.ts +++ b/packages/ditto/workers/policy.ts @@ -1,20 +1,16 @@ +import { DittoConf } from '@ditto/conf'; import { NostrEvent, NostrRelayOK, NPolicy } from '@nostrify/nostrify'; -import { Stickynotes } from '@soapbox/stickynotes'; +import { logi } from '@soapbox/logi'; import * as Comlink from 'comlink'; -import { Conf } from '@/config.ts'; import type { CustomPolicy } from '@/workers/policy.worker.ts'; -import '@/workers/handlers/abortsignal.ts'; - -const console = new Stickynotes('ditto:policy'); - -class PolicyWorker implements NPolicy { +export class PolicyWorker implements NPolicy { private worker: Comlink.Remote; private ready: Promise; private enabled = true; - constructor() { + constructor(private conf: DittoConf) { this.worker = Comlink.wrap( new Worker( new URL('./policy.worker.ts', import.meta.url), @@ -23,8 +19,8 @@ class PolicyWorker implements NPolicy { name: 'PolicyWorker', deno: { permissions: { - read: [Conf.denoDir, Conf.policy, Conf.dataDir], - write: [Conf.dataDir], + read: [conf.denoDir, conf.policy, conf.dataDir], + write: [conf.dataDir], net: 'inherit', env: false, import: true, @@ -48,30 +44,48 @@ class PolicyWorker implements NPolicy { } private async init(): Promise { + const conf = this.conf; + try { await this.worker.init({ - path: Conf.policy, - databaseUrl: Conf.databaseUrl, - pubkey: Conf.pubkey, + path: conf.policy, + databaseUrl: conf.databaseUrl, + pubkey: 
await conf.signer.getPublicKey(), }); - console.warn(`Using custom policy: ${Conf.policy}`); + logi({ + level: 'info', + ns: 'ditto.system.policy', + msg: 'Using custom policy', + path: conf.policy, + enabled: true, + }); } catch (e) { if (e instanceof Error && e.message.includes('Module not found')) { - console.warn('Custom policy not found '); + logi({ + level: 'info', + ns: 'ditto.system.policy', + msg: 'Custom policy not found ', + path: null, + enabled: false, + }); this.enabled = false; return; } if (e instanceof Error && e.message.includes('PGlite is not supported in worker threads')) { - console.warn('Custom policies are not supported with PGlite. The policy is disabled.'); + logi({ + level: 'warn', + ns: 'ditto.system.policy', + msg: 'Custom policies are not supported with PGlite. The policy is disabled.', + path: conf.policy, + enabled: false, + }); this.enabled = false; return; } - throw new Error(`DITTO_POLICY (error importing policy): ${Conf.policy}`); + throw new Error(`DITTO_POLICY (error importing policy): ${conf.policy}`); } } } - -export const policyWorker = new PolicyWorker(); diff --git a/src/workers/policy.worker.ts b/packages/ditto/workers/policy.worker.ts similarity index 77% rename from src/workers/policy.worker.ts rename to packages/ditto/workers/policy.worker.ts index 5e9d4d4a..539830a5 100644 --- a/src/workers/policy.worker.ts +++ b/packages/ditto/workers/policy.worker.ts @@ -1,12 +1,10 @@ +import { DittoPolyPg } from '@ditto/db'; import '@soapbox/safe-fetch/load'; import { NostrEvent, NostrRelayOK, NPolicy } from '@nostrify/nostrify'; import { ReadOnlyPolicy } from '@nostrify/policies'; import * as Comlink from 'comlink'; -import { DittoDB } from '@/db/DittoDB.ts'; -import { EventsDB } from '@/storages/EventsDB.ts'; - -import '@/workers/handlers/abortsignal.ts'; +import { DittoPgStore } from '@/storages/DittoPgStore.ts'; // @ts-ignore Don't try to access the env from this worker. 
Deno.env = new Map(); @@ -17,7 +15,7 @@ interface PolicyInit { path: string; /** Database URL to connect to. */ databaseUrl: string; - /** Admin pubkey to use for EventsDB checks. */ + /** Admin pubkey to use for DittoPgStore checks. */ pubkey: string; } @@ -32,10 +30,10 @@ export class CustomPolicy implements NPolicy { async init({ path, databaseUrl, pubkey }: PolicyInit): Promise { const Policy = (await import(path)).default; - const { kysely } = DittoDB.create(databaseUrl, { poolSize: 1 }); + const db = new DittoPolyPg(databaseUrl, { poolSize: 1 }); - const store = new EventsDB({ - kysely, + const store = new DittoPgStore({ + db, pubkey, timeout: 5_000, }); diff --git a/src/workers/verify.ts b/packages/ditto/workers/verify.ts similarity index 100% rename from src/workers/verify.ts rename to packages/ditto/workers/verify.ts diff --git a/src/workers/verify.worker.ts b/packages/ditto/workers/verify.worker.ts similarity index 100% rename from src/workers/verify.worker.ts rename to packages/ditto/workers/verify.worker.ts diff --git a/packages/lang/deno.json b/packages/lang/deno.json new file mode 100644 index 00000000..f192fb0f --- /dev/null +++ b/packages/lang/deno.json @@ -0,0 +1,7 @@ +{ + "name": "@ditto/lang", + "version": "1.1.0", + "exports": { + ".": "./language.ts" + } +} diff --git a/src/utils/language.test.ts b/packages/lang/language.test.ts similarity index 66% rename from src/utils/language.test.ts rename to packages/lang/language.test.ts index 255f6b58..09dbb66a 100644 --- a/src/utils/language.test.ts +++ b/packages/lang/language.test.ts @@ -1,6 +1,7 @@ -import { detectLanguage } from '@/utils/language.ts'; import { assertEquals } from '@std/assert'; +import { detectLanguage } from './language.ts'; + Deno.test('Detect English language', () => { assertEquals(detectLanguage(``, 0.90), undefined); assertEquals(detectLanguage(`Good morning my fellow friends`, 0.90), 'en'); @@ -26,3 +27,26 @@ Deno.test('Detect English language', () => { 'en', ); }); + 
+Deno.test('Detects definitive texts', () => { + // NOTE: pass `1` as min confidence to test only the definitive patterns + + // unambiguous + assertEquals(detectLanguage('안녕하세요.', 1), 'ko'); + assertEquals(detectLanguage('Γειά σου!', 1), 'el'); + assertEquals(detectLanguage('שלום!', 1), 'he'); + assertEquals(detectLanguage('こんにちは。', 1), 'ja'); + assertEquals( + detectLanguage( + '最近、長女から「中学生男子全員クソ」という話を良く聞き中学生女子側の視点が分かってよかった。父からは「中学生男子は自分がクソだということを3年間かかって学習するんだよ」と言っておいた', + 1, + ), + 'ja', + ); + + // ambiguous + assertEquals(detectLanguage('你好', 1), undefined); + assertEquals(detectLanguage('東京', 1), undefined); + assertEquals(detectLanguage('Привет', 1), undefined); + assertEquals(detectLanguage('Hello', 1), undefined); +}); diff --git a/packages/lang/language.ts b/packages/lang/language.ts new file mode 100644 index 00000000..9a713122 --- /dev/null +++ b/packages/lang/language.ts @@ -0,0 +1,57 @@ +import ISO6391, { type LanguageCode } from 'iso-639-1'; +import lande from 'lande'; +import linkify from 'linkifyjs'; + +linkify.registerCustomProtocol('nostr', true); + +/** + * Returns the detected language if the confidence is greater or equal than 'minConfidence'. + * 'minConfidence' must be a number between 0 and 1, such as 0.95. + */ +export function detectLanguage(text: string, minConfidence: number): LanguageCode | undefined { + // It's better to remove the emojis first + const sanitizedText = linkify.tokenize( + text + .replaceAll(/\p{Extended_Pictographic}/gu, '') // strip emojis + .replaceAll(/[\s\uFEFF\u00A0\u200B-\u200D\u{0FE0E}]+/gu, ' '), // strip invisible characters + ) + .reduce((acc, { t, v }) => t === 'text' ? acc + v : acc, '').trim(); + + // Definite patterns for some languages. + // Text which matches MUST unambiguously be in the given language. + // This is only possible for some languages. + // All patterns match the full text, so mixed scripts would fail these tests. 
+ const languagePatterns: Partial> = { + ko: /^[\p{Script=Hangul}\s]+$/u, // Korean (Hangul only) + el: /^[\p{Script=Greek}\s]+$/u, // Greek + he: /^[\p{Script=Hebrew}\s]+$/u, // Hebrew + ja: /^(?=.*[\p{Script=Hiragana}\p{Script=Katakana}])[\p{Script=Hiragana}\p{Script=Katakana}\p{Script=Han}\s]+$/u, // Japanese (requires at least one Kana) + // zh: not possible to detect unambiguously + }; + + // If any pattern matches, the language is known. + for (const [lang, pattern] of Object.entries(languagePatterns) as [LanguageCode, RegExp][]) { + const text = sanitizedText + .replaceAll(/[\p{P}\p{S}]/gu, '') // strip punctuation and symbols + .replaceAll(/\p{N}/gu, ''); // strip numbers + + if (pattern.test(text)) { + return lang; + } + } + + if (sanitizedText.length < 10) { // heuristics + return; + } + + const [topResult] = lande(sanitizedText); + + if (topResult) { + const [iso6393, confidence] = topResult; + const locale = new Intl.Locale(iso6393); + + if (confidence >= minConfidence && ISO6391.validate(locale.language)) { + return locale.language; + } + } +} diff --git a/packages/mastoapi/auth/aes.bench.ts b/packages/mastoapi/auth/aes.bench.ts new file mode 100644 index 00000000..3b46f436 --- /dev/null +++ b/packages/mastoapi/auth/aes.bench.ts @@ -0,0 +1,18 @@ +import { generateSecretKey } from 'nostr-tools'; + +import { aesDecrypt, aesEncrypt } from './aes.ts'; + +Deno.bench('aesEncrypt', async (b) => { + const sk = generateSecretKey(); + const decrypted = generateSecretKey(); + b.start(); + await aesEncrypt(sk, decrypted); +}); + +Deno.bench('aesDecrypt', async (b) => { + const sk = generateSecretKey(); + const decrypted = generateSecretKey(); + const encrypted = await aesEncrypt(sk, decrypted); + b.start(); + await aesDecrypt(sk, encrypted); +}); diff --git a/packages/mastoapi/auth/aes.test.ts b/packages/mastoapi/auth/aes.test.ts new file mode 100644 index 00000000..ee735731 --- /dev/null +++ b/packages/mastoapi/auth/aes.test.ts @@ -0,0 +1,15 @@ +import { 
assertEquals } from '@std/assert'; +import { encodeHex } from '@std/encoding/hex'; +import { generateSecretKey } from 'nostr-tools'; + +import { aesDecrypt, aesEncrypt } from './aes.ts'; + +Deno.test('aesDecrypt & aesEncrypt', async () => { + const sk = generateSecretKey(); + const data = generateSecretKey(); + + const encrypted = await aesEncrypt(sk, data); + const decrypted = await aesDecrypt(sk, encrypted); + + assertEquals(encodeHex(decrypted), encodeHex(data)); +}); diff --git a/packages/mastoapi/auth/aes.ts b/packages/mastoapi/auth/aes.ts new file mode 100644 index 00000000..983fc39c --- /dev/null +++ b/packages/mastoapi/auth/aes.ts @@ -0,0 +1,17 @@ +/** Encrypt data with AES-GCM and a secret key. */ +export async function aesEncrypt(sk: Uint8Array, plaintext: Uint8Array): Promise { + const secretKey = await crypto.subtle.importKey('raw', sk, { name: 'AES-GCM' }, false, ['encrypt']); + const iv = crypto.getRandomValues(new Uint8Array(12)); + const buffer = await crypto.subtle.encrypt({ name: 'AES-GCM', iv }, secretKey, plaintext); + + return new Uint8Array([...iv, ...new Uint8Array(buffer)]); +} + +/** Decrypt data with AES-GCM and a secret key. 
*/ +export async function aesDecrypt(sk: Uint8Array, ciphertext: Uint8Array): Promise { + const secretKey = await crypto.subtle.importKey('raw', sk, { name: 'AES-GCM' }, false, ['decrypt']); + const iv = ciphertext.slice(0, 12); + const buffer = await crypto.subtle.decrypt({ name: 'AES-GCM', iv }, secretKey, ciphertext.slice(12)); + + return new Uint8Array(buffer); +} diff --git a/packages/mastoapi/auth/token.bench.ts b/packages/mastoapi/auth/token.bench.ts new file mode 100644 index 00000000..5df41d0f --- /dev/null +++ b/packages/mastoapi/auth/token.bench.ts @@ -0,0 +1,11 @@ +import { generateToken, getTokenHash } from './token.ts'; + +Deno.bench('generateToken', async () => { + await generateToken(); +}); + +Deno.bench('getTokenHash', async (b) => { + const { token } = await generateToken(); + b.start(); + await getTokenHash(token); +}); diff --git a/packages/mastoapi/auth/token.test.ts b/packages/mastoapi/auth/token.test.ts new file mode 100644 index 00000000..6f002267 --- /dev/null +++ b/packages/mastoapi/auth/token.test.ts @@ -0,0 +1,18 @@ +import { assertEquals } from '@std/assert'; +import { decodeHex, encodeHex } from '@std/encoding/hex'; + +import { generateToken, getTokenHash } from './token.ts'; + +Deno.test('generateToken', async () => { + const sk = decodeHex('a0968751df8fd42f362213f08751911672f2a037113b392403bbb7dd31b71c95'); + + const { token, hash } = await generateToken(sk); + + assertEquals(token, 'token15ztgw5wl3l2z7d3zz0cgw5v3zee09gphzyanjfqrhwma6vdhrj2sauwknd'); + assertEquals(encodeHex(hash), 'ab4c4ead4d1c72a38fffd45b999937b7e3f25f867b19aaf252df858e77b66a8a'); +}); + +Deno.test('getTokenHash', async () => { + const hash = await getTokenHash('token15ztgw5wl3l2z7d3zz0cgw5v3zee09gphzyanjfqrhwma6vdhrj2sauwknd'); + assertEquals(encodeHex(hash), 'ab4c4ead4d1c72a38fffd45b999937b7e3f25f867b19aaf252df858e77b66a8a'); +}); diff --git a/packages/mastoapi/auth/token.ts b/packages/mastoapi/auth/token.ts new file mode 100644 index 00000000..8d71ed6f --- 
/dev/null +++ b/packages/mastoapi/auth/token.ts @@ -0,0 +1,30 @@ +import { bech32 } from '@scure/base'; +import { generateSecretKey } from 'nostr-tools'; + +/** + * Generate an auth token for the API. + * + * Returns a bech32 encoded API token and the SHA-256 hash of the bytes. + * The token should be presented to the user, but only the hash should be stored in the database. + */ +export async function generateToken(sk = generateSecretKey()): Promise<{ token: `token1${string}`; hash: Uint8Array }> { + const words = bech32.toWords(sk); + const token = bech32.encode('token', words); + + const buffer = await crypto.subtle.digest('SHA-256', sk); + const hash = new Uint8Array(buffer); + + return { token, hash }; +} + +/** + * Get the SHA-256 hash of an API token. + * First decodes from bech32 then hashes the bytes. + * Used to identify the user in the database by the hash of their token. + */ +export async function getTokenHash(token: `token1${string}`): Promise { + const { bytes: sk } = bech32.decodeToBytes(token); + const buffer = await crypto.subtle.digest('SHA-256', sk); + + return new Uint8Array(buffer); +} diff --git a/packages/mastoapi/deno.json b/packages/mastoapi/deno.json new file mode 100644 index 00000000..b9626b3e --- /dev/null +++ b/packages/mastoapi/deno.json @@ -0,0 +1,10 @@ +{ + "name": "@ditto/mastoapi", + "version": "1.1.0", + "exports": { + "./middleware": "./middleware/mod.ts", + "./pagination": "./pagination/mod.ts", + "./router": "./router/mod.ts", + "./test": "./test.ts" + } +} diff --git a/packages/mastoapi/middleware/User.ts b/packages/mastoapi/middleware/User.ts new file mode 100644 index 00000000..ac38b8de --- /dev/null +++ b/packages/mastoapi/middleware/User.ts @@ -0,0 +1,6 @@ +import type { NostrSigner, NRelay } from '@nostrify/nostrify'; + +export interface User { + signer: S; + relay: R; +} diff --git a/packages/mastoapi/middleware/mod.ts b/packages/mastoapi/middleware/mod.ts new file mode 100644 index 00000000..fb6ffb59 --- /dev/null +++ 
b/packages/mastoapi/middleware/mod.ts @@ -0,0 +1,5 @@ +export { paginationMiddleware } from './paginationMiddleware.ts'; +export { tokenMiddleware } from './tokenMiddleware.ts'; +export { userMiddleware } from './userMiddleware.ts'; + +export type { User } from './User.ts'; diff --git a/packages/mastoapi/middleware/paginationMiddleware.ts b/packages/mastoapi/middleware/paginationMiddleware.ts new file mode 100644 index 00000000..28a7f1a1 --- /dev/null +++ b/packages/mastoapi/middleware/paginationMiddleware.ts @@ -0,0 +1,81 @@ +import { paginated, paginatedList } from '../pagination/paginate.ts'; +import { paginationSchema } from '../pagination/schema.ts'; + +import type { DittoMiddleware } from '@ditto/mastoapi/router'; +import type { NostrEvent } from '@nostrify/nostrify'; + +interface Pagination { + since?: number; + until?: number; + limit: number; +} + +interface ListPagination { + limit: number; + offset: number; +} + +type HeaderRecord = Record; +type PaginateFn = (events: NostrEvent[], body: object | unknown[], headers?: HeaderRecord) => Response; +type ListPaginateFn = (params: ListPagination, body: object | unknown[], headers?: HeaderRecord) => Response; + +/** Fixes compatibility with Mastodon apps by that don't use `Link` headers. */ +// @ts-ignore Types are right. 
+export function paginationMiddleware(): DittoMiddleware<{ pagination: Pagination; paginate: PaginateFn }>; +export function paginationMiddleware( + type: 'list', +): DittoMiddleware<{ pagination: ListPagination; paginate: ListPaginateFn }>; +export function paginationMiddleware( + type?: string, +): DittoMiddleware<{ pagination?: Pagination | ListPagination; paginate: PaginateFn | ListPaginateFn }> { + return async (c, next) => { + const { relay } = c.var; + + const pagination = paginationSchema.parse(c.req.query()); + + const { + max_id: maxId, + min_id: minId, + since, + until, + } = pagination; + + if ((maxId && !until) || (minId && !since)) { + const ids: string[] = []; + + if (maxId) ids.push(maxId); + if (minId) ids.push(minId); + + if (ids.length) { + const events = await relay.query( + [{ ids, limit: ids.length }], + { signal: c.req.raw.signal }, + ); + + for (const event of events) { + if (!until && maxId === event.id) pagination.until = event.created_at; + if (!since && minId === event.id) pagination.since = event.created_at; + } + } + } + + if (type === 'list') { + c.set('pagination', { + limit: pagination.limit, + offset: pagination.offset, + }); + const fn: ListPaginateFn = (params, body, headers) => paginatedList(c, params, body, headers); + c.set('paginate', fn); + } else { + c.set('pagination', { + since: pagination.since, + until: pagination.until, + limit: pagination.limit, + }); + const fn: PaginateFn = (events, body, headers) => paginated(c, events, body, headers); + c.set('paginate', fn); + } + + await next(); + }; +} diff --git a/packages/mastoapi/middleware/tokenMiddleware.ts b/packages/mastoapi/middleware/tokenMiddleware.ts new file mode 100644 index 00000000..a2241c19 --- /dev/null +++ b/packages/mastoapi/middleware/tokenMiddleware.ts @@ -0,0 +1,136 @@ +import { parseAuthRequest } from '@ditto/nip98'; +import { HTTPException } from '@hono/hono/http-exception'; +import { type NostrSigner, NSecSigner } from '@nostrify/nostrify'; +import { 
nip19 } from 'nostr-tools'; + +import { aesDecrypt } from '../auth/aes.ts'; +import { getTokenHash } from '../auth/token.ts'; +import { ConnectSigner } from '../signers/ConnectSigner.ts'; +import { ReadOnlySigner } from '../signers/ReadOnlySigner.ts'; +import { UserStore } from '../storages/UserStore.ts'; + +import type { DittoEnv, DittoMiddleware } from '@ditto/mastoapi/router'; +import type { Context } from '@hono/hono'; +import type { User } from './User.ts'; + +type CredentialsFn = (c: Context) => string | undefined; + +export function tokenMiddleware(fn?: CredentialsFn): DittoMiddleware<{ user?: User }> { + return async (c, next) => { + const header = fn ? fn(c) : c.req.header('authorization'); + + if (header) { + const { relay, conf } = c.var; + + const auth = parseAuthorization(header); + const signer = await getSigner(c, auth); + const userPubkey = await signer.getPublicKey(); + const adminPubkey = await conf.signer.getPublicKey(); + + const user: User = { + signer, + relay: new UserStore({ relay, userPubkey, adminPubkey }), + }; + + c.set('user', user); + } + + await next(); + }; +} + +function getSigner(c: Context, auth: Authorization): NostrSigner | Promise { + switch (auth.realm) { + case 'Bearer': { + if (isToken(auth.token)) { + return getSignerFromToken(c, auth.token); + } else { + return getSignerFromNip19(auth.token); + } + } + case 'Nostr': { + return getSignerFromNip98(c); + } + default: { + throw new HTTPException(400, { message: 'Unsupported Authorization realm.' 
}); + } + } +} + +async function getSignerFromToken(c: Context, token: `token1${string}`): Promise { + const { conf, db, relay } = c.var; + + try { + const tokenHash = await getTokenHash(token); + + const row = await db.kysely + .selectFrom('auth_tokens') + .select(['pubkey', 'bunker_pubkey', 'nip46_sk_enc', 'nip46_relays']) + .where('token_hash', '=', tokenHash) + .executeTakeFirstOrThrow(); + + const nep46Seckey = await aesDecrypt(conf.seckey, row.nip46_sk_enc); + + return new ConnectSigner({ + bunkerPubkey: row.bunker_pubkey, + userPubkey: row.pubkey, + signer: new NSecSigner(nep46Seckey), + relays: row.nip46_relays, + relay, + }); + } catch { + throw new HTTPException(401, { message: 'Token is wrong or expired.' }); + } +} + +function getSignerFromNip19(bech32: string): NostrSigner { + try { + const decoded = nip19.decode(bech32); + + switch (decoded.type) { + case 'npub': + return new ReadOnlySigner(decoded.data); + case 'nprofile': + return new ReadOnlySigner(decoded.data.pubkey); + case 'nsec': + return new NSecSigner(decoded.data); + } + } catch { + // fallthrough + } + + throw new HTTPException(401, { message: 'Invalid NIP-19 identifier in Authorization header.' }); +} + +async function getSignerFromNip98(c: Context): Promise { + const { conf } = c.var; + + const req = Object.create(c.req.raw, { + url: { value: conf.local(c.req.url) }, + }); + + const result = await parseAuthRequest(req); + + if (result.success) { + return new ReadOnlySigner(result.data.pubkey); + } else { + throw new HTTPException(401, { message: 'Invalid NIP-98 event in Authorization header.' 
}); + } +} + +interface Authorization { + realm: string; + token: string; +} + +function parseAuthorization(header: string): Authorization { + const [realm, ...parts] = header.split(' '); + return { + realm, + token: parts.join(' '), + }; +} + +function isToken(value: string): value is `token1${string}` { + return value.startsWith('token1'); +} diff --git a/packages/mastoapi/middleware/userMiddleware.test.ts b/packages/mastoapi/middleware/userMiddleware.test.ts new file mode 100644 index 00000000..2d30b0dc --- /dev/null +++ b/packages/mastoapi/middleware/userMiddleware.test.ts @@ -0,0 +1,74 @@ +import { setUser, testApp } from '@ditto/mastoapi/test'; +import { assertEquals } from '@std/assert'; + +import { userMiddleware } from './userMiddleware.ts'; +import { ReadOnlySigner } from '../signers/ReadOnlySigner.ts'; + +Deno.test('no user 401', async () => { + const { app } = testApp(); + const response = await app.use(userMiddleware()).request('/'); + assertEquals(response.status, 401); +}); + +Deno.test('unsupported signer 400', async () => { + const { app, relay } = testApp(); + const signer = new ReadOnlySigner('0461fcbecc4c3374439932d6b8f11269ccdb7cc973ad7a50ae362db135a474dd'); + + const response = await app + .use(setUser({ signer, relay })) + .use(userMiddleware({ enc: 'nip44' })) + .use((c, next) => { + c.var.user.signer.nip44.encrypt; // test that the type is set + return next(); + }) + .request('/'); + + assertEquals(response.status, 400); +}); + +Deno.test('with user 200', async () => { + const { app, user } = testApp(); + + const response = await app + .use(setUser(user)) + .use(userMiddleware()) + .get('/', (c) => c.text('ok')) + .request('/'); + + assertEquals(response.status, 200); +}); + +Deno.test('user and role 403', async () => { + const { app, user } = testApp(); + + const response = await app + .use(setUser(user)) + .use(userMiddleware({ role: 'admin' })) + .request('/'); + + assertEquals(response.status, 403); +}); + +Deno.test('admin role 200', 
async () => { + const { conf, app, user, relay } = testApp(); + + const event = await conf.signer.signEvent({ + kind: 30382, + tags: [ + ['d', await user.signer.getPublicKey()], + ['n', 'admin'], + ], + content: '', + created_at: Math.floor(Date.now() / 1000), + }); + + await relay.event(event); + + const response = await app + .use(setUser(user)) + .use(userMiddleware({ role: 'admin' })) + .get('/', (c) => c.text('ok')) + .request('/'); + + assertEquals(response.status, 200); +}); diff --git a/packages/mastoapi/middleware/userMiddleware.ts b/packages/mastoapi/middleware/userMiddleware.ts new file mode 100644 index 00000000..2b964362 --- /dev/null +++ b/packages/mastoapi/middleware/userMiddleware.ts @@ -0,0 +1,77 @@ +import { buildAuthEventTemplate, validateAuthEvent } from '@ditto/nip98'; +import { HTTPException } from '@hono/hono/http-exception'; + +import type { DittoMiddleware } from '@ditto/mastoapi/router'; +import type { NostrEvent, NostrSigner } from '@nostrify/nostrify'; +import type { SetRequired } from 'type-fest'; +import type { User } from './User.ts'; + +type Nip44Signer = SetRequired; + +interface UserMiddlewareOpts { + enc?: 'nip04' | 'nip44'; + role?: string; + verify?: boolean; +} + +export function userMiddleware(): DittoMiddleware<{ user: User }>; +// @ts-ignore Types are right. 
+export function userMiddleware( + opts: UserMiddlewareOpts & { enc: 'nip44' }, +): DittoMiddleware<{ user: User }>; +export function userMiddleware(opts: UserMiddlewareOpts): DittoMiddleware<{ user: User }>; +export function userMiddleware(opts: UserMiddlewareOpts = {}): DittoMiddleware<{ user: User }> { + return async (c, next) => { + const { conf, user, relay } = c.var; + const { enc, role, verify } = opts; + + if (!user) { + throw new HTTPException(401, { message: 'Authorization required' }); + } + + if (enc && !user.signer[enc]) { + throw new HTTPException(400, { message: `User does not have a ${enc} signer` }); + } + + if (role || verify) { + const req = setRequestUrl(c.req.raw, conf.local(c.req.url)); + const reqEvent = await buildAuthEventTemplate(req); + const resEvent = await user.signer.signEvent(reqEvent); + const result = await validateAuthEvent(req, resEvent); + + if (!result.success) { + throw new HTTPException(401, { message: 'Verification failed' }); + } + + // Prevent people from accidentally using the wrong account. This has no other security implications. + if (result.data.pubkey !== await user.signer.getPublicKey()) { + throw new HTTPException(401, { message: 'Pubkey mismatch' }); + } + + if (role) { + const [user] = await relay.query([{ + kinds: [30382], + authors: [await conf.signer.getPublicKey()], + '#d': [result.data.pubkey], + limit: 1, + }]); + + if (!user || !matchesRole(user, role)) { + throw new HTTPException(403, { message: `Must have ${role} role` }); + } + } + } + + await next(); + }; +} + +/** Rewrite the URL of the request object. */ +function setRequestUrl(req: Request, url: string): Request { + return Object.create(req, { url: { value: url } }); +} + +/** Check whether the user fulfills the role. 
*/ +function matchesRole(user: NostrEvent, role: string): boolean { + return user.tags.some(([tag, value]) => tag === 'n' && value === role); +} diff --git a/packages/mastoapi/pagination/link-header.test.ts b/packages/mastoapi/pagination/link-header.test.ts new file mode 100644 index 00000000..db41eaa0 --- /dev/null +++ b/packages/mastoapi/pagination/link-header.test.ts @@ -0,0 +1,34 @@ +import { genEvent } from '@nostrify/nostrify/test'; +import { assertEquals } from '@std/assert'; + +import { buildLinkHeader, buildListLinkHeader } from './link-header.ts'; + +Deno.test('buildLinkHeader', () => { + const url = 'https://ditto.test/api/v1/events'; + + const events = [ + genEvent({ created_at: 1 }), + genEvent({ created_at: 2 }), + genEvent({ created_at: 3 }), + ]; + + const link = buildLinkHeader(url, events); + + assertEquals( + link?.toString(), + '; rel="next", ; rel="prev"', + ); +}); + +Deno.test('buildListLinkHeader', () => { + const url = 'https://ditto.test/api/v1/tags'; + + const params = { offset: 0, limit: 3 }; + + const link = buildListLinkHeader(url, params); + + assertEquals( + link?.toString(), + '; rel="next", ; rel="prev"', + ); +}); diff --git a/packages/mastoapi/pagination/link-header.ts b/packages/mastoapi/pagination/link-header.ts new file mode 100644 index 00000000..648b4aab --- /dev/null +++ b/packages/mastoapi/pagination/link-header.ts @@ -0,0 +1,39 @@ +import type { NostrEvent } from '@nostrify/nostrify'; + +/** Build HTTP Link header for Mastodon API pagination. 
*/ +export function buildLinkHeader(url: string, events: NostrEvent[]): string | undefined { + if (events.length <= 1) return; + + const firstEvent = events[0]; + const lastEvent = events[events.length - 1]; + + const { pathname, search } = new URL(url); + + const next = new URL(pathname + search, url); + const prev = new URL(pathname + search, url); + + next.searchParams.set('until', String(lastEvent.created_at)); + prev.searchParams.set('since', String(firstEvent.created_at)); + + return `<${next}>; rel="next", <${prev}>; rel="prev"`; +} + +/** Build HTTP Link header for paginating Nostr lists. */ +export function buildListLinkHeader( + url: string, + params: { offset: number; limit: number }, +): string | undefined { + const { pathname, search } = new URL(url); + const { offset, limit } = params; + + const next = new URL(pathname + search, url); + const prev = new URL(pathname + search, url); + + next.searchParams.set('offset', String(offset + limit)); + prev.searchParams.set('offset', String(Math.max(offset - limit, 0))); + + next.searchParams.set('limit', String(limit)); + prev.searchParams.set('limit', String(limit)); + + return `<${next}>; rel="next", <${prev}>; rel="prev"`; +} diff --git a/packages/mastoapi/pagination/mod.ts b/packages/mastoapi/pagination/mod.ts new file mode 100644 index 00000000..18998a36 --- /dev/null +++ b/packages/mastoapi/pagination/mod.ts @@ -0,0 +1,3 @@ +export { buildLinkHeader, buildListLinkHeader } from './link-header.ts'; +export { paginated, paginatedList } from './paginate.ts'; +export { paginationSchema } from './schema.ts'; diff --git a/packages/mastoapi/pagination/paginate.test.ts b/packages/mastoapi/pagination/paginate.test.ts new file mode 100644 index 00000000..e69de29b diff --git a/packages/mastoapi/pagination/paginate.ts b/packages/mastoapi/pagination/paginate.ts new file mode 100644 index 00000000..aab93a47 --- /dev/null +++ b/packages/mastoapi/pagination/paginate.ts @@ -0,0 +1,50 @@ +import { buildLinkHeader, 
buildListLinkHeader } from './link-header.ts'; + +import type { DittoEnv } from '@ditto/mastoapi/router'; +import type { Context } from '@hono/hono'; +import type { NostrEvent } from '@nostrify/nostrify'; + +type HeaderRecord = Record; + +/** Return results with pagination headers. Assumes chronological sorting of events. */ +export function paginated( + c: Context, + events: NostrEvent[], + body: object | unknown[], + headers: HeaderRecord = {}, +): Response { + const { conf } = c.var; + + const url = conf.local(c.req.url); + const link = buildLinkHeader(url, events); + + if (link) { + headers.link = link; + } + + // Filter out undefined entities. + const results = Array.isArray(body) ? body.filter(Boolean) : body; + return c.json(results, 200, headers); +} + +/** paginate a list of tags. */ +export function paginatedList( + c: Context, + params: { offset: number; limit: number }, + body: object | unknown[], + headers: HeaderRecord = {}, +): Response { + const { conf } = c.var; + + const url = conf.local(c.req.url); + const link = buildListLinkHeader(url, params); + const hasMore = Array.isArray(body) ? body.length > 0 : true; + + if (link) { + headers.link = hasMore ? link : link.split(', ').find((link) => link.endsWith('; rel="prev"'))!; + } + + // Filter out undefined entities. + const results = Array.isArray(body) ? 
body.filter(Boolean) : body; + return c.json(results, 200, headers); +} diff --git a/packages/mastoapi/pagination/schema.test.ts b/packages/mastoapi/pagination/schema.test.ts new file mode 100644 index 00000000..94be9091 --- /dev/null +++ b/packages/mastoapi/pagination/schema.test.ts @@ -0,0 +1,23 @@ +import { assertEquals } from '@std/assert'; + +import { paginationSchema } from './schema.ts'; + +Deno.test('paginationSchema', () => { + const pagination = paginationSchema.parse({ + limit: '10', + offset: '20', + max_id: '1', + min_id: '2', + since: '3', + until: '4', + }); + + assertEquals(pagination, { + limit: 10, + offset: 20, + max_id: '1', + min_id: '2', + since: 3, + until: 4, + }); +}); diff --git a/src/schemas/pagination.ts b/packages/mastoapi/pagination/schema.ts similarity index 70% rename from src/schemas/pagination.ts rename to packages/mastoapi/pagination/schema.ts index 89e3c5f6..5647246d 100644 --- a/src/schemas/pagination.ts +++ b/packages/mastoapi/pagination/schema.ts @@ -1,7 +1,16 @@ import { z } from 'zod'; +export interface Pagination { + max_id?: string; + min_id?: string; + since?: number; + until?: number; + limit: number; + offset: number; +} + /** Schema to parse pagination query params. 
*/ -export const paginationSchema = z.object({ +export const paginationSchema: z.ZodType = z.object({ max_id: z.string().transform((val) => { if (!val.includes('-')) return val; return val.split('-')[1]; @@ -11,4 +20,4 @@ export const paginationSchema = z.object({ until: z.coerce.number().nonnegative().optional().catch(undefined), limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)), offset: z.coerce.number().nonnegative().catch(0), -}); +}) as z.ZodType; diff --git a/packages/mastoapi/router/DittoApp.test.ts b/packages/mastoapi/router/DittoApp.test.ts new file mode 100644 index 00000000..c828d68a --- /dev/null +++ b/packages/mastoapi/router/DittoApp.test.ts @@ -0,0 +1,23 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; +import { Hono } from '@hono/hono'; +import { MockRelay } from '@nostrify/nostrify/test'; + +import { DittoApp } from './DittoApp.ts'; +import { DittoRoute } from './DittoRoute.ts'; + +Deno.test('DittoApp', async () => { + await using db = new DittoPolyPg('memory://'); + const conf = new DittoConf(new Map()); + const relay = new MockRelay(); + + const app = new DittoApp({ conf, db, relay }); + + const hono = new Hono(); + const route = new DittoRoute(); + + app.route('/', route); + + // @ts-expect-error Passing a non-DittoRoute to route. + app.route('/', hono); +}); diff --git a/packages/mastoapi/router/DittoApp.ts b/packages/mastoapi/router/DittoApp.ts new file mode 100644 index 00000000..3309f65d --- /dev/null +++ b/packages/mastoapi/router/DittoApp.ts @@ -0,0 +1,21 @@ +import { Hono } from '@hono/hono'; + +import type { HonoOptions } from '@hono/hono/hono-base'; +import type { DittoEnv } from './DittoEnv.ts'; + +export class DittoApp extends Hono { + // @ts-ignore Require a DittoRoute for type safety. 
+ declare route: (path: string, app: Hono) => Hono; + + constructor(vars: Omit, opts: HonoOptions = {}) { + super(opts); + + this.use((c, next) => { + c.set('db', vars.db); + c.set('conf', vars.conf); + c.set('relay', vars.relay); + c.set('signal', c.req.raw.signal); + return next(); + }); + } +} diff --git a/packages/mastoapi/router/DittoEnv.ts b/packages/mastoapi/router/DittoEnv.ts new file mode 100644 index 00000000..7f399e62 --- /dev/null +++ b/packages/mastoapi/router/DittoEnv.ts @@ -0,0 +1,20 @@ +import type { DittoConf } from '@ditto/conf'; +import type { DittoDB } from '@ditto/db'; +import type { Env } from '@hono/hono'; +import type { NRelay } from '@nostrify/nostrify'; + +export interface DittoEnv extends Env { + Variables: { + /** Ditto site configuration. */ + conf: DittoConf; + /** Relay store. */ + relay: NRelay; + /** + * Database object. + * @deprecated Store data as Nostr events instead. + */ + db: DittoDB; + /** Abort signal for the request. */ + signal: AbortSignal; + }; +} diff --git a/packages/mastoapi/router/DittoMiddleware.ts b/packages/mastoapi/router/DittoMiddleware.ts new file mode 100644 index 00000000..1483ca90 --- /dev/null +++ b/packages/mastoapi/router/DittoMiddleware.ts @@ -0,0 +1,5 @@ +import type { MiddlewareHandler } from '@hono/hono'; +import type { DittoEnv } from './DittoEnv.ts'; + +// deno-lint-ignore ban-types +export type DittoMiddleware = MiddlewareHandler; diff --git a/packages/mastoapi/router/DittoRoute.test.ts b/packages/mastoapi/router/DittoRoute.test.ts new file mode 100644 index 00000000..737019c4 --- /dev/null +++ b/packages/mastoapi/router/DittoRoute.test.ts @@ -0,0 +1,12 @@ +import { assertEquals } from '@std/assert'; + +import { DittoRoute } from './DittoRoute.ts'; + +Deno.test('DittoRoute', async () => { + const route = new DittoRoute(); + const response = await route.request('/'); + const body = await response.json(); + + assertEquals(response.status, 500); + assertEquals(body, { error: 'Missing required 
variable: db' }); +}); diff --git a/packages/mastoapi/router/DittoRoute.ts b/packages/mastoapi/router/DittoRoute.ts new file mode 100644 index 00000000..369fb858 --- /dev/null +++ b/packages/mastoapi/router/DittoRoute.ts @@ -0,0 +1,53 @@ +import { type ErrorHandler, Hono } from '@hono/hono'; +import { HTTPException } from '@hono/hono/http-exception'; + +import type { HonoOptions } from '@hono/hono/hono-base'; +import type { DittoEnv } from './DittoEnv.ts'; + +/** + * Ditto base route class. + * Ensures that required variables are set for type safety. + */ +export class DittoRoute extends Hono { + constructor(opts: HonoOptions = {}) { + super(opts); + + this.use((c, next) => { + this.assertVars(c.var); + return next(); + }); + + this.onError(this._errorHandler); + } + + private assertVars(vars: Partial): DittoEnv['Variables'] { + if (!vars.db) this.throwMissingVar('db'); + if (!vars.conf) this.throwMissingVar('conf'); + if (!vars.relay) this.throwMissingVar('relay'); + if (!vars.signal) this.throwMissingVar('signal'); + + return { + ...vars, + db: vars.db, + conf: vars.conf, + relay: vars.relay, + signal: vars.signal, + }; + } + + private throwMissingVar(name: string): never { + throw new HTTPException(500, { message: `Missing required variable: ${name}` }); + } + + private _errorHandler: ErrorHandler = (error, c) => { + if (error instanceof HTTPException) { + if (error.res) { + return error.res; + } else { + return c.json({ error: error.message }, error.status); + } + } + + return c.json({ error: 'Something went wrong' }, 500); + }; +} diff --git a/packages/mastoapi/router/mod.ts b/packages/mastoapi/router/mod.ts new file mode 100644 index 00000000..a4361da6 --- /dev/null +++ b/packages/mastoapi/router/mod.ts @@ -0,0 +1,5 @@ +export { DittoApp } from './DittoApp.ts'; +export { DittoRoute } from './DittoRoute.ts'; + +export type { DittoEnv } from './DittoEnv.ts'; +export type { DittoMiddleware } from './DittoMiddleware.ts'; diff --git a/src/signers/ConnectSigner.ts 
b/packages/mastoapi/signers/ConnectSigner.ts similarity index 92% rename from src/signers/ConnectSigner.ts rename to packages/mastoapi/signers/ConnectSigner.ts index 89c62679..e3671413 100644 --- a/src/signers/ConnectSigner.ts +++ b/packages/mastoapi/signers/ConnectSigner.ts @@ -1,10 +1,9 @@ // deno-lint-ignore-file require-await import { HTTPException } from '@hono/hono/http-exception'; -import { NConnectSigner, NostrEvent, NostrSigner } from '@nostrify/nostrify'; - -import { Storages } from '@/storages.ts'; +import { NConnectSigner, type NostrEvent, type NostrSigner, type NRelay } from '@nostrify/nostrify'; interface ConnectSignerOpts { + relay: NRelay; bunkerPubkey: string; userPubkey: string; signer: NostrSigner; @@ -27,8 +26,7 @@ export class ConnectSigner implements NostrSigner { return new NConnectSigner({ encryption: 'nip44', pubkey: this.opts.bunkerPubkey, - // TODO: use a remote relay for `nprofile` signing (if present and `Conf.relay` isn't already in the list) - relay: await Storages.pubsub(), + relay: this.opts.relay, signer, timeout: 60_000, }); diff --git a/packages/mastoapi/signers/ReadOnlySigner.ts b/packages/mastoapi/signers/ReadOnlySigner.ts new file mode 100644 index 00000000..74740b03 --- /dev/null +++ b/packages/mastoapi/signers/ReadOnlySigner.ts @@ -0,0 +1,18 @@ +// deno-lint-ignore-file require-await +import { HTTPException } from '@hono/hono/http-exception'; + +import type { NostrEvent, NostrSigner } from '@nostrify/nostrify'; + +export class ReadOnlySigner implements NostrSigner { + constructor(private pubkey: string) {} + + async signEvent(): Promise { + throw new HTTPException(401, { + message: 'Log in with Nostr Connect to sign events', + }); + } + + async getPublicKey(): Promise { + return this.pubkey; + } +} diff --git a/src/storages/UserStore.test.ts b/packages/mastoapi/storages/UserStore.test.ts similarity index 83% rename from src/storages/UserStore.test.ts rename to packages/mastoapi/storages/UserStore.test.ts index 
d04ece07..c9aa3329 100644 --- a/src/storages/UserStore.test.ts +++ b/packages/mastoapi/storages/UserStore.test.ts @@ -1,7 +1,7 @@ import { MockRelay } from '@nostrify/nostrify/test'; - import { assertEquals } from '@std/assert'; -import { UserStore } from '@/storages/UserStore.ts'; + +import { UserStore } from './UserStore.ts'; import userBlack from '~/fixtures/events/kind-0-black.json' with { type: 'json' }; import userMe from '~/fixtures/events/event-0-makes-repost-with-quote-repost.json' with { type: 'json' }; @@ -14,9 +14,8 @@ Deno.test('query events of users that are not muted', async () => { const blockEventCopy = structuredClone(blockEvent); const event1authorUserMeCopy = structuredClone(event1authorUserMe); - const db = new MockRelay(); - - const store = new UserStore(userBlackCopy.pubkey, db); + const relay = new MockRelay(); + const store = new UserStore({ relay, userPubkey: userBlackCopy.pubkey }); await store.event(blockEventCopy); await store.event(userBlackCopy); @@ -30,9 +29,8 @@ Deno.test('user never muted anyone', async () => { const userBlackCopy = structuredClone(userBlack); const userMeCopy = structuredClone(userMe); - const db = new MockRelay(); - - const store = new UserStore(userBlackCopy.pubkey, db); + const relay = new MockRelay(); + const store = new UserStore({ relay, userPubkey: userBlackCopy.pubkey }); await store.event(userBlackCopy); await store.event(userMeCopy); diff --git a/packages/mastoapi/storages/UserStore.ts b/packages/mastoapi/storages/UserStore.ts new file mode 100644 index 00000000..dec77916 --- /dev/null +++ b/packages/mastoapi/storages/UserStore.ts @@ -0,0 +1,73 @@ +import type { + NostrEvent, + NostrFilter, + NostrRelayCLOSED, + NostrRelayEOSE, + NostrRelayEVENT, + NRelay, +} from '@nostrify/nostrify'; + +interface UserStoreOpts { + relay: NRelay; + userPubkey: string; + adminPubkey?: string; +} + +export class UserStore implements NRelay { + constructor(private opts: UserStoreOpts) {} + + req( + filters: NostrFilter[], 
+ opts?: { signal?: AbortSignal }, + ): AsyncIterable { + // TODO: support req maybe? It would be inefficient. + return this.opts.relay.req(filters, opts); + } + + async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise { + return await this.opts.relay.event(event, opts); + } + + /** + * Query events that `pubkey` did not mute + * https://github.com/nostr-protocol/nips/blob/master/51.md#standard-lists + */ + async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise { + const { relay, userPubkey, adminPubkey } = this.opts; + + const mutes = new Set(); + const [muteList] = await this.opts.relay.query([{ authors: [userPubkey], kinds: [10000], limit: 1 }]); + + for (const [name, value] of muteList?.tags ?? []) { + if (name === 'p') { + mutes.add(value); + } + } + + const events = await relay.query(filters, opts); + + const users = adminPubkey + ? await relay.query([{ + kinds: [30382], + authors: [adminPubkey], + '#d': [...events.map(({ pubkey }) => pubkey)], + }]) + : []; + + return events.filter((event) => { + const user = users.find((user) => user.tags.find(([name]) => name === 'd')?.[1] === event.pubkey); + + for (const [name, value] of user?.tags ?? 
[]) { + if (name === 'n' && value === 'disabled') { + return false; + } + } + + return event.kind === 0 || !mutes.has(event.pubkey); + }); + } + + close(): Promise { + return this.opts.relay.close(); + } +} diff --git a/packages/mastoapi/test.ts b/packages/mastoapi/test.ts new file mode 100644 index 00000000..41e35c2c --- /dev/null +++ b/packages/mastoapi/test.ts @@ -0,0 +1,41 @@ +import { DittoConf } from '@ditto/conf'; +import { type DittoDB, DummyDB } from '@ditto/db'; +import { DittoApp, type DittoMiddleware } from '@ditto/mastoapi/router'; +import { type NostrSigner, type NRelay, NSecSigner } from '@nostrify/nostrify'; +import { MockRelay } from '@nostrify/nostrify/test'; +import { generateSecretKey, nip19 } from 'nostr-tools'; + +import type { User } from '@ditto/mastoapi/middleware'; + +export function testApp(): { + app: DittoApp; + relay: NRelay; + conf: DittoConf; + db: DittoDB; + user: { + signer: NostrSigner; + relay: NRelay; + }; +} { + const db = new DummyDB(); + + const nsec = nip19.nsecEncode(generateSecretKey()); + const conf = new DittoConf(new Map([['DITTO_NSEC', nsec]])); + + const relay = new MockRelay(); + const app = new DittoApp({ conf, relay, db }); + + const user = { + signer: new NSecSigner(generateSecretKey()), + relay, + }; + + return { app, relay, conf, db, user }; +} + +export function setUser(user: User): DittoMiddleware<{ user: User }> { + return async (c, next) => { + c.set('user', user); + await next(); + }; +} diff --git a/packages/metrics/deno.json b/packages/metrics/deno.json new file mode 100644 index 00000000..12524c18 --- /dev/null +++ b/packages/metrics/deno.json @@ -0,0 +1,6 @@ +{ + "name": "@ditto/metrics", + "exports": { + ".": "./metrics.ts" + } +} diff --git a/packages/metrics/metrics.ts b/packages/metrics/metrics.ts new file mode 100644 index 00000000..716582d4 --- /dev/null +++ b/packages/metrics/metrics.ts @@ -0,0 +1,151 @@ +import { Counter, Gauge, Histogram } from 'prom-client'; + +const prefix = 'ditto'; + 
+export const httpRequestsCounter: Counter<'method'> = new Counter({ + name: `${prefix}_http_requests_total`, + help: 'Total number of HTTP requests', + labelNames: ['method'], +}); + +export const httpResponsesCounter: Counter<'method' | 'path' | 'status'> = new Counter({ + name: `${prefix}_http_responses_total`, + help: 'Total number of HTTP responses', + labelNames: ['method', 'path', 'status'], +}); + +export const httpResponseDurationHistogram: Histogram<'method' | 'path' | 'status'> = new Histogram({ + name: `${prefix}_http_response_duration_seconds`, + help: 'Histogram of HTTP response times in seconds', + labelNames: ['method', 'path', 'status'], +}); + +export const streamingConnectionsGauge: Gauge = new Gauge({ + name: `${prefix}_streaming_connections`, + help: 'Number of active connections to the streaming API', +}); + +export const streamingServerMessagesCounter: Counter = new Counter({ + name: `${prefix}_streaming_server_messages_total`, + help: 'Total number of messages sent from the streaming API', +}); + +export const streamingClientMessagesCounter: Counter = new Counter({ + name: `${prefix}_streaming_client_messages_total`, + help: 'Total number of messages received by the streaming API', +}); + +export const fetchResponsesCounter: Counter<'method' | 'status'> = new Counter({ + name: `${prefix}_fetch_responses_total`, + help: 'Total number of fetch requests', + labelNames: ['method', 'status'], +}); + +export const firehoseEventsCounter: Counter<'kind'> = new Counter({ + name: `${prefix}_firehose_events_total`, + help: 'Total number of Nostr events processed by the firehose', + labelNames: ['kind'], +}); + +export const pipelineEventsCounter: Counter<'kind'> = new Counter({ + name: `${prefix}_pipeline_events_total`, + help: 'Total number of Nostr events processed by the pipeline', + labelNames: ['kind'], +}); + +export const policyEventsCounter: Counter<'ok'> = new Counter({ + name: `${prefix}_policy_events_total`, + help: 'Total number of policy 
OK responses', + labelNames: ['ok'], +}); + +export const relayEventsCounter: Counter<'kind'> = new Counter({ + name: `${prefix}_relay_events_total`, + help: 'Total number of EVENT messages processed by the relay', + labelNames: ['kind'], +}); + +export const relayMessagesCounter: Counter<'verb'> = new Counter({ + name: `${prefix}_relay_messages_total`, + help: 'Total number of Nostr messages processed by the relay', + labelNames: ['verb'], +}); + +export const relayConnectionsGauge: Gauge = new Gauge({ + name: `${prefix}_relay_connections`, + help: 'Number of active connections to the relay', +}); + +export const dbQueriesCounter: Counter<'kind'> = new Counter({ + name: `${prefix}_db_queries_total`, + help: 'Total number of database queries', + labelNames: ['kind'], +}); + +export const dbEventsCounter: Counter<'kind'> = new Counter({ + name: `${prefix}_db_events_total`, + help: 'Total number of database inserts', + labelNames: ['kind'], +}); + +export const dbPoolSizeGauge: Gauge = new Gauge({ + name: `${prefix}_db_pool_size`, + help: 'Number of connections in the database pool', +}); + +export const dbAvailableConnectionsGauge: Gauge = new Gauge({ + name: `${prefix}_db_available_connections`, + help: 'Number of available connections in the database pool', +}); + +export const dbQueryDurationHistogram: Histogram = new Histogram({ + name: `${prefix}_db_query_duration_seconds`, + help: 'Duration of database queries', +}); + +export const cachedFaviconsSizeGauge: Gauge = new Gauge({ + name: `${prefix}_cached_favicons_size`, + help: 'Number of domain favicons in cache', +}); + +export const cachedLnurlsSizeGauge: Gauge = new Gauge({ + name: `${prefix}_cached_lnurls_size`, + help: 'Number of LNURL details in cache', +}); + +export const cachedNip05sSizeGauge: Gauge = new Gauge({ + name: `${prefix}_cached_nip05s_size`, + help: 'Number of NIP-05 results in cache', +}); + +export const cachedLinkPreviewSizeGauge: Gauge = new Gauge({ + name: 
`${prefix}_cached_link_previews_size`, + help: 'Number of link previews in cache', +}); + +export const cachedTranslationsSizeGauge: Gauge = new Gauge({ + name: `${prefix}_cached_translations_size`, + help: 'Number of translated statuses in cache', +}); + +export const internalSubscriptionsSizeGauge: Gauge = new Gauge({ + name: `${prefix}_internal_subscriptions_size`, + help: "Number of active subscriptions to Ditto's internal relay", +}); + +export const relayPoolRelaysSizeGauge: Gauge<'ready_state'> = new Gauge({ + name: `${prefix}_relay_pool_relays_size`, + help: 'Number of relays in the relay pool', + labelNames: ['ready_state'], +}); + +export const relayPoolSubscriptionsSizeGauge: Gauge = new Gauge({ + name: `${prefix}_relay_pool_subscriptions_size`, + help: 'Number of active subscriptions to the relay pool', +}); + +export const webPushNotificationsCounter: Counter<'type'> = new Counter({ + name: `${prefix}_web_push_notifications_total`, + help: 'Total number of Web Push notifications sent', + labelNames: ['type'], +}); diff --git a/packages/nip98/deno.json b/packages/nip98/deno.json new file mode 100644 index 00000000..108e1bb8 --- /dev/null +++ b/packages/nip98/deno.json @@ -0,0 +1,7 @@ +{ + "name": "@ditto/nip98", + "version": "1.0.0", + "exports": { + ".": "./nip98.ts" + } +} diff --git a/src/utils/nip98.ts b/packages/nip98/nip98.ts similarity index 71% rename from src/utils/nip98.ts rename to packages/nip98/nip98.ts index f83fcddb..b0815f91 100644 --- a/src/utils/nip98.ts +++ b/packages/nip98/nip98.ts @@ -1,11 +1,10 @@ -import { NostrEvent, NSchema as n } from '@nostrify/nostrify'; +import { type NostrEvent, NSchema as n } from '@nostrify/nostrify'; import { encodeHex } from '@std/encoding/hex'; -import { EventTemplate, nip13 } from 'nostr-tools'; +import { type EventTemplate, nip13 } from 'nostr-tools'; -import { decode64Schema } from '@/schema.ts'; -import { signedEventSchema } from '@/schemas/nostr.ts'; -import { eventAge, findTag, nostrNow } from 
'@/utils.ts'; -import { Time } from '@/utils/time.ts'; +import { decode64Schema, signedEventSchema } from './schema.ts'; + +import type { z } from 'zod'; /** Decode a Nostr event from a base64 encoded string. */ const decode64EventSchema = decode64Schema.pipe(n.json()).pipe(signedEventSchema); @@ -21,7 +20,10 @@ interface ParseAuthRequestOpts { /** Parse the auth event from a Request, returning a zod SafeParse type. */ // deno-lint-ignore require-await -async function parseAuthRequest(req: Request, opts: ParseAuthRequestOpts = {}) { +async function parseAuthRequest( + req: Request, + opts: ParseAuthRequestOpts = {}, +): Promise | z.SafeParseError> { const header = req.headers.get('authorization'); const base64 = header?.match(/^Nostr (.+)$/)?.[1]; const result = decode64EventSchema.safeParse(base64); @@ -31,8 +33,12 @@ async function parseAuthRequest(req: Request, opts: ParseAuthRequestOpts = {}) { } /** Compare the auth event with the request, returning a zod SafeParse type. */ -function validateAuthEvent(req: Request, event: NostrEvent, opts: ParseAuthRequestOpts = {}) { - const { maxAge = Time.minutes(1), validatePayload = true, pow = 0 } = opts; +function validateAuthEvent( + req: Request, + event: NostrEvent, + opts: ParseAuthRequestOpts = {}, +): Promise> { + const { maxAge = 60_000, validatePayload = true, pow = 0 } = opts; const schema = signedEventSchema .refine((event) => event.kind === 27235, 'Event must be kind 27235') @@ -87,4 +93,19 @@ function tagValue(event: NostrEvent, tagName: string): string | undefined { return findTag(event.tags, tagName)?.[1]; } +/** Get the current time in Nostr format. */ +const nostrNow = (): number => Math.floor(Date.now() / 1000); + +/** Convenience function to convert Nostr dates into native Date objects. */ +const nostrDate = (seconds: number): Date => new Date(seconds * 1000); + +/** Return the event's age in milliseconds. 
*/ +function eventAge(event: NostrEvent): number { + return Date.now() - nostrDate(event.created_at).getTime(); +} + +function findTag(tags: string[][], name: string): string[] | undefined { + return tags.find((tag) => tag[0] === name); +} + export { buildAuthEventTemplate, parseAuthRequest, type ParseAuthRequestOpts, validateAuthEvent }; diff --git a/packages/nip98/schema.ts b/packages/nip98/schema.ts new file mode 100644 index 00000000..a0cf627c --- /dev/null +++ b/packages/nip98/schema.ts @@ -0,0 +1,20 @@ +import { NSchema as n } from '@nostrify/nostrify'; +import { getEventHash, verifyEvent } from 'nostr-tools'; +import z from 'zod'; + +/** https://developer.mozilla.org/en-US/docs/Glossary/Base64#the_unicode_problem */ +export const decode64Schema = z.string().transform((value, ctx) => { + try { + const binString = atob(value); + const bytes = Uint8Array.from(binString, (m) => m.codePointAt(0)!); + return new TextDecoder().decode(bytes); + } catch (_e) { + ctx.addIssue({ code: z.ZodIssueCode.custom, message: 'Invalid base64', fatal: true }); + return z.NEVER; + } +}); + +/** Nostr event schema that also verifies the event's signature. 
*/ +export const signedEventSchema = n.event() + .refine((event) => event.id === getEventHash(event), 'Event ID does not match hash') + .refine(verifyEvent, 'Event signature is invalid'); diff --git a/src/policies/MuteListPolicy.test.ts b/packages/policies/MuteListPolicy.test.ts similarity index 65% rename from src/policies/MuteListPolicy.test.ts rename to packages/policies/MuteListPolicy.test.ts index 89d7d993..21c29cbc 100644 --- a/src/policies/MuteListPolicy.test.ts +++ b/packages/policies/MuteListPolicy.test.ts @@ -1,8 +1,8 @@ import { MockRelay } from '@nostrify/nostrify/test'; import { assertEquals } from '@std/assert'; -import { UserStore } from '@/storages/UserStore.ts'; -import { MuteListPolicy } from '@/policies/MuteListPolicy.ts'; + +import { MuteListPolicy } from './MuteListPolicy.ts'; import userBlack from '~/fixtures/events/kind-0-black.json' with { type: 'json' }; import userMe from '~/fixtures/events/event-0-makes-repost-with-quote-repost.json' with { type: 'json' }; @@ -16,18 +16,16 @@ Deno.test('block event: muted user cannot post', async () => { const blockEventCopy = structuredClone(blockEvent); const event1authorUserMeCopy = structuredClone(event1authorUserMe); - const db = new MockRelay(); + const relay = new MockRelay(); + const policy = new MuteListPolicy(userBlack.pubkey, relay); - const store = new UserStore(userBlackCopy.pubkey, db); - const policy = new MuteListPolicy(userBlack.pubkey, db); - - await store.event(blockEventCopy); - await store.event(userBlackCopy); - await store.event(userMeCopy); + await relay.event(blockEventCopy); + await relay.event(userBlackCopy); + await relay.event(userMeCopy); const ok = await policy.call(event1authorUserMeCopy); - assertEquals(ok, ['OK', event1authorUserMeCopy.id, false, 'blocked: Your account has been deactivated.']); + assertEquals(ok, ['OK', event1authorUserMeCopy.id, false, 'blocked: account blocked']); }); Deno.test('allow event: user is NOT muted because there is no muted event', async () 
=> { @@ -35,13 +33,11 @@ Deno.test('allow event: user is NOT muted because there is no muted event', asyn const userMeCopy = structuredClone(userMe); const event1authorUserMeCopy = structuredClone(event1authorUserMe); - const db = new MockRelay(); + const relay = new MockRelay(); + const policy = new MuteListPolicy(userBlack.pubkey, relay); - const store = new UserStore(userBlackCopy.pubkey, db); - const policy = new MuteListPolicy(userBlack.pubkey, db); - - await store.event(userBlackCopy); - await store.event(userMeCopy); + await relay.event(userBlackCopy); + await relay.event(userMeCopy); const ok = await policy.call(event1authorUserMeCopy); @@ -55,16 +51,15 @@ Deno.test('allow event: user is NOT muted because he is not in mute event', asyn const blockEventCopy = structuredClone(blockEvent); const event1copy = structuredClone(event1); - const db = new MockRelay(); + const relay = new MockRelay(); - const store = new UserStore(userBlackCopy.pubkey, db); - const policy = new MuteListPolicy(userBlack.pubkey, db); + const policy = new MuteListPolicy(userBlack.pubkey, relay); - await store.event(userBlackCopy); - await store.event(blockEventCopy); - await store.event(userMeCopy); - await store.event(event1copy); - await store.event(event1authorUserMeCopy); + await relay.event(userBlackCopy); + await relay.event(blockEventCopy); + await relay.event(userMeCopy); + await relay.event(event1copy); + await relay.event(event1authorUserMeCopy); const ok = await policy.call(event1copy); diff --git a/src/policies/MuteListPolicy.ts b/packages/policies/MuteListPolicy.ts similarity index 53% rename from src/policies/MuteListPolicy.ts rename to packages/policies/MuteListPolicy.ts index 130d10df..1025e75b 100644 --- a/src/policies/MuteListPolicy.ts +++ b/packages/policies/MuteListPolicy.ts @@ -1,16 +1,21 @@ -import { NostrEvent, NostrRelayOK, NPolicy, NStore } from '@nostrify/nostrify'; - -import { getTagSet } from '@/utils/tags.ts'; +import type { NostrEvent, NostrRelayOK, 
NPolicy, NStore } from '@nostrify/nostrify'; export class MuteListPolicy implements NPolicy { constructor(private pubkey: string, private store: NStore) {} async call(event: NostrEvent): Promise { + const pubkeys = new Set(); + const [muteList] = await this.store.query([{ authors: [this.pubkey], kinds: [10000], limit: 1 }]); - const pubkeys = getTagSet(muteList?.tags ?? [], 'p'); + + for (const [name, value] of muteList?.tags ?? []) { + if (name === 'p') { + pubkeys.add(value); + } + } if (pubkeys.has(event.pubkey)) { - return ['OK', event.id, false, 'blocked: Your account has been deactivated.']; + return ['OK', event.id, false, 'blocked: account blocked']; } return ['OK', event.id, true, '']; diff --git a/packages/policies/deno.json b/packages/policies/deno.json new file mode 100644 index 00000000..ca190883 --- /dev/null +++ b/packages/policies/deno.json @@ -0,0 +1,7 @@ +{ + "name": "@ditto/policies", + "version": "1.1.0", + "exports": { + ".": "./mod.ts" + } +} diff --git a/packages/policies/mod.ts b/packages/policies/mod.ts new file mode 100644 index 00000000..9748a4cf --- /dev/null +++ b/packages/policies/mod.ts @@ -0,0 +1 @@ +export { MuteListPolicy } from './MuteListPolicy.ts'; diff --git a/packages/ratelimiter/MemoryRateLimiter.test.ts b/packages/ratelimiter/MemoryRateLimiter.test.ts new file mode 100644 index 00000000..2da6b2d1 --- /dev/null +++ b/packages/ratelimiter/MemoryRateLimiter.test.ts @@ -0,0 +1,31 @@ +import { assertEquals, assertThrows } from '@std/assert'; + +import { MemoryRateLimiter } from './MemoryRateLimiter.ts'; +import { RateLimitError } from './RateLimitError.ts'; + +Deno.test('MemoryRateLimiter', async (t) => { + const limit = 5; + const window = 100; + + using limiter = new MemoryRateLimiter({ limit, window }); + + await t.step('can hit up to limit', () => { + for (let i = 0; i < limit; i++) { + const client = limiter.client('test'); + assertEquals(client.hits, i); + client.hit(); + } + }); + + await t.step('throws when hit if limit 
exceeded', () => { + assertThrows(() => limiter.client('test').hit(), RateLimitError); + }); + + await t.step('can hit after window resets', async () => { + await new Promise((resolve) => setTimeout(resolve, window + 1)); + + const client = limiter.client('test'); + assertEquals(client.hits, 0); + client.hit(); + }); +}); diff --git a/packages/ratelimiter/MemoryRateLimiter.ts b/packages/ratelimiter/MemoryRateLimiter.ts new file mode 100644 index 00000000..15546fd0 --- /dev/null +++ b/packages/ratelimiter/MemoryRateLimiter.ts @@ -0,0 +1,78 @@ +import { RateLimitError } from './RateLimitError.ts'; + +import type { RateLimiter, RateLimiterClient } from './types.ts'; + +interface MemoryRateLimiterOpts { + limit: number; + window: number; +} + +export class MemoryRateLimiter implements RateLimiter { + private iid: number; + + private previous = new Map(); + private current = new Map(); + + constructor(private opts: MemoryRateLimiterOpts) { + this.iid = setInterval(() => { + this.previous = this.current; + this.current = new Map(); + }, opts.window); + } + + get limit(): number { + return this.opts.limit; + } + + get window(): number { + return this.opts.window; + } + + client(key: string): RateLimiterClient { + const curr = this.current.get(key); + const prev = this.previous.get(key); + + if (curr) { + return curr; + } + + if (prev && prev.resetAt > new Date()) { + this.current.set(key, prev); + this.previous.delete(key); + return prev; + } + + const next = new MemoryRateLimiterClient(this); + this.current.set(key, next); + return next; + } + + [Symbol.dispose](): void { + clearInterval(this.iid); + } +} + +class MemoryRateLimiterClient implements RateLimiterClient { + private _hits: number = 0; + readonly resetAt: Date; + + constructor(private limiter: MemoryRateLimiter) { + this.resetAt = new Date(Date.now() + limiter.window); + } + + get hits(): number { + return this._hits; + } + + get remaining(): number { + return this.limiter.limit - this.hits; + } + + hit(n: 
number = 1): void { + this._hits += n; + + if (this.remaining < 0) { + throw new RateLimitError(this.limiter, this); + } + } +} diff --git a/packages/ratelimiter/MultiRateLimiter.test.ts b/packages/ratelimiter/MultiRateLimiter.test.ts new file mode 100644 index 00000000..9b1fd648 --- /dev/null +++ b/packages/ratelimiter/MultiRateLimiter.test.ts @@ -0,0 +1,41 @@ +import { assertEquals, assertThrows } from '@std/assert'; + +import { MemoryRateLimiter } from './MemoryRateLimiter.ts'; +import { MultiRateLimiter } from './MultiRateLimiter.ts'; + +Deno.test('MultiRateLimiter', async (t) => { + using limiter1 = new MemoryRateLimiter({ limit: 5, window: 100 }); + using limiter2 = new MemoryRateLimiter({ limit: 8, window: 200 }); + + const limiter = new MultiRateLimiter([limiter1, limiter2]); + + await t.step('can hit up to first limit', () => { + for (let i = 0; i < limiter1.limit; i++) { + const client = limiter.client('test'); + assertEquals(client.hits, i); + client.hit(); + } + }); + + await t.step('throws when hit if first limit exceeded', () => { + assertThrows(() => limiter.client('test').hit(), Error); + }); + + await t.step('can hit up to second limit after the first window resets', async () => { + await new Promise((resolve) => setTimeout(resolve, limiter1.window + 1)); + + const limit = limiter2.limit - limiter1.limit - 1; + + for (let i = 0; i < limit; i++) { + const client = limiter.client('test'); + assertEquals(client.hits, i); + client.hit(); + } + }); + + await t.step('throws when hit if second limit exceeded', () => { + assertEquals(limiter.client('test').limiter, limiter1); + assertThrows(() => limiter.client('test').hit(), Error); + assertEquals(limiter.client('test').limiter, limiter2); + }); +}); diff --git a/packages/ratelimiter/MultiRateLimiter.ts b/packages/ratelimiter/MultiRateLimiter.ts new file mode 100644 index 00000000..189ca177 --- /dev/null +++ b/packages/ratelimiter/MultiRateLimiter.ts @@ -0,0 +1,51 @@ +import type { RateLimiter, 
RateLimiterClient } from './types.ts'; + +export class MultiRateLimiter { + constructor(private limiters: RateLimiter[]) {} + + client(key: string): MultiRateLimiterClient { + return new MultiRateLimiterClient(key, this.limiters); + } +} + +class MultiRateLimiterClient implements RateLimiterClient { + constructor(private key: string, private limiters: RateLimiter[]) { + if (!limiters.length) { + throw new Error('No limiters provided'); + } + } + + /** Returns the _active_ limiter, which is either the first exceeded or the first. */ + get limiter(): RateLimiter { + const exceeded = this.limiters.find((limiter) => limiter.client(this.key).remaining < 0); + return exceeded ?? this.limiters[0]; + } + + get hits(): number { + return this.limiter.client(this.key).hits; + } + + get resetAt(): Date { + return this.limiter.client(this.key).resetAt; + } + + get remaining(): number { + return this.limiter.client(this.key).remaining; + } + + hit(n?: number): void { + let error: unknown; + + for (const limiter of this.limiters) { + try { + limiter.client(this.key).hit(n); + } catch (e) { + error ??= e; + } + } + + if (error instanceof Error) { + throw error; + } + } +} diff --git a/packages/ratelimiter/RateLimitError.ts b/packages/ratelimiter/RateLimitError.ts new file mode 100644 index 00000000..da3a4fd8 --- /dev/null +++ b/packages/ratelimiter/RateLimitError.ts @@ -0,0 +1,10 @@ +import type { RateLimiter, RateLimiterClient } from './types.ts'; + +export class RateLimitError extends Error { + constructor( + readonly limiter: RateLimiter, + readonly client: RateLimiterClient, + ) { + super('Rate limit exceeded'); + } +} diff --git a/packages/ratelimiter/deno.json b/packages/ratelimiter/deno.json new file mode 100644 index 00000000..66e97171 --- /dev/null +++ b/packages/ratelimiter/deno.json @@ -0,0 +1,7 @@ +{ + "name": "@ditto/ratelimiter", + "version": "1.1.0", + "exports": { + ".": "./mod.ts" + } +} diff --git a/packages/ratelimiter/mod.ts b/packages/ratelimiter/mod.ts new 
file mode 100644 index 00000000..58bbbeaa --- /dev/null +++ b/packages/ratelimiter/mod.ts @@ -0,0 +1,5 @@ +export { MemoryRateLimiter } from './MemoryRateLimiter.ts'; +export { MultiRateLimiter } from './MultiRateLimiter.ts'; +export { RateLimitError } from './RateLimitError.ts'; + +export type { RateLimiter, RateLimiterClient } from './types.ts'; diff --git a/packages/ratelimiter/types.ts b/packages/ratelimiter/types.ts new file mode 100644 index 00000000..c1a6b2f0 --- /dev/null +++ b/packages/ratelimiter/types.ts @@ -0,0 +1,12 @@ +export interface RateLimiter extends Disposable { + readonly limit: number; + readonly window: number; + client(key: string): RateLimiterClient; +} + +export interface RateLimiterClient { + readonly hits: number; + readonly resetAt: Date; + readonly remaining: number; + hit(n?: number): void; +} diff --git a/packages/translators/DeepLTranslator.test.ts b/packages/translators/DeepLTranslator.test.ts new file mode 100644 index 00000000..a688f135 --- /dev/null +++ b/packages/translators/DeepLTranslator.test.ts @@ -0,0 +1,101 @@ +import { detectLanguage } from '@ditto/lang'; +import { assert, assertEquals } from '@std/assert'; + +import { DeepLTranslator } from './DeepLTranslator.ts'; + +Deno.test('DeepL translation with source language omitted', async () => { + const translator = mockDeepL({ + translations: [ + { detected_source_language: 'PT', text: 'Good morning friends' }, + { detected_source_language: 'PT', text: 'My name is Patrick' }, + { + detected_source_language: 'PT', + text: + 'I will live in America, I promise. But first, I should mention that lande is interpreting this text as Italian, how strange.', + }, + ], + }); + + const data = await translator.translate( + [ + 'Bom dia amigos', + 'Meu nome é Patrick', + 'Eu irei morar na America, eu prometo. 
Mas antes, eu devo mencionar que o lande está interpretando este texto como italiano, que estranho.', + ], + undefined, + 'en', + ); + + assertEquals(data.sourceLang, 'pt'); + assertEquals(detectLanguage(data.results[0], 0), 'en'); + assertEquals(detectLanguage(data.results[1], 0), 'en'); + assertEquals(detectLanguage(data.results[2], 0), 'en'); +}); + +Deno.test('DeepL translation with source language set', async () => { + const translator = mockDeepL({ + translations: [ + { detected_source_language: 'PT', text: 'Good morning friends' }, + { detected_source_language: 'PT', text: 'My name is Patrick' }, + { + detected_source_language: 'PT', + text: + 'I will live in America, I promise. But first, I should mention that lande is interpreting this text as Italian, how strange.', + }, + ], + }); + + const data = await translator.translate( + [ + 'Bom dia amigos', + 'Meu nome é Patrick', + 'Eu irei morar na America, eu prometo. Mas antes, eu devo mencionar que o lande está interpretando este texto como italiano, que estranho.', + ], + 'pt', + 'en', + ); + + assertEquals(data.sourceLang, 'pt'); + assertEquals(detectLanguage(data.results[0], 0), 'en'); + assertEquals(detectLanguage(data.results[1], 0), 'en'); + assertEquals(detectLanguage(data.results[2], 0), 'en'); +}); + +Deno.test("DeepL translation doesn't alter Nostr URIs", async () => { + const translator = mockDeepL({ + translations: [ + { + detected_source_language: 'EN', + text: + 'Graças ao trabalho de nostr:nprofile1qy2hwumn8ghj7erfw36x7tnsw43z7un9d3shjqpqgujeqakgt7fyp6zjggxhyy7ft623qtcaay5lkc8n8gkry4cvnrzqep59se e nostr:nprofile1qy2hwumn8ghj7erfw36x7tnsw43z7un9d3shjqpqe6tnvlr46lv3lwdu80r07kanhk6jcxy5r07w9umgv9kuhu9dl5hsz44l8s , agora é possível filtrar o feed global por idioma no #Ditto!', + }, + ], + }); + + const patrick = + 'nostr:nprofile1qy2hwumn8ghj7erfw36x7tnsw43z7un9d3shjqpqgujeqakgt7fyp6zjggxhyy7ft623qtcaay5lkc8n8gkry4cvnrzqep59se'; + const danidfra = + 
'nostr:nprofile1qy2hwumn8ghj7erfw36x7tnsw43z7un9d3shjqpqe6tnvlr46lv3lwdu80r07kanhk6jcxy5r07w9umgv9kuhu9dl5hsz44l8s'; + + const input = + `Thanks to work by ${patrick} and ${danidfra} , it's now possible to filter the global feed by language on #Ditto!`; + + const { results: [output] } = await translator.translate([input], 'en', 'pt'); + + assert(output.includes(patrick)); + assert(output.includes(danidfra)); +}); + +interface DeepLResponse { + translations: { + detected_source_language: string; + text: string; + }[]; +} + +function mockDeepL(json: DeepLResponse): DeepLTranslator { + return new DeepLTranslator({ + apiKey: 'deepl', + fetch: () => Promise.resolve(new Response(JSON.stringify(json))), + }); +} diff --git a/src/translators/DeepLTranslator.ts b/packages/translators/DeepLTranslator.ts similarity index 73% rename from src/translators/DeepLTranslator.ts rename to packages/translators/DeepLTranslator.ts index 26067379..673c6e07 100644 --- a/src/translators/DeepLTranslator.ts +++ b/packages/translators/DeepLTranslator.ts @@ -1,8 +1,9 @@ -import { LanguageCode } from 'iso-639-1'; import { z } from 'zod'; -import { DittoTranslator } from '@/interfaces/DittoTranslator.ts'; -import { languageSchema } from '@/schema.ts'; +import { languageSchema } from './schema.ts'; + +import type { LanguageCode } from 'iso-639-1'; +import type { DittoTranslator } from './DittoTranslator.ts'; interface DeepLTranslatorOpts { /** DeepL base URL to use. 
Default: 'https://api.deepl.com' */ @@ -31,12 +32,12 @@ export class DeepLTranslator implements DittoTranslator { source: LanguageCode | undefined, dest: LanguageCode, opts?: { signal?: AbortSignal }, - ) { + ): Promise<{ results: string[]; sourceLang: LanguageCode }> { const { translations } = await this.translateMany(texts, source, dest, opts); return { results: translations.map((value) => value.text), - source_lang: translations[0]?.detected_source_language as LanguageCode, + sourceLang: translations[0]?.detected_source_language, }; } @@ -71,7 +72,13 @@ export class DeepLTranslator implements DittoTranslator { const json = await response.json(); if (!response.ok) { - throw new Error(json['message']); + const result = DeepLTranslator.errorSchema().safeParse(json); + + if (result.success) { + throw new Error(result.data.message); + } else { + throw new Error(`Unexpected DeepL error: ${response.statusText} (${response.status})`); + } } return DeepLTranslator.schema().parse(json); @@ -83,10 +90,17 @@ export class DeepLTranslator implements DittoTranslator { return z.object({ translations: z.array( z.object({ - detected_source_language: languageSchema, + detected_source_language: z.string().transform((val) => val.toLowerCase()).pipe(languageSchema), text: z.string(), }), ), }); } + + /** DeepL error response schema. */ + private static errorSchema() { + return z.object({ + message: z.string(), + }); + } } diff --git a/src/interfaces/DittoTranslator.ts b/packages/translators/DittoTranslator.ts similarity index 89% rename from src/interfaces/DittoTranslator.ts rename to packages/translators/DittoTranslator.ts index 7e5e1d50..2a9fb7db 100644 --- a/src/interfaces/DittoTranslator.ts +++ b/packages/translators/DittoTranslator.ts @@ -14,5 +14,5 @@ export interface DittoTranslator { targetLanguage: LanguageCode, /** Custom options. 
*/ opts?: { signal?: AbortSignal }, - ): Promise<{ results: string[]; source_lang: LanguageCode }>; + ): Promise<{ results: string[]; sourceLang: LanguageCode }>; } diff --git a/packages/translators/LibreTranslateTranslator.test.ts b/packages/translators/LibreTranslateTranslator.test.ts new file mode 100644 index 00000000..94da0ec0 --- /dev/null +++ b/packages/translators/LibreTranslateTranslator.test.ts @@ -0,0 +1,89 @@ +import { detectLanguage } from '@ditto/lang'; +import { assertEquals } from '@std/assert'; + +import { LibreTranslateTranslator } from './LibreTranslateTranslator.ts'; + +Deno.test('LibreTranslate translation with source language omitted', async () => { + const translator = mockLibreTranslate(); + + const data = await translator.translate( + [ + 'Bom dia amigos', + 'Meu nome é Patrick, um nome belo ou feio? A questão é mais profunda do que parece.', + 'A respiração é mais importante do que comer e tomar agua.', + ], + undefined, + 'ca', + ); + + assertEquals(data.sourceLang, 'pt'); + assertEquals(detectLanguage(data.results[0], 0), 'ca'); + assertEquals(detectLanguage(data.results[1], 0), 'ca'); + assertEquals(detectLanguage(data.results[2], 0), 'ca'); +}); + +Deno.test('LibreTranslate translation with source language set', async () => { + const translator = mockLibreTranslate(); + + const data = await translator.translate( + [ + 'Bom dia amigos', + 'Meu nome é Patrick, um nome belo ou feio? 
A questão é mais profunda do que parece.', + 'A respiração é mais importante do que comer e tomar agua.', + ], + 'pt', + 'ca', + ); + + assertEquals(data.sourceLang, 'pt'); + assertEquals(detectLanguage(data.results[0], 0), 'ca'); + assertEquals(detectLanguage(data.results[1], 0), 'ca'); + assertEquals(detectLanguage(data.results[2], 0), 'ca'); +}); + +function mockLibreTranslate(): LibreTranslateTranslator { + return new LibreTranslateTranslator({ + apiKey: 'libretranslate', + fetch: async (input, init) => { + const req = new Request(input, init); + const body = await req.json(); + + switch (body.q) { + case 'Bom dia amigos': + return jsonResponse({ + detectedLanguage: { language: 'pt' }, + translatedText: 'Bon dia, amics.', + }); + case 'Meu nome é Patrick, um nome belo ou feio? A questão é mais profunda do que parece.': + return jsonResponse({ + detectedLanguage: { language: 'pt' }, + translatedText: 'Em dic Patrick, un nom molt o lleig? La pregunta és més profunda del que sembla.', + }); + case 'A respiração é mais importante do que comer e tomar agua.': + return jsonResponse({ + detectedLanguage: { language: 'pt' }, + translatedText: 'La respiració és més important que menjar i prendre aigua.', + }); + } + + return new Response(JSON.stringify({ error: 'Not found' }), { status: 404 }); + }, + }); +} + +interface LibreTranslateResponse { + translatedText: string; + detectedLanguage?: { + language: string; + }; +} + +function jsonResponse(json: LibreTranslateResponse): Response { + const body = JSON.stringify(json); + + return new Response(body, { + headers: { + 'Content-Type': 'application/json', + }, + }); +} diff --git a/src/translators/LibreTranslateTranslator.ts b/packages/translators/LibreTranslateTranslator.ts similarity index 72% rename from src/translators/LibreTranslateTranslator.ts rename to packages/translators/LibreTranslateTranslator.ts index ef7fb1f8..cc978e90 100644 --- a/src/translators/LibreTranslateTranslator.ts +++ 
b/packages/translators/LibreTranslateTranslator.ts @@ -1,8 +1,9 @@ -import { LanguageCode } from 'iso-639-1'; import { z } from 'zod'; -import { DittoTranslator } from '@/interfaces/DittoTranslator.ts'; -import { languageSchema } from '@/schema.ts'; +import { languageSchema } from './schema.ts'; + +import type { LanguageCode } from 'iso-639-1'; +import type { DittoTranslator } from './DittoTranslator.ts'; interface LibreTranslateTranslatorOpts { /** Libretranslate endpoint to use. Default: 'https://libretranslate.com' */ @@ -31,14 +32,14 @@ export class LibreTranslateTranslator implements DittoTranslator { source: LanguageCode | undefined, dest: LanguageCode, opts?: { signal?: AbortSignal }, - ) { + ): Promise<{ results: string[]; sourceLang: LanguageCode }> { const translations = await Promise.all( texts.map((text) => this.translateOne(text, source, dest, 'html', { signal: opts?.signal })), ); return { results: translations.map((value) => value.translatedText), - source_lang: (translations[0]?.detectedLanguage?.language ?? source) as LanguageCode, // cast is ok + sourceLang: (translations[0]?.detectedLanguage?.language ?? source) as LanguageCode, // cast is ok }; } @@ -70,12 +71,20 @@ export class LibreTranslateTranslator implements DittoTranslator { const response = await this.fetch(request); const json = await response.json(); - if (!response.ok) { - throw new Error(json['error']); - } - const data = LibreTranslateTranslator.schema().parse(json); - return data; + console.log(json); + + if (!response.ok) { + const result = LibreTranslateTranslator.errorSchema().safeParse(json); + + if (result.success) { + throw new Error(result.data.error); + } else { + throw new Error(`Unexpected LibreTranslate error: ${response.statusText} (${response.status})`); + } + } + + return LibreTranslateTranslator.schema().parse(json); } /** Libretranslate response schema. 
@@ -89,4 +98,11 @@ export class LibreTranslateTranslator implements DittoTranslator { }).optional(), }); } + + /** Libretranslate error response schema. */ + private static errorSchema() { + return z.object({ + error: z.string(), + }); + } } diff --git a/packages/translators/deno.json b/packages/translators/deno.json new file mode 100644 index 00000000..5d603f3a --- /dev/null +++ b/packages/translators/deno.json @@ -0,0 +1,7 @@ +{ + "name": "@ditto/translators", + "version": "1.1.0", + "exports": { + ".": "./mod.ts" + } +} diff --git a/packages/translators/mod.ts b/packages/translators/mod.ts new file mode 100644 index 00000000..e60f19c7 --- /dev/null +++ b/packages/translators/mod.ts @@ -0,0 +1,4 @@ +export { DeepLTranslator } from './DeepLTranslator.ts'; +export { LibreTranslateTranslator } from './LibreTranslateTranslator.ts'; + +export type { DittoTranslator } from './DittoTranslator.ts'; diff --git a/packages/translators/schema.test.ts b/packages/translators/schema.test.ts new file mode 100644 index 00000000..4ca84adc --- /dev/null +++ b/packages/translators/schema.test.ts @@ -0,0 +1,8 @@ +import { assertEquals } from '@std/assert'; + +import { languageSchema } from './schema.ts'; + +Deno.test('languageSchema', () => { + assertEquals(languageSchema.safeParse('pt').success, true); + assertEquals(languageSchema.safeParse('PT').success, false); +}); diff --git a/packages/translators/schema.ts b/packages/translators/schema.ts new file mode 100644 index 00000000..803ef1b0 --- /dev/null +++ b/packages/translators/schema.ts @@ -0,0 +1,8 @@ +import ISO6391 from 'iso-639-1'; +import z from 'zod'; + +/** Value is a ISO-639-1 language code. 
*/ +export const languageSchema = z.string().refine( + (val) => ISO6391.validate(val), + { message: 'Not a valid language in ISO-639-1 format' }, +); diff --git a/src/uploaders/DenoUploader.ts b/packages/uploaders/DenoUploader.ts similarity index 95% rename from src/uploaders/DenoUploader.ts rename to packages/uploaders/DenoUploader.ts index fd30d8c6..a97bdb52 100644 --- a/src/uploaders/DenoUploader.ts +++ b/packages/uploaders/DenoUploader.ts @@ -1,10 +1,11 @@ import { join } from 'node:path'; -import { NUploader } from '@nostrify/nostrify'; import { crypto } from '@std/crypto'; import { encodeHex } from '@std/encoding/hex'; import { extensionsByType } from '@std/media-types'; +import type { NUploader } from '@nostrify/nostrify'; + export interface DenoUploaderOpts { baseUrl: string; dir: string; diff --git a/src/uploaders/IPFSUploader.ts b/packages/uploaders/IPFSUploader.ts similarity index 96% rename from src/uploaders/IPFSUploader.ts rename to packages/uploaders/IPFSUploader.ts index 7bf5165b..cf9c1516 100644 --- a/src/uploaders/IPFSUploader.ts +++ b/packages/uploaders/IPFSUploader.ts @@ -1,6 +1,7 @@ -import { NUploader } from '@nostrify/nostrify'; import { z } from 'zod'; +import type { NUploader } from '@nostrify/nostrify'; + export interface IPFSUploaderOpts { baseUrl: string; apiUrl?: string; diff --git a/src/uploaders/S3Uploader.ts b/packages/uploaders/S3Uploader.ts similarity index 85% rename from src/uploaders/S3Uploader.ts rename to packages/uploaders/S3Uploader.ts index b74796ab..551a554d 100644 --- a/src/uploaders/S3Uploader.ts +++ b/packages/uploaders/S3Uploader.ts @@ -1,12 +1,11 @@ import { join } from 'node:path'; import { S3Client } from '@bradenmacdonald/s3-lite-client'; -import { NUploader } from '@nostrify/nostrify'; import { crypto } from '@std/crypto'; import { encodeHex } from '@std/encoding/hex'; import { extensionsByType } from '@std/media-types'; -import { Conf } from '@/config.ts'; +import type { NUploader } from '@nostrify/nostrify'; 
export interface S3UploaderOpts { endPoint: string; @@ -18,13 +17,14 @@ export interface S3UploaderOpts { port?: number; sessionToken?: string; useSSL?: boolean; + baseUrl?: string; } /** S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more. */ export class S3Uploader implements NUploader { private client: S3Client; - constructor(opts: S3UploaderOpts) { + constructor(private opts: S3UploaderOpts) { this.client = new S3Client(opts); } @@ -40,10 +40,10 @@ export class S3Uploader implements NUploader { }, }); - const { pathStyle, bucket } = Conf.s3; + const { pathStyle, bucket, baseUrl } = this.opts; const path = (pathStyle && bucket) ? join(bucket, filename) : filename; - const url = new URL(path, Conf.mediaDomain).toString(); + const url = new URL(path, baseUrl).toString(); return [ ['url', url], diff --git a/packages/uploaders/deno.json b/packages/uploaders/deno.json new file mode 100644 index 00000000..b37b8aa7 --- /dev/null +++ b/packages/uploaders/deno.json @@ -0,0 +1,7 @@ +{ + "name": "@ditto/uploaders", + "version": "1.1.0", + "exports": { + ".": "./mod.ts" + } +} diff --git a/packages/uploaders/mod.ts b/packages/uploaders/mod.ts new file mode 100644 index 00000000..c5405344 --- /dev/null +++ b/packages/uploaders/mod.ts @@ -0,0 +1,3 @@ +export { DenoUploader } from './DenoUploader.ts'; +export { IPFSUploader } from './IPFSUploader.ts'; +export { S3Uploader } from './S3Uploader.ts'; diff --git a/scripts/admin-event.ts b/scripts/admin-event.ts index 00711993..bec49460 100644 --- a/scripts/admin-event.ts +++ b/scripts/admin-event.ts @@ -1,13 +1,17 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { JsonParseStream } from '@std/json/json-parse-stream'; import { TextLineStream } from '@std/streams/text-line-stream'; -import { AdminSigner } from '@/signers/AdminSigner.ts'; -import { Storages } from '@/storages.ts'; -import { type EventStub } from '@/utils/api.ts'; -import { nostrNow } from '@/utils.ts'; 
+import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; +import { type EventStub } from '../packages/ditto/utils/api.ts'; +import { nostrNow } from '../packages/ditto/utils.ts'; -const signer = new AdminSigner(); -const store = await Storages.db(); +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); + +const { signer } = conf; const readable = Deno.stdin.readable .pipeThrough(new TextDecoderStream()) @@ -22,7 +26,7 @@ for await (const t of readable) { ...t as EventStub, }); - await store.event(event); + await relay.event(event); } Deno.exit(0); diff --git a/scripts/admin-role.ts b/scripts/admin-role.ts index d275329f..59b95878 100644 --- a/scripts/admin-role.ts +++ b/scripts/admin-role.ts @@ -1,15 +1,20 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { NSchema } from '@nostrify/nostrify'; import { nip19 } from 'nostr-tools'; -import { AdminSigner } from '@/signers/AdminSigner.ts'; -import { Storages } from '@/storages.ts'; -import { nostrNow } from '@/utils.ts'; +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; +import { nostrNow } from '../packages/ditto/utils.ts'; -const store = await Storages.db(); +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); const [pubkeyOrNpub, role] = Deno.args; const pubkey = pubkeyOrNpub.startsWith('npub1') ? 
nip19.decode(pubkeyOrNpub as `npub1${string}`).data : pubkeyOrNpub; +const { signer } = conf; + if (!NSchema.id().safeParse(pubkey).success) { console.error('Invalid pubkey'); Deno.exit(1); @@ -20,10 +25,9 @@ if (!['admin', 'user'].includes(role)) { Deno.exit(1); } -const signer = new AdminSigner(); const admin = await signer.getPublicKey(); -const [existing] = await store.query([{ +const [existing] = await relay.query([{ kinds: [30382], authors: [admin], '#d': [pubkey], @@ -57,6 +61,6 @@ const event = await signer.signEvent({ created_at: nostrNow(), }); -await store.event(event); +await relay.event(event); Deno.exit(0); diff --git a/scripts/db-export.test.ts b/scripts/db-export.test.ts index 939537d5..3b180291 100644 --- a/scripts/db-export.test.ts +++ b/scripts/db-export.test.ts @@ -1,4 +1,5 @@ import { assertEquals, assertThrows } from '@std/assert'; + import { buildFilter } from './db-export.ts'; Deno.test('buildFilter should return an empty filter when no arguments are provided', () => { diff --git a/scripts/db-export.ts b/scripts/db-export.ts index e32e08ad..d9295420 100644 --- a/scripts/db-export.ts +++ b/scripts/db-export.ts @@ -1,7 +1,14 @@ -import { Storages } from '@/storages.ts'; +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { NostrFilter } from '@nostrify/nostrify'; import { Command, InvalidOptionArgumentError } from 'commander'; +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; + +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); + interface ExportFilter { authors?: string[]; ids?: string[]; @@ -97,8 +104,6 @@ export function buildFilter(args: ExportFilter) { } async function exportEvents(args: ExportFilter) { - const store = await Storages.db(); - let filter: NostrFilter = {}; try { filter = buildFilter(args); @@ -107,7 +112,7 @@ async function exportEvents(args: 
ExportFilter) { } let count = 0; - for await (const msg of store.req([filter])) { + for await (const msg of relay.req([filter])) { if (msg[0] === 'EOSE') { break; } diff --git a/scripts/db-import.ts b/scripts/db-import.ts index c34384bf..4d27e54a 100644 --- a/scripts/db-import.ts +++ b/scripts/db-import.ts @@ -1,13 +1,16 @@ -import { Semaphore } from '@lambdalisue/async'; +import { Semaphore } from '@core/asyncutil'; +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { NostrEvent } from '@nostrify/nostrify'; import { JsonParseStream } from '@std/json/json-parse-stream'; import { TextLineStream } from '@std/streams/text-line-stream'; -import { Conf } from '@/config.ts'; -import { Storages } from '@/storages.ts'; +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; -const store = await Storages.db(); -const sem = new Semaphore(Conf.pg.poolSize); +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); +const sem = new Semaphore(conf.pg.poolSize); console.warn('Importing events...'); @@ -27,7 +30,7 @@ for await (const line of readable) { sem.lock(async () => { try { - await store.event(event); + await relay.event(event); console.warn(`(${count}) Event<${event.kind}> ${event.id}`); } catch (error) { if (error instanceof Error && error.message.includes('violates unique constraint')) { diff --git a/scripts/db-migrate.ts b/scripts/db-migrate.ts index d3e93783..23547eea 100644 --- a/scripts/db-migrate.ts +++ b/scripts/db-migrate.ts @@ -1,9 +1,9 @@ -import { Storages } from '@/storages.ts'; +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; -// This migrates kysely internally. -const kysely = await Storages.kysely(); +const conf = new DittoConf(Deno.env); +await using db = new DittoPolyPg(conf.databaseUrl); -// Close the connection before exiting. 
-await kysely.destroy(); +await db.migrate(); Deno.exit(); diff --git a/scripts/db-policy.ts b/scripts/db-policy.ts index 4be3c4ef..b7ceee96 100644 --- a/scripts/db-policy.ts +++ b/scripts/db-policy.ts @@ -1,16 +1,23 @@ -import { policyWorker } from '@/workers/policy.ts'; -import { Storages } from '@/storages.ts'; +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; + +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; +import { PolicyWorker } from '../packages/ditto/workers/policy.ts'; + +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); +const policyWorker = new PolicyWorker(conf); -const db = await Storages.db(); let count = 0; -for await (const msg of db.req([{}])) { +for await (const msg of relay.req([{}])) { const [type, , event] = msg; if (type === 'EOSE') console.log('EOSE'); if (type !== 'EVENT') continue; const [, , ok] = await policyWorker.call(event, AbortSignal.timeout(5000)); if (!ok) { - await db.remove([{ ids: [event.id] }]); + await relay.remove([{ ids: [event.id] }]); count += 1; } } diff --git a/scripts/db-populate-extensions.ts b/scripts/db-populate-extensions.ts new file mode 100644 index 00000000..9af8be2a --- /dev/null +++ b/scripts/db-populate-extensions.ts @@ -0,0 +1,29 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; +import { NostrEvent } from '@nostrify/nostrify'; + +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; + +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); + +const query = db.kysely + .selectFrom('nostr_events') + .select(['id', 'kind', 'content', 'pubkey', 'tags', 'created_at', 'sig']); + +for await (const row of query.stream()) { + const event: NostrEvent = { ...row, created_at: Number(row.created_at) }; + const ext = DittoPgStore.indexExtensions(event); + + 
try { + await db.kysely + .updateTable('nostr_events') + .set('search_ext', ext) + .where('id', '=', event.id) + .execute(); + } catch { + // do nothing + } +} + +Deno.exit(); diff --git a/scripts/db-populate-nip05.ts b/scripts/db-populate-nip05.ts new file mode 100644 index 00000000..c1015f9f --- /dev/null +++ b/scripts/db-populate-nip05.ts @@ -0,0 +1,34 @@ +import { Semaphore } from '@core/asyncutil'; +import { NostrEvent } from '@nostrify/nostrify'; + +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; + +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; +import { DittoRelayStore } from '../packages/ditto/storages/DittoRelayStore.ts'; + +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); + +const pgstore = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); +const relaystore = new DittoRelayStore({ conf, db, relay: pgstore }); + +const sem = new Semaphore(5); + +const query = db.kysely + .selectFrom('nostr_events') + .select(['id', 'kind', 'content', 'pubkey', 'tags', 'created_at', 'sig']) + .where('kind', '=', 0); + +for await (const row of query.stream(100)) { + while (sem.locked) { + await new Promise((resolve) => setTimeout(resolve, 0)); + } + + sem.lock(async () => { + const event: NostrEvent = { ...row, created_at: Number(row.created_at) }; + await relaystore.updateAuthorData(event, AbortSignal.timeout(3000)); + }); +} + +Deno.exit(); diff --git a/scripts/db-populate-search.ts b/scripts/db-populate-search.ts index 81b84ee6..7189b30c 100644 --- a/scripts/db-populate-search.ts +++ b/scripts/db-populate-search.ts @@ -1,10 +1,14 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { NSchema as n } from '@nostrify/nostrify'; -import { Storages } from '@/storages.ts'; -const store = await Storages.db(); -const kysely = await Storages.kysely(); +import { DittoPgStore } from 
'../packages/ditto/storages/DittoPgStore.ts'; -for await (const msg of store.req([{ kinds: [0] }])) { +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); + +for await (const msg of relay.req([{ kinds: [0] }])) { if (msg[0] === 'EVENT') { const { pubkey, content } = msg[2]; @@ -12,7 +16,7 @@ for await (const msg of store.req([{ kinds: [0] }])) { const search = [name, nip05].filter(Boolean).join(' ').trim(); try { - await kysely.insertInto('author_stats').values({ + await db.kysely.insertInto('author_stats').values({ pubkey, search, followers_count: 0, diff --git a/scripts/db-streak-recompute.ts b/scripts/db-streak-recompute.ts new file mode 100644 index 00000000..6a0f313f --- /dev/null +++ b/scripts/db-streak-recompute.ts @@ -0,0 +1,54 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; + +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); + +const statsQuery = db.kysely.selectFrom('author_stats').select('pubkey'); +const { streakWindow } = conf; + +for await (const { pubkey } of statsQuery.stream(10)) { + const eventsQuery = db.kysely + .selectFrom('nostr_events') + .select('created_at') + .where('pubkey', '=', pubkey) + .where('kind', 'in', [1, 20, 1111, 30023]) + .orderBy('nostr_events.created_at', 'desc') + .orderBy('nostr_events.id', 'asc'); + + let end: number | null = null; + let start: number | null = null; + + for await (const { created_at } of eventsQuery.stream(20)) { + const createdAt = Number(created_at); + + if (!end) { + const now = Math.floor(Date.now() / 1000); + + if (now - createdAt > streakWindow) { + break; // streak broken + } + + end = createdAt; + } + + if (start && (start - createdAt > streakWindow)) { + break; // streak broken + } + + start = createdAt; + } + + if (start && end) { + await db.kysely + .updateTable('author_stats') + .set({ + streak_end: end, + 
streak_start: start, + }) + .where('pubkey', '=', pubkey) + .execute(); + } +} + +Deno.exit(); diff --git a/scripts/deparameterize.ts b/scripts/deparameterize.ts new file mode 100644 index 00000000..1b5fdfa6 --- /dev/null +++ b/scripts/deparameterize.ts @@ -0,0 +1,45 @@ +const decoder = new TextDecoder(); + +for await (const chunk of Deno.stdin.readable) { + const text = decoder.decode(chunk); + + const { sql, parameters } = JSON.parse(text) as { sql: string; parameters: unknown[] }; + + let result = sql; + + for (let i = 0; i < parameters.length; i++) { + const param = parameters[i]; + + result = result.replace(`$${i + 1}`, serializeParameter(param)); + } + + console.log(result + ';'); +} + +function serializeParameter(param: unknown): string { + if (param === null) { + return 'null'; + } + + if (typeof param === 'string') { + return `'${param}'`; + } + + if (typeof param === 'number' || typeof param === 'boolean') { + return param.toString(); + } + + if (param instanceof Date) { + return `'${param.toISOString()}'`; + } + + if (Array.isArray(param)) { + return `'{${param.join(',')}}'`; + } + + if (typeof param === 'object') { + return `'${JSON.stringify(param)}'`; + } + + return JSON.stringify(param); +} diff --git a/scripts/nostr-pull.ts b/scripts/nostr-pull.ts index 573b5f01..d8a4513a 100644 --- a/scripts/nostr-pull.ts +++ b/scripts/nostr-pull.ts @@ -3,12 +3,16 @@ * by looking them up on a list of relays. 
*/ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { NostrEvent, NRelay1, NSchema } from '@nostrify/nostrify'; import { nip19 } from 'nostr-tools'; -import { Storages } from '@/storages.ts'; +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; -const store = await Storages.db(); +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); interface ImportEventsOpts { profilesOnly: boolean; @@ -19,7 +23,7 @@ const importUsers = async ( authors: string[], relays: string[], opts?: Partial, - doEvent: DoEvent = async (event: NostrEvent) => await store.event(event), + doEvent: DoEvent = async (event: NostrEvent) => await relay.event(event), ) => { // Kind 0s + follow lists. const profiles: Record> = {}; @@ -47,7 +51,7 @@ const importUsers = async ( if (!profilesOnly) { matched.push( ...await conn.query( - authors.map((author) => ({ kinds: [1], authors: [author], limit: 200 })), + authors.map((author) => ({ kinds: [1, 20], authors: [author], limit: 200 })), ), ); } diff --git a/scripts/setup-kind0.ts b/scripts/setup-kind0.ts index 6b58993d..b3dd0682 100644 --- a/scripts/setup-kind0.ts +++ b/scripts/setup-kind0.ts @@ -1,11 +1,13 @@ -import { AdminSigner } from '@/signers/AdminSigner.ts'; +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { Command } from 'commander'; import { NostrEvent } from 'nostr-tools'; -import { nostrNow } from '@/utils.ts'; -import { Buffer } from 'node:buffer'; -import { Conf } from '@/config.ts'; -import pngToIco from 'png-to-ico'; -import { Storages } from '@/storages.ts'; + +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; + +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); function die(code: 
number, ...args: unknown[]) { console.error(...args); @@ -26,7 +28,6 @@ if (import.meta.main) { 'Lightning address for the server. Can just be your own lightning address.', ) .option('-a --about ', 'About text. This shows up whenever a description for your server is needed.') - .option('-i --image ', 'Image URL to use for OpenGraph previews and favicon.') .action(async (name, args) => { const { lightning, about, image } = args; const content: Record = {}; @@ -36,34 +37,19 @@ if (import.meta.main) { content.lud16 = lightning; content.name = name; content.picture = image; - content.website = Conf.localDomain; + content.website = conf.localDomain; - const signer = new AdminSigner(); + const signer = conf.signer; const bare: Omit = { - created_at: nostrNow(), kind: 0, tags: [], content: JSON.stringify(content), + created_at: Math.floor(Date.now() / 1000), }; const signed = await signer.signEvent(bare); - if (image) { - try { - await fetch(image) - .then((res) => { - if (!res.ok) throw new Error('Error attempting to fetch favicon.'); - if (res.headers.get('content-type') !== 'image/png') throw new Error('Non-png images are not supported!'); - return res.blob(); - }) - .then(async (blob) => - await pngToIco(Buffer.from(await blob.arrayBuffer())) - .then(async (buf) => await Deno.writeFile('./public/favicon.ico', new Uint8Array(buf))) - ); - } catch (e) { - die(1, `Error generating favicon from url ${image}: "${e}". 
Please check this or try again without --image.`); - } - } + console.log({ content, signed }); - await Storages.db().then((store) => store.event(signed)); + await relay.event(signed); }); await kind0.parseAsync(); diff --git a/scripts/setup.ts b/scripts/setup.ts index 3f3fc955..f4ccf368 100644 --- a/scripts/setup.ts +++ b/scripts/setup.ts @@ -4,7 +4,7 @@ import { exists } from '@std/fs/exists'; import { generateSecretKey, nip19 } from 'nostr-tools'; import question from 'question-deno'; -import { Conf } from '@/config.ts'; +import { Conf } from '../packages/ditto/config.ts'; console.log(''); console.log('Hello! Welcome to the Ditto setup tool. We will ask you a few questions to generate a .env file for you.'); diff --git a/scripts/stats-recompute.ts b/scripts/stats-recompute.ts index 77be13fe..16614e45 100644 --- a/scripts/stats-recompute.ts +++ b/scripts/stats-recompute.ts @@ -1,7 +1,15 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { nip19 } from 'nostr-tools'; -import { Storages } from '@/storages.ts'; -import { refreshAuthorStats } from '@/utils/stats.ts'; +import { DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; +import { refreshAuthorStats } from '../packages/ditto/utils/stats.ts'; + +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); + +const { kysely } = db; let pubkey: string; try { @@ -16,7 +24,4 @@ try { Deno.exit(1); } -const store = await Storages.db(); -const kysely = await Storages.kysely(); - -await refreshAuthorStats({ pubkey, kysely, store }); +await refreshAuthorStats({ pubkey, kysely, relay }); diff --git a/scripts/trends.ts b/scripts/trends.ts index 6600f7e2..2a878a12 100644 --- a/scripts/trends.ts +++ b/scripts/trends.ts @@ -1,12 +1,20 @@ +import { DittoConf } from '@ditto/conf'; +import { DittoPolyPg } from '@ditto/db'; import { z } from 'zod'; +import { 
DittoPgStore } from '../packages/ditto/storages/DittoPgStore.ts'; import { updateTrendingEvents, updateTrendingHashtags, updateTrendingLinks, updateTrendingPubkeys, updateTrendingZappedEvents, -} from '@/trends.ts'; +} from '../packages/ditto/trends.ts'; + +const conf = new DittoConf(Deno.env); +const db = new DittoPolyPg(conf.databaseUrl); +const relay = new DittoPgStore({ db, pubkey: await conf.signer.getPublicKey() }); +const ctx = { conf, db, relay }; const trendSchema = z.enum(['pubkeys', 'zapped_events', 'events', 'hashtags', 'links']); const trends = trendSchema.array().parse(Deno.args); @@ -19,23 +27,23 @@ for (const trend of trends) { switch (trend) { case 'pubkeys': console.log('Updating trending pubkeys...'); - await updateTrendingPubkeys(); + await updateTrendingPubkeys(ctx); break; case 'zapped_events': console.log('Updating trending zapped events...'); - await updateTrendingZappedEvents(); + await updateTrendingZappedEvents(ctx); break; case 'events': console.log('Updating trending events...'); - await updateTrendingEvents(); + await updateTrendingEvents(ctx); break; case 'hashtags': console.log('Updating trending hashtags...'); - await updateTrendingHashtags(); + await updateTrendingHashtags(ctx); break; case 'links': console.log('Updating trending links...'); - await updateTrendingLinks(); + await updateTrendingLinks(ctx); break; } } diff --git a/src/DittoPush.ts b/src/DittoPush.ts deleted file mode 100644 index 364f08ae..00000000 --- a/src/DittoPush.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { ApplicationServer, PushMessageOptions, PushSubscriber, PushSubscription } from '@negrel/webpush'; - -import { Conf } from '@/config.ts'; -import { Storages } from '@/storages.ts'; -import { getInstanceMetadata } from '@/utils/instance.ts'; - -export class DittoPush { - static _server: Promise | undefined; - - static get server(): Promise { - if (!this._server) { - this._server = (async () => { - const store = await Storages.db(); - const meta = await 
getInstanceMetadata(store); - const keys = await Conf.vapidKeys; - - if (keys) { - return await ApplicationServer.new({ - contactInformation: `mailto:${meta.email}`, - vapidKeys: keys, - }); - } else { - console.warn('VAPID keys are not set. Push notifications will be disabled.'); - } - })(); - } - - return this._server; - } - - static async push( - subscription: PushSubscription, - json: object, - opts: PushMessageOptions = {}, - ): Promise { - const server = await this.server; - - if (!server) { - return; - } - - const subscriber = new PushSubscriber(server, subscription); - const text = JSON.stringify(json); - return subscriber.pushTextMessage(text, opts); - } -} diff --git a/src/app.ts b/src/app.ts deleted file mode 100644 index 5378f915..00000000 --- a/src/app.ts +++ /dev/null @@ -1,406 +0,0 @@ -import { type Context, Env as HonoEnv, Handler, Hono, Input as HonoInput, MiddlewareHandler } from '@hono/hono'; -import { cors } from '@hono/hono/cors'; -import { serveStatic } from '@hono/hono/deno'; -import { logger } from '@hono/hono/logger'; -import { NostrEvent, NostrSigner, NStore, NUploader } from '@nostrify/nostrify'; -import Debug from '@soapbox/stickynotes/debug'; -import { Kysely } from 'kysely'; - -import '@/startup.ts'; - -import { DittoTables } from '@/db/DittoTables.ts'; -import { Time } from '@/utils/time.ts'; - -import { - accountController, - accountLookupController, - accountSearchController, - accountStatusesController, - blockController, - createAccountController, - familiarFollowersController, - favouritesController, - followController, - followersController, - followingController, - muteController, - relationshipsController, - unblockController, - unfollowController, - unmuteController, - updateCredentialsController, - verifyCredentialsController, -} from '@/controllers/api/accounts.ts'; -import { - adminAccountsController, - adminActionController, - adminApproveController, - adminRejectController, -} from '@/controllers/api/admin.ts'; -import { 
appCredentialsController, createAppController } from '@/controllers/api/apps.ts'; -import { blocksController } from '@/controllers/api/blocks.ts'; -import { bookmarksController } from '@/controllers/api/bookmarks.ts'; -import { captchaController, captchaVerifyController } from '@/controllers/api/captcha.ts'; -import { - adminRelaysController, - adminSetRelaysController, - deleteZapSplitsController, - getZapSplitsController, - nameRequestController, - nameRequestsController, - statusZapSplitsController, - updateInstanceController, - updateZapSplitsController, -} from '@/controllers/api/ditto.ts'; -import { emptyArrayController, notImplementedController } from '@/controllers/api/fallback.ts'; -import { - instanceDescriptionController, - instanceV1Controller, - instanceV2Controller, -} from '@/controllers/api/instance.ts'; -import { markersController, updateMarkersController } from '@/controllers/api/markers.ts'; -import { mediaController, updateMediaController } from '@/controllers/api/media.ts'; -import { mutesController } from '@/controllers/api/mutes.ts'; -import { notificationController, notificationsController } from '@/controllers/api/notifications.ts'; -import { - createTokenController, - oauthAuthorizeController, - oauthController, - revokeTokenController, -} from '@/controllers/api/oauth.ts'; -import { - configController, - frontendConfigController, - pleromaAdminDeleteStatusController, - pleromaAdminSuggestController, - pleromaAdminTagController, - pleromaAdminUnsuggestController, - pleromaAdminUntagController, - updateConfigController, -} from '@/controllers/api/pleroma.ts'; -import { preferencesController } from '@/controllers/api/preferences.ts'; -import { getSubscriptionController, pushSubscribeController } from '@/controllers/api/push.ts'; -import { deleteReactionController, reactionController, reactionsController } from '@/controllers/api/reactions.ts'; -import { relayController } from '@/controllers/nostr/relay.ts'; -import { - adminReportController, 
- adminReportReopenController, - adminReportResolveController, - adminReportsController, - reportController, -} from '@/controllers/api/reports.ts'; -import { searchController } from '@/controllers/api/search.ts'; -import { - bookmarkController, - contextController, - createStatusController, - deleteStatusController, - favouriteController, - favouritedByController, - pinController, - quotesController, - rebloggedByController, - reblogStatusController, - statusController, - unbookmarkController, - unfavouriteController, - unpinController, - unreblogStatusController, - zapController, - zappedByController, -} from '@/controllers/api/statuses.ts'; -import { streamingController } from '@/controllers/api/streaming.ts'; -import { suggestionsV1Controller, suggestionsV2Controller } from '@/controllers/api/suggestions.ts'; -import { - hashtagTimelineController, - homeTimelineController, - publicTimelineController, - suggestedTimelineController, -} from '@/controllers/api/timelines.ts'; -import { - trendingLinksController, - trendingStatusesController, - trendingTagsController, -} from '@/controllers/api/trends.ts'; -import { translateController } from '@/controllers/api/translate.ts'; -import { errorHandler } from '@/controllers/error.ts'; -import { frontendController } from '@/controllers/frontend.ts'; -import { metricsController } from '@/controllers/metrics.ts'; -import { indexController } from '@/controllers/site.ts'; -import { manifestController } from '@/controllers/manifest.ts'; -import { nodeInfoController, nodeInfoSchemaController } from '@/controllers/well-known/nodeinfo.ts'; -import { nostrController } from '@/controllers/well-known/nostr.ts'; -import { DittoTranslator } from '@/interfaces/DittoTranslator.ts'; -import { auth98Middleware, requireProof, requireRole } from '@/middleware/auth98Middleware.ts'; -import { cspMiddleware } from '@/middleware/cspMiddleware.ts'; -import { metricsMiddleware } from '@/middleware/metricsMiddleware.ts'; -import { 
paginationMiddleware } from '@/middleware/paginationMiddleware.ts'; -import { rateLimitMiddleware } from '@/middleware/rateLimitMiddleware.ts'; -import { requireSigner } from '@/middleware/requireSigner.ts'; -import { signerMiddleware } from '@/middleware/signerMiddleware.ts'; -import { storeMiddleware } from '@/middleware/storeMiddleware.ts'; -import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts'; -import { translatorMiddleware } from '@/middleware/translatorMiddleware.ts'; - -export interface AppEnv extends HonoEnv { - Variables: { - /** Signer to get the logged-in user's pubkey, relays, and to sign events, or `undefined` if the user isn't logged in. */ - signer?: NostrSigner; - /** Uploader for the user to upload files. */ - uploader?: NUploader; - /** NIP-98 signed event proving the pubkey is owned by the user. */ - proof?: NostrEvent; - /** Kysely instance for the database. */ - kysely: Kysely; - /** Storage for the user, might filter out unwanted content. */ - store: NStore; - /** Normalized pagination params. */ - pagination: { since?: number; until?: number; limit: number }; - /** Normalized list pagination params. */ - listPagination: { offset: number; limit: number }; - /** Translation service. */ - translator?: DittoTranslator; - }; -} - -type AppContext = Context; -type AppMiddleware = MiddlewareHandler; -type AppController = Handler>; - -const app = new Hono({ strict: false }); - -const debug = Debug('ditto:http'); - -/** User-provided files in the gitignored `public/` directory. */ -const publicFiles = serveStatic({ root: './public/' }); -/** Static files provided by the Ditto repo, checked into git. 
*/ -const staticFiles = serveStatic({ root: './static/' }); - -app.use('*', rateLimitMiddleware(300, Time.minutes(5))); - -app.use('/api/*', metricsMiddleware, paginationMiddleware, logger(debug)); -app.use('/.well-known/*', metricsMiddleware, logger(debug)); -app.use('/users/*', metricsMiddleware, logger(debug)); -app.use('/nodeinfo/*', metricsMiddleware, logger(debug)); -app.use('/oauth/*', metricsMiddleware, logger(debug)); - -app.get('/api/v1/streaming', metricsMiddleware, streamingController); -app.get('/relay', metricsMiddleware, relayController); - -app.use( - '*', - cspMiddleware(), - cors({ origin: '*', exposeHeaders: ['link'] }), - signerMiddleware, - uploaderMiddleware, - auth98Middleware(), - storeMiddleware, -); - -app.get('/metrics', metricsController); - -app.get('/.well-known/nodeinfo', nodeInfoController); -app.get('/.well-known/nostr.json', nostrController); - -app.get('/nodeinfo/:version', nodeInfoSchemaController); -app.get('/manifest.webmanifest', manifestController); - -app.get('/api/v1/instance', instanceV1Controller); -app.get('/api/v2/instance', instanceV2Controller); -app.get('/api/v1/instance/extended_description', instanceDescriptionController); - -app.get('/api/v1/apps/verify_credentials', appCredentialsController); -app.post('/api/v1/apps', createAppController); - -app.post('/oauth/token', createTokenController); -app.post('/oauth/revoke', revokeTokenController); -app.post('/oauth/authorize', oauthAuthorizeController); -app.get('/oauth/authorize', oauthController); - -app.post('/api/v1/accounts', requireProof({ pow: 20 }), createAccountController); -app.get('/api/v1/accounts/verify_credentials', requireSigner, verifyCredentialsController); -app.patch('/api/v1/accounts/update_credentials', requireSigner, updateCredentialsController); -app.get('/api/v1/accounts/search', accountSearchController); -app.get('/api/v1/accounts/lookup', accountLookupController); -app.get('/api/v1/accounts/relationships', requireSigner, 
relationshipsController); -app.get('/api/v1/accounts/familiar_followers', requireSigner, familiarFollowersController); -app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/block', requireSigner, blockController); -app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unblock', requireSigner, unblockController); -app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/mute', requireSigner, muteController); -app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unmute', requireSigner, unmuteController); -app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/follow', requireSigner, followController); -app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unfollow', requireSigner, unfollowController); -app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/followers', followersController); -app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/following', followingController); -app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/statuses', accountStatusesController); -app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}', accountController); - -app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/favourited_by', favouritedByController); -app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/reblogged_by', rebloggedByController); -app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/context', contextController); -app.get('/api/v1/statuses/:id{[0-9a-f]{64}}', statusController); -app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/favourite', requireSigner, favouriteController); -app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}/unfavourite', requireSigner, unfavouriteController); -app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/bookmark', requireSigner, bookmarkController); -app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unbookmark', requireSigner, unbookmarkController); -app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/pin', requireSigner, pinController); -app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unpin', requireSigner, unpinController); -app.post( - '/api/v1/statuses/:id{[0-9a-f]{64}}/translate', - requireSigner, - rateLimitMiddleware(15, Time.minutes(1)), - 
translatorMiddleware, - translateController, -); -app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/reblog', requireSigner, reblogStatusController); -app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unreblog', requireSigner, unreblogStatusController); -app.post('/api/v1/statuses', requireSigner, createStatusController); -app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}', requireSigner, deleteStatusController); - -app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/quotes', quotesController); - -app.post('/api/v1/media', mediaController); -app.put( - '/api/v1/media/:id{[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}}', - updateMediaController, -); -app.post('/api/v2/media', mediaController); - -app.get('/api/v1/timelines/home', requireSigner, homeTimelineController); -app.get('/api/v1/timelines/public', publicTimelineController); -app.get('/api/v1/timelines/tag/:hashtag', hashtagTimelineController); -app.get('/api/v1/timelines/suggested', suggestedTimelineController); - -app.get('/api/v1/preferences', preferencesController); -app.get('/api/v1/search', searchController); -app.get('/api/v2/search', searchController); - -app.get('/api/pleroma/frontend_configurations', frontendConfigController); - -app.get('/api/v1/trends/statuses', trendingStatusesController); -app.get('/api/v1/trends/links', trendingLinksController); -app.get('/api/v1/trends/tags', trendingTagsController); -app.get('/api/v1/trends', trendingTagsController); - -app.get('/api/v1/suggestions', suggestionsV1Controller); -app.get('/api/v2/suggestions', suggestionsV2Controller); - -app.get('/api/v1/notifications', requireSigner, notificationsController); -app.get('/api/v1/notifications/:id', requireSigner, notificationController); - -app.get('/api/v1/favourites', requireSigner, favouritesController); -app.get('/api/v1/bookmarks', requireSigner, bookmarksController); -app.get('/api/v1/blocks', requireSigner, blocksController); -app.get('/api/v1/mutes', requireSigner, mutesController); - 
-app.get('/api/v1/markers', requireProof(), markersController); -app.post('/api/v1/markers', requireProof(), updateMarkersController); - -app.get('/api/v1/push/subscription', requireSigner, getSubscriptionController); -app.post('/api/v1/push/subscription', requireProof(), pushSubscribeController); - -app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions', reactionsController); -app.get('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', reactionsController); -app.put('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', requireSigner, reactionController); -app.delete('/api/v1/pleroma/statuses/:id{[0-9a-f]{64}}/reactions/:emoji', requireSigner, deleteReactionController); - -app.get('/api/v1/pleroma/admin/config', requireRole('admin'), configController); -app.post('/api/v1/pleroma/admin/config', requireRole('admin'), updateConfigController); -app.delete('/api/v1/pleroma/admin/statuses/:id', requireRole('admin'), pleromaAdminDeleteStatusController); - -app.get('/api/v1/admin/ditto/relays', requireRole('admin'), adminRelaysController); -app.put('/api/v1/admin/ditto/relays', requireRole('admin'), adminSetRelaysController); - -app.put('/api/v1/admin/ditto/instance', requireRole('admin'), updateInstanceController); - -app.post('/api/v1/ditto/names', requireSigner, nameRequestController); -app.get('/api/v1/ditto/names', requireSigner, nameRequestsController); - -app.get('/api/v1/ditto/captcha', rateLimitMiddleware(3, Time.minutes(1)), captchaController); -app.post( - '/api/v1/ditto/captcha/:id/verify', - rateLimitMiddleware(8, Time.minutes(1)), - requireProof(), - captchaVerifyController, -); - -app.get('/api/v1/ditto/zap_splits', getZapSplitsController); -app.get('/api/v1/ditto/:id{[0-9a-f]{64}}/zap_splits', statusZapSplitsController); - -app.put('/api/v1/admin/ditto/zap_splits', requireRole('admin'), updateZapSplitsController); -app.delete('/api/v1/admin/ditto/zap_splits', requireRole('admin'), deleteZapSplitsController); - 
-app.post('/api/v1/ditto/zap', requireSigner, zapController); -app.get('/api/v1/ditto/statuses/:id{[0-9a-f]{64}}/zapped_by', zappedByController); - -app.post('/api/v1/reports', requireSigner, reportController); -app.get('/api/v1/admin/reports', requireSigner, requireRole('admin'), adminReportsController); -app.get('/api/v1/admin/reports/:id{[0-9a-f]{64}}', requireSigner, requireRole('admin'), adminReportController); -app.post( - '/api/v1/admin/reports/:id{[0-9a-f]{64}}/resolve', - requireSigner, - requireRole('admin'), - adminReportResolveController, -); -app.post( - '/api/v1/admin/reports/:id{[0-9a-f]{64}}/reopen', - requireSigner, - requireRole('admin'), - adminReportReopenController, -); - -app.get('/api/v1/admin/accounts', requireRole('admin'), adminAccountsController); -app.post('/api/v1/admin/accounts/:id{[0-9a-f]{64}}/action', requireSigner, requireRole('admin'), adminActionController); -app.post( - '/api/v1/admin/accounts/:id{[0-9a-f]{64}}/approve', - requireSigner, - requireRole('admin'), - adminApproveController, -); -app.post('/api/v1/admin/accounts/:id{[0-9a-f]{64}}/reject', requireSigner, requireRole('admin'), adminRejectController); - -app.put('/api/v1/pleroma/admin/users/tag', requireRole('admin'), pleromaAdminTagController); -app.delete('/api/v1/pleroma/admin/users/tag', requireRole('admin'), pleromaAdminUntagController); -app.patch('/api/v1/pleroma/admin/users/suggest', requireRole('admin'), pleromaAdminSuggestController); -app.patch('/api/v1/pleroma/admin/users/unsuggest', requireRole('admin'), pleromaAdminUnsuggestController); - -// Not (yet) implemented. 
-app.get('/api/v1/custom_emojis', emptyArrayController); -app.get('/api/v1/filters', emptyArrayController); -app.get('/api/v1/domain_blocks', emptyArrayController); -app.get('/api/v1/conversations', emptyArrayController); -app.get('/api/v1/lists', emptyArrayController); - -app.use('/api/*', notImplementedController); -app.use('/.well-known/*', publicFiles, notImplementedController); -app.use('/nodeinfo/*', notImplementedController); -app.use('/oauth/*', notImplementedController); - -// Known frontend routes -app.get('/:acct{@.*}', frontendController); -app.get('/:acct{@.*}/*', frontendController); -app.get('/:bech32{^[\x21-\x7E]{1,83}1[023456789acdefghjklmnpqrstuvwxyz]{6,}$}', frontendController); -app.get('/users/*', frontendController); -app.get('/tags/*', frontendController); -app.get('/statuses/*', frontendController); -app.get('/notice/*', frontendController); -app.get('/timeline/*', frontendController); - -// Known static file routes -app.get('/favicon.ico', publicFiles, staticFiles); -app.get('/images/*', publicFiles, staticFiles); -app.get('/instance/*', publicFiles); -app.get('/packs/*', publicFiles); -app.get('/sw.js', publicFiles); - -// Site index -app.get('/', frontendController, indexController); - -// Fallback -app.get('*', publicFiles, staticFiles, frontendController); - -app.onError(errorHandler); - -export default app; - -export type { AppContext, AppController, AppMiddleware }; diff --git a/src/config.ts b/src/config.ts deleted file mode 100644 index 68bf3ed8..00000000 --- a/src/config.ts +++ /dev/null @@ -1,391 +0,0 @@ -import os from 'node:os'; -import ISO6391, { LanguageCode } from 'iso-639-1'; -import { getPublicKey, nip19 } from 'nostr-tools'; -import { z } from 'zod'; -import { decodeBase64 } from '@std/encoding/base64'; -import { encodeBase64Url } from '@std/encoding/base64url'; - -import { getEcdsaPublicKey } from '@/utils/crypto.ts'; - -/** Application-wide configuration. 
*/ -class Conf { - private static _pubkey: string | undefined; - /** Ditto admin secret key in nip19 format. This is the way it's configured by an admin. */ - static get nsec(): `nsec1${string}` { - const value = Deno.env.get('DITTO_NSEC'); - if (!value) { - throw new Error('Missing DITTO_NSEC'); - } - if (!value.startsWith('nsec1')) { - throw new Error('Invalid DITTO_NSEC'); - } - return value as `nsec1${string}`; - } - /** Ditto admin secret key in hex format. */ - static get seckey(): Uint8Array { - return nip19.decode(Conf.nsec).data; - } - /** Ditto admin public key in hex format. */ - static get pubkey(): string { - if (!this._pubkey) { - this._pubkey = getPublicKey(Conf.seckey); - } - return this._pubkey; - } - /** Port to use when serving the HTTP server. */ - static get port(): number { - return parseInt(Deno.env.get('PORT') || '4036'); - } - /** Relay URL to the Ditto server's relay. */ - static get relay(): `wss://${string}` | `ws://${string}` { - const { protocol, host } = Conf.url; - return `${protocol === 'https:' ? 'wss:' : 'ws:'}//${host}/relay`; - } - /** Relay to use for NIP-50 `search` queries. */ - static get searchRelay(): string | undefined { - return Deno.env.get('SEARCH_RELAY'); - } - /** Origin of the Ditto server, including the protocol and port. */ - static get localDomain(): string { - return Deno.env.get('LOCAL_DOMAIN') || `http://localhost:${Conf.port}`; - } - /** Link to an external nostr viewer. */ - static get externalDomain(): string { - return Deno.env.get('NOSTR_EXTERNAL') || 'https://njump.me'; - } - /** Get a link to a nip19-encoded entity in the configured external viewer. */ - static external(path: string) { - return new URL(path, Conf.externalDomain).toString(); - } - /** - * Heroku-style database URL. This is used in production to connect to the - * database. 
- * - * Follows the format: - * - * ```txt - * protocol://username:password@host:port/database_name - * ``` - */ - static get databaseUrl(): string { - return Deno.env.get('DATABASE_URL') ?? 'file://data/pgdata'; - } - /** PGlite debug level. 0 disables logging. */ - static get pgliteDebug(): 0 | 1 | 2 | 3 | 4 | 5 { - return Number(Deno.env.get('PGLITE_DEBUG') || 0) as 0 | 1 | 2 | 3 | 4 | 5; - } - private static _vapidPublicKey: Promise | undefined; - static get vapidPublicKey(): Promise { - if (!this._vapidPublicKey) { - this._vapidPublicKey = (async () => { - const keys = await Conf.vapidKeys; - if (keys) { - const { publicKey } = keys; - const bytes = await crypto.subtle.exportKey('raw', publicKey); - return encodeBase64Url(bytes); - } - })(); - } - - return this._vapidPublicKey; - } - static get vapidKeys(): Promise { - return (async () => { - const encoded = Deno.env.get('VAPID_PRIVATE_KEY'); - - if (!encoded) { - return; - } - - const keyData = decodeBase64(encoded); - - const privateKey = await crypto.subtle.importKey( - 'pkcs8', - keyData, - { name: 'ECDSA', namedCurve: 'P-256' }, - true, - ['sign'], - ); - const publicKey = await getEcdsaPublicKey(privateKey, true); - - return { privateKey, publicKey }; - })(); - } - static db = { - /** Database query timeout configurations. */ - timeouts: { - /** Default query timeout when another setting isn't more specific. */ - get default(): number { - return Number(Deno.env.get('DB_TIMEOUT_DEFAULT') || 5_000); - }, - /** Timeout used for queries made through the Nostr relay. */ - get relay(): number { - return Number(Deno.env.get('DB_TIMEOUT_RELAY') || 1_000); - }, - /** Timeout used for timelines such as home, notifications, hashtag, etc. */ - get timelines(): number { - return Number(Deno.env.get('DB_TIMEOUT_TIMELINES') || 15_000); - }, - }, - }; - /** Time-to-live for captchas in milliseconds. 
*/ - static get captchaTTL(): number { - return Number(Deno.env.get('CAPTCHA_TTL') || 5 * 60 * 1000); - } - /** Character limit to enforce for posts made through Mastodon API. */ - static get postCharLimit(): number { - return Number(Deno.env.get('POST_CHAR_LIMIT') || 5000); - } - /** S3 media storage configuration. */ - static s3 = { - get endPoint(): string | undefined { - return Deno.env.get('S3_ENDPOINT'); - }, - get region(): string | undefined { - return Deno.env.get('S3_REGION'); - }, - get accessKey(): string | undefined { - return Deno.env.get('S3_ACCESS_KEY'); - }, - get secretKey(): string | undefined { - return Deno.env.get('S3_SECRET_KEY'); - }, - get bucket(): string | undefined { - return Deno.env.get('S3_BUCKET'); - }, - get pathStyle(): boolean | undefined { - return optionalBooleanSchema.parse(Deno.env.get('S3_PATH_STYLE')); - }, - get port(): number | undefined { - return optionalNumberSchema.parse(Deno.env.get('S3_PORT')); - }, - get sessionToken(): string | undefined { - return Deno.env.get('S3_SESSION_TOKEN'); - }, - get useSSL(): boolean | undefined { - return optionalBooleanSchema.parse(Deno.env.get('S3_USE_SSL')); - }, - }; - /** IPFS uploader configuration. */ - static ipfs = { - /** Base URL for private IPFS API calls. */ - get apiUrl(): string { - return Deno.env.get('IPFS_API_URL') || 'http://localhost:5001'; - }, - }; - /** nostr.build API endpoint when the `nostrbuild` uploader is used. */ - static get nostrbuildEndpoint(): string { - return Deno.env.get('NOSTRBUILD_ENDPOINT') || 'https://nostr.build/api/v2/upload/files'; - } - /** Default Blossom servers to use when the `blossom` uploader is set. */ - static get blossomServers(): string[] { - return Deno.env.get('BLOSSOM_SERVERS')?.split(',') || ['https://blossom.primal.net/']; - } - /** Module to upload files with. */ - static get uploader(): string | undefined { - return Deno.env.get('DITTO_UPLOADER'); - } - /** Location to use for local uploads. 
*/ - static get uploadsDir(): string { - return Deno.env.get('UPLOADS_DIR') || 'data/uploads'; - } - /** Media base URL for uploads. */ - static get mediaDomain(): string { - const value = Deno.env.get('MEDIA_DOMAIN'); - - if (!value) { - const url = Conf.url; - url.host = `media.${url.host}`; - return url.toString(); - } - - return value; - } - /** - * Whether to analyze media metadata with [blurhash](https://www.npmjs.com/package/blurhash) and [sharp](https://www.npmjs.com/package/sharp). - * This is prone to security vulnerabilities, which is why it's not enabled by default. - */ - static get mediaAnalyze(): boolean { - return optionalBooleanSchema.parse(Deno.env.get('MEDIA_ANALYZE')) ?? false; - } - /** Max upload size for files in number of bytes. Default 100MiB. */ - static get maxUploadSize(): number { - return Number(Deno.env.get('MAX_UPLOAD_SIZE') || 100 * 1024 * 1024); - } - /** Usernames that regular users cannot sign up with. */ - static get forbiddenUsernames(): string[] { - return Deno.env.get('FORBIDDEN_USERNAMES')?.split(',') || [ - '_', - 'admin', - 'administrator', - 'root', - 'sysadmin', - 'system', - ]; - } - /** Domain of the Ditto server as a `URL` object, for easily grabbing the `hostname`, etc. */ - static get url(): URL { - return new URL(Conf.localDomain); - } - /** Merges the path with the localDomain. */ - static local(path: string): string { - return mergePaths(Conf.localDomain, path); - } - /** URL to send Sentry errors to. */ - static get sentryDsn(): string | undefined { - return Deno.env.get('SENTRY_DSN'); - } - /** Postgres settings. */ - static pg = { - /** Number of connections to use in the pool. */ - get poolSize(): number { - return Number(Deno.env.get('PG_POOL_SIZE') ?? 20); - }, - }; - /** Whether to enable requesting events from known relays. */ - static get firehoseEnabled(): boolean { - return optionalBooleanSchema.parse(Deno.env.get('FIREHOSE_ENABLED')) ?? 
true; - } - /** Number of events the firehose is allowed to process at one time before they have to wait in a queue. */ - static get firehoseConcurrency(): number { - return Math.ceil(Number(Deno.env.get('FIREHOSE_CONCURRENCY') ?? (Conf.pg.poolSize * 0.25))); - } - /** Nostr event kinds of events to listen for on the firehose. */ - static get firehoseKinds(): number[] { - return (Deno.env.get('FIREHOSE_KINDS') ?? '0, 1, 3, 5, 6, 7, 9735, 10002') - .split(/[, ]+/g) - .map(Number); - } - /** - * Whether Ditto should subscribe to Nostr events from the Postgres database itself. - * This would make Nostr events inserted directly into Postgres available to the streaming API and relay. - */ - static get notifyEnabled(): boolean { - return optionalBooleanSchema.parse(Deno.env.get('NOTIFY_ENABLED')) ?? false; - } - /** Whether to enable Ditto cron jobs. */ - static get cronEnabled(): boolean { - return optionalBooleanSchema.parse(Deno.env.get('CRON_ENABLED')) ?? true; - } - /** Crawler User-Agent regex to render link previews to. */ - static get crawlerRegex(): RegExp { - return new RegExp( - Deno.env.get('CRAWLER_REGEX') || - 'googlebot|bingbot|yandex|baiduspider|twitterbot|facebookexternalhit|rogerbot|linkedinbot|embedly|quora link preview|showyoubot|outbrain|pinterestbot|slackbot|vkShare|W3C_Validator|whatsapp|mastodon|pleroma|Discordbot|AhrefsBot|SEMrushBot|MJ12bot|SeekportBot|Synapse|Matrix', - 'i', - ); - } - /** User-Agent to use when fetching link previews. Pretend to be Facebook by default. */ - static get fetchUserAgent(): string { - return Deno.env.get('DITTO_FETCH_USER_AGENT') ?? 'facebookexternalhit'; - } - /** Path to the custom policy module. Must be an absolute path, https:, npm:, or jsr: URI. */ - static get policy(): string { - return Deno.env.get('DITTO_POLICY') || new URL('../data/policy.ts', import.meta.url).pathname; - } - /** Absolute path to the data directory used by Ditto. 
*/ - static get dataDir(): string { - return Deno.env.get('DITTO_DATA_DIR') || new URL('../data', import.meta.url).pathname; - } - /** Absolute path of the Deno directory. */ - static get denoDir(): string { - return Deno.env.get('DENO_DIR') || `${os.userInfo().homedir}/.cache/deno`; - } - /** Whether zap splits should be enabled. */ - static get zapSplitsEnabled(): boolean { - return optionalBooleanSchema.parse(Deno.env.get('ZAP_SPLITS_ENABLED')) ?? false; - } - /** Languages this server wishes to highlight. Used when querying trends.*/ - static get preferredLanguages(): LanguageCode[] | undefined { - return Deno.env.get('DITTO_LANGUAGES')?.split(',')?.filter(ISO6391.validate) as LanguageCode[]; - } - /** Translation provider used to translate posts. */ - static get translationProvider(): string | undefined { - return Deno.env.get('TRANSLATION_PROVIDER'); - } - /** DeepL URL endpoint. */ - static get deeplBaseUrl(): string | undefined { - return Deno.env.get('DEEPL_BASE_URL'); - } - /** DeepL API KEY. */ - static get deeplApiKey(): string | undefined { - return Deno.env.get('DEEPL_API_KEY'); - } - /** LibreTranslate URL endpoint. */ - static get libretranslateBaseUrl(): string | undefined { - return Deno.env.get('LIBRETRANSLATE_BASE_URL'); - } - /** LibreTranslate API KEY. */ - static get libretranslateApiKey(): string | undefined { - return Deno.env.get('LIBRETRANSLATE_API_KEY'); - } - /** Cache settings. */ - static caches = { - /** NIP-05 cache settings. */ - get nip05(): { max: number; ttl: number } { - return { - max: Number(Deno.env.get('DITTO_CACHE_NIP05_MAX') || 3000), - ttl: Number(Deno.env.get('DITTO_CACHE_NIP05_TTL') || 1 * 60 * 60 * 1000), - }; - }, - /** Favicon cache settings. */ - get favicon(): { max: number; ttl: number } { - return { - max: Number(Deno.env.get('DITTO_CACHE_FAVICON_MAX') || 500), - ttl: Number(Deno.env.get('DITTO_CACHE_FAVICON_TTL') || 1 * 60 * 60 * 1000), - }; - }, - /** Link preview cache settings. 
*/ - get linkPreview(): { max: number; ttl: number } { - return { - max: Number(Deno.env.get('DITTO_CACHE_LINK_PREVIEW_MAX') || 1000), - ttl: Number(Deno.env.get('DITTO_CACHE_LINK_PREVIEW_TTL') || 12 * 60 * 60 * 1000), - }; - }, - /** Translation cache settings. */ - get translation(): { max: number; ttl: number } { - return { - max: Number(Deno.env.get('DITTO_CACHE_TRANSLATION_MAX') || 1000), - ttl: Number(Deno.env.get('DITTO_CACHE_TRANSLATION_TTL') || 6 * 60 * 60 * 1000), - }; - }, - }; - static profileFields = { - get maxFields(): number { - return Number(Deno.env.get('PROFILE_FIELDS_MAX_FIELDS') || 10); - }, - get nameLength(): number { - return Number(Deno.env.get('PROFILE_FIELDS_NAME_LENGTH') || 255); - }, - get valueLength(): number { - return Number(Deno.env.get('PROFILE_FIELDS_VALUE_LENGTH') || 2047); - }, - }; -} - -const optionalBooleanSchema = z - .enum(['true', 'false']) - .optional() - .transform((value) => value !== undefined ? value === 'true' : undefined); - -const optionalNumberSchema = z - .string() - .optional() - .transform((value) => value !== undefined ? Number(value) : undefined); - -function mergePaths(base: string, path: string) { - const url = new URL( - path.startsWith('/') ? 
path : new URL(path).pathname, - base, - ); - - if (!path.startsWith('/')) { - // Copy query parameters from the original URL to the new URL - const originalUrl = new URL(path); - url.search = originalUrl.search; - } - - return url.toString(); -} - -export { Conf }; diff --git a/src/controllers/api/fallback.ts b/src/controllers/api/fallback.ts deleted file mode 100644 index 0e98ac79..00000000 --- a/src/controllers/api/fallback.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { type Context } from '@hono/hono'; - -const emptyArrayController = (c: Context) => c.json([]); -const notImplementedController = (c: Context) => Promise.resolve(c.json({ error: 'Not implemented' }, 404)); - -export { emptyArrayController, notImplementedController }; diff --git a/src/controllers/metrics.ts b/src/controllers/metrics.ts deleted file mode 100644 index d168243b..00000000 --- a/src/controllers/metrics.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { register } from 'prom-client'; - -import { AppController } from '@/app.ts'; -import { - dbAvailableConnectionsGauge, - dbPoolSizeGauge, - relayPoolRelaysSizeGauge, - relayPoolSubscriptionsSizeGauge, -} from '@/metrics.ts'; -import { Storages } from '@/storages.ts'; - -/** Prometheus/OpenMetrics controller. */ -export const metricsController: AppController = async (c) => { - const db = await Storages.database(); - const pool = await Storages.client(); - - // Update some metrics at request time. - dbPoolSizeGauge.set(db.poolSize); - dbAvailableConnectionsGauge.set(db.availableConnections); - - relayPoolRelaysSizeGauge.reset(); - relayPoolSubscriptionsSizeGauge.reset(); - - for (const relay of pool.relays.values()) { - relayPoolRelaysSizeGauge.inc({ ready_state: relay.socket.readyState }); - relayPoolSubscriptionsSizeGauge.inc(relay.subscriptions.length); - } - - // Serve the metrics. 
- const metrics = await register.metrics(); - - const headers: HeadersInit = { - 'Content-Type': register.contentType, - }; - - return c.text(metrics, 200, headers); -}; diff --git a/src/controllers/site.ts b/src/controllers/site.ts deleted file mode 100644 index 751e60ef..00000000 --- a/src/controllers/site.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Conf } from '@/config.ts'; - -import type { AppController } from '@/app.ts'; - -/** Landing page controller. */ -const indexController: AppController = (c) => { - const { origin } = Conf.url; - - return c.text(`Please connect with a Mastodon client: - - ${origin} - -Ditto -`); -}; - -export { indexController }; diff --git a/src/controllers/well-known/nostr.ts b/src/controllers/well-known/nostr.ts deleted file mode 100644 index b6b7af09..00000000 --- a/src/controllers/well-known/nostr.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { z } from 'zod'; - -import { AppController } from '@/app.ts'; -import { localNip05Lookup } from '@/utils/nip05.ts'; - -const nameSchema = z.string().min(1).regex(/^\w+$/); - -/** - * Serves NIP-05's nostr.json. - * https://github.com/nostr-protocol/nips/blob/master/05.md - */ -const nostrController: AppController = async (c) => { - const store = c.get('store'); - - const result = nameSchema.safeParse(c.req.query('name')); - const name = result.success ? result.data : undefined; - - const pointer = name ? 
await localNip05Lookup(store, name) : undefined; - - if (!name || !pointer) { - return c.json({ names: {}, relays: {} }); - } - - const { pubkey, relays } = pointer; - - return c.json({ - names: { - [name]: pubkey, - }, - relays: { - [pubkey]: relays, - }, - }); -}; - -export { nostrController }; diff --git a/src/cron.ts b/src/cron.ts deleted file mode 100644 index 6994561e..00000000 --- a/src/cron.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { updateTrendingLinks } from '@/trends.ts'; -import { updateTrendingHashtags } from '@/trends.ts'; -import { updateTrendingEvents, updateTrendingPubkeys, updateTrendingZappedEvents } from '@/trends.ts'; - -/** Start cron jobs for the application. */ -export function cron() { - Deno.cron('update trending pubkeys', '0 * * * *', updateTrendingPubkeys); - Deno.cron('update trending zapped events', '7 * * * *', updateTrendingZappedEvents); - Deno.cron('update trending events', '15 * * * *', updateTrendingEvents); - Deno.cron('update trending hashtags', '30 * * * *', updateTrendingHashtags); - Deno.cron('update trending links', '45 * * * *', updateTrendingLinks); -} diff --git a/src/db/DittoDB.ts b/src/db/DittoDB.ts deleted file mode 100644 index 923a109d..00000000 --- a/src/db/DittoDB.ts +++ /dev/null @@ -1,56 +0,0 @@ -import fs from 'node:fs/promises'; -import path from 'node:path'; - -import { FileMigrationProvider, Kysely, Migrator } from 'kysely'; - -import { DittoPglite } from '@/db/adapters/DittoPglite.ts'; -import { DittoPostgres } from '@/db/adapters/DittoPostgres.ts'; -import { DittoDatabase, DittoDatabaseOpts } from '@/db/DittoDatabase.ts'; -import { DittoTables } from '@/db/DittoTables.ts'; - -export class DittoDB { - /** Open a new database connection. 
*/ - static create(databaseUrl: string, opts?: DittoDatabaseOpts): DittoDatabase { - const { protocol } = new URL(databaseUrl); - - switch (protocol) { - case 'file:': - case 'memory:': - return DittoPglite.create(databaseUrl, opts); - case 'postgres:': - case 'postgresql:': - return DittoPostgres.create(databaseUrl, opts); - default: - throw new Error('Unsupported database URL.'); - } - } - - /** Migrate the database to the latest version. */ - static async migrate(kysely: Kysely) { - const migrator = new Migrator({ - db: kysely, - provider: new FileMigrationProvider({ - fs, - path, - migrationFolder: new URL(import.meta.resolve('./migrations')).pathname, - }), - }); - - console.warn('Running migrations...'); - const { results, error } = await migrator.migrateToLatest(); - - if (error) { - console.error(error); - Deno.exit(1); - } else { - if (!results?.length) { - console.warn('Everything up-to-date.'); - } else { - console.warn('Migrations finished!'); - for (const { migrationName, status } of results!) { - console.warn(` - ${migrationName}: ${status}`); - } - } - } - } -} diff --git a/src/db/KyselyLogger.ts b/src/db/KyselyLogger.ts deleted file mode 100644 index 514f44a4..00000000 --- a/src/db/KyselyLogger.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { Stickynotes } from '@soapbox/stickynotes'; -import { Logger } from 'kysely'; -import { dbQueriesCounter, dbQueryDurationHistogram } from '@/metrics.ts'; - -/** Log the SQL for queries. 
*/ -export const KyselyLogger: Logger = (event) => { - const console = new Stickynotes('ditto:sql'); - - const { query, queryDurationMillis } = event; - const { sql, parameters } = query; - - const queryDurationSeconds = queryDurationMillis / 1000; - - dbQueriesCounter.inc(); - dbQueryDurationHistogram.observe(queryDurationSeconds); - - console.debug( - sql, - JSON.stringify(parameters), - `\x1b[90m(${(queryDurationSeconds / 1000).toFixed(2)}s)\x1b[0m`, - ); -}; diff --git a/src/db/adapters/DittoPglite.ts b/src/db/adapters/DittoPglite.ts deleted file mode 100644 index df616458..00000000 --- a/src/db/adapters/DittoPglite.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { PGlite } from '@electric-sql/pglite'; -import { pg_trgm } from '@electric-sql/pglite/contrib/pg_trgm'; -import { PgliteDialect } from '@soapbox/kysely-pglite'; -import { Kysely } from 'kysely'; - -import { DittoDatabase, DittoDatabaseOpts } from '@/db/DittoDatabase.ts'; -import { DittoTables } from '@/db/DittoTables.ts'; -import { KyselyLogger } from '@/db/KyselyLogger.ts'; -import { isWorker } from '@/utils/worker.ts'; - -export class DittoPglite { - static create(databaseUrl: string, opts?: DittoDatabaseOpts): DittoDatabase { - const url = new URL(databaseUrl); - - if (url.protocol === 'file:' && isWorker()) { - throw new Error('PGlite is not supported in worker threads.'); - } - - const pglite = new PGlite(databaseUrl, { - extensions: { pg_trgm }, - debug: opts?.debug, - }); - - const kysely = new Kysely({ - dialect: new PgliteDialect({ database: pglite }), - log: KyselyLogger, - }); - - const listen = (channel: string, callback: (payload: string) => void): void => { - pglite.listen(channel, callback); - }; - - return { - kysely, - poolSize: 1, - availableConnections: 1, - listen, - }; - } -} diff --git a/src/db/adapters/DittoPostgres.ts b/src/db/adapters/DittoPostgres.ts deleted file mode 100644 index 180e4a7a..00000000 --- a/src/db/adapters/DittoPostgres.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { - 
BinaryOperationNode, - FunctionNode, - Kysely, - OperatorNode, - PostgresAdapter, - PostgresIntrospector, - PostgresQueryCompiler, - PrimitiveValueListNode, - ValueNode, -} from 'kysely'; -import { PostgresJSDialectConfig, PostgresJSDriver } from 'kysely-postgres-js'; -import postgres from 'postgres'; - -import { DittoDatabase, DittoDatabaseOpts } from '@/db/DittoDatabase.ts'; -import { DittoTables } from '@/db/DittoTables.ts'; -import { KyselyLogger } from '@/db/KyselyLogger.ts'; - -export class DittoPostgres { - static create(databaseUrl: string, opts?: DittoDatabaseOpts): DittoDatabase { - const pg = postgres(databaseUrl, { max: opts?.poolSize }); - - const kysely = new Kysely({ - dialect: { - createAdapter() { - return new PostgresAdapter(); - }, - createDriver() { - return new PostgresJSDriver({ - postgres: pg as unknown as PostgresJSDialectConfig['postgres'], - }); - }, - createIntrospector(db) { - return new PostgresIntrospector(db); - }, - createQueryCompiler() { - return new DittoPostgresQueryCompiler(); - }, - }, - log: KyselyLogger, - }); - - const listen = (channel: string, callback: (payload: string) => void): void => { - pg.listen(channel, callback); - }; - - return { - kysely, - get poolSize() { - return pg.connections.open; - }, - get availableConnections() { - return pg.connections.idle; - }, - listen, - }; - } -} - -/** Converts `in` queries to `any` to improve prepared statements on Postgres. 
*/ -class DittoPostgresQueryCompiler extends PostgresQueryCompiler { - protected override visitBinaryOperation(node: BinaryOperationNode): void { - if ( - OperatorNode.is(node.operator) && node.operator.operator === 'in' && PrimitiveValueListNode.is(node.rightOperand) - ) { - this.visitNode(node.leftOperand); - this.append(' = '); - this.visitNode(FunctionNode.create('any', [ValueNode.create(node.rightOperand.values)])); - } else { - super.visitBinaryOperation(node); - } - } -} diff --git a/src/db/migrations/002_events_fts.ts b/src/db/migrations/002_events_fts.ts deleted file mode 100644 index 45ad03e4..00000000 --- a/src/db/migrations/002_events_fts.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Kysely } from 'kysely'; - -export async function up(_db: Kysely): Promise { - // This migration used to create an FTS table for SQLite, but SQLite support was removed. -} - -export async function down(_db: Kysely): Promise { -} diff --git a/src/db/migrations/003_events_admin.ts b/src/db/migrations/003_events_admin.ts deleted file mode 100644 index 388a3a47..00000000 --- a/src/db/migrations/003_events_admin.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Kysely } from 'kysely'; - -export async function up(_db: Kysely): Promise { -} - -export async function down(db: Kysely): Promise { - await db.schema.alterTable('users').dropColumn('admin').execute(); -} diff --git a/src/db/migrations/004_add_user_indexes.ts b/src/db/migrations/004_add_user_indexes.ts deleted file mode 100644 index fca9c5f3..00000000 --- a/src/db/migrations/004_add_user_indexes.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Kysely } from 'kysely'; - -export async function up(_db: Kysely): Promise { -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('idx_users_pubkey').execute(); - await db.schema.dropIndex('idx_users_username').execute(); -} diff --git a/src/db/migrations/006_pragma.ts b/src/db/migrations/006_pragma.ts deleted file mode 100644 index f20ee9bd..00000000 --- 
a/src/db/migrations/006_pragma.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { Kysely } from 'kysely'; - -export async function up(_db: Kysely): Promise { -} - -export async function down(_db: Kysely): Promise { -} diff --git a/src/db/migrations/008_wal.ts b/src/db/migrations/008_wal.ts deleted file mode 100644 index f20ee9bd..00000000 --- a/src/db/migrations/008_wal.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { Kysely } from 'kysely'; - -export async function up(_db: Kysely): Promise { -} - -export async function down(_db: Kysely): Promise { -} diff --git a/src/db/migrations/010_drop_users.ts b/src/db/migrations/010_drop_users.ts deleted file mode 100644 index c36f2fa9..00000000 --- a/src/db/migrations/010_drop_users.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Kysely } from 'kysely'; - -export async function up(db: Kysely): Promise { - await db.schema.dropTable('users').ifExists().execute(); -} - -export async function down(_db: Kysely): Promise { -} diff --git a/src/filter.test.ts b/src/filter.test.ts deleted file mode 100644 index 9379208e..00000000 --- a/src/filter.test.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { assertEquals } from '@std/assert'; - -import event0 from '~/fixtures/events/event-0.json' with { type: 'json' }; -import event1 from '~/fixtures/events/event-1.json' with { type: 'json' }; - -import { eventToMicroFilter, getFilterId, getFilterLimit, getMicroFilters, isMicrofilter } from './filter.ts'; - -Deno.test('getMicroFilters', () => { - const event = event0; - const microfilters = getMicroFilters(event); - assertEquals(microfilters.length, 2); - assertEquals(microfilters[0], { authors: [event.pubkey], kinds: [0] }); - assertEquals(microfilters[1], { ids: [event.id] }); -}); - -Deno.test('eventToMicroFilter', () => { - assertEquals(eventToMicroFilter(event0), { authors: [event0.pubkey], kinds: [0] }); - assertEquals(eventToMicroFilter(event1), { ids: [event1.id] }); -}); - -Deno.test('isMicrofilter', () => { - assertEquals(isMicrofilter({ ids: [event0.id] 
}), true); - assertEquals(isMicrofilter({ authors: [event0.pubkey], kinds: [0] }), true); - assertEquals(isMicrofilter({ ids: [event0.id], authors: [event0.pubkey], kinds: [0] }), false); -}); - -Deno.test('getFilterId', () => { - assertEquals( - getFilterId({ ids: [event0.id] }), - '{"ids":["63d38c9b483d2d98a46382eadefd272e0e4bdb106a5b6eddb400c4e76f693d35"]}', - ); - assertEquals( - getFilterId({ authors: [event0.pubkey], kinds: [0] }), - '{"authors":["79c2cae114ea28a981e7559b4fe7854a473521a8d22a66bbab9fa248eb820ff6"],"kinds":[0]}', - ); -}); - -Deno.test('getFilterLimit', () => { - assertEquals(getFilterLimit({ ids: [event0.id] }), 1); - assertEquals(getFilterLimit({ ids: [event0.id], limit: 2 }), 1); - assertEquals(getFilterLimit({ ids: [event0.id], limit: 0 }), 0); - assertEquals(getFilterLimit({ ids: [event0.id], limit: -1 }), 0); - assertEquals(getFilterLimit({ kinds: [0], authors: [event0.pubkey] }), 1); - assertEquals(getFilterLimit({ kinds: [1], authors: [event0.pubkey] }), Infinity); - assertEquals(getFilterLimit({}), Infinity); -}); diff --git a/src/filter.ts b/src/filter.ts deleted file mode 100644 index f9288c8a..00000000 --- a/src/filter.ts +++ /dev/null @@ -1,97 +0,0 @@ -import { NKinds, NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify'; -import stringifyStable from 'fast-stable-stringify'; -import { z } from 'zod'; - -/** Microfilter to get one specific event by ID. */ -type IdMicrofilter = { ids: [NostrEvent['id']] }; -/** Microfilter to get an author. */ -type AuthorMicrofilter = { kinds: [0]; authors: [NostrEvent['pubkey']] }; -/** Filter to get one specific event. */ -type MicroFilter = IdMicrofilter | AuthorMicrofilter; - -/** Get deterministic ID for a microfilter. 
*/ -function getFilterId(filter: MicroFilter): string { - if ('ids' in filter) { - return stringifyStable({ ids: [filter.ids[0]] }); - } else { - return stringifyStable({ - kinds: [filter.kinds[0]], - authors: [filter.authors[0]], - }); - } -} - -/** Get a microfilter from a Nostr event. */ -function eventToMicroFilter(event: NostrEvent): MicroFilter { - const [microfilter] = getMicroFilters(event); - return microfilter; -} - -/** Get all the microfilters for an event, in order of priority. */ -function getMicroFilters(event: NostrEvent): MicroFilter[] { - const microfilters: MicroFilter[] = []; - if (event.kind === 0) { - microfilters.push({ kinds: [0], authors: [event.pubkey] }); - } - microfilters.push({ ids: [event.id] }); - return microfilters; -} - -/** Microfilter schema. */ -const microFilterSchema = z.union([ - z.object({ ids: z.tuple([n.id()]) }).strict(), - z.object({ kinds: z.tuple([z.literal(0)]), authors: z.tuple([n.id()]) }).strict(), -]); - -/** Checks whether the filter is a microfilter. */ -function isMicrofilter(filter: NostrFilter): filter is MicroFilter { - return microFilterSchema.safeParse(filter).success; -} - -/** Returns true if the filter could potentially return any stored events at all. */ -function canFilter(filter: NostrFilter): boolean { - return getFilterLimit(filter) > 0; -} - -/** Normalize the `limit` of each filter, and remove filters that can't produce any events. */ -function normalizeFilters(filters: F[]): F[] { - return filters.reduce((acc, filter) => { - const limit = getFilterLimit(filter); - if (limit > 0) { - acc.push(limit === Infinity ? filter : { ...filter, limit }); - } - return acc; - }, []); -} - -/** Calculate the intrinsic limit of a filter. This function may return `Infinity`. 
*/ -function getFilterLimit(filter: NostrFilter): number { - if (filter.ids && !filter.ids.length) return 0; - if (filter.kinds && !filter.kinds.length) return 0; - if (filter.authors && !filter.authors.length) return 0; - - for (const [key, value] of Object.entries(filter)) { - if (key[0] === '#' && Array.isArray(value) && !value.length) return 0; - } - - return Math.min( - Math.max(0, filter.limit ?? Infinity), - filter.ids?.length ?? Infinity, - filter.authors?.length && filter.kinds?.every((kind) => NKinds.replaceable(kind)) - ? filter.authors.length * filter.kinds.length - : Infinity, - ); -} - -export { - type AuthorMicrofilter, - canFilter, - eventToMicroFilter, - getFilterId, - getFilterLimit, - getMicroFilters, - type IdMicrofilter, - isMicrofilter, - type MicroFilter, - normalizeFilters, -}; diff --git a/src/firehose.ts b/src/firehose.ts deleted file mode 100644 index da8ab9c1..00000000 --- a/src/firehose.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { Semaphore } from '@lambdalisue/async'; -import { Stickynotes } from '@soapbox/stickynotes'; - -import { Conf } from '@/config.ts'; -import { firehoseEventsCounter } from '@/metrics.ts'; -import { Storages } from '@/storages.ts'; -import { nostrNow } from '@/utils.ts'; - -import * as pipeline from '@/pipeline.ts'; - -const console = new Stickynotes('ditto:firehose'); -const sem = new Semaphore(Conf.firehoseConcurrency); - -/** - * This function watches events on all known relays and performs - * side-effects based on them, such as trending hashtag tracking - * and storing events for notifications and the home feed. 
- */ -export async function startFirehose(): Promise { - const store = await Storages.client(); - - for await (const msg of store.req([{ kinds: Conf.firehoseKinds, limit: 0, since: nostrNow() }])) { - if (msg[0] === 'EVENT') { - const event = msg[2]; - console.debug(`NostrEvent<${event.kind}> ${event.id}`); - firehoseEventsCounter.inc({ kind: event.kind }); - - sem.lock(async () => { - try { - await pipeline.handleEvent(event, AbortSignal.timeout(5000)); - } catch (e) { - console.warn(e); - } - }); - } - } -} diff --git a/src/interfaces/DittoFilter.ts b/src/interfaces/DittoFilter.ts deleted file mode 100644 index f7f1a9ea..00000000 --- a/src/interfaces/DittoFilter.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { NostrEvent } from '@nostrify/nostrify'; -import { DittoEvent } from '@/interfaces/DittoEvent.ts'; - -/** Additional properties that may be added by Ditto to events. */ -export type DittoRelation = Exclude; diff --git a/src/metrics.ts b/src/metrics.ts deleted file mode 100644 index 7fe75a8f..00000000 --- a/src/metrics.ts +++ /dev/null @@ -1,149 +0,0 @@ -import { Counter, Gauge, Histogram } from 'prom-client'; - -export const httpRequestsCounter = new Counter({ - name: 'ditto_http_requests_total', - help: 'Total number of HTTP requests', - labelNames: ['method'], -}); - -export const httpResponsesCounter = new Counter({ - name: 'ditto_http_responses_total', - help: 'Total number of HTTP responses', - labelNames: ['method', 'path', 'status'], -}); - -export const httpResponseDurationHistogram = new Histogram({ - name: 'ditto_http_response_duration_seconds', - help: 'Histogram of HTTP response times in seconds', - labelNames: ['method', 'path', 'status'], -}); - -export const streamingConnectionsGauge = new Gauge({ - name: 'ditto_streaming_connections', - help: 'Number of active connections to the streaming API', -}); - -export const streamingServerMessagesCounter = new Counter({ - name: 'ditto_streaming_server_messages_total', - help: 'Total number of messages sent 
from the streaming API', -}); - -export const streamingClientMessagesCounter = new Counter({ - name: 'ditto_streaming_client_messages_total', - help: 'Total number of messages received by the streaming API', -}); - -export const fetchResponsesCounter = new Counter({ - name: 'ditto_fetch_responses_total', - help: 'Total number of fetch requests', - labelNames: ['method', 'status'], -}); - -export const firehoseEventsCounter = new Counter({ - name: 'ditto_firehose_events_total', - help: 'Total number of Nostr events processed by the firehose', - labelNames: ['kind'], -}); - -export const pipelineEventsCounter = new Counter({ - name: 'ditto_pipeline_events_total', - help: 'Total number of Nostr events processed by the pipeline', - labelNames: ['kind'], -}); - -export const policyEventsCounter = new Counter({ - name: 'ditto_policy_events_total', - help: 'Total number of policy OK responses', - labelNames: ['ok'], -}); - -export const relayEventsCounter = new Counter({ - name: 'ditto_relay_events_total', - help: 'Total number of EVENT messages processed by the relay', - labelNames: ['kind'], -}); - -export const relayMessagesCounter = new Counter({ - name: 'ditto_relay_messages_total', - help: 'Total number of Nostr messages processed by the relay', - labelNames: ['verb'], -}); - -export const relayConnectionsGauge = new Gauge({ - name: 'ditto_relay_connections', - help: 'Number of active connections to the relay', -}); - -export const dbQueriesCounter = new Counter({ - name: 'ditto_db_queries_total', - help: 'Total number of database queries', - labelNames: ['kind'], -}); - -export const dbEventsCounter = new Counter({ - name: 'ditto_db_events_total', - help: 'Total number of database inserts', - labelNames: ['kind'], -}); - -export const dbPoolSizeGauge = new Gauge({ - name: 'ditto_db_pool_size', - help: 'Number of connections in the database pool', -}); - -export const dbAvailableConnectionsGauge = new Gauge({ - name: 'ditto_db_available_connections', - help: 'Number 
of available connections in the database pool', -}); - -export const dbQueryDurationHistogram = new Histogram({ - name: 'ditto_db_query_duration_seconds', - help: 'Duration of database queries', -}); - -export const cachedFaviconsSizeGauge = new Gauge({ - name: 'ditto_cached_favicons_size', - help: 'Number of domain favicons in cache', -}); - -export const cachedLnurlsSizeGauge = new Gauge({ - name: 'ditto_cached_lnurls_size', - help: 'Number of LNURL details in cache', -}); - -export const cachedNip05sSizeGauge = new Gauge({ - name: 'ditto_cached_nip05s_size', - help: 'Number of NIP-05 results in cache', -}); - -export const cachedLinkPreviewSizeGauge = new Gauge({ - name: 'ditto_cached_link_previews_size', - help: 'Number of link previews in cache', -}); - -export const cachedTranslationsSizeGauge = new Gauge({ - name: 'ditto_cached_translations_size', - help: 'Number of translated statuses in cache', -}); - -export const internalSubscriptionsSizeGauge = new Gauge({ - name: 'ditto_internal_subscriptions_size', - help: "Number of active subscriptions to Ditto's internal relay", -}); - -export const relayPoolRelaysSizeGauge = new Gauge({ - name: 'ditto_relay_pool_relays_size', - help: 'Number of relays in the relay pool', - labelNames: ['ready_state'], -}); - -export const relayPoolSubscriptionsSizeGauge = new Gauge({ - name: 'ditto_relay_pool_subscriptions_size', - help: 'Number of active subscriptions to the relay pool', -}); - -export const webPushNotificationsCounter = new Counter({ - name: 'ditto_web_push_notifications_total', - help: 'Total number of Web Push notifications sent', - labelNames: ['type'], -}); diff --git a/src/middleware/auth98Middleware.ts b/src/middleware/auth98Middleware.ts deleted file mode 100644 index 85557151..00000000 --- a/src/middleware/auth98Middleware.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { HTTPException } from '@hono/hono/http-exception'; -import { NostrEvent } from '@nostrify/nostrify'; - -import { type AppContext, type 
AppMiddleware } from '@/app.ts'; -import { ReadOnlySigner } from '@/signers/ReadOnlySigner.ts'; -import { Storages } from '@/storages.ts'; -import { localRequest } from '@/utils/api.ts'; -import { - buildAuthEventTemplate, - parseAuthRequest, - type ParseAuthRequestOpts, - validateAuthEvent, -} from '@/utils/nip98.ts'; -import { Conf } from '@/config.ts'; - -/** - * NIP-98 auth. - * https://github.com/nostr-protocol/nips/blob/master/98.md - */ -function auth98Middleware(opts: ParseAuthRequestOpts = {}): AppMiddleware { - return async (c, next) => { - const req = localRequest(c); - const result = await parseAuthRequest(req, opts); - - if (result.success) { - c.set('signer', new ReadOnlySigner(result.data.pubkey)); - c.set('proof', result.data); - } - - await next(); - }; -} - -type UserRole = 'user' | 'admin'; - -/** Require the user to prove their role before invoking the controller. */ -function requireRole(role: UserRole, opts?: ParseAuthRequestOpts): AppMiddleware { - return withProof(async (_c, proof, next) => { - const store = await Storages.db(); - - const [user] = await store.query([{ - kinds: [30382], - authors: [Conf.pubkey], - '#d': [proof.pubkey], - limit: 1, - }]); - - if (user && matchesRole(user, role)) { - await next(); - } else { - throw new HTTPException(401); - } - }, opts); -} - -/** Require the user to demonstrate they own the pubkey by signing an event. */ -function requireProof(opts?: ParseAuthRequestOpts): AppMiddleware { - return withProof(async (_c, _proof, next) => { - await next(); - }, opts); -} - -/** Check whether the user fulfills the role. */ -function matchesRole(user: NostrEvent, role: UserRole): boolean { - return user.tags.some(([tag, value]) => tag === 'n' && value === role); -} - -/** HOC to obtain proof in middleware. 
*/ -function withProof( - handler: (c: AppContext, proof: NostrEvent, next: () => Promise) => Promise, - opts?: ParseAuthRequestOpts, -): AppMiddleware { - return async (c, next) => { - const signer = c.get('signer'); - const pubkey = await signer?.getPublicKey(); - const proof = c.get('proof') || await obtainProof(c, opts); - - // Prevent people from accidentally using the wrong account. This has no other security implications. - if (proof && pubkey && pubkey !== proof.pubkey) { - throw new HTTPException(401, { message: 'Pubkey mismatch' }); - } - - if (proof) { - c.set('proof', proof); - - if (!signer) { - c.set('signer', new ReadOnlySigner(proof.pubkey)); - } - - await handler(c, proof, next); - } else { - throw new HTTPException(401, { message: 'No proof' }); - } - }; -} - -/** Get the proof over Nostr Connect. */ -async function obtainProof(c: AppContext, opts?: ParseAuthRequestOpts) { - const signer = c.get('signer'); - if (!signer) { - throw new HTTPException(401, { - res: c.json({ error: 'No way to sign Nostr event' }, 401), - }); - } - - const req = localRequest(c); - const reqEvent = await buildAuthEventTemplate(req, opts); - const resEvent = await signer.signEvent(reqEvent); - const result = await validateAuthEvent(req, resEvent, opts); - - if (result.success) { - return result.data; - } -} - -export { auth98Middleware, requireProof, requireRole }; diff --git a/src/middleware/paginationMiddleware.ts b/src/middleware/paginationMiddleware.ts deleted file mode 100644 index b1f1e2f3..00000000 --- a/src/middleware/paginationMiddleware.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { AppMiddleware } from '@/app.ts'; -import { paginationSchema } from '@/schemas/pagination.ts'; -import { Storages } from '@/storages.ts'; - -/** Fixes compatibility with Mastodon apps by that don't use `Link` headers. 
*/ -export const paginationMiddleware: AppMiddleware = async (c, next) => { - const pagination = paginationSchema.parse(c.req.query()); - - const { - max_id: maxId, - min_id: minId, - since, - until, - } = pagination; - - if ((maxId && !until) || (minId && !since)) { - const ids: string[] = []; - - if (maxId) ids.push(maxId); - if (minId) ids.push(minId); - - if (ids.length) { - const store = await Storages.db(); - - const events = await store.query( - [{ ids, limit: ids.length }], - { signal: c.req.raw.signal }, - ); - - for (const event of events) { - if (!until && maxId === event.id) pagination.until = event.created_at; - if (!since && minId === event.id) pagination.since = event.created_at; - } - } - } - - c.set('pagination', { - since: pagination.since, - until: pagination.until, - limit: pagination.limit, - }); - - c.set('listPagination', { - limit: pagination.limit, - offset: pagination.offset, - }); - - await next(); -}; diff --git a/src/middleware/rateLimitMiddleware.ts b/src/middleware/rateLimitMiddleware.ts deleted file mode 100644 index 689f7cee..00000000 --- a/src/middleware/rateLimitMiddleware.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { MiddlewareHandler } from '@hono/hono'; -import { rateLimiter } from 'hono-rate-limiter'; - -/** - * Rate limit middleware for Hono, based on [`hono-rate-limiter`](https://github.com/rhinobase/hono-rate-limiter). - */ -export function rateLimitMiddleware(limit: number, windowMs: number): MiddlewareHandler { - // @ts-ignore Mismatched hono versions. 
- return rateLimiter({ - limit, - windowMs, - skip: (c) => !c.req.header('x-real-ip'), - keyGenerator: (c) => c.req.header('x-real-ip')!, - }); -} diff --git a/src/middleware/requireSigner.ts b/src/middleware/requireSigner.ts deleted file mode 100644 index c954dbd6..00000000 --- a/src/middleware/requireSigner.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { HTTPException } from '@hono/hono/http-exception'; - -import { AppMiddleware } from '@/app.ts'; - -/** Throw a 401 if a signer isn't set. */ -export const requireSigner: AppMiddleware = async (c, next) => { - if (!c.get('signer')) { - throw new HTTPException(401, { message: 'No pubkey provided' }); - } - - await next(); -}; diff --git a/src/middleware/signerMiddleware.ts b/src/middleware/signerMiddleware.ts deleted file mode 100644 index 8fca06a3..00000000 --- a/src/middleware/signerMiddleware.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { HTTPException } from '@hono/hono/http-exception'; -import { NSecSigner } from '@nostrify/nostrify'; -import { nip19 } from 'nostr-tools'; - -import { AppMiddleware } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { ConnectSigner } from '@/signers/ConnectSigner.ts'; -import { ReadOnlySigner } from '@/signers/ReadOnlySigner.ts'; -import { Storages } from '@/storages.ts'; -import { aesDecrypt } from '@/utils/aes.ts'; -import { getTokenHash } from '@/utils/auth.ts'; - -/** We only accept "Bearer" type. */ -const BEARER_REGEX = new RegExp(`^Bearer (${nip19.BECH32_REGEX.source})$`); - -/** Make a `signer` object available to all controllers, or unset if the user isn't logged in. 
*/ -export const signerMiddleware: AppMiddleware = async (c, next) => { - const header = c.req.header('authorization'); - const match = header?.match(BEARER_REGEX); - - if (match) { - const [_, bech32] = match; - - if (bech32.startsWith('token1')) { - try { - const kysely = await Storages.kysely(); - const tokenHash = await getTokenHash(bech32 as `token1${string}`); - - const { pubkey: userPubkey, bunker_pubkey: bunkerPubkey, nip46_sk_enc, nip46_relays } = await kysely - .selectFrom('auth_tokens') - .select(['pubkey', 'bunker_pubkey', 'nip46_sk_enc', 'nip46_relays']) - .where('token_hash', '=', tokenHash) - .executeTakeFirstOrThrow(); - - const nep46Seckey = await aesDecrypt(Conf.seckey, nip46_sk_enc); - - c.set( - 'signer', - new ConnectSigner({ - bunkerPubkey, - userPubkey, - signer: new NSecSigner(nep46Seckey), - relays: nip46_relays, - }), - ); - } catch { - throw new HTTPException(401); - } - } else { - try { - const decoded = nip19.decode(bech32!); - - switch (decoded.type) { - case 'npub': - c.set('signer', new ReadOnlySigner(decoded.data)); - break; - case 'nprofile': - c.set('signer', new ReadOnlySigner(decoded.data.pubkey)); - break; - case 'nsec': - c.set('signer', new NSecSigner(decoded.data)); - break; - } - } catch { - throw new HTTPException(401); - } - } - } - - await next(); -}; diff --git a/src/middleware/storeMiddleware.ts b/src/middleware/storeMiddleware.ts deleted file mode 100644 index 4e24ab05..00000000 --- a/src/middleware/storeMiddleware.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { AppMiddleware } from '@/app.ts'; -import { UserStore } from '@/storages/UserStore.ts'; -import { Storages } from '@/storages.ts'; - -/** Store middleware. 
*/ -export const storeMiddleware: AppMiddleware = async (c, next) => { - const pubkey = await c.get('signer')?.getPublicKey(); - - if (pubkey) { - const store = new UserStore(pubkey, await Storages.admin()); - c.set('store', store); - } else { - c.set('store', await Storages.admin()); - } - await next(); -}; diff --git a/src/middleware/uploaderMiddleware.ts b/src/middleware/uploaderMiddleware.ts deleted file mode 100644 index 96a47336..00000000 --- a/src/middleware/uploaderMiddleware.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { BlossomUploader, NostrBuildUploader } from '@nostrify/nostrify/uploaders'; - -import { AppMiddleware } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import { DenoUploader } from '@/uploaders/DenoUploader.ts'; -import { IPFSUploader } from '@/uploaders/IPFSUploader.ts'; -import { S3Uploader } from '@/uploaders/S3Uploader.ts'; -import { fetchWorker } from '@/workers/fetch.ts'; - -/** Set an uploader for the user. */ -export const uploaderMiddleware: AppMiddleware = async (c, next) => { - const signer = c.get('signer'); - - switch (Conf.uploader) { - case 's3': - c.set( - 'uploader', - new S3Uploader({ - accessKey: Conf.s3.accessKey, - bucket: Conf.s3.bucket, - endPoint: Conf.s3.endPoint!, - pathStyle: Conf.s3.pathStyle, - port: Conf.s3.port, - region: Conf.s3.region!, - secretKey: Conf.s3.secretKey, - sessionToken: Conf.s3.sessionToken, - useSSL: Conf.s3.useSSL, - }), - ); - break; - case 'ipfs': - c.set('uploader', new IPFSUploader({ baseUrl: Conf.mediaDomain, apiUrl: Conf.ipfs.apiUrl, fetch: fetchWorker })); - break; - case 'local': - c.set('uploader', new DenoUploader({ baseUrl: Conf.mediaDomain, dir: Conf.uploadsDir })); - break; - case 'nostrbuild': - c.set('uploader', new NostrBuildUploader({ endpoint: Conf.nostrbuildEndpoint, signer, fetch: fetchWorker })); - break; - case 'blossom': - if (signer) { - c.set('uploader', new BlossomUploader({ servers: Conf.blossomServers, signer, fetch: fetchWorker })); - } - break; - } - - await 
next(); -}; diff --git a/src/notify.ts b/src/notify.ts deleted file mode 100644 index 69480875..00000000 --- a/src/notify.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Semaphore } from '@lambdalisue/async'; - -import { Conf } from '@/config.ts'; -import * as pipeline from '@/pipeline.ts'; -import { Storages } from '@/storages.ts'; - -const sem = new Semaphore(1); - -export async function startNotify(): Promise { - const { listen } = await Storages.database(); - const store = await Storages.db(); - - listen('nostr_event', (payload) => { - sem.lock(async () => { - try { - const id = payload; - const timeout = Conf.db.timeouts.default; - - const [event] = await store.query([{ ids: [id], limit: 1 }], { signal: AbortSignal.timeout(timeout) }); - if (event) { - await pipeline.handleEvent(event, AbortSignal.timeout(timeout)); - } - } catch (e) { - console.warn(e); - } - }); - }); -} diff --git a/src/pipeline.ts b/src/pipeline.ts deleted file mode 100644 index 5becff20..00000000 --- a/src/pipeline.ts +++ /dev/null @@ -1,380 +0,0 @@ -import { NKinds, NostrEvent, NSchema as n } from '@nostrify/nostrify'; -import { Stickynotes } from '@soapbox/stickynotes'; -import { Kysely, sql } from 'kysely'; -import { LRUCache } from 'lru-cache'; -import { z } from 'zod'; - -import { Conf } from '@/config.ts'; -import { DittoTables } from '@/db/DittoTables.ts'; -import { DittoPush } from '@/DittoPush.ts'; -import { DittoEvent } from '@/interfaces/DittoEvent.ts'; -import { pipelineEventsCounter, policyEventsCounter, webPushNotificationsCounter } from '@/metrics.ts'; -import { RelayError } from '@/RelayError.ts'; -import { AdminSigner } from '@/signers/AdminSigner.ts'; -import { hydrateEvents } from '@/storages/hydrate.ts'; -import { Storages } from '@/storages.ts'; -import { eventAge, parseNip05, Time } from '@/utils.ts'; -import { getAmount } from '@/utils/bolt11.ts'; -import { detectLanguage } from '@/utils/language.ts'; -import { nip05Cache } from '@/utils/nip05.ts'; -import { purifyEvent 
} from '@/utils/purify.ts'; -import { updateStats } from '@/utils/stats.ts'; -import { getTagSet } from '@/utils/tags.ts'; -import { renderWebPushNotification } from '@/views/mastodon/push.ts'; -import { policyWorker } from '@/workers/policy.ts'; -import { verifyEventWorker } from '@/workers/verify.ts'; - -const console = new Stickynotes('ditto:pipeline'); - -/** - * Common pipeline function to process (and maybe store) events. - * It is idempotent, so it can be called multiple times for the same event. - */ -async function handleEvent(event: DittoEvent, signal: AbortSignal): Promise { - if (eventAge(event) < -Time.minutes(1)) { - throw new RelayError('invalid', 'event too far in the future'); - } - // Integer max value for Postgres. - if (event.kind >= 2_147_483_647) { - throw new RelayError('invalid', 'event kind too large'); - } - // The only point of ephemeral events is to stream them, - // so throw an error if we're not even going to do that. - if (NKinds.ephemeral(event.kind) && !isFresh(event)) { - throw new RelayError('invalid', 'event too old'); - } - // Block NIP-70 events, because we have no way to `AUTH`. - if (isProtectedEvent(event)) { - throw new RelayError('invalid', 'protected event'); - } - // Validate the event's signature. - if (!(await verifyEventWorker(event))) { - throw new RelayError('invalid', 'invalid signature'); - } - // Skip events that have been recently encountered. - // We must do this after verifying the signature. - if (encounterEvent(event)) { - throw new RelayError('duplicate', 'already have this event'); - } - - // Log the event. - console.info(`NostrEvent<${event.kind}> ${event.id}`); - pipelineEventsCounter.inc({ kind: event.kind }); - - // NIP-46 events get special treatment. - // They are exempt from policies and other side-effects, and should be streamed out immediately. - // If streaming fails, an error should be returned. 
- if (event.kind === 24133) { - await streamOut(event); - return; - } - - // Ensure the event doesn't violate the policy. - if (event.pubkey !== Conf.pubkey) { - await policyFilter(event, signal); - } - - // Prepare the event for additional checks. - // FIXME: This is kind of hacky. Should be reorganized to fetch only what's needed for each stage. - await hydrateEvent(event, signal); - - // Ensure that the author is not banned. - const n = getTagSet(event.user?.tags ?? [], 'n'); - if (n.has('disabled')) { - throw new RelayError('blocked', 'author is blocked'); - } - - // Ephemeral events must throw if they are not streamed out. - if (NKinds.ephemeral(event.kind)) { - await Promise.all([ - streamOut(event), - webPush(event), - ]); - return; - } - - const kysely = await Storages.kysely(); - - try { - await storeEvent(purifyEvent(event), signal); - } finally { - // This needs to run in steps, and should not block the API from responding. - Promise.allSettled([ - handleZaps(kysely, event), - parseMetadata(event, signal), - setLanguage(event), - generateSetEvents(event), - ]) - .then(() => - Promise.allSettled([ - streamOut(event), - webPush(event), - ]) - ); - } -} - -async function policyFilter(event: NostrEvent, signal: AbortSignal): Promise { - const console = new Stickynotes('ditto:policy'); - - try { - const result = await policyWorker.call(event, signal); - policyEventsCounter.inc({ ok: String(result[2]) }); - console.debug(JSON.stringify(result)); - RelayError.assert(result); - } catch (e) { - if (e instanceof RelayError) { - throw e; - } else { - console.error(e); - throw new RelayError('blocked', 'policy error'); - } - } -} - -const encounters = new LRUCache({ max: 1000 }); - -/** Encounter the event, and return whether it has already been encountered. 
*/ -function encounterEvent(event: NostrEvent): boolean { - const encountered = !!encounters.get(event.id); - if (!encountered) { - encounters.set(event.id, true); - } - return encountered; -} - -/** Check whether the event has a NIP-70 `-` tag. */ -function isProtectedEvent(event: NostrEvent): boolean { - return event.tags.some(([name]) => name === '-'); -} - -/** Hydrate the event with the user, if applicable. */ -async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise { - await hydrateEvents({ events: [event], store: await Storages.db(), signal }); - - const kysely = await Storages.kysely(); - const domain = await kysely - .selectFrom('pubkey_domains') - .select('domain') - .where('pubkey', '=', event.pubkey) - .executeTakeFirst(); - - event.author_domain = domain?.domain; -} - -/** Maybe store the event, if eligible. */ -async function storeEvent(event: NostrEvent, signal?: AbortSignal): Promise { - if (NKinds.ephemeral(event.kind)) return; - const store = await Storages.db(); - - try { - await store.transaction(async (store, kysely) => { - await updateStats({ event, store, kysely }); - await store.event(event, { signal }); - }); - } catch (e) { - // If the failure is only because of updateStats (which runs first), insert the event anyway. - // We can't catch this in the transaction because the error aborts the transaction on the Postgres side. - if (e instanceof Error && e.message.includes('event_stats' satisfies keyof DittoTables)) { - await store.event(event, { signal }); - } else { - throw e; - } - } -} - -/** Parse kind 0 metadata and track indexes in the database. */ -async function parseMetadata(event: NostrEvent, signal: AbortSignal): Promise { - if (event.kind !== 0) return; - - // Parse metadata. - const metadata = n.json().pipe(n.metadata()).catch({}).safeParse(event.content); - if (!metadata.success) return; - - const kysely = await Storages.kysely(); - - // Get nip05. 
- const { name, nip05 } = metadata.data; - const result = nip05 ? await nip05Cache.fetch(nip05, { signal }).catch(() => undefined) : undefined; - - // Populate author_search. - try { - const search = result?.pubkey === event.pubkey ? [name, nip05].filter(Boolean).join(' ').trim() : name ?? ''; - - if (search) { - await kysely.insertInto('author_stats') - .values({ pubkey: event.pubkey, search, followers_count: 0, following_count: 0, notes_count: 0 }) - .onConflict((oc) => oc.column('pubkey').doUpdateSet({ search })) - .execute(); - } - } catch { - // do nothing - } - - if (nip05 && result && result.pubkey === event.pubkey) { - // Track pubkey domain. - try { - const { domain } = parseNip05(nip05); - - await sql` - INSERT INTO pubkey_domains (pubkey, domain, last_updated_at) - VALUES (${event.pubkey}, ${domain}, ${event.created_at}) - ON CONFLICT(pubkey) DO UPDATE SET - domain = excluded.domain, - last_updated_at = excluded.last_updated_at - WHERE excluded.last_updated_at > pubkey_domains.last_updated_at - `.execute(kysely); - } catch (_e) { - // do nothing - } - } -} - -/** Update the event in the database and set its language. */ -async function setLanguage(event: NostrEvent): Promise { - if (event.kind !== 1) return; - - const language = detectLanguage(event.content, 0.90); - if (!language) return; - - const kysely = await Storages.kysely(); - try { - await kysely.updateTable('nostr_events') - .set('language', language) - .where('id', '=', event.id) - .execute(); - } catch { - // do nothing - } -} - -/** Determine if the event is being received in a timely manner. */ -function isFresh(event: NostrEvent): boolean { - return eventAge(event) < Time.minutes(1); -} - -/** Distribute the event through active subscriptions. 
*/ -async function streamOut(event: NostrEvent): Promise { - if (!isFresh(event)) { - throw new RelayError('invalid', 'event too old'); - } - - const pubsub = await Storages.pubsub(); - await pubsub.event(event); -} - -async function webPush(event: NostrEvent): Promise { - if (!isFresh(event)) { - throw new RelayError('invalid', 'event too old'); - } - - const kysely = await Storages.kysely(); - const pubkeys = getTagSet(event.tags, 'p'); - - if (!pubkeys.size) { - return; - } - - const rows = await kysely - .selectFrom('push_subscriptions') - .selectAll() - .where('pubkey', 'in', [...pubkeys]) - .execute(); - - for (const row of rows) { - const viewerPubkey = row.pubkey; - - if (viewerPubkey === event.pubkey) { - continue; // Don't notify authors about their own events. - } - - const message = await renderWebPushNotification(event, viewerPubkey); - if (!message) { - continue; - } - - const subscription = { - endpoint: row.endpoint, - keys: { - auth: row.auth, - p256dh: row.p256dh, - }, - }; - - await DittoPush.push(subscription, message); - webPushNotificationsCounter.inc({ type: message.notification_type }); - } -} - -async function generateSetEvents(event: NostrEvent): Promise { - const tagsAdmin = event.tags.some(([name, value]) => ['p', 'P'].includes(name) && value === Conf.pubkey); - - if (event.kind === 1984 && tagsAdmin) { - const signer = new AdminSigner(); - - const rel = await signer.signEvent({ - kind: 30383, - content: '', - tags: [ - ['d', event.id], - ['p', event.pubkey], - ['k', '1984'], - ['n', 'open'], - ...[...getTagSet(event.tags, 'p')].map((pubkey) => ['P', pubkey]), - ...[...getTagSet(event.tags, 'e')].map((pubkey) => ['e', pubkey]), - ], - created_at: Math.floor(Date.now() / 1000), - }); - - await handleEvent(rel, AbortSignal.timeout(1000)); - } - - if (event.kind === 3036 && tagsAdmin) { - const signer = new AdminSigner(); - - const rel = await signer.signEvent({ - kind: 30383, - content: '', - tags: [ - ['d', event.id], - ['p', 
event.pubkey], - ['k', '3036'], - ['n', 'pending'], - ], - created_at: Math.floor(Date.now() / 1000), - }); - - await handleEvent(rel, AbortSignal.timeout(1000)); - } -} - -/** Stores the event in the 'event_zaps' table */ -async function handleZaps(kysely: Kysely, event: NostrEvent) { - if (event.kind !== 9735) return; - - const zapRequestString = event?.tags?.find(([name]) => name === 'description')?.[1]; - if (!zapRequestString) return; - const zapRequest = n.json().pipe(n.event()).optional().catch(undefined).parse(zapRequestString); - if (!zapRequest) return; - - const amountSchema = z.coerce.number().int().nonnegative().catch(0); - const amount_millisats = amountSchema.parse(getAmount(event?.tags.find(([name]) => name === 'bolt11')?.[1])); - if (!amount_millisats || amount_millisats < 1) return; - - const zappedEventId = zapRequest.tags.find(([name]) => name === 'e')?.[1]; - if (!zappedEventId) return; - - try { - await kysely.insertInto('event_zaps').values({ - receipt_id: event.id, - target_event_id: zappedEventId, - sender_pubkey: zapRequest.pubkey, - amount_millisats, - comment: zapRequest.content, - }).execute(); - } catch { - // receipt_id is unique, do nothing - } -} - -export { handleEvent, handleZaps }; diff --git a/src/queries.ts b/src/queries.ts deleted file mode 100644 index e93027d9..00000000 --- a/src/queries.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify'; -import Debug from '@soapbox/stickynotes/debug'; - -import { Conf } from '@/config.ts'; -import { Storages } from '@/storages.ts'; -import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; -import { type DittoRelation } from '@/interfaces/DittoFilter.ts'; -import { hydrateEvents } from '@/storages/hydrate.ts'; -import { fallbackAuthor } from '@/utils.ts'; -import { findReplyTag, getTagSet } from '@/utils/tags.ts'; - -const debug = Debug('ditto:queries'); - -interface GetEventOpts { - /** Signal to abort the request. 
*/ - signal?: AbortSignal; - /** Event kind. */ - kind?: number; - /** Relations to include on the event. */ - relations?: DittoRelation[]; -} - -/** Get a Nostr event by its ID. */ -const getEvent = async ( - id: string, - opts: GetEventOpts = {}, -): Promise => { - debug(`getEvent: ${id}`); - const store = await Storages.db(); - const { kind, signal = AbortSignal.timeout(1000) } = opts; - - const filter: NostrFilter = { ids: [id], limit: 1 }; - if (kind) { - filter.kinds = [kind]; - } - - return await store.query([filter], { limit: 1, signal }) - .then((events) => hydrateEvents({ events, store, signal })) - .then(([event]) => event); -}; - -/** - * Get a Nostr `set_medatadata` event for a user's pubkey. - * @deprecated Use `store.query` directly. - */ -async function getAuthor(pubkey: string, opts: GetEventOpts = {}): Promise { - const store = await Storages.db(); - const { signal = AbortSignal.timeout(1000) } = opts; - - const events = await store.query([{ authors: [pubkey], kinds: [0], limit: 1 }], { limit: 1, signal }); - const event = events[0] ?? fallbackAuthor(pubkey); - - await hydrateEvents({ events: [event], store, signal }); - - return event; -} - -/** Get users the given pubkey follows. */ -const getFollows = async (pubkey: string, signal?: AbortSignal): Promise => { - const store = await Storages.db(); - const [event] = await store.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { limit: 1, signal }); - return event; -}; - -/** Get pubkeys the user follows. */ -async function getFollowedPubkeys(pubkey: string, signal?: AbortSignal): Promise> { - const event = await getFollows(pubkey, signal); - if (!event) return new Set(); - return getTagSet(event.tags, 'p'); -} - -/** Get pubkeys the user follows, including the user's own pubkey. 
*/ -async function getFeedPubkeys(pubkey: string): Promise> { - const authors = await getFollowedPubkeys(pubkey); - return authors.add(pubkey); -} - -async function getAncestors(store: NStore, event: NostrEvent, result: NostrEvent[] = []): Promise { - if (result.length < 100) { - const replyTag = findReplyTag(event.tags); - const inReplyTo = replyTag ? replyTag[1] : undefined; - - if (inReplyTo) { - const [parentEvent] = await store.query([{ ids: [inReplyTo], until: event.created_at, limit: 1 }]); - - if (parentEvent) { - result.push(parentEvent); - return getAncestors(store, parentEvent, result); - } - } - } - - return result.reverse(); -} - -async function getDescendants( - store: NStore, - event: NostrEvent, - signal = AbortSignal.timeout(2000), -): Promise { - return await store - .query([{ kinds: [1], '#e': [event.id], since: event.created_at, limit: 200 }], { signal }) - .then((events) => events.filter(({ tags }) => findReplyTag(tags)?.[1] === event.id)); -} - -/** Returns whether the pubkey is followed by a local user. 
*/ -async function isLocallyFollowed(pubkey: string): Promise { - const { host } = Conf.url; - - const store = await Storages.db(); - - const [event] = await store.query( - [{ kinds: [3], '#p': [pubkey], search: `domain:${host}`, limit: 1 }], - { limit: 1 }, - ); - - return Boolean(event); -} - -export { - getAncestors, - getAuthor, - getDescendants, - getEvent, - getFeedPubkeys, - getFollowedPubkeys, - getFollows, - isLocallyFollowed, -}; diff --git a/src/sentry.ts b/src/sentry.ts deleted file mode 100644 index 84b662e2..00000000 --- a/src/sentry.ts +++ /dev/null @@ -1,12 +0,0 @@ -import * as Sentry from '@sentry/deno'; - -import { Conf } from '@/config.ts'; - -// Sentry -if (Conf.sentryDsn) { - console.log('Sentry enabled'); - Sentry.init({ - dsn: Conf.sentryDsn, - tracesSampleRate: 1.0, - }); -} diff --git a/src/server.ts b/src/server.ts deleted file mode 100644 index 4825e99d..00000000 --- a/src/server.ts +++ /dev/null @@ -1,7 +0,0 @@ -import '@/precheck.ts'; -import '@/sentry.ts'; -import '@/nostr-wasm.ts'; -import app from '@/app.ts'; -import { Conf } from '@/config.ts'; - -Deno.serve({ port: Conf.port }, app.fetch); diff --git a/src/signers/AdminSigner.ts b/src/signers/AdminSigner.ts deleted file mode 100644 index 5aea2e21..00000000 --- a/src/signers/AdminSigner.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { NSecSigner } from '@nostrify/nostrify'; -import { Conf } from '@/config.ts'; - -/** Sign events as the Ditto server. */ -export class AdminSigner extends NSecSigner { - constructor() { - super(Conf.seckey); - } -} diff --git a/src/startup.ts b/src/startup.ts deleted file mode 100644 index 16439c0b..00000000 --- a/src/startup.ts +++ /dev/null @@ -1,18 +0,0 @@ -// Starts up applications required to run before the HTTP server is on. 
- -import { Conf } from '@/config.ts'; -import { cron } from '@/cron.ts'; -import { startFirehose } from '@/firehose.ts'; -import { startNotify } from '@/notify.ts'; - -if (Conf.firehoseEnabled) { - startFirehose(); -} - -if (Conf.notifyEnabled) { - startNotify(); -} - -if (Conf.cronEnabled) { - cron(); -} diff --git a/src/storages.ts b/src/storages.ts deleted file mode 100644 index 867c7939..00000000 --- a/src/storages.ts +++ /dev/null @@ -1,131 +0,0 @@ -// deno-lint-ignore-file require-await -import { Conf } from '@/config.ts'; -import { DittoDatabase } from '@/db/DittoDatabase.ts'; -import { DittoDB } from '@/db/DittoDB.ts'; -import { internalSubscriptionsSizeGauge } from '@/metrics.ts'; -import { wsUrlSchema } from '@/schema.ts'; -import { AdminStore } from '@/storages/AdminStore.ts'; -import { EventsDB } from '@/storages/EventsDB.ts'; -import { SearchStore } from '@/storages/search-store.ts'; -import { InternalRelay } from '@/storages/InternalRelay.ts'; -import { NPool, NRelay1 } from '@nostrify/nostrify'; -import { getRelays } from '@/utils/outbox.ts'; -import { seedZapSplits } from '@/utils/zap-split.ts'; - -export class Storages { - private static _db: Promise | undefined; - private static _database: Promise | undefined; - private static _admin: Promise | undefined; - private static _client: Promise> | undefined; - private static _pubsub: Promise | undefined; - private static _search: Promise | undefined; - - public static async database(): Promise { - if (!this._database) { - this._database = (async () => { - const db = DittoDB.create(Conf.databaseUrl, { - poolSize: Conf.pg.poolSize, - debug: Conf.pgliteDebug, - }); - await DittoDB.migrate(db.kysely); - return db; - })(); - } - return this._database; - } - - public static async kysely(): Promise { - const { kysely } = await this.database(); - return kysely; - } - - /** SQL database to store events this Ditto server cares about. 
*/ - public static async db(): Promise { - if (!this._db) { - this._db = (async () => { - const kysely = await this.kysely(); - const store = new EventsDB({ kysely, pubkey: Conf.pubkey, timeout: Conf.db.timeouts.default }); - await seedZapSplits(store); - return store; - })(); - } - return this._db; - } - - /** Admin user storage. */ - public static async admin(): Promise { - if (!this._admin) { - this._admin = Promise.resolve(new AdminStore(await this.db())); - } - return this._admin; - } - - /** Internal pubsub relay between controllers and the pipeline. */ - public static async pubsub(): Promise { - if (!this._pubsub) { - this._pubsub = Promise.resolve(new InternalRelay({ gauge: internalSubscriptionsSizeGauge })); - } - return this._pubsub; - } - - /** Relay pool storage. */ - public static async client(): Promise> { - if (!this._client) { - this._client = (async () => { - const db = await this.db(); - - const [relayList] = await db.query([ - { kinds: [10002], authors: [Conf.pubkey], limit: 1 }, - ]); - - const tags = relayList?.tags ?? []; - - const activeRelays = tags.reduce((acc, [name, url, marker]) => { - const valid = wsUrlSchema.safeParse(url).success; - - if (valid && name === 'r' && (!marker || marker === 'write')) { - acc.push(url); - } - return acc; - }, []); - - console.log(`pool: connecting to ${activeRelays.length} relays.`); - - return new NPool({ - open(url) { - return new NRelay1(url, { - // Skip event verification (it's done in the pipeline). - verifyEvent: () => true, - }); - }, - reqRouter: async (filters) => { - return new Map(activeRelays.map((relay) => { - return [relay, filters]; - })); - }, - eventRouter: async (event) => { - const relaySet = await getRelays(await Storages.db(), event.pubkey); - relaySet.delete(Conf.relay); - - const relays = [...relaySet].slice(0, 4); - return relays; - }, - }); - })(); - } - return this._client; - } - - /** Storage to use for remote search. 
*/ - public static async search(): Promise { - if (!this._search) { - this._search = Promise.resolve( - new SearchStore({ - relay: Conf.searchRelay, - fallback: await this.db(), - }), - ); - } - return this._search; - } -} diff --git a/src/storages/AdminStore.ts b/src/storages/AdminStore.ts deleted file mode 100644 index 4ebe2743..00000000 --- a/src/storages/AdminStore.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify'; - -import { Conf } from '@/config.ts'; -import { DittoEvent } from '@/interfaces/DittoEvent.ts'; -import { getTagSet } from '@/utils/tags.ts'; - -/** A store that prevents banned users from being displayed. */ -export class AdminStore implements NStore { - constructor(private store: NStore) {} - - async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise { - return await this.store.event(event, opts); - } - - async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise { - const events = await this.store.query(filters, opts); - const pubkeys = new Set(events.map((event) => event.pubkey)); - - const users = await this.store.query([{ - kinds: [30382], - authors: [Conf.pubkey], - '#d': [...pubkeys], - limit: pubkeys.size, - }]); - - return events.filter((event) => { - const user = users.find( - ({ kind, pubkey, tags }) => - kind === 30382 && pubkey === Conf.pubkey && tags.find(([name]) => name === 'd')?.[1] === event.pubkey, - ); - - const n = getTagSet(user?.tags ?? 
[], 'n'); - - if (n.has('disabled')) { - return false; - } - - return true; - }); - } -} diff --git a/src/storages/EventsDB.ts b/src/storages/EventsDB.ts deleted file mode 100644 index 6dccdcb2..00000000 --- a/src/storages/EventsDB.ts +++ /dev/null @@ -1,360 +0,0 @@ -// deno-lint-ignore-file require-await - -import { LanguageCode } from 'iso-639-1'; -import { NPostgres, NPostgresSchema } from '@nostrify/db'; -import { NIP50, NKinds, NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify'; -import { Stickynotes } from '@soapbox/stickynotes'; -import { Kysely, SelectQueryBuilder } from 'kysely'; -import { nip27 } from 'nostr-tools'; - -import { DittoTables } from '@/db/DittoTables.ts'; -import { dbEventsCounter } from '@/metrics.ts'; -import { RelayError } from '@/RelayError.ts'; -import { isNostrId } from '@/utils.ts'; -import { abortError } from '@/utils/abort.ts'; -import { purifyEvent } from '@/utils/purify.ts'; -import { DittoEvent } from '@/interfaces/DittoEvent.ts'; - -/** Function to decide whether or not to index a tag. */ -type TagCondition = ({ event, count, value }: { - event: NostrEvent; - count: number; - value: string; -}) => boolean; - -/** Options for the EventsDB store. */ -interface EventsDBOpts { - /** Kysely instance to use. */ - kysely: Kysely; - /** Pubkey of the admin account. */ - pubkey: string; - /** Timeout in milliseconds for database queries. */ - timeout: number; - /** Whether the event returned should be a Nostr event or a Ditto event. Defaults to false. */ - pure?: boolean; -} - -/** SQL database storage adapter for Nostr events. */ -class EventsDB extends NPostgres { - private console = new Stickynotes('ditto:db:events'); - - /** Conditions for when to index certain tags. 
*/ - static tagConditions: Record = { - 'a': ({ count }) => count < 15, - 'd': ({ event, count }) => count === 0 && NKinds.parameterizedReplaceable(event.kind), - 'e': ({ event, count, value }) => ((event.kind === 10003) || count < 15) && isNostrId(value), - 'k': ({ count, value }) => count === 0 && Number.isInteger(Number(value)), - 'L': ({ event, count }) => event.kind === 1985 || count === 0, - 'l': ({ event, count }) => event.kind === 1985 || count === 0, - 'n': ({ count, value }) => count < 50 && value.length < 50, - 'P': ({ count, value }) => count === 0 && isNostrId(value), - 'p': ({ event, count, value }) => (count < 15 || event.kind === 3) && isNostrId(value), - 'proxy': ({ count, value }) => count === 0 && value.length < 256, - 'q': ({ event, count, value }) => count === 0 && event.kind === 1 && isNostrId(value), - 'r': ({ event, count }) => (event.kind === 1985 ? count < 20 : count < 3), - 't': ({ event, count, value }) => (event.kind === 1985 ? count < 20 : count < 5) && value.length < 50, - }; - - constructor(private opts: EventsDBOpts) { - super(opts.kysely, { - indexTags: EventsDB.indexTags, - indexSearch: EventsDB.searchText, - }); - } - - /** Insert an event (and its tags) into the database. */ - override async event(event: NostrEvent, opts: { signal?: AbortSignal; timeout?: number } = {}): Promise { - event = purifyEvent(event); - this.console.debug('EVENT', JSON.stringify(event)); - dbEventsCounter.inc({ kind: event.kind }); - - if (await this.isDeletedAdmin(event)) { - throw new RelayError('blocked', 'event deleted by admin'); - } - - await this.deleteEventsAdmin(event); - - try { - await super.event(event, { ...opts, timeout: opts.timeout ?? 
this.opts.timeout }); - } catch (e) { - if (e instanceof Error && e.message === 'Cannot add a deleted event') { - throw new RelayError('blocked', 'event deleted by user'); - } else if (e instanceof Error && e.message === 'Cannot replace an event with an older event') { - return; - } else { - throw e; - } - } - } - - /** Check if an event has been deleted by the admin. */ - private async isDeletedAdmin(event: NostrEvent): Promise { - const filters: NostrFilter[] = [ - { kinds: [5], authors: [this.opts.pubkey], '#e': [event.id], limit: 1 }, - ]; - - if (NKinds.replaceable(event.kind) || NKinds.parameterizedReplaceable(event.kind)) { - const d = event.tags.find(([tag]) => tag === 'd')?.[1] ?? ''; - - filters.push({ - kinds: [5], - authors: [this.opts.pubkey], - '#a': [`${event.kind}:${event.pubkey}:${d}`], - since: event.created_at, - limit: 1, - }); - } - - const events = await this.query(filters); - return events.length > 0; - } - - /** The DITTO_NSEC can delete any event from the database. NDatabase already handles user deletions. 
*/ - private async deleteEventsAdmin(event: NostrEvent): Promise { - if (event.kind === 5 && event.pubkey === this.opts.pubkey) { - const ids = new Set(event.tags.filter(([name]) => name === 'e').map(([_name, value]) => value)); - const addrs = new Set(event.tags.filter(([name]) => name === 'a').map(([_name, value]) => value)); - - const filters: NostrFilter[] = []; - - if (ids.size) { - filters.push({ ids: [...ids] }); - } - - for (const addr of addrs) { - const [k, pubkey, d] = addr.split(':'); - const kind = Number(k); - - if (!(Number.isInteger(kind) && kind >= 0)) continue; - if (!isNostrId(pubkey)) continue; - if (d === undefined) continue; - - const filter: NostrFilter = { - kinds: [kind], - authors: [pubkey], - until: event.created_at, - }; - - if (d) { - filter['#d'] = [d]; - } - - filters.push(filter); - } - - if (filters.length) { - await this.remove(filters); - } - } - } - - protected override getFilterQuery(trx: Kysely, filter: NostrFilter) { - if (filter.search) { - const tokens = NIP50.parseInput(filter.search); - - let query = super.getFilterQuery(trx, { - ...filter, - search: tokens.filter((t) => typeof t === 'string').join(' '), - }) as SelectQueryBuilder; - - const languages = new Set(); - - for (const token of tokens) { - if (typeof token === 'object' && token.key === 'language') { - languages.add(token.value); - } - } - - if (languages.size) { - query = query.where('language', 'in', [...languages]); - } - - return query; - } - - return super.getFilterQuery(trx, filter); - } - - /** Get events for filters from the database. 
*/ - override async query( - filters: NostrFilter[], - opts: { signal?: AbortSignal; timeout?: number; limit?: number } = {}, - ): Promise { - filters = await this.expandFilters(filters); - - for (const filter of filters) { - if (filter.since && filter.since >= 2_147_483_647) { - throw new RelayError('invalid', 'since filter too far into the future'); - } - if (filter.until && filter.until >= 2_147_483_647) { - throw new RelayError('invalid', 'until filter too far into the future'); - } - for (const kind of filter.kinds ?? []) { - if (kind >= 2_147_483_647) { - throw new RelayError('invalid', 'kind filter too far into the future'); - } - } - } - - if (opts.signal?.aborted) return Promise.resolve([]); - - this.console.debug('REQ', JSON.stringify(filters)); - - return super.query(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout }); - } - - /** Parse an event row from the database. */ - protected override parseEventRow(row: DittoTables['nostr_events']): DittoEvent { - const event: DittoEvent = { - id: row.id, - kind: row.kind, - pubkey: row.pubkey, - content: row.content, - created_at: Number(row.created_at), - tags: row.tags, - sig: row.sig, - }; - - if (this.opts.pure) { - return event; - } - - if (row.language) { - event.language = row.language as LanguageCode; - } - - return event; - } - - /** Delete events based on filters from the database. */ - override async remove(filters: NostrFilter[], opts: { signal?: AbortSignal; timeout?: number } = {}): Promise { - this.console.debug('DELETE', JSON.stringify(filters)); - return super.remove(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout }); - } - - /** Get number of events that would be returned by filters. 
*/ - override async count( - filters: NostrFilter[], - opts: { signal?: AbortSignal; timeout?: number } = {}, - ): Promise<{ count: number; approximate: any }> { - if (opts.signal?.aborted) return Promise.reject(abortError()); - - this.console.debug('COUNT', JSON.stringify(filters)); - - return super.count(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout }); - } - - /** Return only the tags that should be indexed. */ - static override indexTags(event: NostrEvent): string[][] { - const tagCounts: Record = {}; - - function getCount(name: string) { - return tagCounts[name] || 0; - } - - function incrementCount(name: string) { - tagCounts[name] = getCount(name) + 1; - } - - function checkCondition(name: string, value: string, condition: TagCondition) { - return condition({ - event, - count: getCount(name), - value, - }); - } - - return event.tags.reduce((results, tag) => { - const [name, value] = tag; - const condition = EventsDB.tagConditions[name] as TagCondition | undefined; - - if (value && condition && value.length < 200 && checkCondition(name, value, condition)) { - results.push(tag); - } - - incrementCount(name); - return results; - }, []); - } - - /** Build a search index from the event. */ - static searchText(event: NostrEvent): string { - switch (event.kind) { - case 0: - return EventsDB.buildUserSearchContent(event); - case 1: - return nip27.replaceAll(event.content, () => ''); - case 30009: - return EventsDB.buildTagsSearchContent(event.tags.filter(([t]) => t !== 'alt')); - case 30360: - return event.tags.find(([name]) => name === 'd')?.[1] || ''; - default: - return ''; - } - } - - /** Build search content for a user. */ - static buildUserSearchContent(event: NostrEvent): string { - const { name, nip05 } = n.json().pipe(n.metadata()).catch({}).parse(event.content); - return [name, nip05].filter(Boolean).join('\n'); - } - - /** Build search content from tag values. 
*/ - static buildTagsSearchContent(tags: string[][]): string { - return tags.map(([_tag, value]) => value).join('\n'); - } - - /** Converts filters to more performant, simpler filters. */ - async expandFilters(filters: NostrFilter[]): Promise { - filters = structuredClone(filters); - - for (const filter of filters) { - if (filter.search) { - const tokens = NIP50.parseInput(filter.search); - - const domains = new Set(); - - for (const token of tokens) { - if (typeof token === 'object' && token.key === 'domain') { - domains.add(token.value); - } - } - - if (domains.size) { - let query = this.opts.kysely - .selectFrom('pubkey_domains') - .select('pubkey') - .where('domain', 'in', [...domains]); - - if (filter.authors) { - query = query.where('pubkey', 'in', filter.authors); - } - - const pubkeys = await query.execute().then((rows) => rows.map((row) => row.pubkey)); - - filter.authors = pubkeys; - } - - // Re-serialize the search string without the domain key. :facepalm: - filter.search = tokens - .filter((t) => typeof t === 'string' || typeof t === 'object' && t.key !== 'domain') - .map((t) => typeof t === 'object' ? `${t.key}:${t.value}` : t) - .join(' '); - } - - if (filter.kinds) { - // Ephemeral events are not stored, so don't bother querying for them. - // If this results in an empty kinds array, NDatabase will remove the filter before querying and return no results. 
- filter.kinds = filter.kinds.filter((kind) => !NKinds.ephemeral(kind)); - } - } - - return filters; - } - - override async transaction(callback: (store: NPostgres, kysely: Kysely) => Promise): Promise { - return super.transaction((store, kysely) => callback(store, kysely as unknown as Kysely)); - } -} - -export { EventsDB }; diff --git a/src/storages/InternalRelay.ts b/src/storages/InternalRelay.ts deleted file mode 100644 index 4400b562..00000000 --- a/src/storages/InternalRelay.ts +++ /dev/null @@ -1,86 +0,0 @@ -// deno-lint-ignore-file require-await -import { - NIP50, - NostrEvent, - NostrFilter, - NostrRelayCLOSED, - NostrRelayEOSE, - NostrRelayEVENT, - NRelay, -} from '@nostrify/nostrify'; -import { Machina } from '@nostrify/nostrify/utils'; -import { matchFilter } from 'nostr-tools'; -import { Gauge } from 'prom-client'; - -import { DittoEvent } from '@/interfaces/DittoEvent.ts'; -import { purifyEvent } from '@/utils/purify.ts'; - -interface InternalRelayOpts { - gauge?: Gauge; -} - -/** - * PubSub event store for streaming events within the application. - * The pipeline should push events to it, then anything in the application can subscribe to it. 
- */ -export class InternalRelay implements NRelay { - private subs = new Map }>(); - - constructor(private opts: InternalRelayOpts = {}) {} - - async *req( - filters: NostrFilter[], - opts?: { signal?: AbortSignal }, - ): AsyncGenerator { - const id = crypto.randomUUID(); - const machina = new Machina(opts?.signal); - - yield ['EOSE', id]; - - this.subs.set(id, { filters, machina }); - this.opts.gauge?.set(this.subs.size); - - try { - for await (const event of machina) { - yield ['EVENT', id, event]; - } - } finally { - this.subs.delete(id); - this.opts.gauge?.set(this.subs.size); - } - } - - async event(event: DittoEvent): Promise { - for (const { filters, machina } of this.subs.values()) { - for (const filter of filters) { - if (matchFilter(filter, event)) { - if (filter.search) { - const tokens = NIP50.parseInput(filter.search); - - const domain = (tokens.find((t) => - typeof t === 'object' && t.key === 'domain' - ) as { key: 'domain'; value: string } | undefined)?.value; - - if (domain === event.author_domain) { - machina.push(purifyEvent(event)); - break; - } - } else { - machina.push(purifyEvent(event)); - break; - } - } - } - } - - return Promise.resolve(); - } - - async query(): Promise { - return []; - } - - async close(): Promise { - return Promise.resolve(); - } -} diff --git a/src/storages/UserStore.ts b/src/storages/UserStore.ts deleted file mode 100644 index 2449d8c1..00000000 --- a/src/storages/UserStore.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify'; - -import { DittoEvent } from '@/interfaces/DittoEvent.ts'; -import { getTagSet } from '@/utils/tags.ts'; - -export class UserStore implements NStore { - private promise: Promise | undefined; - - constructor(private pubkey: string, private store: NStore) {} - - async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise { - return await this.store.event(event, opts); - } - - /** - * Query events that `pubkey` did not mute - * 
https://github.com/nostr-protocol/nips/blob/master/51.md#standard-lists - */ - async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise { - const events = await this.store.query(filters, opts); - const pubkeys = await this.getMutedPubkeys(); - - return events.filter((event) => { - return event.kind === 0 || !pubkeys.has(event.pubkey); - }); - } - - private async getMuteList(): Promise { - if (!this.promise) { - this.promise = this.store.query([{ authors: [this.pubkey], kinds: [10000], limit: 1 }]); - } - const [muteList] = await this.promise; - return muteList; - } - - private async getMutedPubkeys(): Promise> { - const mutedPubkeysEvent = await this.getMuteList(); - if (!mutedPubkeysEvent) { - return new Set(); - } - return getTagSet(mutedPubkeysEvent.tags, 'p'); - } -} diff --git a/src/storages/search-store.ts b/src/storages/search-store.ts deleted file mode 100644 index 4951c722..00000000 --- a/src/storages/search-store.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { NostrEvent, NostrFilter, NRelay1, NStore } from '@nostrify/nostrify'; -import Debug from '@soapbox/stickynotes/debug'; - -import { normalizeFilters } from '@/filter.ts'; -import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; -import { hydrateEvents } from '@/storages/hydrate.ts'; -import { abortError } from '@/utils/abort.ts'; - -interface SearchStoreOpts { - relay: string | undefined; - fallback: NStore; - hydrator?: NStore; -} - -class SearchStore implements NStore { - #debug = Debug('ditto:storages:search'); - - #fallback: NStore; - #hydrator: NStore; - #relay: NRelay1 | undefined; - - constructor(opts: SearchStoreOpts) { - this.#fallback = opts.fallback; - this.#hydrator = opts.hydrator ?? 
this; - - if (opts.relay) { - this.#relay = new NRelay1(opts.relay); - } - } - - event(_event: NostrEvent, _opts?: { signal?: AbortSignal }): Promise { - return Promise.reject(new Error('EVENT not implemented.')); - } - - async query(filters: NostrFilter[], opts?: { signal?: AbortSignal; limit?: number }): Promise { - filters = normalizeFilters(filters); - - if (opts?.signal?.aborted) return Promise.reject(abortError()); - if (!filters.length) return Promise.resolve([]); - - this.#debug('REQ', JSON.stringify(filters)); - const query = filters[0]?.search; - - if (this.#relay && this.#relay.socket.readyState === WebSocket.OPEN) { - this.#debug(`Searching for "${query}" at ${this.#relay.socket.url}...`); - - const events = await this.#relay.query(filters, opts); - - return hydrateEvents({ - events, - store: this.#hydrator, - signal: opts?.signal, - }); - } else { - this.#debug(`Searching for "${query}" locally...`); - return this.#fallback.query(filters, opts); - } - } -} - -export { SearchStore }; diff --git a/src/test.ts b/src/test.ts deleted file mode 100644 index 4e813f05..00000000 --- a/src/test.ts +++ /dev/null @@ -1,82 +0,0 @@ -import ISO6391, { LanguageCode } from 'iso-639-1'; -import lande from 'lande'; -import { NostrEvent } from '@nostrify/nostrify'; -import { finalizeEvent, generateSecretKey } from 'nostr-tools'; - -import { Conf } from '@/config.ts'; -import { DittoDB } from '@/db/DittoDB.ts'; -import { EventsDB } from '@/storages/EventsDB.ts'; -import { purifyEvent } from '@/utils/purify.ts'; -import { sql } from 'kysely'; - -/** Import an event fixture by name in tests. */ -export async function eventFixture(name: string): Promise { - const result = await import(`~/fixtures/events/${name}.json`, { with: { type: 'json' } }); - return structuredClone(result.default); -} - -/** Import a JSONL fixture by name in tests. 
*/ -export async function jsonlEvents(path: string): Promise { - const data = await Deno.readTextFile(path); - return data.split('\n').map((line) => JSON.parse(line)); -} - -/** Generate an event for use in tests. */ -export function genEvent(t: Partial = {}, sk: Uint8Array = generateSecretKey()): NostrEvent { - const event = finalizeEvent({ - kind: 255, - created_at: 0, - content: '', - tags: [], - ...t, - }, sk); - - return purifyEvent(event); -} - -/** Create a database for testing. It uses `DATABASE_URL`, or creates an in-memory database by default. */ -export async function createTestDB(opts?: { pure?: boolean }) { - const { kysely } = DittoDB.create(Conf.databaseUrl, { poolSize: 1 }); - - await DittoDB.migrate(kysely); - - const store = new EventsDB({ - kysely, - timeout: Conf.db.timeouts.default, - pubkey: Conf.pubkey, - pure: opts?.pure ?? false, - }); - - return { - store, - kysely, - [Symbol.asyncDispose]: async () => { - const { rows } = await sql< - { tablename: string } - >`select tablename from pg_tables where schemaname = current_schema()`.execute(kysely); - - for (const { tablename } of rows) { - if (tablename.startsWith('kysely_')) continue; - await sql`truncate table ${sql.ref(tablename)} cascade`.execute(kysely); - } - - await kysely.destroy(); - }, - }; -} - -export function sleep(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -export function getLanguage(text: string): LanguageCode | undefined { - const [topResult] = lande(text); - if (topResult) { - const [iso6393] = topResult; - const locale = new Intl.Locale(iso6393); - if (ISO6391.validate(locale.language)) { - return locale.language as LanguageCode; - } - } - return; -} diff --git a/src/translators/DeepLTranslator.test.ts b/src/translators/DeepLTranslator.test.ts deleted file mode 100644 index 08f16a66..00000000 --- a/src/translators/DeepLTranslator.test.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { assert, assertEquals } from '@std/assert'; - -import 
{ Conf } from '@/config.ts'; -import { DeepLTranslator } from '@/translators/DeepLTranslator.ts'; -import { getLanguage } from '@/test.ts'; - -const { - deeplBaseUrl: baseUrl, - deeplApiKey: apiKey, - translationProvider, -} = Conf; - -const deepl = 'deepl'; - -Deno.test('DeepL translation with source language omitted', { - ignore: !(translationProvider === deepl && apiKey), -}, async () => { - const translator = new DeepLTranslator({ fetch: fetch, baseUrl, apiKey: apiKey! }); - - const data = await translator.translate( - [ - 'Bom dia amigos', - 'Meu nome é Patrick', - 'Eu irei morar na America, eu prometo. Mas antes, eu devo mencionar que o lande está interpretando este texto como italiano, que estranho.', - ], - undefined, - 'en', - ); - - assertEquals(data.source_lang, 'pt'); - assertEquals(getLanguage(data.results[0]), 'en'); - assertEquals(getLanguage(data.results[1]), 'en'); - assertEquals(getLanguage(data.results[2]), 'en'); -}); - -Deno.test('DeepL translation with source language set', { - ignore: !(translationProvider === deepl && apiKey), -}, async () => { - const translator = new DeepLTranslator({ fetch: fetch, baseUrl, apiKey: apiKey as string }); - - const data = await translator.translate( - [ - 'Bom dia amigos', - 'Meu nome é Patrick', - 'Eu irei morar na America, eu prometo. 
Mas antes, eu devo mencionar que o lande está interpretando este texto como italiano, que estranho.', - ], - 'pt', - 'en', - ); - - assertEquals(data.source_lang, 'pt'); - assertEquals(getLanguage(data.results[0]), 'en'); - assertEquals(getLanguage(data.results[1]), 'en'); - assertEquals(getLanguage(data.results[2]), 'en'); -}); - -Deno.test("DeepL translation doesn't alter Nostr URIs", { - ignore: !(translationProvider === deepl && apiKey), -}, async () => { - const translator = new DeepLTranslator({ fetch: fetch, baseUrl, apiKey: apiKey as string }); - - const patrick = - 'nostr:nprofile1qy2hwumn8ghj7erfw36x7tnsw43z7un9d3shjqpqgujeqakgt7fyp6zjggxhyy7ft623qtcaay5lkc8n8gkry4cvnrzqep59se'; - const danidfra = - 'nostr:nprofile1qy2hwumn8ghj7erfw36x7tnsw43z7un9d3shjqpqe6tnvlr46lv3lwdu80r07kanhk6jcxy5r07w9umgv9kuhu9dl5hsz44l8s'; - - const input = - `Thanks to work by ${patrick} and ${danidfra} , it's now possible to filter the global feed by language on #Ditto!`; - - const { results: [output] } = await translator.translate([input], 'en', 'pt'); - - assert(output.includes(patrick)); - assert(output.includes(danidfra)); -}); diff --git a/src/translators/LibreTranslateTranslator.test.ts b/src/translators/LibreTranslateTranslator.test.ts deleted file mode 100644 index edda3039..00000000 --- a/src/translators/LibreTranslateTranslator.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { assertEquals } from '@std/assert'; - -import { Conf } from '@/config.ts'; -import { LibreTranslateTranslator } from '@/translators/LibreTranslateTranslator.ts'; -import { getLanguage } from '@/test.ts'; - -const { - libretranslateBaseUrl: baseUrl, - libretranslateApiKey: apiKey, - translationProvider, -} = Conf; - -const libretranslate = 'libretranslate'; - -Deno.test('LibreTranslate translation with source language omitted', { - ignore: !(translationProvider === libretranslate && apiKey), -}, async () => { - const translator = new LibreTranslateTranslator({ fetch: fetch, baseUrl, apiKey: apiKey! 
}); - - const data = await translator.translate( - [ - 'Bom dia amigos', - 'Meu nome é Patrick, um nome belo ou feio? A questão é mais profunda do que parece.', - 'A respiração é mais importante do que comer e tomar agua.', - ], - undefined, - 'ca', - ); - - assertEquals(data.source_lang, 'pt'); - assertEquals(getLanguage(data.results[0]), 'ca'); - assertEquals(getLanguage(data.results[1]), 'ca'); - assertEquals(getLanguage(data.results[2]), 'ca'); -}); - -Deno.test('LibreTranslate translation with source language set', { - ignore: !(translationProvider === libretranslate && apiKey), -}, async () => { - const translator = new LibreTranslateTranslator({ fetch: fetch, baseUrl, apiKey: apiKey! }); - - const data = await translator.translate( - [ - 'Bom dia amigos', - 'Meu nome é Patrick, um nome belo ou feio? A questão é mais profunda do que parece.', - 'A respiração é mais importante do que comer e tomar agua.', - ], - 'pt', - 'ca', - ); - - assertEquals(data.source_lang, 'pt'); - assertEquals(getLanguage(data.results[0]), 'ca'); - assertEquals(getLanguage(data.results[1]), 'ca'); - assertEquals(getLanguage(data.results[2]), 'ca'); -}); diff --git a/src/utils/SimpleLRU.ts b/src/utils/SimpleLRU.ts deleted file mode 100644 index f18a6211..00000000 --- a/src/utils/SimpleLRU.ts +++ /dev/null @@ -1,52 +0,0 @@ -// deno-lint-ignore-file ban-types - -import { LRUCache } from 'lru-cache'; -import { type Gauge } from 'prom-client'; - -type FetchFn = (key: K, opts: O) => Promise; - -interface FetchFnOpts { - signal?: AbortSignal | null; -} - -type SimpleLRUOpts = LRUCache.Options & { - gauge?: Gauge; -}; - -export class SimpleLRU< - K extends {}, - V extends {}, - O extends {} = FetchFnOpts, -> { - protected cache: LRUCache; - - constructor(fetchFn: FetchFn, private opts: SimpleLRUOpts) { - this.cache = new LRUCache({ - async fetchMethod(key, _staleValue, { signal }) { - try { - return await fetchFn(key, { signal: signal as unknown as AbortSignal }); - } catch { - return null 
as unknown as V; - } - }, - ...opts, - }); - } - - async fetch(key: K, opts?: O): Promise { - const result = await this.cache.fetch(key, opts); - - this.opts.gauge?.set(this.cache.size); - - if (result === undefined || result === null) { - throw new Error('SimpleLRU: fetch failed'); - } - - return result; - } - - put(key: K, value: V): Promise { - this.cache.set(key, value); - return Promise.resolve(); - } -} diff --git a/src/utils/api.ts b/src/utils/api.ts deleted file mode 100644 index 4bbd32fc..00000000 --- a/src/utils/api.ts +++ /dev/null @@ -1,318 +0,0 @@ -import { type Context } from '@hono/hono'; -import { HTTPException } from '@hono/hono/http-exception'; -import { NostrEvent, NostrFilter } from '@nostrify/nostrify'; -import Debug from '@soapbox/stickynotes/debug'; -import { EventTemplate } from 'nostr-tools'; -import * as TypeFest from 'type-fest'; - -import { type AppContext } from '@/app.ts'; -import { Conf } from '@/config.ts'; -import * as pipeline from '@/pipeline.ts'; -import { RelayError } from '@/RelayError.ts'; -import { AdminSigner } from '@/signers/AdminSigner.ts'; -import { Storages } from '@/storages.ts'; -import { nostrNow } from '@/utils.ts'; -import { parseFormData } from '@/utils/formdata.ts'; -import { purifyEvent } from '@/utils/purify.ts'; - -const debug = Debug('ditto:api'); - -/** EventTemplate with defaults. */ -type EventStub = TypeFest.SetOptional; - -/** Publish an event through the pipeline. */ -async function createEvent(t: EventStub, c: AppContext): Promise { - const signer = c.get('signer'); - - if (!signer) { - throw new HTTPException(401, { - res: c.json({ error: 'No way to sign Nostr event' }, 401), - }); - } - - const event = await signer.signEvent({ - content: '', - created_at: nostrNow(), - tags: [], - ...t, - }); - - return publishEvent(event, c); -} - -/** Filter for fetching an existing event to update. 
*/ -interface UpdateEventFilter extends NostrFilter { - kinds: [number]; - limit: 1; -} - -/** Update a replaceable event, or throw if no event exists yet. */ -async function updateEvent( - filter: UpdateEventFilter, - fn: (prev: NostrEvent) => E | Promise, - c: AppContext, -): Promise { - const store = await Storages.db(); - - const [prev] = await store.query( - [filter], - { signal: c.req.raw.signal }, - ); - - if (prev) { - return createEvent(await fn(prev), c); - } else { - throw new HTTPException(422, { - message: 'No event to update', - }); - } -} - -/** Update a replaceable list event, or throw if no event exists yet. */ -function updateListEvent( - filter: UpdateEventFilter, - fn: (tags: string[][]) => string[][], - c: AppContext, -): Promise { - return updateEvent(filter, ({ content, tags }) => ({ - kind: filter.kinds[0], - content, - tags: fn(tags), - }), c); -} - -/** Publish an admin event through the pipeline. */ -async function createAdminEvent(t: EventStub, c: AppContext): Promise { - const signer = new AdminSigner(); - - const event = await signer.signEvent({ - content: '', - created_at: nostrNow(), - tags: [], - ...t, - }); - - return publishEvent(event, c); -} - -/** Fetch existing event, update its tags, then publish the new admin event. */ -function updateListAdminEvent( - filter: UpdateEventFilter, - fn: (tags: string[][]) => string[][], - c: AppContext, -): Promise { - return updateAdminEvent(filter, (prev) => ({ - kind: filter.kinds[0], - content: prev?.content ?? '', - tags: fn(prev?.tags ?? []), - }), c); -} - -/** Fetch existing event, update it, then publish the new admin event. 
*/ -async function updateAdminEvent( - filter: UpdateEventFilter, - fn: (prev: NostrEvent | undefined) => E, - c: AppContext, -): Promise { - const store = await Storages.db(); - const [prev] = await store.query([filter], { limit: 1, signal: c.req.raw.signal }); - return createAdminEvent(fn(prev), c); -} - -function updateUser(pubkey: string, n: Record, c: AppContext): Promise { - return updateNames(30382, pubkey, n, c); -} - -function updateEventInfo(id: string, n: Record, c: AppContext): Promise { - return updateNames(30383, id, n, c); -} - -async function updateNames(k: number, d: string, n: Record, c: AppContext): Promise { - const signer = new AdminSigner(); - const admin = await signer.getPublicKey(); - - return updateAdminEvent( - { kinds: [k], authors: [admin], '#d': [d], limit: 1 }, - (prev) => { - const prevNames = prev?.tags.reduce((acc, [name, value]) => { - if (name === 'n') acc[value] = true; - return acc; - }, {} as Record); - - const names = { ...prevNames, ...n }; - const nTags = Object.entries(names).filter(([, value]) => value).map(([name]) => ['n', name]); - const other = prev?.tags.filter(([name]) => !['d', 'n'].includes(name)) ?? []; - - return { - kind: k, - content: prev?.content ?? '', - tags: [ - ['d', d], - ...nTags, - ...other, - ], - }; - }, - c, - ); -} - -/** Push the event through the pipeline, rethrowing any RelayError. */ -async function publishEvent(event: NostrEvent, c: AppContext): Promise { - debug('EVENT', event); - try { - await pipeline.handleEvent(event, c.req.raw.signal); - const client = await Storages.client(); - await client.event(purifyEvent(event)); - } catch (e) { - if (e instanceof RelayError) { - throw new HTTPException(422, { - res: c.json({ error: e.message }, 422), - }); - } else { - throw e; - } - } - - return event; -} - -/** Parse request body to JSON, depending on the content-type of the request. 
*/ -async function parseBody(req: Request): Promise { - switch (req.headers.get('content-type')?.split(';')[0]) { - case 'multipart/form-data': - case 'application/x-www-form-urlencoded': - try { - return parseFormData(await req.formData()); - } catch { - throw new HTTPException(400, { message: 'Invalid form data' }); - } - case 'application/json': - return req.json(); - } -} - -/** Build HTTP Link header for Mastodon API pagination. */ -function buildLinkHeader(url: string, events: NostrEvent[]): string | undefined { - if (events.length <= 1) return; - const firstEvent = events[0]; - const lastEvent = events[events.length - 1]; - - const { origin } = Conf.url; - const { pathname, search } = new URL(url); - const next = new URL(pathname + search, origin); - const prev = new URL(pathname + search, origin); - - next.searchParams.set('until', String(lastEvent.created_at)); - prev.searchParams.set('since', String(firstEvent.created_at)); - - return `<${next}>; rel="next", <${prev}>; rel="prev"`; -} - -type Entity = { id: string }; -type HeaderRecord = Record; - -/** Return results with pagination headers. Assumes chronological sorting of events. */ -function paginated(c: AppContext, events: NostrEvent[], entities: (Entity | undefined)[], headers: HeaderRecord = {}) { - const link = buildLinkHeader(c.req.url, events); - - if (link) { - headers.link = link; - } - - // Filter out undefined entities. - const results = entities.filter((entity): entity is Entity => Boolean(entity)); - return c.json(results, 200, headers); -} - -/** Build HTTP Link header for paginating Nostr lists. 
*/ -function buildListLinkHeader(url: string, params: { offset: number; limit: number }): string | undefined { - const { origin } = Conf.url; - const { pathname, search } = new URL(url); - const { offset, limit } = params; - const next = new URL(pathname + search, origin); - const prev = new URL(pathname + search, origin); - - next.searchParams.set('offset', String(offset + limit)); - prev.searchParams.set('offset', String(Math.max(offset - limit, 0))); - - next.searchParams.set('limit', String(limit)); - prev.searchParams.set('limit', String(limit)); - - return `<${next}>; rel="next", <${prev}>; rel="prev"`; -} - -/** paginate a list of tags. */ -function paginatedList( - c: AppContext, - params: { offset: number; limit: number }, - entities: unknown[], - headers: HeaderRecord = {}, -) { - const link = buildListLinkHeader(c.req.url, params); - const hasMore = entities.length > 0; - - if (link) { - headers.link = hasMore ? link : link.split(', ').find((link) => link.endsWith('; rel="prev"'))!; - } - - // Filter out undefined entities. - const results = entities.filter(Boolean); - return c.json(results, 200, headers); -} - -/** JSON-LD context. */ -type LDContext = (string | Record>)[]; - -/** Add a basic JSON-LD context to ActivityStreams object, if it doesn't already exist. */ -function maybeAddContext(object: T): T & { '@context': LDContext } { - return { - '@context': ['https://www.w3.org/ns/activitystreams'], - ...object, - }; -} - -/** Like hono's `c.json()` except returns JSON-LD. */ -function activityJson(c: Context, object: T) { - const response = c.json(maybeAddContext(object)); - response.headers.set('content-type', 'application/activity+json; charset=UTF-8'); - return response; -} - -/** Rewrite the URL of the request object to use the local domain. 
*/ -function localRequest(c: Context): Request { - return Object.create(c.req.raw, { - url: { value: Conf.local(c.req.url) }, - }); -} - -/** Actors with Bluesky's `!no-unauthenticated` self-label should require authorization to view. */ -function assertAuthenticated(c: AppContext, author: NostrEvent): void { - if ( - !c.get('signer') && author.tags.some(([name, value, ns]) => - name === 'l' && - value === '!no-unauthenticated' && - ns === 'com.atproto.label.defs#selfLabel' - ) - ) { - throw new HTTPException(401, { message: 'Sign-in required.' }); - } -} - -export { - activityJson, - assertAuthenticated, - createAdminEvent, - createEvent, - type EventStub, - localRequest, - paginated, - paginatedList, - parseBody, - updateAdminEvent, - updateEvent, - updateEventInfo, - updateListAdminEvent, - updateListEvent, - updateUser, -}; diff --git a/src/utils/connect.ts b/src/utils/connect.ts deleted file mode 100644 index 7726fa89..00000000 --- a/src/utils/connect.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Conf } from '@/config.ts'; -import { Storages } from '@/storages.ts'; -import { getInstanceMetadata } from '@/utils/instance.ts'; - -/** NIP-46 client-connect metadata. */ -interface ConnectMetadata { - name: string; - description: string; - url: string; -} - -/** Get NIP-46 `nostrconnect://` URI for the Ditto server. 
*/ -export async function getClientConnectUri(signal?: AbortSignal): Promise { - const uri = new URL('nostrconnect://'); - const { name, tagline } = await getInstanceMetadata(await Storages.db(), signal); - - const metadata: ConnectMetadata = { - name, - description: tagline, - url: Conf.localDomain, - }; - - uri.host = Conf.pubkey; - uri.searchParams.set('relay', Conf.relay); - uri.searchParams.set('metadata', JSON.stringify(metadata)); - - return uri.toString(); -} diff --git a/src/utils/favicon.ts b/src/utils/favicon.ts deleted file mode 100644 index dfe82d1b..00000000 --- a/src/utils/favicon.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { DOMParser } from '@b-fuze/deno-dom'; -import Debug from '@soapbox/stickynotes/debug'; -import tldts from 'tldts'; - -import { Conf } from '@/config.ts'; -import { cachedFaviconsSizeGauge } from '@/metrics.ts'; -import { SimpleLRU } from '@/utils/SimpleLRU.ts'; -import { fetchWorker } from '@/workers/fetch.ts'; - -const debug = Debug('ditto:favicon'); - -const faviconCache = new SimpleLRU( - async (key, { signal }) => { - debug(`Fetching favicon ${key}`); - const tld = tldts.parse(key); - - if (!tld.isIcann || tld.isIp || tld.isPrivate) { - throw new Error(`Invalid favicon domain: ${key}`); - } - - const rootUrl = new URL('/', `https://${key}/`); - const response = await fetchWorker(rootUrl, { signal }); - const html = await response.text(); - - const doc = new DOMParser().parseFromString(html, 'text/html'); - const link = doc.querySelector('link[rel="icon"], link[rel="shortcut icon"]'); - - if (link) { - const href = link.getAttribute('href'); - if (href) { - try { - return new URL(href); - } catch { - return new URL(href, rootUrl); - } - } - } - - throw new Error(`Favicon not found: ${key}`); - }, - { ...Conf.caches.favicon, gauge: cachedFaviconsSizeGauge }, -); - -export { faviconCache }; diff --git a/src/utils/language.ts b/src/utils/language.ts deleted file mode 100644 index 8af8ddf9..00000000 --- a/src/utils/language.ts +++ 
/dev/null @@ -1,34 +0,0 @@ -import ISO6391, { type LanguageCode } from 'iso-639-1'; -import lande from 'lande'; -import linkify from 'linkifyjs'; - -linkify.registerCustomProtocol('nostr', true); - -/** Returns the detected language if the confidence is greater or equal than 'minConfidence' - * 'minConfidence' must be a number between 0 and 1, such as 0.95 - */ -export function detectLanguage(text: string, minConfidence: number): LanguageCode | undefined { - // It's better to remove the emojis first - const sanitizedText = linkify.tokenize( - text - .replaceAll(/\p{Extended_Pictographic}/gu, '') - .replaceAll(/[\s\uFEFF\u00A0\u200B-\u200D\u{0FE0E}]+/gu, ' '), - ).reduce((acc, { t, v }) => t === 'text' ? acc + v : acc, '').trim(); - - if (sanitizedText.length < 10) { // heuristics - return; - } - - const [topResult] = lande( - sanitizedText, - ); - if (topResult) { - const [iso6393, confidence] = topResult; - const locale = new Intl.Locale(iso6393); - - if (confidence >= minConfidence && ISO6391.validate(locale.language)) { - return locale.language as LanguageCode; - } - } - return; -} diff --git a/src/utils/nip05.ts b/src/utils/nip05.ts deleted file mode 100644 index cd763d92..00000000 --- a/src/utils/nip05.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { nip19 } from 'nostr-tools'; -import { NIP05, NStore } from '@nostrify/nostrify'; -import Debug from '@soapbox/stickynotes/debug'; -import tldts from 'tldts'; - -import { Conf } from '@/config.ts'; -import { cachedNip05sSizeGauge } from '@/metrics.ts'; -import { Storages } from '@/storages.ts'; -import { SimpleLRU } from '@/utils/SimpleLRU.ts'; -import { Nip05, parseNip05 } from '@/utils.ts'; -import { fetchWorker } from '@/workers/fetch.ts'; - -const debug = Debug('ditto:nip05'); - -const nip05Cache = new SimpleLRU( - async (key, { signal }) => { - debug(`Lookup ${key}`); - const tld = tldts.parse(key); - - if (!tld.isIcann || tld.isIp || tld.isPrivate) { - throw new Error(`Invalid NIP-05: ${key}`); - } - - const 
[name, domain] = key.split('@'); - - try { - if (domain === Conf.url.host) { - const store = await Storages.db(); - const pointer = await localNip05Lookup(store, name); - if (pointer) { - debug(`Found: ${key} is ${pointer.pubkey}`); - return pointer; - } else { - throw new Error(`Not found: ${key}`); - } - } else { - const result = await NIP05.lookup(key, { fetch: fetchWorker, signal }); - debug(`Found: ${key} is ${result.pubkey}`); - return result; - } - } catch (e) { - debug(`Not found: ${key}`); - throw e; - } - }, - { ...Conf.caches.nip05, gauge: cachedNip05sSizeGauge }, -); - -async function localNip05Lookup(store: NStore, localpart: string): Promise { - const [grant] = await store.query([{ - kinds: [30360], - '#d': [`${localpart}@${Conf.url.host}`], - authors: [Conf.pubkey], - limit: 1, - }]); - - const pubkey = grant?.tags.find(([name]) => name === 'p')?.[1]; - - if (pubkey) { - return { pubkey, relays: [Conf.relay] }; - } -} - -export async function parseAndVerifyNip05( - nip05: string | undefined, - pubkey: string, - signal = AbortSignal.timeout(3000), -): Promise { - if (!nip05) return; - try { - const result = await nip05Cache.fetch(nip05, { signal }); - if (result.pubkey === pubkey) { - return parseNip05(nip05); - } - } catch (_e) { - // do nothing - } -} - -export { localNip05Lookup, nip05Cache }; diff --git a/src/utils/outbox.test.ts b/src/utils/outbox.test.ts deleted file mode 100644 index 62dac2d0..00000000 --- a/src/utils/outbox.test.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { MockRelay } from '@nostrify/nostrify/test'; -import { eventFixture } from '@/test.ts'; -import { getRelays } from '@/utils/outbox.ts'; -import { assertEquals } from '@std/assert'; - -Deno.test('Get write relays - kind 10002', async () => { - const db = new MockRelay(); - - const relayListMetadata = await eventFixture('kind-10002-alex'); - - await db.event(relayListMetadata); - - const relays = await getRelays(db, relayListMetadata.pubkey); - - assertEquals(relays.size, 6); 
-}); - -Deno.test('Get write relays with invalid URL - kind 10002', async () => { - const db = new MockRelay(); - - const relayListMetadata = await eventFixture('kind-10002-alex'); - relayListMetadata.tags[0] = ['r', 'yolo']; - - await db.event(relayListMetadata); - - const relays = await getRelays(db, relayListMetadata.pubkey); - - assertEquals(relays.size, 5); -}); diff --git a/src/utils/outbox.ts b/src/utils/outbox.ts deleted file mode 100644 index 891cccb8..00000000 --- a/src/utils/outbox.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { NStore } from '@nostrify/nostrify'; - -import { Conf } from '@/config.ts'; - -export async function getRelays(store: NStore, pubkey: string): Promise> { - const relays = new Set<`wss://${string}`>(); - - const events = await store.query([ - { kinds: [10002], authors: [pubkey, Conf.pubkey], limit: 2 }, - ]); - - for (const event of events) { - for (const [name, relay, marker] of event.tags) { - if (name === 'r' && (marker === 'write' || !marker)) { - try { - const url = new URL(relay); - if (url.protocol === 'wss:') { - relays.add(url.toString() as `wss://${string}`); - } - } catch (_e) { - // fall through - } - } - } - } - - return relays; -} diff --git a/src/utils/search.test.ts b/src/utils/search.test.ts deleted file mode 100644 index d7073a39..00000000 --- a/src/utils/search.test.ts +++ /dev/null @@ -1,92 +0,0 @@ -import { assertEquals } from '@std/assert'; - -import { createTestDB, genEvent } from '@/test.ts'; -import { getIdsBySearch, getPubkeysBySearch } from '@/utils/search.ts'; - -Deno.test('fuzzy search works', async () => { - await using db = await createTestDB(); - - await db.kysely.insertInto('author_stats').values({ - pubkey: '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4', - search: 'patrickReiis patrickdosreis.com', - notes_count: 0, - followers_count: 0, - following_count: 0, - }).execute(); - - assertEquals( - await getPubkeysBySearch(db.kysely, { q: 'pat rick', limit: 1, offset: 0, 
followedPubkeys: new Set() }), - new Set(), - ); - assertEquals( - await getPubkeysBySearch(db.kysely, { q: 'patrick dosreis', limit: 1, offset: 0, followedPubkeys: new Set() }), - new Set([ - '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4', - ]), - ); - assertEquals( - await getPubkeysBySearch(db.kysely, { q: 'dosreis.com', limit: 1, offset: 0, followedPubkeys: new Set() }), - new Set([ - '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4', - ]), - ); -}); - -Deno.test('fuzzy search works with offset', async () => { - await using db = await createTestDB(); - - await db.kysely.insertInto('author_stats').values({ - pubkey: '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4', - search: 'abdcef patrickReiis patrickdosreis.com', - notes_count: 0, - followers_count: 0, - following_count: 0, - }).execute(); - - assertEquals( - await getPubkeysBySearch(db.kysely, { q: 'dosreis.com', limit: 1, offset: 1, followedPubkeys: new Set() }), - new Set(), - ); -}); - -Deno.test('Searching for posts work', async () => { - await using db = await createTestDB(); - - const event = genEvent({ content: "I'm not an orphan. 
Death is my importance", kind: 1 }); - await db.store.event(event); - await db.kysely.updateTable('nostr_events').set('language', 'en').where('id', '=', event.id).execute(); - - const event2 = genEvent({ content: 'The more I explore is the more I fall in love with the music I make.', kind: 1 }); - await db.store.event(event2); - await db.kysely.updateTable('nostr_events').set('language', 'en').where('id', '=', event2.id).execute(); - - assertEquals( - await getIdsBySearch(db.kysely, { q: 'Death is my importance', limit: 1, offset: 0 }), // ordered words - new Set([event.id]), - ); - - assertEquals( - await getIdsBySearch(db.kysely, { q: 'make I music', limit: 1, offset: 0 }), // reversed words - new Set([event2.id]), - ); - - assertEquals( - await getIdsBySearch(db.kysely, { q: 'language:en make I music', limit: 10, offset: 0 }), // reversed words, english - new Set([event2.id]), - ); - - assertEquals( - await getIdsBySearch(db.kysely, { q: 'language:en an orphan', limit: 10, offset: 0 }), // all posts in english plus search - new Set([event.id]), - ); - - assertEquals( - await getIdsBySearch(db.kysely, { q: 'language:en', limit: 10, offset: 0 }), // all posts in english - new Set([event.id, event2.id]), - ); - - assertEquals( - await getIdsBySearch(db.kysely, { q: '', limit: 10, offset: 0 }), - new Set(), - ); -}); diff --git a/src/utils/search.ts b/src/utils/search.ts deleted file mode 100644 index 649afdd6..00000000 --- a/src/utils/search.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { Kysely, sql } from 'kysely'; - -import { DittoTables } from '@/db/DittoTables.ts'; -import { NIP50 } from '@nostrify/nostrify'; - -/** Get pubkeys whose name and NIP-05 is similar to 'q' */ -export async function getPubkeysBySearch( - kysely: Kysely, - opts: { q: string; limit: number; offset: number; followedPubkeys: Set }, -): Promise> { - const { q, limit, followedPubkeys, offset } = opts; - - let query = kysely - .selectFrom('author_stats') - .select((eb) => [ - 'pubkey', - 
'search', - eb.fn('word_similarity', [sql`${q}`, 'search']).as('sml'), - ]) - .where(() => sql`${q} <% search`) - .orderBy(['followers_count desc']) - .orderBy(['sml desc', 'search']) - .limit(limit) - .offset(offset); - - const pubkeys = new Set((await query.execute()).map(({ pubkey }) => pubkey)); - - if (followedPubkeys.size > 0) { - query = query.where('pubkey', 'in', [...followedPubkeys]); - } - - const followingPubkeys = new Set((await query.execute()).map(({ pubkey }) => pubkey)); - - return new Set(Array.from(followingPubkeys.union(pubkeys))); -} - -/** - * Get kind 1 ids whose content matches `q`. - * It supports NIP-50 extensions. - */ -export async function getIdsBySearch( - kysely: Kysely, - opts: { q: string; limit: number; offset: number }, -): Promise> { - const { q, limit, offset } = opts; - - const [lexemes] = (await sql<{ phraseto_tsquery: 'string' }>`SELECT phraseto_tsquery(${q})`.execute(kysely)).rows; - - // if it's just stop words, don't bother making a request to the database - if (!lexemes.phraseto_tsquery) { - return new Set(); - } - - const tokens = NIP50.parseInput(q); - const parsedSearch = tokens.filter((t) => typeof t === 'string').join(' '); - - let query = kysely - .selectFrom('nostr_events') - .select('id') - .where('kind', '=', 1) - .orderBy(['created_at desc']) - .limit(limit) - .offset(offset); - - const languages = new Set(); - const domains = new Set(); - - for (const token of tokens) { - if (typeof token === 'object' && token.key === 'language') { - languages.add(token.value); - } - if (typeof token === 'object' && token.key === 'domain') { - domains.add(token.value); - } - } - - if (languages.size) { - query = query.where('language', 'in', [...languages]); - } - - if (domains.size) { - const pubkeys = (await kysely - .selectFrom('pubkey_domains') - .select('pubkey') - .where('domain', 'in', [...domains]) - .execute()).map(({ pubkey }) => pubkey); - - query = query.where('pubkey', 'in', pubkeys); - } - - // If there is not a 
specific content to search, return the query already - // This is useful if the person only makes a query search such as `domain:patrickdosreis.com` - if (!parsedSearch.length) { - const ids = new Set((await query.execute()).map(({ id }) => id)); - return ids; - } - - let fallbackQuery = query; - if (parsedSearch) { - query = query.where('search', '@@', sql`phraseto_tsquery(${parsedSearch})`); - } - - const ids = new Set((await query.execute()).map(({ id }) => id)); - - // If there is no ids, fallback to `plainto_tsquery` - if (!ids.size) { - fallbackQuery = fallbackQuery.where( - 'search', - '@@', - sql`plainto_tsquery(${parsedSearch})`, - ); - const ids = new Set((await fallbackQuery.execute()).map(({ id }) => id)); - return ids; - } - - return ids; -} diff --git a/src/utils/stats.test.ts b/src/utils/stats.test.ts deleted file mode 100644 index 797f78da..00000000 --- a/src/utils/stats.test.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { assertEquals } from '@std/assert'; -import { generateSecretKey, getPublicKey } from 'nostr-tools'; - -import { createTestDB, genEvent } from '@/test.ts'; -import { countAuthorStats, getAuthorStats, getEventStats, getFollowDiff, updateStats } from '@/utils/stats.ts'; - -Deno.test('updateStats with kind 1 increments notes count', async () => { - await using db = await createTestDB(); - - const sk = generateSecretKey(); - const pubkey = getPublicKey(sk); - - await updateStats({ ...db, event: genEvent({ kind: 1 }, sk) }); - - const stats = await getAuthorStats(db.kysely, pubkey); - - assertEquals(stats!.notes_count, 1); -}); - -Deno.test('updateStats with kind 1 increments replies count', async () => { - await using db = await createTestDB(); - - const sk = generateSecretKey(); - - const note = genEvent({ kind: 1 }, sk); - await updateStats({ ...db, event: note }); - await db.store.event(note); - - const reply = genEvent({ kind: 1, tags: [['e', note.id]] }, sk); - await updateStats({ ...db, event: reply }); - await db.store.event(reply); 
- - const stats = await getEventStats(db.kysely, note.id); - - assertEquals(stats!.replies_count, 1); -}); - -Deno.test('updateStats with kind 5 decrements notes count', async () => { - await using db = await createTestDB(); - - const sk = generateSecretKey(); - const pubkey = getPublicKey(sk); - - const create = genEvent({ kind: 1 }, sk); - const remove = genEvent({ kind: 5, tags: [['e', create.id]] }, sk); - - await updateStats({ ...db, event: create }); - assertEquals((await getAuthorStats(db.kysely, pubkey))!.notes_count, 1); - await db.store.event(create); - - await updateStats({ ...db, event: remove }); - assertEquals((await getAuthorStats(db.kysely, pubkey))!.notes_count, 0); - await db.store.event(remove); -}); - -Deno.test('updateStats with kind 3 increments followers count', async () => { - await using db = await createTestDB(); - - await updateStats({ ...db, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) }); - await updateStats({ ...db, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) }); - await updateStats({ ...db, event: genEvent({ kind: 3, tags: [['p', 'alex']] }) }); - - const stats = await getAuthorStats(db.kysely, 'alex'); - - assertEquals(stats!.followers_count, 3); -}); - -Deno.test('updateStats with kind 3 decrements followers count', async () => { - await using db = await createTestDB(); - - const sk = generateSecretKey(); - const follow = genEvent({ kind: 3, tags: [['p', 'alex']], created_at: 0 }, sk); - const remove = genEvent({ kind: 3, tags: [], created_at: 1 }, sk); - - await updateStats({ ...db, event: follow }); - assertEquals((await getAuthorStats(db.kysely, 'alex'))!.followers_count, 1); - await db.store.event(follow); - - await updateStats({ ...db, event: remove }); - assertEquals((await getAuthorStats(db.kysely, 'alex'))!.followers_count, 0); - await db.store.event(remove); -}); - -Deno.test('getFollowDiff returns added and removed followers', () => { - const prev = genEvent({ tags: [['p', 'alex'], ['p', 'bob']] }); - const 
next = genEvent({ tags: [['p', 'alex'], ['p', 'carol']] }); - - const { added, removed } = getFollowDiff(next.tags, prev.tags); - - assertEquals(added, new Set(['carol'])); - assertEquals(removed, new Set(['bob'])); -}); - -Deno.test('updateStats with kind 6 increments reposts count', async () => { - await using db = await createTestDB(); - - const note = genEvent({ kind: 1 }); - await updateStats({ ...db, event: note }); - await db.store.event(note); - - const repost = genEvent({ kind: 6, tags: [['e', note.id]] }); - await updateStats({ ...db, event: repost }); - await db.store.event(repost); - - const stats = await getEventStats(db.kysely, note.id); - - assertEquals(stats!.reposts_count, 1); -}); - -Deno.test('updateStats with kind 5 decrements reposts count', async () => { - await using db = await createTestDB(); - - const note = genEvent({ kind: 1 }); - await updateStats({ ...db, event: note }); - await db.store.event(note); - - const sk = generateSecretKey(); - const repost = genEvent({ kind: 6, tags: [['e', note.id]] }, sk); - await updateStats({ ...db, event: repost }); - await db.store.event(repost); - - await updateStats({ ...db, event: genEvent({ kind: 5, tags: [['e', repost.id]] }, sk) }); - - const stats = await getEventStats(db.kysely, note.id); - - assertEquals(stats!.reposts_count, 0); -}); - -Deno.test('updateStats with kind 7 increments reactions count', async () => { - await using db = await createTestDB(); - - const note = genEvent({ kind: 1 }); - await updateStats({ ...db, event: note }); - await db.store.event(note); - - await updateStats({ ...db, event: genEvent({ kind: 7, content: '+', tags: [['e', note.id]] }) }); - await updateStats({ ...db, event: genEvent({ kind: 7, content: '😂', tags: [['e', note.id]] }) }); - - const stats = await getEventStats(db.kysely, note.id); - - assertEquals(stats!.reactions, JSON.stringify({ '+': 1, '😂': 1 })); - assertEquals(stats!.reactions_count, 2); -}); - -Deno.test('updateStats with kind 5 decrements 
reactions count', async () => { - await using db = await createTestDB(); - - const note = genEvent({ kind: 1 }); - await updateStats({ ...db, event: note }); - await db.store.event(note); - - const sk = generateSecretKey(); - const reaction = genEvent({ kind: 7, content: '+', tags: [['e', note.id]] }, sk); - await updateStats({ ...db, event: reaction }); - await db.store.event(reaction); - - await updateStats({ ...db, event: genEvent({ kind: 5, tags: [['e', reaction.id]] }, sk) }); - - const stats = await getEventStats(db.kysely, note.id); - - assertEquals(stats!.reactions, JSON.stringify({})); -}); - -Deno.test('countAuthorStats counts author stats from the database', async () => { - await using db = await createTestDB(); - - const sk = generateSecretKey(); - const pubkey = getPublicKey(sk); - - await db.store.event(genEvent({ kind: 1, content: 'hello' }, sk)); - await db.store.event(genEvent({ kind: 1, content: 'yolo' }, sk)); - await db.store.event(genEvent({ kind: 3, tags: [['p', pubkey]] })); - - await db.kysely.insertInto('author_stats').values({ - pubkey, - search: 'Yolo Lolo', - notes_count: 0, - followers_count: 0, - following_count: 0, - }).onConflict((oc) => oc.column('pubkey').doUpdateSet({ 'search': 'baka' })) - .execute(); - - const stats = await countAuthorStats({ store: db.store, pubkey, kysely: db.kysely }); - - assertEquals(stats!.notes_count, 2); - assertEquals(stats!.followers_count, 1); -}); diff --git a/src/workers/fetch.test.ts b/src/workers/fetch.test.ts deleted file mode 100644 index e4c698d4..00000000 --- a/src/workers/fetch.test.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { assertEquals, assertRejects } from '@std/assert'; - -import { fetchWorker } from '@/workers/fetch.ts'; - -Deno.test({ - name: 'fetchWorker', - async fn() { - const response = await fetchWorker('https://httpbingo.org/get'); - const json = await response.json(); - assertEquals(json.headers.Host, ['httpbingo.org']); - }, - sanitizeResources: false, -}); - -Deno.test({ - 
name: 'fetchWorker with AbortSignal', - async fn() { - const controller = new AbortController(); - const signal = controller.signal; - - setTimeout(() => controller.abort(), 100); - assertRejects(() => fetchWorker('https://httpbingo.org/delay/10', { signal })); - - await new Promise((resolve) => { - signal.addEventListener('abort', () => resolve(), { once: true }); - }); - }, - sanitizeResources: false, -}); diff --git a/src/workers/fetch.ts b/src/workers/fetch.ts deleted file mode 100644 index bb5588ed..00000000 --- a/src/workers/fetch.ts +++ /dev/null @@ -1,86 +0,0 @@ -import * as Comlink from 'comlink'; - -import { FetchWorker } from './fetch.worker.ts'; -import './handlers/abortsignal.ts'; - -import { fetchResponsesCounter } from '@/metrics.ts'; - -const worker = new Worker(new URL('./fetch.worker.ts', import.meta.url), { type: 'module', name: 'fetchWorker' }); -const client = Comlink.wrap(worker); - -// Wait for the worker to be ready before we start using it. -const ready = new Promise((resolve) => { - const handleEvent = () => { - self.removeEventListener('message', handleEvent); - resolve(); - }; - worker.addEventListener('message', handleEvent); -}); - -/** - * Fetch implementation with a Web Worker. - * Calling this performs the fetch in a separate CPU thread so it doesn't block the main thread. - */ -const fetchWorker: typeof fetch = async (...args) => { - await ready; - - const [url, init] = serializeFetchArgs(args); - const { body, signal, ...rest } = init; - - const result = await client.fetch(url, { ...rest, body: await prepareBodyForWorker(body) }, signal); - const response = new Response(...result); - - const { method } = init; - const { status } = response; - fetchResponsesCounter.inc({ method, status }); - - return response; -}; - -/** Take arguments to `fetch`, and turn them into something we can send over Comlink. 
*/ -function serializeFetchArgs(args: Parameters): [string, RequestInit] { - const request = normalizeRequest(args); - const init = requestToInit(request); - return [request.url, init]; -} - -/** Get a `Request` object from arguments to `fetch`. */ -function normalizeRequest(args: Parameters): Request { - return new Request(...args); -} - -/** Get the body as a type we can transfer over Web Workers. */ -async function prepareBodyForWorker( - body: BodyInit | undefined | null, -): Promise { - if (!body || typeof body === 'string' || body instanceof ArrayBuffer || body instanceof Blob) { - return body; - } else { - const response = new Response(body); - return await response.arrayBuffer(); - } -} - -/** - * Convert a `Request` object into its serialized `RequestInit` format. - * `RequestInit` is a subset of `Request`, just lacking helper methods like `json()`, - * making it easier to serialize (exceptions: `body` and `signal`). - */ -function requestToInit(request: Request): RequestInit { - return { - method: request.method, - headers: [...request.headers.entries()], - body: request.body, - referrer: request.referrer, - referrerPolicy: request.referrerPolicy, - mode: request.mode, - credentials: request.credentials, - cache: request.cache, - redirect: request.redirect, - integrity: request.integrity, - keepalive: request.keepalive, - signal: request.signal, - }; -} - -export { fetchWorker }; diff --git a/src/workers/fetch.worker.ts b/src/workers/fetch.worker.ts deleted file mode 100644 index 87d0a6c3..00000000 --- a/src/workers/fetch.worker.ts +++ /dev/null @@ -1,33 +0,0 @@ -/// - -import { safeFetch } from '@soapbox/safe-fetch'; -import { Stickynotes } from '@soapbox/stickynotes'; -import * as Comlink from 'comlink'; - -import '@/workers/handlers/abortsignal.ts'; -import '@/sentry.ts'; - -const console = new Stickynotes('ditto:fetch.worker'); - -export const FetchWorker = { - async fetch( - url: string, - init: Omit, - signal: AbortSignal | null | undefined, - ): 
Promise<[BodyInit, ResponseInit]> { - console.debug(init.method, url); - const response = await safeFetch(url, { ...init, signal }); - return [ - await response.arrayBuffer(), - { - status: response.status, - statusText: response.statusText, - headers: [...response.headers.entries()], - }, - ]; - }, -}; - -Comlink.expose(FetchWorker); - -self.postMessage('ready'); diff --git a/src/workers/handlers/abortsignal.ts b/src/workers/handlers/abortsignal.ts deleted file mode 100644 index 14cf9f41..00000000 --- a/src/workers/handlers/abortsignal.ts +++ /dev/null @@ -1,46 +0,0 @@ -import * as Comlink from 'comlink'; - -const signalFinalizers = new FinalizationRegistry((port: MessagePort) => { - port.postMessage(null); - port.close(); -}); - -Comlink.transferHandlers.set('abortsignal', { - canHandle(value) { - return value instanceof AbortSignal || value?.constructor?.name === 'AbortSignal'; - }, - serialize(signal) { - if (signal.aborted) { - return [{ aborted: true }]; - } - - const { port1, port2 } = new MessageChannel(); - signal.addEventListener( - 'abort', - () => port1.postMessage({ reason: signal.reason }), - { once: true }, - ); - - signalFinalizers?.register(signal, port1); - - return [{ aborted: false, port: port2 }, [port2]]; - }, - deserialize({ aborted, port }) { - if (aborted || !port) { - return AbortSignal.abort(); - } - - const ctrl = new AbortController(); - - port.addEventListener('message', (ev) => { - if (ev.data && 'reason' in ev.data) { - ctrl.abort(ev.data.reason); - } - port.close(); - }, { once: true }); - - port.start(); - - return ctrl.signal; - }, -} as Comlink.TransferHandler);