Mirror of https://gitlab.com/soapbox-pub/ditto.git (synced 2025-12-06 11:29:46 +00:00)
Compare commits
No commits in common. "main" and "v1.2.0" have entirely different histories.
464 changed files with 7443 additions and 14156 deletions
|
|
@ -1,4 +1,4 @@
|
|||
image: denoland/deno:2.2.2
|
||||
image: denoland/deno:2.1.1
|
||||
|
||||
default:
|
||||
interruptible: true
|
||||
|
|
@ -8,12 +8,11 @@ stages:
|
|||
|
||||
test:
|
||||
stage: test
|
||||
timeout: 2 minutes
|
||||
script:
|
||||
- deno fmt --check
|
||||
- deno task lint
|
||||
- deno lint
|
||||
- deno task check
|
||||
- deno task test --ignore=packages/transcode --coverage=cov_profile
|
||||
- deno task test --coverage=cov_profile
|
||||
- deno coverage cov_profile
|
||||
coverage: /All files[^\|]*\|[^\|]*\s+([\d\.]+)/
|
||||
services:
|
||||
|
|
|
45  .goosehints

@ -1,45 +0,0 @@
# Ditto

This project is called Ditto, a self-hosted social media server written in TypeScript with Deno. It implements the [Nostr Protocol](https://raw.githubusercontent.com/nostr-protocol/nips/refs/heads/master/README.md), and parts of the [Mastodon API](https://docs.joinmastodon.org/methods/) and [Pleroma API](https://git.pleroma.social/pleroma/pleroma/-/raw/develop/docs/development/API/pleroma_api.md).

## Project Structure

Ditto is a monorepo with a `packages` directory. The main package is `packages/ditto`, and the main API definition is in `packages/ditto/app.ts`.

## Deno, npm, and jsr

Ditto uses Deno 2.x.

Dependencies are managed in `deno.json` and added with the `deno add` command, which also updates the `deno.lock` file. npm packages can be added by prefixing the package name with the `npm:` protocol. For example, `deno add npm:kysely` adds the `kysely` package from npm.

[jsr](https://jsr.io/) is a modern alternative to npm. It's a completely different registry with different packages available. jsr packages can be added by prefixing the package name with the `jsr:` protocol. For example, `deno add jsr:@std/assert` adds the `@std/assert` package from jsr.

## Nostr

Nostr is a decentralized social media protocol involving clients, relays, keys, and a unified Nostr event format.

Specifications on Nostr are called "NIPs". NIP stands for "Nostr Implementation Possibilities". NIPs are numbered like `NIP-XX`, where `XX` is two capitalized hexadecimal digits, e.g. `NIP-01` and `NIP-C7`.

To learn about Nostr, use the fetch tool to read [NIP-01](https://raw.githubusercontent.com/nostr-protocol/nips/refs/heads/master/01.md).

To read a specific NIP, construct the NIP URL from this template: `https://raw.githubusercontent.com/nostr-protocol/nips/refs/heads/master/{nip}.md` (replace `{nip}` with the relevant NIP name, e.g. `07` for NIP-07, or `C7` for NIP-C7). Then use the fetch tool to read the URL.

To read the definition of a specific kind, construct a URL from this template: `https://nostrbook.dev/kinds/{kind}.md` (replace `{kind}` with the kind number, e.g. `https://nostrbook.dev/kinds/0.md` for kind 0).
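As an illustration of the two URL templates above, here is a minimal Deno/TypeScript sketch; the helper names are hypothetical and not part of the Ditto codebase:

```ts
// Hypothetical helpers illustrating the NIP and kind URL templates above.
const NIPS_BASE = 'https://raw.githubusercontent.com/nostr-protocol/nips/refs/heads/master';
const KINDS_BASE = 'https://nostrbook.dev/kinds';

/** Fetch the markdown source of a NIP, e.g. "01" or "C7". */
async function fetchNip(nip: string): Promise<string> {
  const res = await fetch(`${NIPS_BASE}/${nip}.md`);
  if (!res.ok) throw new Error(`Failed to fetch NIP-${nip}: ${res.status}`);
  return res.text();
}

/** Fetch the definition of an event kind, e.g. 0 for user metadata. */
async function fetchKind(kind: number): Promise<string> {
  const res = await fetch(`${KINDS_BASE}/${kind}.md`);
  if (!res.ok) throw new Error(`Failed to fetch kind ${kind}: ${res.status}`);
  return res.text();
}

console.log(await fetchNip('01'));
console.log(await fetchKind(0));
```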
To discover the full list of NIPs, use the fetch tool to read the [NIPs README](https://raw.githubusercontent.com/nostr-protocol/nips/refs/heads/master/README.md).

It's important that Ditto conforms to Nostr standards. Please read as much of the NIPs as you need to have a full understanding before adding or modifying Nostr events and filters. It is possible to add new ideas to Nostr that don't exist yet in the NIPs, but only after other options have been explored. Care must be taken when adding new Nostr ideas, to ensure they fit seamlessly within the existing Nostr ecosystem.

## How Ditto uses Nostr and Mastodon API

Ditto implements a full Nostr relay, available at `/relay` on the Ditto server.

Mastodon API functionality, available at `/api/*`, is built around the Nostr relay's storage implementation.

Ditto's goal is to enable Mastodon API clients to interact directly with Nostr. It achieves this by implementing most of Mastodon's API and "pretending" to be a Mastodon server to client applications, while actually using Nostr as its decentralized protocol layer.
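For context, the `/relay` endpoint speaks the standard Nostr relay protocol from NIP-01 over WebSocket. A minimal client sketch follows; the server URL is a placeholder, not a real deployment:

```ts
// Minimal NIP-01 client sketch against a Ditto /relay endpoint.
// "wss://ditto.example.com/relay" is a placeholder URL.
const ws = new WebSocket('wss://ditto.example.com/relay');

ws.onopen = () => {
  // REQ asks the relay for events matching a filter; here, the 20 latest kind-1 notes.
  ws.send(JSON.stringify(['REQ', 'sub1', { kinds: [1], limit: 20 }]));
};

ws.onmessage = (msg) => {
  const [type, _subId, event] = JSON.parse(msg.data);
  if (type === 'EVENT') {
    console.log('event:', event.id, event.content);
  } else if (type === 'EOSE') {
    // End of stored events for this subscription.
    ws.close();
  }
};
```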
## Testing Changes

After making changes, please run `deno task check` to check for type errors. If there are any type errors, please try to fix them.

Afterwards, run `deno fmt` to format the code, and then you are done. Please do not try to run the server, or run any other tests.
|
@ -1 +1 @@
|
|||
deno 2.2.2
|
||||
deno 2.1.1
|
||||
2  .vscode/launch.json  vendored
|
|
@ -8,7 +8,7 @@
|
|||
"request": "launch",
|
||||
"name": "Launch Program",
|
||||
"type": "node",
|
||||
"program": "${workspaceFolder}/packages/ditto/server.ts",
|
||||
"program": "${workspaceFolder}/src/server.ts",
|
||||
"cwd": "${workspaceFolder}",
|
||||
"runtimeExecutable": "deno",
|
||||
"runtimeArgs": [
|
||||
|
|
|
|||
5  .vscode/settings.json  vendored
|
|
@ -2,8 +2,5 @@
|
|||
"deno.enable": true,
|
||||
"deno.lint": true,
|
||||
"editor.defaultFormatter": "denoland.vscode-deno",
|
||||
"path-intellisense.extensionOnImport": true,
|
||||
"files.associations": {
|
||||
".goosehints": "markdown"
|
||||
}
|
||||
"path-intellisense.extensionOnImport": true
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
FROM denoland/deno:2.2.2
|
||||
FROM denoland/deno:2.1.1
|
||||
ENV PORT 5000
|
||||
|
||||
WORKDIR /app
|
||||
RUN mkdir -p data && chown -R deno data
|
||||
COPY . .
|
||||
RUN deno cache --allow-import packages/ditto/server.ts
|
||||
RUN deno cache --allow-import src/server.ts
|
||||
RUN apt-get update && apt-get install -y unzip curl
|
||||
RUN deno task soapbox
|
||||
CMD deno task start
|
||||
|
|
|
|||
30  ansible/playbook.yml  Normal file
|
|
@ -0,0 +1,30 @@
|
|||
---
|
||||
- name: Update Ditto
|
||||
hosts: all
|
||||
become: true
|
||||
tasks:
|
||||
- name: Update Deno
|
||||
shell:
|
||||
cmd: curl -fsSL https://deno.land/x/install/install.sh | sh
|
||||
environment:
|
||||
DENO_INSTALL: /usr/local
|
||||
become_user: root
|
||||
|
||||
- name: Update Soapbox
|
||||
shell:
|
||||
cmd: deno task soapbox
|
||||
chdir: /opt/ditto
|
||||
become_user: ditto
|
||||
|
||||
- name: Update ditto from the main branch
|
||||
git:
|
||||
repo: 'https://gitlab.com/soapbox-pub/ditto.git'
|
||||
dest: '/opt/ditto'
|
||||
version: main
|
||||
become_user: ditto
|
||||
|
||||
- name: Restart ditto service
|
||||
systemd:
|
||||
name: ditto
|
||||
state: restarted
|
||||
become_user: root
|
||||
49  deno.json
|
|
@ -1,34 +1,17 @@
|
|||
{
|
||||
"version": "1.1.0",
|
||||
"workspace": [
|
||||
"./packages/captcha",
|
||||
"./packages/conf",
|
||||
"./packages/db",
|
||||
"./packages/ditto",
|
||||
"./packages/lang",
|
||||
"./packages/mastoapi",
|
||||
"./packages/metrics",
|
||||
"./packages/nip98",
|
||||
"./packages/policies",
|
||||
"./packages/ratelimiter",
|
||||
"./packages/transcode",
|
||||
"./packages/translators",
|
||||
"./packages/uploaders",
|
||||
"./packages/cashu"
|
||||
],
|
||||
"tasks": {
|
||||
"start": "deno run -A --env-file --deny-read=.env packages/ditto/server.ts",
|
||||
"dev": "deno run -A --env-file --deny-read=.env --watch packages/ditto/server.ts",
|
||||
"start": "deno run -A --env-file --deny-read=.env src/server.ts",
|
||||
"dev": "deno run -A --env-file --deny-read=.env --watch src/server.ts",
|
||||
"hook": "deno run --allow-read --allow-run --allow-write https://deno.land/x/deno_hooks@0.1.1/mod.ts",
|
||||
"db:export": "deno run -A --env-file --deny-read=.env scripts/db-export.ts",
|
||||
"db:import": "deno run -A --env-file --deny-read=.env scripts/db-import.ts",
|
||||
"db:cleanup": "deno run -A --env-file --deny-read=.env scripts/db-policy.ts",
|
||||
"db:migrate": "deno run -A --env-file --deny-read=.env scripts/db-migrate.ts",
|
||||
"nostr:pull": "deno run -A --env-file --deny-read=.env scripts/nostr-pull.ts",
|
||||
"debug": "deno run -A --env-file --deny-read=.env --inspect packages/ditto/server.ts",
|
||||
"debug": "deno run -A --env-file --deny-read=.env --inspect src/server.ts",
|
||||
"test": "deno test -A --env-file=.env.test --deny-read=.env --junit-path=./deno-test.xml",
|
||||
"check": "deno check --allow-import .",
|
||||
"lint": "deno lint --allow-import",
|
||||
"nsec": "deno run scripts/nsec.ts",
|
||||
"admin:event": "deno run -A --env-file --deny-read=.env scripts/admin-event.ts",
|
||||
"admin:role": "deno run -A --env-file --deny-read=.env scripts/admin-role.ts",
|
||||
|
|
@ -37,11 +20,9 @@
|
|||
"stats:recompute": "deno run -A --env-file --deny-read=.env scripts/stats-recompute.ts",
|
||||
"soapbox": "curl -O https://dl.soapbox.pub/main/soapbox.zip && mkdir -p public && mv soapbox.zip public/ && cd public/ && unzip -o soapbox.zip && rm soapbox.zip",
|
||||
"trends": "deno run -A --env-file --deny-read=.env scripts/trends.ts",
|
||||
"clean:deps": "deno cache --reload packages/ditto/app.ts",
|
||||
"db:populate:nip05": "deno run -A --env-file --deny-read=.env scripts/db-populate-nip05.ts",
|
||||
"clean:deps": "deno cache --reload src/app.ts",
|
||||
"db:populate-search": "deno run -A --env-file --deny-read=.env scripts/db-populate-search.ts",
|
||||
"db:populate-extensions": "deno run -A --env-file --deny-read=.env scripts/db-populate-extensions.ts",
|
||||
"db:streak:recompute": "deno run -A --env-file --deny-read=.env scripts/db-streak-recompute.ts",
|
||||
"vapid": "deno run scripts/vapid.ts"
|
||||
},
|
||||
"unstable": [
|
||||
|
|
@ -54,19 +35,19 @@
|
|||
"./public"
|
||||
],
|
||||
"imports": {
|
||||
"@/": "./src/",
|
||||
"@b-fuze/deno-dom": "jsr:@b-fuze/deno-dom@^0.1.47",
|
||||
"@bradenmacdonald/s3-lite-client": "jsr:@bradenmacdonald/s3-lite-client@^0.7.4",
|
||||
"@cashu/cashu-ts": "npm:@cashu/cashu-ts@^2.2.0",
|
||||
"@core/asyncutil": "jsr:@core/asyncutil@^1.2.0",
|
||||
"@electric-sql/pglite": "npm:@electric-sql/pglite@^0.2.8",
|
||||
"@esroyo/scoped-performance": "jsr:@esroyo/scoped-performance@^3.1.0",
|
||||
"@gfx/canvas-wasm": "jsr:@gfx/canvas-wasm@^0.4.2",
|
||||
"@hono/hono": "jsr:@hono/hono@^4.4.6",
|
||||
"@isaacs/ttlcache": "npm:@isaacs/ttlcache@^1.4.1",
|
||||
"@lambdalisue/async": "jsr:@lambdalisue/async@^2.1.1",
|
||||
"@negrel/webpush": "jsr:@negrel/webpush@^0.3.0",
|
||||
"@noble/secp256k1": "npm:@noble/secp256k1@^2.0.0",
|
||||
"@nostrify/db": "jsr:@nostrify/db@^0.39.4",
|
||||
"@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.39.1",
|
||||
"@nostrify/db": "jsr:@nostrify/db@^0.37.3",
|
||||
"@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.38.0",
|
||||
"@nostrify/policies": "jsr:@nostrify/policies@^0.36.1",
|
||||
"@nostrify/types": "jsr:@nostrify/types@^0.36.0",
|
||||
"@scure/base": "npm:@scure/base@^1.1.6",
|
||||
|
|
@ -75,7 +56,6 @@
|
|||
"@soapbox/logi": "jsr:@soapbox/logi@^0.3.0",
|
||||
"@soapbox/safe-fetch": "jsr:@soapbox/safe-fetch@^2.0.0",
|
||||
"@std/assert": "jsr:@std/assert@^0.225.1",
|
||||
"@std/async": "jsr:@std/async@^1.0.10",
|
||||
"@std/cli": "jsr:@std/cli@^0.223.0",
|
||||
"@std/crypto": "jsr:@std/crypto@^0.224.0",
|
||||
"@std/encoding": "jsr:@std/encoding@^0.224.0",
|
||||
|
|
@ -83,11 +63,11 @@
|
|||
"@std/json": "jsr:@std/json@^0.223.0",
|
||||
"@std/media-types": "jsr:@std/media-types@^0.224.1",
|
||||
"@std/streams": "jsr:@std/streams@^0.223.0",
|
||||
"@std/testing": "jsr:@std/testing@^1.0.9",
|
||||
"blurhash": "npm:blurhash@2.0.5",
|
||||
"comlink": "npm:comlink@^4.4.1",
|
||||
"comlink-async-generator": "npm:comlink-async-generator@^0.0.1",
|
||||
"commander": "npm:commander@12.1.0",
|
||||
"deno.json": "./deno.json",
|
||||
"entities": "npm:entities@^4.5.0",
|
||||
"fast-stable-stringify": "npm:fast-stable-stringify@^1.0.0",
|
||||
"formdata-helper": "npm:formdata-helper@^0.3.0",
|
||||
|
|
@ -105,6 +85,7 @@
|
|||
"nostr-tools": "npm:nostr-tools@2.5.1",
|
||||
"nostr-wasm": "npm:nostr-wasm@^0.1.0",
|
||||
"path-to-regexp": "npm:path-to-regexp@^7.1.0",
|
||||
"png-to-ico": "npm:png-to-ico@^2.1.8",
|
||||
"postgres": "https://gitlab.com/soapbox-pub/postgres.js/-/raw/e79d7d2039446fbf7a37d4eca0d17e94a94b8b53/deno/mod.js",
|
||||
"prom-client": "npm:prom-client@^15.1.2",
|
||||
"question-deno": "https://raw.githubusercontent.com/ocpu/question-deno/10022b8e52555335aa510adb08b0a300df3cf904/mod.ts",
|
||||
|
|
@ -116,6 +97,16 @@
|
|||
"zod": "npm:zod@^3.23.8",
|
||||
"~/fixtures/": "./fixtures/"
|
||||
},
|
||||
"lint": {
|
||||
"rules": {
|
||||
"tags": [
|
||||
"recommended"
|
||||
],
|
||||
"exclude": [
|
||||
"no-explicit-any"
|
||||
]
|
||||
}
|
||||
},
|
||||
"fmt": {
|
||||
"useTabs": false,
|
||||
"lineWidth": 120,
|
||||
|
|
|
|||
272  deno.lock  generated
|
|
@ -3,7 +3,6 @@
|
|||
"specifiers": {
|
||||
"jsr:@b-fuze/deno-dom@~0.1.47": "0.1.48",
|
||||
"jsr:@bradenmacdonald/s3-lite-client@~0.7.4": "0.7.6",
|
||||
"jsr:@core/asyncutil@^1.2.0": "1.2.0",
|
||||
"jsr:@denosaurs/plug@1.0.3": "1.0.3",
|
||||
"jsr:@esroyo/scoped-performance@^3.1.0": "3.1.0",
|
||||
"jsr:@gfx/canvas-wasm@~0.4.2": "0.4.2",
|
||||
|
|
@ -28,30 +27,24 @@
|
|||
"jsr:@gleasonator/policy@0.9.2": "0.9.2",
|
||||
"jsr:@gleasonator/policy@0.9.3": "0.9.3",
|
||||
"jsr:@gleasonator/policy@0.9.4": "0.9.4",
|
||||
"jsr:@gleasonator/policy@0.9.5": "0.9.5",
|
||||
"jsr:@gleasonator/policy@0.9.6": "0.9.6",
|
||||
"jsr:@gleasonator/policy@0.9.7": "0.9.7",
|
||||
"jsr:@gleasonator/policy@0.9.8": "0.9.8",
|
||||
"jsr:@hono/hono@^4.4.6": "4.6.15",
|
||||
"jsr:@lambdalisue/async@^2.1.1": "2.1.1",
|
||||
"jsr:@negrel/http-ece@0.6.0": "0.6.0",
|
||||
"jsr:@negrel/webpush@0.3": "0.3.0",
|
||||
"jsr:@nostrify/db@~0.39.4": "0.39.4",
|
||||
"jsr:@nostrify/db@~0.37.3": "0.37.3",
|
||||
"jsr:@nostrify/nostrify@0.31": "0.31.0",
|
||||
"jsr:@nostrify/nostrify@0.32": "0.32.0",
|
||||
"jsr:@nostrify/nostrify@0.36": "0.36.2",
|
||||
"jsr:@nostrify/nostrify@0.39": "0.39.1",
|
||||
"jsr:@nostrify/nostrify@0.38": "0.38.0",
|
||||
"jsr:@nostrify/nostrify@~0.22.1": "0.22.5",
|
||||
"jsr:@nostrify/nostrify@~0.22.4": "0.22.4",
|
||||
"jsr:@nostrify/nostrify@~0.22.5": "0.22.5",
|
||||
"jsr:@nostrify/nostrify@~0.39.1": "0.39.1",
|
||||
"jsr:@nostrify/nostrify@~0.46.3": "0.46.3",
|
||||
"jsr:@nostrify/policies@0.33": "0.33.0",
|
||||
"jsr:@nostrify/policies@0.33.1": "0.33.1",
|
||||
"jsr:@nostrify/policies@0.34": "0.34.0",
|
||||
"jsr:@nostrify/policies@0.36": "0.36.0",
|
||||
"jsr:@nostrify/policies@~0.33.1": "0.33.1",
|
||||
"jsr:@nostrify/policies@~0.36.1": "0.36.1",
|
||||
"jsr:@nostrify/policies@~0.36.2": "0.36.2",
|
||||
"jsr:@nostrify/types@0.30": "0.30.1",
|
||||
"jsr:@nostrify/types@0.35": "0.35.0",
|
||||
"jsr:@nostrify/types@0.36": "0.36.0",
|
||||
|
|
@ -61,10 +54,8 @@
|
|||
"jsr:@soapbox/safe-fetch@2": "2.0.0",
|
||||
"jsr:@std/assert@0.223": "0.223.0",
|
||||
"jsr:@std/assert@0.224": "0.224.0",
|
||||
"jsr:@std/assert@^1.0.10": "1.0.11",
|
||||
"jsr:@std/assert@~0.213.1": "0.213.1",
|
||||
"jsr:@std/assert@~0.225.1": "0.225.3",
|
||||
"jsr:@std/async@^1.0.10": "1.0.10",
|
||||
"jsr:@std/bytes@0.223": "0.223.0",
|
||||
"jsr:@std/bytes@0.224": "0.224.0",
|
||||
"jsr:@std/bytes@0.224.0": "0.224.0",
|
||||
|
|
@ -74,7 +65,6 @@
|
|||
"jsr:@std/bytes@^1.0.2-rc.3": "1.0.2",
|
||||
"jsr:@std/cli@0.223": "0.223.0",
|
||||
"jsr:@std/crypto@0.224": "0.224.0",
|
||||
"jsr:@std/data-structures@^1.0.6": "1.0.6",
|
||||
"jsr:@std/encoding@0.213.1": "0.213.1",
|
||||
"jsr:@std/encoding@0.224": "0.224.3",
|
||||
"jsr:@std/encoding@0.224.0": "0.224.0",
|
||||
|
|
@ -82,10 +72,8 @@
|
|||
"jsr:@std/encoding@~0.224.1": "0.224.3",
|
||||
"jsr:@std/fmt@0.213.1": "0.213.1",
|
||||
"jsr:@std/fs@0.213.1": "0.213.1",
|
||||
"jsr:@std/fs@^1.0.9": "1.0.11",
|
||||
"jsr:@std/fs@~0.229.3": "0.229.3",
|
||||
"jsr:@std/internal@1": "1.0.5",
|
||||
"jsr:@std/internal@^1.0.5": "1.0.5",
|
||||
"jsr:@std/io@0.223": "0.223.0",
|
||||
"jsr:@std/io@0.224": "0.224.9",
|
||||
"jsr:@std/json@0.223": "0.223.0",
|
||||
|
|
@ -94,11 +82,8 @@
|
|||
"jsr:@std/path@0.213.1": "0.213.1",
|
||||
"jsr:@std/path@0.224.0": "0.224.0",
|
||||
"jsr:@std/path@1.0.0-rc.1": "1.0.0-rc.1",
|
||||
"jsr:@std/path@^1.0.8": "1.0.8",
|
||||
"jsr:@std/path@~0.213.1": "0.213.1",
|
||||
"jsr:@std/streams@0.223": "0.223.0",
|
||||
"jsr:@std/testing@^1.0.9": "1.0.9",
|
||||
"npm:@cashu/cashu-ts@^2.2.0": "2.2.0",
|
||||
"npm:@electric-sql/pglite@~0.2.8": "0.2.8",
|
||||
"npm:@isaacs/ttlcache@^1.4.1": "1.4.1",
|
||||
"npm:@noble/hashes@^1.4.0": "1.4.0",
|
||||
|
|
@ -106,7 +91,7 @@
|
|||
"npm:@scure/base@^1.1.6": "1.1.6",
|
||||
"npm:@scure/bip32@^1.4.0": "1.4.0",
|
||||
"npm:@scure/bip39@^1.3.0": "1.3.0",
|
||||
"npm:@types/node@*": "22.5.4",
|
||||
"npm:@types/node@*": "18.16.19",
|
||||
"npm:blurhash@2.0.5": "2.0.5",
|
||||
"npm:comlink-async-generator@*": "0.0.1",
|
||||
"npm:comlink-async-generator@^0.0.1": "0.0.1",
|
||||
|
|
@ -132,11 +117,11 @@
|
|||
"npm:lru-cache@^10.2.2": "10.2.2",
|
||||
"npm:nostr-tools@2.5.1": "2.5.1",
|
||||
"npm:nostr-tools@^2.10.4": "2.10.4",
|
||||
"npm:nostr-tools@^2.13.0": "2.14.2",
|
||||
"npm:nostr-tools@^2.5.0": "2.5.1",
|
||||
"npm:nostr-tools@^2.7.0": "2.7.0",
|
||||
"npm:nostr-wasm@0.1": "0.1.0",
|
||||
"npm:path-to-regexp@^7.1.0": "7.1.0",
|
||||
"npm:png-to-ico@^2.1.8": "2.1.8",
|
||||
"npm:postgres@3.4.4": "3.4.4",
|
||||
"npm:prom-client@^15.1.2": "15.1.2",
|
||||
"npm:sharp@~0.33.5": "0.33.5",
|
||||
|
|
@ -146,7 +131,6 @@
|
|||
"npm:type-fest@^4.3.0": "4.18.2",
|
||||
"npm:unfurl.js@^6.4.0": "6.4.0",
|
||||
"npm:websocket-ts@^2.1.5": "2.1.5",
|
||||
"npm:websocket-ts@^2.2.1": "2.2.1",
|
||||
"npm:zod@^3.23.8": "3.23.8"
|
||||
},
|
||||
"jsr": {
|
||||
|
|
@ -168,9 +152,6 @@
|
|||
"jsr:@std/io@0.224"
|
||||
]
|
||||
},
|
||||
"@core/asyncutil@1.2.0": {
|
||||
"integrity": "9967f15190c60df032c13f72ce5ac73d185c34f31c53dc918d8800025854c118"
|
||||
},
|
||||
"@denosaurs/plug@1.0.3": {
|
||||
"integrity": "b010544e386bea0ff3a1d05e0c88f704ea28cbd4d753439c2f1ee021a85d4640",
|
||||
"dependencies": [
|
||||
|
|
@ -325,34 +306,6 @@
|
|||
"jsr:@nostrify/policies@~0.36.1"
|
||||
]
|
||||
},
|
||||
"@gleasonator/policy@0.9.5": {
|
||||
"integrity": "8ce76ad719b5d002bb1799c60f2deb4d450b32d590e0f4c211919aa68f1ea963",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@0.36",
|
||||
"jsr:@nostrify/policies@~0.36.1"
|
||||
]
|
||||
},
|
||||
"@gleasonator/policy@0.9.6": {
|
||||
"integrity": "5bbd04f2d986344509547d480b5202e5f42832a4216b5be66c161e638f5e6672",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@0.36",
|
||||
"jsr:@nostrify/policies@~0.36.1"
|
||||
]
|
||||
},
|
||||
"@gleasonator/policy@0.9.7": {
|
||||
"integrity": "e4f45032683e7433f9b8fb8a38e1ca767bbfb75513dd0600230f85d06d2956d6",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@0.36",
|
||||
"jsr:@nostrify/policies@~0.36.1"
|
||||
]
|
||||
},
|
||||
"@gleasonator/policy@0.9.8": {
|
||||
"integrity": "a972b1bc797f5a38f2e71458194c37af075c85e941c04048d208b858100efc52",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@~0.46.3",
|
||||
"jsr:@nostrify/policies@~0.36.2"
|
||||
]
|
||||
},
|
||||
"@hono/hono@4.4.6": {
|
||||
"integrity": "aa557ca9930787ee86b9ca1730691f1ce1c379174c2cb244d5934db2b6314453"
|
||||
},
|
||||
|
|
@ -383,6 +336,9 @@
|
|||
"@hono/hono@4.6.15": {
|
||||
"integrity": "935b3b12e98e4b22bcd1aa4dbe6587321e431c79829eba61f535b4ede39fd8b1"
|
||||
},
|
||||
"@lambdalisue/async@2.1.1": {
|
||||
"integrity": "1fc9bc6f4ed50215cd2f7217842b18cea80f81c25744f88f8c5eb4be5a1c9ab4"
|
||||
},
|
||||
"@negrel/http-ece@0.6.0": {
|
||||
"integrity": "7afdd81b86ea5b21a9677b323c01c3338705e11cc2bfed250870f5349d8f86f7",
|
||||
"dependencies": [
|
||||
|
|
@ -400,10 +356,10 @@
|
|||
"jsr:@std/path@0.224.0"
|
||||
]
|
||||
},
|
||||
"@nostrify/db@0.39.4": {
|
||||
"integrity": "53fecea3b67394cf4f52795f89d1d065bdeb0627b8655cc7fc3a89d6b21adf01",
|
||||
"@nostrify/db@0.37.3": {
|
||||
"integrity": "fe7cacd67bb817f10fb44587e832cfb042a3a0d32db29b24a487b7d006438623",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@0.39",
|
||||
"jsr:@nostrify/nostrify@0.38",
|
||||
"jsr:@nostrify/types@0.36",
|
||||
"npm:kysely@~0.27.3",
|
||||
"npm:nostr-tools@^2.10.4"
|
||||
|
|
@ -420,7 +376,7 @@
|
|||
"npm:kysely@~0.27.3",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.5.0",
|
||||
"npm:websocket-ts@^2.1.5",
|
||||
"npm:websocket-ts",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
|
|
@ -434,7 +390,7 @@
|
|||
"npm:kysely@~0.27.3",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.7.0",
|
||||
"npm:websocket-ts@^2.1.5",
|
||||
"npm:websocket-ts",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
|
|
@ -449,7 +405,7 @@
|
|||
"npm:@scure/bip39",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.7.0",
|
||||
"npm:websocket-ts@^2.1.5",
|
||||
"npm:websocket-ts",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
|
|
@ -462,7 +418,7 @@
|
|||
"npm:@scure/bip39",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.7.0",
|
||||
"npm:websocket-ts@^2.1.5",
|
||||
"npm:websocket-ts",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
|
|
@ -475,7 +431,7 @@
|
|||
"npm:@scure/bip39",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.7.0",
|
||||
"npm:websocket-ts@^2.1.5",
|
||||
"npm:websocket-ts",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
|
|
@ -490,7 +446,7 @@
|
|||
"npm:@scure/bip39",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.7.0",
|
||||
"npm:websocket-ts@^2.1.5",
|
||||
"npm:websocket-ts",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
|
|
@ -503,7 +459,7 @@
|
|||
"npm:@scure/bip39",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.7.0",
|
||||
"npm:websocket-ts@^2.1.5",
|
||||
"npm:websocket-ts",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
|
|
@ -518,51 +474,7 @@
|
|||
"npm:@scure/bip39",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.10.4",
|
||||
"npm:websocket-ts@^2.1.5",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
"@nostrify/nostrify@0.39.0": {
|
||||
"integrity": "f7e052c32b8b9bafe0f2517dcf090e7d3df5aed38452a0cf61ade817d34067ee",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@0.39",
|
||||
"jsr:@nostrify/types@0.36",
|
||||
"jsr:@std/crypto",
|
||||
"jsr:@std/encoding@~0.224.1",
|
||||
"npm:@scure/base",
|
||||
"npm:@scure/bip32",
|
||||
"npm:@scure/bip39",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.10.4",
|
||||
"npm:websocket-ts@^2.2.1",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
"@nostrify/nostrify@0.39.1": {
|
||||
"integrity": "84f98c815a07f4151bd02188a3525e438c416e9de632c79c9da9edbfca580d7f",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@~0.39.1",
|
||||
"jsr:@nostrify/types@0.36",
|
||||
"jsr:@std/crypto",
|
||||
"jsr:@std/encoding@~0.224.1",
|
||||
"npm:@scure/base",
|
||||
"npm:@scure/bip32",
|
||||
"npm:@scure/bip39",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.10.4",
|
||||
"npm:websocket-ts@^2.2.1",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
"@nostrify/nostrify@0.46.3": {
|
||||
"integrity": "a809b83219c483dff4c87420f54bef7e0f98a438450283be26a167698114fec5",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@~0.46.3",
|
||||
"jsr:@nostrify/types@0.36",
|
||||
"jsr:@std/encoding@~0.224.1",
|
||||
"npm:lru-cache@^10.2.0",
|
||||
"npm:nostr-tools@^2.13.0",
|
||||
"npm:websocket-ts@^2.2.1",
|
||||
"npm:websocket-ts",
|
||||
"npm:zod"
|
||||
]
|
||||
},
|
||||
|
|
@ -604,14 +516,6 @@
|
|||
"npm:nostr-tools@^2.7.0"
|
||||
]
|
||||
},
|
||||
"@nostrify/policies@0.36.2": {
|
||||
"integrity": "b62c99fadf2d451e68d24ac1643844b953785c45cc170d3aee62b57c60ab9829",
|
||||
"dependencies": [
|
||||
"jsr:@nostrify/nostrify@~0.46.3",
|
||||
"jsr:@nostrify/types@0.36",
|
||||
"npm:nostr-tools@^2.13.0"
|
||||
]
|
||||
},
|
||||
"@nostrify/types@0.30.0": {
|
||||
"integrity": "1f38fa849cff930bd709edbf94ef9ac02f46afb8b851f86c8736517b354616da"
|
||||
},
|
||||
|
|
@ -651,18 +555,9 @@
|
|||
"@std/assert@0.225.3": {
|
||||
"integrity": "b3c2847aecf6955b50644cdb9cf072004ea3d1998dd7579fc0acb99dbb23bd4f",
|
||||
"dependencies": [
|
||||
"jsr:@std/internal@1"
|
||||
"jsr:@std/internal"
|
||||
]
|
||||
},
|
||||
"@std/assert@1.0.11": {
|
||||
"integrity": "2461ef3c368fe88bc60e186e7744a93112f16fd110022e113a0849e94d1c83c1",
|
||||
"dependencies": [
|
||||
"jsr:@std/internal@^1.0.5"
|
||||
]
|
||||
},
|
||||
"@std/async@1.0.10": {
|
||||
"integrity": "2ff1b1c7d33d1416159989b0f69e59ec7ee8cb58510df01e454def2108b3dbec"
|
||||
},
|
||||
"@std/bytes@0.223.0": {
|
||||
"integrity": "84b75052cd8680942c397c2631318772b295019098f40aac5c36cead4cba51a8"
|
||||
},
|
||||
|
|
@ -691,9 +586,6 @@
|
|||
"jsr:@std/encoding@0.224"
|
||||
]
|
||||
},
|
||||
"@std/data-structures@1.0.6": {
|
||||
"integrity": "76a7fd8080c66604c0496220a791860492ab21a04a63a969c0b9a0609bbbb760"
|
||||
},
|
||||
"@std/dotenv@0.224.0": {
|
||||
"integrity": "d9234cdf551507dcda60abb6c474289843741d8c07ee8ce540c60f5c1b220a1d"
|
||||
},
|
||||
|
|
@ -725,12 +617,6 @@
|
|||
"jsr:@std/path@1.0.0-rc.1"
|
||||
]
|
||||
},
|
||||
"@std/fs@1.0.11": {
|
||||
"integrity": "ba674672693340c5ebdd018b4fe1af46cb08741f42b4c538154e97d217b55bdd",
|
||||
"dependencies": [
|
||||
"jsr:@std/path@^1.0.8"
|
||||
]
|
||||
},
|
||||
"@std/internal@1.0.0": {
|
||||
"integrity": "ac6a6dfebf838582c4b4f61a6907374e27e05bedb6ce276e0f1608fe84e7cd9a"
|
||||
},
|
||||
|
|
@ -828,9 +714,6 @@
|
|||
"@std/path@1.0.0-rc.1": {
|
||||
"integrity": "b8c00ae2f19106a6bb7cbf1ab9be52aa70de1605daeb2dbdc4f87a7cbaf10ff6"
|
||||
},
|
||||
"@std/path@1.0.8": {
|
||||
"integrity": "548fa456bb6a04d3c1a1e7477986b6cffbce95102d0bb447c67c4ee70e0364be"
|
||||
},
|
||||
"@std/streams@0.223.0": {
|
||||
"integrity": "d6b28e498ced3960b04dc5d251f2dcfc1df244b5ec5a48dc23a8f9b490be3b99",
|
||||
"dependencies": [
|
||||
|
|
@ -838,38 +721,9 @@
|
|||
"jsr:@std/bytes@0.223",
|
||||
"jsr:@std/io@0.223"
|
||||
]
|
||||
},
|
||||
"@std/testing@1.0.9": {
|
||||
"integrity": "9bdd4ac07cb13e7594ac30e90f6ceef7254ac83a9aeaa089be0008f33aab5cd4",
|
||||
"dependencies": [
|
||||
"jsr:@std/assert@^1.0.10",
|
||||
"jsr:@std/data-structures",
|
||||
"jsr:@std/fs@^1.0.9",
|
||||
"jsr:@std/internal@^1.0.5",
|
||||
"jsr:@std/path@^1.0.8"
|
||||
]
|
||||
}
|
||||
},
|
||||
"npm": {
|
||||
"@cashu/cashu-ts@2.2.0": {
|
||||
"integrity": "sha512-7b6pGyjjpm3uAJvmOL+ztpRxqp1qnmzGpydp+Pu30pOjxj93EhejPTJVrZMDJ0P35y6u5+5jIjHF4k0fpovvmg==",
|
||||
"dependencies": [
|
||||
"@cashu/crypto",
|
||||
"@noble/curves@1.4.0",
|
||||
"@noble/hashes@1.4.0",
|
||||
"buffer"
|
||||
]
|
||||
},
|
||||
"@cashu/crypto@0.3.4": {
|
||||
"integrity": "sha512-mfv1Pj4iL1PXzUj9NKIJbmncCLMqYfnEDqh/OPxAX0nNBt6BOnVJJLjLWFlQeYxlnEfWABSNkrqPje1t5zcyhA==",
|
||||
"dependencies": [
|
||||
"@noble/curves@1.8.1",
|
||||
"@noble/hashes@1.7.1",
|
||||
"@scure/bip32@1.6.2",
|
||||
"@scure/bip39@1.5.4",
|
||||
"buffer"
|
||||
]
|
||||
},
|
||||
"@electric-sql/pglite@0.2.8": {
|
||||
"integrity": "sha512-0wSmQu22euBRzR5ghqyIHnBH4MfwlkL5WstOrrA3KOsjEWEglvoL/gH92JajEUA6Ufei/+qbkB2hVloC/K/RxQ=="
|
||||
},
|
||||
|
|
@ -987,12 +841,6 @@
|
|||
"@noble/hashes@1.4.0"
|
||||
]
|
||||
},
|
||||
"@noble/curves@1.8.1": {
|
||||
"integrity": "sha512-warwspo+UYUPep0Q+vtdVB4Ugn8GGQj8iyB3gnRWsztmUHTI3S1nhdiWNsPUGL0vud7JlRRk1XEu7Lq1KGTnMQ==",
|
||||
"dependencies": [
|
||||
"@noble/hashes@1.7.1"
|
||||
]
|
||||
},
|
||||
"@noble/hashes@1.3.1": {
|
||||
"integrity": "sha512-EbqwksQwz9xDRGfDST86whPBgM65E0OH/pCgqW0GBVzO22bNE+NuIbeTb714+IfSjU3aRk47EUvXIb5bTsenKA=="
|
||||
},
|
||||
|
|
@ -1002,9 +850,6 @@
|
|||
"@noble/hashes@1.4.0": {
|
||||
"integrity": "sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg=="
|
||||
},
|
||||
"@noble/hashes@1.7.1": {
|
||||
"integrity": "sha512-B8XBPsn4vT/KJAGqDzbwztd+6Yte3P4V7iafm24bxgDe/mlRuK6xmWPuCNrKt2vDafZ8MfJLlchDG/vYafQEjQ=="
|
||||
},
|
||||
"@noble/secp256k1@2.1.0": {
|
||||
"integrity": "sha512-XLEQQNdablO0XZOIniFQimiXsZDNwaYgL96dZwC54Q30imSbAOFf3NKtepc+cXyuZf5Q1HCgbqgZ2UFFuHVcEw=="
|
||||
},
|
||||
|
|
@ -1017,9 +862,6 @@
|
|||
"@scure/base@1.1.6": {
|
||||
"integrity": "sha512-ok9AWwhcgYuGG3Zfhyqg+zwl+Wn5uE+dwC0NV/2qQkx4dABbb/bx96vWu8NSj+BNjjSjno+JRYRjle1jV08k3g=="
|
||||
},
|
||||
"@scure/base@1.2.4": {
|
||||
"integrity": "sha512-5Yy9czTO47mqz+/J8GM6GIId4umdCk1wc1q8rKERQulIoc8VP9pzDcghv10Tl2E7R96ZUx/PhND3ESYUQX8NuQ=="
|
||||
},
|
||||
"@scure/bip32@1.3.1": {
|
||||
"integrity": "sha512-osvveYtyzdEVbt3OfwwXFr4P2iVBL5u1Q3q4ONBfDY/UpOuXmOlbgwc1xECEboY8wIays8Yt6onaWMUdUbfl0A==",
|
||||
"dependencies": [
|
||||
|
|
@ -1036,14 +878,6 @@
|
|||
"@scure/base@1.1.6"
|
||||
]
|
||||
},
|
||||
"@scure/bip32@1.6.2": {
|
||||
"integrity": "sha512-t96EPDMbtGgtb7onKKqxRLfE5g05k7uHnHRM2xdE6BP/ZmxaLtPek4J4KfVn/90IQNrU1IOAqMgiDtUdtbe3nw==",
|
||||
"dependencies": [
|
||||
"@noble/curves@1.8.1",
|
||||
"@noble/hashes@1.7.1",
|
||||
"@scure/base@1.2.4"
|
||||
]
|
||||
},
|
||||
"@scure/bip39@1.2.1": {
|
||||
"integrity": "sha512-Z3/Fsz1yr904dduJD0NpiyRHhRYHdcnyh73FZWiV+/qhWi83wNJ3NWolYqCEN+ZWsUz2TWwajJggcRE9r1zUYg==",
|
||||
"dependencies": [
|
||||
|
|
@ -1058,24 +892,17 @@
|
|||
"@scure/base@1.1.6"
|
||||
]
|
||||
},
|
||||
"@scure/bip39@1.5.4": {
|
||||
"integrity": "sha512-TFM4ni0vKvCfBpohoh+/lY05i9gRbSwXWngAsF4CABQxoaOHijxuaZ2R6cStDQ5CHtHO9aGJTr4ksVJASRRyMA==",
|
||||
"dependencies": [
|
||||
"@noble/hashes@1.7.1",
|
||||
"@scure/base@1.2.4"
|
||||
]
|
||||
},
|
||||
"@types/dompurify@3.0.5": {
|
||||
"integrity": "sha512-1Wg0g3BtQF7sSb27fJQAKck1HECM6zV1EB66j8JH9i3LCjYabJa0FSdiSgsD5K/RbrsR0SiraKacLB+T8ZVYAg==",
|
||||
"dependencies": [
|
||||
"@types/trusted-types"
|
||||
]
|
||||
},
|
||||
"@types/node@22.5.4": {
|
||||
"integrity": "sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==",
|
||||
"dependencies": [
|
||||
"undici-types"
|
||||
]
|
||||
"@types/node@17.0.45": {
|
||||
"integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw=="
|
||||
},
|
||||
"@types/node@18.16.19": {
|
||||
"integrity": "sha512-IXl7o+R9iti9eBW4Wg2hx1xQDig183jj7YLn8F7udNceyfkbn1ZxmzZXuak20gR40D7pIkIY1kYGx5VIGbaHKA=="
|
||||
},
|
||||
"@types/trusted-types@2.0.7": {
|
||||
"integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw=="
|
||||
|
|
@ -1101,9 +928,6 @@
|
|||
"asynckit@0.4.0": {
|
||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||
},
|
||||
"base64-js@1.5.1": {
|
||||
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="
|
||||
},
|
||||
"bintrees@1.0.2": {
|
||||
"integrity": "sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw=="
|
||||
},
|
||||
|
|
@ -1116,13 +940,6 @@
|
|||
"fill-range"
|
||||
]
|
||||
},
|
||||
"buffer@6.0.3": {
|
||||
"integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==",
|
||||
"dependencies": [
|
||||
"base64-js",
|
||||
"ieee754"
|
||||
]
|
||||
},
|
||||
"chalk@5.3.0": {
|
||||
"integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="
|
||||
},
|
||||
|
|
@ -1361,9 +1178,6 @@
|
|||
"safer-buffer"
|
||||
]
|
||||
},
|
||||
"ieee754@1.2.1": {
|
||||
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="
|
||||
},
|
||||
"image-size@1.1.1": {
|
||||
"integrity": "sha512-541xKlUw6jr/6gGuk92F+mYM5zaFAc5ahphvkqvNe2bQ6gVBkd6bfrmVJ2t4KDAfikAYZyIqTnktX3i6/aQDrQ==",
|
||||
"dependencies": [
|
||||
|
|
@ -1568,18 +1382,6 @@
|
|||
"nostr-wasm"
|
||||
]
|
||||
},
|
||||
"nostr-tools@2.14.2": {
|
||||
"integrity": "sha512-YOIOn5EdJ2Kq5sQW5Zh4wOcqzR6kUyrCDHG4+mVD2szzthsyOTpiWX0yrwaRZGlHJG6q83vkhg95qc2W201XTQ==",
|
||||
"dependencies": [
|
||||
"@noble/ciphers",
|
||||
"@noble/curves@1.2.0",
|
||||
"@noble/hashes@1.3.1",
|
||||
"@scure/base@1.1.1",
|
||||
"@scure/bip32@1.3.1",
|
||||
"@scure/bip39@1.2.1",
|
||||
"nostr-wasm"
|
||||
]
|
||||
},
|
||||
"nostr-tools@2.5.1": {
|
||||
"integrity": "sha512-bpkhGGAhdiCN0irfV+xoH3YP5CQeOXyXzUq7SYeM6D56xwTXZCPEmBlUGqFVfQidvRsoVeVxeAiOXW2c2HxoRQ==",
|
||||
"dependencies": [
|
||||
|
|
@ -1649,6 +1451,14 @@
|
|||
"pidtree@0.6.0": {
|
||||
"integrity": "sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g=="
|
||||
},
|
||||
"png-to-ico@2.1.8": {
|
||||
"integrity": "sha512-Nf+IIn/cZ/DIZVdGveJp86NG5uNib1ZXMiDd/8x32HCTeKSvgpyg6D/6tUBn1QO/zybzoMK0/mc3QRgAyXdv9w==",
|
||||
"dependencies": [
|
||||
"@types/node@17.0.45",
|
||||
"minimist",
|
||||
"pngjs"
|
||||
]
|
||||
},
|
||||
"pngjs@6.0.0": {
|
||||
"integrity": "sha512-TRzzuFRRmEoSW/p1KVAmiOgPco2Irlah+bGFCeNfJXxxYGwSw7YwAOAcd7X28K/m5bjBWKsC29KyoMfHbypayg=="
|
||||
},
|
||||
|
|
@ -1850,9 +1660,6 @@
|
|||
"type-fest@4.18.2": {
|
||||
"integrity": "sha512-+suCYpfJLAe4OXS6+PPXjW3urOS4IoP9waSiLuXfLgqZODKw/aWwASvzqE886wA0kQgGy0mIWyhd87VpqIy6Xg=="
|
||||
},
|
||||
"undici-types@6.19.8": {
|
||||
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="
|
||||
},
|
||||
"unfurl.js@6.4.0": {
|
||||
"integrity": "sha512-DogJFWPkOWMcu2xPdpmbcsL+diOOJInD3/jXOv6saX1upnWmMK8ndAtDWUfJkuInqNI9yzADud4ID9T+9UeWCw==",
|
||||
"dependencies": [
|
||||
|
|
@ -1878,9 +1685,6 @@
|
|||
"websocket-ts@2.1.5": {
|
||||
"integrity": "sha512-rCNl9w6Hsir1azFm/pbjBEFzLD/gi7Th5ZgOxMifB6STUfTSovYAzryWw0TRvSZ1+Qu1Z5Plw4z42UfTNA9idA=="
|
||||
},
|
||||
"websocket-ts@2.2.1": {
|
||||
"integrity": "sha512-YKPDfxlK5qOheLZ2bTIiktZO1bpfGdNCPJmTEaPW7G9UXI1GKjDdeacOrsULUS000OPNxDVOyAuKLuIWPqWM0Q=="
|
||||
},
|
||||
"whatwg-encoding@3.1.1": {
|
||||
"integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
|
||||
"dependencies": [
|
||||
|
|
@ -2547,20 +2351,19 @@
|
|||
"dependencies": [
|
||||
"jsr:@b-fuze/deno-dom@~0.1.47",
|
||||
"jsr:@bradenmacdonald/s3-lite-client@~0.7.4",
|
||||
"jsr:@core/asyncutil@^1.2.0",
|
||||
"jsr:@esroyo/scoped-performance@^3.1.0",
|
||||
"jsr:@gfx/canvas-wasm@~0.4.2",
|
||||
"jsr:@hono/hono@^4.4.6",
|
||||
"jsr:@lambdalisue/async@^2.1.1",
|
||||
"jsr:@negrel/webpush@0.3",
|
||||
"jsr:@nostrify/db@~0.39.4",
|
||||
"jsr:@nostrify/nostrify@~0.39.1",
|
||||
"jsr:@nostrify/db@~0.37.3",
|
||||
"jsr:@nostrify/nostrify@0.38",
|
||||
"jsr:@nostrify/policies@~0.36.1",
|
||||
"jsr:@nostrify/types@0.36",
|
||||
"jsr:@soapbox/kysely-pglite@1",
|
||||
"jsr:@soapbox/logi@0.3",
|
||||
"jsr:@soapbox/safe-fetch@2",
|
||||
"jsr:@std/assert@~0.225.1",
|
||||
"jsr:@std/async@^1.0.10",
|
||||
"jsr:@std/cli@0.223",
|
||||
"jsr:@std/crypto@0.224",
|
||||
"jsr:@std/encoding@0.224",
|
||||
|
|
@ -2568,8 +2371,6 @@
|
|||
"jsr:@std/json@0.223",
|
||||
"jsr:@std/media-types@~0.224.1",
|
||||
"jsr:@std/streams@0.223",
|
||||
"jsr:@std/testing@^1.0.9",
|
||||
"npm:@cashu/cashu-ts@^2.2.0",
|
||||
"npm:@electric-sql/pglite@~0.2.8",
|
||||
"npm:@isaacs/ttlcache@^1.4.1",
|
||||
"npm:@noble/secp256k1@2",
|
||||
|
|
@ -2595,6 +2396,7 @@
|
|||
"npm:nostr-tools@2.5.1",
|
||||
"npm:nostr-wasm@0.1",
|
||||
"npm:path-to-regexp@^7.1.0",
|
||||
"npm:png-to-ico@^2.1.8",
|
||||
"npm:prom-client@^15.1.2",
|
||||
"npm:sharp@~0.33.5",
|
||||
"npm:tldts@^6.0.14",
|
||||
|
|
|
23  docs/auth.md  Normal file

@ -0,0 +1,23 @@
# Authentication in Ditto

One of the main benefits of Nostr is that users control their keys. Instead of a username and password, the user has a public key (`npub` or `pubkey`) and a private key (`nsec`). The public key is a globally unique identifier for the user, and the private key is used to sign events, producing a signature that only the holder of the private key could have created.

With keys, users have full control over their identity. They can move between servers freely, and post to multiple servers at once. But with such power comes great responsibility. Users cannot lose control of their key, or they'll lose control over their account forever.

## Managing Keys

There are several ways to manage keys in Nostr, and they all come with trade-offs. It's new territory, and people are still coming up with new ideas.

The main concerns are how to **conveniently log in on multiple devices**, and **who/what to trust with your key.**

### Current Solutions

1. **Private key text.** Users copy their key between devices/apps, giving apps full control over their key. Users might email the key to themselves, or better yet use a password manager, or apps might even provide a QR code for other apps to scan. This method is convenient, but it's not secure. Keys can get compromised in transit, or by a malicious or vulnerable app.

2. **Browser extension.** For web clients, an extension can expose `getPublicKey` and `signEvent` functions to web pages without exposing the private key directly (see the sketch after this list). This option is secure, but it only works well for laptop/desktop devices. On mobile, only Firefox can do it, with no support from Safari or Chrome. It also offers no way to share a key across devices on its own.

3. **Remote signer.** Users can run a remote signer program and then connect apps to it. The signer should be running 24/7, so it's best suited for running on a server. This idea has evolved into the creation of "bunker" services. Bunkers allow users to have a traditional username and password and log in from anywhere. This method solves a lot of problems, but it also creates new ones. Users have to create an account on a separate website before they can log into your website, which makes it an option mainly for more advanced users. It's also concerning that the administrator of the bunker server has full control over your keys. None of this is a problem if you run your own remote signer, but that's not a mainstream option.

4. **Custodial.** Apps that log you in with a username/password and keep Nostr keys for each user in their database. You might not even be able to export your keys. This option may be easier for users at first, but it puts a whole lot of liability on the server, since leaks can cause permanent damage. It also gives up a lot of the benefits of Nostr.
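The browser-extension option (2) corresponds to NIP-07, which exposes a `window.nostr` object with `getPublicKey()` and `signEvent()`. A minimal sketch of how a web client might use it, with simplified types and error handling:

```ts
// Minimal NIP-07 usage sketch for a web client. `window.nostr` is only
// present when a signer extension is installed; the shape follows NIP-07.
interface Nip07Signer {
  getPublicKey(): Promise<string>;
  signEvent(event: {
    kind: number;
    created_at: number;
    tags: string[][];
    content: string;
  }): Promise<Record<string, unknown>>;
}

declare global {
  interface Window {
    nostr?: Nip07Signer;
  }
}

export async function postNote(content: string) {
  const signer = window.nostr;
  if (!signer) throw new Error('No NIP-07 signer extension found');

  const pubkey = await signer.getPublicKey();

  // Kind 1 is a plain text note (NIP-01); the extension fills in id, pubkey, and sig.
  const signed = await signer.signEvent({
    kind: 1,
    created_at: Math.floor(Date.now() / 1000),
    tags: [],
    content,
  });

  return { pubkey, signed };
}
```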
Each of these ideas could be improved upon greatly with new experiments and technical progress. But to Ditto, user freedom matters the most, so we're focusing on non-custodial solutions. Even though there are security risks to copying keys around, the onus is on the user. The user may fall victim to a targeted attack (or make a stupid mistake), whereas custodial servers have the ability to wipe out entire demographics of users at once. Therefore we believe that custodial solutions are actually _less_ secure than users copying keys around. Users must take precautions about which apps to trust with their private key until we improve this area and make it more secure (likely with better support for browser extensions, OS key management, and more).
27  docs/debugging.md  Normal file

@ -0,0 +1,27 @@
# Debugging Ditto

Running the command `deno task debug` will start the Ditto server in debug mode, making it possible to inspect it with Chromium-based browsers by visiting `chrome://inspect`.

From there, go to the "Performance" tab and click "Start profiling". Perform the actions you want to profile, then click "Stop profiling". You can then inspect the call stack and see where the time is being spent.

## Remote debugging

If the Ditto server is on a separate machine, you will first need to put it into debug mode. Edit its systemd file (usually located at `/etc/systemd/system/ditto.service`) and change `deno task start` to `deno task debug` in the `ExecStart` line. Then run `systemctl daemon-reload` and `systemctl restart ditto`.

To access the debugger remotely, you can use SSH port forwarding. Run this command on your local machine, replacing `<user>@<host>` with the SSH login for the remote machine:

```sh
ssh -L 9229:localhost:9229 <user>@<host>
```

Then, in Chromium, go to `chrome://inspect` and the Ditto server should be available.

## SQL performance

To track slow queries, first set `DEBUG=ditto:sql` in the environment so only SQL logs are shown.

Then, grep out the `(0.00s)` entries to see any queries above 0.001s:

```sh
journalctl -fu ditto | grep -v '(0.00s)'
```
15  docs/installation.md  Normal file

@ -0,0 +1,15 @@
# Installing Ditto

First, install Deno:

```sh
curl -fsSL https://deno.land/x/install/install.sh | sudo DENO_INSTALL=/usr/local sh
```

Now, run Ditto:

```sh
deno run -A https://gitlab.com/soapbox-pub/ditto/-/raw/main/src/server.ts
```

That's it! Ditto is now running on your machine.
9  docs/mastodon-api.md  Normal file

@ -0,0 +1,9 @@
# Mastodon API

Ditto implements Mastodon's client-server API, a REST API used by Mastodon mobile apps and frontends to interact with Mastodon servers. While it was originally designed for Mastodon, it has been adopted by other ActivityPub servers such as Pleroma, Mitra, Friendica, and many others.

Note that the Mastodon API is **not** ActivityPub. It is not the API used to federate between servers. Instead, it enables user interfaces, mobile apps, bots, and other clients to interact with Mastodon servers.

Mastodon is built in Ruby on Rails, and its API is inspired by Twitter's legacy REST API. Rails, being an MVC framework, has "models", which it maps directly to "Entities" in its API.

Endpoints return either a single Entity or an array of Entities. Entities are JSON objects with a specific structure, and are documented in the [Mastodon API documentation](https://docs.joinmastodon.org/api/).
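To make the Entity idea concrete, here is a minimal TypeScript sketch that calls one Mastodon API endpoint; the base URL is a placeholder and `InstanceEntity` types only a few of the documented fields:

```ts
// Minimal Mastodon API client sketch. The base URL is a placeholder,
// and InstanceEntity covers only a few fields of the Instance entity.
interface InstanceEntity {
  uri: string;
  title: string;
  version: string;
}

async function fetchInstance(baseUrl: string): Promise<InstanceEntity> {
  // GET /api/v1/instance returns a single Instance entity as JSON.
  const res = await fetch(`${baseUrl}/api/v1/instance`);
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  return await res.json() as InstanceEntity;
}

const instance = await fetchInstance('https://ditto.example.com');
console.log(instance.title, instance.version);
```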
226  log.json  Normal file

File diff suppressed because one or more lines are too long
|
|
@ -1,9 +0,0 @@
|
|||
import { assert } from '@std/assert';
|
||||
|
||||
import { getCaptchaImages } from './assets.ts';
|
||||
|
||||
Deno.test('getCaptchaImages', async () => {
|
||||
// If this function runs at all, it most likely worked.
|
||||
const { bgImages } = await getCaptchaImages();
|
||||
assert(bgImages.length);
|
||||
});
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
import { type Image, loadImage } from '@gfx/canvas-wasm';
|
||||
|
||||
export interface CaptchaImages {
|
||||
bgImages: Image[];
|
||||
puzzleMask: Image;
|
||||
puzzleHole: Image;
|
||||
}
|
||||
|
||||
export async function getCaptchaImages(): Promise<CaptchaImages> {
|
||||
const bgImages = await getBackgroundImages();
|
||||
|
||||
const puzzleMask = await loadImage(
|
||||
await Deno.readFile(new URL('./assets/puzzle/puzzle-mask.png', import.meta.url)),
|
||||
);
|
||||
const puzzleHole = await loadImage(
|
||||
await Deno.readFile(new URL('./assets/puzzle/puzzle-hole.png', import.meta.url)),
|
||||
);
|
||||
|
||||
return { bgImages, puzzleMask, puzzleHole };
|
||||
}
|
||||
|
||||
async function getBackgroundImages(): Promise<Image[]> {
|
||||
const path = new URL('./assets/bg/', import.meta.url);
|
||||
|
||||
const images: Image[] = [];
|
||||
|
||||
for await (const dirEntry of Deno.readDir(path)) {
|
||||
if (dirEntry.isFile && dirEntry.name.endsWith('.jpg')) {
|
||||
const file = await Deno.readFile(new URL(dirEntry.name, path));
|
||||
const image = await loadImage(file);
|
||||
images.push(image);
|
||||
}
|
||||
}
|
||||
|
||||
return images;
|
||||
}
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
import { createCanvas } from '@gfx/canvas-wasm';
|
||||
import { assertNotEquals } from '@std/assert';
|
||||
import { encodeHex } from '@std/encoding/hex';
|
||||
|
||||
import { addNoise } from './canvas.ts';
|
||||
|
||||
// This is almost impossible to truly test,
|
||||
// but we can at least check that the image on the canvas changes.
|
||||
Deno.test('addNoise', async () => {
|
||||
const canvas = createCanvas(100, 100);
|
||||
const ctx = canvas.getContext('2d');
|
||||
|
||||
const dataBefore = ctx.getImageData(0, 0, canvas.width, canvas.height);
|
||||
const hashBefore = await crypto.subtle.digest('SHA-256', dataBefore.data);
|
||||
|
||||
addNoise(ctx, canvas.width, canvas.height);
|
||||
|
||||
const dataAfter = ctx.getImageData(0, 0, canvas.width, canvas.height);
|
||||
const hashAfter = await crypto.subtle.digest('SHA-256', dataAfter.data);
|
||||
|
||||
assertNotEquals(encodeHex(hashBefore), encodeHex(hashAfter));
|
||||
});
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
import type { CanvasRenderingContext2D } from '@gfx/canvas-wasm';
|
||||
|
||||
/**
|
||||
* Add a small amount of noise to the image.
|
||||
* This protects against an attacker pregenerating every possible solution and then doing a reverse-lookup.
|
||||
*/
|
||||
export function addNoise(ctx: CanvasRenderingContext2D, width: number, height: number): void {
|
||||
const imageData = ctx.getImageData(0, 0, width, height);
|
||||
|
||||
// Loop over every pixel.
|
||||
for (let i = 0; i < imageData.data.length; i += 4) {
|
||||
// Add/subtract a small amount from each color channel.
|
||||
// We skip i+3 because that's the alpha channel, which we don't want to modify.
|
||||
for (let j = 0; j < 3; j++) {
|
||||
const alteration = Math.floor(Math.random() * 11) - 5; // Vary between -5 and +5
|
||||
imageData.data[i + j] = Math.min(Math.max(imageData.data[i + j] + alteration, 0), 255);
|
||||
}
|
||||
}
|
||||
|
||||
ctx.putImageData(imageData, 0, 0);
|
||||
}
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
import { getCaptchaImages } from './assets.ts';
|
||||
import { generateCaptcha, verifyCaptchaSolution } from './captcha.ts';
|
||||
|
||||
Deno.test('generateCaptcha', async () => {
|
||||
const images = await getCaptchaImages();
|
||||
generateCaptcha(images, { w: 370, h: 400 }, { w: 65, h: 65 });
|
||||
});
|
||||
|
||||
Deno.test('verifyCaptchaSolution', () => {
|
||||
verifyCaptchaSolution({ w: 65, h: 65 }, { x: 0, y: 0 }, { x: 0, y: 0 });
|
||||
verifyCaptchaSolution({ w: 65, h: 65 }, { x: 0, y: 0 }, { x: 10, y: 10 });
|
||||
});
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
import { createCanvas, type EmulatedCanvas2D } from '@gfx/canvas-wasm';
|
||||
|
||||
import { addNoise } from './canvas.ts';
|
||||
import { areIntersecting, type Dimensions, type Point } from './geometry.ts';
|
||||
|
||||
import type { CaptchaImages } from './assets.ts';
|
||||
|
||||
/** Generate a puzzle captcha, returning canvases for the board and piece. */
|
||||
export function generateCaptcha(
|
||||
{ bgImages, puzzleMask, puzzleHole }: CaptchaImages,
|
||||
bgSize: Dimensions,
|
||||
puzzleSize: Dimensions,
|
||||
): {
|
||||
bg: EmulatedCanvas2D;
|
||||
puzzle: EmulatedCanvas2D;
|
||||
solution: Point;
|
||||
} {
|
||||
const bg = createCanvas(bgSize.w, bgSize.h);
|
||||
const puzzle = createCanvas(puzzleSize.w, puzzleSize.h);
|
||||
|
||||
const ctx = bg.getContext('2d');
|
||||
const pctx = puzzle.getContext('2d');
|
||||
|
||||
const solution = generateSolution(bgSize, puzzleSize);
|
||||
const bgImage = bgImages[Math.floor(Math.random() * bgImages.length)];
|
||||
|
||||
// Draw the background image.
|
||||
ctx.drawImage(bgImage, 0, 0, bg.width, bg.height);
|
||||
addNoise(ctx, bg.width, bg.height);
|
||||
|
||||
// Draw the puzzle piece.
|
||||
pctx.drawImage(puzzleMask, 0, 0, puzzle.width, puzzle.height);
|
||||
pctx.globalCompositeOperation = 'source-in';
|
||||
pctx.drawImage(bg, solution.x, solution.y, puzzle.width, puzzle.height, 0, 0, puzzle.width, puzzle.height);
|
||||
|
||||
// Draw the hole.
|
||||
ctx.globalCompositeOperation = 'source-atop';
|
||||
ctx.drawImage(puzzleHole, solution.x, solution.y, puzzle.width, puzzle.height);
|
||||
|
||||
return {
|
||||
bg,
|
||||
puzzle,
|
||||
solution,
|
||||
};
|
||||
}
|
||||
|
||||
export function verifyCaptchaSolution(puzzleSize: Dimensions, point: Point, solution: Point): boolean {
|
||||
return areIntersecting(
|
||||
{ ...point, ...puzzleSize },
|
||||
{ ...solution, ...puzzleSize },
|
||||
);
|
||||
}
|
||||
|
||||
/** Random coordinates such that the piece fits within the canvas. */
|
||||
function generateSolution(bgSize: Dimensions, puzzleSize: Dimensions): Point {
|
||||
return {
|
||||
x: Math.floor(Math.random() * (bgSize.w - puzzleSize.w)),
|
||||
y: Math.floor(Math.random() * (bgSize.h - puzzleSize.h)),
|
||||
};
|
||||
}
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
{
|
||||
"name": "@ditto/captcha",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
}
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { areIntersecting } from './geometry.ts';
|
||||
|
||||
Deno.test('areIntersecting', () => {
|
||||
assertEquals(areIntersecting({ x: 0, y: 0, w: 10, h: 10 }, { x: 5, y: 5, w: 10, h: 10 }), true);
|
||||
assertEquals(areIntersecting({ x: 0, y: 0, w: 10, h: 10 }, { x: 15, y: 15, w: 10, h: 10 }), false);
|
||||
});
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
export interface Point {
|
||||
x: number;
|
||||
y: number;
|
||||
}
|
||||
|
||||
export interface Dimensions {
|
||||
w: number;
|
||||
h: number;
|
||||
}
|
||||
|
||||
type Rectangle = Point & Dimensions;
|
||||
|
||||
/** Check if the two rectangles intersect by at least `threshold` percent. */
|
||||
export function areIntersecting(rect1: Rectangle, rect2: Rectangle, threshold = 0.5): boolean {
|
||||
const r1cx = rect1.x + rect1.w / 2;
|
||||
const r2cx = rect2.x + rect2.w / 2;
|
||||
|
||||
const r1cy = rect1.y + rect1.h / 2;
|
||||
const r2cy = rect2.y + rect2.h / 2;
|
||||
|
||||
const dist = Math.sqrt((r2cx - r1cx) ** 2 + (r2cy - r1cy) ** 2);
|
||||
|
||||
const e1 = Math.sqrt(rect1.h ** 2 + rect1.w ** 2) / 2;
|
||||
const e2 = Math.sqrt(rect2.h ** 2 + rect2.w ** 2) / 2;
|
||||
|
||||
return dist <= (e1 + e2) * threshold;
|
||||
}
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
export { getCaptchaImages } from './assets.ts';
|
||||
export { generateCaptcha, verifyCaptchaSolution } from './captcha.ts';
|
||||
|
|
@ -1,457 +0,0 @@
|
|||
import { type NostrFilter, NSecSigner } from '@nostrify/nostrify';
|
||||
import { NPostgres } from '@nostrify/db';
|
||||
import { genEvent } from '@nostrify/nostrify/test';
|
||||
|
||||
import { generateSecretKey, getPublicKey } from 'nostr-tools';
|
||||
import { bytesToString, stringToBytes } from '@scure/base';
|
||||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { DittoPolyPg, TestDB } from '@ditto/db';
|
||||
import { DittoConf } from '@ditto/conf';
|
||||
|
||||
import { getLastRedeemedNutzap, getMintsToProofs, getWallet, organizeProofs, validateAndParseWallet } from './cashu.ts';
|
||||
|
||||
Deno.test('validateAndParseWallet function returns valid data', async () => {
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const orig = new DittoPolyPg(conf.databaseUrl);
|
||||
|
||||
await using db = new TestDB(orig);
|
||||
await db.migrate();
|
||||
await db.clear();
|
||||
|
||||
const store = new NPostgres(orig.kysely);
|
||||
|
||||
const sk = generateSecretKey();
|
||||
const signer = new NSecSigner(sk);
|
||||
const pubkey = await signer.getPublicKey();
|
||||
const privkey = bytesToString('hex', sk);
|
||||
const p2pk = getPublicKey(stringToBytes('hex', privkey));
|
||||
|
||||
// Wallet
|
||||
const wallet = genEvent({
|
||||
kind: 17375,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify([
|
||||
['privkey', privkey],
|
||||
['mint', 'https://mint.soul.com'],
|
||||
]),
|
||||
),
|
||||
}, sk);
|
||||
await store.event(wallet);
|
||||
|
||||
// Nutzap information
|
||||
const nutzapInfo = genEvent({
|
||||
kind: 10019,
|
||||
tags: [
|
||||
['pubkey', p2pk],
|
||||
['mint', 'https://mint.soul.com'],
|
||||
['relay', conf.relay],
|
||||
],
|
||||
}, sk);
|
||||
await store.event(nutzapInfo);
|
||||
|
||||
const { data, error } = await validateAndParseWallet(store, signer, pubkey);
|
||||
|
||||
assertEquals(error, null);
|
||||
assertEquals(data, {
|
||||
wallet,
|
||||
nutzapInfo,
|
||||
privkey,
|
||||
p2pk,
|
||||
mints: ['https://mint.soul.com'],
|
||||
relays: [conf.relay],
|
||||
});
|
||||
});
|
||||
|
||||
Deno.test('organizeProofs function is working', async () => {
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const orig = new DittoPolyPg(conf.databaseUrl);
|
||||
|
||||
await using db = new TestDB(orig);
|
||||
await db.migrate();
|
||||
await db.clear();
|
||||
|
||||
const store = new NPostgres(orig.kysely);
|
||||
|
||||
const sk = generateSecretKey();
|
||||
const signer = new NSecSigner(sk);
|
||||
const pubkey = await signer.getPublicKey();
|
||||
|
||||
const event1 = genEvent({
|
||||
kind: 7375,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify({
|
||||
mint: 'https://mint.soul.com',
|
||||
proofs: [
|
||||
{
|
||||
id: '005c2502034d4f12',
|
||||
amount: 25,
|
||||
secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=',
|
||||
C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46',
|
||||
},
|
||||
{
|
||||
id: '005c2502034d4f12',
|
||||
amount: 25,
|
||||
secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=',
|
||||
C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46',
|
||||
},
|
||||
{
|
||||
id: '005c2502034d4f12',
|
||||
amount: 25,
|
||||
secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=',
|
||||
C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46',
|
||||
},
|
||||
{
|
||||
id: '005c2502034d4f12',
|
||||
amount: 25,
|
||||
secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=',
|
||||
C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46',
|
||||
},
|
||||
],
|
||||
del: [],
|
||||
}),
|
||||
),
|
||||
}, sk);
|
||||
await store.event(event1);
|
||||
|
||||
const proof1 = {
|
||||
'id': '004f7adf2a04356c',
|
||||
'amount': 1,
|
||||
'secret': '6780378b186cf7ada639ce4807803ad5e4a71217688430512f35074f9bca99c0',
|
||||
'C': '03f0dd8df04427c8c53e4ae9ce8eb91c4880203d6236d1d745c788a5d7a47aaff3',
|
||||
'dleq': {
|
||||
'e': 'bd22fcdb7ede1edb52b9b8c6e1194939112928e7b4fc0176325e7671fb2bd351',
|
||||
's': 'a9ad015571a0e538d62966a16d2facf806fb956c746a3dfa41fa689486431c67',
|
||||
'r': 'b283980e30bf5a31a45e5e296e93ae9f20bf3a140c884b3b4cd952dbecc521df',
|
||||
},
|
||||
};
|
||||
const token1 = JSON.stringify({
|
||||
mint: 'https://mint-fashion.com',
|
||||
proofs: [proof1],
|
||||
del: [],
|
||||
});
|
||||
|
||||
const event2 = genEvent({
|
||||
kind: 7375,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
token1,
|
||||
),
|
||||
}, sk);
|
||||
await store.event(event2);
|
||||
|
||||
const proof2 = {
|
||||
'id': '004f7adf2a04356c',
|
||||
'amount': 123,
|
||||
'secret': '6780378b186cf7ada639ce4807803ad5e4a71217688430512f35074f9bca99c0',
|
||||
'C': '03f0dd8df04427c8c53e4ae9ce8eb91c4880203d6236d1d745c788a5d7a47aaff3',
|
||||
'dleq': {
|
||||
'e': 'bd22fcdb7ede1edb52b9b8c6e1194939112928e7b4fc0176325e7671fb2bd351',
|
||||
's': 'a9ad015571a0e538d62966a16d2facf806fb956c746a3dfa41fa689486431c67',
|
||||
'r': 'b283980e30bf5a31a45e5e296e93ae9f20bf3a140c884b3b4cd952dbecc521df',
|
||||
},
|
||||
};
|
||||
|
||||
const token2 = JSON.stringify({
|
||||
mint: 'https://mint-fashion.com',
|
||||
proofs: [proof2],
|
||||
del: [],
|
||||
});
|
||||
|
||||
const event3 = genEvent({
|
||||
kind: 7375,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
token2,
|
||||
),
|
||||
}, sk);
|
||||
await store.event(event3);
|
||||
|
||||
const unspentProofs = await store.query([{ kinds: [7375], authors: [pubkey] }]);
|
||||
|
||||
const organizedProofs = await organizeProofs(unspentProofs, signer);
|
||||
|
||||
assertEquals(organizedProofs, {
|
||||
'https://mint.soul.com': {
|
||||
totalBalance: 100,
|
||||
[event1.id]: { event: event1, balance: 100 },
|
||||
},
|
||||
'https://mint-fashion.com': {
|
||||
totalBalance: 124,
|
||||
[event2.id]: { event: event2, balance: 1 },
|
||||
[event3.id]: { event: event3, balance: 123 },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
Deno.test('getLastRedeemedNutzap function is working', async () => {
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const orig = new DittoPolyPg(conf.databaseUrl);
|
||||
|
||||
await using db = new TestDB(orig);
|
||||
await db.migrate();
|
||||
await db.clear();
|
||||
|
||||
const store = new NPostgres(orig.kysely);
|
||||
|
||||
const sk = generateSecretKey();
|
||||
const signer = new NSecSigner(sk);
|
||||
const pubkey = await signer.getPublicKey();
|
||||
|
||||
const event1 = genEvent({
|
||||
kind: 7376,
|
||||
content: '<nip-44-encrypted>',
|
||||
created_at: Math.floor(Date.now() / 1000), // now
|
||||
tags: [
|
||||
['e', '<event-id-of-created-token>', '', 'redeemed'],
|
||||
],
|
||||
}, sk);
|
||||
await store.event(event1);
|
||||
|
||||
const event2 = genEvent({
|
||||
kind: 7376,
|
||||
content: '<nip-44-encrypted>',
|
||||
created_at: Math.floor((Date.now() - 86400000) / 1000), // yesterday
|
||||
tags: [
|
||||
['e', '<event-id-of-created-token>', '', 'redeemed'],
|
||||
],
|
||||
}, sk);
|
||||
await store.event(event2);
|
||||
|
||||
const event3 = genEvent({
|
||||
kind: 7376,
|
||||
content: '<nip-44-encrypted>',
|
||||
created_at: Math.floor((Date.now() - 86400000) / 1000), // yesterday
|
||||
tags: [
|
||||
['e', '<event-id-of-created-token>', '', 'redeemed'],
|
||||
],
|
||||
}, sk);
|
||||
await store.event(event3);
|
||||
|
||||
const event4 = genEvent({
|
||||
kind: 7376,
|
||||
content: '<nip-44-encrypted>',
|
||||
created_at: Math.floor((Date.now() + 86400000) / 1000), // tomorrow
|
||||
tags: [
|
||||
['e', '<event-id-of-created-token>', '', 'redeemed'],
|
||||
],
|
||||
}, sk);
|
||||
await store.event(event4);
|
||||
|
||||
const event = await getLastRedeemedNutzap(store, pubkey);
|
||||
|
||||
assertEquals(event, event4);
|
||||
});
|
||||
|
||||
Deno.test('getMintsToProofs function is working', async () => {
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const orig = new DittoPolyPg(conf.databaseUrl);
|
||||
|
||||
await using db = new TestDB(orig);
|
||||
await db.migrate();
|
||||
await db.clear();
|
||||
|
||||
const store = new NPostgres(orig.kysely);
|
||||
|
||||
const sk = generateSecretKey();
|
||||
const signer = new NSecSigner(sk);
|
||||
const pubkey = await signer.getPublicKey();
|
||||
|
||||
const redeemedNutzap = genEvent({
|
||||
created_at: Math.floor(Date.now() / 1000), // now
|
||||
kind: 9321,
|
||||
content: 'Thanks buddy! Nice idea.',
|
||||
tags: [
|
||||
[
|
||||
'proof',
|
||||
JSON.stringify({
|
||||
id: '005c2502034d4f12',
|
||||
amount: 25,
|
||||
secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=',
|
||||
C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46',
|
||||
}),
|
||||
],
|
||||
['u', 'https://mint.soul.com'],
|
||||
['e', 'nutzapped-post'],
|
||||
['p', '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4'],
|
||||
],
|
||||
}, sk);
|
||||
|
||||
await store.event(redeemedNutzap);
|
||||
|
||||
await new Promise((r) => setTimeout(r, 1000));
|
||||
|
||||
const history = genEvent({
|
||||
created_at: Math.floor(Date.now() / 1000), // now
|
||||
kind: 7376,
|
||||
content: 'nip-44-encrypted',
|
||||
tags: [
|
||||
['e', redeemedNutzap.id, conf.relay, 'redeemed'],
|
||||
['p', redeemedNutzap.pubkey],
|
||||
],
|
||||
}, sk);
|
||||
|
||||
await store.event(history);
|
||||
|
||||
const nutzap = genEvent({
|
||||
created_at: Math.floor(Date.now() / 1000), // now
|
||||
kind: 9321,
|
||||
content: 'Thanks buddy! Nice idea.',
|
||||
tags: [
|
||||
[
|
||||
'proof',
|
||||
JSON.stringify({
|
||||
id: '005c2502034d4f12',
|
||||
amount: 50,
|
||||
secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=',
|
||||
C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46',
|
||||
}),
|
||||
],
|
||||
['u', 'https://mint.soul.com'],
|
||||
['e', 'nutzapped-post'],
|
||||
['p', '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4'],
|
||||
],
|
||||
}, sk);
|
||||
|
||||
await store.event(nutzap);
|
||||
|
||||
const nutzapsFilter: NostrFilter = {
|
||||
kinds: [9321],
|
||||
'#p': ['47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4'],
|
||||
'#u': ['https://mint.soul.com'],
|
||||
};
|
||||
|
||||
const lastRedeemedNutzap = await getLastRedeemedNutzap(store, pubkey);
|
||||
if (lastRedeemedNutzap) {
|
||||
nutzapsFilter.since = lastRedeemedNutzap.created_at;
|
||||
}
|
||||
|
||||
const mintsToProofs = await getMintsToProofs(store, nutzapsFilter, conf.relay);
|
||||
|
||||
assertEquals(mintsToProofs, {
|
||||
'https://mint.soul.com': {
|
||||
proofs: [{
|
||||
id: '005c2502034d4f12',
|
||||
amount: 50,
|
||||
secret: 'z+zyxAVLRqN9lEjxuNPSyRJzEstbl69Jc1vtimvtkPg=',
|
||||
C: '0241d98a8197ef238a192d47edf191a9de78b657308937b4f7dd0aa53beae72c46',
|
||||
}],
|
||||
toBeRedeemed: [
|
||||
['e', nutzap.id, conf.relay, 'redeemed'],
|
||||
['p', nutzap.pubkey],
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
Deno.test('getWallet function is working', async () => {
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const orig = new DittoPolyPg(conf.databaseUrl);
|
||||
|
||||
await using db = new TestDB(orig);
|
||||
await db.migrate();
|
||||
await db.clear();
|
||||
|
||||
const sk = generateSecretKey();
|
||||
const signer = new NSecSigner(sk);
|
||||
const pubkey = await signer.getPublicKey();
|
||||
|
||||
const privkey = bytesToString('hex', sk);
|
||||
const p2pk = getPublicKey(stringToBytes('hex', privkey));
|
||||
|
||||
const relay = new NPostgres(orig.kysely);
|
||||
|
||||
const proofs = genEvent({
|
||||
kind: 7375,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify({
|
||||
mint: 'https://cuiaba.mint.com',
|
||||
proofs: [
|
||||
{
|
||||
'id': '004f7adf2a04356c',
|
||||
'amount': 2,
|
||||
'secret': '700312ccba84cb15d6a008c1d01b0dbf00025d3f2cb01f030a756553aca52de3',
|
||||
'C': '02f0ff21fdd19a547d66d9ca09df5573ad88d28e4951825130708ba53cbed19561',
|
||||
'dleq': {
|
||||
'e': '9c44a58cb429be619c474b97216009bd96ff1b7dd145b35828a14f180c03a86f',
|
||||
's': 'a11b8f616dfee5157a2c7c36da0ee181fe71b28729bee56b789e472c027ceb3b',
|
||||
'r': 'c51b9ade8cfd3939b78d509c9723f86b43b432680f55a6791e3e252b53d4b465',
|
||||
},
|
||||
},
|
||||
{
|
||||
'id': '004f7adf2a04356c',
|
||||
'amount': 4,
|
||||
'secret': '5936f22d486734c03bd50b89aaa34be8e99f20d199bcebc09da8716890e95fb3',
|
||||
'C': '039b55f92c02243e31b04e964f2ad0bcd2ed3229e334f4c7a81037392b8411d6e7',
|
||||
'dleq': {
|
||||
'e': '7b7be700f2515f1978ca27bc1045d50b9d146bb30d1fe0c0f48827c086412b9e',
|
||||
's': 'cf44b08c7e64fd2bd9199667327b10a29b7c699b10cb7437be518203b25fe3fa',
|
||||
'r': 'ec0cf54ce2d17fae5db1c6e5e5fd5f34d7c7df18798b8d92bcb7cb005ec2f93b',
|
||||
},
|
||||
},
|
||||
{
|
||||
'id': '004f7adf2a04356c',
|
||||
'amount': 16,
|
||||
'secret': '89e2315c058f3a010972dc6d546b1a2e81142614d715c28d169c6afdba5326bd',
|
||||
'C': '02bc1c3756e77563fe6c7769fc9d9bc578ea0b84bf4bf045cf31c7e2d3f3ad0818',
|
||||
'dleq': {
|
||||
'e': '8dfa000c9e2a43d35d2a0b1c7f36a96904aed35457ca308c6e7d10f334f84e72',
|
||||
's': '9270a914b1a53e32682b1277f34c5cfa931a6fab701a5dbee5855b68ddf621ab',
|
||||
'r': 'ae71e572839a3273b0141ea2f626915592b4b3f5f91b37bbeacce0d3396332c9',
|
||||
},
|
||||
},
|
||||
{
|
||||
'id': '004f7adf2a04356c',
|
||||
'amount': 16,
|
||||
'secret': '06f2209f313d92505ae5c72087263f711b7a97b1b29a71886870e672a1b180ac',
|
||||
'C': '02fa2ad933b62449e2765255d39593c48293f10b287cf7036b23570c8f01c27fae',
|
||||
'dleq': {
|
||||
'e': 'e696d61f6259ae97f8fe13a5af55d47f526eea62a7998bf888626fd1ae35e720',
|
||||
's': 'b9f1ef2a8aec0e73c1a4aaff67e28b3ca3bc4628a532113e0733643c697ed7ce',
|
||||
'r': 'b66ed62852811d14e9bf822baebfda92ba47c5c4babc4f2499d9ce81fbbbd3f2',
|
||||
},
|
||||
},
|
||||
],
|
||||
del: [],
|
||||
}),
|
||||
),
|
||||
created_at: Math.floor(Date.now() / 1000), // now
|
||||
}, sk);
|
||||
|
||||
await relay.event(proofs);
|
||||
|
||||
await relay.event(genEvent({
|
||||
kind: 10019,
|
||||
tags: [
|
||||
['pubkey', p2pk],
|
||||
['mint', 'https://mint.soul.com'],
|
||||
['mint', 'https://cuiaba.mint.com'],
|
||||
['relay', conf.relay],
|
||||
],
|
||||
}, sk));
|
||||
|
||||
const wallet = genEvent({
|
||||
kind: 17375,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify([
|
||||
['privkey', privkey],
|
||||
['mint', 'https://mint.soul.com'],
|
||||
]),
|
||||
),
|
||||
}, sk);
|
||||
|
||||
await relay.event(wallet);
|
||||
|
||||
const { wallet: walletEntity } = await getWallet(relay, pubkey, signer);
|
||||
|
||||
assertEquals(walletEntity, {
|
||||
balance: 38,
|
||||
mints: ['https://mint.soul.com', 'https://cuiaba.mint.com'],
|
||||
relays: [conf.relay],
|
||||
pubkey_p2pk: p2pk,
|
||||
});
|
||||
});
|
||||
|
|
@ -1,302 +0,0 @@
|
|||
import type { Proof } from '@cashu/cashu-ts';
|
||||
import { type NostrEvent, type NostrFilter, type NostrSigner, NSchema as n, type NStore } from '@nostrify/nostrify';
|
||||
import { getPublicKey } from 'nostr-tools';
|
||||
import { stringToBytes } from '@scure/base';
|
||||
import { logi } from '@soapbox/logi';
|
||||
import type { SetRequired } from 'type-fest';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { proofSchema, tokenEventSchema, type Wallet } from './schemas.ts';
|
||||
|
||||
type Data = {
|
||||
wallet: NostrEvent;
|
||||
nutzapInfo: NostrEvent;
|
||||
privkey: string;
|
||||
p2pk: string;
|
||||
mints: string[];
|
||||
relays: string[];
|
||||
};
|
||||
|
||||
type CustomError =
|
||||
| { message: 'Wallet not found'; code: 'wallet-not-found' }
|
||||
| { message: 'Could not decrypt wallet content'; code: 'fail-decrypt-wallet' }
|
||||
| { message: 'Could not parse wallet content'; code: 'fail-parse-wallet' }
|
||||
| { message: 'Wallet does not contain privkey or privkey is not a valid nostr id'; code: 'privkey-missing' }
|
||||
| { message: 'Nutzap information event not found'; code: 'nutzap-info-not-found' }
|
||||
| {
|
||||
message:
|
||||
"You do not have a 'pubkey' tag in your nutzap information event or the one you have does not match the one derivated from the wallet.";
|
||||
code: 'pubkey-mismatch';
|
||||
}
|
||||
| { message: 'You do not have any mints in your nutzap information event.'; code: 'mints-missing' };
|
||||
|
||||
/** Ensures that the wallet event and nutzap information event are correct. */
|
||||
async function validateAndParseWallet(
|
||||
store: NStore,
|
||||
signer: SetRequired<NostrSigner, 'nip44'>,
|
||||
pubkey: string,
|
||||
opts?: { signal?: AbortSignal },
|
||||
): Promise<{ data: Data; error: null } | { data: null; error: CustomError }> {
|
||||
const [wallet] = await store.query([{ authors: [pubkey], kinds: [17375] }], { signal: opts?.signal });
|
||||
if (!wallet) {
|
||||
return { error: { message: 'Wallet not found', code: 'wallet-not-found' }, data: null };
|
||||
}
|
||||
|
||||
let decryptedContent: string;
|
||||
try {
|
||||
decryptedContent = await signer.nip44.decrypt(pubkey, wallet.content);
|
||||
} catch (e) {
|
||||
logi({
|
||||
level: 'error',
|
||||
ns: 'ditto.api.cashu.wallet',
|
||||
id: wallet.id,
|
||||
kind: wallet.kind,
|
||||
error: errorJson(e),
|
||||
});
|
||||
return { data: null, error: { message: 'Could not decrypt wallet content', code: 'fail-decrypt-wallet' } };
|
||||
}
|
||||
|
||||
let contentTags: string[][];
|
||||
try {
|
||||
contentTags = n.json().pipe(z.string().array().array()).parse(decryptedContent);
|
||||
} catch {
|
||||
return { data: null, error: { message: 'Could not parse wallet content', code: 'fail-parse-wallet' } };
|
||||
}
|
||||
|
||||
const privkey = contentTags.find(([value]) => value === 'privkey')?.[1];
|
||||
if (!privkey || !isNostrId(privkey)) {
|
||||
return {
|
||||
data: null,
|
||||
error: { message: 'Wallet does not contain privkey or privkey is not a valid nostr id', code: 'privkey-missing' },
|
||||
};
|
||||
}
|
||||
const p2pk = getPublicKey(stringToBytes('hex', privkey));
|
||||
|
||||
const [nutzapInfo] = await store.query([{ authors: [pubkey], kinds: [10019] }], { signal: opts?.signal });
|
||||
if (!nutzapInfo) {
|
||||
return { data: null, error: { message: 'Nutzap information event not found', code: 'nutzap-info-not-found' } };
|
||||
}
|
||||
|
||||
const nutzapInformationPubkey = nutzapInfo.tags.find(([name]) => name === 'pubkey')?.[1];
|
||||
if (!nutzapInformationPubkey || (nutzapInformationPubkey !== p2pk)) {
|
||||
return {
|
||||
data: null,
|
||||
error: {
|
||||
message:
|
||||
"You do not have a 'pubkey' tag in your nutzap information event or the one you have does not match the one derivated from the wallet.",
|
||||
code: 'pubkey-mismatch',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const mints = [...new Set(nutzapInfo.tags.filter(([name]) => name === 'mint').map(([_, value]) => value))];
|
||||
if (mints.length < 1) {
|
||||
return {
|
||||
data: null,
|
||||
error: { message: 'You do not have any mints in your nutzap information event.', code: 'mints-missing' },
|
||||
};
|
||||
}
|
||||
|
||||
const relays = [...new Set(nutzapInfo.tags.filter(([name]) => name === 'relay').map(([_, value]) => value))];
|
||||
|
||||
return { data: { wallet, nutzapInfo, privkey, p2pk, mints, relays }, error: null };
|
||||
}
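// Usage sketch (illustrative, not part of the original module). `store`, `signer`,
// and `pubkey` are assumed placeholders for a real NStore, a NIP-44-capable signer,
// and the user's hex pubkey.
async function _exampleValidateWallet(
  store: NStore,
  signer: SetRequired<NostrSigner, 'nip44'>,
  pubkey: string,
): Promise<void> {
  const { data, error } = await validateAndParseWallet(store, signer, pubkey);
  if (error) {
    console.error(`${error.code}: ${error.message}`);
    return;
  }
  console.log(`Wallet ${data.wallet.id} uses ${data.mints.length} mint(s), p2pk ${data.p2pk}`);
}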
|
||||
|
||||
type OrganizedProofs = {
|
||||
[mintUrl: string]: {
|
||||
/** Total balance in this mint */
|
||||
totalBalance: number;
|
||||
/** Event id */
|
||||
[eventId: string]: {
|
||||
event: NostrEvent;
|
||||
/** Total balance in this event */
|
||||
balance: number;
|
||||
} | number;
|
||||
};
|
||||
};
|
||||
async function organizeProofs(
|
||||
events: NostrEvent[],
|
||||
signer: SetRequired<NostrSigner, 'nip44'>,
|
||||
): Promise<OrganizedProofs> {
|
||||
const organizedProofs: OrganizedProofs = {};
|
||||
const pubkey = await signer.getPublicKey();
|
||||
|
||||
for (const event of events) {
|
||||
const decryptedContent = await signer.nip44.decrypt(pubkey, event.content);
|
||||
const { data: token, success } = n.json().pipe(tokenEventSchema).safeParse(decryptedContent);
|
||||
if (!success) {
|
||||
continue;
|
||||
}
|
||||
const { mint, proofs } = token;
|
||||
|
||||
const balance = proofs.reduce((prev, current) => prev + current.amount, 0);
|
||||
|
||||
if (!organizedProofs[mint]) {
|
||||
organizedProofs[mint] = { totalBalance: 0 };
|
||||
}
|
||||
|
||||
organizedProofs[mint] = { ...organizedProofs[mint], [event.id]: { event, balance } };
|
||||
organizedProofs[mint].totalBalance += balance;
|
||||
}
|
||||
return organizedProofs;
|
||||
}
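// Usage sketch (illustrative, not part of the original module): sums the per-mint
// balances produced by organizeProofs. `events` is assumed to be the user's kind 7375
// token events and `signer` a NIP-44-capable signer.
async function _exampleTotalBalance(
  events: NostrEvent[],
  signer: SetRequired<NostrSigner, 'nip44'>,
): Promise<number> {
  const organized = await organizeProofs(events, signer);
  return Object.values(organized).reduce((sum, mint) => sum + mint.totalBalance, 0);
}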
|
||||
|
||||
/** Returns a spending history event that contains the last redeemed nutzap. */
|
||||
async function getLastRedeemedNutzap(
|
||||
store: NStore,
|
||||
pubkey: string,
|
||||
opts?: { signal?: AbortSignal },
|
||||
): Promise<NostrEvent | undefined> {
|
||||
const events = await store.query([{ kinds: [7376], authors: [pubkey] }], { signal: opts?.signal });
|
||||
|
||||
for (const event of events) {
|
||||
const nutzap = event.tags.find(([name]) => name === 'e');
|
||||
const redeemed = nutzap?.[3];
|
||||
if (redeemed === 'redeemed') {
|
||||
return event;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* toBeRedeemed contains the nutzaps that will be redeemed into a kind 7375 token event and recorded in the kind 7376 tags.
|
||||
* The tags format is: [
|
||||
* [ "e", "<9321-event-id>", "<relay-hint>", "redeemed" ], // nutzap event that has been redeemed
|
||||
* [ "p", "<sender-pubkey>" ] // pubkey of the author of the 9321 event (nutzap sender)
|
||||
* ]
|
||||
* https://github.com/nostr-protocol/nips/blob/master/61.md#updating-nutzap-redemption-history
|
||||
*/
|
||||
type MintsToProofs = { [key: string]: { proofs: Proof[]; toBeRedeemed: string[][] } };
|
||||
|
||||
/**
|
||||
* Gets proofs from nutzaps that have not been redeemed yet.
|
||||
* Each proof is associated with a specific mint.
|
||||
* @param store Store used to query for the nutzaps
|
||||
* @param nutzapsFilter Filter used to query for the nutzaps; including a 'since' field
* avoids re-processing nutzaps that have already been redeemed
|
||||
* @param relay Relay hint where the new kind 7376 will be saved
|
||||
* @returns MintsToProofs An object where each key is a mint URL and the value contains an array of proofs
* and an array of redemption tags in this format:
|
||||
* ```
|
||||
* [
|
||||
* ...,
|
||||
* [ "e", "<9321-event-id>", "<relay-hint>", "redeemed" ], // nutzap event that has been redeemed
|
||||
* [ "p", "<sender-pubkey>" ] // pubkey of the author of the 9321 event (nutzap sender)
|
||||
* ]
|
||||
* ```
|
||||
*/
|
||||
async function getMintsToProofs(
|
||||
store: NStore,
|
||||
nutzapsFilter: NostrFilter,
|
||||
relay: string,
|
||||
opts?: { signal?: AbortSignal },
|
||||
): Promise<MintsToProofs> {
|
||||
const mintsToProofs: MintsToProofs = {};
|
||||
|
||||
const nutzaps = await store.query([nutzapsFilter], { signal: opts?.signal });
|
||||
|
||||
for (const event of nutzaps) {
|
||||
try {
|
||||
const mint = event.tags.find(([name]) => name === 'u')?.[1];
|
||||
if (!mint) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const proofs = event.tags.filter(([name]) => name === 'proof').map((tag) => tag[1]).filter(Boolean);
|
||||
if (proofs.length < 1) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!mintsToProofs[mint]) {
|
||||
mintsToProofs[mint] = { proofs: [], toBeRedeemed: [] };
|
||||
}
|
||||
|
||||
const parsed = n.json().pipe(
|
||||
proofSchema,
|
||||
).array().safeParse(proofs);
|
||||
|
||||
if (!parsed.success) {
|
||||
continue;
|
||||
}
|
||||
|
||||
mintsToProofs[mint].proofs = [...mintsToProofs[mint].proofs, ...parsed.data];
|
||||
mintsToProofs[mint].toBeRedeemed = [
|
||||
...mintsToProofs[mint].toBeRedeemed,
|
||||
[
|
||||
'e', // nutzap event that has been redeemed
|
||||
event.id,
|
||||
relay,
|
||||
'redeemed',
|
||||
],
|
||||
['p', event.pubkey], // pubkey of the author of the 9321 event (nutzap sender)
|
||||
];
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: 'ditto.api.cashu.wallet.swap', error: errorJson(e) });
|
||||
}
|
||||
}
|
||||
|
||||
return mintsToProofs;
|
||||
}
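// Usage sketch (illustrative, not part of the original module): only consider nutzaps
// newer than the last redeemed one, mirroring the flow used in the tests. `store`,
// `pubkey`, and `relayUrl` are assumed placeholders.
async function _exampleCollectProofs(
  store: NStore,
  pubkey: string,
  relayUrl: string,
): Promise<MintsToProofs> {
  const nutzapsFilter: NostrFilter = { kinds: [9321], '#p': [pubkey] };

  const lastRedeemed = await getLastRedeemedNutzap(store, pubkey);
  if (lastRedeemed) {
    nutzapsFilter.since = lastRedeemed.created_at;
  }

  return getMintsToProofs(store, nutzapsFilter, relayUrl);
}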
|
||||
|
||||
/** Returns a wallet entity with the latest balance. */
|
||||
async function getWallet(
|
||||
store: NStore,
|
||||
pubkey: string,
|
||||
signer: SetRequired<NostrSigner, 'nip44'>,
|
||||
opts?: { signal?: AbortSignal },
|
||||
): Promise<{ wallet: Wallet; error: null } | { wallet: null; error: CustomError }> {
|
||||
const { data, error } = await validateAndParseWallet(store, signer, pubkey, { signal: opts?.signal });
|
||||
|
||||
if (error) {
|
||||
logi({ level: 'error', ns: 'ditto.cashu.get_wallet', error: errorJson(error) });
|
||||
return { wallet: null, error };
|
||||
}
|
||||
|
||||
const { p2pk, mints, relays } = data;
|
||||
|
||||
let balance = 0;
|
||||
|
||||
const tokens = await store.query([{ authors: [pubkey], kinds: [7375] }], { signal: opts?.signal });
|
||||
for (const token of tokens) {
|
||||
try {
|
||||
const decryptedContent: { mint: string; proofs: Proof[] } = JSON.parse(
|
||||
await signer.nip44.decrypt(pubkey, token.content),
|
||||
);
|
||||
|
||||
if (!mints.includes(decryptedContent.mint)) {
|
||||
mints.push(decryptedContent.mint);
|
||||
}
|
||||
|
||||
balance += decryptedContent.proofs.reduce((accumulator, current) => {
|
||||
return accumulator + current.amount;
|
||||
}, 0);
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: 'ditto.cashu.get_wallet', error: errorJson(e) });
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: maybe change the 'Wallet' type data structure so each mint is a key and the value is the list of tokens associated with that mint
|
||||
const walletEntity: Wallet = {
|
||||
pubkey_p2pk: p2pk,
|
||||
mints,
|
||||
relays,
|
||||
balance,
|
||||
};
|
||||
|
||||
return { wallet: walletEntity, error: null };
|
||||
}
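// Usage sketch (illustrative, not part of the original module): fetch the wallet entity
// and report its balance. `store`, `pubkey`, and `signer` are assumed placeholders.
async function _exampleReportBalance(
  store: NStore,
  pubkey: string,
  signer: SetRequired<NostrSigner, 'nip44'>,
): Promise<void> {
  const { wallet, error } = await getWallet(store, pubkey, signer);
  if (error) {
    console.error(`${error.code}: ${error.message}`);
    return;
  }
  console.log(`Balance: ${wallet.balance} sats across ${wallet.mints.length} mint(s)`);
}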
|
||||
|
||||
/** Serialize an error into JSON for JSON logging. */
|
||||
export function errorJson(error: unknown): Error | null {
|
||||
if (error instanceof Error) {
|
||||
return error;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function isNostrId(value: unknown): boolean {
|
||||
return n.id().safeParse(value).success;
|
||||
}
|
||||
|
||||
export { getLastRedeemedNutzap, getMintsToProofs, getWallet, organizeProofs, validateAndParseWallet };
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
{
|
||||
"name": "@ditto/cashu",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
}
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
export { getLastRedeemedNutzap, getMintsToProofs, getWallet, organizeProofs, validateAndParseWallet } from './cashu.ts';
|
||||
export { proofSchema, tokenEventSchema, type Wallet, walletSchema } from './schemas.ts';
|
||||
export { renderTransaction, type Transaction } from './views.ts';
|
||||
|
|
@ -1,39 +0,0 @@
|
|||
import { NSchema as n } from '@nostrify/nostrify';
|
||||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { proofSchema } from './schemas.ts';
|
||||
import { tokenEventSchema } from './schemas.ts';
|
||||
|
||||
Deno.test('Parse proof', () => {
|
||||
const proof =
|
||||
'{"id":"004f7adf2a04356c","amount":1,"secret":"6780378b186cf7ada639ce4807803ad5e4a71217688430512f35074f9bca99c0","C":"03f0dd8df04427c8c53e4ae9ce8eb91c4880203d6236d1d745c788a5d7a47aaff3","dleq":{"e":"bd22fcdb7ede1edb52b9b8c6e1194939112928e7b4fc0176325e7671fb2bd351","s":"a9ad015571a0e538d62966a16d2facf806fb956c746a3dfa41fa689486431c67","r":"b283980e30bf5a31a45e5e296e93ae9f20bf3a140c884b3b4cd952dbecc521df"}}';
|
||||
|
||||
assertEquals(n.json().pipe(proofSchema).safeParse(proof).success, true);
|
||||
assertEquals(n.json().pipe(proofSchema).safeParse(JSON.parse(proof)).success, false);
|
||||
assertEquals(proofSchema.safeParse(JSON.parse(proof)).success, true);
|
||||
assertEquals(proofSchema.safeParse(proof).success, false);
|
||||
});
|
||||
|
||||
Deno.test('Parse token', () => {
|
||||
const proof = {
|
||||
'id': '004f7adf2a04356c',
|
||||
'amount': 1,
|
||||
'secret': '6780378b186cf7ada639ce4807803ad5e4a71217688430512f35074f9bca99c0',
|
||||
'C': '03f0dd8df04427c8c53e4ae9ce8eb91c4880203d6236d1d745c788a5d7a47aaff3',
|
||||
'dleq': {
|
||||
'e': 'bd22fcdb7ede1edb52b9b8c6e1194939112928e7b4fc0176325e7671fb2bd351',
|
||||
's': 'a9ad015571a0e538d62966a16d2facf806fb956c746a3dfa41fa689486431c67',
|
||||
'r': 'b283980e30bf5a31a45e5e296e93ae9f20bf3a140c884b3b4cd952dbecc521df',
|
||||
},
|
||||
};
|
||||
const token = JSON.stringify({
|
||||
mint: 'https://mint-fashion.com',
|
||||
proofs: [proof],
|
||||
del: [],
|
||||
});
|
||||
|
||||
assertEquals(n.json().pipe(tokenEventSchema).safeParse(token).success, true);
|
||||
assertEquals(n.json().pipe(tokenEventSchema).safeParse(JSON.parse(token)).success, false);
|
||||
assertEquals(tokenEventSchema.safeParse(JSON.parse(token)).success, true);
|
||||
assertEquals(tokenEventSchema.safeParse(tokenEventSchema).success, false);
|
||||
});
|
||||
|
|
@ -1,50 +0,0 @@
|
|||
import { NSchema as n } from '@nostrify/nostrify';
|
||||
import { z } from 'zod';
|
||||
|
||||
export const proofSchema: z.ZodType<{
|
||||
id: string;
|
||||
amount: number;
|
||||
secret: string;
|
||||
C: string;
|
||||
dleq?: { s: string; e: string; r?: string };
|
||||
dleqValid?: boolean;
|
||||
}> = z.object({
|
||||
id: z.string(),
|
||||
amount: z.number(),
|
||||
secret: z.string(),
|
||||
C: z.string(),
|
||||
dleq: z.object({ s: z.string(), e: z.string(), r: z.string().optional() })
|
||||
.optional(),
|
||||
dleqValid: z.boolean().optional(),
|
||||
});
|
||||
|
||||
/** Decrypted content of a kind 7375 */
|
||||
export const tokenEventSchema: z.ZodType<{
|
||||
mint: string;
|
||||
proofs: Array<z.infer<typeof proofSchema>>;
|
||||
del?: string[];
|
||||
}> = z.object({
|
||||
mint: z.string().url(),
|
||||
proofs: proofSchema.array(),
|
||||
del: z.string().array().optional(),
|
||||
});
|
||||
|
||||
/** Ditto Cashu wallet */
|
||||
export const walletSchema: z.ZodType<{
|
||||
pubkey_p2pk: string;
|
||||
mints: string[];
|
||||
relays: string[];
|
||||
balance: number;
|
||||
}> = z.object({
|
||||
pubkey_p2pk: n.id(),
|
||||
mints: z.array(z.string().url()).nonempty().transform((val) => {
|
||||
return [...new Set(val)];
|
||||
}),
|
||||
relays: z.array(z.string()).nonempty().transform((val) => {
|
||||
return [...new Set(val)];
|
||||
}),
|
||||
/** Unit in sats */
|
||||
balance: z.number(),
|
||||
});
|
||||
|
||||
export type Wallet = z.infer<typeof walletSchema>;
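// Usage sketch (illustrative, not part of the original module): walletSchema validates
// the shape and deduplicates mints and relays. The values below are made-up examples.
const _exampleWallet: Wallet = walletSchema.parse({
  pubkey_p2pk: '47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4',
  mints: ['https://mint.soul.com', 'https://mint.soul.com'],
  relays: ['wss://relay.example.com'],
  balance: 38,
});
// _exampleWallet.mints is ['https://mint.soul.com'] after the transform.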
|
||||
|
|
@ -1,85 +0,0 @@
|
|||
import { NSecSigner } from '@nostrify/nostrify';
|
||||
import { NPostgres } from '@nostrify/db';
|
||||
import { genEvent } from '@nostrify/nostrify/test';
|
||||
|
||||
import { generateSecretKey } from 'nostr-tools';
|
||||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { DittoPolyPg, TestDB } from '@ditto/db';
|
||||
import { DittoConf } from '@ditto/conf';
|
||||
import { renderTransaction } from './views.ts';
|
||||
|
||||
Deno.test('renderTransaction function is working', async () => {
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const orig = new DittoPolyPg(conf.databaseUrl);
|
||||
|
||||
await using db = new TestDB(orig);
|
||||
await db.migrate();
|
||||
await db.clear();
|
||||
|
||||
const sk = generateSecretKey();
|
||||
const signer = new NSecSigner(sk);
|
||||
const pubkey = await signer.getPublicKey();
|
||||
|
||||
const relay = new NPostgres(orig.kysely);
|
||||
|
||||
const history1 = genEvent({
|
||||
kind: 7376,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify([
|
||||
['direction', 'in'],
|
||||
['amount', '33'],
|
||||
]),
|
||||
),
|
||||
created_at: Math.floor(Date.now() / 1000), // now
|
||||
}, sk);
|
||||
await relay.event(history1);
|
||||
|
||||
const history2 = genEvent({
|
||||
kind: 7376,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify([
|
||||
['direction', 'out'],
|
||||
['amount', '29'],
|
||||
]),
|
||||
),
|
||||
created_at: Math.floor(Date.now() / 1000) - 1, // now - 1 second
|
||||
}, sk);
|
||||
await relay.event(history2);
|
||||
|
||||
const history3 = genEvent({
|
||||
kind: 7376,
|
||||
content: await signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify([
|
||||
['direction', 'ouch'],
|
||||
['amount', 'yolo'],
|
||||
]),
|
||||
),
|
||||
created_at: Math.floor(Date.now() / 1000) - 2, // now - 2 seconds
|
||||
}, sk);
|
||||
await relay.event(history3);
|
||||
|
||||
const events = await relay.query([{ kinds: [7376], authors: [pubkey], since: history2.created_at }]);
|
||||
|
||||
const transactions = await Promise.all(
|
||||
events.map((event) => {
|
||||
return renderTransaction(event, pubkey, signer);
|
||||
}),
|
||||
);
|
||||
|
||||
assertEquals(transactions, [
|
||||
{
|
||||
direction: 'in',
|
||||
amount: 33,
|
||||
created_at: history1.created_at,
|
||||
},
|
||||
{
|
||||
direction: 'out',
|
||||
amount: 29,
|
||||
created_at: history2.created_at,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
import { type NostrEvent, type NostrSigner, NSchema as n } from '@nostrify/nostrify';
|
||||
import type { SetRequired } from 'type-fest';
|
||||
import { z } from 'zod';
|
||||
|
||||
type Transaction = {
|
||||
amount: number;
|
||||
created_at: number;
|
||||
direction: 'in' | 'out';
|
||||
};
|
||||
|
||||
/** Renders a single transaction history entry. */
|
||||
async function renderTransaction(
|
||||
event: NostrEvent,
|
||||
viewerPubkey: string,
|
||||
signer: SetRequired<NostrSigner, 'nip44'>,
|
||||
): Promise<Transaction | undefined> {
|
||||
if (event.kind !== 7376) return;
|
||||
|
||||
const { data: contentTags, success } = n.json().pipe(z.coerce.string().array().min(2).array()).safeParse(
|
||||
await signer.nip44.decrypt(viewerPubkey, event.content),
|
||||
);
|
||||
|
||||
if (!success) {
|
||||
return;
|
||||
}
|
||||
|
||||
const direction = contentTags.find(([name]) => name === 'direction')?.[1];
|
||||
if (direction !== 'out' && direction !== 'in') {
|
||||
return;
|
||||
}
|
||||
|
||||
const amount = parseInt(contentTags.find(([name]) => name === 'amount')?.[1] ?? '', 10);
|
||||
if (isNaN(amount)) {
|
||||
return;
|
||||
}
|
||||
|
||||
return {
|
||||
created_at: event.created_at,
|
||||
direction,
|
||||
amount,
|
||||
};
|
||||
}
|
||||
|
||||
export { renderTransaction, type Transaction };
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
import { assertEquals, assertThrows } from '@std/assert';
|
||||
|
||||
import { DittoConf } from './DittoConf.ts';
|
||||
|
||||
Deno.test('DittoConfig', async (t) => {
|
||||
const env = new Map<string, string>([
|
||||
['DITTO_NSEC', 'nsec19shyxpuzd0cq2p5078fwnws7tyykypud6z205fzhlmlrs2vpz6hs83zwkw'],
|
||||
]);
|
||||
|
||||
const config = new DittoConf(env);
|
||||
|
||||
await t.step('signer', async () => {
|
||||
assertEquals(
|
||||
await config.signer.getPublicKey(),
|
||||
'1ba0c5ed1bbbf3b7eb0d7843ba16836a0201ea68a76bafcba507358c45911ff6',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
Deno.test('DittoConfig defaults', async (t) => {
|
||||
const env = new Map<string, string>();
|
||||
const config = new DittoConf(env);
|
||||
|
||||
await t.step('signer throws', () => {
|
||||
assertThrows(() => config.signer);
|
||||
});
|
||||
|
||||
await t.step('port', () => {
|
||||
assertEquals(config.port, 4036);
|
||||
});
|
||||
});
|
||||
|
||||
Deno.test('DittoConfig with insecure media host', () => {
|
||||
const env = new Map<string, string>([
|
||||
['LOCAL_DOMAIN', 'https://ditto.test'],
|
||||
['MEDIA_DOMAIN', 'https://ditto.test'],
|
||||
]);
|
||||
|
||||
assertThrows(
|
||||
() => new DittoConf(env),
|
||||
Error,
|
||||
'For security reasons, MEDIA_DOMAIN cannot be on the same host as LOCAL_DOMAIN',
|
||||
);
|
||||
});
|
||||
|
||||
Deno.test('DittoConfig with insecure media host and precheck disabled', () => {
|
||||
const env = new Map<string, string>([
|
||||
['LOCAL_DOMAIN', 'https://ditto.test'],
|
||||
['MEDIA_DOMAIN', 'https://ditto.test'],
|
||||
['DITTO_PRECHECK', 'false'],
|
||||
]);
|
||||
|
||||
new DittoConf(env);
|
||||
});
|
||||
|
|
@ -1,516 +0,0 @@
|
|||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import { NSecSigner } from '@nostrify/nostrify';
|
||||
import { decodeBase64 } from '@std/encoding/base64';
|
||||
import { encodeBase64Url } from '@std/encoding/base64url';
|
||||
import ISO6391, { type LanguageCode } from 'iso-639-1';
|
||||
import { nip19 } from 'nostr-tools';
|
||||
|
||||
import { getEcdsaPublicKey } from './utils/crypto.ts';
|
||||
import { optionalBooleanSchema, optionalNumberSchema } from './utils/schema.ts';
|
||||
import { mergeURLPath } from './utils/url.ts';
|
||||
|
||||
/** Ditto application-wide configuration. */
|
||||
export class DittoConf {
|
||||
constructor(private env: { get(key: string): string | undefined }) {
|
||||
if (this.precheck) {
|
||||
const mediaUrl = new URL(this.mediaDomain);
|
||||
|
||||
if (this.url.host === mediaUrl.host) {
|
||||
throw new Error(
|
||||
'For security reasons, MEDIA_DOMAIN cannot be on the same host as LOCAL_DOMAIN.\n\nTo disable this check, set DITTO_PRECHECK="false"',
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Cached parsed admin signer. */
|
||||
private _signer: NSecSigner | undefined;
|
||||
|
||||
/** Cached parsed VAPID public key value. */
|
||||
private _vapidPublicKey: Promise<string | undefined> | undefined;
|
||||
|
||||
/**
|
||||
* Ditto admin secret key in hex format.
|
||||
* @deprecated Use `signer` instead. TODO: handle auth tokens.
|
||||
*/
|
||||
get seckey(): Uint8Array {
|
||||
const nsec = this.env.get('DITTO_NSEC');
|
||||
|
||||
if (!nsec) {
|
||||
throw new Error('Missing DITTO_NSEC');
|
||||
}
|
||||
|
||||
if (!nsec.startsWith('nsec1')) {
|
||||
throw new Error('Invalid DITTO_NSEC');
|
||||
}
|
||||
|
||||
return nip19.decode(nsec as `nsec1${string}`).data;
|
||||
}
|
||||
|
||||
/** Ditto admin signer. */
|
||||
get signer(): NSecSigner {
|
||||
if (!this._signer) {
|
||||
this._signer = new NSecSigner(this.seckey);
|
||||
}
|
||||
return this._signer;
|
||||
}
|
||||
|
||||
/** Port to use when serving the HTTP server. */
|
||||
get port(): number {
|
||||
return parseInt(this.env.get('PORT') || '4036');
|
||||
}
|
||||
|
||||
/** IP addresses not affected by rate limiting. */
|
||||
get ipWhitelist(): string[] {
|
||||
return this.env.get('IP_WHITELIST')?.split(',') || [];
|
||||
}
|
||||
|
||||
/** Relay URL to the Ditto server's relay. */
|
||||
get relay(): `wss://${string}` | `ws://${string}` {
|
||||
const { protocol, host } = this.url;
|
||||
return `${protocol === 'https:' ? 'wss:' : 'ws:'}//${host}/relay`;
|
||||
}
|
||||
|
||||
/** Relay to use for NIP-50 `search` queries. */
|
||||
get searchRelay(): string | undefined {
|
||||
return this.env.get('SEARCH_RELAY');
|
||||
}
|
||||
|
||||
/** Origin of the Ditto server, including the protocol and port. */
|
||||
get localDomain(): string {
|
||||
return this.env.get('LOCAL_DOMAIN') || `http://localhost:${this.port}`;
|
||||
}
|
||||
|
||||
/** Link to an external nostr viewer. */
|
||||
get externalDomain(): string {
|
||||
return this.env.get('NOSTR_EXTERNAL') || 'https://njump.me';
|
||||
}
|
||||
|
||||
/** Get a link to a nip19-encoded entity in the configured external viewer. */
|
||||
external(path: string): string {
|
||||
return new URL(path, this.externalDomain).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Heroku-style database URL. This is used in production to connect to the
|
||||
* database.
|
||||
*
|
||||
* Follows the format:
|
||||
*
|
||||
* ```txt
|
||||
* protocol://username:password@host:port/database_name
|
||||
* ```
|
||||
*/
|
||||
get databaseUrl(): string {
|
||||
return this.env.get('DATABASE_URL') ?? 'file://data/pgdata';
|
||||
}
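// Examples (illustrative, not part of the original file):
//   DATABASE_URL='postgres://ditto:secret@localhost:5432/ditto'  (Postgres)
//   DATABASE_URL='file://data/pgdata'                            (embedded PGlite, the default)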
|
||||
|
||||
/** PGlite debug level. 0 disables logging. */
|
||||
get pgliteDebug(): 0 | 1 | 2 | 3 | 4 | 5 {
|
||||
return Number(this.env.get('PGLITE_DEBUG') || 0) as 0 | 1 | 2 | 3 | 4 | 5;
|
||||
}
|
||||
|
||||
get vapidPublicKey(): Promise<string | undefined> {
|
||||
if (!this._vapidPublicKey) {
|
||||
this._vapidPublicKey = (async () => {
|
||||
const keys = await this.vapidKeys;
|
||||
if (keys) {
|
||||
const { publicKey } = keys;
|
||||
const bytes = await crypto.subtle.exportKey('raw', publicKey);
|
||||
return encodeBase64Url(bytes);
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
return this._vapidPublicKey;
|
||||
}
|
||||
|
||||
get vapidKeys(): Promise<CryptoKeyPair | undefined> {
|
||||
return (async () => {
|
||||
const encoded = this.env.get('VAPID_PRIVATE_KEY');
|
||||
|
||||
if (!encoded) {
|
||||
return;
|
||||
}
|
||||
|
||||
const keyData = decodeBase64(encoded);
|
||||
|
||||
const privateKey = await crypto.subtle.importKey(
|
||||
'pkcs8',
|
||||
keyData,
|
||||
{ name: 'ECDSA', namedCurve: 'P-256' },
|
||||
true,
|
||||
['sign'],
|
||||
);
|
||||
const publicKey = await getEcdsaPublicKey(privateKey, true);
|
||||
|
||||
return { privateKey, publicKey };
|
||||
})();
|
||||
}
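// Sketch (assumption, not part of the original file): a VAPID_PRIVATE_KEY value compatible
// with this getter can be generated with Web Crypto, e.g.:
//
//   const { privateKey } = await crypto.subtle.generateKey(
//     { name: 'ECDSA', namedCurve: 'P-256' },
//     true,
//     ['sign', 'verify'],
//   );
//   const pkcs8 = await crypto.subtle.exportKey('pkcs8', privateKey);
//   console.log(encodeBase64(new Uint8Array(pkcs8))); // base64 value for VAPID_PRIVATE_KEY
//
// where encodeBase64 comes from '@std/encoding/base64'.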
|
||||
|
||||
get db(): { timeouts: { default: number; relay: number; timelines: number } } {
|
||||
const env = this.env;
|
||||
return {
|
||||
/** Database query timeout configurations. */
|
||||
timeouts: {
|
||||
/** Default query timeout when another setting isn't more specific. */
|
||||
get default(): number {
|
||||
return Number(env.get('DB_TIMEOUT_DEFAULT') || 5_000);
|
||||
},
|
||||
/** Timeout used for queries made through the Nostr relay. */
|
||||
get relay(): number {
|
||||
return Number(env.get('DB_TIMEOUT_RELAY') || 1_000);
|
||||
},
|
||||
/** Timeout used for timelines such as home, notifications, hashtag, etc. */
|
||||
get timelines(): number {
|
||||
return Number(env.get('DB_TIMEOUT_TIMELINES') || 15_000);
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/** Time-to-live for captchas in milliseconds. */
|
||||
get captchaTTL(): number {
|
||||
return Number(this.env.get('CAPTCHA_TTL') || 5 * 60 * 1000);
|
||||
}
|
||||
|
||||
/** Character limit to enforce for posts made through Mastodon API. */
|
||||
get postCharLimit(): number {
|
||||
return Number(this.env.get('POST_CHAR_LIMIT') || 5000);
|
||||
}
|
||||
|
||||
/** S3 media storage configuration. */
|
||||
get s3(): {
|
||||
endPoint?: string;
|
||||
region?: string;
|
||||
accessKey?: string;
|
||||
secretKey?: string;
|
||||
bucket?: string;
|
||||
pathStyle?: boolean;
|
||||
port?: number;
|
||||
sessionToken?: string;
|
||||
useSSL?: boolean;
|
||||
} {
|
||||
const env = this.env;
|
||||
|
||||
return {
|
||||
get endPoint(): string | undefined {
|
||||
return env.get('S3_ENDPOINT');
|
||||
},
|
||||
get region(): string | undefined {
|
||||
return env.get('S3_REGION');
|
||||
},
|
||||
get accessKey(): string | undefined {
|
||||
return env.get('S3_ACCESS_KEY');
|
||||
},
|
||||
get secretKey(): string | undefined {
|
||||
return env.get('S3_SECRET_KEY');
|
||||
},
|
||||
get bucket(): string | undefined {
|
||||
return env.get('S3_BUCKET');
|
||||
},
|
||||
get pathStyle(): boolean | undefined {
|
||||
return optionalBooleanSchema.parse(env.get('S3_PATH_STYLE'));
|
||||
},
|
||||
get port(): number | undefined {
|
||||
return optionalNumberSchema.parse(env.get('S3_PORT'));
|
||||
},
|
||||
get sessionToken(): string | undefined {
|
||||
return env.get('S3_SESSION_TOKEN');
|
||||
},
|
||||
get useSSL(): boolean | undefined {
|
||||
return optionalBooleanSchema.parse(env.get('S3_USE_SSL'));
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/** IPFS uploader configuration. */
|
||||
get ipfs(): { apiUrl: string } {
|
||||
const env = this.env;
|
||||
|
||||
return {
|
||||
/** Base URL for private IPFS API calls. */
|
||||
get apiUrl(): string {
|
||||
return env.get('IPFS_API_URL') || 'http://localhost:5001';
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* The logging configuration for the Ditto server. The config is derived from
* the LOG_CONFIG environment variable and is parsed as follows:
*
* `LOG_CONFIG='<jsonl|pretty>:<minimum log level to show>:<comma-separated scopes to show>'`.
* If the scopes are empty (e.g. in 'pretty:warn:'), then all scopes are shown.
|
||||
*/
|
||||
get logConfig(): {
|
||||
fmt: 'jsonl' | 'pretty';
|
||||
level: string;
|
||||
scopes: string[];
|
||||
} {
|
||||
let [fmt, level, scopes] = (this.env.get('LOG_CONFIG') || '').split(':');
|
||||
fmt ||= 'jsonl';
|
||||
level ||= 'debug';
|
||||
scopes ||= '';
|
||||
|
||||
if (fmt !== 'jsonl' && fmt !== 'pretty') fmt = 'jsonl';
|
||||
|
||||
return {
|
||||
fmt: fmt as 'jsonl' | 'pretty',
|
||||
level,
|
||||
scopes: scopes.split(',').filter(Boolean),
|
||||
};
|
||||
}
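// Example (illustrative): LOG_CONFIG='pretty:info:ditto.sql,ditto.api' parses to
// { fmt: 'pretty', level: 'info', scopes: ['ditto.sql', 'ditto.api'] }, while an
// unset LOG_CONFIG falls back to { fmt: 'jsonl', level: 'debug', scopes: [] }.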
|
||||
|
||||
/** nostr.build API endpoint when the `nostrbuild` uploader is used. */
|
||||
get nostrbuildEndpoint(): string {
|
||||
return this.env.get('NOSTRBUILD_ENDPOINT') || 'https://nostr.build/api/v2/upload/files';
|
||||
}
|
||||
|
||||
/** Default Blossom servers to use when the `blossom` uploader is set. */
|
||||
get blossomServers(): string[] {
|
||||
return this.env.get('BLOSSOM_SERVERS')?.split(',') || ['https://blossom.primal.net/'];
|
||||
}
|
||||
|
||||
/** Module to upload files with. */
|
||||
get uploader(): string | undefined {
|
||||
return this.env.get('DITTO_UPLOADER');
|
||||
}
|
||||
|
||||
/** Location to use for local uploads. */
|
||||
get uploadsDir(): string {
|
||||
return this.env.get('UPLOADS_DIR') || 'data/uploads';
|
||||
}
|
||||
|
||||
/** Media base URL for uploads. */
|
||||
get mediaDomain(): string {
|
||||
const value = this.env.get('MEDIA_DOMAIN');
|
||||
|
||||
if (!value) {
|
||||
const url = this.url;
|
||||
url.host = `media.${url.host}`;
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether to analyze media metadata with [blurhash](https://www.npmjs.com/package/blurhash) and [sharp](https://www.npmjs.com/package/sharp).
|
||||
* This is prone to security vulnerabilities, which is why it's not enabled by default.
|
||||
*/
|
||||
get mediaAnalyze(): boolean {
|
||||
return optionalBooleanSchema.parse(this.env.get('MEDIA_ANALYZE')) ?? false;
|
||||
}
|
||||
|
||||
/** Whether to transcode uploaded video files with ffmpeg. */
|
||||
get mediaTranscode(): boolean {
|
||||
return optionalBooleanSchema.parse(this.env.get('MEDIA_TRANSCODE')) ?? false;
|
||||
}
|
||||
|
||||
/** Max upload size for files in number of bytes. Default 100MiB. */
|
||||
get maxUploadSize(): number {
|
||||
return Number(this.env.get('MAX_UPLOAD_SIZE') || 100 * 1024 * 1024);
|
||||
}
|
||||
|
||||
/** Usernames that regular users cannot sign up with. */
|
||||
get forbiddenUsernames(): string[] {
|
||||
return this.env.get('FORBIDDEN_USERNAMES')?.split(',') || [
|
||||
'_',
|
||||
'admin',
|
||||
'administrator',
|
||||
'root',
|
||||
'sysadmin',
|
||||
'system',
|
||||
];
|
||||
}
|
||||
|
||||
/** Domain of the Ditto server as a `URL` object, for easily grabbing the `hostname`, etc. */
|
||||
get url(): URL {
|
||||
return new URL(this.localDomain);
|
||||
}
|
||||
|
||||
/** Merges the path with the localDomain. */
|
||||
local(path: string): string {
|
||||
return mergeURLPath(this.localDomain, path);
|
||||
}
|
||||
|
||||
/** URL to send Sentry errors to. */
|
||||
get sentryDsn(): string | undefined {
|
||||
return this.env.get('SENTRY_DSN');
|
||||
}
|
||||
|
||||
/** Postgres settings. */
|
||||
get pg(): { poolSize: number } {
|
||||
const env = this.env;
|
||||
|
||||
return {
|
||||
/** Number of connections to use in the pool. */
|
||||
get poolSize(): number {
|
||||
return Number(env.get('PG_POOL_SIZE') ?? 20);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/** Whether to enable requesting events from known relays. */
|
||||
get firehoseEnabled(): boolean {
|
||||
return optionalBooleanSchema.parse(this.env.get('FIREHOSE_ENABLED')) ?? true;
|
||||
}
|
||||
|
||||
/** Number of events the firehose is allowed to process at one time before they have to wait in a queue. */
|
||||
get firehoseConcurrency(): number {
|
||||
return Math.ceil(Number(this.env.get('FIREHOSE_CONCURRENCY') ?? 1));
|
||||
}
|
||||
|
||||
/** Nostr event kinds of events to listen for on the firehose. */
|
||||
get firehoseKinds(): number[] {
|
||||
return (this.env.get('FIREHOSE_KINDS') ?? '0, 1, 3, 5, 6, 7, 20, 9735, 10002')
|
||||
.split(/[, ]+/g)
|
||||
.map(Number);
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether Ditto should subscribe to Nostr events from the Postgres database itself.
|
||||
* This would make Nostr events inserted directly into Postgres available to the streaming API and relay.
|
||||
*/
|
||||
get notifyEnabled(): boolean {
|
||||
return optionalBooleanSchema.parse(this.env.get('NOTIFY_ENABLED')) ?? true;
|
||||
}
|
||||
|
||||
/** Whether to enable Ditto cron jobs. */
|
||||
get cronEnabled(): boolean {
|
||||
return optionalBooleanSchema.parse(this.env.get('CRON_ENABLED')) ?? true;
|
||||
}
|
||||
|
||||
/** User-Agent to use when fetching link previews. Pretend to be Facebook by default. */
|
||||
get fetchUserAgent(): string {
|
||||
return this.env.get('DITTO_FETCH_USER_AGENT') ?? 'facebookexternalhit';
|
||||
}
|
||||
|
||||
/** Path to the custom policy module. Must be an absolute path, https:, npm:, or jsr: URI. */
|
||||
get policy(): string {
|
||||
return this.env.get('DITTO_POLICY') || path.join(this.dataDir, 'policy.ts');
|
||||
}
|
||||
|
||||
/** Absolute path to the data directory used by Ditto. */
|
||||
get dataDir(): string {
|
||||
return this.env.get('DITTO_DATA_DIR') || path.join(Deno.cwd(), 'data');
|
||||
}
|
||||
|
||||
/** Absolute path of the Deno directory. */
|
||||
get denoDir(): string {
|
||||
return this.env.get('DENO_DIR') || `${os.userInfo().homedir}/.cache/deno`;
|
||||
}
|
||||
|
||||
/** Whether zap splits should be enabled. */
|
||||
get zapSplitsEnabled(): boolean {
|
||||
return optionalBooleanSchema.parse(this.env.get('ZAP_SPLITS_ENABLED')) ?? false;
|
||||
}
|
||||
|
||||
/** Languages this server wishes to highlight. Used when querying trends. */
|
||||
get preferredLanguages(): LanguageCode[] | undefined {
|
||||
return this.env.get('DITTO_LANGUAGES')?.split(',')?.filter(ISO6391.validate);
|
||||
}
|
||||
|
||||
/** Mints to be displayed in the UI when the user decides to create a wallet. */
|
||||
get cashuMints(): string[] {
|
||||
return this.env.get('CASHU_MINTS')?.split(',') ?? [];
|
||||
}
|
||||
|
||||
/** Translation provider used to translate posts. */
|
||||
get translationProvider(): string | undefined {
|
||||
return this.env.get('TRANSLATION_PROVIDER');
|
||||
}
|
||||
|
||||
/** DeepL URL endpoint. */
|
||||
get deeplBaseUrl(): string | undefined {
|
||||
return this.env.get('DEEPL_BASE_URL');
|
||||
}
|
||||
|
||||
/** DeepL API KEY. */
|
||||
get deeplApiKey(): string | undefined {
|
||||
return this.env.get('DEEPL_API_KEY');
|
||||
}
|
||||
|
||||
/** LibreTranslate URL endpoint. */
|
||||
get libretranslateBaseUrl(): string | undefined {
|
||||
return this.env.get('LIBRETRANSLATE_BASE_URL');
|
||||
}
|
||||
|
||||
/** LibreTranslate API KEY. */
|
||||
get libretranslateApiKey(): string | undefined {
|
||||
return this.env.get('LIBRETRANSLATE_API_KEY');
|
||||
}
|
||||
|
||||
/** Cache settings. */
|
||||
get caches(): {
|
||||
nip05: { max: number; ttl: number };
|
||||
favicon: { max: number; ttl: number };
|
||||
translation: { max: number; ttl: number };
|
||||
} {
|
||||
const env = this.env;
|
||||
|
||||
return {
|
||||
/** NIP-05 cache settings. */
|
||||
get nip05(): { max: number; ttl: number } {
|
||||
return {
|
||||
max: Number(env.get('DITTO_CACHE_NIP05_MAX') || 3000),
|
||||
ttl: Number(env.get('DITTO_CACHE_NIP05_TTL') || 1 * 60 * 60 * 1000),
|
||||
};
|
||||
},
|
||||
/** Favicon cache settings. */
|
||||
get favicon(): { max: number; ttl: number } {
|
||||
return {
|
||||
max: Number(env.get('DITTO_CACHE_FAVICON_MAX') || 500),
|
||||
ttl: Number(env.get('DITTO_CACHE_FAVICON_TTL') || 1 * 60 * 60 * 1000),
|
||||
};
|
||||
},
|
||||
/** Translation cache settings. */
|
||||
get translation(): { max: number; ttl: number } {
|
||||
return {
|
||||
max: Number(env.get('DITTO_CACHE_TRANSLATION_MAX') || 1000),
|
||||
ttl: Number(env.get('DITTO_CACHE_TRANSLATION_TTL') || 6 * 60 * 60 * 1000),
|
||||
};
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/** Custom profile fields configuration. */
|
||||
get profileFields(): { maxFields: number; nameLength: number; valueLength: number } {
|
||||
const env = this.env;
|
||||
|
||||
return {
|
||||
get maxFields(): number {
|
||||
return Number(env.get('PROFILE_FIELDS_MAX_FIELDS') || 10);
|
||||
},
|
||||
get nameLength(): number {
|
||||
return Number(env.get('PROFILE_FIELDS_NAME_LENGTH') || 255);
|
||||
},
|
||||
get valueLength(): number {
|
||||
return Number(env.get('PROFILE_FIELDS_VALUE_LENGTH') || 2047);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/** Maximum time between events before a streak is broken, *in seconds*. */
|
||||
get streakWindow(): number {
|
||||
return Number(this.env.get('STREAK_WINDOW') || 129600);
|
||||
}
|
||||
|
||||
/** Whether to perform security/configuration checks on startup. */
|
||||
get precheck(): boolean {
|
||||
return optionalBooleanSchema.parse(this.env.get('DITTO_PRECHECK')) ?? true;
|
||||
}
|
||||
|
||||
/** Path to `ffmpeg` executable. */
|
||||
get ffmpegPath(): string {
|
||||
return this.env.get('FFMPEG_PATH') || 'ffmpeg';
|
||||
}
|
||||
|
||||
/** Path to `ffprobe` executable. */
|
||||
get ffprobePath(): string {
|
||||
return this.env.get('FFPROBE_PATH') || 'ffprobe';
|
||||
}
|
||||
}
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
{
|
||||
"name": "@ditto/conf",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
export { DittoConf } from './DittoConf.ts';
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
import { assertEquals, assertThrows } from '@std/assert';
|
||||
|
||||
import { optionalBooleanSchema, optionalNumberSchema } from './schema.ts';
|
||||
|
||||
Deno.test('optionalBooleanSchema', () => {
|
||||
assertEquals(optionalBooleanSchema.parse('true'), true);
|
||||
assertEquals(optionalBooleanSchema.parse('false'), false);
|
||||
assertEquals(optionalBooleanSchema.parse(undefined), undefined);
|
||||
|
||||
assertThrows(() => optionalBooleanSchema.parse('invalid'));
|
||||
});
|
||||
|
||||
Deno.test('optionalNumberSchema', () => {
|
||||
assertEquals(optionalNumberSchema.parse('123'), 123);
|
||||
assertEquals(optionalNumberSchema.parse('invalid'), NaN); // maybe this should throw?
|
||||
assertEquals(optionalNumberSchema.parse(undefined), undefined);
|
||||
});
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
import { z } from 'zod';
|
||||
|
||||
export const optionalBooleanSchema = z
|
||||
.enum(['true', 'false'])
|
||||
.optional()
|
||||
.transform((value) => value !== undefined ? value === 'true' : undefined);
|
||||
|
||||
export const optionalNumberSchema = z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((value) => value !== undefined ? Number(value) : undefined);
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { mergeURLPath } from './url.ts';
|
||||
|
||||
Deno.test('mergeURLPath', () => {
|
||||
assertEquals(mergeURLPath('https://mario.com', '/path'), 'https://mario.com/path');
|
||||
assertEquals(mergeURLPath('https://mario.com', 'https://luigi.com/path'), 'https://mario.com/path');
|
||||
assertEquals(mergeURLPath('https://mario.com', 'https://luigi.com/path?q=1'), 'https://mario.com/path?q=1');
|
||||
});
|
||||
|
|
@ -1,23 +0,0 @@
|
|||
/**
|
||||
* Produce a URL whose origin is guaranteed to be the same as the base URL.
|
||||
* The path is either an absolute path (starting with `/`), or a full URL. In either case, only its path and query string are used.
|
||||
*/
|
||||
export function mergeURLPath(
|
||||
/** Base URL. Result is guaranteed to use this URL's origin. */
|
||||
base: string,
|
||||
/** Either an absolute path (starting with `/`), or a full URL. If a full URL, its path and query string are used. */
|
||||
path: string,
|
||||
): string {
|
||||
const url = new URL(
|
||||
path.startsWith('/') ? path : new URL(path).pathname,
|
||||
base,
|
||||
);
|
||||
|
||||
if (!path.startsWith('/')) {
|
||||
// Copy query parameters from the original URL to the new URL
|
||||
const originalUrl = new URL(path);
|
||||
url.search = originalUrl.search;
|
||||
}
|
||||
|
||||
return url.toString();
|
||||
}
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
import fs from 'node:fs/promises';
|
||||
import path from 'node:path';
|
||||
|
||||
import { logi } from '@soapbox/logi';
|
||||
import { FileMigrationProvider, type Kysely, Migrator } from 'kysely';
|
||||
|
||||
import type { JsonValue } from '@std/json';
|
||||
|
||||
export class DittoPgMigrator {
|
||||
private migrator: Migrator;
|
||||
|
||||
// deno-lint-ignore no-explicit-any
|
||||
constructor(private kysely: Kysely<any>) {
|
||||
this.migrator = new Migrator({
|
||||
db: this.kysely,
|
||||
provider: new FileMigrationProvider({
|
||||
fs,
|
||||
path,
|
||||
migrationFolder: new URL(import.meta.resolve('./migrations')).pathname,
|
||||
}),
|
||||
});
|
||||
}
|
||||
|
||||
async migrate(): Promise<void> {
|
||||
logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Running migrations...', state: 'started' });
|
||||
const { results, error } = await this.migrator.migrateToLatest();
|
||||
|
||||
if (error) {
|
||||
logi({
|
||||
level: 'fatal',
|
||||
ns: 'ditto.db.migration',
|
||||
msg: 'Migration failed.',
|
||||
state: 'failed',
|
||||
results: results as unknown as JsonValue,
|
||||
error: error instanceof Error ? error : null,
|
||||
});
|
||||
throw new Error('Migration failed.');
|
||||
} else {
|
||||
if (!results?.length) {
|
||||
logi({ level: 'info', ns: 'ditto.db.migration', msg: 'Everything up-to-date.', state: 'skipped' });
|
||||
} else {
|
||||
logi({
|
||||
level: 'info',
|
||||
ns: 'ditto.db.migration',
|
||||
msg: 'Migrations finished!',
|
||||
state: 'migrated',
|
||||
results: results as unknown as JsonValue,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
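// Usage sketch (illustrative, not part of the original file): `kysely` is assumed to be
// a Kysely instance connected to the Ditto database.
//
//   const migrator = new DittoPgMigrator(kysely);
//   await migrator.migrate(); // throws if any migration fails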
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
import { dbQueriesCounter, dbQueryDurationHistogram } from '@ditto/metrics';
|
||||
import { logi, type LogiValue } from '@soapbox/logi';
|
||||
|
||||
import type { Logger } from 'kysely';
|
||||
|
||||
/** Log the SQL for queries. */
|
||||
export const KyselyLogger: Logger = (event) => {
|
||||
const { query, queryDurationMillis } = event;
|
||||
const { parameters, sql } = query;
|
||||
|
||||
const duration = queryDurationMillis / 1000;
|
||||
|
||||
dbQueriesCounter.inc();
|
||||
dbQueryDurationHistogram.observe(duration);
|
||||
|
||||
if (event.level === 'query') {
|
||||
logi({ level: 'trace', ns: 'ditto.sql', sql, parameters: parameters as LogiValue, duration });
|
||||
}
|
||||
|
||||
if (event.level === 'error') {
|
||||
if (event.error instanceof Error) {
|
||||
switch (event.error.message) {
|
||||
case 'duplicate key value violates unique constraint "nostr_events_pkey"':
|
||||
case 'duplicate key value violates unique constraint "author_stats_pkey"':
|
||||
case 'duplicate key value violates unique constraint "event_stats_pkey"':
|
||||
case 'duplicate key value violates unique constraint "event_zaps_pkey"':
|
||||
case 'insert or update on table "event_stats" violates foreign key constraint "event_stats_event_id_fkey"':
|
||||
return; // Don't log expected errors
|
||||
}
|
||||
}
|
||||
|
||||
logi({
|
||||
level: 'error',
|
||||
ns: 'ditto.sql',
|
||||
sql,
|
||||
parameters: parameters as LogiValue,
|
||||
error: event.error instanceof Error ? event.error : null,
|
||||
duration,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
import { assertEquals, assertRejects } from '@std/assert';
|
||||
|
||||
import { DittoPglite } from './DittoPglite.ts';
|
||||
|
||||
Deno.test('DittoPglite', async () => {
|
||||
await using db = new DittoPglite('memory://');
|
||||
await db.migrate();
|
||||
|
||||
assertEquals(db.poolSize, 1);
|
||||
assertEquals(db.availableConnections, 1);
|
||||
});
|
||||
|
||||
Deno.test('DittoPglite query after closing', async () => {
|
||||
const db = new DittoPglite('memory://');
|
||||
await db[Symbol.asyncDispose]();
|
||||
|
||||
await assertRejects(
|
||||
() => db.kysely.selectFrom('nostr_events').selectAll().execute(),
|
||||
Error,
|
||||
'PGlite is closed',
|
||||
);
|
||||
});
|
||||
|
|
@ -1,62 +0,0 @@
|
|||
import { PGlite } from '@electric-sql/pglite';
|
||||
import { pg_trgm } from '@electric-sql/pglite/contrib/pg_trgm';
|
||||
import { PgliteDialect } from '@soapbox/kysely-pglite';
|
||||
import { Kysely } from 'kysely';
|
||||
|
||||
import { KyselyLogger } from '../KyselyLogger.ts';
|
||||
import { DittoPgMigrator } from '../DittoPgMigrator.ts';
|
||||
import { isWorker } from '../utils/worker.ts';
|
||||
|
||||
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
|
||||
import type { DittoTables } from '../DittoTables.ts';
|
||||
|
||||
export class DittoPglite implements DittoDB {
|
||||
readonly poolSize = 1;
|
||||
readonly availableConnections = 1;
|
||||
readonly kysely: Kysely<DittoTables>;
|
||||
|
||||
private pglite: PGlite;
|
||||
private migrator: DittoPgMigrator;
|
||||
|
||||
constructor(databaseUrl: string, opts?: DittoDBOpts) {
|
||||
const url = new URL(databaseUrl);
|
||||
|
||||
if (url.protocol === 'file:' && isWorker()) {
|
||||
throw new Error('PGlite is not supported in worker threads.');
|
||||
}
|
||||
|
||||
this.pglite = new PGlite(databaseUrl, {
|
||||
extensions: { pg_trgm },
|
||||
debug: opts?.debug,
|
||||
});
|
||||
|
||||
this.kysely = new Kysely<DittoTables>({
|
||||
dialect: new PgliteDialect({ database: this.pglite }),
|
||||
log: KyselyLogger,
|
||||
});
|
||||
|
||||
this.migrator = new DittoPgMigrator(this.kysely);
|
||||
}
|
||||
|
||||
listen(channel: string, callback: (payload: string) => void): void {
|
||||
this.pglite.listen(channel, callback);
|
||||
}
|
||||
|
||||
async migrate(): Promise<void> {
|
||||
await this.migrator.migrate();
|
||||
}
|
||||
|
||||
async [Symbol.asyncDispose](): Promise<void> {
|
||||
try {
|
||||
// FIXME: `kysely.destroy()` calls `pglite.close()` internally, but it doesn't work.
|
||||
await this.pglite.close();
|
||||
await this.kysely.destroy();
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message === 'PGlite is closed') {
|
||||
// Make dispose idempotent.
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
import { DittoPolyPg } from './DittoPolyPg.ts';
|
||||
|
||||
Deno.test('DittoPolyPg', async () => {
|
||||
const db = new DittoPolyPg('memory://');
|
||||
await db.migrate();
|
||||
});
|
||||
|
|
@ -1,53 +0,0 @@
|
|||
import { DittoPglite } from './DittoPglite.ts';
|
||||
import { DittoPostgres } from './DittoPostgres.ts';
|
||||
|
||||
import type { Kysely } from 'kysely';
|
||||
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
|
||||
import type { DittoTables } from '../DittoTables.ts';
|
||||
|
||||
/** Creates either a PGlite or Postgres connection depending on the databaseUrl. */
|
||||
export class DittoPolyPg implements DittoDB {
|
||||
private adapter: DittoDB;
|
||||
|
||||
/** Open a new database connection. */
|
||||
constructor(databaseUrl: string, opts?: DittoDBOpts) {
|
||||
const { protocol } = new URL(databaseUrl);
|
||||
|
||||
switch (protocol) {
|
||||
case 'file:':
|
||||
case 'memory:':
|
||||
this.adapter = new DittoPglite(databaseUrl, opts);
|
||||
break;
|
||||
case 'postgres:':
|
||||
case 'postgresql:':
|
||||
this.adapter = new DittoPostgres(databaseUrl, opts);
|
||||
break;
|
||||
default:
|
||||
throw new Error('Unsupported database URL.');
|
||||
}
|
||||
}
|
||||
|
||||
get kysely(): Kysely<DittoTables> {
|
||||
return this.adapter.kysely;
|
||||
}
|
||||
|
||||
async migrate(): Promise<void> {
|
||||
await this.adapter.migrate();
|
||||
}
|
||||
|
||||
listen(channel: string, callback: (payload: string) => void): void {
|
||||
this.adapter.listen(channel, callback);
|
||||
}
|
||||
|
||||
get poolSize(): number {
|
||||
return this.adapter.poolSize;
|
||||
}
|
||||
|
||||
get availableConnections(): number {
|
||||
return this.adapter.availableConnections;
|
||||
}
|
||||
|
||||
async [Symbol.asyncDispose](): Promise<void> {
|
||||
await this.adapter[Symbol.asyncDispose]();
|
||||
}
|
||||
}
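A usage sketch (the URLs are illustrative, not taken from the diff) showing how the URL protocol selects the adapter:

import { DittoPolyPg } from '@ditto/db';

// 'memory:' and 'file:' URLs are handled by PGlite (embedded Postgres);
// 'postgres:' / 'postgresql:' URLs are handled by postgres-js.
await using db = new DittoPolyPg('memory://');
// const onDisk = new DittoPolyPg('file://./data/pgdata');
// const hosted = new DittoPolyPg('postgres://localhost/ditto', { poolSize: 5 });

await db.migrate();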
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
|
||||
import { DittoPostgres } from './DittoPostgres.ts';
|
||||
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const isPostgres = /^postgres(?:ql)?:/.test(conf.databaseUrl);
|
||||
|
||||
Deno.test('DittoPostgres', { ignore: !isPostgres }, async () => {
|
||||
await using db = new DittoPostgres(conf.databaseUrl);
|
||||
await db.migrate();
|
||||
});
|
||||
|
||||
// FIXME: There is a problem with postgres-js where queries just hang after the database is closed.
|
||||
|
||||
// Deno.test('DittoPostgres query after closing', { ignore: !isPostgres }, async () => {
|
||||
// const db = new DittoPostgres(conf.databaseUrl);
|
||||
// await db[Symbol.asyncDispose]();
|
||||
//
|
||||
// await assertRejects(
|
||||
// () => db.kysely.selectFrom('nostr_events').selectAll().execute(),
|
||||
// );
|
||||
// });
|
||||
|
|
@ -1,79 +0,0 @@
|
|||
import {
|
||||
type BinaryOperationNode,
|
||||
FunctionNode,
|
||||
Kysely,
|
||||
OperatorNode,
|
||||
PostgresAdapter,
|
||||
PostgresIntrospector,
|
||||
PostgresQueryCompiler,
|
||||
PrimitiveValueListNode,
|
||||
ValueNode,
|
||||
} from 'kysely';
|
||||
import { type PostgresJSDialectConfig, PostgresJSDriver } from 'kysely-postgres-js';
|
||||
import postgres from 'postgres';
|
||||
|
||||
import { DittoPgMigrator } from '../DittoPgMigrator.ts';
|
||||
import { KyselyLogger } from '../KyselyLogger.ts';
|
||||
|
||||
import type { DittoDB, DittoDBOpts } from '../DittoDB.ts';
|
||||
import type { DittoTables } from '../DittoTables.ts';
|
||||
|
||||
export class DittoPostgres implements DittoDB {
|
||||
private pg: ReturnType<typeof postgres>;
|
||||
private migrator: DittoPgMigrator;
|
||||
|
||||
readonly kysely: Kysely<DittoTables>;
|
||||
|
||||
constructor(databaseUrl: string, opts?: DittoDBOpts) {
|
||||
this.pg = postgres(databaseUrl, { max: opts?.poolSize });
|
||||
|
||||
this.kysely = new Kysely<DittoTables>({
|
||||
dialect: {
|
||||
createAdapter: () => new PostgresAdapter(),
|
||||
createDriver: () =>
|
||||
new PostgresJSDriver({ postgres: this.pg as unknown as PostgresJSDialectConfig['postgres'] }),
|
||||
createIntrospector: (db) => new PostgresIntrospector(db),
|
||||
createQueryCompiler: () => new DittoPostgresQueryCompiler(),
|
||||
},
|
||||
log: KyselyLogger,
|
||||
});
|
||||
|
||||
this.migrator = new DittoPgMigrator(this.kysely);
|
||||
}
|
||||
|
||||
listen(channel: string, callback: (payload: string) => void): void {
|
||||
this.pg.listen(channel, callback);
|
||||
}
|
||||
|
||||
async migrate(): Promise<void> {
|
||||
await this.migrator.migrate();
|
||||
}
|
||||
|
||||
get poolSize(): number {
|
||||
return this.pg.connections.open;
|
||||
}
|
||||
|
||||
get availableConnections(): number {
|
||||
return this.pg.connections.idle;
|
||||
}
|
||||
|
||||
async [Symbol.asyncDispose](): Promise<void> {
|
||||
await this.pg.end({ timeout: 0 }); // force-close the connections
|
||||
await this.kysely.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
/** Converts `in` queries to `any` to improve prepared statements on Postgres. */
|
||||
class DittoPostgresQueryCompiler extends PostgresQueryCompiler {
|
||||
protected override visitBinaryOperation(node: BinaryOperationNode): void {
|
||||
if (
|
||||
OperatorNode.is(node.operator) && node.operator.operator === 'in' && PrimitiveValueListNode.is(node.rightOperand)
|
||||
) {
|
||||
this.visitNode(node.leftOperand);
|
||||
this.append(' = ');
|
||||
this.visitNode(FunctionNode.create('any', [ValueNode.create(node.rightOperand.values)]));
|
||||
} else {
|
||||
super.visitBinaryOperation(node);
|
||||
}
|
||||
}
|
||||
}
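To illustrate what the compiler override changes (output is approximate and not taken from the diff): with the stock `PostgresQueryCompiler` an `in` filter emits one placeholder per list element, so the SQL text, and with it the prepared statement, changes with the list length. The override emits a single array parameter instead:

import { DittoPostgres } from '@ditto/db';

const db = new DittoPostgres('postgres://localhost/ditto');

// `.compile()` only builds the query; nothing is sent to the database.
const { sql, parameters } = db.kysely
  .selectFrom('nostr_events')
  .selectAll()
  .where('kind', 'in', [0, 1, 3])
  .compile();

// sql        ≈ select * from "nostr_events" where "kind" = any($1)
// parameters ≈ [[0, 1, 3]]  (one array parameter, whatever the list length)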
|
||||
|
|
@ -1,11 +0,0 @@
|
|||
import { assertEquals } from '@std/assert';
|
||||
import { DummyDB } from './DummyDB.ts';
|
||||
|
||||
Deno.test('DummyDB', async () => {
|
||||
const db = new DummyDB();
|
||||
await db.migrate();
|
||||
|
||||
const rows = await db.kysely.selectFrom('nostr_events').selectAll().execute();
|
||||
|
||||
assertEquals(rows, []);
|
||||
});
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
import { DummyDriver, Kysely, PostgresAdapter, PostgresIntrospector, PostgresQueryCompiler } from 'kysely';
|
||||
|
||||
import type { DittoDB } from '../DittoDB.ts';
|
||||
import type { DittoTables } from '../DittoTables.ts';
|
||||
|
||||
export class DummyDB implements DittoDB {
|
||||
readonly kysely: Kysely<DittoTables>;
|
||||
readonly poolSize = 0;
|
||||
readonly availableConnections = 0;
|
||||
|
||||
constructor() {
|
||||
this.kysely = new Kysely<DittoTables>({
|
||||
dialect: {
|
||||
createAdapter: () => new PostgresAdapter(),
|
||||
createDriver: () => new DummyDriver(),
|
||||
createIntrospector: (db) => new PostgresIntrospector(db),
|
||||
createQueryCompiler: () => new PostgresQueryCompiler(),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
listen(): void {
|
||||
// noop
|
||||
}
|
||||
|
||||
migrate(): Promise<void> {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
[Symbol.asyncDispose](): Promise<void> {
|
||||
return Promise.resolve();
|
||||
}
|
||||
}
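Because `DummyDriver` never opens a connection, `DummyDB` runs every query as a no-op that resolves to an empty result set, which is what the test above relies on. It can also be used to compile queries without a database (a small sketch):

import { DummyDB } from '@ditto/db';

const db = new DummyDB();

// Resolves to [] without touching any database.
const rows = await db.kysely.selectFrom('nostr_events').selectAll().execute();

// The Postgres query compiler still works, so the SQL can be inspected offline.
const { sql } = db.kysely
  .selectFrom('nostr_events')
  .select(['id', 'kind'])
  .where('kind', '=', 1)
  .compile();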
|
||||
|
|
@ -1,25 +0,0 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
import { NPostgres } from '@nostrify/db';
|
||||
import { genEvent } from '@nostrify/nostrify/test';
|
||||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { DittoPolyPg } from './DittoPolyPg.ts';
|
||||
import { TestDB } from './TestDB.ts';
|
||||
|
||||
Deno.test('TestDB', async () => {
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const orig = new DittoPolyPg(conf.databaseUrl);
|
||||
|
||||
await using db = new TestDB(orig);
|
||||
await db.migrate();
|
||||
await db.clear();
|
||||
|
||||
const store = new NPostgres(orig.kysely);
|
||||
await store.event(genEvent());
|
||||
|
||||
assertEquals((await store.count([{}])).count, 1);
|
||||
|
||||
await db.clear();
|
||||
|
||||
assertEquals((await store.count([{}])).count, 0);
|
||||
});
|
||||
|
|
@ -1,49 +0,0 @@
|
|||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
import type { DittoDB } from '../DittoDB.ts';
|
||||
import type { DittoTables } from '../DittoTables.ts';
|
||||
|
||||
/** Wraps another DittoDB implementation to clear all data when disposed. */
|
||||
export class TestDB implements DittoDB {
|
||||
constructor(private db: DittoDB) {}
|
||||
|
||||
get kysely(): Kysely<DittoTables> {
|
||||
return this.db.kysely;
|
||||
}
|
||||
|
||||
get poolSize(): number {
|
||||
return this.db.poolSize;
|
||||
}
|
||||
|
||||
get availableConnections(): number {
|
||||
return this.db.availableConnections;
|
||||
}
|
||||
|
||||
migrate(): Promise<void> {
|
||||
return this.db.migrate();
|
||||
}
|
||||
|
||||
listen(channel: string, callback: (payload: string) => void): void {
|
||||
return this.db.listen(channel, callback);
|
||||
}
|
||||
|
||||
/** Truncate all tables. */
|
||||
async clear(): Promise<void> {
|
||||
const query = sql<{ tablename: string }>`select tablename from pg_tables where schemaname = current_schema()`;
|
||||
|
||||
const { rows } = await query.execute(this.db.kysely);
|
||||
|
||||
for (const { tablename } of rows) {
|
||||
if (tablename.startsWith('kysely_')) {
|
||||
continue; // Skip Kysely's internal tables
|
||||
} else {
|
||||
await sql`truncate table ${sql.ref(tablename)} cascade`.execute(this.db.kysely);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async [Symbol.asyncDispose](): Promise<void> {
|
||||
await this.clear();
|
||||
await this.db[Symbol.asyncDispose]();
|
||||
}
|
||||
}
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
{
|
||||
"name": "@ditto/db",
|
||||
"version": "0.1.0",
|
||||
"exports": {
|
||||
".": "./mod.ts"
|
||||
}
|
||||
}
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(_db: Kysely<unknown>): Promise<void> {
|
||||
// This migration used to create an FTS table for SQLite, but SQLite support was removed.
|
||||
}
|
||||
|
||||
export async function down(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.alterTable('users').dropColumn('admin').execute();
|
||||
}
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.dropIndex('idx_users_pubkey').execute();
|
||||
await db.schema.dropIndex('idx_users_username').execute();
|
||||
}
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
||||
export async function down(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
||||
export async function down(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.dropTable('users').ifExists().execute();
|
||||
}
|
||||
|
||||
export async function down(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema
|
||||
.alterTable('author_stats')
|
||||
.addColumn('streak_start', 'integer')
|
||||
.addColumn('streak_end', 'integer')
|
||||
.execute();
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema
|
||||
.alterTable('author_stats')
|
||||
.dropColumn('streak_start')
|
||||
.dropColumn('streak_end')
|
||||
.execute();
|
||||
}
|
||||
|
|
@ -1,48 +0,0 @@
|
|||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema
|
||||
.alterTable('author_stats')
|
||||
.addColumn('nip05', 'varchar(320)')
|
||||
.addColumn('nip05_domain', 'varchar(253)')
|
||||
.addColumn('nip05_hostname', 'varchar(253)')
|
||||
.addColumn('nip05_last_verified_at', 'integer')
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.alterTable('author_stats')
|
||||
.addCheckConstraint('author_stats_nip05_domain_lowercase_chk', sql`nip05_domain = lower(nip05_domain)`)
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.alterTable('author_stats')
|
||||
.addCheckConstraint('author_stats_nip05_hostname_lowercase_chk', sql`nip05_hostname = lower(nip05_hostname)`)
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.alterTable('author_stats')
|
||||
.addCheckConstraint('author_stats_nip05_hostname_domain_chk', sql`nip05_hostname like '%' || nip05_domain`)
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createIndex('author_stats_nip05_domain_idx')
|
||||
.on('author_stats')
|
||||
.column('nip05_domain')
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createIndex('author_stats_nip05_hostname_idx')
|
||||
.on('author_stats')
|
||||
.column('nip05_hostname')
|
||||
.execute();
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema
|
||||
.alterTable('author_stats')
|
||||
.dropColumn('nip05')
|
||||
.dropColumn('nip05_domain')
|
||||
.dropColumn('nip05_hostname')
|
||||
.dropColumn('nip05_last_verified_at')
|
||||
.execute();
|
||||
}
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema
|
||||
.createTable('domain_favicons')
|
||||
.addColumn('domain', 'varchar(253)', (col) => col.primaryKey())
|
||||
.addColumn('favicon', 'varchar(2048)', (col) => col.notNull())
|
||||
.addColumn('last_updated_at', 'integer', (col) => col.notNull())
|
||||
.addCheckConstraint('domain_favicons_https_chk', sql`favicon ~* '^https:\\/\\/'`)
|
||||
.execute();
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.dropTable('domain_favicons').execute();
|
||||
}
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.dropTable('pubkey_domains').execute();
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema
|
||||
.createTable('pubkey_domains')
|
||||
.ifNotExists()
|
||||
.addColumn('pubkey', 'text', (col) => col.primaryKey())
|
||||
.addColumn('domain', 'text', (col) => col.notNull())
|
||||
.addColumn('last_updated_at', 'integer', (col) => col.notNull().defaultTo(0))
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createIndex('pubkey_domains_domain_index')
|
||||
.on('pubkey_domains')
|
||||
.column('domain')
|
||||
.ifNotExists()
|
||||
.execute();
|
||||
}
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
// deno-lint-ignore no-explicit-any
|
||||
export async function up(db: Kysely<any>): Promise<void> {
|
||||
await db.schema
|
||||
.createView('top_authors')
|
||||
.materialized()
|
||||
.as(db.selectFrom('author_stats').select(['pubkey', 'followers_count', 'search']).orderBy('followers_count desc'))
|
||||
.execute();
|
||||
|
||||
await sql`CREATE INDEX top_authors_search_idx ON top_authors USING GIN (search gin_trgm_ops)`.execute(db);
|
||||
|
||||
await db.schema.createIndex('top_authors_pubkey_idx').on('top_authors').column('pubkey').execute();
|
||||
|
||||
await db.schema.dropIndex('author_stats_search_idx').execute();
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.dropView('top_authors').execute();
|
||||
await sql`CREATE INDEX author_stats_search_idx ON author_stats USING GIN (search gin_trgm_ops)`.execute(db);
|
||||
}
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE TRIGGER nostr_event_trigger
|
||||
AFTER INSERT ON nostr_events
|
||||
FOR EACH ROW EXECUTE FUNCTION notify_nostr_event()
|
||||
`.execute(db);
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE TRIGGER nostr_event_trigger
|
||||
AFTER INSERT OR UPDATE ON nostr_events
|
||||
FOR EACH ROW EXECUTE FUNCTION notify_nostr_event()
|
||||
`.execute(db);
|
||||
}
|
||||
|
|
@ -1,45 +0,0 @@
|
|||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION notify_nostr_event()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
IF OLD.id IS DISTINCT FROM NEW.id THEN
|
||||
PERFORM pg_notify('nostr_event', NEW.id::text);
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
`.execute(db);
|
||||
|
||||
await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE TRIGGER nostr_event_trigger
|
||||
AFTER INSERT OR UPDATE ON nostr_events
|
||||
FOR EACH ROW EXECUTE FUNCTION notify_nostr_event()
|
||||
`.execute(db);
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION notify_nostr_event()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
PERFORM pg_notify('nostr_event', NEW.id::text);
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
`.execute(db);
|
||||
|
||||
await sql`DROP TRIGGER IF EXISTS nostr_event_trigger ON nostr_events`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE TRIGGER nostr_event_trigger
|
||||
AFTER INSERT ON nostr_events
|
||||
FOR EACH ROW EXECUTE FUNCTION notify_nostr_event()
|
||||
`.execute(db);
|
||||
}
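This trigger is the producing half of the adapters' `listen()` method: every insert into `nostr_events` calls `pg_notify('nostr_event', NEW.id)`, and a subscriber on that channel receives the new event's id as the payload. A consuming sketch (assuming `db` is any adapter from this package):

import type { DittoDB } from '@ditto/db';

declare const db: DittoDB;

db.listen('nostr_event', (id) => {
  // `id` is the hex id of the freshly inserted Nostr event.
  console.log('new nostr_events row:', id);
});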
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
const result = await sql<{ count: number }>`
|
||||
SELECT COUNT(*) as count
|
||||
FROM pg_indexes
|
||||
WHERE indexname = 'nostr_events_new_pkey'
|
||||
`.execute(db);
|
||||
|
||||
if (result.rows[0].count > 0) {
|
||||
await sql`ALTER INDEX nostr_events_new_pkey RENAME TO nostr_events_pkey;`.execute(db);
|
||||
}
|
||||
}
|
||||
|
||||
export async function down(_db: Kysely<unknown>): Promise<void> {
|
||||
}
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.alterTable('event_stats').addColumn('link_preview', 'jsonb').execute();
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.alterTable('event_stats').dropColumn('link_preview').execute();
|
||||
}
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
import type { Kysely } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema
|
||||
.alterTable('event_stats')
|
||||
.addColumn('zaps_amount_cashu', 'integer', (col) => col.notNull().defaultTo(0))
|
||||
.execute();
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<unknown>): Promise<void> {
|
||||
await db.schema.alterTable('event_stats').dropColumn('zaps_amount_cashu').execute();
|
||||
}
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
export { DittoPglite } from './adapters/DittoPglite.ts';
|
||||
export { DittoPolyPg } from './adapters/DittoPolyPg.ts';
|
||||
export { DittoPostgres } from './adapters/DittoPostgres.ts';
|
||||
export { DummyDB } from './adapters/DummyDB.ts';
|
||||
export { TestDB } from './adapters/TestDB.ts';
|
||||
|
||||
export type { DittoDB } from './DittoDB.ts';
|
||||
export type { DittoTables } from './DittoTables.ts';
|
||||
|
|
@ -1,53 +0,0 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
import { ApplicationServer, PushMessageOptions, PushSubscriber, PushSubscription } from '@negrel/webpush';
|
||||
import { NStore } from '@nostrify/types';
|
||||
import { logi } from '@soapbox/logi';
|
||||
|
||||
import { getInstanceMetadata } from '@/utils/instance.ts';
|
||||
|
||||
interface DittoPushOpts {
|
||||
conf: DittoConf;
|
||||
relay: NStore;
|
||||
}
|
||||
|
||||
export class DittoPush {
|
||||
private server: Promise<ApplicationServer | undefined>;
|
||||
|
||||
constructor(opts: DittoPushOpts) {
|
||||
const { conf } = opts;
|
||||
|
||||
this.server = (async () => {
|
||||
const meta = await getInstanceMetadata(opts);
|
||||
const keys = await conf.vapidKeys;
|
||||
|
||||
if (keys) {
|
||||
return await ApplicationServer.new({
|
||||
contactInformation: `mailto:${meta.email}`,
|
||||
vapidKeys: keys,
|
||||
});
|
||||
} else {
|
||||
logi({
|
||||
level: 'warn',
|
||||
ns: 'ditto.push',
|
||||
msg: 'VAPID keys are not set. Push notifications will be disabled.',
|
||||
});
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
async push(
|
||||
subscription: PushSubscription,
|
||||
json: object,
|
||||
opts: PushMessageOptions = {},
|
||||
): Promise<void> {
|
||||
const server = await this.server;
|
||||
|
||||
if (!server) {
|
||||
return;
|
||||
}
|
||||
|
||||
const subscriber = new PushSubscriber(server, subscription);
|
||||
const text = JSON.stringify(json);
|
||||
return subscriber.pushTextMessage(text, opts);
|
||||
}
|
||||
}
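A usage sketch (the subscription is hypothetical; in practice it is a Web Push subscription previously registered by a client): when VAPID keys are configured the payload is delivered as a text message, otherwise `push()` resolves without sending anything.

import { DittoConf } from '@ditto/conf';
import type { PushSubscription } from '@negrel/webpush';
import type { NStore } from '@nostrify/types';

declare const relay: NStore; // the app's event store
declare const subscription: PushSubscription; // endpoint + keys supplied by the client

const push = new DittoPush({ conf: new DittoConf(Deno.env), relay });

await push.push(subscription, { title: 'New follower', body: 'Somebody followed you' });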
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
|
||||
/** @deprecated Use middleware to set/get the config instead. */
|
||||
export const Conf = new DittoConf(Deno.env);
|
||||
|
|
@ -1,77 +0,0 @@
|
|||
import { generateCaptcha, getCaptchaImages, verifyCaptchaSolution } from '@ditto/captcha';
|
||||
import TTLCache from '@isaacs/ttlcache';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { AppController } from '@/app.ts';
|
||||
import { updateUser } from '@/utils/api.ts';
|
||||
|
||||
interface Point {
|
||||
x: number;
|
||||
y: number;
|
||||
}
|
||||
|
||||
const pointSchema: z.ZodType<Point> = z.object({
|
||||
x: z.number(),
|
||||
y: z.number(),
|
||||
});
|
||||
|
||||
const captchas = new TTLCache<string, Point>();
|
||||
const imagesAsync = getCaptchaImages();
|
||||
|
||||
const BG_SIZE = { w: 370, h: 400 };
|
||||
const PUZZLE_SIZE = { w: 65, h: 65 };
|
||||
|
||||
/** Puzzle captcha controller. */
|
||||
export const captchaController: AppController = async (c) => {
|
||||
const { conf } = c.var;
|
||||
|
||||
const { bg, puzzle, solution } = generateCaptcha(
|
||||
await imagesAsync,
|
||||
BG_SIZE,
|
||||
PUZZLE_SIZE,
|
||||
);
|
||||
|
||||
const id = crypto.randomUUID();
|
||||
const now = new Date();
|
||||
const ttl = conf.captchaTTL;
|
||||
|
||||
captchas.set(id, solution, { ttl });
|
||||
|
||||
return c.json({
|
||||
id,
|
||||
type: 'puzzle',
|
||||
bg: bg.toDataURL(),
|
||||
puzzle: puzzle.toDataURL(),
|
||||
created_at: now.toISOString(),
|
||||
expires_at: new Date(now.getTime() + ttl).toISOString(),
|
||||
});
|
||||
};
|
||||
|
||||
/** Verify the captcha solution and sign an event in the database. */
|
||||
export const captchaVerifyController: AppController = async (c) => {
|
||||
const { user } = c.var;
|
||||
|
||||
const id = c.req.param('id');
|
||||
const result = pointSchema.safeParse(await c.req.json());
|
||||
const pubkey = await user!.signer.getPublicKey();
|
||||
|
||||
if (!result.success) {
|
||||
return c.json({ error: 'Invalid input' }, { status: 422 });
|
||||
}
|
||||
|
||||
const solution = captchas.get(id);
|
||||
|
||||
if (!solution) {
|
||||
return c.json({ error: 'Captcha expired' }, { status: 410 });
|
||||
}
|
||||
|
||||
const solved = verifyCaptchaSolution(PUZZLE_SIZE, result.data, solution);
|
||||
|
||||
if (solved) {
|
||||
captchas.delete(id);
|
||||
await updateUser(pubkey, { captcha_solved: true }, c);
|
||||
return c.newResponse(null, { status: 204 });
|
||||
}
|
||||
|
||||
return c.json({ error: 'Incorrect solution' }, { status: 400 });
|
||||
};
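A client-side sketch of the flow these two controllers implement (the paths are placeholders; the real routes are mounted in `app.ts`, which is not shown in this hunk): fetch the puzzle, let the user position the piece, then POST the chosen coordinates for verification.

// 1. Request a captcha: the background and puzzle piece come back as data URLs.
const captcha = await fetch('/api/v1/ditto/captcha').then((res) => res.json());
// { id, type: 'puzzle', bg: 'data:image/…', puzzle: 'data:image/…', created_at, expires_at }

// 2. Submit where the user dropped the piece (its top-left corner, in background coordinates).
const res = await fetch(`/api/v1/ditto/captcha/${captcha.id}/verify`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ x: 152, y: 87 }),
});
// res.status: 204 = solved, 400 = wrong position, 410 = expired, 422 = invalid body.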
|
||||
File diff suppressed because it is too large
|
|
@ -1,494 +0,0 @@
|
|||
import { CashuMint, CashuWallet, MintQuoteState, Proof } from '@cashu/cashu-ts';
|
||||
import { getWallet, organizeProofs, proofSchema, renderTransaction, tokenEventSchema, type Wallet } from '@ditto/cashu';
|
||||
import { userMiddleware } from '@ditto/mastoapi/middleware';
|
||||
import { paginated, paginationSchema } from '@ditto/mastoapi/pagination';
|
||||
import { DittoRoute } from '@ditto/mastoapi/router';
|
||||
import { generateSecretKey, getPublicKey } from 'nostr-tools';
|
||||
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
|
||||
import { bytesToString, stringToBytes } from '@scure/base';
|
||||
import { logi } from '@soapbox/logi';
|
||||
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { createEvent, parseBody } from '@/utils/api.ts';
|
||||
import { swapNutzapsMiddleware } from '@/middleware/swapNutzapsMiddleware.ts';
|
||||
import { hydrateEvents } from '@/storages/hydrate.ts';
|
||||
import { nostrNow } from '@/utils.ts';
|
||||
import { errorJson } from '@/utils/log.ts';
|
||||
import { getAmount } from '@/utils/bolt11.ts';
|
||||
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
|
||||
|
||||
const route = new DittoRoute();
|
||||
|
||||
const createMintQuoteSchema = z.object({
|
||||
mint: z.string().url(),
|
||||
amount: z.number().int(),
|
||||
});
|
||||
|
||||
/**
|
||||
* Creates a new mint quote in a specific mint.
|
||||
* https://github.com/cashubtc/nuts/blob/main/04.md#mint-quote
|
||||
*/
|
||||
route.post('/quote', userMiddleware({ enc: 'nip44' }), async (c) => {
|
||||
const { user } = c.var;
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
const body = await parseBody(c.req.raw);
|
||||
const result = createMintQuoteSchema.safeParse(body);
|
||||
|
||||
if (!result.success) {
|
||||
return c.json({ error: 'Bad schema', schema: result.error }, 400);
|
||||
}
|
||||
|
||||
const { mint: mintUrl, amount } = result.data;
|
||||
|
||||
try {
|
||||
const mint = new CashuMint(mintUrl);
|
||||
const wallet = new CashuWallet(mint);
|
||||
await wallet.loadMint();
|
||||
|
||||
const mintQuote = await wallet.createMintQuote(amount);
|
||||
|
||||
await createEvent({
|
||||
kind: 7374,
|
||||
content: await user.signer.nip44.encrypt(pubkey, mintQuote.quote),
|
||||
tags: [
|
||||
['expiration', String(mintQuote.expiry)],
|
||||
['mint', mintUrl],
|
||||
],
|
||||
}, c);
|
||||
|
||||
return c.json(mintQuote, 200);
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: 'ditto.api.cashu.quote', error: errorJson(e) });
|
||||
return c.json({ error: 'Could not create mint quote' }, 500);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
 * Checks whether the quote has been paid; if it has, mints new tokens.
|
||||
* https://github.com/cashubtc/nuts/blob/main/04.md#minting-tokens
|
||||
*/
|
||||
route.post('/mint/:quote_id', userMiddleware({ enc: 'nip44' }), async (c) => {
|
||||
const { conf, user, relay, signal } = c.var;
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
const quote_id = c.req.param('quote_id');
|
||||
|
||||
const expiredQuoteIds: string[] = [];
|
||||
const deleteExpiredQuotes = async (ids: string[]) => {
|
||||
if (ids.length === 0) return;
|
||||
|
||||
await createEvent({
|
||||
kind: 5,
|
||||
tags: ids.map((id) => ['e', id, conf.relay]),
|
||||
}, c);
|
||||
};
|
||||
|
||||
const events = await relay.query([{ kinds: [7374], authors: [pubkey] }], { signal });
|
||||
for (const event of events) {
|
||||
const decryptedQuoteId = await user.signer.nip44.decrypt(pubkey, event.content);
|
||||
const mintUrl = event.tags.find(([name]) => name === 'mint')?.[1];
|
||||
const expiration = Number(event.tags.find(([name]) => name === 'expiration')?.[1]);
|
||||
const now = nostrNow();
|
||||
|
||||
try {
|
||||
if (mintUrl && (quote_id === decryptedQuoteId)) {
|
||||
if (expiration <= now) {
|
||||
expiredQuoteIds.push(event.id);
|
||||
continue;
|
||||
}
|
||||
|
||||
const mint = new CashuMint(mintUrl);
|
||||
const wallet = new CashuWallet(mint);
|
||||
await wallet.loadMint();
|
||||
|
||||
const mintQuote = await wallet.checkMintQuote(quote_id);
|
||||
const amount = Number(getAmount(mintQuote.request)) / 1000;
|
||||
|
||||
if ((mintQuote.state === MintQuoteState.PAID) && amount) {
|
||||
const proofs = await wallet.mintProofs(amount, mintQuote.quote);
|
||||
|
||||
const unspentProofs = await createEvent({
|
||||
kind: 7375,
|
||||
content: await user.signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify({
|
||||
mint: mintUrl,
|
||||
proofs,
|
||||
}),
|
||||
),
|
||||
}, c);
|
||||
|
||||
await createEvent({
|
||||
kind: 7376,
|
||||
content: await user.signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify([
|
||||
['direction', 'in'],
|
||||
['amount', String(amount)],
|
||||
['e', unspentProofs.id, conf.relay, 'created'],
|
||||
]),
|
||||
),
|
||||
}, c);
|
||||
|
||||
await deleteExpiredQuotes(expiredQuoteIds);
|
||||
|
||||
return c.json({ success: 'Minting successful!', state: MintQuoteState.ISSUED }, 200);
|
||||
} else {
|
||||
await deleteExpiredQuotes(expiredQuoteIds);
|
||||
|
||||
return c.json(mintQuote, 200);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: 'ditto.api.cashu.mint', error: errorJson(e) });
|
||||
return c.json({ error: 'Server error' }, 500);
|
||||
}
|
||||
}
|
||||
|
||||
await deleteExpiredQuotes(expiredQuoteIds);
|
||||
|
||||
return c.json({ error: 'Quote not found' }, 404);
|
||||
});
|
||||
|
||||
const createWalletSchema = z.object({
|
||||
mints: z.array(z.string().url()).nonempty().transform((val) => {
|
||||
return [...new Set(val)];
|
||||
}),
|
||||
relays: z.array(z.string().url()).transform((val) => {
|
||||
return [...new Set(val)];
|
||||
}),
|
||||
});
|
||||
|
||||
/**
|
||||
* Creates a replaceable Cashu wallet and a replaceable nutzap information event.
|
||||
* https://github.com/nostr-protocol/nips/blob/master/60.md
|
||||
* https://github.com/nostr-protocol/nips/blob/master/61.md#nutzap-informational-event
|
||||
*/
|
||||
route.put('/wallet', userMiddleware({ enc: 'nip44' }), async (c) => {
|
||||
const { user, relay, signal, conf } = c.var;
|
||||
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
const body = await parseBody(c.req.raw);
|
||||
const result = createWalletSchema.safeParse(body);
|
||||
|
||||
if (!result.success) {
|
||||
return c.json({ error: 'Bad schema', schema: result.error }, 400);
|
||||
}
|
||||
|
||||
const { mints, relays } = result.data;
|
||||
let previousPrivkey: string | undefined;
|
||||
|
||||
const [event] = await relay.query([{ authors: [pubkey], kinds: [17375] }], { signal });
|
||||
if (event) {
|
||||
const walletContentSchema = z.string().array().min(2).array();
|
||||
|
||||
const { data: walletContent, success, error } = n.json().pipe(walletContentSchema).safeParse(
|
||||
await user.signer.nip44.decrypt(pubkey, event.content),
|
||||
);
|
||||
|
||||
if (!success) {
|
||||
return c.json({ error: 'Your wallet is in an invalid format', schema: error }, 400);
|
||||
}
|
||||
|
||||
previousPrivkey = walletContent.find(([name]) => name === 'privkey')?.[1];
|
||||
}
|
||||
|
||||
const walletContentTags: string[][] = [];
|
||||
|
||||
const privkey = previousPrivkey ?? bytesToString('hex', generateSecretKey());
|
||||
const p2pk = getPublicKey(stringToBytes('hex', privkey));
|
||||
|
||||
walletContentTags.push(['privkey', privkey]);
|
||||
|
||||
for (const mint of mints) {
|
||||
walletContentTags.push(['mint', mint]);
|
||||
}
|
||||
|
||||
if (relays.length < 1) {
|
||||
relays.push(conf.relay);
|
||||
}
|
||||
|
||||
const encryptedWalletContentTags = await user.signer.nip44.encrypt(pubkey, JSON.stringify(walletContentTags));
|
||||
|
||||
// Wallet
|
||||
await createEvent({
|
||||
kind: 17375,
|
||||
content: encryptedWalletContentTags,
|
||||
}, c);
|
||||
|
||||
// Nutzap information
|
||||
await createEvent({
|
||||
kind: 10019,
|
||||
tags: [
|
||||
...mints.map((mint) => ['mint', mint, 'sat']),
|
||||
...relays.map((relay) => ['relay', relay]),
|
||||
['pubkey', p2pk],
|
||||
],
|
||||
}, c);
|
||||
|
||||
// TODO: hydrate wallet and add a 'balance' field when a 'renderWallet' view function is created
|
||||
const walletEntity: Wallet = {
|
||||
pubkey_p2pk: p2pk,
|
||||
mints,
|
||||
relays,
|
||||
balance: 0, // Newly created wallet, balance is zero.
|
||||
};
|
||||
|
||||
return c.json(walletEntity, 200);
|
||||
});
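In summary, this route publishes two events (values below are illustrative): a NIP-60 kind 17375 wallet whose content is the NIP-44-encrypted JSON of the tags sketched here, and a public NIP-61 kind 10019 nutzap-information event advertising the mints, relays, and P2PK key that senders should use.

// Plaintext of the kind 17375 wallet content before nip44 encryption:
const walletContent = [
  ['privkey', '<hex Cashu signing key>'],
  ['mint', 'https://mint.example.com'],
];

// Tags of the public kind 10019 nutzap information event:
const nutzapInfoTags = [
  ['mint', 'https://mint.example.com', 'sat'],
  ['relay', 'wss://relay.example.com'],
  ['pubkey', '<P2PK pubkey derived from the wallet privkey>'],
];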
|
||||
|
||||
/** Gets a wallet, if it exists. */
|
||||
route.get('/wallet', userMiddleware({ enc: 'nip44' }), swapNutzapsMiddleware, async (c) => {
|
||||
const { relay, user, signal } = c.var;
|
||||
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
|
||||
const { wallet, error } = await getWallet(relay, pubkey, user.signer, { signal });
|
||||
|
||||
if (error) {
|
||||
return c.json({ error: error.message }, 404);
|
||||
}
|
||||
|
||||
return c.json(wallet, 200);
|
||||
});
|
||||
|
||||
/** Gets a history of transactions. */
|
||||
route.get('/transactions', userMiddleware({ enc: 'nip44' }), async (c) => {
|
||||
const { relay, user, signal } = c.var;
|
||||
const { limit, since, until } = paginationSchema().parse(c.req.query());
|
||||
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
|
||||
const events = await relay.query([{ kinds: [7376], authors: [pubkey], since, until, limit }], {
|
||||
signal,
|
||||
});
|
||||
|
||||
const transactions = await Promise.all(
|
||||
events.map((event) => {
|
||||
return renderTransaction(event, pubkey, user.signer);
|
||||
}),
|
||||
);
|
||||
|
||||
if (!transactions.length) {
|
||||
return c.json([], 200);
|
||||
}
|
||||
|
||||
return paginated(c, events, transactions);
|
||||
});
|
||||
|
||||
/** Gets the nutzaps that a post received. */
|
||||
route.get('statuses/:id{[0-9a-f]{64}}/nutzapped_by', async (c) => {
|
||||
const id = c.req.param('id');
|
||||
const { relay, signal } = c.var;
|
||||
const { limit, since, until } = paginationSchema().parse(c.req.query());
|
||||
|
||||
const events = await relay.query([{ kinds: [9321], '#e': [id], since, until, limit }], {
|
||||
signal,
|
||||
});
|
||||
|
||||
if (!events.length) {
|
||||
return c.json([], 200);
|
||||
}
|
||||
|
||||
await hydrateEvents({ ...c.var, events });
|
||||
|
||||
const results = (await Promise.all(
|
||||
events.map((event: DittoEvent) => {
|
||||
const proofs = (event.tags.filter(([name]) => name === 'proof').map(([_, proof]) => {
|
||||
const { success, data } = n.json().pipe(proofSchema).safeParse(proof);
|
||||
if (!success) return;
|
||||
|
||||
return data;
|
||||
})
|
||||
.filter(Boolean)) as Proof[];
|
||||
|
||||
const amount = proofs.reduce((prev, current) => prev + current.amount, 0);
|
||||
const comment = event.content;
|
||||
|
||||
const account = event?.author ? renderAccount(event.author) : accountFromPubkey(event.pubkey);
|
||||
|
||||
return {
|
||||
comment,
|
||||
amount,
|
||||
account,
|
||||
};
|
||||
}),
|
||||
)).filter(Boolean);
|
||||
|
||||
return paginated(c, events, results);
|
||||
});
|
||||
|
||||
/** Get mints set by the CASHU_MINTS environment variable. */
|
||||
route.get('/mints', (c) => {
|
||||
const { conf } = c.var;
|
||||
|
||||
// TODO: Return full Mint information: https://github.com/cashubtc/nuts/blob/main/06.md
|
||||
const mints = conf.cashuMints;
|
||||
|
||||
return c.json({ mints }, 200);
|
||||
});
|
||||
|
||||
const nutzapSchema = z.object({
|
||||
account_id: n.id(),
|
||||
status_id: n.id().optional(),
|
||||
amount: z.number().int().positive(),
|
||||
comment: z.string().optional(),
|
||||
});
|
||||
|
||||
/** Nutzaps a post or a user. */
|
||||
route.post('/nutzap', userMiddleware({ enc: 'nip44' }), swapNutzapsMiddleware, async (c) => {
|
||||
const { conf, relay, user, signal } = c.var;
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
const body = await parseBody(c.req.raw);
|
||||
const result = nutzapSchema.safeParse(body);
|
||||
|
||||
if (!result.success) {
|
||||
return c.json({ error: 'Bad schema', schema: result.error }, 400);
|
||||
}
|
||||
|
||||
const { account_id, status_id, amount, comment } = result.data;
|
||||
|
||||
const filter = status_id ? [{ kinds: [1], ids: [status_id] }] : [{ kinds: [0], authors: [account_id] }];
|
||||
const [event] = await relay.query(filter, { signal });
|
||||
|
||||
if (!event) {
|
||||
return c.json({ error: status_id ? 'Status not found' : 'Account not found' }, 404);
|
||||
}
|
||||
|
||||
if (status_id) {
|
||||
await hydrateEvents({ ...c.var, events: [event] });
|
||||
}
|
||||
|
||||
if (event.kind === 1 && ((event as DittoEvent)?.author?.pubkey !== account_id)) {
|
||||
return c.json({ error: 'Post author does not match the given account' }, 422);
|
||||
}
|
||||
|
||||
const [nutzapInfo] = await relay.query([{ kinds: [10019], authors: [account_id] }], { signal });
|
||||
if (!nutzapInfo) {
|
||||
return c.json({ error: 'Target user does not have a nutzap information event' }, 404);
|
||||
}
|
||||
|
||||
const recipientMints = nutzapInfo.tags.filter(([name]) => name === 'mint').map((tag) => tag[1]).filter(Boolean);
|
||||
if (recipientMints.length < 1) {
|
||||
return c.json({ error: 'Target user does not have any mints setup' }, 422);
|
||||
}
|
||||
|
||||
const p2pk = nutzapInfo.tags.find(([name]) => name === 'pubkey')?.[1];
|
||||
if (!p2pk) {
|
||||
return c.json({ error: 'Target user does not have a cashu pubkey' }, 422);
|
||||
}
|
||||
|
||||
const unspentProofs = await relay.query([{ kinds: [7375], authors: [pubkey] }], { signal });
|
||||
let organizedProofs;
|
||||
try {
|
||||
organizedProofs = await organizeProofs(unspentProofs, user.signer);
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: 'ditto.api.cashu.nutzap', error: errorJson(e) });
|
||||
return c.json({ error: 'Failed to organize proofs' }, 500);
|
||||
}
|
||||
|
||||
const proofsToBeUsed: Proof[] = [];
|
||||
const eventsToBeDeleted: NostrEvent[] = [];
|
||||
let selectedMint: string | undefined;
|
||||
|
||||
for (const mint of recipientMints) {
|
||||
if (organizedProofs[mint]?.totalBalance >= amount) {
|
||||
selectedMint = mint;
|
||||
let minimumRequiredBalance = 0;
|
||||
|
||||
for (const key of Object.keys(organizedProofs[mint])) {
|
||||
if (key === 'totalBalance' || typeof organizedProofs[mint][key] === 'number') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (minimumRequiredBalance >= amount) {
|
||||
break;
|
||||
}
|
||||
|
||||
const event = organizedProofs[mint][key].event;
|
||||
const decryptedContent = await user.signer.nip44.decrypt(pubkey, event.content);
|
||||
|
||||
const { data: token, success } = n.json().pipe(tokenEventSchema).safeParse(decryptedContent);
|
||||
|
||||
if (!success) {
|
||||
continue; // TODO: maybe abort everything
|
||||
}
|
||||
|
||||
const { proofs } = token;
|
||||
|
||||
proofsToBeUsed.push(...proofs);
|
||||
eventsToBeDeleted.push(event);
|
||||
minimumRequiredBalance += organizedProofs[mint][key].balance;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!selectedMint) {
|
||||
return c.json({ error: 'You do not have mints in common with enough balance' }, 422);
|
||||
}
|
||||
|
||||
const mint = new CashuMint(selectedMint);
|
||||
const wallet = new CashuWallet(mint);
|
||||
await wallet.loadMint();
|
||||
|
||||
const { keep: proofsToKeep, send: proofsToSend } = await wallet.send(amount, proofsToBeUsed, {
|
||||
includeFees: true,
|
||||
pubkey: p2pk.length === 64 ? '02' + p2pk : p2pk,
|
||||
});
|
||||
|
||||
const historyTags: string[][] = [
|
||||
['direction', 'out'],
|
||||
['amount', String(proofsToSend.reduce((accumulator, current) => accumulator + current.amount, 0))],
|
||||
...eventsToBeDeleted.map((e) => ['e', e.id, conf.relay, 'destroyed']),
|
||||
];
|
||||
|
||||
if (proofsToKeep.length) {
|
||||
const newUnspentProof = await createEvent({
|
||||
kind: 7375,
|
||||
content: await user.signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify({
|
||||
mint: selectedMint,
|
||||
proofs: proofsToKeep,
|
||||
del: eventsToBeDeleted.map((e) => e.id),
|
||||
}),
|
||||
),
|
||||
}, c);
|
||||
|
||||
historyTags.push(['e', newUnspentProof.id, conf.relay, 'created']);
|
||||
}
|
||||
|
||||
await createEvent({
|
||||
kind: 7376,
|
||||
content: await user.signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify(historyTags),
|
||||
),
|
||||
}, c);
|
||||
|
||||
await createEvent({
|
||||
kind: 5,
|
||||
tags: eventsToBeDeleted.map((e) => ['e', e.id, conf.relay]),
|
||||
}, c);
|
||||
|
||||
const nutzapTags: string[][] = [
|
||||
...proofsToSend.map((proof) => ['proof', JSON.stringify(proof)]),
|
||||
['u', selectedMint],
|
||||
['p', account_id], // recipient of nutzap
|
||||
];
|
||||
if (status_id) {
|
||||
nutzapTags.push(['e', status_id, conf.relay]);
|
||||
}
|
||||
|
||||
// nutzap
|
||||
await createEvent({
|
||||
kind: 9321,
|
||||
content: comment ?? '',
|
||||
tags: nutzapTags,
|
||||
}, c);
|
||||
|
||||
return c.json({ message: 'Nutzap sent successfully!' }, 200); // TODO: return wallet entity
|
||||
});
|
||||
|
||||
export default route;
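For reference, the kind 9321 nutzap event published at the end of the `/nutzap` route has roughly this shape (values are illustrative; see NIP-61): each `proof` tag carries a Cashu proof P2PK-locked to the recipient's wallet pubkey, `u` names the mint that issued it, `p` is the recipient, and `e` is present only when a post rather than a user is being nutzapped.

const nutzap = {
  kind: 9321,
  content: 'Great post!',
  tags: [
    ['proof', '{"amount":21,"id":"…","secret":"…","C":"…"}'],
    ['u', 'https://mint.example.com'],
    ['p', '<recipient pubkey>'],
    ['e', '<status id>', '<relay url>'],
  ],
};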
|
||||
|
|
@ -1,272 +0,0 @@
|
|||
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { AppController } from '@/app.ts';
|
||||
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
|
||||
import { getAuthor } from '@/queries.ts';
|
||||
import { addTag } from '@/utils/tags.ts';
|
||||
import { parseBody, updateAdminEvent } from '@/utils/api.ts';
|
||||
import { getInstanceMetadata } from '@/utils/instance.ts';
|
||||
import { deleteTag } from '@/utils/tags.ts';
|
||||
import { DittoZapSplits, getZapSplits } from '@/utils/zap-split.ts';
|
||||
import { screenshotsSchema } from '@/schemas/nostr.ts';
|
||||
import { percentageSchema } from '@/schema.ts';
|
||||
import { hydrateEvents } from '@/storages/hydrate.ts';
|
||||
import { accountFromPubkey } from '@/views/mastodon/accounts.ts';
|
||||
import { renderAccount } from '@/views/mastodon/accounts.ts';
|
||||
import { updateListAdminEvent } from '@/utils/api.ts';
|
||||
|
||||
const markerSchema = z.enum(['read', 'write']);
|
||||
|
||||
/** WebSocket URL. */
|
||||
const wsUrlSchema = z.string().refine((val): val is `wss://${string}` | `ws://${string}` => {
|
||||
try {
|
||||
const { protocol } = new URL(val);
|
||||
return protocol === 'wss:' || protocol === 'ws:';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}, 'Invalid WebSocket URL');
|
||||
|
||||
const relaySchema = z.object({
|
||||
url: wsUrlSchema,
|
||||
marker: markerSchema.optional(),
|
||||
});
|
||||
|
||||
type RelayEntity = z.infer<typeof relaySchema>;
|
||||
|
||||
export const adminRelaysController: AppController = async (c) => {
|
||||
const { conf, relay } = c.var;
|
||||
|
||||
const [event] = await relay.query([
|
||||
{ kinds: [10002], authors: [await conf.signer.getPublicKey()], limit: 1 },
|
||||
]);
|
||||
|
||||
if (!event) {
|
||||
return c.json([]);
|
||||
}
|
||||
|
||||
return c.json(renderRelays(event));
|
||||
};
|
||||
|
||||
export const adminSetRelaysController: AppController = async (c) => {
|
||||
const { conf, relay } = c.var;
|
||||
const relays = relaySchema.array().parse(await c.req.json());
|
||||
|
||||
const event = await conf.signer.signEvent({
|
||||
kind: 10002,
|
||||
tags: relays.map(({ url, marker }) => marker ? ['r', url, marker] : ['r', url]),
|
||||
content: '',
|
||||
created_at: Math.floor(Date.now() / 1000),
|
||||
});
|
||||
|
||||
await relay.event(event);
|
||||
|
||||
return c.json(renderRelays(event));
|
||||
};
|
||||
|
||||
/** Render Ditto API relays from a NIP-65 event. */
|
||||
function renderRelays(event: NostrEvent): RelayEntity[] {
|
||||
return event.tags.reduce((acc, [name, url, marker]) => {
|
||||
if (name === 'r') {
|
||||
const relay: RelayEntity = {
|
||||
url: url as `wss://${string}`,
|
||||
marker: markerSchema.safeParse(marker).success ? marker as 'read' | 'write' : undefined,
|
||||
};
|
||||
acc.push(relay);
|
||||
}
|
||||
return acc;
|
||||
}, [] as RelayEntity[]);
|
||||
}
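An input and output sketch for `renderRelays` (values illustrative): each `r` tag of the NIP-65 relay list becomes one API entity, and the optional third element becomes `marker` when it parses as `read` or `write`.

// Given a kind 10002 event with these tags…
const tags = [
  ['r', 'wss://relay.example.com', 'read'],
  ['r', 'wss://other.example.com'],
];

// …renderRelays() returns:
// [
//   { url: 'wss://relay.example.com', marker: 'read' },
//   { url: 'wss://other.example.com', marker: undefined },
// ]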
|
||||
|
||||
const zapSplitSchema = z.record(
|
||||
n.id(),
|
||||
z.object({
|
||||
weight: z.number().int().min(1).max(100),
|
||||
message: z.string().max(500),
|
||||
}),
|
||||
);
|
||||
|
||||
export const updateZapSplitsController: AppController = async (c) => {
|
||||
const { conf } = c.var;
|
||||
|
||||
const body = await parseBody(c.req.raw);
|
||||
const result = zapSplitSchema.safeParse(body);
|
||||
|
||||
if (!result.success) {
|
||||
return c.json({ error: result.error }, 400);
|
||||
}
|
||||
|
||||
const adminPubkey = await conf.signer.getPublicKey();
|
||||
|
||||
const dittoZapSplit = await getZapSplits(adminPubkey, c.var);
|
||||
if (!dittoZapSplit) {
|
||||
return c.json({ error: 'Zap split not activated, restart the server.' }, 404);
|
||||
}
|
||||
|
||||
const { data } = result;
|
||||
const pubkeys = Object.keys(data);
|
||||
|
||||
if (pubkeys.length < 1) {
|
||||
return c.newResponse(null, { status: 204 });
|
||||
}
|
||||
|
||||
await updateListAdminEvent(
|
||||
{ kinds: [30078], authors: [adminPubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 },
|
||||
(tags) =>
|
||||
pubkeys.reduce((accumulator, pubkey) => {
|
||||
return addTag(accumulator, ['p', pubkey, data[pubkey].weight.toString(), data[pubkey].message]);
|
||||
}, tags),
|
||||
c,
|
||||
);
|
||||
|
||||
return c.newResponse(null, { status: 204 });
|
||||
};
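An input sketch for this controller (the pubkey is a placeholder): the request body maps pubkeys to weights and messages, and each entry becomes a `p` tag on the admin's kind 30078 `pub.ditto.zapSplits` list.

// Request body accepted by updateZapSplitsController:
const body = {
  '<64-char hex pubkey>': { weight: 5, message: 'Support the developer' },
};

// Tag added (or updated) on the kind 30078 list event:
// ['p', '<64-char hex pubkey>', '5', 'Support the developer']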
|
||||
|
||||
const deleteZapSplitSchema = z.array(n.id()).min(1);
|
||||
|
||||
export const deleteZapSplitsController: AppController = async (c) => {
|
||||
const { conf } = c.var;
|
||||
|
||||
const body = await parseBody(c.req.raw);
|
||||
const result = deleteZapSplitSchema.safeParse(body);
|
||||
|
||||
if (!result.success) {
|
||||
return c.json({ error: result.error }, 400);
|
||||
}
|
||||
|
||||
const adminPubkey = await conf.signer.getPublicKey();
|
||||
|
||||
const dittoZapSplit = await getZapSplits(adminPubkey, c.var);
|
||||
if (!dittoZapSplit) {
|
||||
return c.json({ error: 'Zap split not activated, restart the server.' }, 404);
|
||||
}
|
||||
|
||||
const { data } = result;
|
||||
|
||||
await updateListAdminEvent(
|
||||
{ kinds: [30078], authors: [adminPubkey], '#d': ['pub.ditto.zapSplits'], limit: 1 },
|
||||
(tags) =>
|
||||
data.reduce((accumulator, currentValue) => {
|
||||
return deleteTag(accumulator, ['p', currentValue]);
|
||||
}, tags),
|
||||
c,
|
||||
);
|
||||
|
||||
return c.newResponse(null, { status: 204 });
|
||||
};
|
||||
|
||||
export const getZapSplitsController: AppController = async (c) => {
|
||||
const { conf } = c.var;
|
||||
|
||||
const dittoZapSplit: DittoZapSplits | undefined = await getZapSplits(await conf.signer.getPublicKey(), c.var);
|
||||
if (!dittoZapSplit) {
|
||||
return c.json({ error: 'Zap split not activated, restart the server.' }, 404);
|
||||
}
|
||||
|
||||
const pubkeys = Object.keys(dittoZapSplit);
|
||||
|
||||
const zapSplits = await Promise.all(pubkeys.map(async (pubkey) => {
|
||||
const author = await getAuthor(pubkey, c.var);
|
||||
|
||||
const account = author ? renderAccount(author) : accountFromPubkey(pubkey);
|
||||
|
||||
return {
|
||||
account,
|
||||
weight: dittoZapSplit[pubkey].weight,
|
||||
message: dittoZapSplit[pubkey].message,
|
||||
};
|
||||
}));
|
||||
|
||||
return c.json(zapSplits, 200);
|
||||
};
|
||||
|
||||
export const statusZapSplitsController: AppController = async (c) => {
|
||||
const { relay, signal } = c.var;
|
||||
|
||||
const id = c.req.param('id');
|
||||
|
||||
const [event] = await relay.query([{ kinds: [1, 20], ids: [id], limit: 1 }], { signal });
|
||||
if (!event) {
|
||||
return c.json({ error: 'Event not found' }, 404);
|
||||
}
|
||||
|
||||
const zapsTag = event.tags.filter(([name]) => name === 'zap');
|
||||
|
||||
const pubkeys = zapsTag.map((name) => name[1]);
|
||||
|
||||
const users = await relay.query([{ authors: pubkeys, kinds: [0], limit: pubkeys.length }], { signal });
|
||||
await hydrateEvents({ ...c.var, events: users });
|
||||
|
||||
const zapSplits = (await Promise.all(pubkeys.map((pubkey) => {
|
||||
const author = (users.find((event) => event.pubkey === pubkey) as DittoEvent | undefined)?.author;
|
||||
const account = author ? renderAccount(author) : accountFromPubkey(pubkey);
|
||||
|
||||
const weight = percentageSchema.catch(0).parse(zapsTag.find((name) => name[1] === pubkey)![3]) ?? 0;
|
||||
|
||||
const message = zapsTag.find((name) => name[1] === pubkey)![4] ?? '';
|
||||
|
||||
return {
|
||||
account,
|
||||
message,
|
||||
weight,
|
||||
};
|
||||
}))).filter((zapSplit) => zapSplit.weight > 0);
|
||||
|
||||
return c.json(zapSplits, 200);
|
||||
};
|
||||
|
||||
const updateInstanceSchema = z.object({
|
||||
title: z.string(),
|
||||
description: z.string(),
|
||||
short_description: z.string(),
|
||||
/** Mastodon doesn't have this field. */
|
||||
screenshots: screenshotsSchema,
|
||||
/** https://docs.joinmastodon.org/entities/Instance/#thumbnail-url */
|
||||
thumbnail: z.object({
|
||||
url: z.string().url(),
|
||||
}),
|
||||
});
|
||||
|
||||
export const updateInstanceController: AppController = async (c) => {
|
||||
const { conf } = c.var;
|
||||
|
||||
const body = await parseBody(c.req.raw);
|
||||
const result = updateInstanceSchema.safeParse(body);
|
||||
const pubkey = await conf.signer.getPublicKey();
|
||||
|
||||
if (!result.success) {
|
||||
return c.json(result.error, 422);
|
||||
}
|
||||
|
||||
const meta = await getInstanceMetadata(c.var);
|
||||
|
||||
await updateAdminEvent(
|
||||
{ kinds: [0], authors: [pubkey], limit: 1 },
|
||||
(_) => {
|
||||
const {
|
||||
title,
|
||||
description,
|
||||
short_description,
|
||||
screenshots,
|
||||
thumbnail,
|
||||
} = result.data;
|
||||
|
||||
meta.name = title;
|
||||
meta.about = description;
|
||||
meta.tagline = short_description;
|
||||
meta.screenshots = screenshots;
|
||||
meta.picture = thumbnail.url;
|
||||
delete meta.event;
|
||||
|
||||
return {
|
||||
kind: 0,
|
||||
content: JSON.stringify(meta),
|
||||
tags: [],
|
||||
};
|
||||
},
|
||||
c,
|
||||
);
|
||||
|
||||
return c.newResponse(null, { status: 204 });
|
||||
};
|
||||
|
|
@ -1,150 +0,0 @@
|
|||
import { paginated, paginatedList } from '@ditto/mastoapi/pagination';
|
||||
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { AppContext, AppController } from '@/app.ts';
|
||||
import { booleanParamSchema } from '@/schema.ts';
|
||||
import { hydrateEvents } from '@/storages/hydrate.ts';
|
||||
import { extractIdentifier, lookupEvent, lookupPubkey } from '@/utils/lookup.ts';
|
||||
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
|
||||
import { renderStatus } from '@/views/mastodon/statuses.ts';
|
||||
import { getFollowedPubkeys } from '@/queries.ts';
|
||||
import { getPubkeysBySearch } from '@/utils/search.ts';
|
||||
|
||||
const searchQuerySchema = z.object({
|
||||
q: z.string().transform(decodeURIComponent),
|
||||
type: z.enum(['accounts', 'statuses', 'hashtags']).optional(),
|
||||
resolve: booleanParamSchema.optional().transform(Boolean),
|
||||
following: z.boolean().default(false),
|
||||
account_id: n.id().optional(),
|
||||
offset: z.coerce.number().nonnegative().catch(0),
|
||||
});
|
||||
|
||||
type SearchQuery = z.infer<typeof searchQuerySchema> & { since?: number; until?: number; limit: number };
|
||||
|
||||
const searchController: AppController = async (c) => {
|
||||
const { relay, user, pagination, signal } = c.var;
|
||||
|
||||
const result = searchQuerySchema.safeParse(c.req.query());
|
||||
const viewerPubkey = await user?.signer.getPublicKey();
|
||||
|
||||
if (!result.success) {
|
||||
return c.json({ error: 'Bad request', schema: result.error }, 422);
|
||||
}
|
||||
|
||||
if (!c.var.pool) {
|
||||
throw new Error('Ditto pool not available');
|
||||
}
|
||||
|
||||
const event = await lookupEvent(result.data.q, { ...c.var, pool: c.var.pool });
|
||||
const lookup = extractIdentifier(result.data.q);
|
||||
|
||||
// Render account from pubkey.
|
||||
if (!event && lookup) {
|
||||
const pubkey = await lookupPubkey(lookup, c.var);
|
||||
return c.json({
|
||||
accounts: pubkey ? [accountFromPubkey(pubkey)] : [],
|
||||
statuses: [],
|
||||
hashtags: [],
|
||||
});
|
||||
}
|
||||
|
||||
let events: NostrEvent[] = [];
|
||||
|
||||
if (event) {
|
||||
events = await hydrateEvents({ ...c.var, events: [event] });
|
||||
}
|
||||
|
||||
events.push(...(await searchEvents(c, { ...result.data, ...pagination, viewerPubkey }, signal)));
|
||||
|
||||
const [accounts, statuses] = await Promise.all([
|
||||
Promise.all(
|
||||
events
|
||||
.filter((event) => event.kind === 0)
|
||||
.map((event) => renderAccount(event))
|
||||
.filter(Boolean),
|
||||
),
|
||||
Promise.all(
|
||||
events
|
||||
.filter((event) => event.kind === 1)
|
||||
.map((event) => renderStatus(relay, event, { viewerPubkey }))
|
||||
.filter(Boolean),
|
||||
),
|
||||
]);
|
||||
|
||||
const body = {
|
||||
accounts,
|
||||
statuses,
|
||||
hashtags: [],
|
||||
};
|
||||
|
||||
if (result.data.type === 'accounts') {
|
||||
return paginatedList(c, { ...result.data, ...pagination }, body);
|
||||
} else {
|
||||
return paginated(c, events, body);
|
||||
}
|
||||
};
|
||||
|
||||
/** Get events for the search params. */
|
||||
async function searchEvents(
|
||||
c: AppContext,
|
||||
{ q, type, since, until, limit, offset, account_id, viewerPubkey }: SearchQuery & { viewerPubkey?: string },
|
||||
signal: AbortSignal,
|
||||
): Promise<NostrEvent[]> {
|
||||
const { relay, db } = c.var;
|
||||
|
||||
// Hashtag search is not supported.
|
||||
if (type === 'hashtags') {
|
||||
return Promise.resolve([]);
|
||||
}
|
||||
|
||||
const filter: NostrFilter = {
|
||||
kinds: typeToKinds(type),
|
||||
search: q,
|
||||
since,
|
||||
until,
|
||||
limit,
|
||||
};
|
||||
|
||||
// For account search, use a special index, and prioritize followed accounts.
|
||||
if (type === 'accounts') {
|
||||
const following = viewerPubkey ? await getFollowedPubkeys(relay, viewerPubkey) : new Set<string>();
|
||||
const searchPubkeys = await getPubkeysBySearch(db.kysely, { q, limit, offset, following });
|
||||
|
||||
filter.authors = [...searchPubkeys];
|
||||
filter.search = undefined;
|
||||
}
|
||||
|
||||
// Results should only be shown from one author.
|
||||
if (account_id) {
|
||||
filter.authors = [account_id];
|
||||
}
|
||||
|
||||
// Query the events.
|
||||
let events = await relay
|
||||
.query([filter], { signal })
|
||||
.then((events) => hydrateEvents({ ...c.var, events }));
|
||||
|
||||
// When using an authors filter, return the events in the same order as the filter.
|
||||
if (filter.authors) {
|
||||
events = filter.authors
|
||||
.map((pubkey) => events.find((event) => event.pubkey === pubkey))
|
||||
.filter((event) => !!event);
|
||||
}
|
||||
|
||||
return events;
|
||||
}
|
||||
|
||||
/** Get event kinds to search from `type` query param. */
|
||||
function typeToKinds(type: SearchQuery['type']): number[] {
|
||||
switch (type) {
|
||||
case 'accounts':
|
||||
return [0];
|
||||
case 'statuses':
|
||||
return [1];
|
||||
default:
|
||||
return [0, 1];
|
||||
}
|
||||
}
|
||||
|
||||
export { searchController };
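An illustration of the relay filters `searchEvents` builds (values illustrative): a status search becomes a NIP-50 `search` filter on kind 1, while an account search drops `search` and instead passes the pubkeys resolved by `getPubkeysBySearch` as `authors`.

import type { NostrFilter } from '@nostrify/nostrify';

// ?q=nostr&type=statuses&limit=20
const statusFilter: NostrFilter = { kinds: [1], search: 'nostr', limit: 20 };

// ?q=alice&type=accounts  (authors come from the account search index)
const accountFilter: NostrFilter = { kinds: [0], authors: ['<64-char hex pubkey>'], limit: 20 };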
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
import { dbAvailableConnectionsGauge, dbPoolSizeGauge } from '@ditto/metrics';
|
||||
import { register } from 'prom-client';
|
||||
|
||||
import { AppController } from '@/app.ts';
|
||||
|
||||
/** Prometheus/OpenMetrics controller. */
|
||||
export const metricsController: AppController = async (c) => {
|
||||
const { db } = c.var;
|
||||
|
||||
// Update some metrics at request time.
|
||||
dbPoolSizeGauge.set(db.poolSize);
|
||||
dbAvailableConnectionsGauge.set(db.availableConnections);
|
||||
|
||||
// Serve the metrics.
|
||||
const metrics = await register.metrics();
|
||||
|
||||
const headers: HeadersInit = {
|
||||
'Content-Type': register.contentType,
|
||||
};
|
||||
|
||||
return c.text(metrics, 200, headers);
|
||||
};
|
||||
|
|
@@ -1,83 +0,0 @@
import { NostrJson } from '@nostrify/nostrify';
import { z } from 'zod';

import { AppController } from '@/app.ts';
import { localNip05Lookup } from '@/utils/nip05.ts';

const nameSchema = z.string().min(1).regex(/^[\w.-]+$/);

/**
 * Serves NIP-05's nostr.json.
 * https://github.com/nostr-protocol/nips/blob/master/05.md
 */
const nostrController: AppController = async (c) => {
  const { conf, relay, signal } = c.var;
  const nameParam = c.req.query('name');

  // If no name parameter is provided, return all users
  if (!nameParam) {
    const adminPubkey = await conf.signer.getPublicKey();

    // Query all NIP-05 grants (kind 30360 events)
    const grants = await relay.query(
      [{ kinds: [30360], authors: [adminPubkey] }],
      { signal },
    );

    const names: Record<string, string> = {};
    const relays: Record<string, string[]> = {};

    for (const grant of grants) {
      // Extract the NIP-05 name from the 'd' tag
      const nip05 = grant.tags.find(([name]) => name === 'd')?.[1];
      // Extract the pubkey from the 'p' tag
      const pubkey = grant.tags.find(([name]) => name === 'p')?.[1];

      if (nip05 && pubkey) {
        // Extract just the localpart (before @)
        const localpart = nip05.split('@')[0];
        if (localpart) {
          names[localpart] = pubkey;
          relays[pubkey] = [conf.relay];
        }
      }
    }

    // Cache for 6 hours.
    c.header('Cache-Control', 'max-age=21600, public, stale-while-revalidate=3600');

    return c.json({ names, relays } satisfies NostrJson);
  }

  // Original behavior: lookup a specific name
  const result = nameSchema.safeParse(nameParam);

  if (!result.success) {
    return c.json({ error: 'Invalid name parameter' }, { status: 422 });
  }

  const name = result.data;
  const pointer = await localNip05Lookup(name, c.var);

  if (!pointer) {
    return c.json({ names: {}, relays: {} } satisfies NostrJson, { status: 404 });
  }

  const { pubkey, relays = [] } = pointer;

  // It's found, so cache for 6 hours.
  c.header('Cache-Control', 'max-age=21600, public, stale-while-revalidate=3600');

  return c.json(
    {
      names: {
        [name]: pubkey,
      },
      relays: {
        [pubkey]: relays,
      },
    } satisfies NostrJson,
  );
};

export { nostrController };
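For reference, a sketch of the JSON this controller returns for a single-name lookup, per NIP-05. The pubkey and relay URL below are placeholders, not real values.

// Hypothetical response for GET /.well-known/nostr.json?name=alex
const exampleNip05Response: NostrJson = {
  names: {
    alex: '0000000000000000000000000000000000000000000000000000000000000001', // placeholder hex pubkey
  },
  relays: {
    '0000000000000000000000000000000000000000000000000000000000000001': ['wss://relay.example.com'],
  },
};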
@@ -1,24 +0,0 @@
import { sql } from 'kysely';

import {
  type TrendsCtx,
  updateTrendingEvents,
  updateTrendingHashtags,
  updateTrendingLinks,
  updateTrendingPubkeys,
  updateTrendingZappedEvents,
} from '@/trends.ts';

/** Start cron jobs for the application. */
export function cron(ctx: TrendsCtx) {
  Deno.cron('update trending pubkeys', '0 * * * *', () => updateTrendingPubkeys(ctx));
  Deno.cron('update trending zapped events', '7 * * * *', () => updateTrendingZappedEvents(ctx));
  Deno.cron('update trending events', '15 * * * *', () => updateTrendingEvents(ctx));
  Deno.cron('update trending hashtags', '30 * * * *', () => updateTrendingHashtags(ctx));
  Deno.cron('update trending links', '45 * * * *', () => updateTrendingLinks(ctx));

  Deno.cron('refresh top authors', '20 * * * *', async () => {
    const { kysely } = ctx.db;
    await sql`refresh materialized view top_authors`.execute(kysely);
  });
}
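The jobs are staggered across the hour so they do not contend for the database at the same time. A minimal sketch of the same Deno.cron pattern, with a purely hypothetical job name and schedule:

// Hypothetical: an hourly job offset to minute 50, following the staggering above.
Deno.cron('hypothetical cleanup', '50 * * * *', async () => {
  // Each schedule is a standard five-field cron expression.
  await Promise.resolve();
});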
@@ -1,14 +0,0 @@
{
  "name": "@ditto/ditto",
  "version": "1.1.0",
  "exports": {},
  "imports": {
    "@/": "./",
    "deno.json": "../../deno.json"
  },
  "lint": {
    "rules": {
      "exclude": ["verbatim-module-syntax"]
    }
  }
}
@@ -1,42 +0,0 @@
import { firehoseEventsCounter } from '@ditto/metrics';
import { Semaphore } from '@core/asyncutil';
import { NRelay, NStore } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';

import { nostrNow } from '@/utils.ts';

interface FirehoseOpts {
  pool: NRelay;
  relay: NStore;
  concurrency: number;
  kinds: number[];
  timeout?: number;
}

/**
 * This function watches events on all known relays and performs
 * side-effects based on them, such as trending hashtag tracking
 * and storing events for notifications and the home feed.
 */
export async function startFirehose(opts: FirehoseOpts): Promise<void> {
  const { pool, relay, kinds, concurrency, timeout = 5000 } = opts;

  const sem = new Semaphore(concurrency);

  for await (const msg of pool.req([{ kinds, limit: 0, since: nostrNow() }])) {
    if (msg[0] === 'EVENT') {
      const event = msg[2];

      logi({ level: 'debug', ns: 'ditto.event', source: 'firehose', id: event.id, kind: event.kind });
      firehoseEventsCounter.inc({ kind: event.kind });

      sem.lock(async () => {
        try {
          await relay.event(event, { signal: AbortSignal.timeout(timeout) });
        } catch {
          // Ignore
        }
      });
    }
  }
}
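A rough sketch of how startFirehose might be wired up at boot, assuming a relay pool and local store are constructed elsewhere; the kind list and concurrency below are placeholders, not taken from the source.

// Hypothetical wiring.
declare const pool: NRelay; // outbound pool of known relays
declare const relay: NStore; // local Ditto store

startFirehose({
  pool,
  relay,
  kinds: [0, 1, 3, 7], // placeholder kinds to ingest
  concurrency: 5,
}).catch((e) => console.error('firehose stopped', e));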
@ -1,94 +0,0 @@
|
|||
import { CashuMint, CashuWallet, getEncodedToken } from '@cashu/cashu-ts';
|
||||
import { getLastRedeemedNutzap, getMintsToProofs, validateAndParseWallet } from '@ditto/cashu';
|
||||
import { HTTPException } from '@hono/hono/http-exception';
|
||||
import { NostrFilter } from '@nostrify/nostrify';
|
||||
import { logi } from '@soapbox/logi';
|
||||
|
||||
import { errorJson } from '@/utils/log.ts';
|
||||
import { createEvent } from '@/utils/api.ts';
|
||||
import { MiddlewareHandler } from '@hono/hono/types';
|
||||
|
||||
/**
|
||||
* Swap nutzaps into wallet (create new events) if the user has a wallet; otherwise, just fall through.
|
||||
* Errors are only thrown if 'signer' and 'store' middlewares are not set.
|
||||
*/
|
||||
export const swapNutzapsMiddleware: MiddlewareHandler = async (c, next) => {
|
||||
const { conf, relay, user, signal } = c.var;
|
||||
|
||||
if (!user) {
|
||||
throw new HTTPException(401, { message: 'No pubkey provided' });
|
||||
}
|
||||
|
||||
if (!user.signer.nip44) {
|
||||
throw new HTTPException(401, { message: 'No NIP-44 signer provided' });
|
||||
}
|
||||
|
||||
if (!relay) {
|
||||
throw new HTTPException(401, { message: 'No store provided' });
|
||||
}
|
||||
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
|
||||
const { data, error } = await validateAndParseWallet(relay, user.signer, pubkey, { signal });
|
||||
|
||||
if (error && error.code === 'wallet-not-found') {
|
||||
await next();
|
||||
return;
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return c.json({ error: error.message }, 400);
|
||||
}
|
||||
|
||||
const { mints, privkey } = data;
|
||||
|
||||
const nutzapsFilter: NostrFilter = { kinds: [9321], '#p': [pubkey], '#u': mints };
|
||||
|
||||
const lastRedeemedNutzap = await getLastRedeemedNutzap(relay, pubkey, { signal });
|
||||
if (lastRedeemedNutzap) {
|
||||
nutzapsFilter.since = lastRedeemedNutzap.created_at;
|
||||
}
|
||||
|
||||
const mintsToProofs = await getMintsToProofs(relay, nutzapsFilter, conf.relay, { signal });
|
||||
|
||||
for (const mint of Object.keys(mintsToProofs)) {
|
||||
try {
|
||||
const token = getEncodedToken({ mint, proofs: mintsToProofs[mint].proofs });
|
||||
|
||||
const cashuWallet = new CashuWallet(new CashuMint(mint));
|
||||
const receiveProofs = await cashuWallet.receive(token, { privkey });
|
||||
|
||||
const unspentProofs = await createEvent({
|
||||
kind: 7375,
|
||||
content: await user.signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify({
|
||||
mint,
|
||||
proofs: receiveProofs,
|
||||
}),
|
||||
),
|
||||
}, c);
|
||||
|
||||
const amount = receiveProofs.reduce((accumulator, current) => {
|
||||
return accumulator + current.amount;
|
||||
}, 0);
|
||||
|
||||
await createEvent({
|
||||
kind: 7376,
|
||||
content: await user.signer.nip44.encrypt(
|
||||
pubkey,
|
||||
JSON.stringify([
|
||||
['direction', 'in'],
|
||||
['amount', String(amount)],
|
||||
['e', unspentProofs.id, conf.relay, 'created'],
|
||||
]),
|
||||
),
|
||||
tags: mintsToProofs[mint].toBeRedeemed,
|
||||
}, c);
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: 'ditto.api.cashu.wallet.swap', error: errorJson(e) });
|
||||
}
|
||||
}
|
||||
|
||||
await next();
|
||||
};
|
||||
|
|
@@ -1,47 +0,0 @@
import { DenoUploader, IPFSUploader, S3Uploader } from '@ditto/uploaders';
import { BlossomUploader, NostrBuildUploader } from '@nostrify/nostrify/uploaders';
import { safeFetch } from '@soapbox/safe-fetch';

import { AppMiddleware } from '@/app.ts';

/** Set an uploader for the user. */
export const uploaderMiddleware: AppMiddleware = async (c, next) => {
  const { user, conf } = c.var;
  const signer = user?.signer;

  switch (conf.uploader) {
    case 's3':
      c.set(
        'uploader',
        new S3Uploader({
          accessKey: conf.s3.accessKey,
          bucket: conf.s3.bucket,
          endPoint: conf.s3.endPoint!,
          pathStyle: conf.s3.pathStyle,
          port: conf.s3.port,
          region: conf.s3.region!,
          secretKey: conf.s3.secretKey,
          sessionToken: conf.s3.sessionToken,
          useSSL: conf.s3.useSSL,
          baseUrl: conf.mediaDomain,
        }),
      );
      break;
    case 'ipfs':
      c.set('uploader', new IPFSUploader({ baseUrl: conf.mediaDomain, apiUrl: conf.ipfs.apiUrl, fetch: safeFetch }));
      break;
    case 'local':
      c.set('uploader', new DenoUploader({ baseUrl: conf.mediaDomain, dir: conf.uploadsDir }));
      break;
    case 'nostrbuild':
      c.set('uploader', new NostrBuildUploader({ endpoint: conf.nostrbuildEndpoint, signer, fetch: safeFetch }));
      break;
    case 'blossom':
      if (signer) {
        c.set('uploader', new BlossomUploader({ servers: conf.blossomServers, signer, fetch: safeFetch }));
      }
      break;
  }

  await next();
};
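A sketch of how a downstream handler could use the uploader this middleware sets. It assumes the uploader exposes an upload(file) method resolving to NIP-94-style tags; the route path, form field, and tag handling are all assumptions for illustration.

// Hypothetical usage, not from the source.
app.post('/hypothetical/media', async (c) => {
  const { uploader } = c.var;
  if (!uploader) return c.json({ error: 'No uploader configured' }, 500);

  const form = await c.req.formData();
  const file = form.get('file') as File;

  // Assumed to return tags like [['url', ...], ['m', ...], ...].
  const tags = await uploader.upload(file);
  const url = tags.find(([name]) => name === 'url')?.[1];

  return c.json({ url });
});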
@ -1,84 +0,0 @@
|
|||
import { DittoDB } from '@ditto/db';
|
||||
import { DittoConf } from '@ditto/conf';
|
||||
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
|
||||
|
||||
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
|
||||
import { hydrateEvents } from '@/storages/hydrate.ts';
|
||||
import { findReplyTag, getTagSet } from '@/utils/tags.ts';
|
||||
|
||||
interface GetEventOpts {
|
||||
db: DittoDB;
|
||||
conf: DittoConf;
|
||||
relay: NStore;
|
||||
signal?: AbortSignal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a Nostr event by its ID.
|
||||
* @deprecated Use `relay.query` directly.
|
||||
*/
|
||||
async function getEvent(id: string, opts: GetEventOpts): Promise<DittoEvent | undefined> {
|
||||
const filter: NostrFilter = { ids: [id], limit: 1 };
|
||||
const events = await opts.relay.query([filter], opts);
|
||||
const [event] = await hydrateEvents({ ...opts, events });
|
||||
return event;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a Nostr `set_metadata` event for a user's pubkey.
|
||||
* @deprecated Use `relay.query` directly.
|
||||
*/
|
||||
async function getAuthor(pubkey: string, opts: GetEventOpts): Promise<NostrEvent | undefined> {
|
||||
const events = await opts.relay.query([{ authors: [pubkey], kinds: [0], limit: 1 }], opts);
|
||||
const [event] = await hydrateEvents({ ...opts, events });
|
||||
return event;
|
||||
}
|
||||
|
||||
/** Get users the given pubkey follows. */
|
||||
const getFollows = async (relay: NStore, pubkey: string, signal?: AbortSignal): Promise<NostrEvent | undefined> => {
|
||||
const [event] = await relay.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { signal });
|
||||
return event;
|
||||
};
|
||||
|
||||
/** Get pubkeys the user follows. */
|
||||
async function getFollowedPubkeys(relay: NStore, pubkey: string, signal?: AbortSignal): Promise<Set<string>> {
|
||||
const event = await getFollows(relay, pubkey, signal);
|
||||
if (!event) return new Set();
|
||||
return getTagSet(event.tags, 'p');
|
||||
}
|
||||
|
||||
/** Get pubkeys the user follows, including the user's own pubkey. */
|
||||
async function getFeedPubkeys(relay: NStore, pubkey: string): Promise<Set<string>> {
|
||||
const authors = await getFollowedPubkeys(relay, pubkey);
|
||||
return authors.add(pubkey);
|
||||
}
|
||||
|
||||
async function getAncestors(store: NStore, event: NostrEvent, result: NostrEvent[] = []): Promise<NostrEvent[]> {
|
||||
if (result.length < 100) {
|
||||
const replyTag = findReplyTag(event.tags);
|
||||
const inReplyTo = replyTag ? replyTag[1] : undefined;
|
||||
|
||||
if (inReplyTo) {
|
||||
const [parentEvent] = await store.query([{ ids: [inReplyTo], until: event.created_at, limit: 1 }]);
|
||||
|
||||
if (parentEvent) {
|
||||
result.push(parentEvent);
|
||||
return getAncestors(store, parentEvent, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result.reverse();
|
||||
}
|
||||
|
||||
async function getDescendants(
|
||||
store: NStore,
|
||||
event: NostrEvent,
|
||||
signal?: AbortSignal,
|
||||
): Promise<NostrEvent[]> {
|
||||
return await store
|
||||
.query([{ kinds: [1], '#e': [event.id], since: event.created_at, limit: 200 }], { signal })
|
||||
.then((events) => events.filter(({ tags }) => findReplyTag(tags)?.[1] === event.id));
|
||||
}
|
||||
|
||||
export { getAncestors, getAuthor, getDescendants, getEvent, getFeedPubkeys, getFollowedPubkeys, getFollows };
|
||||
|
|
@ -1,77 +0,0 @@
|
|||
import { TestApp } from '@ditto/mastoapi/test';
|
||||
import { NSecSigner } from '@nostrify/nostrify';
|
||||
import { genEvent } from '@nostrify/nostrify/test';
|
||||
import { assertEquals } from '@std/assert';
|
||||
import { generateSecretKey } from 'nostr-tools';
|
||||
|
||||
import route from './customEmojisRoute.ts';
|
||||
|
||||
Deno.test('customEmojisRoute', async (t) => {
|
||||
await using test = new TestApp(route);
|
||||
const { relay } = test.var;
|
||||
|
||||
await t.step('unauth', async () => {
|
||||
const response = await test.api.get('/');
|
||||
const body = await response.json();
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
assertEquals(body, []);
|
||||
});
|
||||
|
||||
const sk = generateSecretKey();
|
||||
const user = test.user({ relay, signer: new NSecSigner(sk) });
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
|
||||
await t.step('no emojis', async () => {
|
||||
const response = await test.api.get('/');
|
||||
const body = await response.json();
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
assertEquals(body, []);
|
||||
});
|
||||
|
||||
await t.step('with emoji packs', async () => {
|
||||
const pack = genEvent({
|
||||
kind: 30030,
|
||||
tags: [
|
||||
['d', 'soapbox'],
|
||||
['emoji', 'soapbox', 'https://soapbox.pub/favicon.ico'],
|
||||
['emoji', 'ditto', 'https://ditto.pub/favicon.ico'],
|
||||
],
|
||||
}, sk);
|
||||
|
||||
const list = genEvent({
|
||||
kind: 10030,
|
||||
tags: [
|
||||
['a', `30030:${pubkey}:soapbox`],
|
||||
['emoji', 'gleasonator', 'https://gleasonator.dev/favicon.ico'],
|
||||
],
|
||||
}, sk);
|
||||
|
||||
await relay.event(pack);
|
||||
await relay.event(list);
|
||||
|
||||
const response = await test.api.get('/');
|
||||
const body = await response.json();
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
assertEquals(body, [{
|
||||
shortcode: 'gleasonator',
|
||||
url: 'https://gleasonator.dev/favicon.ico',
|
||||
static_url: 'https://gleasonator.dev/favicon.ico',
|
||||
visible_in_picker: true,
|
||||
}, {
|
||||
shortcode: 'soapbox',
|
||||
url: 'https://soapbox.pub/favicon.ico',
|
||||
static_url: 'https://soapbox.pub/favicon.ico',
|
||||
visible_in_picker: true,
|
||||
category: 'soapbox',
|
||||
}, {
|
||||
shortcode: 'ditto',
|
||||
url: 'https://ditto.pub/favicon.ico',
|
||||
static_url: 'https://ditto.pub/favicon.ico',
|
||||
visible_in_picker: true,
|
||||
category: 'soapbox',
|
||||
}]);
|
||||
});
|
||||
});
|
||||
|
|
@@ -1,37 +0,0 @@
import { userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoRoute } from '@ditto/mastoapi/router';

import { getCustomEmojis } from '@/utils/custom-emoji.ts';

const route = new DittoRoute();

interface MastodonCustomEmoji {
  shortcode: string;
  url: string;
  static_url: string;
  visible_in_picker: boolean;
  category?: string;
}

route.get('/', userMiddleware({ required: false }), async (c) => {
  const { user } = c.var;

  if (!user) {
    return c.json([]);
  }

  const pubkey = await user.signer.getPublicKey();
  const emojis = await getCustomEmojis(pubkey, c.var);

  return c.json([...emojis.entries()].map(([shortcode, data]): MastodonCustomEmoji => {
    return {
      shortcode,
      url: data.url.toString(),
      static_url: data.url.toString(),
      visible_in_picker: true,
      category: data.category,
    };
  }));
});

export default route;
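For reference, a sketch of the Mastodon-style payload this route produces; the shortcode, URLs, and category are placeholders, and the mount point (conventionally /api/v1/custom_emojis in the Mastodon API) is an assumption here.

// Hypothetical response body (placeholder values).
const exampleEmojis: MastodonCustomEmoji[] = [
  {
    shortcode: 'ditto',
    url: 'https://example.com/emoji/ditto.png',
    static_url: 'https://example.com/emoji/ditto.png',
    visible_in_picker: true,
    category: 'example-pack',
  },
];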
@ -1,59 +0,0 @@
|
|||
import { TestApp } from '@ditto/mastoapi/test';
|
||||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import route from './dittoNamesRoute.ts';
|
||||
|
||||
Deno.test('POST / creates a name request event', async () => {
|
||||
await using app = new TestApp(route);
|
||||
const { conf, relay } = app.var;
|
||||
|
||||
const user = app.user();
|
||||
|
||||
const response = await app.api.post('/', { name: 'Alex@Ditto.pub', reason: 'for testing' });
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
|
||||
const [event] = await relay.query([{ kinds: [3036], authors: [await user.signer.getPublicKey()] }]);
|
||||
|
||||
assertEquals(event?.tags, [
|
||||
['r', 'Alex@Ditto.pub'],
|
||||
['r', 'alex@ditto.pub'],
|
||||
['L', 'nip05.domain'],
|
||||
['l', 'ditto.pub', 'nip05.domain'],
|
||||
['p', await conf.signer.getPublicKey()],
|
||||
]);
|
||||
|
||||
assertEquals(event?.content, 'for testing');
|
||||
});
|
||||
|
||||
Deno.test('POST / can be called multiple times with the same name', async () => {
|
||||
await using app = new TestApp(route);
|
||||
|
||||
app.user();
|
||||
|
||||
const response1 = await app.api.post('/', { name: 'alex@ditto.pub' });
|
||||
const response2 = await app.api.post('/', { name: 'alex@ditto.pub' });
|
||||
|
||||
assertEquals(response1.status, 200);
|
||||
assertEquals(response2.status, 200);
|
||||
});
|
||||
|
||||
Deno.test('POST / returns 400 if the name has already been granted', async () => {
|
||||
await using app = new TestApp(route);
|
||||
const { conf, relay } = app.var;
|
||||
|
||||
app.user();
|
||||
|
||||
const grant = await conf.signer.signEvent({
|
||||
kind: 30360,
|
||||
tags: [['d', 'alex@ditto.pub']],
|
||||
content: '',
|
||||
created_at: 0,
|
||||
});
|
||||
|
||||
await relay.event(grant);
|
||||
|
||||
const response = await app.api.post('/', { name: 'alex@ditto.pub' });
|
||||
|
||||
assertEquals(response.status, 400);
|
||||
});
|
||||
|
|
@ -1,130 +0,0 @@
|
|||
import { paginationMiddleware, userMiddleware } from '@ditto/mastoapi/middleware';
|
||||
import { DittoRoute } from '@ditto/mastoapi/router';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { createEvent } from '@/utils/api.ts';
|
||||
import { hydrateEvents } from '@/storages/hydrate.ts';
|
||||
import { renderNameRequest } from '@/views/ditto.ts';
|
||||
import { booleanParamSchema } from '@/schema.ts';
|
||||
import { NostrFilter } from '@nostrify/nostrify';
|
||||
|
||||
const nameRequestSchema = z.object({
|
||||
name: z.string().email(),
|
||||
reason: z.string().max(500).optional(),
|
||||
});
|
||||
|
||||
const route = new DittoRoute();
|
||||
|
||||
route.post('/', userMiddleware(), async (c) => {
|
||||
const { conf, relay, user } = c.var;
|
||||
|
||||
const result = nameRequestSchema.safeParse(await c.req.json());
|
||||
|
||||
if (!result.success) {
|
||||
return c.json({ error: 'Invalid username', schema: result.error }, 422);
|
||||
}
|
||||
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
const adminPubkey = await conf.signer.getPublicKey();
|
||||
|
||||
const { name, reason } = result.data;
|
||||
const [_localpart, domain] = name.split('@');
|
||||
|
||||
if (domain.toLowerCase() !== conf.url.host.toLowerCase()) {
|
||||
return c.json({ error: 'Unsupported domain' }, 422);
|
||||
}
|
||||
|
||||
const d = name.toLowerCase();
|
||||
|
||||
const [grant] = await relay.query([{ kinds: [30360], authors: [adminPubkey], '#d': [d] }]);
|
||||
if (grant) {
|
||||
return c.json({ error: 'Name has already been granted' }, 400);
|
||||
}
|
||||
|
||||
const [pending] = await relay.query([{
|
||||
kinds: [30383],
|
||||
authors: [adminPubkey],
|
||||
'#p': [pubkey],
|
||||
'#k': ['3036'],
|
||||
'#r': [d],
|
||||
'#n': ['pending'],
|
||||
limit: 1,
|
||||
}]);
|
||||
if (pending) {
|
||||
return c.json({ error: 'You have already requested that name, and it is pending approval by staff' }, 400);
|
||||
}
|
||||
|
||||
const tags: string[][] = [['r', name]];
|
||||
|
||||
if (name !== name.toLowerCase()) {
|
||||
tags.push(['r', name.toLowerCase()]);
|
||||
}
|
||||
|
||||
const event = await createEvent({
|
||||
kind: 3036,
|
||||
content: reason,
|
||||
tags: [
|
||||
...tags,
|
||||
['L', 'nip05.domain'],
|
||||
['l', domain.toLowerCase(), 'nip05.domain'],
|
||||
['p', await conf.signer.getPublicKey()],
|
||||
],
|
||||
}, c);
|
||||
|
||||
await hydrateEvents({ ...c.var, events: [event] });
|
||||
|
||||
const nameRequest = await renderNameRequest(event);
|
||||
return c.json(nameRequest);
|
||||
});
|
||||
|
||||
const nameRequestsSchema = z.object({
|
||||
approved: booleanParamSchema.optional(),
|
||||
rejected: booleanParamSchema.optional(),
|
||||
});
|
||||
|
||||
route.get('/', paginationMiddleware(), userMiddleware(), async (c) => {
|
||||
const { conf, relay, user, pagination } = c.var;
|
||||
const pubkey = await user!.signer.getPublicKey();
|
||||
|
||||
const { approved, rejected } = nameRequestsSchema.parse(c.req.query());
|
||||
|
||||
const filter: NostrFilter = {
|
||||
kinds: [30383],
|
||||
authors: [await conf.signer.getPublicKey()],
|
||||
'#k': ['3036'],
|
||||
'#p': [pubkey],
|
||||
...pagination,
|
||||
};
|
||||
|
||||
if (approved) {
|
||||
filter['#n'] = ['approved'];
|
||||
}
|
||||
if (rejected) {
|
||||
filter['#n'] = ['rejected'];
|
||||
}
|
||||
|
||||
const orig = await relay.query([filter]);
|
||||
const ids = new Set<string>();
|
||||
|
||||
for (const event of orig) {
|
||||
const d = event.tags.find(([name]) => name === 'd')?.[1];
|
||||
if (d) {
|
||||
ids.add(d);
|
||||
}
|
||||
}
|
||||
|
||||
if (!ids.size) {
|
||||
return c.json([]);
|
||||
}
|
||||
|
||||
const events = await relay.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }])
|
||||
.then((events) => hydrateEvents({ ...c.var, events }));
|
||||
|
||||
const nameRequests = await Promise.all(
|
||||
events.map((event) => renderNameRequest(event)),
|
||||
);
|
||||
|
||||
return c.var.paginate(orig, nameRequests);
|
||||
});
|
||||
|
||||
export default route;
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
import { TestApp } from '@ditto/mastoapi/test';
|
||||
import { assertEquals } from '@std/assert';
|
||||
import { nip19 } from 'nostr-tools';
|
||||
|
||||
import route from './pleromaAdminPermissionGroupsRoute.ts';
|
||||
|
||||
Deno.test('POST /admin returns 403 if user is not an admin', async () => {
|
||||
await using app = new TestApp(route);
|
||||
|
||||
app.user();
|
||||
|
||||
const response = await app.api.post('/admin', { nicknames: ['alex@ditto.pub'] });
|
||||
|
||||
assertEquals(response.status, 403);
|
||||
});
|
||||
|
||||
Deno.test('POST /admin promotes to admin', async () => {
|
||||
await using app = new TestApp(route);
|
||||
const { conf, relay } = app.var;
|
||||
|
||||
await app.admin();
|
||||
|
||||
const pawn = app.createUser();
|
||||
const pubkey = await pawn.signer.getPublicKey();
|
||||
|
||||
const response = await app.api.post('/admin', { nicknames: [nip19.npubEncode(pubkey)] });
|
||||
const json = await response.json();
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
assertEquals(json, { is_admin: true });
|
||||
|
||||
const [event] = await relay.query([{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey] }]);
|
||||
|
||||
assertEquals(event.tags, [['d', pubkey], ['n', 'admin']]);
|
||||
});
|
||||
|
||||
Deno.test('POST /moderator promotes to moderator', async () => {
|
||||
await using app = new TestApp(route);
|
||||
const { conf, relay } = app.var;
|
||||
|
||||
await app.admin();
|
||||
|
||||
const pawn = app.createUser();
|
||||
const pubkey = await pawn.signer.getPublicKey();
|
||||
|
||||
const response = await app.api.post('/moderator', { nicknames: [nip19.npubEncode(pubkey)] });
|
||||
const json = await response.json();
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
assertEquals(json, { is_moderator: true });
|
||||
|
||||
const [event] = await relay.query([{ kinds: [30382], authors: [await conf.signer.getPublicKey()], '#d': [pubkey] }]);
|
||||
|
||||
assertEquals(event.tags, [['d', pubkey], ['n', 'moderator']]);
|
||||
});
|
||||
|
||||
Deno.test('POST /:group with an invalid group returns 422', async () => {
|
||||
await using app = new TestApp(route);
|
||||
|
||||
await app.admin();
|
||||
|
||||
const pawn = app.createUser();
|
||||
const pubkey = await pawn.signer.getPublicKey();
|
||||
|
||||
const response = await app.api.post('/yolo', { nicknames: [nip19.npubEncode(pubkey)] });
|
||||
|
||||
assertEquals(response.status, 422);
|
||||
});
|
||||
|
|
@@ -1,40 +0,0 @@
import { userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoRoute } from '@ditto/mastoapi/router';
import { z } from 'zod';

import { parseBody, updateUser } from '@/utils/api.ts';
import { lookupPubkey } from '@/utils/lookup.ts';

const route = new DittoRoute();

const pleromaPromoteAdminSchema = z.object({
  nicknames: z.string().array(),
});

route.post('/:group', userMiddleware({ role: 'admin' }), async (c) => {
  const body = await parseBody(c.req.raw);
  const result = pleromaPromoteAdminSchema.safeParse(body);
  const group = c.req.param('group');

  if (!result.success) {
    return c.json({ error: 'Bad request', schema: result.error }, 422);
  }

  if (!['admin', 'moderator'].includes(group)) {
    return c.json({ error: 'Bad request', schema: 'Invalid group' }, 422);
  }

  const { data } = result;
  const { nicknames } = data;

  for (const nickname of nicknames) {
    const pubkey = await lookupPubkey(nickname, c.var);
    if (pubkey) {
      await updateUser(pubkey, { [group]: true }, c);
    }
  }

  return c.json({ [`is_${group}`]: true }, 200);
});

export default route;
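A sketch of the request and response shapes this route accepts, modeled on the Pleroma admin permission-group API. The handler above only defines '/:group', so the exact mount path is an assumption, and the nickname value is a placeholder.

// Hypothetical request/response for POST .../:group where :group is 'admin' or 'moderator'.
interface PromoteRequest {
  nicknames: string[]; // npubs or NIP-05 names resolvable by lookupPubkey
}

const exampleRequest: PromoteRequest = { nicknames: ['npub1exampleplaceholder'] };
const exampleResponse = { is_admin: true }; // or { is_moderator: true } for the moderator group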
@ -1,105 +0,0 @@
|
|||
import { DittoConf } from '@ditto/conf';
|
||||
import { DittoPolyPg } from '@ditto/db';
|
||||
import { TestApp } from '@ditto/mastoapi/test';
|
||||
import { genEvent, MockRelay } from '@nostrify/nostrify/test';
|
||||
import { assertEquals } from '@std/assert';
|
||||
|
||||
import { DittoPgStore } from '@/storages/DittoPgStore.ts';
|
||||
import { DittoRelayStore } from '@/storages/DittoRelayStore.ts';
|
||||
|
||||
import route from './pleromaStatusesRoute.ts';
|
||||
|
||||
import type { MastodonStatus } from '@ditto/mastoapi/types';
|
||||
|
||||
Deno.test('Emoji reactions', async (t) => {
|
||||
await using test = createTestApp();
|
||||
const { relay } = test.var;
|
||||
|
||||
const mario = test.createUser();
|
||||
const luigi = test.createUser();
|
||||
|
||||
const note = genEvent({ kind: 1 });
|
||||
await relay.event(note);
|
||||
|
||||
await relay.event(genEvent({ kind: 10030, tags: [['emoji', 'ditto', 'https://ditto.pub/favicon.ico']] }, luigi.sk));
|
||||
|
||||
await t.step('PUT /:id/reactions/:emoji', async () => {
|
||||
test.user(mario);
|
||||
|
||||
const response = await test.api.put(`/${note.id}/reactions/🚀`);
|
||||
const json = await response.json();
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
assertEquals(json.pleroma.emoji_reactions, [{ name: '🚀', me: true, count: 1 }]);
|
||||
});
|
||||
|
||||
await t.step('PUT /:id/reactions/:emoji (custom emoji)', async () => {
|
||||
test.user(luigi);
|
||||
|
||||
const response = await test.api.put(`/${note.id}/reactions/:ditto:`);
|
||||
const json: MastodonStatus = await response.json();
|
||||
|
||||
assertEquals(
|
||||
json.pleroma.emoji_reactions.sort((a, b) => a.name.localeCompare(b.name)),
|
||||
[
|
||||
{ name: '🚀', me: false, count: 1 },
|
||||
{ name: 'ditto', me: true, count: 1, url: 'https://ditto.pub/favicon.ico' },
|
||||
],
|
||||
);
|
||||
});
|
||||
|
||||
await t.step('GET /:id/reactions', async () => {
|
||||
test.user(mario);
|
||||
|
||||
const response = await test.api.get(`/${note.id}/reactions`);
|
||||
const json = await response.json();
|
||||
|
||||
(json as MastodonStatus['pleroma']['emoji_reactions']).sort((a, b) => a.name.localeCompare(b.name));
|
||||
|
||||
const [
|
||||
{ accounts: [marioAccount] },
|
||||
{ accounts: [luigiAccount] },
|
||||
] = json;
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
|
||||
assertEquals(json, [
|
||||
{ name: '🚀', me: true, count: 1, accounts: [marioAccount] },
|
||||
{ name: 'ditto', me: false, count: 1, accounts: [luigiAccount], url: 'https://ditto.pub/favicon.ico' },
|
||||
]);
|
||||
});
|
||||
|
||||
await t.step('DELETE /:id/reactions/:emoji', async () => {
|
||||
test.user(mario);
|
||||
|
||||
const response = await test.api.delete(`/${note.id}/reactions/🚀`);
|
||||
const json = await response.json();
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
|
||||
assertEquals(json.pleroma.emoji_reactions, [
|
||||
{ name: 'ditto', me: false, count: 1, url: 'https://ditto.pub/favicon.ico' },
|
||||
]);
|
||||
});
|
||||
|
||||
await t.step('DELETE /:id/reactions/:emoji (custom emoji)', async () => {
|
||||
test.user(luigi);
|
||||
|
||||
const response = await test.api.delete(`/${note.id}/reactions/:ditto:`);
|
||||
const json = await response.json();
|
||||
|
||||
assertEquals(response.status, 200);
|
||||
assertEquals(json.pleroma.emoji_reactions, []);
|
||||
});
|
||||
});
|
||||
|
||||
// TODO: modify `TestApp` itself to avoid this boilerplate.
|
||||
function createTestApp(): TestApp {
|
||||
const conf = new DittoConf(Deno.env);
|
||||
const db = new DittoPolyPg(conf.databaseUrl);
|
||||
const pool = new MockRelay();
|
||||
const store = new DittoPgStore({ conf, db, notify: false });
|
||||
const relay = new DittoRelayStore({ conf, db, pool, relay: store });
|
||||
|
||||
return new TestApp(route, { conf, db, relay });
|
||||
}
|
||||
|
|
@ -1,221 +0,0 @@
|
|||
import { paginationMiddleware, userMiddleware } from '@ditto/mastoapi/middleware';
|
||||
import { DittoRoute } from '@ditto/mastoapi/router';
|
||||
|
||||
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
|
||||
import { hydrateEvents } from '@/storages/hydrate.ts';
|
||||
import { createEvent } from '@/utils/api.ts';
|
||||
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
|
||||
import { renderStatus } from '@/views/mastodon/statuses.ts';
|
||||
import { HTTPException } from '@hono/hono/http-exception';
|
||||
|
||||
import { getCustomEmojis, parseEmojiInput } from '@/utils/custom-emoji.ts';
|
||||
|
||||
const route = new DittoRoute();
|
||||
|
||||
/*
|
||||
* React to a status.
|
||||
* https://docs.pleroma.social/backend/development/API/pleroma_api/#put-apiv1pleromastatusesidreactionsemoji
|
||||
*/
|
||||
route.put('/:id{[0-9a-f]{64}}/reactions/:emoji', userMiddleware(), async (c) => {
|
||||
const { relay, user, conf, signal } = c.var;
|
||||
|
||||
const params = c.req.param();
|
||||
const result = parseEmojiParam(params.emoji);
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
|
||||
const [event] = await relay.query([{ ids: [params.id] }], { signal });
|
||||
if (!event) {
|
||||
return c.json({ error: 'Event not found' }, 404);
|
||||
}
|
||||
|
||||
const tags: string[][] = [
|
||||
['e', event.id, conf.relay, event.pubkey],
|
||||
['p', event.pubkey, conf.relay],
|
||||
];
|
||||
|
||||
if (result.type === 'custom') {
|
||||
const emojis = await getCustomEmojis(pubkey, c.var);
|
||||
const emoji = emojis.get(result.shortcode);
|
||||
|
||||
if (!emoji) {
|
||||
return c.json({ error: 'Custom emoji not found' }, 404);
|
||||
}
|
||||
|
||||
tags.push(['emoji', result.shortcode, emoji.url.href]);
|
||||
}
|
||||
|
||||
let content: string;
|
||||
|
||||
switch (result.type) {
|
||||
case 'native':
|
||||
content = result.native;
|
||||
break;
|
||||
case 'custom':
|
||||
content = `:${result.shortcode}:`;
|
||||
break;
|
||||
}
|
||||
|
||||
await createEvent({ kind: 7, content, tags }, c);
|
||||
await hydrateEvents({ ...c.var, events: [event] });
|
||||
|
||||
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
|
||||
return c.json(status);
|
||||
});
|
||||
|
||||
/*
|
||||
* Delete reactions to a status.
|
||||
* https://docs.pleroma.social/backend/development/API/pleroma_api/#delete-apiv1pleromastatusesidreactionsemoji
|
||||
*/
|
||||
route.delete('/:id{[0-9a-f]{64}}/reactions/:emoji', userMiddleware(), async (c) => {
|
||||
const { relay, user, signal } = c.var;
|
||||
|
||||
const params = c.req.param();
|
||||
const pubkey = await user.signer.getPublicKey();
|
||||
|
||||
const [event] = await relay.query([{ ids: [params.id] }], { signal });
|
||||
|
||||
if (!event) {
|
||||
return c.json({ error: 'Status not found' }, 404);
|
||||
}
|
||||
|
||||
const events = await relay.query([
|
||||
{ kinds: [7], authors: [pubkey], '#e': [params.id] },
|
||||
], { signal });
|
||||
|
||||
const e = new Set<string>();
|
||||
|
||||
for (const { id, content } of events) {
|
||||
if (content === params.emoji || content === `:${params.emoji}:`) {
|
||||
e.add(id);
|
||||
}
|
||||
}
|
||||
|
||||
if (!e.size) {
|
||||
return c.json({ error: 'Reaction not found' }, 404);
|
||||
}
|
||||
|
||||
await createEvent({
|
||||
kind: 5,
|
||||
tags: [...e].map((id) => ['e', id]),
|
||||
}, c);
|
||||
|
||||
await hydrateEvents({ ...c.var, events: [event] });
|
||||
|
||||
const status = await renderStatus(relay, event, { viewerPubkey: pubkey });
|
||||
return c.json(status);
|
||||
});
|
||||
|
||||
/*
|
||||
* Get an object of emoji to account mappings with accounts that reacted to the post.
|
||||
* https://docs.pleroma.social/backend/development/API/pleroma_api/#get-apiv1pleromastatusesidreactions
|
||||
*/
|
||||
route.get(
|
||||
'/:id{[0-9a-f]{64}}/reactions/:emoji?',
|
||||
paginationMiddleware({ limit: 100 }),
|
||||
userMiddleware({ required: false }),
|
||||
async (c) => {
|
||||
const { relay, user, pagination, paginate } = c.var;
|
||||
|
||||
const params = c.req.param();
|
||||
const result = params.emoji ? parseEmojiParam(params.emoji) : undefined;
|
||||
const pubkey = await user?.signer.getPublicKey();
|
||||
|
||||
const events = await relay.query([{ kinds: [7], '#e': [params.id], ...pagination }])
|
||||
.then((events) =>
|
||||
events.filter((event) => {
|
||||
if (!result) return true;
|
||||
|
||||
switch (result.type) {
|
||||
case 'native':
|
||||
return event.content === result.native;
|
||||
case 'custom':
|
||||
return event.content === `:${result.shortcode}:`;
|
||||
}
|
||||
})
|
||||
)
|
||||
.then((events) => hydrateEvents({ ...c.var, events }));
|
||||
|
||||
/** Events grouped by emoji key. */
|
||||
const byEmojiKey = events.reduce((acc, event) => {
|
||||
const result = parseEmojiInput(event.content);
|
||||
|
||||
if (!result || result.type === 'basic') {
|
||||
return acc;
|
||||
}
|
||||
|
||||
let url: URL | undefined;
|
||||
|
||||
if (result.type === 'custom') {
|
||||
const tag = event.tags.find(([name, value]) => name === 'emoji' && value === result.shortcode);
|
||||
try {
|
||||
url = new URL(tag![2]);
|
||||
} catch {
|
||||
return acc;
|
||||
}
|
||||
}
|
||||
|
||||
let key: string;
|
||||
switch (result.type) {
|
||||
case 'native':
|
||||
key = result.native;
|
||||
break;
|
||||
case 'custom':
|
||||
key = `${result.shortcode}:${url}`;
|
||||
break;
|
||||
}
|
||||
|
||||
acc[key] = acc[key] || [];
|
||||
acc[key].push(event);
|
||||
|
||||
return acc;
|
||||
}, {} as Record<string, DittoEvent[]>);
|
||||
|
||||
const results = await Promise.all(
|
||||
Object.entries(byEmojiKey).map(async ([key, events]) => {
|
||||
let name: string = key;
|
||||
let url: string | undefined;
|
||||
|
||||
// Custom emojis: `<shortcode>:<url>`
|
||||
try {
|
||||
const [shortcode, ...rest] = key.split(':');
|
||||
|
||||
url = new URL(rest.join(':')).toString();
|
||||
name = shortcode;
|
||||
} catch {
|
||||
// fallthrough
|
||||
}
|
||||
|
||||
return {
|
||||
name,
|
||||
count: events.length,
|
||||
me: pubkey && events.some((event) => event.pubkey === pubkey),
|
||||
accounts: await Promise.all(
|
||||
events.map((event) => event.author ? renderAccount(event.author) : accountFromPubkey(event.pubkey)),
|
||||
),
|
||||
url,
|
||||
};
|
||||
}),
|
||||
);
|
||||
|
||||
return paginate(events, results);
|
||||
},
|
||||
);
|
||||
|
||||
/** Determine if the input is a native or custom emoji, returning a structured object or throwing an error. */
|
||||
function parseEmojiParam(input: string):
|
||||
| { type: 'native'; native: string }
|
||||
| { type: 'custom'; shortcode: string } {
|
||||
if (/^\w+$/.test(input)) {
|
||||
input = `:${input}:`; // Pleroma API supports the `emoji` param with or without colons.
|
||||
}
|
||||
|
||||
const result = parseEmojiInput(input);
|
||||
|
||||
if (!result || result.type === 'basic') {
|
||||
throw new HTTPException(400, { message: 'Invalid emoji' });
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export default route;
|
||||
|
|
@@ -1,14 +0,0 @@
import * as Sentry from '@sentry/deno';
import { logi } from '@soapbox/logi';

import type { DittoConf } from '@ditto/conf';

/** Start Sentry, if configured. */
export function startSentry(conf: DittoConf): void {
  if (conf.sentryDsn) {
    logi({ level: 'info', ns: 'ditto.sentry', msg: 'Sentry enabled.', enabled: true });
    Sentry.init({ dsn: conf.sentryDsn });
  } else {
    logi({ level: 'info', ns: 'ditto.sentry', msg: 'Sentry not configured. Skipping.', enabled: false });
  }
}
@ -1,117 +0,0 @@
|
|||
// deno-lint-ignore-file require-await
|
||||
import { HTTPException } from '@hono/hono/http-exception';
|
||||
import { NConnectSigner, NostrEvent, NostrSigner, NRelay } from '@nostrify/nostrify';
|
||||
|
||||
interface ConnectSignerOpts {
|
||||
bunkerPubkey: string;
|
||||
userPubkey: string;
|
||||
signer: NostrSigner;
|
||||
relay: NRelay;
|
||||
relays?: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* NIP-46 signer.
|
||||
*
|
||||
* Simple extension of nostrify's `NConnectSigner`, with our options to keep it DRY.
|
||||
*/
|
||||
export class ConnectSigner implements NostrSigner {
|
||||
private signer: NConnectSigner;
|
||||
|
||||
constructor(private opts: ConnectSignerOpts) {
|
||||
const { relay, signer } = this.opts;
|
||||
|
||||
this.signer = new NConnectSigner({
|
||||
encryption: 'nip44',
|
||||
pubkey: this.opts.bunkerPubkey,
|
||||
relay,
|
||||
signer,
|
||||
timeout: 60_000,
|
||||
});
|
||||
}
|
||||
|
||||
async signEvent(event: Omit<NostrEvent, 'id' | 'pubkey' | 'sig'>): Promise<NostrEvent> {
|
||||
try {
|
||||
return await this.signer.signEvent(event);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.name === 'AbortError') {
|
||||
throw new HTTPException(408, { message: 'The event was not signed quickly enough' });
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
readonly nip04 = {
|
||||
encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
|
||||
try {
|
||||
return await this.signer.nip04.encrypt(pubkey, plaintext);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.name === 'AbortError') {
|
||||
throw new HTTPException(408, {
|
||||
message: 'Text was not encrypted quickly enough',
|
||||
});
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
|
||||
try {
|
||||
return await this.signer.nip04.decrypt(pubkey, ciphertext);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.name === 'AbortError') {
|
||||
throw new HTTPException(408, {
|
||||
message: 'Text was not decrypted quickly enough',
|
||||
});
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
readonly nip44 = {
|
||||
encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
|
||||
try {
|
||||
return await this.signer.nip44.encrypt(pubkey, plaintext);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.name === 'AbortError') {
|
||||
throw new HTTPException(408, {
|
||||
message: 'Text was not encrypted quickly enough',
|
||||
});
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
|
||||
try {
|
||||
return await this.signer.nip44.decrypt(pubkey, ciphertext);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.name === 'AbortError') {
|
||||
throw new HTTPException(408, {
|
||||
message: 'Text was not decrypted quickly enough',
|
||||
});
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Prevent unnecessary NIP-46 round-trips.
|
||||
async getPublicKey(): Promise<string> {
|
||||
return this.opts.userPubkey;
|
||||
}
|
||||
|
||||
/** Get the user's relays if they passed in an `nprofile` auth token. */
|
||||
async getRelays(): Promise<Record<string, { read: boolean; write: boolean }>> {
|
||||
return this.opts.relays?.reduce<Record<string, { read: boolean; write: boolean }>>((acc, relay) => {
|
||||
acc[relay] = { read: true, write: true };
|
||||
return acc;
|
||||
}, {}) ?? {};
|
||||
}
|
||||
}
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
import { logi } from '@soapbox/logi';
|
||||
import { NostrEvent, NostrFilter, NostrRelayCLOSED, NostrRelayEOSE, NostrRelayEVENT, NRelay } from '@nostrify/nostrify';
|
||||
|
||||
import { errorJson } from '@/utils/log.ts';
|
||||
import { purifyEvent } from '@/utils/purify.ts';
|
||||
|
||||
interface DittoAPIStoreOpts {
|
||||
pool: NRelay;
|
||||
relay: NRelay;
|
||||
}
|
||||
|
||||
/**
|
||||
* Store used by Ditto's Mastodon API implementation.
|
||||
* It extends the RelayStore to publish events to the wider Nostr network.
|
||||
*/
|
||||
export class DittoAPIStore implements NRelay {
|
||||
private ns = 'ditto.api.store';
|
||||
|
||||
constructor(private opts: DittoAPIStoreOpts) {}
|
||||
|
||||
req(
|
||||
filters: NostrFilter[],
|
||||
opts?: { signal?: AbortSignal },
|
||||
): AsyncIterable<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED> {
|
||||
const { relay } = this.opts;
|
||||
return relay.req(filters, opts);
|
||||
}
|
||||
|
||||
query(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise<NostrEvent[]> {
|
||||
const { relay } = this.opts;
|
||||
return relay.query(filters, opts);
|
||||
}
|
||||
|
||||
async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise<void> {
|
||||
const { pool, relay } = this.opts;
|
||||
const { id, kind } = event;
|
||||
|
||||
await relay.event(event, opts);
|
||||
|
||||
(async () => {
|
||||
try {
|
||||
// `purifyEvent` is important, or you will suffer.
|
||||
await pool.event(purifyEvent(event), opts);
|
||||
} catch (e) {
|
||||
logi({ level: 'error', ns: this.ns, source: 'publish', id, kind, error: errorJson(e) });
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
async close(): Promise<void> {
|
||||
const { pool, relay } = this.opts;
|
||||
|
||||
await pool.close();
|
||||
await relay.close();
|
||||
}
|
||||
|
||||
[Symbol.asyncDispose](): Promise<void> {
|
||||
return this.close();
|
||||
}
|
||||
}
|
||||
|
|
@ -1,661 +0,0 @@
|
|||
// deno-lint-ignore-file require-await
|
||||
|
||||
import { type DittoConf } from '@ditto/conf';
|
||||
import { type DittoDB, type DittoTables } from '@ditto/db';
|
||||
import { detectLanguage } from '@ditto/lang';
|
||||
import { NPostgres, NPostgresSchema } from '@nostrify/db';
|
||||
import { dbEventsCounter, internalSubscriptionsBytesGauge, internalSubscriptionsSizeGauge } from '@ditto/metrics';
|
||||
import {
|
||||
NIP50,
|
||||
NKinds,
|
||||
NostrEvent,
|
||||
NostrFilter,
|
||||
NostrRelayCLOSED,
|
||||
NostrRelayEOSE,
|
||||
NostrRelayEVENT,
|
||||
NSchema as n,
|
||||
} from '@nostrify/nostrify';
|
||||
import { Machina } from '@nostrify/nostrify/utils';
|
||||
import { logi } from '@soapbox/logi';
|
||||
import { JsonValue } from '@std/json';
|
||||
import { LanguageCode } from 'iso-639-1';
|
||||
import { Kysely } from 'kysely';
|
||||
import linkify from 'linkifyjs';
|
||||
import { LRUCache } from 'lru-cache';
|
||||
import { matchFilter, nip27 } from 'nostr-tools';
|
||||
import tldts from 'tldts';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { RelayError } from '@/RelayError.ts';
|
||||
import { isNostrId } from '@/utils.ts';
|
||||
import { abortError } from '@/utils/abort.ts';
|
||||
import { purifyEvent } from '@/utils/purify.ts';
|
||||
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
|
||||
import { getMediaLinks } from '@/utils/note.ts';
|
||||
import { updateStats } from '@/utils/stats.ts';
|
||||
|
||||
/** Function to decide whether or not to index a tag. */
|
||||
type TagCondition = (opts: TagConditionOpts) => boolean;
|
||||
|
||||
/** Options for the tag condition function. */
|
||||
interface TagConditionOpts {
|
||||
/** Nostr event whose tags are being indexed. */
|
||||
event: NostrEvent;
|
||||
/** Count of the current tag name so far. Each tag name has a separate counter starting at 0. */
|
||||
count: number;
|
||||
/** Overall tag index. */
|
||||
index: number;
|
||||
/** Current tag value. */
|
||||
value: string;
|
||||
}
|
||||
|
||||
/** Options for the EventsDB store. */
|
||||
interface DittoPgStoreOpts {
|
||||
/** Kysely instance to use. */
|
||||
db: DittoDB;
|
||||
/** Ditto configuration. */
|
||||
conf: DittoConf;
|
||||
/** Timeout in milliseconds for database queries. */
|
||||
timeout?: number;
|
||||
/** Whether the event returned should be a Nostr event or a Ditto event. Defaults to false. */
|
||||
pure?: boolean;
|
||||
/** Chunk size for streaming events. Defaults to 20. */
|
||||
chunkSize?: number;
|
||||
/** Max age (in **seconds**) an event can be to be fulfilled to realtime subscribers. */
|
||||
maxAge?: number;
|
||||
/** Whether to listen for events from the database with NOTIFY. */
|
||||
notify?: boolean;
|
||||
}
|
||||
|
||||
/** Realtime subscription. */
|
||||
interface Subscription {
|
||||
filters: NostrFilter[];
|
||||
machina: Machina<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED>;
|
||||
}
|
||||
|
||||
/** SQL database storage adapter for Nostr events. */
|
||||
export class DittoPgStore extends NPostgres {
|
||||
readonly subs = new Map<string, Subscription>();
|
||||
readonly encounters = new LRUCache<string, boolean>({ max: 1000 });
|
||||
|
||||
/** Conditions for when to index certain tags. */
|
||||
static tagConditions: Record<string, TagCondition> = {
|
||||
'A': ({ count }) => count === 0,
|
||||
'E': ({ count, value }) => count === 0 && isNostrId(value),
|
||||
'I': ({ count }) => count === 0,
|
||||
'K': ({ count, value }) => count === 0 && Number.isInteger(Number(value)),
|
||||
'L': ({ event, count }) => event.kind === 1985 || count === 0,
|
||||
'P': ({ count, value }) => count === 0 && isNostrId(value),
|
||||
'a': ({ count }) => count < 15,
|
||||
'client': ({ count, value }) => count === 0 && value.length < 50,
|
||||
'd': ({ event, count }) => count === 0 && NKinds.parameterizedReplaceable(event.kind),
|
||||
'e': DittoPgStore.eTagCondition,
|
||||
'i': ({ count }) => count < 15,
|
||||
'k': ({ count }) => count < 3,
|
||||
'l': ({ event, count }) => event.kind === 1985 || count === 0,
|
||||
'n': ({ count, value }) => count < 50 && value.length < 50,
|
||||
'p': DittoPgStore.pTagCondition,
|
||||
'proxy': ({ count, value }) => count === 0 && value.length < 256,
|
||||
'q': ({ event, count, value }) => count === 0 && event.kind === 1 && isNostrId(value),
|
||||
'r': ({ event, count }) => (event.kind === 1985 ? count < 20 : count < 3),
|
||||
't': ({ event, count, value }) =>
|
||||
(value === value.toLowerCase()) && (event.kind === 1985 ? count < 20 : count < 5) && value.length < 50,
|
||||
'u': ({ count, value }) => {
|
||||
const { success } = z.string().url().safeParse(value); // TODO: maybe find a better library specific for validating web urls
|
||||
return count < 15 && success;
|
||||
},
|
||||
};
|
||||
|
||||
constructor(private opts: DittoPgStoreOpts) {
|
||||
super(opts.db.kysely, {
|
||||
indexTags: DittoPgStore.indexTags,
|
||||
indexSearch: DittoPgStore.searchText,
|
||||
indexExtensions: DittoPgStore.indexExtensions,
|
||||
chunkSize: opts.chunkSize,
|
||||
});
|
||||
|
||||
if (opts.notify) {
|
||||
opts.db.listen('nostr_event', async (id) => {
|
||||
if (this.encounters.has(id)) return;
|
||||
this.encounters.set(id, true);
|
||||
|
||||
const [event] = await this.query([{ ids: [id] }]);
|
||||
|
||||
if (event) {
|
||||
await this.fulfill(purifyEvent(event));
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/** Insert an event (and its tags) into the database. */
|
||||
override async event(event: NostrEvent, opts: { signal?: AbortSignal; timeout?: number } = {}): Promise<void> {
|
||||
event = purifyEvent(event);
|
||||
|
||||
logi({ level: 'debug', ns: 'ditto.event', source: 'db', id: event.id, kind: event.kind });
|
||||
dbEventsCounter.inc({ kind: event.kind });
|
||||
|
||||
if (NKinds.ephemeral(event.kind)) {
|
||||
if (this.encounters.has(event.id)) return;
|
||||
this.encounters.set(event.id, true);
|
||||
|
||||
return await this.fulfill(event);
|
||||
}
|
||||
|
||||
if (this.opts.notify) {
|
||||
this.encounters.set(event.id, true);
|
||||
}
|
||||
|
||||
if (await this.isDeletedAdmin(event)) {
|
||||
throw new RelayError('blocked', 'event deleted by admin');
|
||||
}
|
||||
|
||||
await this.deleteEventsAdmin(event);
|
||||
|
||||
try {
|
||||
await this.storeEvent(event, { ...opts, timeout: opts.timeout ?? this.opts.timeout });
|
||||
this.fulfill(event); // don't await or catch (should never reject)
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
switch (e.message) {
|
||||
case 'duplicate key value violates unique constraint "nostr_events_pkey"':
|
||||
case 'duplicate key value violates unique constraint "author_stats_pkey"':
|
||||
return;
|
||||
case 'canceling statement due to statement timeout':
|
||||
throw new RelayError('error', 'the event could not be added fast enough');
|
||||
default:
|
||||
throw e;
|
||||
}
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Maybe store the event, if eligible. */
|
||||
private async storeEvent(
|
||||
event: NostrEvent,
|
||||
opts: { signal?: AbortSignal; timeout?: number } = {},
|
||||
): Promise<undefined> {
|
||||
const { conf } = this.opts;
|
||||
try {
|
||||
await super.transaction(async (relay, kysely) => {
|
||||
await updateStats({ conf, relay, kysely: kysely as unknown as Kysely<DittoTables>, event });
|
||||
await relay.event(event, opts);
|
||||
});
|
||||
} catch (e) {
|
||||
// If the failure is only because of updateStats (which runs first), insert the event anyway.
|
||||
// We can't catch this in the transaction because the error aborts the transaction on the Postgres side.
|
||||
if (e instanceof Error && e.message.includes('event_stats' satisfies keyof DittoTables)) {
|
||||
await super.event(event, opts);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Fulfill active subscriptions with this event. */
|
||||
protected async fulfill(event: NostrEvent): Promise<void> {
|
||||
const { maxAge = 60 } = this.opts;
|
||||
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const age = now - event.created_at;
|
||||
|
||||
if (age > maxAge) {
|
||||
// Ephemeral events must be fulfilled, or else return an error to the client.
|
||||
if (NKinds.ephemeral(event.kind)) {
|
||||
throw new RelayError('invalid', 'event too old');
|
||||
} else {
|
||||
// Silently ignore old events.
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
for (const [subId, { filters, machina }] of this.subs.entries()) {
|
||||
for (const filter of filters) {
|
||||
if (this.matchesFilter(event, filter)) {
|
||||
machina.push(['EVENT', subId, event]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Check if the event fulfills the filter, according to Ditto criteria. */
|
||||
protected matchesFilter(event: NostrEvent, filter: NostrFilter): boolean {
|
||||
// TODO: support streaming by search.
|
||||
return typeof filter.search !== 'string' && matchFilter(filter, event);
|
||||
}
|
||||
|
||||
/** Check if an event has been deleted by the admin. */
|
||||
private async isDeletedAdmin(event: NostrEvent): Promise<boolean> {
|
||||
const { conf } = this.opts;
|
||||
const adminPubkey = await conf.signer.getPublicKey();
|
||||
|
||||
const filters: NostrFilter[] = [
|
||||
{ kinds: [5], authors: [adminPubkey], '#e': [event.id], limit: 1 },
|
||||
];
|
||||
|
||||
if (NKinds.replaceable(event.kind) || NKinds.parameterizedReplaceable(event.kind)) {
|
||||
const d = event.tags.find(([tag]) => tag === 'd')?.[1] ?? '';
|
||||
|
||||
filters.push({
|
||||
kinds: [5],
|
||||
authors: [adminPubkey],
|
||||
'#a': [`${event.kind}:${event.pubkey}:${d}`],
|
||||
since: event.created_at,
|
||||
limit: 1,
|
||||
});
|
||||
}
|
||||
|
||||
const events = await this.query(filters);
|
||||
return events.length > 0;
|
||||
}
|
||||
|
||||
/** The DITTO_NSEC can delete any event from the database. NDatabase already handles user deletions. */
|
||||
private async deleteEventsAdmin(event: NostrEvent): Promise<void> {
|
||||
const { conf } = this.opts;
|
||||
const adminPubkey = await conf.signer.getPublicKey();
|
||||
|
||||
if (event.kind === 5 && event.pubkey === adminPubkey) {
|
||||
const ids = new Set(event.tags.filter(([name]) => name === 'e').map(([_name, value]) => value));
|
||||
const addrs = new Set(event.tags.filter(([name]) => name === 'a').map(([_name, value]) => value));
|
||||
|
||||
const filters: NostrFilter[] = [];
|
||||
|
||||
if (ids.size) {
|
||||
filters.push({ ids: [...ids] });
|
||||
}
|
||||
|
||||
for (const addr of addrs) {
|
||||
const [k, pubkey, d] = addr.split(':');
|
||||
const kind = Number(k);
|
||||
|
||||
if (!(Number.isInteger(kind) && kind >= 0)) continue;
|
||||
if (!isNostrId(pubkey)) continue;
|
||||
if (d === undefined) continue;
|
||||
|
||||
const filter: NostrFilter = {
|
||||
kinds: [kind],
|
||||
authors: [pubkey],
|
||||
until: event.created_at,
|
||||
};
|
||||
|
||||
if (d) {
|
||||
filter['#d'] = [d];
|
||||
}
|
||||
|
||||
filters.push(filter);
|
||||
}
|
||||
|
||||
if (filters.length) {
|
||||
await this.remove(filters);
|
||||
}
|
||||
}
|
||||
}

  override async *req(
    filters: NostrFilter[],
    opts: { timeout?: number; signal?: AbortSignal; limit?: number } = {},
  ): AsyncIterable<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED> {
    const { db, chunkSize = 20 } = this.opts;
    const { limit, timeout = this.opts.timeout, signal } = opts;

    filters = await this.expandFilters(filters);

    const subId = crypto.randomUUID();
    const normalFilters = this.normalizeFilters(filters);
    const machina = new Machina<NostrRelayEVENT | NostrRelayEOSE | NostrRelayCLOSED>(signal);

    if (normalFilters.length && limit !== 0) {
      this.withTimeout(db.kysely as unknown as Kysely<NPostgresSchema>, timeout, async (trx) => {
        let query = this.getEventsQuery(trx, normalFilters);

        if (typeof opts.limit === 'number') {
          query = query.limit(opts.limit);
        }

        for await (const row of query.stream(chunkSize)) {
          const event = this.parseEventRow(row);
          machina.push(['EVENT', subId, event]);
        }

        machina.push(['EOSE', subId]);
      }).catch((error) => {
        if (error instanceof Error && (error.name === 'TimeoutError' || error.message.includes('timeout'))) {
          machina.push(['CLOSED', subId, 'error: the relay could not respond fast enough']);
        } else {
          machina.push(['CLOSED', subId, 'error: something went wrong']);
        }
      });

      try {
        for await (const msg of machina) {
          const [verb] = msg;

          yield msg;

          if (verb === 'EOSE') {
            break;
          }

          if (verb === 'CLOSED') {
            return;
          }
        }
      } catch {
        yield ['CLOSED', subId, 'error: the relay could not respond fast enough'];
        return;
      }
    } else {
      yield ['EOSE', subId];
    }

    const sizeBytes = new TextEncoder().encode(JSON.stringify(filters)).length;

    this.subs.set(subId, { filters, machina });
    internalSubscriptionsSizeGauge.set(this.subs.size);
    internalSubscriptionsBytesGauge.inc(sizeBytes);

    try {
      for await (const msg of machina) {
        yield msg;
      }
    } catch (e) {
      if (e instanceof Error && (e.name === 'TimeoutError' || e.message.includes('timeout'))) {
        yield ['CLOSED', subId, 'error: the relay could not respond fast enough'];
      } else {
        yield ['CLOSED', subId, 'error: something went wrong'];
      }
    } finally {
      this.subs.delete(subId);
      internalSubscriptionsSizeGauge.set(this.subs.size);
      internalSubscriptionsBytesGauge.dec(sizeBytes);
    }
  }
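A minimal consumption sketch (assumed caller code, with `store` standing in for a DittoPgStore instance): stored events stream until EOSE, and a caller that keeps iterating after EOSE receives live events from the internal subscription.

const controller = new AbortController();

for await (const msg of store.req([{ kinds: [1], limit: 20 }], { signal: controller.signal })) {
  if (msg[0] === 'EVENT') {
    console.log('event', msg[2].id);
  }
  if (msg[0] === 'EOSE') {
    break; // stop here to read only stored events; keep looping for live updates
  }
  if (msg[0] === 'CLOSED') {
    break; // timeout or internal error
  }
}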

  /** Get events for filters from the database. */
  override async query(
    filters: NostrFilter[],
    opts: { signal?: AbortSignal; timeout?: number; limit?: number } = {},
  ): Promise<DittoEvent[]> {
    filters = await this.expandFilters(filters);

    if (opts.signal?.aborted) return Promise.resolve([]);

    logi({ level: 'debug', ns: 'ditto.req', source: 'db', filters: filters as JsonValue });

    return super.query(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout });
  }

  /** Parse an event row from the database. */
  protected override parseEventRow(row: NPostgresSchema['nostr_events']): DittoEvent {
    const event: DittoEvent = {
      id: row.id,
      kind: row.kind,
      pubkey: row.pubkey,
      content: row.content,
      created_at: Number(row.created_at),
      tags: row.tags,
      sig: row.sig,
    };

    if (!this.opts.pure) {
      event.language = row.search_ext.language as LanguageCode | undefined;
    }

    return event;
  }

  /** Delete events based on filters from the database. */
  override async remove(filters: NostrFilter[], opts: { signal?: AbortSignal; timeout?: number } = {}): Promise<void> {
    logi({ level: 'debug', ns: 'ditto.remove', source: 'db', filters: filters as JsonValue });
    return super.remove(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout });
  }

  /** Get number of events that would be returned by filters. */
  override async count(
    filters: NostrFilter[],
    opts: { signal?: AbortSignal; timeout?: number } = {},
  ): Promise<{ count: number; approximate: boolean }> {
    if (opts.signal?.aborted) return Promise.reject(abortError());

    logi({ level: 'debug', ns: 'ditto.count', source: 'db', filters: filters as JsonValue });

    return super.count(filters, { ...opts, timeout: opts.timeout ?? this.opts.timeout });
  }

  /** Rule for indexing `e` tags. */
  private static eTagCondition({ event, count, value, index }: TagConditionOpts): boolean {
    if (!isNostrId(value)) return false;

    if (event.kind === 7) {
      return index === event.tags.findLastIndex(([name]) => name === 'e');
    }

    return event.kind === 10003 || count < 15;
  }

  /** Rule for indexing `p` tags. */
  private static pTagCondition({ event, count, value, index }: TagConditionOpts): boolean {
    if (!isNostrId(value)) return false;

    if (event.kind === 7) {
      return index === event.tags.findLastIndex(([name]) => name === 'p');
    }

    return count < 15 || event.kind === 3;
  }

  /** Return only the tags that should be indexed. */
  static override indexTags(event: NostrEvent): string[][] {
    const tagCounts: Record<string, number> = {};

    function getCount(name: string) {
      return tagCounts[name] || 0;
    }

    function incrementCount(name: string) {
      tagCounts[name] = getCount(name) + 1;
    }

    function checkCondition(name: string, value: string, condition: TagCondition, index: number): boolean {
      return condition({
        event,
        count: getCount(name),
        value,
        index,
      });
    }

    return event.tags.reduce<string[][]>((results, tag, index) => {
      const [name, value] = tag;
      const condition = DittoPgStore.tagConditions[name] as TagCondition | undefined;

      if (value && condition && value.length < 200 && checkCondition(name, value, condition, index)) {
        results.push(tag);
      }

      incrementCount(name);
      return results;
    }, []);
  }
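An illustrative call (assumed, with placeholder hex values): for a kind-7 reaction only the final 'e' and 'p' tags pass the conditions above, so earlier references are dropped from the tag index.

const lastTarget = 'b'.repeat(64);
const author = 'c'.repeat(64);

const indexed = DittoPgStore.indexTags({
  id: 'd'.repeat(64),
  kind: 7,
  pubkey: 'e'.repeat(64),
  created_at: 0,
  content: '+',
  sig: 'f'.repeat(128),
  tags: [
    ['e', 'a'.repeat(64)], // earlier reference, skipped
    ['e', lastTarget],     // last 'e' tag, indexed
    ['p', author],         // last 'p' tag, indexed
  ],
});
// indexed => [['e', lastTarget], ['p', author]]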

  static indexExtensions(event: NostrEvent): Record<string, string> {
    const ext: Record<string, string> = {};

    if (event.kind === 1) {
      ext.reply = event.tags.some(([name]) => name === 'e').toString();
    } else if (event.kind === 1111) {
      ext.reply = event.tags.some(([name]) => ['e', 'E'].includes(name)).toString();
    } else if (event.kind === 6) {
      ext.reply = 'false';
    }

    if ([1, 20, 30023].includes(event.kind)) {
      const language = detectLanguage(event.content, 0.90);

      if (language) {
        ext.language = language;
      }
    }

    const imeta: string[][][] = event.tags
      .filter(([name]) => name === 'imeta')
      .map(([_, ...entries]) =>
        entries.map((entry) => {
          const split = entry.split(' ');
          return [split[0], split.splice(1).join(' ')];
        })
      );

    // quirks mode
    if (!imeta.length && event.kind === 1) {
      const links = linkify.find(event.content).filter(({ type }) => type === 'url');
      imeta.push(...getMediaLinks(links));
    }

    if (imeta.length) {
      ext.media = 'true';

      if (imeta.every((tags) => tags.some(([name, value]) => name === 'm' && value.startsWith('video/')))) {
        ext.video = 'true';
      }
    }

    const client = event.tags.find(([name]) => name === 'client')?.[2];

    if (client && /^31990:([0-9a-f]{64}):(.+)$/.test(client)) {
      ext.client = client;
    }

    ext.protocol = event.tags.find(([name]) => name === 'proxy')?.[2] ?? 'nostr';

    return ext;
  }
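A rough sketch of the output (assumed values; genEvent comes from @nostrify/nostrify/test as used elsewhere in this diff): a plain kind-1 note containing a bare image URL would be flagged as media through the quirks-mode fallback.

const note = genEvent({ kind: 1, content: 'gm https://example.com/photo.jpg' });
const ext = DittoPgStore.indexExtensions(note);
// ext.reply === 'false'      (no 'e' tags)
// ext.media === 'true'       (assuming getMediaLinks treats the .jpg URL as media)
// ext.protocol === 'nostr'   (no 'proxy' tag)
// ext.language is only set when detection confidence reaches 0.90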

  /** Build a search index from the event. */
  static searchText(event: NostrEvent): string {
    switch (event.kind) {
      case 0:
        return DittoPgStore.buildUserSearchContent(event);
      case 1:
      case 20:
        return nip27.replaceAll(event.content, () => '');
      case 30009:
        return DittoPgStore.buildTagsSearchContent(event.tags.filter(([t]) => t !== 'alt'));
      case 30360:
        return event.tags.find(([name]) => name === 'd')?.[1] || '';
      default:
        return '';
    }
  }

  /** Build search content for a user. */
  static buildUserSearchContent(event: NostrEvent): string {
    const { name, nip05 } = n.json().pipe(n.metadata()).catch({}).parse(event.content);
    return [name, nip05].filter(Boolean).join('\n');
  }

  /** Build search content from tag values. */
  static buildTagsSearchContent(tags: string[][]): string {
    return tags.map(([_tag, value]) => value).join('\n');
  }

  /** Converts filters to more performant, simpler filters. */
  async expandFilters(filters: NostrFilter[]): Promise<NostrFilter[]> {
    filters = structuredClone(filters);

    for (const filter of filters) {
      if (filter.since && filter.since >= 2_147_483_647) {
        throw new RelayError('invalid', 'since filter too far into the future');
      }
      if (filter.until && filter.until >= 2_147_483_647) {
        throw new RelayError('invalid', 'until filter too far into the future');
      }
      for (const kind of filter.kinds ?? []) {
        if (kind >= 2_147_483_647) {
          throw new RelayError('invalid', 'kind filter too far into the future');
        }
      }

      if (filter.search) {
        const tokens = NIP50.parseInput(filter.search);

        const domains = new Set<string>();
        const hostnames = new Set<string>();

        for (const token of tokens) {
          if (typeof token === 'object' && token.key === 'domain') {
            const { domain, hostname } = tldts.parse(token.value);
            if (domain === hostname) {
              domains.add(token.value);
            } else {
              hostnames.add(token.value);
            }
          }
        }

        if (domains.size || hostnames.size) {
          let query = this.opts.db.kysely
            .selectFrom('author_stats')
            .select('pubkey')
            .where((eb) => {
              const expr = [];
              if (domains.size) {
                expr.push(eb('nip05_domain', 'in', [...domains]));
              }
              if (hostnames.size) {
                expr.push(eb('nip05_hostname', 'in', [...hostnames]));
              }
              if (expr.length === 1) {
                return expr[0];
              }
              return eb.or(expr);
            });

          if (filter.authors) {
            query = query.where('pubkey', 'in', filter.authors);
          }

          const pubkeys = await query.execute().then((rows) => rows.map((row) => row.pubkey));

          filter.authors = pubkeys;
        }

        // Re-serialize the search string without the domain key. :facepalm:
        filter.search = tokens
          .filter((t) => typeof t === 'string' || typeof t === 'object' && t.key !== 'domain')
          .map((t) => typeof t === 'object' ? `${t.key}:${t.value}` : t)
          .join(' ');
      }
    }

    return filters;
  }
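A hypothetical NIP-50 search filter (values assumed): the domain: token is resolved against author_stats and removed from the search string before the query runs.

const [expanded] = await store.expandFilters([
  { kinds: [1], search: 'domain:gleasonator.dev hello' },
]);
// expanded.authors => pubkeys whose nip05 domain is gleasonator.dev
// expanded.search  => 'hello'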

  /** Execute the callback in a new transaction, unless the Kysely instance is already a transaction. */
  private static override async trx<T = unknown>(
    db: Kysely<DittoTables>,
    callback: (trx: Kysely<DittoTables>) => Promise<T>,
  ): Promise<T> {
    if (db.isTransaction) {
      return await callback(db);
    } else {
      return await db.transaction().execute((trx) => callback(trx));
    }
  }

  /** Execute NPostgres functions in a transaction. */
  // @ts-ignore gg
  override async transaction(
    callback: (store: DittoPgStore, kysely: Kysely<DittoTables>) => Promise<void>,
  ): Promise<void> {
    const { db } = this.opts;

    await DittoPgStore.trx(db.kysely, async (trx) => {
      const store = new DittoPgStore({ ...this.opts, db: { ...db, kysely: trx }, notify: false });
      await callback(store, trx);
    });
  }
}
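A usage sketch (assumed caller code and event): transaction() hands the callback a store bound to a transactional Kysely handle, so the event insert and the table update commit or roll back together.

await store.transaction(async (txStore, kysely) => {
  await txStore.event(event); // `event` is an already-signed NostrEvent (assumed)

  await kysely
    .updateTable('author_stats')
    .set({ nip05: null })
    .where('pubkey', '=', event.pubkey)
    .execute();
});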

@@ -1,77 +0,0 @@
import { DittoConf } from '@ditto/conf';
import { genEvent, MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { generateSecretKey, getPublicKey, nip19 } from 'nostr-tools';

import { DittoPool } from './DittoPool.ts';

Deno.test('DittoPool.reqRouter', async (t) => {
  const nsec = generateSecretKey();
  const conf = new DittoConf(new Map([['DITTO_NSEC', nip19.nsecEncode(nsec)]]));
  const relay = new MockRelay();

  const pool = new DittoPool({ conf, relay });

  const [alex, mk] = [
    generateKeypair(),
    generateKeypair(),
  ];

  const [ditto, henhouse, gleasonator] = [
    'wss://ditto.pub/relay',
    'wss://henhouse.social/relay',
    'wss://gleasonator.dev/relay',
  ];

  const events = [
    genEvent({ kind: 10002, tags: [['r', gleasonator], ['r', ditto]] }, alex.sk),
    genEvent({ kind: 10002, tags: [['r', henhouse], ['r', ditto]] }, mk.sk),
  ];

  for (const event of events) {
    await relay.event(event);
  }

  await t.step('no authors', async () => {
    const reqRoutes = await pool.reqRouter([{ kinds: [1] }]);
    assertEquals(reqRoutes, new Map());
  });

  await t.step('single author', async () => {
    const reqRoutes = await pool.reqRouter([{ kinds: [10002], authors: [alex.pk] }]);

    const expected = new Map([
      [ditto, [{ kinds: [10002], authors: [alex.pk] }]],
      [gleasonator, [{ kinds: [10002], authors: [alex.pk] }]],
    ]);

    assertEquals(reqRoutes, expected);
  });

  await t.step('multiple authors', async () => {
    const reqRoutes = await pool.reqRouter([{ kinds: [10002], authors: [alex.pk, mk.pk] }]);

    const expected = new Map([
      [ditto, [{ kinds: [10002], authors: [alex.pk, mk.pk] }]],
      [henhouse, [{ kinds: [10002], authors: [mk.pk] }]],
      [gleasonator, [{ kinds: [10002], authors: [alex.pk] }]],
    ]);

    assertEquals(reqRoutes, expected);
  });

  await t.step('no authors with fallback', async () => {
    const fallback = genEvent({ kind: 10002, tags: [['r', ditto]] }, nsec);
    await relay.event(fallback);

    const reqRoutes = await pool.reqRouter([{ kinds: [1] }]);
    const expected = new Map([[ditto, [{ kinds: [1] }]]]);

    assertEquals(reqRoutes, expected);
  });
});

function generateKeypair(): { pk: string; sk: Uint8Array } {
  const sk = generateSecretKey();
  return { pk: getPublicKey(sk), sk };
}

@@ -1,146 +0,0 @@
// deno-lint-ignore-file require-await
import { DittoConf } from '@ditto/conf';
import { NostrEvent, NostrFilter, NPool, type NRelay, NRelay1 } from '@nostrify/nostrify';
import { logi } from '@soapbox/logi';

interface DittoPoolOpts {
  conf: DittoConf;
  relay: NRelay;
  maxReqRelays?: number;
  maxEventRelays?: number;
}

export class DittoPool extends NPool<NRelay1> {
  private _opts: DittoPoolOpts;

  constructor(opts: DittoPoolOpts) {
    super({
      open(url) {
        return new NRelay1(url, {
          // Skip event verification (it's done in the pipeline).
          verifyEvent: () => true,
          log: logi,
        });
      },
      reqRouter: (filters) => {
        return this.reqRouter(filters);
      },
      eventRouter: async (event) => {
        return this.eventRouter(event);
      },
    });

    this._opts = opts;
  }

  async reqRouter(filters: NostrFilter[]): Promise<Map<string, NostrFilter[]>> {
    const { conf, relay, maxReqRelays = 5 } = this._opts;

    const routes = new Map<string, NostrFilter[]>();
    const authors = new Set<string>();

    for (const filter of filters) {
      if (filter.authors) {
        for (const author of filter.authors) {
          authors.add(author);
        }
      }
    }

    const pubkey = await conf.signer.getPublicKey();
    const map = new Map<string, NostrEvent>();

    for (const event of await relay.query([{ kinds: [10002], authors: [pubkey, ...authors] }])) {
      map.set(event.pubkey, event);
    }

    for (const filter of filters) {
      if (filter.authors) {
        const relayAuthors = new Map<`wss://${string}`, Set<string>>();

        for (const author of filter.authors) {
          const event = map.get(author) ?? map.get(pubkey);
          if (event) {
            for (const relayUrl of [...this.getEventRelayUrls(event, 'write')].slice(0, maxReqRelays)) {
              const value = relayAuthors.get(relayUrl);
              relayAuthors.set(relayUrl, value ? new Set([...value, author]) : new Set([author]));
            }
          }
        }

        for (const [relayUrl, authors] of relayAuthors) {
          const value = routes.get(relayUrl);
          const _filter = { ...filter, authors: [...authors] };
          routes.set(relayUrl, value ? [...value, _filter] : [_filter]);
        }
      } else {
        const event = map.get(pubkey);
        if (event) {
          for (const relayUrl of [...this.getEventRelayUrls(event, 'read')].slice(0, maxReqRelays)) {
            const value = routes.get(relayUrl);
            routes.set(relayUrl, value ? [...value, filter] : [filter]);
          }
        }
      }
    }

    return routes;
  }

  async eventRouter(event: NostrEvent): Promise<string[]> {
    const { conf, maxEventRelays = 10 } = this._opts;
    const { pubkey } = event;

    const relaySet = await this.getRelayUrls({ pubkey, marker: 'write' });
    relaySet.delete(conf.relay);

    return [...relaySet].slice(0, maxEventRelays);
  }

  private async getRelayUrls(opts: { pubkey?: string; marker?: 'read' | 'write' } = {}): Promise<Set<string>> {
    const { conf, relay } = this._opts;

    const relays = new Set<`wss://${string}`>();
    const authors = new Set<string>([await conf.signer.getPublicKey()]);

    if (opts.pubkey) {
      authors.add(opts.pubkey);
    }

    const events = await relay.query([
      { kinds: [10002], authors: [...authors] },
    ]);

    // Ensure user's own relay list is counted first.
    if (opts.pubkey) {
      events.sort((a) => a.pubkey === opts.pubkey ? -1 : 1);
    }

    for (const event of events) {
      for (const relayUrl of this.getEventRelayUrls(event, opts.marker)) {
        relays.add(relayUrl);
      }
    }

    return relays;
  }

  private getEventRelayUrls(event: NostrEvent, marker?: 'read' | 'write'): Set<`wss://${string}`> {
    const relays = new Set<`wss://${string}`>();

    for (const [name, relayUrl, _marker] of event.tags) {
      if (name === 'r' && (!marker || !_marker || marker === _marker)) {
        try {
          const url = new URL(relayUrl);
          if (url.protocol === 'wss:') {
            relays.add(url.toString() as `wss://${string}`);
          }
        } catch {
          // fallthrough
        }
      }
    }

    return relays;
  }
}
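A construction sketch (assumed wiring, mirroring the test earlier in this diff): the pool consults kind 10002 relay lists from the backing relay to decide where each filter and event goes.

const conf = new DittoConf(Deno.env);
const relay = new MockRelay(); // any NRelay that can serve kind 10002 relay lists
const pool = new DittoPool({ conf, relay, maxReqRelays: 3 });

const routes = await pool.reqRouter([{ kinds: [1], authors: ['a'.repeat(64)] }]);
// routes: Map<relay URL, NostrFilter[]>, one entry per write relay of each author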

@@ -1,213 +0,0 @@
import { DittoPolyPg } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { genEvent, MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { waitFor } from '@std/async/unstable-wait-for';
import { generateSecretKey, getPublicKey } from 'nostr-tools';

import { DittoRelayStore } from '@/storages/DittoRelayStore.ts';

import type { NostrMetadata } from '@nostrify/types';
import { nostrNow } from '@/utils.ts';

Deno.test('generates set event for nip05 request', async () => {
  await using test = setupTest();

  const admin = await test.conf.signer.getPublicKey();
  const event = genEvent({ kind: 3036, tags: [['r', 'alex@gleasonator.dev'], ['p', admin]] });

  await test.store.event(event);

  const filter = { kinds: [30383], authors: [admin], '#d': [event.id] };

  await waitFor(async () => {
    const { count } = await test.store.count([filter]);
    return count > 0;
  }, 3000);

  const [result] = await test.store.query([filter]);

  assertEquals(result?.tags, [
    ['d', event.id],
    ['p', event.pubkey],
    ['k', '3036'],
    ['r', 'alex@gleasonator.dev'],
    ['n', 'pending'],
  ]);
});

Deno.test('updateAuthorData sets nip05', async () => {
  const alex = generateSecretKey();

  await using test = setupTest((req) => {
    switch (req.url) {
      case 'https://gleasonator.dev/.well-known/nostr.json?name=alex':
        return jsonResponse({ names: { alex: getPublicKey(alex) } });
      default:
        return new Response('Not found', { status: 404 });
    }
  });

  const { db, store } = test;

  const metadata: NostrMetadata = { nip05: 'alex@gleasonator.dev' };
  const event = genEvent({ kind: 0, content: JSON.stringify(metadata) }, alex);

  await store.updateAuthorData(event);

  const row = await db.kysely
    .selectFrom('author_stats')
    .selectAll()
    .where('pubkey', '=', getPublicKey(alex))
    .executeTakeFirst();

  assertEquals(row?.nip05, 'alex@gleasonator.dev');
  assertEquals(row?.nip05_domain, 'gleasonator.dev');
  assertEquals(row?.nip05_hostname, 'gleasonator.dev');
});

Deno.test('Admin revokes nip05 grant and nip05 column gets null', async () => {
  const alex = generateSecretKey();

  await using test = setupTest((req) => {
    switch (req.url) {
      case 'https://gleasonator.dev/.well-known/nostr.json?name=alex':
        return jsonResponse({ names: { alex: getPublicKey(alex) } });
      default:
        return new Response('Not found', { status: 404 });
    }
  });

  const { db, store, conf } = test;

  const metadata: NostrMetadata = { nip05: 'alex@gleasonator.dev' };
  const event = genEvent({ kind: 0, content: JSON.stringify(metadata) }, alex);

  await store.event(event);

  await waitFor(async () => {
    const row = await db.kysely
      .selectFrom('author_stats')
      .selectAll()
      .where('pubkey', '=', getPublicKey(alex))
      .executeTakeFirst();

    assertEquals(row?.nip05, 'alex@gleasonator.dev');
    assertEquals(row?.nip05_domain, 'gleasonator.dev');
    assertEquals(row?.nip05_hostname, 'gleasonator.dev');

    return true;
  }, 3000);

  const grant = await conf.signer.signEvent({
    kind: 30360,
    tags: [
      ['d', 'alex@gleasonator.dev'],
      ['r', 'alex@gleasonator.dev'],
      ['L', 'nip05.domain'],
      ['l', 'gleasonator.dev', 'nip05.domain'],
      ['p', event.pubkey],
      ['e', 'whatever'],
    ],
    created_at: nostrNow(),
    content: '',
  });

  await store.event(grant);

  const adminDeletion = await conf.signer.signEvent({
    kind: 5,
    tags: [
      ['k', '30360'],
      ['e', grant.id],
    ],
    created_at: nostrNow(),
    content: '',
  });

  await store.event(adminDeletion);

  const nullRow = await db.kysely
    .selectFrom('author_stats')
    .selectAll()
    .where('pubkey', '=', getPublicKey(alex))
    .executeTakeFirst();

  assertEquals(nullRow?.nip05, null);
  assertEquals(nullRow?.nip05_domain, null);
  assertEquals(nullRow?.nip05_hostname, null);
});

Deno.test('fetchRelated', async () => {
  await using test = setupTest();
  const { pool, store } = test;

  const post = genEvent({ kind: 1, content: 'hi' });
  const reply = genEvent({ kind: 1, content: 'wussup?', tags: [['e', post.id], ['p', post.pubkey]] });

  await pool.event(post);
  await pool.event(reply);

  await store.event(reply);

  await waitFor(async () => {
    const { count } = await test.store.count([{ ids: [post.id] }]);
    return count > 0;
  }, 3000);
});

Deno.test('event author is fetched', async () => {
  await using test = setupTest();
  const { pool, store } = test;

  const sk = generateSecretKey();
  const pubkey = getPublicKey(sk);

  const post = genEvent({ kind: 1 }, sk);
  const author = genEvent({ kind: 0 }, sk);

  await pool.event(author);
  await store.event(post);

  const [result] = await store.query([{ kinds: [0], authors: [pubkey] }]);

  assertEquals(result?.id, author.id);
});

function setupTest(cb?: (req: Request) => Response | Promise<Response>) {
  const conf = new DittoConf(Deno.env);
  const db = new DittoPolyPg(conf.databaseUrl);

  const pool = new MockRelay();
  const relay = new MockRelay();

  const mockFetch: typeof fetch = async (input, init) => {
    const req = new Request(input, init);
    if (cb) {
      return await cb(req);
    } else {
      return new Response('Not mocked', { status: 404 });
    }
  };

  const store = new DittoRelayStore({ conf, db, pool, relay, fetch: mockFetch });

  return {
    db,
    conf,
    pool,
    store,
    [Symbol.asyncDispose]: async () => {
      await store[Symbol.asyncDispose]();
      await db[Symbol.asyncDispose]();
    },
  };
}

function jsonResponse(body: unknown): Response {
  return new Response(JSON.stringify(body), {
    headers: {
      'Content-Type': 'application/json',
    },
  });
}
Some files were not shown because too many files have changed in this diff.