Mirror of https://gitlab.com/soapbox-pub/ditto.git, synced 2025-12-06 03:19:46 +00:00
Merge remote-tracking branch 'origin/main' into feat-promove-admin
Commit d29bc8c020
38 changed files with 994 additions and 211 deletions
@@ -12,7 +12,7 @@ test:
- deno fmt --check
- deno task lint
- deno task check
- deno task test --coverage=cov_profile
- deno task test --ignore=packages/transcode --coverage=cov_profile
- deno coverage cov_profile
coverage: /All files[^\|]*\|[^\|]*\s+([\d\.]+)/
services:
@@ -11,6 +11,7 @@
"./packages/nip98",
"./packages/policies",
"./packages/ratelimiter",
"./packages/transcode",
"./packages/translators",
"./packages/uploaders"
],
@@ -73,6 +74,7 @@
"@soapbox/logi": "jsr:@soapbox/logi@^0.3.0",
"@soapbox/safe-fetch": "jsr:@soapbox/safe-fetch@^2.0.0",
"@std/assert": "jsr:@std/assert@^0.225.1",
"@std/async": "jsr:@std/async@^1.0.10",
"@std/cli": "jsr:@std/cli@^0.223.0",
"@std/crypto": "jsr:@std/crypto@^0.224.0",
"@std/encoding": "jsr:@std/encoding@^0.224.0",
5 deno.lock generated

@@ -58,6 +58,7 @@
"jsr:@std/assert@^1.0.10": "1.0.11",
"jsr:@std/assert@~0.213.1": "0.213.1",
"jsr:@std/assert@~0.225.1": "0.225.3",
"jsr:@std/async@^1.0.10": "1.0.10",
"jsr:@std/bytes@0.223": "0.223.0",
"jsr:@std/bytes@0.224": "0.224.0",
"jsr:@std/bytes@0.224.0": "0.224.0",
@@ -604,6 +605,9 @@
"jsr:@std/internal@^1.0.5"
]
},
"@std/async@1.0.10": {
"integrity": "2ff1b1c7d33d1416159989b0f69e59ec7ee8cb58510df01e454def2108b3dbec"
},
"@std/bytes@0.223.0": {
"integrity": "84b75052cd8680942c397c2631318772b295019098f40aac5c36cead4cba51a8"
},
@@ -2489,6 +2493,7 @@
"jsr:@soapbox/logi@0.3",
"jsr:@soapbox/safe-fetch@2",
"jsr:@std/assert@~0.225.1",
"jsr:@std/async@^1.0.10",
"jsr:@std/cli@0.223",
"jsr:@std/crypto@0.224",
"jsr:@std/encoding@0.224",
@@ -279,6 +279,11 @@ export class DittoConf {
return optionalBooleanSchema.parse(this.env.get('MEDIA_ANALYZE')) ?? false;
}

/** Whether to transcode uploaded video files with ffmpeg. */
get mediaTranscode(): boolean {
return optionalBooleanSchema.parse(this.env.get('MEDIA_TRANSCODE')) ?? false;
}

/** Max upload size for files in number of bytes. Default 100MiB. */
get maxUploadSize(): number {
return Number(this.env.get('MAX_UPLOAD_SIZE') || 100 * 1024 * 1024);
@@ -480,4 +485,14 @@ export class DittoConf {
get precheck(): boolean {
return optionalBooleanSchema.parse(this.env.get('DITTO_PRECHECK')) ?? true;
}

/** Path to `ffmpeg` executable. */
get ffmpegPath(): string {
return this.env.get('FFMPEG_PATH') || 'ffmpeg';
}

/** Path to `ffprobe` executable. */
get ffprobePath(): string {
return this.env.get('FFPROBE_PATH') || 'ffprobe';
}
}
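Note (not part of the diff): a minimal sketch of how the new getters read their environment, using the Map-based constructor that DittoConf is already given elsewhere in this commit. The values below are hypothetical; the defaults ('ffmpeg', 'ffprobe', and false for MEDIA_TRANSCODE) come from the getters above.

import { DittoConf } from '@ditto/conf';

// Hypothetical settings for illustration only.
const conf = new DittoConf(
  new Map([
    ['MEDIA_TRANSCODE', 'true'],          // read by conf.mediaTranscode (defaults to false)
    ['FFMPEG_PATH', '/usr/bin/ffmpeg'],   // read by conf.ffmpegPath (defaults to 'ffmpeg')
    ['FFPROBE_PATH', '/usr/bin/ffprobe'], // read by conf.ffprobePath (defaults to 'ffprobe')
  ]),
);

console.log(conf.mediaTranscode, conf.ffmpegPath, conf.ffprobePath);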
25 packages/db/adapters/TestDB.test.ts Normal file

@@ -0,0 +1,25 @@
import { DittoConf } from '@ditto/conf';
import { NPostgres } from '@nostrify/db';
import { genEvent } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';

import { DittoPolyPg } from './DittoPolyPg.ts';
import { TestDB } from './TestDB.ts';

Deno.test('TestDB', async () => {
  const conf = new DittoConf(Deno.env);
  const orig = new DittoPolyPg(conf.databaseUrl);

  await using db = new TestDB(orig);
  await db.migrate();
  await db.clear();

  const store = new NPostgres(orig.kysely);
  await store.event(genEvent());

  assertEquals((await store.count([{}])).count, 1);

  await db.clear();

  assertEquals((await store.count([{}])).count, 0);
});
49 packages/db/adapters/TestDB.ts Normal file

@@ -0,0 +1,49 @@
import { type Kysely, sql } from 'kysely';

import type { DittoDB } from '../DittoDB.ts';
import type { DittoTables } from '../DittoTables.ts';

/** Wraps another DittoDB implementation to clear all data when disposed. */
export class TestDB implements DittoDB {
  constructor(private db: DittoDB) {}

  get kysely(): Kysely<DittoTables> {
    return this.db.kysely;
  }

  get poolSize(): number {
    return this.db.poolSize;
  }

  get availableConnections(): number {
    return this.db.availableConnections;
  }

  migrate(): Promise<void> {
    return this.db.migrate();
  }

  listen(channel: string, callback: (payload: string) => void): void {
    return this.db.listen(channel, callback);
  }

  /** Truncate all tables. */
  async clear(): Promise<void> {
    const query = sql<{ tablename: string }>`select tablename from pg_tables where schemaname = current_schema()`;

    const { rows } = await query.execute(this.db.kysely);

    for (const { tablename } of rows) {
      if (tablename.startsWith('kysely_')) {
        continue; // Skip Kysely's internal tables
      } else {
        await sql`truncate table ${sql.ref(tablename)} cascade`.execute(this.db.kysely);
      }
    }
  }

  async [Symbol.asyncDispose](): Promise<void> {
    await this.clear();
    await this.db[Symbol.asyncDispose]();
  }
}
@@ -2,6 +2,7 @@ export { DittoPglite } from './adapters/DittoPglite.ts';
export { DittoPolyPg } from './adapters/DittoPolyPg.ts';
export { DittoPostgres } from './adapters/DittoPostgres.ts';
export { DummyDB } from './adapters/DummyDB.ts';
export { TestDB } from './adapters/TestDB.ts';

export type { DittoDB } from './DittoDB.ts';
export type { DittoTables } from './DittoTables.ts';
@@ -55,8 +55,6 @@ import {
adminSetRelaysController,
deleteZapSplitsController,
getZapSplitsController,
nameRequestController,
nameRequestsController,
statusZapSplitsController,
updateInstanceController,
updateZapSplitsController,
@@ -150,6 +148,7 @@ import { rateLimitMiddleware } from '@/middleware/rateLimitMiddleware.ts';
import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts';
import { translatorMiddleware } from '@/middleware/translatorMiddleware.ts';
import { logiMiddleware } from '@/middleware/logiMiddleware.ts';
import dittoNamesRoute from '@/routes/dittoNamesRoute.ts';
import { DittoRelayStore } from '@/storages/DittoRelayStore.ts';

export interface AppEnv extends DittoEnv {
@@ -452,8 +451,7 @@ app.put('/api/v1/admin/ditto/relays', userMiddleware({ role: 'admin' }), adminSe

app.put('/api/v1/admin/ditto/instance', userMiddleware({ role: 'admin' }), updateInstanceController);

app.post('/api/v1/ditto/names', userMiddleware(), nameRequestController);
app.get('/api/v1/ditto/names', userMiddleware(), nameRequestsController);
app.route('/api/v1/ditto/names', dittoNamesRoute);

app.get('/api/v1/ditto/captcha', rateLimitMiddleware(3, Time.minutes(1)), captchaController);
app.post(
@@ -1,3 +0,0 @@
import { LRUCache } from 'lru-cache';

export const pipelineEncounters = new LRUCache<string, true>({ max: 5000 });
@@ -1,19 +1,17 @@
import { paginated } from '@ditto/mastoapi/pagination';
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
import { z } from 'zod';

import { AppController } from '@/app.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getAuthor } from '@/queries.ts';
import { addTag } from '@/utils/tags.ts';
import { createEvent, parseBody, updateAdminEvent } from '@/utils/api.ts';
import { parseBody, updateAdminEvent } from '@/utils/api.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';
import { deleteTag } from '@/utils/tags.ts';
import { DittoZapSplits, getZapSplits } from '@/utils/zap-split.ts';
import { screenshotsSchema } from '@/schemas/nostr.ts';
import { booleanParamSchema, percentageSchema } from '@/schema.ts';
import { percentageSchema } from '@/schema.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { renderNameRequest } from '@/views/ditto.ts';
import { accountFromPubkey } from '@/views/mastodon/accounts.ts';
import { renderAccount } from '@/views/mastodon/accounts.ts';
import { updateListAdminEvent } from '@/utils/api.ts';
@@ -81,102 +79,6 @@ function renderRelays(event: NostrEvent): RelayEntity[] {
  }, [] as RelayEntity[]);
}

const nameRequestSchema = z.object({
  name: z.string().email(),
  reason: z.string().max(500).optional(),
});

export const nameRequestController: AppController = async (c) => {
  const { conf, relay, user } = c.var;

  const pubkey = await user!.signer.getPublicKey();
  const result = nameRequestSchema.safeParse(await c.req.json());

  if (!result.success) {
    return c.json({ error: 'Invalid username', schema: result.error }, 400);
  }

  const { name, reason } = result.data;

  const [existing] = await relay.query([{ kinds: [3036], authors: [pubkey], '#r': [name.toLowerCase()], limit: 1 }]);
  if (existing) {
    return c.json({ error: 'Name request already exists' }, 400);
  }

  const r: string[][] = [['r', name]];

  if (name !== name.toLowerCase()) {
    r.push(['r', name.toLowerCase()]);
  }

  const event = await createEvent({
    kind: 3036,
    content: reason,
    tags: [
      ...r,
      ['L', 'nip05.domain'],
      ['l', name.split('@')[1], 'nip05.domain'],
      ['p', await conf.signer.getPublicKey()],
    ],
  }, c);

  await hydrateEvents({ ...c.var, events: [event] });

  const nameRequest = await renderNameRequest(event);
  return c.json(nameRequest);
};

const nameRequestsSchema = z.object({
  approved: booleanParamSchema.optional(),
  rejected: booleanParamSchema.optional(),
});

export const nameRequestsController: AppController = async (c) => {
  const { conf, relay, user } = c.var;
  const pubkey = await user!.signer.getPublicKey();

  const params = c.get('pagination');
  const { approved, rejected } = nameRequestsSchema.parse(c.req.query());

  const filter: NostrFilter = {
    kinds: [30383],
    authors: [await conf.signer.getPublicKey()],
    '#k': ['3036'],
    '#p': [pubkey],
    ...params,
  };

  if (approved) {
    filter['#n'] = ['approved'];
  }
  if (rejected) {
    filter['#n'] = ['rejected'];
  }

  const orig = await relay.query([filter]);
  const ids = new Set<string>();

  for (const event of orig) {
    const d = event.tags.find(([name]) => name === 'd')?.[1];
    if (d) {
      ids.add(d);
    }
  }

  if (!ids.size) {
    return c.json([]);
  }

  const events = await relay.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }])
    .then((events) => hydrateEvents({ ...c.var, events }));

  const nameRequests = await Promise.all(
    events.map((event) => renderNameRequest(event)),
  );

  return paginated(c, orig, nameRequests);
};

const zapSplitSchema = z.record(
  n.id(),
  z.object({
62 packages/ditto/routes/dittoNamesRoute.test.ts Normal file

@@ -0,0 +1,62 @@
import { TestApp } from '@ditto/mastoapi/test';
import { assertEquals } from '@std/assert';

import route from './dittoNamesRoute.ts';

Deno.test('POST / creates a name request event', async () => {
  await using app = new TestApp();
  const { conf, relay } = app.var;

  const user = app.user();
  app.route('/', route);

  const response = await app.api.post('/', { name: 'Alex@Ditto.pub', reason: 'for testing' });

  assertEquals(response.status, 200);

  const [event] = await relay.query([{ kinds: [3036], authors: [await user.signer.getPublicKey()] }]);

  assertEquals(event?.tags, [
    ['r', 'Alex@Ditto.pub'],
    ['r', 'alex@ditto.pub'],
    ['L', 'nip05.domain'],
    ['l', 'ditto.pub', 'nip05.domain'],
    ['p', await conf.signer.getPublicKey()],
  ]);

  assertEquals(event?.content, 'for testing');
});

Deno.test('POST / can be called multiple times with the same name', async () => {
  await using app = new TestApp();

  app.user();
  app.route('/', route);

  const response1 = await app.api.post('/', { name: 'alex@ditto.pub' });
  const response2 = await app.api.post('/', { name: 'alex@ditto.pub' });

  assertEquals(response1.status, 200);
  assertEquals(response2.status, 200);
});

Deno.test('POST / returns 400 if the name has already been granted', async () => {
  await using app = new TestApp();
  const { conf, relay } = app.var;

  app.user();
  app.route('/', route);

  const grant = await conf.signer.signEvent({
    kind: 30360,
    tags: [['d', 'alex@ditto.pub']],
    content: '',
    created_at: 0,
  });

  await relay.event(grant);

  const response = await app.api.post('/', { name: 'alex@ditto.pub' });

  assertEquals(response.status, 400);
});
130 packages/ditto/routes/dittoNamesRoute.ts Normal file

@@ -0,0 +1,130 @@
import { paginationMiddleware, userMiddleware } from '@ditto/mastoapi/middleware';
import { DittoRoute } from '@ditto/mastoapi/router';
import { z } from 'zod';

import { createEvent } from '@/utils/api.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { renderNameRequest } from '@/views/ditto.ts';
import { booleanParamSchema } from '@/schema.ts';
import { NostrFilter } from '@nostrify/nostrify';

const nameRequestSchema = z.object({
  name: z.string().email(),
  reason: z.string().max(500).optional(),
});

const route = new DittoRoute();

route.post('/', userMiddleware(), async (c) => {
  const { conf, relay, user } = c.var;

  const result = nameRequestSchema.safeParse(await c.req.json());

  if (!result.success) {
    return c.json({ error: 'Invalid username', schema: result.error }, 422);
  }

  const pubkey = await user.signer.getPublicKey();
  const adminPubkey = await conf.signer.getPublicKey();

  const { name, reason } = result.data;
  const [_localpart, domain] = name.split('@');

  if (domain.toLowerCase() !== conf.url.host.toLowerCase()) {
    return c.json({ error: 'Unsupported domain' }, 422);
  }

  const d = name.toLowerCase();

  const [grant] = await relay.query([{ kinds: [30360], authors: [adminPubkey], '#d': [d] }]);
  if (grant) {
    return c.json({ error: 'Name has already been granted' }, 400);
  }

  const [pending] = await relay.query([{
    kinds: [30383],
    authors: [adminPubkey],
    '#p': [pubkey],
    '#k': ['3036'],
    '#r': [d],
    '#n': ['pending'],
    limit: 1,
  }]);
  if (pending) {
    return c.json({ error: 'You have already requested that name, and it is pending approval by staff' }, 400);
  }

  const tags: string[][] = [['r', name]];

  if (name !== name.toLowerCase()) {
    tags.push(['r', name.toLowerCase()]);
  }

  const event = await createEvent({
    kind: 3036,
    content: reason,
    tags: [
      ...tags,
      ['L', 'nip05.domain'],
      ['l', domain.toLowerCase(), 'nip05.domain'],
      ['p', await conf.signer.getPublicKey()],
    ],
  }, c);

  await hydrateEvents({ ...c.var, events: [event] });

  const nameRequest = await renderNameRequest(event);
  return c.json(nameRequest);
});

const nameRequestsSchema = z.object({
  approved: booleanParamSchema.optional(),
  rejected: booleanParamSchema.optional(),
});

route.get('/', paginationMiddleware(), userMiddleware(), async (c) => {
  const { conf, relay, user, pagination } = c.var;
  const pubkey = await user!.signer.getPublicKey();

  const { approved, rejected } = nameRequestsSchema.parse(c.req.query());

  const filter: NostrFilter = {
    kinds: [30383],
    authors: [await conf.signer.getPublicKey()],
    '#k': ['3036'],
    '#p': [pubkey],
    ...pagination,
  };

  if (approved) {
    filter['#n'] = ['approved'];
  }
  if (rejected) {
    filter['#n'] = ['rejected'];
  }

  const orig = await relay.query([filter]);
  const ids = new Set<string>();

  for (const event of orig) {
    const d = event.tags.find(([name]) => name === 'd')?.[1];
    if (d) {
      ids.add(d);
    }
  }

  if (!ids.size) {
    return c.json([]);
  }

  const events = await relay.query([{ kinds: [3036], ids: [...ids], authors: [pubkey] }])
    .then((events) => hydrateEvents({ ...c.var, events }));

  const nameRequests = await Promise.all(
    events.map((event) => renderNameRequest(event)),
  );

  return c.var.paginate(orig, nameRequests);
});

export default route;
@@ -2,12 +2,39 @@ import { DittoPolyPg } from '@ditto/db';
import { DittoConf } from '@ditto/conf';
import { genEvent, MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';
import { waitFor } from '@std/async/unstable-wait-for';
import { generateSecretKey, getPublicKey } from 'nostr-tools';

import { DittoRelayStore } from './DittoRelayStore.ts';

import type { NostrMetadata } from '@nostrify/types';

Deno.test('generates set event for nip05 request', async () => {
await using test = setupTest();

const admin = await test.conf.signer.getPublicKey();
const event = genEvent({ kind: 3036, tags: [['r', 'alex@gleasonator.dev'], ['p', admin]] });

await test.store.event(event);

const filter = { kinds: [30383], authors: [admin], '#d': [event.id] };

await waitFor(async () => {
const { count } = await test.store.count([filter]);
return count > 0;
}, 3000);

const [result] = await test.store.query([filter]);

assertEquals(result?.tags, [
['d', event.id],
['p', event.pubkey],
['k', '3036'],
['r', 'alex@gleasonator.dev'],
['n', 'pending'],
]);
});

Deno.test('updateAuthorData sets nip05', async () => {
const alex = generateSecretKey();

@@ -38,20 +65,25 @@ Deno.test('updateAuthorData sets nip05', async () => {
assertEquals(row?.nip05_hostname, 'gleasonator.dev');
});

function setupTest(cb: (req: Request) => Response | Promise<Response>) {
function setupTest(cb?: (req: Request) => Response | Promise<Response>) {
const conf = new DittoConf(Deno.env);
const db = new DittoPolyPg(conf.databaseUrl);
const relay = new MockRelay();

const mockFetch: typeof fetch = async (input, init) => {
const req = new Request(input, init);
return await cb(req);
if (cb) {
return await cb(req);
} else {
return new Response('Not mocked', { status: 404 });
}
};

const store = new DittoRelayStore({ conf, db, relay, fetch: mockFetch });

return {
db,
conf,
store,
[Symbol.asyncDispose]: async () => {
await store[Symbol.asyncDispose]();
@@ -358,19 +358,24 @@ export class DittoRelayStore implements NRelay {
}

if (event.kind === 3036 && tagsAdmin) {
const rel = await signer.signEvent({
kind: 30383,
content: '',
tags: [
['d', event.id],
['p', event.pubkey],
['k', '3036'],
['n', 'pending'],
],
created_at: Math.floor(Date.now() / 1000),
});
const r = event.tags.find(([name]) => name === 'r')?.[1];

await this.event(rel, { signal: AbortSignal.timeout(1000) });
if (r) {
const rel = await signer.signEvent({
kind: 30383,
content: '',
tags: [
['d', event.id],
['p', event.pubkey],
['k', '3036'],
['r', r.toLowerCase()],
['n', 'pending'],
],
created_at: Math.floor(Date.now() / 1000),
});

await this.event(rel, { signal: AbortSignal.timeout(1000) });
}
}
}

@@ -58,17 +58,19 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
return result;
}, new Set<string>());

const favicons = (
await db.kysely
.selectFrom('domain_favicons')
.select(['domain', 'favicon'])
.where('domain', 'in', [...domains])
.execute()
)
.reduce((result, { domain, favicon }) => {
result[domain] = favicon;
return result;
}, {} as Record<string, string>);
const favicons: Record<string, string> = domains.size
? (
await db.kysely
.selectFrom('domain_favicons')
.select(['domain', 'favicon'])
.where('domain', 'in', [...domains])
.execute()
)
.reduce((result, { domain, favicon }) => {
result[domain] = favicon;
return result;
}, {} as Record<string, string>)
: {};

const stats = {
authors: authorStats,
@@ -27,10 +27,10 @@ async function createEvent<E extends (DittoEnv & { Variables: { user?: User } })
}

const event = await user.signer.signEvent({
content: '',
created_at: nostrNow(),
tags: [],
...t,
content: t.content ?? '',
created_at: t.created_at ?? nostrNow(),
tags: t.tags ?? [],
});

await relay.event(event, { signal, publish: true });
@@ -1,3 +1,5 @@
import { analyzeFile, extractVideoFrame, transcodeVideo } from '@ditto/transcode';
import { ScopedPerformance } from '@esroyo/scoped-performance';
import { HTTPException } from '@hono/hono/http-exception';
import { logi } from '@soapbox/logi';
import { crypto } from '@std/crypto';
@@ -21,7 +23,11 @@ export async function uploadFile(
meta: FileMeta,
signal?: AbortSignal,
): Promise<DittoUpload> {
using perf = new ScopedPerformance();
perf.mark('start');

const { conf, uploader } = c.var;
const { ffmpegPath, ffprobePath, mediaAnalyze, mediaTranscode } = conf;

if (!uploader) {
throw new HTTPException(500, {
@@ -35,7 +41,43 @@ export async function uploadFile(
throw new Error('File size is too large.');
}

const [baseType] = file.type.split('/');

perf.mark('probe-start');
const probe = mediaTranscode ? await analyzeFile(file.stream(), { ffprobePath }).catch(() => null) : null;
const video = probe?.streams.find((stream) => stream.codec_type === 'video');
perf.mark('probe-end');

perf.mark('transcode-start');
if (baseType === 'video' && mediaTranscode) {
let needsTranscode = false;

for (const stream of probe?.streams ?? []) {
if (stream.codec_type === 'video' && stream.codec_name !== 'h264') {
needsTranscode = true;
break;
}
if (stream.codec_type === 'audio' && stream.codec_name !== 'aac') {
needsTranscode = true;
break;
}
}

if (needsTranscode) {
const tmp = new URL('file://' + await Deno.makeTempFile());
await Deno.writeFile(tmp, file.stream());
const stream = transcodeVideo(tmp, { ffmpegPath });
const transcoded = await new Response(stream).bytes();
file = new File([transcoded], file.name, { type: 'video/mp4' });
await Deno.remove(tmp);
}
}
perf.mark('transcode-end');

perf.mark('upload-start');
const tags = await uploader.upload(file, { signal });
perf.mark('upload-end');

const url = tags[0][1];

if (description) {
@@ -46,6 +88,8 @@ export async function uploadFile(
const m = tags.find(([key]) => key === 'm')?.[1];
const dim = tags.find(([key]) => key === 'dim')?.[1];
const size = tags.find(([key]) => key === 'size')?.[1];
const image = tags.find(([key]) => key === 'image')?.[1];
const thumb = tags.find(([key]) => key === 'thumb')?.[1];
const blurhash = tags.find(([key]) => key === 'blurhash')?.[1];

if (!x) {
@@ -61,34 +105,50 @@ export async function uploadFile(
tags.push(['size', file.size.toString()]);
}

// If the uploader didn't already, try to get a blurhash and media dimensions.
// This requires `MEDIA_ANALYZE=true` to be configured because it comes with security tradeoffs.
if (conf.mediaAnalyze && (!blurhash || !dim)) {
perf.mark('analyze-start');

if (baseType === 'video' && mediaAnalyze && mediaTranscode && video && (!image || !thumb)) {
try {
const bytes = await new Response(file.stream()).bytes();
const img = sharp(bytes);
const tmp = new URL('file://' + await Deno.makeTempFile());
await Deno.writeFile(tmp, file.stream());
const frame = await extractVideoFrame(tmp, '00:00:01', { ffmpegPath });
await Deno.remove(tmp);
const [[, url]] = await uploader.upload(new File([frame], 'thumb.jpg', { type: 'image/jpeg' }), { signal });

const { width, height } = await img.metadata();

if (!dim && (width && height)) {
tags.push(['dim', `${width}x${height}`]);
if (!image) {
tags.push(['image', url]);
}

if (!blurhash && (width && height)) {
const pixels = await img
.raw()
.ensureAlpha()
.toBuffer({ resolveWithObject: false })
.then((buffer) => new Uint8ClampedArray(buffer));
if (!dim) {
tags.push(['dim', await getImageDim(frame)]);
}

const blurhash = encode(pixels, width, height, 4, 4);
tags.push(['blurhash', blurhash]);
if (!blurhash) {
tags.push(['blurhash', await getBlurhash(frame)]);
}
} catch (e) {
logi({ level: 'error', ns: 'ditto.upload.analyze', error: errorJson(e) });
}
}

if (baseType === 'image' && mediaAnalyze && (!blurhash || !dim)) {
try {
const bytes = await new Response(file.stream()).bytes();

if (!dim) {
tags.push(['dim', await getImageDim(bytes)]);
}

if (!blurhash) {
tags.push(['blurhash', await getBlurhash(bytes)]);
}
} catch (e) {
logi({ level: 'error', ns: 'ditto.upload.analyze', error: errorJson(e) });
}
}

perf.mark('analyze-end');

const upload = {
id: crypto.randomUUID(),
url,
@@ -99,5 +159,62 @@ export async function uploadFile(

dittoUploads.set(upload.id, upload);

const timing = [
perf.measure('probe', 'probe-start', 'probe-end'),
perf.measure('transcode', 'transcode-start', 'transcode-end'),
perf.measure('upload', 'upload-start', 'upload-end'),
perf.measure('analyze', 'analyze-start', 'analyze-end'),
].reduce<Record<string, number>>((acc, m) => {
const name = m.name.split('::')[1]; // ScopedPerformance uses `::` to separate the name.
acc[name] = m.duration / 1000; // Convert to seconds for logging.
return acc;
}, {});

perf.mark('end');

logi({
level: 'info',
ns: 'ditto.upload',
upload: { ...upload, uploadedAt: upload.uploadedAt.toISOString() },
timing,
duration: perf.measure('total', 'start', 'end').duration / 1000,
});

return upload;
}

async function getImageDim(bytes: Uint8Array): Promise<`${number}x${number}`> {
const img = sharp(bytes);
const { width, height } = await img.metadata();

if (!width || !height) {
throw new Error('Image metadata is missing.');
}

return `${width}x${height}`;
}

/** Get a blurhash from an image file. */
async function getBlurhash(bytes: Uint8Array, maxDim = 64): Promise<string> {
const img = sharp(bytes);

const { width, height } = await img.metadata();

if (!width || !height) {
throw new Error('Image metadata is missing.');
}

const { data, info } = await img
.raw()
.ensureAlpha()
.resize({
width: width > height ? undefined : maxDim,
height: height > width ? undefined : maxDim,
fit: 'inside',
})
.toBuffer({ resolveWithObject: true });

const pixels = new Uint8ClampedArray(data);

return encode(pixels, info.width, info.height, 4, 4);
}
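Note (not part of the diff): as an orientation aid, after a video upload with MEDIA_TRANSCODE and MEDIA_ANALYZE enabled, the tags array assembled above would look roughly like the sketch below. All URLs and values are hypothetical; tags[0][1] is the uploaded file's URL, and image/dim/blurhash come from the extracted poster frame.

// Hypothetical result for illustration; real values come from the uploader and ffmpeg.
const tags: string[][] = [
  ['url', 'https://media.example.com/abc.mp4'],
  ['m', 'video/mp4'],                                    // after transcoding to H.264/AAC
  ['size', '1048576'],                                   // pushed from file.size when missing
  ['image', 'https://media.example.com/abc-thumb.jpg'],  // frame extracted at 00:00:01
  ['dim', '1920x1080'],                                  // from getImageDim() on the frame
  ['blurhash', 'LEHV6nWB2yk8pyo0adR*.7kCMdnj'],          // from getBlurhash() on the frame
];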
@@ -14,6 +14,8 @@ function renderAttachment(
const alt = tags.find(([name]) => name === 'alt')?.[1];
const cid = tags.find(([name]) => name === 'cid')?.[1];
const dim = tags.find(([name]) => name === 'dim')?.[1];
const image = tags.find(([key]) => key === 'image')?.[1];
const thumb = tags.find(([key]) => key === 'thumb')?.[1];
const blurhash = tags.find(([name]) => name === 'blurhash')?.[1];

if (!url) return;
@@ -34,7 +36,7 @@ function renderAttachment(
id: id ?? url,
type: getAttachmentType(m ?? ''),
url,
preview_url: url,
preview_url: image ?? thumb ?? url,
remote_url: null,
description: alt ?? '',
blurhash: blurhash || null,
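Note (not part of the diff): with the change above, a video attachment whose upload produced an image or thumb tag would render roughly like this hypothetical entity, with preview_url now pointing at the poster frame instead of the video itself. Field names match the code above; the values are made up.

const attachment = {
  id: 'https://media.example.com/abc.mp4',
  type: 'video',
  url: 'https://media.example.com/abc.mp4',
  preview_url: 'https://media.example.com/abc-thumb.jpg', // image ?? thumb ?? url
  remote_url: null,
  description: '',
  blurhash: 'LEHV6nWB2yk8pyo0adR*.7kCMdnj',
};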
@@ -1,21 +1,26 @@
import { setUser, testApp } from '@ditto/mastoapi/test';
import { TestApp } from '@ditto/mastoapi/test';
import { assertEquals } from '@std/assert';

import { userMiddleware } from './userMiddleware.ts';
import { ReadOnlySigner } from '../signers/ReadOnlySigner.ts';

Deno.test('no user 401', async () => {
const { app } = testApp();
await using app = new TestApp();
const response = await app.use(userMiddleware()).request('/');
assertEquals(response.status, 401);
});

Deno.test('unsupported signer 400', async () => {
const { app, relay } = testApp();
const signer = new ReadOnlySigner('0461fcbecc4c3374439932d6b8f11269ccdb7cc973ad7a50ae362db135a474dd');
await using app = new TestApp();

const user = {
signer: new ReadOnlySigner('0461fcbecc4c3374439932d6b8f11269ccdb7cc973ad7a50ae362db135a474dd'),
relay: app.var.relay,
};

app.user(user);

const response = await app
.use(setUser({ signer, relay }))
.use(userMiddleware({ enc: 'nip44' }))
.use((c, next) => {
c.var.user.signer.nip44.encrypt; // test that the type is set
@@ -27,10 +32,11 @@ Deno.test('unsupported signer 400', async () => {
});

Deno.test('with user 200', async () => {
const { app, user } = testApp();
await using app = new TestApp();

app.user();

const response = await app
.use(setUser(user))
.use(userMiddleware())
.get('/', (c) => c.text('ok'))
.request('/');
@@ -39,10 +45,11 @@ Deno.test('with user 200', async () => {
});

Deno.test('user and role 403', async () => {
const { app, user } = testApp();
await using app = new TestApp();

app.user();

const response = await app
.use(setUser(user))
.use(userMiddleware({ role: 'admin' }))
.request('/');

@@ -50,7 +57,10 @@ Deno.test('user and role 403', async () => {
});

Deno.test('admin role 200', async () => {
const { conf, app, user, relay } = testApp();
await using app = new TestApp();
const { conf, relay } = app.var;

const user = app.user();

const event = await conf.signer.signEvent({
kind: 30382,
@@ -65,7 +75,6 @@ Deno.test('admin role 200', async () => {
await relay.event(event);

const response = await app
.use(setUser(user))
.use(userMiddleware({ role: 'admin' }))
.get('/', (c) => c.text('ok'))
.request('/');
@@ -1,13 +1,14 @@
import { DittoConf } from '@ditto/conf';
import { DittoPolyPg } from '@ditto/db';
import { DummyDB } from '@ditto/db';
import { Hono } from '@hono/hono';
import { MockRelay } from '@nostrify/nostrify/test';
import { assertEquals } from '@std/assert';

import { DittoApp } from './DittoApp.ts';
import { DittoRoute } from './DittoRoute.ts';

Deno.test('DittoApp', async () => {
await using db = new DittoPolyPg('memory://');
await using db = new DummyDB();
const conf = new DittoConf(new Map());
const relay = new MockRelay();

@@ -20,4 +21,11 @@ Deno.test('DittoApp', async () => {

// @ts-expect-error Passing a non-DittoRoute to route.
app.route('/', hono);

app.get('/error', () => {
throw new Error('test error');
});

const response = await app.request('/error');
assertEquals(response.status, 500);
});
@@ -3,11 +3,13 @@ import { Hono } from '@hono/hono';
import type { HonoOptions } from '@hono/hono/hono-base';
import type { DittoEnv } from './DittoEnv.ts';

export type DittoAppOpts = Omit<DittoEnv['Variables'], 'signal' | 'requestId'> & HonoOptions<DittoEnv>;

export class DittoApp extends Hono<DittoEnv> {
// @ts-ignore Require a DittoRoute for type safety.
declare route: (path: string, app: Hono<DittoEnv>) => Hono<DittoEnv>;

constructor(opts: Omit<DittoEnv['Variables'], 'signal' | 'requestId'> & HonoOptions<DittoEnv>) {
constructor(protected opts: DittoAppOpts) {
super(opts);

this.use((c, next) => {
@@ -50,6 +50,6 @@ export class DittoRoute extends Hono<DittoEnv> {
}
}

return c.json({ error: 'Something went wrong' }, 500);
throw error;
};
}
@@ -1,41 +1 @@
import { DittoConf } from '@ditto/conf';
import { type DittoDB, DummyDB } from '@ditto/db';
import { DittoApp, type DittoMiddleware } from '@ditto/mastoapi/router';
import { type NostrSigner, type NRelay, NSecSigner } from '@nostrify/nostrify';
import { MockRelay } from '@nostrify/nostrify/test';
import { generateSecretKey, nip19 } from 'nostr-tools';

import type { User } from '@ditto/mastoapi/middleware';

export function testApp(): {
app: DittoApp;
relay: NRelay;
conf: DittoConf;
db: DittoDB;
user: {
signer: NostrSigner;
relay: NRelay;
};
} {
const db = new DummyDB();

const nsec = nip19.nsecEncode(generateSecretKey());
const conf = new DittoConf(new Map([['DITTO_NSEC', nsec]]));

const relay = new MockRelay();
const app = new DittoApp({ conf, relay, db });

const user = {
signer: new NSecSigner(generateSecretKey()),
relay,
};

return { app, relay, conf, db, user };
}

export function setUser<S extends NostrSigner>(user: User<S>): DittoMiddleware<{ user: User<S> }> {
return async (c, next) => {
c.set('user', user);
await next();
};
}
export { TestApp } from './test/TestApp.ts';
97 packages/mastoapi/test/TestApp.ts Normal file

@@ -0,0 +1,97 @@
import { DittoConf } from '@ditto/conf';
import { type DittoDB, DummyDB } from '@ditto/db';
import { HTTPException } from '@hono/hono/http-exception';
import { type NRelay, NSecSigner } from '@nostrify/nostrify';
import { generateSecretKey, nip19 } from 'nostr-tools';

import { DittoApp, type DittoAppOpts } from '../router/DittoApp.ts';

import type { Context } from '@hono/hono';
import type { User } from '../middleware/User.ts';
import { MockRelay } from '@nostrify/nostrify/test';

interface DittoVars {
  db: DittoDB;
  conf: DittoConf;
  relay: NRelay;
}

export class TestApp extends DittoApp implements AsyncDisposable {
  private _user?: User;

  constructor(opts?: Partial<DittoAppOpts>) {
    const nsec = nip19.nsecEncode(generateSecretKey());

    const conf = opts?.conf ?? new DittoConf(
      new Map([
        ['DITTO_NSEC', nsec],
        ['LOCAL_DOMAIN', 'https://ditto.pub'],
      ]),
    );

    const db = opts?.db ?? new DummyDB();
    const relay = opts?.relay ?? new MockRelay();

    super({
      db,
      conf,
      relay,
      ...opts,
    });

    this.use(async (c: Context<{ Variables: { user?: User } }>, next) => {
      c.set('user', this._user);
      await next();
    });

    this.onError((err, c) => {
      if (err instanceof HTTPException) {
        if (err.res) {
          return err.res;
        } else {
          return c.json({ error: err.message }, err.status);
        }
      }

      throw err;
    });
  }

  get var(): DittoVars {
    return {
      db: this.opts.db,
      conf: this.opts.conf,
      relay: this.opts.relay,
    };
  }

  user(user?: User): User {
    user ??= this.createUser();
    this._user = user;
    return user;
  }

  createUser(sk?: Uint8Array): User {
    return {
      relay: this.opts.relay,
      signer: new NSecSigner(sk ?? generateSecretKey()),
    };
  }

  api = {
    get: async (path: string): Promise<Response> => {
      return await this.request(path);
    },
    post: async (path: string, body: unknown): Promise<Response> => {
      return await this.request(path, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(body),
      });
    },
  };

  async [Symbol.asyncDispose](): Promise<void> {
    await this.opts.db[Symbol.asyncDispose]();
  }
}
1 packages/transcode/.gitignore vendored Normal file

@@ -0,0 +1 @@
tmp/
13 packages/transcode/analyze.test.ts Normal file

@@ -0,0 +1,13 @@
import { assertObjectMatch } from '@std/assert';

import { analyzeFile } from './analyze.ts';

Deno.test('analyzeFile', async () => {
  const uri = new URL('./buckbunny.mp4', import.meta.url);

  const { streams } = await analyzeFile(uri);

  const videoStream = streams.find((stream) => stream.codec_type === 'video')!;

  assertObjectMatch(videoStream, { width: 1920, height: 1080 });
});
102 packages/transcode/analyze.ts Normal file

@@ -0,0 +1,102 @@
import { ffprobe } from './ffprobe.ts';

interface AnalyzeResult {
  streams: Stream[];
  format: Format;
}

interface Stream {
  index: number;
  codec_tag_string: string;
  codec_tag: string;
  codec_name?: string;
  codec_long_name?: string;
  profile?: string;
  codec_type?: string;
  width?: number;
  height?: number;
  coded_width?: number;
  coded_height?: number;
  closed_captions?: number;
  has_b_frames?: number;
  sample_aspect_ratio?: string;
  display_aspect_ratio?: string;
  pix_fmt?: string;
  level?: number;
  color_range?: string;
  color_space?: string;
  color_transfer?: string;
  color_primaries?: string;
  chroma_location?: string;
  field_order?: string;
  refs?: number;
  sample_fmt?: string;
  sample_rate?: string;
  channels?: number;
  channel_layout?: string;
  bits_per_sample?: number;
  id?: string;
  r_frame_rate?: string;
  avg_frame_rate?: string;
  time_base?: string;
  start_pts?: number;
  start_time?: string;
  duration_ts?: number;
  duration?: string;
  bit_rate?: string;
  max_bit_rate?: string;
  bits_per_raw_sample?: string;
  nb_frames?: string;
  nb_read_frames?: string;
  nb_read_packets?: string;
  disposition?: Disposition;
  tags?: Record<string, string>;
}

interface Format {
  filename: string;
  nb_streams: number;
  nb_programs: number;
  format_name: string;
  probe_score: number;
  format_long_name?: string;
  start_time?: string;
  duration?: string;
  size?: string;
  bit_rate?: string;
  tags?: Record<string, string>;
}

interface Disposition {
  default: number;
  dub: number;
  original: number;
  comment: number;
  lyrics: number;
  karaoke: number;
  forced: number;
  hearing_impaired: number;
  visual_impaired: number;
  clean_effects: number;
  attached_pic: number;
  timed_thumbnails: number;
  captions: number;
  descriptions: number;
  metadata: number;
  dependent: number;
  still_image: number;
}

export function analyzeFile(
  input: URL | ReadableStream<Uint8Array>,
  opts?: { ffprobePath?: string | URL },
): Promise<AnalyzeResult> {
  const stream = ffprobe(input, {
    'loglevel': 'fatal',
    'show_streams': '',
    'show_format': '',
    'of': 'json',
  }, opts);

  return new Response(stream).json();
}
BIN packages/transcode/buckbunny.mp4 Normal file
Binary file not shown.
7 packages/transcode/deno.json Normal file

@@ -0,0 +1,7 @@
{
  "name": "@ditto/transcode",
  "version": "1.0.0",
  "exports": {
    ".": "./mod.ts"
  }
}
31 packages/transcode/ffmpeg.test.ts Normal file

@@ -0,0 +1,31 @@
import { ffmpeg } from './ffmpeg.ts';

const uri = new URL('./buckbunny.mp4', import.meta.url);

Deno.test('ffmpeg', async () => {
  await using file = await Deno.open(uri);

  const output = ffmpeg(file.readable, {
    'c:v': 'libx264',
    'preset': 'veryfast',
    'loglevel': 'fatal',
    'movflags': 'frag_keyframe+empty_moov',
    'f': 'mp4',
  });

  await Deno.mkdir(new URL('./tmp', import.meta.url), { recursive: true });
  await Deno.writeFile(new URL('./tmp/transcoded-1.mp4', import.meta.url), output);
});

Deno.test('ffmpeg from file URI', async () => {
  const output = ffmpeg(uri, {
    'c:v': 'libx264',
    'preset': 'veryfast',
    'loglevel': 'fatal',
    'movflags': 'frag_keyframe+empty_moov',
    'f': 'mp4',
  });

  await Deno.mkdir(new URL('./tmp', import.meta.url), { recursive: true });
  await Deno.writeFile(new URL('./tmp/transcoded-2.mp4', import.meta.url), output);
});
58 packages/transcode/ffmpeg.ts Normal file

@@ -0,0 +1,58 @@
export interface FFmpegFlags {
  'safe'?: string;
  'nostdin'?: string;
  'c:v'?: string;
  'preset'?: string;
  'loglevel'?: string;
  'crf'?: string;
  'c:a'?: string;
  'b:a'?: string;
  'movflags'?: string;
  'f'?: string;
  [key: string]: string | undefined;
}

export function ffmpeg(
  input: URL | ReadableStream<Uint8Array>,
  flags: FFmpegFlags,
  opts?: { ffmpegPath?: string | URL },
): ReadableStream<Uint8Array> {
  const { ffmpegPath = 'ffmpeg' } = opts ?? {};

  const args = ['-i', input instanceof URL ? input.href : 'pipe:0'];

  for (const [key, value] of Object.entries(flags)) {
    if (typeof value === 'string') {
      if (value) {
        args.push(`-${key}`, value);
      } else {
        args.push(`-${key}`);
      }
    }
  }

  args.push('pipe:1'); // Output to stdout

  // Spawn the FFmpeg process
  const command = new Deno.Command(ffmpegPath, {
    args,
    stdin: input instanceof ReadableStream ? 'piped' : 'null',
    stdout: 'piped',
  });

  const child = command.spawn();

  // Pipe the input stream into FFmpeg stdin and ensure completion
  if (input instanceof ReadableStream) {
    input.pipeTo(child.stdin).catch((e: unknown) => {
      if (e instanceof Error && e.name === 'BrokenPipe') {
        // Ignore. ffprobe closes the pipe once it has read the metadata.
      } else {
        throw e;
      }
    });
  }

  // Return the FFmpeg stdout stream
  return child.stdout;
}
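Note (not part of the diff): a quick sketch of how the flag-to-argument mapping above behaves, including the empty-string convention used for bare flags such as 'nostdin'. It assumes it is run from packages/transcode next to the sample file used by the tests.

import { ffmpeg } from './ffmpeg.ts';

// Reuse the sample video the tests ship with.
const file = await Deno.open(new URL('./buckbunny.mp4', import.meta.url));

// With these flags the args built above become:
//   ['-i', 'pipe:0', '-nostdin', '-c:v', 'libx264', '-movflags', 'frag_keyframe+empty_moov', '-f', 'mp4', 'pipe:1']
// An empty-string value emits the flag with no argument; undefined values are skipped.
const output = ffmpeg(file.readable, {
  'nostdin': '',
  'c:v': 'libx264',
  'movflags': 'frag_keyframe+empty_moov',
  'f': 'mp4',
});

// Drain the transcoded bytes (here, just into memory).
const bytes = await new Response(output).bytes();
console.log(bytes.byteLength);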
33 packages/transcode/ffprobe.test.ts Normal file

@@ -0,0 +1,33 @@
import { assertObjectMatch } from '@std/assert';

import { ffprobe } from './ffprobe.ts';

const uri = new URL('./buckbunny.mp4', import.meta.url);

Deno.test('ffprobe from ReadableStream', async () => {
  await using file = await Deno.open(uri);

  const stream = ffprobe(file.readable, {
    'v': 'error',
    'select_streams': 'v:0',
    'show_entries': 'stream=width,height',
    'of': 'json',
  });

  const { streams: [dimensions] } = await new Response(stream).json();

  assertObjectMatch(dimensions, { width: 1920, height: 1080 });
});

Deno.test('ffprobe from file URI', async () => {
  const stream = ffprobe(uri, {
    'v': 'error',
    'select_streams': 'v:0',
    'show_entries': 'stream=width,height',
    'of': 'json',
  });

  const { streams: [dimensions] } = await new Response(stream).json();

  assertObjectMatch(dimensions, { width: 1920, height: 1080 });
});
56 packages/transcode/ffprobe.ts Normal file

@@ -0,0 +1,56 @@
export interface FFprobeFlags {
  'v'?: string;
  'select_streams'?: string;
  'show_entries'?: string;
  'of'?: string;
  [key: string]: string | undefined;
}

export function ffprobe(
  input: URL | ReadableStream<Uint8Array>,
  flags: FFprobeFlags,
  opts?: { ffprobePath?: string | URL },
): ReadableStream<Uint8Array> {
  const { ffprobePath = 'ffprobe' } = opts ?? {};

  const args = [];

  for (const [key, value] of Object.entries(flags)) {
    if (typeof value === 'string') {
      if (value) {
        args.push(`-${key}`, value);
      } else {
        args.push(`-${key}`);
      }
    }
  }

  if (input instanceof URL) {
    args.push('-i', input.href);
  } else {
    args.push('-i', 'pipe:0');
  }

  // Spawn the FFprobe process
  const command = new Deno.Command(ffprobePath, {
    args,
    stdin: input instanceof ReadableStream ? 'piped' : 'null',
    stdout: 'piped',
  });

  const child = command.spawn();

  // Pipe the input stream into FFmpeg stdin and ensure completion
  if (input instanceof ReadableStream) {
    input.pipeTo(child.stdin).catch((e: unknown) => {
      if (e instanceof Error && e.name === 'BrokenPipe') {
        // Ignore. ffprobe closes the pipe once it has read the metadata.
      } else {
        throw e;
      }
    });
  }

  // Return the FFmpeg stdout stream
  return child.stdout;
}
12 packages/transcode/frame.test.ts Normal file

@@ -0,0 +1,12 @@
import { extractVideoFrame } from './frame.ts';

const uri = new URL('./buckbunny.mp4', import.meta.url);

Deno.test('extractVideoFrame', async () => {
  await using file = await Deno.open(uri);

  const result = await extractVideoFrame(file.readable);

  await Deno.mkdir(new URL('./tmp', import.meta.url), { recursive: true });
  await Deno.writeFile(new URL('./tmp/poster.jpg', import.meta.url), result);
});
17 packages/transcode/frame.ts Normal file

@@ -0,0 +1,17 @@
import { ffmpeg } from './ffmpeg.ts';

export function extractVideoFrame(
  input: URL | ReadableStream<Uint8Array>,
  ss: string = '00:00:01',
  opts?: { ffmpegPath?: string | URL },
): Promise<Uint8Array> {
  const output = ffmpeg(input, {
    'ss': ss, // Seek to timestamp
    'frames:v': '1', // Extract only 1 frame
    'q:v': '2', // High-quality JPEG (lower = better quality)
    'f': 'image2', // Force image format
    'loglevel': 'fatal',
  }, opts);

  return new Response(output).bytes();
}
5 packages/transcode/mod.ts Normal file

@@ -0,0 +1,5 @@
export { analyzeFile } from './analyze.ts';
export { ffmpeg, type FFmpegFlags } from './ffmpeg.ts';
export { ffprobe, type FFprobeFlags } from './ffprobe.ts';
export { extractVideoFrame } from './frame.ts';
export { transcodeVideo } from './transcode.ts';
9 packages/transcode/transcode.test.ts Normal file

@@ -0,0 +1,9 @@
import { transcodeVideo } from './transcode.ts';

Deno.test('transcodeVideo', async () => {
  await using file = await Deno.open(new URL('./buckbunny.mp4', import.meta.url));
  const output = transcodeVideo(file.readable);

  await Deno.mkdir(new URL('./tmp', import.meta.url), { recursive: true });
  await Deno.writeFile(new URL('./tmp/buckbunny-transcoded.mp4', import.meta.url), output);
});
19 packages/transcode/transcode.ts Normal file

@@ -0,0 +1,19 @@
import { ffmpeg } from './ffmpeg.ts';

export function transcodeVideo(
  input: URL | ReadableStream<Uint8Array>,
  opts?: { ffmpegPath?: string | URL },
): ReadableStream<Uint8Array> {
  return ffmpeg(input, {
    'safe': '1', // Safe mode
    'nostdin': '', // Disable stdin
    'c:v': 'libx264', // Convert to H.264
    'preset': 'veryfast', // Encoding speed
    'loglevel': 'fatal', // Suppress logs
    'crf': '23', // Compression level (lower = better quality)
    'c:a': 'aac', // Convert to AAC audio
    'b:a': '128k', // Audio bitrate
    'movflags': 'frag_keyframe+empty_moov', // Ensures MP4 streaming compatibility
    'f': 'mp4', // Force MP4 format
  }, opts);
}