Mirror of https://gitlab.com/soapbox-pub/ditto.git, synced 2025-12-06 11:29:46 +00:00

First commit with headless stuff

Commit: ebc27e8297
Parent: 8adc87f1d9

4 changed files with 159 additions and 1 deletion
Dockerfile (10 changed lines)
@@ -1,8 +1,16 @@
+ARG DITTO_DOMAIN
+ARG DITTO_UPLOADER_CONFIG
+
+ENV DITTO_DOMAIN ${DITTO_DOMAIN}
+ENV DITTO_UPLOADER_CONFIG ${DITTO_UPLOADER_CONFIG}
+ENV PORT 5000
+
 FROM denoland/deno:1.44.2
-EXPOSE 4036
+EXPOSE 5000
 WORKDIR /app
 RUN mkdir -p data && chown -R deno data
 USER deno
 COPY . .
 RUN deno cache src/server.ts
+RUN deno task setup:headless
 CMD deno task start
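The new build args feed the RUN deno task setup:headless step at image build time. Two caveats: Docker only allows ARG (not ENV) before the first FROM, so as ordered here the ENV lines would presumably need to move below FROM for the build to succeed; and values passed via ARG/ENV are baked into the image layers, so the uploader config is visible to anyone with the image. A minimal sketch, not part of the commit, of driving the build from Deno with the two args; the tag, domain, and config value are hypothetical placeholders:

    // Sketch: invoke docker build with the two new build args via Deno.Command.
    // 'ditto:headless', the domain, and encodedConfig are hypothetical values.
    const encodedConfig = 'eyJESVRUT19VUExPQURFUiI6...'; // output of the uploader-config script

    const build = new Deno.Command('docker', {
      args: [
        'build',
        '--build-arg', 'DITTO_DOMAIN=ditto.example.com',
        '--build-arg', `DITTO_UPLOADER_CONFIG=${encodedConfig}`,
        '-t', 'ditto:headless',
        '.',
      ],
      stdout: 'inherit',
      stderr: 'inherit',
    });

    const { code } = await build.output();
    if (code !== 0) throw new Error(`docker build exited with ${code}`);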
deno.json (2 changed lines)

@@ -8,6 +8,8 @@
     "db:export": "deno run -A scripts/db-export.ts",
     "db:import": "deno run -A scripts/db-import.ts",
     "db:migrate": "deno run -A scripts/db-migrate.ts",
+    "headless:setup": "deno run -A scripts/headless/setup.ts",
+    "headless:uploader-config": "deno run -A scripts/headless/uploader-config.ts",
     "nostr:pull": "deno run -A scripts/nostr-pull.ts",
     "debug": "deno run -A --inspect src/server.ts",
     "test": "deno test -A --junit-path=./deno-test.xml",
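The two added tasks wrap the new headless scripts. Note that the Dockerfile above runs deno task setup:headless, while the task registered here is named headless:setup; presumably one of the two spellings is a leftover, and they would need to match for the image build step to work. A quick hypothetical check from the repo root:

    // Hypothetical sanity check: which of the two task spellings actually exists?
    const { tasks } = JSON.parse(await Deno.readTextFile('./deno.json'));
    for (const name of ['headless:setup', 'setup:headless']) {
      console.log(`${name}: ${name in tasks ? 'defined' : 'missing'}`);
    }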
scripts/headless/setup.ts (40 lines, new file)
@@ -0,0 +1,40 @@
+import { generateSecretKey, nip19 } from 'nostr-tools';
+import { parseUploaderConfig } from './uploader-config.ts';
+
+function scream(...args: any[]) {
+  console.error('FATAL:', ...args);
+  Deno.exit(1);
+}
+
+function missingEnv(what: string, v: string) {
+  scream(`${what} not set! Set the ${v} config variable before trying again.`);
+}
+
+if (import.meta.main) {
+  const key = generateSecretKey();
+  const DITTO_NSEC = nip19.nsecEncode(key);
+
+  const LOCAL_DOMAIN = Deno.env.get('DITTO_DOMAIN');
+  if (!LOCAL_DOMAIN) missingEnv('Domain value', 'DITTO_DOMAIN');
+
+  const uploaderConfig = Deno.env.get('DITTO_UPLOADER_CONFIG');
+  if (!uploaderConfig) missingEnv('Uploader configuration', 'DITTO_UPLOADER_CONFIG');
+
+  let uploader: ReturnType<typeof parseUploaderConfig>;
+  try {
+    uploader = parseUploaderConfig(uploaderConfig!);
+  } catch (e) {
+    scream('Error decoding uploader config:', e.message || e.toString());
+  }
+
+  const vars = {
+    LOCAL_DOMAIN,
+    DITTO_NSEC,
+    ...uploader!,
+  };
+
+  const result = Object.entries(vars)
+    .reduce((acc, [key, value]) => value ? `${acc}${key}="${value}"\n` : acc, '');
+
+  await Deno.writeTextFile('./.env', result);
+}
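Since scream ends with Deno.exit(1), execution never continues past a failed check, which is why the later non-null assertions (uploaderConfig!, ...uploader!) are safe at runtime even though the type checker cannot prove it. The final reduce writes only truthy values as KEY="value" lines. A sketch of that rendering step in isolation; the sample values are hypothetical:

    // The .env rendering step from setup.ts, run on hypothetical sample values.
    // Falsy entries are skipped, so MEDIA_DOMAIN never reaches the file here.
    const vars: Record<string, string | undefined> = {
      LOCAL_DOMAIN: 'ditto.example.com',
      DITTO_NSEC: 'nsec1...',
      DITTO_UPLOADER: 'nostrbuild',
      NOSTRBUILD_ENDPOINT: 'https://nostr.build/api/v2/upload/files',
      MEDIA_DOMAIN: undefined,
    };

    const result = Object.entries(vars)
      .reduce((acc, [key, value]) => value ? `${acc}${key}="${value}"\n` : acc, '');

    console.log(result);
    // LOCAL_DOMAIN="ditto.example.com"
    // DITTO_NSEC="nsec1..."
    // DITTO_UPLOADER="nostrbuild"
    // NOSTRBUILD_ENDPOINT="https://nostr.build/api/v2/upload/files"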
scripts/headless/uploader-config.ts (108 lines, new file)
@@ -0,0 +1,108 @@
+import { base64 } from '@scure/base';
+import { z } from 'zod';
+import { Conf } from '@/config.ts';
+import question from 'question-deno';
+
+const s3Schema = z.object({
+  DITTO_UPLOADER: z.literal('s3'),
+  S3_ACCESS_KEY: z.string(),
+  S3_SECRET_KEY: z.string(),
+  S3_ENDPOINT: z.string().url(),
+  S3_BUCKET: z.string(),
+  S3_REGION: z.string(),
+  S3_PATH_STYLE: z.union([z.literal('true'), z.literal('false')]),
+  MEDIA_DOMAIN: z.string().url(),
+});
+
+const blossomSchema = z.object({
+  DITTO_UPLOADER: z.literal('blossom'),
+  BLOSSOM_SERVERS: z.string().refine((value) => {
+    return value.split(',').every((server) => {
+      try {
+        new URL(server);
+        return true;
+      } catch {
+        return false;
+      }
+    });
+  }, { message: 'All Blossom servers must be valid URLs' }),
+});
+
+const nostrBuildSchema = z.object({
+  DITTO_UPLOADER: z.literal('nostrbuild'),
+  NOSTRBUILD_ENDPOINT: z.string().url(),
+});
+
+const ipfsSchema = z.object({
+  DITTO_UPLOADER: z.literal('ipfs'),
+  IPFS_API_URL: z.string().url(),
+  MEDIA_DOMAIN: z.string().url(),
+});
+
+const localSchema = z.object({
+  DITTO_UPLOADER: z.literal('local'),
+  UPLOADS_DIR: z.string().default(Conf.nostrbuildEndpoint),
+  MEDIA_DOMAIN: z.string().url(),
+});
+
+const uploaderSchema = z.union([
+  nostrBuildSchema,
+  blossomSchema,
+  s3Schema,
+  ipfsSchema,
+  localSchema,
+]);
+
+export function parseUploaderConfig(cfg: string) {
+  const decoded = new TextDecoder().decode(base64.decode(cfg!));
+  const parsed = JSON.parse(decoded);
+  const validated = uploaderSchema.parse(parsed);
+  return validated;
+}
+
+if (import.meta.main) {
+  const vars: Record<string, string | undefined> = {};
+
+  const domain = await question('input', 'Instance domain? (eg ditto.pub)');
+  if (!domain) {
+    throw new Error('Domain is required!');
+  }
+
+  vars.DITTO_UPLOADER = await question('list', 'How do you want to upload files?', [
+    'nostrbuild',
+    'blossom',
+    's3',
+    'ipfs',
+    'local',
+  ]);
+
+  if (vars.DITTO_UPLOADER === 'nostrbuild') {
+    vars.NOSTRBUILD_ENDPOINT = await question('input', 'nostr.build endpoint', Conf.nostrbuildEndpoint);
+  }
+  if (vars.DITTO_UPLOADER === 'blossom') {
+    vars.BLOSSOM_SERVERS = await question('input', 'Blossom servers (comma separated)', Conf.blossomServers.join(','));
+  }
+  if (vars.DITTO_UPLOADER === 's3') {
+    vars.S3_ACCESS_KEY = await question('input', 'S3 access key', Conf.s3.accessKey);
+    vars.S3_SECRET_KEY = await question('input', 'S3 secret key', Conf.s3.secretKey);
+    vars.S3_ENDPOINT = await question('input', 'S3 endpoint', Conf.s3.endPoint);
+    vars.S3_BUCKET = await question('input', 'S3 bucket', Conf.s3.bucket);
+    vars.S3_REGION = await question('input', 'S3 region', Conf.s3.region);
+    vars.S3_PATH_STYLE = String(await question('confirm', 'Use path style?', Conf.s3.pathStyle ?? false));
+    const mediaDomain = await question('input', 'Media domain', `media.${domain}`);
+    vars.MEDIA_DOMAIN = `https://${mediaDomain}`;
+  }
+  if (vars.DITTO_UPLOADER === 'ipfs') {
+    vars.IPFS_API_URL = await question('input', 'IPFS API URL', Conf.ipfs.apiUrl);
+    const mediaDomain = await question('input', 'Media domain', `media.${domain}`);
+    vars.MEDIA_DOMAIN = `https://${mediaDomain}`;
+  }
+  if (vars.DITTO_UPLOADER === 'local') {
+    vars.UPLOADS_DIR = await question('input', 'Local uploads directory', Conf.uploadsDir);
+    const mediaDomain = await question('input', 'Media domain', `media.${domain}`);
+    vars.MEDIA_DOMAIN = `https://${mediaDomain}`;
+  }
+
+  const encoded = base64.encode(new TextEncoder().encode(JSON.stringify(vars)));
+  console.log(encoded);
+}
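parseUploaderConfig reverses what the interactive branch prints at the end: base64-decode, JSON.parse, then validate against the five-way schema union, so a bad payload throws instead of silently half-configuring the instance. (One apparent copy-paste slip: localSchema defaults UPLOADS_DIR to Conf.nostrbuildEndpoint; Conf.uploadsDir, used by the prompt below it, was presumably intended.) A round-trip sketch with a hypothetical endpoint value:

    // Round-trip sketch: hand-build a config, encode it as the script would,
    // then feed it back through the exported parser. The endpoint is hypothetical.
    import { base64 } from '@scure/base';
    import { parseUploaderConfig } from './uploader-config.ts';

    const config = {
      DITTO_UPLOADER: 'nostrbuild',
      NOSTRBUILD_ENDPOINT: 'https://nostr.build/api/v2/upload/files',
    };

    // Same encoding the interactive branch prints for DITTO_UPLOADER_CONFIG.
    const encoded = base64.encode(new TextEncoder().encode(JSON.stringify(config)));

    // Throws a ZodError (or a base64/JSON error) if the payload is malformed.
    const parsed = parseUploaderConfig(encoded);
    console.log(parsed.DITTO_UPLOADER); // 'nostrbuild'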