Make transcoding optional, refactor a bit

This commit is contained in:
Alex Gleason 2025-03-01 17:45:01 -06:00
parent 414a3b7651
commit 3035ecaca9
No known key found for this signature in database
GPG key ID: 7211D1F99744FBB7
2 changed files with 62 additions and 52 deletions

View file

@@ -279,6 +279,11 @@ export class DittoConf {
return optionalBooleanSchema.parse(this.env.get('MEDIA_ANALYZE')) ?? false;
}
/** Whether to transcode uploaded video files with ffmpeg. */
get mediaTranscode(): boolean {
return optionalBooleanSchema.parse(this.env.get('MEDIA_TRANSCODE')) ?? false;
}
/** Max upload size for files in number of bytes. Default 100MiB. */
get maxUploadSize(): number {
return Number(this.env.get('MAX_UPLOAD_SIZE') || 100 * 1024 * 1024);

View file

@@ -27,7 +27,7 @@ export async function uploadFile(
perf.mark('start');
const { conf, uploader } = c.var;
const { ffmpegPath, ffprobePath } = conf;
const { ffmpegPath, ffprobePath, mediaAnalyze, mediaTranscode } = conf;
if (!uploader) {
throw new HTTPException(500, {
@@ -45,10 +45,11 @@ export async function uploadFile(
perf.mark('probe-start');
const probe = await analyzeFile(file.stream(), { ffprobePath }).catch(() => null);
const video = probe?.streams.find((stream) => stream.codec_type === 'video');
perf.mark('probe-end');
perf.mark('transcode-start');
if (baseType === 'video') {
if (baseType === 'video' && mediaTranscode) {
let needsTranscode = false;
for (const stream of probe?.streams ?? []) {
@@ -76,8 +77,6 @@ export async function uploadFile(
const url = tags[0][1];
perf.mark('analyze-start');
if (description) {
tags.push(['alt', description]);
}
@@ -103,7 +102,12 @@ export async function uploadFile(
tags.push(['size', file.size.toString()]);
}
if (baseType === 'video' && (!image || !thumb)) {
perf.mark('analyze-start');
if (baseType === 'video' && mediaAnalyze && mediaTranscode && video && (!image || !thumb)) {
const { width, height } = video;
try {
const bytes = await extractVideoFrame(file.stream(), '00:00:01', { ffmpegPath });
const [[, url]] = await uploader.upload(new File([bytes], 'thumb.jpg', { type: 'image/jpeg' }), { signal });
@@ -111,63 +115,28 @@ export async function uploadFile(
tags.push(['image', url]);
}
const video = probe?.streams.find((stream) => stream.codec_type === 'video');
if (video && video.width && video.height) {
const { width, height } = video;
if (!dim) {
if (!dim && width && height) {
tags.push(['dim', `${width}x${height}`]);
}
if (!blurhash) {
try {
const { data, info } = await sharp(bytes)
.raw()
.ensureAlpha()
.resize({
width: width > height ? undefined : 64,
height: height > width ? undefined : 64,
fit: 'inside',
})
.toBuffer({ resolveWithObject: true });
const blurhash = encode(new Uint8ClampedArray(data), info.width, info.height, 4, 4);
tags.push(['blurhash', blurhash]);
tags.push(['blurhash', await getBlurhash(bytes)]);
}
} catch (e) {
logi({ level: 'error', ns: 'ditto.upload.analyze', error: errorJson(e) });
}
}
}
}
// If the uploader didn't already, try to get a blurhash and media dimensions.
// This requires `MEDIA_ANALYZE=true` to be configured because it comes with security tradeoffs.
if (baseType === 'image' && conf.mediaAnalyze && (!blurhash || !dim)) {
if (baseType === 'image' && mediaAnalyze && (!blurhash || !dim)) {
try {
const bytes = await new Response(file.stream()).bytes();
const img = sharp(bytes);
const { width, height } = await img.metadata();
if (!dim && (width && height)) {
tags.push(['dim', `${width}x${height}`]);
if (!dim) {
tags.push(['dim', await getImageDim(bytes)]);
}
if (!blurhash && (width && height)) {
const pixels = await img
.raw()
.ensureAlpha()
.resize({
width: width > height ? undefined : 64,
height: height > width ? undefined : 64,
fit: 'inside',
})
.toBuffer({ resolveWithObject: false })
.then((buffer) => new Uint8ClampedArray(buffer));
const blurhash = encode(pixels, width, height, 4, 4);
tags.push(['blurhash', blurhash]);
if (!blurhash) {
tags.push(['blurhash', await getBlurhash(bytes)]);
}
} catch (e) {
logi({ level: 'error', ns: 'ditto.upload.analyze', error: errorJson(e) });
@@ -209,3 +178,39 @@ export async function uploadFile(
return upload;
}
async function getImageDim(bytes: Uint8Array): Promise<`${number}x${number}`> {
const img = sharp(bytes);
const { width, height } = await img.metadata();
if (!width || !height) {
throw new Error('Image metadata is missing.');
}
return `${width}x${height}`;
}
/** Get a blurhash from an image file. */
async function getBlurhash(bytes: Uint8Array, maxDim = 64): Promise<string> {
const img = sharp(bytes);
const { width, height } = await img.metadata();
if (!width || !height) {
throw new Error('Image metadata is missing.');
}
const { data, info } = await img
.raw()
.ensureAlpha()
.resize({
width: width > height ? undefined : maxDim,
height: height > width ? undefined : maxDim,
fit: 'inside',
})
.toBuffer({ resolveWithObject: true });
const pixels = new Uint8ClampedArray(data);
return encode(pixels, info.width, info.height, 4, 4);
}