refactor(media): store filenames, use pre-signed s3/azure URLs, UUIDs

Signed-off-by: Erik Michelson <github@erik.michelson.eu>

Commit 157a0fe278 (parent 4132833b5d), authored by Erik Michelson on 2024-06-12 18:45:49 +02:00 and committed by Philip Molares.
47 changed files with 869 additions and 389 deletions

View file

@@ -1,5 +1,5 @@
 /*
- * SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
@@ -100,28 +100,33 @@ export class MediaController {
         'uploadMedia',
       );
     }
-    const upload = await this.mediaService.saveFile(file.buffer, user, note);
+    const upload = await this.mediaService.saveFile(
+      file.originalname,
+      file.buffer,
+      user,
+      note,
+    );
     return await this.mediaService.toMediaUploadDto(upload);
   }

-  @Get(':filename')
-  @OpenApi(404, 500)
+  @Get(':uuid')
+  @OpenApi(200, 404, 500)
   async getMedia(
-    @Param('filename') filename: string,
+    @Param('uuid') uuid: string,
     @Res() response: Response,
   ): Promise<void> {
-    const mediaUpload = await this.mediaService.findUploadByFilename(filename);
-    const targetUrl = mediaUpload.fileUrl;
-    response.redirect(targetUrl);
+    const mediaUpload = await this.mediaService.findUploadByUuid(uuid);
+    const dto = await this.mediaService.toMediaUploadDto(mediaUpload);
+    response.send(dto);
   }

-  @Delete(':filename')
+  @Delete(':uuid')
   @OpenApi(204, 403, 404, 500)
   async deleteMedia(
     @RequestUser() user: User,
-    @Param('filename') filename: string,
+    @Param('uuid') uuid: string,
   ): Promise<void> {
-    const mediaUpload = await this.mediaService.findUploadByFilename(filename);
+    const mediaUpload = await this.mediaService.findUploadByUuid(uuid);
     if (
       await this.permissionsService.checkMediaDeletePermission(
         user,
@@ -129,18 +134,18 @@ export class MediaController {
       )
     ) {
       this.logger.debug(
-        `Deleting '${filename}' for user '${user.username}'`,
+        `Deleting '${uuid}' for user '${user.username}'`,
        'deleteMedia',
       );
       await this.mediaService.deleteFile(mediaUpload);
     } else {
       this.logger.warn(
-        `${user.username} tried to delete '${filename}', but is not the owner of upload or connected note`,
+        `${user.username} tried to delete '${uuid}', but is not the owner of upload or connected note`,
         'deleteMedia',
       );
       const mediaUploadNote = await mediaUpload.note;
       throw new PermissionError(
-        `Neither file '${filename}' nor note '${
+        `Neither file '${uuid}' nor note '${
           mediaUploadNote?.publicId ?? 'unknown'
         }'is owned by '${user.username}'`,
       );

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -100,28 +100,33 @@ export class MediaController {
`Received filename '${file.originalname}' for note '${note.publicId}' from user '${user.username}'`, `Received filename '${file.originalname}' for note '${note.publicId}' from user '${user.username}'`,
'uploadMedia', 'uploadMedia',
); );
const upload = await this.mediaService.saveFile(file.buffer, user, note); const upload = await this.mediaService.saveFile(
file.originalname,
file.buffer,
user,
note,
);
return await this.mediaService.toMediaUploadDto(upload); return await this.mediaService.toMediaUploadDto(upload);
} }
@Get(':filename') @Get(':uuid')
@OpenApi(404, 500) @OpenApi(200, 404, 500)
async getMedia( async getMedia(
@Param('filename') filename: string, @Param('uuid') uuid: string,
@Res() response: Response, @Res() response: Response,
): Promise<void> { ): Promise<void> {
const mediaUpload = await this.mediaService.findUploadByFilename(filename); const mediaUpload = await this.mediaService.findUploadByUuid(uuid);
const targetUrl = mediaUpload.fileUrl; const dto = await this.mediaService.toMediaUploadDto(mediaUpload);
response.redirect(targetUrl); response.send(dto);
} }
@Delete(':filename') @Delete(':uuid')
@OpenApi(204, 403, 404, 500) @OpenApi(204, 403, 404, 500)
async deleteMedia( async deleteMedia(
@RequestUser() user: User, @RequestUser() user: User,
@Param('filename') filename: string, @Param('uuid') uuid: string,
): Promise<void> { ): Promise<void> {
const mediaUpload = await this.mediaService.findUploadByFilename(filename); const mediaUpload = await this.mediaService.findUploadByUuid(uuid);
if ( if (
await this.permissionsService.checkMediaDeletePermission( await this.permissionsService.checkMediaDeletePermission(
user, user,
@ -129,18 +134,18 @@ export class MediaController {
) )
) { ) {
this.logger.debug( this.logger.debug(
`Deleting '${filename}' for user '${user.username}'`, `Deleting '${uuid}' for user '${user.username}'`,
'deleteMedia', 'deleteMedia',
); );
await this.mediaService.deleteFile(mediaUpload); await this.mediaService.deleteFile(mediaUpload);
} else { } else {
this.logger.warn( this.logger.warn(
`${user.username} tried to delete '${filename}', but is not the owner of upload or connected note`, `${user.username} tried to delete '${uuid}', but is not the owner of upload or connected note`,
'deleteMedia', 'deleteMedia',
); );
const mediaUploadNote = await mediaUpload.note; const mediaUploadNote = await mediaUpload.note;
throw new PermissionError( throw new PermissionError(
`Neither file '${filename}' nor note '${ `Neither file '${uuid}' nor note '${
mediaUploadNote?.publicId ?? 'unknown' mediaUploadNote?.publicId ?? 'unknown'
}'is owned by '${user.username}'`, }'is owned by '${user.username}'`,
); );
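
Note on the behavioral change in both controllers above: GET media/:uuid no longer redirects to a stored file URL but answers with the upload metadata DTO. A rough sketch of such a response body, with field names taken from the MediaUploadDto changes further below and purely illustrative values:

// Illustrative only – not actual API output; all values are placeholders.
const exampleMediaUploadDto = {
  uuid: '7697582e-0020-4188-9758-2e00207188ca', // example uuid from the DTO docs
  fileName: 'example.png',
  noteId: 'notePublicId123', // hypothetical public id of the linked note, may be null
  createdAt: '2024-06-12T18:45:49.000Z', // hypothetical timestamp
};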

View file

@@ -1,10 +1,12 @@
 /*
- * SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */

 export const okDescription = 'This request was successful';
+export const foundDescription =
+  'The requested resource was found at another URL';
 export const createdDescription =
   'The requested resource was successfully created';
 export const noContentDescription =

View file

@@ -1,5 +1,5 @@
 /*
- * SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
@@ -8,6 +8,7 @@ import {
   ApiBadRequestResponse,
   ApiConflictResponse,
   ApiCreatedResponse,
+  ApiFoundResponse,
   ApiInternalServerErrorResponse,
   ApiNoContentResponse,
   ApiNotFoundResponse,
@@ -21,6 +22,7 @@ import {
   badRequestDescription,
   conflictDescription,
   createdDescription,
+  foundDescription,
   internalServerErrorDescription,
   noContentDescription,
   notFoundDescription,
@@ -33,6 +35,7 @@ export type HttpStatusCodes =
   | 200
   | 201
   | 204
+  | 302
   | 400
   | 401
   | 403
@@ -130,6 +133,14 @@ export const OpenApi = (
         HttpCode(204),
       );
       break;
+    case 302:
+      decoratorsToApply.push(
+        ApiFoundResponse({
+          description: description ?? foundDescription,
+        }),
+        HttpCode(302),
+      );
+      break;
     case 400:
       decoratorsToApply.push(
         ApiBadRequestResponse({

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -29,6 +29,7 @@ import { HistoryModule } from './history/history.module';
import { IdentityModule } from './identity/identity.module'; import { IdentityModule } from './identity/identity.module';
import { LoggerModule } from './logger/logger.module'; import { LoggerModule } from './logger/logger.module';
import { TypeormLoggerService } from './logger/typeorm-logger.service'; import { TypeormLoggerService } from './logger/typeorm-logger.service';
import { MediaRedirectModule } from './media-redirect/media-redirect.module';
import { MediaModule } from './media/media.module'; import { MediaModule } from './media/media.module';
import { MonitoringModule } from './monitoring/monitoring.module'; import { MonitoringModule } from './monitoring/monitoring.module';
import { NotesModule } from './notes/notes.module'; import { NotesModule } from './notes/notes.module';
@ -49,6 +50,10 @@ const routes: Routes = [
path: '/api/private', path: '/api/private',
module: PrivateApiModule, module: PrivateApiModule,
}, },
{
path: '/media',
module: MediaRedirectModule,
},
]; ];
@Module({ @Module({
@ -112,6 +117,7 @@ const routes: Routes = [
WebsocketModule, WebsocketModule,
IdentityModule, IdentityModule,
SessionModule, SessionModule,
MediaRedirectModule,
], ],
controllers: [], controllers: [],
providers: [FrontendConfigService], providers: [FrontendConfigService],

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -16,6 +16,8 @@ describe('mediaConfig', () => {
const secretAccessKey = 'secretAccessKey'; const secretAccessKey = 'secretAccessKey';
const bucket = 'bucket'; const bucket = 'bucket';
const endPoint = 'https://endPoint'; const endPoint = 'https://endPoint';
const region = 'us-east-1';
const pathStyle = false;
// Azure // Azure
const azureConnectionString = 'connectionString'; const azureConnectionString = 'connectionString';
const container = 'container'; const container = 'container';
@ -54,6 +56,8 @@ describe('mediaConfig', () => {
HD_MEDIA_BACKEND_S3_SECRET_KEY: secretAccessKey, HD_MEDIA_BACKEND_S3_SECRET_KEY: secretAccessKey,
HD_MEDIA_BACKEND_S3_BUCKET: bucket, HD_MEDIA_BACKEND_S3_BUCKET: bucket,
HD_MEDIA_BACKEND_S3_ENDPOINT: endPoint, HD_MEDIA_BACKEND_S3_ENDPOINT: endPoint,
HD_MEDIA_BACKEND_S3_REGION: region,
HD_MEDIA_BACKEND_S3_PATH_STYLE: pathStyle.toString(),
/* eslint-enable @typescript-eslint/naming-convention */ /* eslint-enable @typescript-eslint/naming-convention */
}, },
{ {
@ -66,6 +70,8 @@ describe('mediaConfig', () => {
expect(config.backend.s3.secretAccessKey).toEqual(secretAccessKey); expect(config.backend.s3.secretAccessKey).toEqual(secretAccessKey);
expect(config.backend.s3.bucket).toEqual(bucket); expect(config.backend.s3.bucket).toEqual(bucket);
expect(config.backend.s3.endPoint).toEqual(endPoint); expect(config.backend.s3.endPoint).toEqual(endPoint);
expect(config.backend.s3.region).toEqual(region);
expect(config.backend.s3.pathStyle).toEqual(pathStyle);
restore(); restore();
}); });

View file

@@ -1,5 +1,5 @@
 /*
- * SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
@@ -7,7 +7,7 @@ import { registerAs } from '@nestjs/config';
 import * as Joi from 'joi';

 import { BackendType } from '../media/backends/backend-type.enum';
-import { buildErrorMessage } from './utils';
+import { buildErrorMessage, parseOptionalBoolean } from './utils';

 export interface MediaConfig {
   backend: MediaBackendConfig;
@@ -23,6 +23,8 @@ export interface MediaBackendConfig {
     secretAccessKey: string;
     bucket: string;
     endPoint: string;
+    region: string;
+    pathStyle: boolean;
   };
   azure: {
     connectionString: string;
@@ -59,6 +61,10 @@ const mediaSchema = Joi.object({
     endPoint: Joi.string()
       .uri({ scheme: /^https?/ })
       .label('HD_MEDIA_BACKEND_S3_ENDPOINT'),
+    region: Joi.string().optional().label('HD_MEDIA_BACKEND_S3_REGION'),
+    pathStyle: Joi.boolean()
+      .default(false)
+      .label('HD_MEDIA_BACKEND_S3_PATH_STYLE'),
   }),
   otherwise: Joi.optional(),
 }),
@@ -110,6 +116,10 @@ export default registerAs('mediaConfig', () => {
       secretAccessKey: process.env.HD_MEDIA_BACKEND_S3_SECRET_KEY,
       bucket: process.env.HD_MEDIA_BACKEND_S3_BUCKET,
       endPoint: process.env.HD_MEDIA_BACKEND_S3_ENDPOINT,
+      region: process.env.HD_MEDIA_BACKEND_S3_REGION,
+      pathStyle: parseOptionalBoolean(
+        process.env.HD_MEDIA_BACKEND_S3_PATH_STYLE,
+      ),
     },
     azure: {
       connectionString:
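
For orientation, a rough sketch (not part of the commit) of the parsed S3 backend configuration with the two new options; all values are placeholders:

// Hypothetical values for illustration only.
const exampleS3Config: MediaConfig['backend']['s3'] = {
  accessKeyId: 'my-access-key',
  secretAccessKey: 'my-secret-key',
  bucket: 'hedgedoc-uploads',
  endPoint: 'https://s3.example.org',
  region: 'us-east-1', // new: forwarded to the minio client
  pathStyle: true, // new: defaults to false when HD_MEDIA_BACKEND_S3_PATH_STYLE is unset
};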

View file

@ -22,6 +22,8 @@ export function createDefaultMockMediaConfig(): MediaConfig {
secretAccessKey: '', secretAccessKey: '',
bucket: '', bucket: '',
endPoint: '', endPoint: '',
pathStyle: false,
region: '',
}, },
azure: { azure: {
connectionString: '', connectionString: '',

View file

@ -8,6 +8,7 @@ import {
ensureNoDuplicatesExist, ensureNoDuplicatesExist,
findDuplicatesInArray, findDuplicatesInArray,
needToLog, needToLog,
parseOptionalBoolean,
parseOptionalNumber, parseOptionalNumber,
replaceAuthErrorsWithEnvironmentVariables, replaceAuthErrorsWithEnvironmentVariables,
toArrayConfig, toArrayConfig,
@ -141,4 +142,17 @@ describe('config utils', () => {
expect(parseOptionalNumber('3.14')).toEqual(3.14); expect(parseOptionalNumber('3.14')).toEqual(3.14);
}); });
}); });
describe('parseOptionalBoolean', () => {
it('returns undefined on undefined parameter', () => {
expect(parseOptionalBoolean(undefined)).toEqual(undefined);
});
it('correctly parses a given string', () => {
expect(parseOptionalBoolean('true')).toEqual(true);
expect(parseOptionalBoolean('1')).toEqual(true);
expect(parseOptionalBoolean('y')).toEqual(true);
expect(parseOptionalBoolean('false')).toEqual(false);
expect(parseOptionalBoolean('0')).toEqual(false);
expect(parseOptionalBoolean('HedgeDoc')).toEqual(false);
});
});
}); });

View file

@@ -118,3 +118,17 @@ export function parseOptionalNumber(value?: string): number | undefined {
   }
   return Number(value);
 }
+
+/**
+ * Parses a string to a boolean. The following values are considered true:
+ * true, 1, y
+ *
+ * @param value The value to parse
+ * @returns The parsed boolean or undefined if the value is undefined
+ */
+export function parseOptionalBoolean(value?: string): boolean | undefined {
+  if (value === undefined) {
+    return undefined;
+  }
+  return value === 'true' || value === '1' || value === 'y';
+}
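
A minimal usage sketch of the new helper, assuming it is called with the environment variable introduced in media.config.ts above:

import { parseOptionalBoolean } from './utils';

// 'true', '1' and 'y' parse to true, any other string to false,
// and an unset variable stays undefined so that Joi's .default(false) applies.
const pathStyle = parseOptionalBoolean(process.env.HD_MEDIA_BACKEND_S3_PATH_STYLE);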

View file

@ -0,0 +1,35 @@
/*
* SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
*
* SPDX-License-Identifier: AGPL-3.0-only
*/
import { Controller, Get, Param, Res } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { Response } from 'express';
import { OpenApi } from '../api/utils/openapi.decorator';
import { ConsoleLoggerService } from '../logger/console-logger.service';
import { MediaService } from '../media/media.service';
@OpenApi()
@ApiTags('media-redirect')
@Controller()
export class MediaRedirectController {
constructor(
private readonly logger: ConsoleLoggerService,
private mediaService: MediaService,
) {
this.logger.setContext(MediaRedirectController.name);
}
@Get(':uuid')
@OpenApi(302, 404, 500)
async getMedia(
@Param('uuid') uuid: string,
@Res() response: Response,
): Promise<void> {
const mediaUpload = await this.mediaService.findUploadByUuid(uuid);
const url = await this.mediaService.getFileUrl(mediaUpload);
response.redirect(url);
}
}

View file

@ -0,0 +1,16 @@
/*
* SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
*
* SPDX-License-Identifier: AGPL-3.0-only
*/
import { Module } from '@nestjs/common';
import { LoggerModule } from '../logger/logger.module';
import { MediaModule } from '../media/media.module';
import { MediaRedirectController } from './media-redirect.controller';
@Module({
imports: [MediaModule, LoggerModule],
controllers: [MediaRedirectController],
})
export class MediaRedirectModule {}
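
To illustrate how the new route is meant to be consumed: a client requests /media/:uuid and follows the 302 redirect to the backend-specific, time-limited URL. A hypothetical sketch using node-fetch (not part of the commit):

import fetch from 'node-fetch';

// Resolve a media upload uuid to its (possibly pre-signed) file URL.
async function resolveMediaUrl(baseUrl: string, uuid: string): Promise<string | null> {
  const response = await fetch(`${baseUrl}/media/${uuid}`, { redirect: 'manual' });
  // The controller answers with 302 Found and puts the target into the Location header.
  return response.status === 302 ? response.headers.get('location') : null;
}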

View file

@@ -1,26 +1,30 @@
 /*
- * SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
 import {
+  BlobSASPermissions,
   BlobServiceClient,
   BlockBlobClient,
   ContainerClient,
+  generateBlobSASQueryParameters,
+  StorageSharedKeyCredential,
 } from '@azure/storage-blob';
 import { Inject, Injectable } from '@nestjs/common';
+import { FileTypeResult } from 'file-type';

 import mediaConfiguration, { MediaConfig } from '../../config/media.config';
 import { MediaBackendError } from '../../errors/errors';
 import { ConsoleLoggerService } from '../../logger/console-logger.service';
 import { MediaBackend } from '../media-backend.interface';
-import { BackendData } from '../media-upload.entity';
 import { BackendType } from './backend-type.enum';

 @Injectable()
 export class AzureBackend implements MediaBackend {
   private config: MediaConfig['backend']['azure'];
   private client: ContainerClient;
+  private readonly credential: StorageSharedKeyCredential;

   constructor(
     private readonly logger: ConsoleLoggerService,
@@ -28,56 +32,76 @@ export class AzureBackend implements MediaBackend {
     private mediaConfig: MediaConfig,
   ) {
     this.logger.setContext(AzureBackend.name);
-    this.config = mediaConfig.backend.azure;
-    if (mediaConfig.backend.use === BackendType.AZURE) {
+    this.config = this.mediaConfig.backend.azure;
+    if (this.mediaConfig.backend.use === BackendType.AZURE) {
       // only create the client if the backend is configured to azure
       const blobServiceClient = BlobServiceClient.fromConnectionString(
         this.config.connectionString,
       );
+      this.credential =
+        blobServiceClient.credential as StorageSharedKeyCredential;
       this.client = blobServiceClient.getContainerClient(this.config.container);
     }
   }

   async saveFile(
+    uuid: string,
     buffer: Buffer,
-    fileName: string,
-  ): Promise<[string, BackendData]> {
+    fileType: FileTypeResult,
+  ): Promise<null> {
     const blockBlobClient: BlockBlobClient =
-      this.client.getBlockBlobClient(fileName);
+      this.client.getBlockBlobClient(uuid);
     try {
-      await blockBlobClient.upload(buffer, buffer.length);
-      const url = this.getUrl(fileName);
-      this.logger.log(`Uploaded ${url}`, 'saveFile');
-      return [url, null];
+      await blockBlobClient.upload(buffer, buffer.length, {
+        blobHTTPHeaders: {
+          blobContentType: fileType.mime,
+        },
+      });
+      this.logger.log(`Uploaded file ${uuid}`, 'saveFile');
+      return null;
     } catch (e) {
       this.logger.error(
         `error: ${(e as Error).message}`,
         (e as Error).stack,
         'saveFile',
       );
-      throw new MediaBackendError(`Could not save '${fileName}' on Azure`);
+      throw new MediaBackendError(`Could not save file '${uuid}'`);
     }
   }

-  async deleteFile(fileName: string, _: BackendData): Promise<void> {
+  async deleteFile(uuid: string, _: unknown): Promise<void> {
     const blockBlobClient: BlockBlobClient =
-      this.client.getBlockBlobClient(fileName);
+      this.client.getBlockBlobClient(uuid);
     try {
-      await blockBlobClient.delete();
-      const url = this.getUrl(fileName);
-      this.logger.log(`Deleted ${url}`, 'deleteFile');
-      return;
+      const response = await blockBlobClient.delete();
+      if (response.errorCode !== undefined) {
+        throw new MediaBackendError(
+          `Could not delete '${uuid}': ${response.errorCode}`,
+        );
+      }
+      this.logger.log(`Deleted file ${uuid}`, 'deleteFile');
     } catch (e) {
       this.logger.error(
         `error: ${(e as Error).message}`,
         (e as Error).stack,
         'deleteFile',
       );
-      throw new MediaBackendError(`Could not delete '${fileName}' on Azure`);
+      throw new MediaBackendError(`Could not delete file ${uuid}`);
     }
   }

-  private getUrl(fileName: string): string {
-    return `${this.client.url}/${fileName}`;
+  getFileUrl(uuid: string, _: unknown): Promise<string> {
+    const blockBlobClient: BlockBlobClient =
+      this.client.getBlockBlobClient(uuid);
+    const blobSAS = generateBlobSASQueryParameters(
+      {
+        containerName: this.config.container,
+        blobName: uuid,
+        permissions: BlobSASPermissions.parse('r'),
+        expiresOn: new Date(new Date().valueOf() + 3600 * 1000),
+      },
+      this.credential,
+    );
+    return Promise.resolve(`${blockBlobClient.url}?${blobSAS.toString()}`);
   }
 }

View file

@ -1,9 +1,10 @@
/* /*
* SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
import { Inject, Injectable } from '@nestjs/common'; import { Inject, Injectable } from '@nestjs/common';
import { FileTypeResult } from 'file-type';
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { join } from 'path'; import { join } from 'path';
@ -11,11 +12,10 @@ import mediaConfiguration, { MediaConfig } from '../../config/media.config';
import { MediaBackendError } from '../../errors/errors'; import { MediaBackendError } from '../../errors/errors';
import { ConsoleLoggerService } from '../../logger/console-logger.service'; import { ConsoleLoggerService } from '../../logger/console-logger.service';
import { MediaBackend } from '../media-backend.interface'; import { MediaBackend } from '../media-backend.interface';
import { BackendData } from '../media-upload.entity';
@Injectable() @Injectable()
export class FilesystemBackend implements MediaBackend { export class FilesystemBackend implements MediaBackend {
uploadDirectory = './uploads'; private readonly uploadDirectory;
constructor( constructor(
private readonly logger: ConsoleLoggerService, private readonly logger: ConsoleLoggerService,
@ -23,37 +23,56 @@ export class FilesystemBackend implements MediaBackend {
private mediaConfig: MediaConfig, private mediaConfig: MediaConfig,
) { ) {
this.logger.setContext(FilesystemBackend.name); this.logger.setContext(FilesystemBackend.name);
this.uploadDirectory = mediaConfig.backend.filesystem.uploadPath; this.uploadDirectory = this.mediaConfig.backend.filesystem.uploadPath;
} }
async saveFile( async saveFile(
uuid: string,
buffer: Buffer, buffer: Buffer,
fileName: string, fileType: FileTypeResult,
): Promise<[string, BackendData]> { ): Promise<string> {
const filePath = this.getFilePath(fileName); const filePath = this.getFilePath(uuid, fileType.ext);
this.logger.debug(`Writing file to: ${filePath}`, 'saveFile'); this.logger.debug(`Writing uploaded file to '${filePath}'`, 'saveFile');
await this.ensureDirectory(); await this.ensureDirectory();
try { try {
await fs.writeFile(filePath, buffer, null); await fs.writeFile(filePath, buffer, null);
return ['/uploads/' + fileName, null]; return JSON.stringify({ ext: fileType.ext });
} catch (e) { } catch (e) {
this.logger.error((e as Error).message, (e as Error).stack, 'saveFile'); this.logger.error((e as Error).message, (e as Error).stack, 'saveFile');
throw new MediaBackendError(`Could not save '${filePath}'`); throw new MediaBackendError(`Could not save file '${filePath}'`);
} }
} }
async deleteFile(fileName: string, _: BackendData): Promise<void> { async deleteFile(uuid: string, backendData: string): Promise<void> {
const filePath = this.getFilePath(fileName); if (!backendData) {
throw new MediaBackendError('No backend data provided');
}
const { ext } = JSON.parse(backendData) as { ext: string };
if (!ext) {
throw new MediaBackendError('No file extension in backend data');
}
const filePath = this.getFilePath(uuid, ext);
try { try {
return await fs.unlink(filePath); return await fs.unlink(filePath);
} catch (e) { } catch (e) {
this.logger.error((e as Error).message, (e as Error).stack, 'deleteFile'); this.logger.error((e as Error).message, (e as Error).stack, 'deleteFile');
throw new MediaBackendError(`Could not delete '${filePath}'`); throw new MediaBackendError(`Could not delete file '${filePath}'`);
} }
} }
private getFilePath(fileName: string): string { getFileUrl(uuid: string, backendData: string): Promise<string> {
return join(this.uploadDirectory, fileName); if (!backendData) {
throw new MediaBackendError('No backend data provided');
}
const { ext } = JSON.parse(backendData) as { ext: string };
if (!ext) {
throw new MediaBackendError('No file extension in backend data');
}
return Promise.resolve(`/uploads/${uuid}.${ext}`);
}
private getFilePath(fileName: string, extension: string): string {
return join(this.uploadDirectory, `${fileName}.${extension}`);
} }
private async ensureDirectory(): Promise<void> { private async ensureDirectory(): Promise<void> {

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -11,15 +11,19 @@ import mediaConfiguration, { MediaConfig } from '../../config/media.config';
import { MediaBackendError } from '../../errors/errors'; import { MediaBackendError } from '../../errors/errors';
import { ConsoleLoggerService } from '../../logger/console-logger.service'; import { ConsoleLoggerService } from '../../logger/console-logger.service';
import { MediaBackend } from '../media-backend.interface'; import { MediaBackend } from '../media-backend.interface';
import { BackendData } from '../media-upload.entity';
type UploadResult = { type UploadResult = {
data: { data: {
link: string; link: string;
deletehash: string; deletehash: string | null;
}; };
}; };
interface ImgurBackendData {
url: string;
deleteHash: string | null;
}
@Injectable() @Injectable()
export class ImgurBackend implements MediaBackend { export class ImgurBackend implements MediaBackend {
private config: MediaConfig['backend']['imgur']; private config: MediaConfig['backend']['imgur'];
@ -30,13 +34,10 @@ export class ImgurBackend implements MediaBackend {
private mediaConfig: MediaConfig, private mediaConfig: MediaConfig,
) { ) {
this.logger.setContext(ImgurBackend.name); this.logger.setContext(ImgurBackend.name);
this.config = mediaConfig.backend.imgur; this.config = this.mediaConfig.backend.imgur;
} }
async saveFile( async saveFile(uuid: string, buffer: Buffer): Promise<string> {
buffer: Buffer,
fileName: string,
): Promise<[string, BackendData]> {
const params = new URLSearchParams(); const params = new URLSearchParams();
params.append('image', buffer.toString('base64')); params.append('image', buffer.toString('base64'));
params.append('type', 'base64'); params.append('type', 'base64');
@ -50,36 +51,41 @@ export class ImgurBackend implements MediaBackend {
.then((res) => ImgurBackend.checkStatus(res)) .then((res) => ImgurBackend.checkStatus(res))
.then((res) => res.json())) as UploadResult; .then((res) => res.json())) as UploadResult;
this.logger.debug(`Response: ${JSON.stringify(result)}`, 'saveFile'); this.logger.debug(`Response: ${JSON.stringify(result)}`, 'saveFile');
this.logger.log(`Uploaded ${fileName}`, 'saveFile'); this.logger.log(`Uploaded file ${uuid}`, 'saveFile');
return [result.data.link, result.data.deletehash]; const backendData: ImgurBackendData = {
url: result.data.link,
deleteHash: result.data.deletehash,
};
return JSON.stringify(backendData);
} catch (e) { } catch (e) {
this.logger.error( this.logger.error(
`error: ${(e as Error).message}`, `error: ${(e as Error).message}`,
(e as Error).stack, (e as Error).stack,
'saveFile', 'saveFile',
); );
throw new MediaBackendError(`Could not save '${fileName}' on imgur`); throw new MediaBackendError(`Could not save file ${uuid}`);
} }
} }
async deleteFile(fileName: string, backendData: BackendData): Promise<void> { async deleteFile(uuid: string, jsonBackendData: string): Promise<void> {
if (backendData === null) { const backendData = JSON.parse(jsonBackendData) as ImgurBackendData;
if (backendData.deleteHash === null) {
throw new MediaBackendError( throw new MediaBackendError(
`We don't have any delete tokens for '${fileName}' and therefore can't delete this image on imgur`, `We don't have any delete tokens for file ${uuid} and therefore can't delete this image`,
); );
} }
try { try {
const result = await fetch( const result = await fetch(
`https://api.imgur.com/3/image/${backendData}`, `https://api.imgur.com/3/image/${backendData.deleteHash}`,
{ {
method: 'POST', method: 'DELETE',
// eslint-disable-next-line @typescript-eslint/naming-convention // eslint-disable-next-line @typescript-eslint/naming-convention
headers: { Authorization: `Client-ID ${this.config.clientID}` }, headers: { Authorization: `Client-ID ${this.config.clientID}` },
}, },
).then((res) => ImgurBackend.checkStatus(res)); );
ImgurBackend.checkStatus(result);
// eslint-disable-next-line @typescript-eslint/no-base-to-string // eslint-disable-next-line @typescript-eslint/no-base-to-string
this.logger.debug(`Response: ${result.toString()}`, 'deleteFile'); this.logger.log(`Deleted file ${uuid}`, 'deleteFile');
this.logger.log(`Deleted ${fileName}`, 'deleteFile');
return; return;
} catch (e) { } catch (e) {
this.logger.error( this.logger.error(
@ -87,10 +93,20 @@ export class ImgurBackend implements MediaBackend {
(e as Error).stack, (e as Error).stack,
'deleteFile', 'deleteFile',
); );
throw new MediaBackendError(`Could not delete '${fileName}' on imgur`); throw new MediaBackendError(`Could not delete file '${uuid}'`);
} }
} }
getFileUrl(uuid: string, backendData: string | null): Promise<string> {
if (backendData === null) {
throw new MediaBackendError(
`We don't have any data for file ${uuid} and therefore can't get the url of this image`,
);
}
const data = JSON.parse(backendData) as ImgurBackendData;
return Promise.resolve(data.url);
}
private static checkStatus(res: Response): Response { private static checkStatus(res: Response): Response {
if (res.ok) { if (res.ok) {
// res.status >= 200 && res.status < 300 // res.status >= 200 && res.status < 300

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2023 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -17,6 +17,7 @@ describe('s3 backend', () => {
const mockedS3AccessKeyId = 'mockedS3AccessKeyId'; const mockedS3AccessKeyId = 'mockedS3AccessKeyId';
const mockedS3SecretAccessKey = 'mockedS3SecretAccessKey'; const mockedS3SecretAccessKey = 'mockedS3SecretAccessKey';
const mockedS3Bucket = 'mockedS3Bucket'; const mockedS3Bucket = 'mockedS3Bucket';
const mockedUuid = 'cbe87987-8e70-4092-a879-878e70b09245';
const mockedLoggerService = Mock.of<ConsoleLoggerService>({ const mockedLoggerService = Mock.of<ConsoleLoggerService>({
setContext: jest.fn(), setContext: jest.fn(),
@ -31,6 +32,7 @@ describe('s3 backend', () => {
mockedClient = Mock.of<Client>({ mockedClient = Mock.of<Client>({
putObject: jest.fn(), putObject: jest.fn(),
removeObject: jest.fn(), removeObject: jest.fn(),
presignedGetObject: jest.fn(),
}); });
clientConstructorSpy = jest clientConstructorSpy = jest
@ -143,19 +145,21 @@ describe('s3 backend', () => {
const sut = new S3Backend(mockedLoggerService, mediaConfig); const sut = new S3Backend(mockedLoggerService, mediaConfig);
const mockedBuffer = Mock.of<Buffer>({}); const mockedBuffer = Mock.of<Buffer>({});
const mockedFileName = 'mockedFileName'; await sut.saveFile(mockedUuid, mockedBuffer, {
const [url, backendData] = await sut.saveFile( mime: 'image/png',
mockedBuffer, ext: 'png',
mockedFileName, });
);
expect(saveSpy).toHaveBeenCalledWith( expect(saveSpy).toHaveBeenCalledWith(
mockedS3Bucket, mockedS3Bucket,
mockedFileName, mockedUuid,
mockedBuffer, mockedBuffer,
mockedBuffer.length,
{
// eslint-disable-next-line @typescript-eslint/naming-convention
'Content-Type': 'image/png',
},
); );
expect(url).toBe('https://s3.example.org/mockedS3Bucket/mockedFileName');
expect(backendData).toBeNull();
}); });
it("will throw a MediaBackendError if the s3 client couldn't save the file", async () => { it("will throw a MediaBackendError if the s3 client couldn't save the file", async () => {
@ -167,15 +171,24 @@ describe('s3 backend', () => {
const sut = new S3Backend(mockedLoggerService, mediaConfig); const sut = new S3Backend(mockedLoggerService, mediaConfig);
const mockedBuffer = Mock.of<Buffer>({}); const mockedBuffer = Mock.of<Buffer>({});
const mockedFileName = 'mockedFileName';
await expect(() => await expect(() =>
sut.saveFile(mockedBuffer, mockedFileName), sut.saveFile(mockedUuid, mockedBuffer, {
).rejects.toThrow("Could not save 'mockedFileName' on S3"); mime: 'image/png',
ext: 'png',
}),
).rejects.toThrow(
'Could not save file cbe87987-8e70-4092-a879-878e70b09245',
);
expect(saveSpy).toHaveBeenCalledWith( expect(saveSpy).toHaveBeenCalledWith(
mockedS3Bucket, mockedS3Bucket,
mockedFileName, mockedUuid,
mockedBuffer, mockedBuffer,
mockedBuffer.length,
{
// eslint-disable-next-line @typescript-eslint/naming-convention
'Content-Type': 'image/png',
},
); );
}); });
}); });
@ -185,12 +198,11 @@ describe('s3 backend', () => {
const deleteSpy = jest const deleteSpy = jest
.spyOn(mockedClient, 'removeObject') .spyOn(mockedClient, 'removeObject')
.mockImplementation(() => Promise.resolve()); .mockImplementation(() => Promise.resolve());
const mockedFileName = 'mockedFileName';
const sut = new S3Backend(mockedLoggerService, mediaConfig); const sut = new S3Backend(mockedLoggerService, mediaConfig);
await sut.deleteFile(mockedFileName); await sut.deleteFile(mockedUuid, null);
expect(deleteSpy).toHaveBeenCalledWith(mockedS3Bucket, mockedFileName); expect(deleteSpy).toHaveBeenCalledWith(mockedS3Bucket, mockedUuid);
}); });
it("will throw a MediaBackendError if the client couldn't delete the file", async () => { it("will throw a MediaBackendError if the client couldn't delete the file", async () => {
@ -198,15 +210,50 @@ describe('s3 backend', () => {
const deleteSpy = jest const deleteSpy = jest
.spyOn(mockedClient, 'removeObject') .spyOn(mockedClient, 'removeObject')
.mockImplementation(() => Promise.reject(new Error('mocked error'))); .mockImplementation(() => Promise.reject(new Error('mocked error')));
const mockedFileName = 'mockedFileName';
const sut = new S3Backend(mockedLoggerService, mediaConfig); const sut = new S3Backend(mockedLoggerService, mediaConfig);
await expect(() => sut.deleteFile(mockedFileName)).rejects.toThrow( await expect(() => sut.deleteFile(mockedUuid, null)).rejects.toThrow(
"Could not delete 'mockedFileName' on S3", 'Could not delete file cbe87987-8e70-4092-a879-878e70b09245',
); );
expect(deleteSpy).toHaveBeenCalledWith(mockedS3Bucket, mockedFileName); expect(deleteSpy).toHaveBeenCalledWith(mockedS3Bucket, mockedUuid);
});
});
describe('getFileUrl', () => {
it('returns a signed url', async () => {
const mediaConfig = mockMediaConfig('https://s3.example.org');
const fileUrlSpy = jest
.spyOn(mockedClient, 'presignedGetObject')
.mockImplementation(() =>
Promise.resolve(
'https://s3.example.org/mockedS3Bucket/cbe87987-8e70-4092-a879-878e70b09245?mockedSignature',
),
);
const sut = new S3Backend(mockedLoggerService, mediaConfig);
const url = await sut.getFileUrl(mockedUuid, null);
expect(fileUrlSpy).toHaveBeenCalledWith(mockedS3Bucket, mockedUuid);
expect(url).toBe(
'https://s3.example.org/mockedS3Bucket/cbe87987-8e70-4092-a879-878e70b09245?mockedSignature',
);
});
it('throws a MediaBackendError if the client could not generate a signed url', async () => {
const mediaConfig = mockMediaConfig('https://s3.example.org');
const fileUrlSpy = jest
.spyOn(mockedClient, 'presignedGetObject')
.mockImplementation(() => {
throw new Error('mocked error');
});
const sut = new S3Backend(mockedLoggerService, mediaConfig);
await expect(() => sut.getFileUrl(mockedUuid, null)).rejects.toThrow(
'Could not get URL for file cbe87987-8e70-4092-a879-878e70b09245',
);
expect(fileUrlSpy).toHaveBeenCalledWith(mockedS3Bucket, mockedUuid);
}); });
}); });
}); });

View file

@@ -1,9 +1,10 @@
 /*
- * SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
 import { Inject, Injectable } from '@nestjs/common';
+import { FileTypeResult } from 'file-type';
 import { Client } from 'minio';
 import { URL } from 'url';
@@ -11,7 +12,6 @@ import mediaConfiguration, { MediaConfig } from '../../config/media.config';
 import { MediaBackendError } from '../../errors/errors';
 import { ConsoleLoggerService } from '../../logger/console-logger.service';
 import { MediaBackend } from '../media-backend.interface';
-import { BackendData } from '../media-upload.entity';
 import { BackendType } from './backend-type.enum';

 @Injectable()
@@ -19,64 +19,74 @@ export class S3Backend implements MediaBackend {
   private config: MediaConfig['backend']['s3'];
   private client: Client;

+  private static determinePort(url: URL): number | undefined {
+    const port = parseInt(url.port);
+    return isNaN(port) ? undefined : port;
+  }
+
   constructor(
     private readonly logger: ConsoleLoggerService,
     @Inject(mediaConfiguration.KEY)
     private mediaConfig: MediaConfig,
   ) {
     this.logger.setContext(S3Backend.name);
-    if (mediaConfig.backend.use !== BackendType.S3) {
+    if (this.mediaConfig.backend.use !== BackendType.S3) {
       return;
     }
-    this.config = mediaConfig.backend.s3;
+    this.config = this.mediaConfig.backend.s3;
     const url = new URL(this.config.endPoint);
     const isSecure = url.protocol === 'https:';
     this.client = new Client({
       endPoint: url.hostname,
-      port: this.determinePort(url),
+      port: S3Backend.determinePort(url),
       useSSL: isSecure,
       accessKey: this.config.accessKeyId,
       secretKey: this.config.secretAccessKey,
+      pathStyle: this.config.pathStyle,
+      region: this.config.region,
     });
   }

-  private determinePort(url: URL): number | undefined {
-    const port = parseInt(url.port);
-    return isNaN(port) ? undefined : port;
-  }
-
   async saveFile(
+    uuid: string,
     buffer: Buffer,
-    fileName: string,
-  ): Promise<[string, BackendData]> {
+    fileType: FileTypeResult,
+  ): Promise<null> {
     try {
-      await this.client.putObject(this.config.bucket, fileName, buffer);
-      this.logger.log(`Uploaded file ${fileName}`, 'saveFile');
-      return [this.getUrl(fileName), null];
+      await this.client.putObject(
+        this.config.bucket,
+        uuid,
+        buffer,
+        buffer.length,
+        {
+          // eslint-disable-next-line @typescript-eslint/naming-convention
+          'Content-Type': fileType.mime,
+        },
+      );
+      this.logger.log(`Uploaded file ${uuid}`, 'saveFile');
+      return null;
     } catch (e) {
       this.logger.error((e as Error).message, (e as Error).stack, 'saveFile');
-      throw new MediaBackendError(`Could not save '${fileName}' on S3`);
+      throw new MediaBackendError(`Could not save file ${uuid}`);
     }
   }

-  async deleteFile(fileName: string): Promise<void> {
+  async deleteFile(uuid: string, _: unknown): Promise<void> {
     try {
-      await this.client.removeObject(this.config.bucket, fileName);
-      const url = this.getUrl(fileName);
-      this.logger.log(`Deleted ${url}`, 'deleteFile');
-      return;
+      await this.client.removeObject(this.config.bucket, uuid);
+      this.logger.log(`Deleted uploaded file ${uuid}`, 'deleteFile');
     } catch (e) {
-      this.logger.error((e as Error).message, (e as Error).stack, 'saveFile');
-      throw new MediaBackendError(`Could not delete '${fileName}' on S3`);
+      this.logger.error((e as Error).message, (e as Error).stack, 'deleteFile');
+      throw new MediaBackendError(`Could not delete file ${uuid}`);
     }
   }

-  private getUrl(fileName: string): string {
-    const url = new URL(this.config.endPoint);
-    if (!url.pathname.endsWith('/')) {
-      url.pathname += '/';
-    }
-    url.pathname += `${this.config.bucket}/${fileName}`;
-    return url.toString();
+  async getFileUrl(uuid: string, _: unknown): Promise<string> {
+    try {
+      return await this.client.presignedGetObject(this.config.bucket, uuid);
+    } catch (e) {
+      this.logger.error((e as Error).message, (e as Error).stack, 'getFileUrl');
+      throw new MediaBackendError(`Could not get URL for file ${uuid}`);
+    }
   }
 }

View file

@ -1,9 +1,10 @@
/* /*
* SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
import { Inject, Injectable } from '@nestjs/common'; import { Inject, Injectable } from '@nestjs/common';
import { FileTypeResult } from 'file-type';
import fetch, { Response } from 'node-fetch'; import fetch, { Response } from 'node-fetch';
import { URL } from 'url'; import { URL } from 'url';
@ -11,14 +12,13 @@ import mediaConfiguration, { MediaConfig } from '../../config/media.config';
import { MediaBackendError } from '../../errors/errors'; import { MediaBackendError } from '../../errors/errors';
import { ConsoleLoggerService } from '../../logger/console-logger.service'; import { ConsoleLoggerService } from '../../logger/console-logger.service';
import { MediaBackend } from '../media-backend.interface'; import { MediaBackend } from '../media-backend.interface';
import { BackendData } from '../media-upload.entity';
import { BackendType } from './backend-type.enum'; import { BackendType } from './backend-type.enum';
@Injectable() @Injectable()
export class WebdavBackend implements MediaBackend { export class WebdavBackend implements MediaBackend {
private config: MediaConfig['backend']['webdav']; private config: MediaConfig['backend']['webdav'];
private authHeader: string; private authHeader: string;
private baseUrl: string; private readonly baseUrl: string;
constructor( constructor(
private readonly logger: ConsoleLoggerService, private readonly logger: ConsoleLoggerService,
@ -26,11 +26,10 @@ export class WebdavBackend implements MediaBackend {
private mediaConfig: MediaConfig, private mediaConfig: MediaConfig,
) { ) {
this.logger.setContext(WebdavBackend.name); this.logger.setContext(WebdavBackend.name);
if (mediaConfig.backend.use === BackendType.WEBDAV) { if (this.mediaConfig.backend.use === BackendType.WEBDAV) {
this.config = mediaConfig.backend.webdav; this.config = this.mediaConfig.backend.webdav;
const url = new URL(this.config.connectionString); const url = new URL(this.config.connectionString);
const port = url.port !== '' ? `:${url.port}` : ''; this.baseUrl = url.toString();
this.baseUrl = `${url.protocol}//${url.hostname}${port}${url.pathname}`;
if (this.config.uploadDir && this.config.uploadDir !== '') { if (this.config.uploadDir && this.config.uploadDir !== '') {
this.baseUrl = WebdavBackend.joinURL( this.baseUrl = WebdavBackend.joinURL(
this.baseUrl, this.baseUrl,
@ -61,12 +60,14 @@ export class WebdavBackend implements MediaBackend {
} }
async saveFile( async saveFile(
uuid: string,
buffer: Buffer, buffer: Buffer,
fileName: string, fileType: FileTypeResult,
): Promise<[string, BackendData]> { ): Promise<string> {
try { try {
const contentLength = buffer.length; const contentLength = buffer.length;
await fetch(WebdavBackend.joinURL(this.baseUrl, '/', fileName), { const remoteFileName = `${uuid}.${fileType.ext}`;
await fetch(WebdavBackend.joinURL(this.baseUrl, '/', remoteFileName), {
method: 'PUT', method: 'PUT',
body: buffer, body: buffer,
headers: { headers: {
@ -77,34 +78,49 @@ export class WebdavBackend implements MediaBackend {
'If-None-Match': '*', // Don't overwrite already existing files 'If-None-Match': '*', // Don't overwrite already existing files
}, },
}).then((res) => WebdavBackend.checkStatus(res)); }).then((res) => WebdavBackend.checkStatus(res));
this.logger.log(`Uploaded file ${fileName}`, 'saveFile'); this.logger.log(`Uploaded file ${uuid}`, 'saveFile');
return [this.getUrl(fileName), null]; return JSON.stringify({ file: remoteFileName });
} catch (e) { } catch (e) {
this.logger.error((e as Error).message, (e as Error).stack, 'saveFile'); this.logger.error((e as Error).message, (e as Error).stack, 'saveFile');
throw new MediaBackendError(`Could not save '${fileName}' on WebDav`); throw new MediaBackendError(`Could not save upload '${uuid}'`);
} }
} }
async deleteFile(fileName: string, _: BackendData): Promise<void> { async deleteFile(uuid: string, backendData: string): Promise<void> {
if (!backendData) {
throw new MediaBackendError('No backend data provided');
}
try { try {
await fetch(WebdavBackend.joinURL(this.baseUrl, '/', fileName), { const { file } = JSON.parse(backendData) as { file: string };
if (!file) {
throw new MediaBackendError('No file name in backend data');
}
await fetch(WebdavBackend.joinURL(this.baseUrl, '/', file), {
method: 'DELETE', method: 'DELETE',
headers: { headers: {
// eslint-disable-next-line @typescript-eslint/naming-convention // eslint-disable-next-line @typescript-eslint/naming-convention
Authorization: this.authHeader, Authorization: this.authHeader,
}, },
}).then((res) => WebdavBackend.checkStatus(res)); }).then((res) => WebdavBackend.checkStatus(res));
const url = this.getUrl(fileName); this.logger.log(`Deleted upload ${uuid}`, 'deleteFile');
this.logger.log(`Deleted ${url}`, 'deleteFile');
return; return;
} catch (e) { } catch (e) {
this.logger.error((e as Error).message, (e as Error).stack, 'saveFile'); this.logger.error((e as Error).message, (e as Error).stack, 'deleteFile');
throw new MediaBackendError(`Could not delete '${fileName}' on WebDav`); throw new MediaBackendError(`Could not delete upload '${uuid}'`);
} }
} }
private getUrl(fileName: string): string { getFileUrl(_: string, backendData: string): Promise<string> {
return WebdavBackend.joinURL(this.config.publicUrl, '/', fileName); if (!backendData) {
throw new MediaBackendError('No backend data provided');
}
const { file } = JSON.parse(backendData) as { file: string };
if (!file) {
throw new MediaBackendError('No file name in backend data');
}
return Promise.resolve(
WebdavBackend.joinURL(this.config.publicUrl, '/', file),
);
} }
private static generateBasicAuthHeader( private static generateBasicAuthHeader(

View file

@@ -1,25 +1,39 @@
 /*
- * SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
-import { BackendData } from './media-upload.entity';
+import { FileTypeResult } from 'file-type';

 export interface MediaBackend {
   /**
    * Saves a file according to backend internals.
+   * @param uuid Unique identifier of the uploaded file
    * @param buffer File data
-   * @param fileName Name of the file to save. Can include a file extension.
+   * @param fileType File type result
    * @throws {MediaBackendError} - there was an error saving the file
-   * @return Tuple of file URL and internal backend data, which should be saved.
+   * @return The internal backend data, which should be saved
    */
-  saveFile(buffer: Buffer, fileName: string): Promise<[string, BackendData]>;
+  saveFile(
+    uuid: string,
+    buffer: Buffer,
+    fileType?: FileTypeResult,
+  ): Promise<string | null>;

   /**
    * Delete a file from the backend
-   * @param fileName String to identify the file
+   * @param uuid Unique identifier of the uploaded file
    * @param backendData Internal backend data
    * @throws {MediaBackendError} - there was an error deleting the file
    */
-  deleteFile(fileName: string, backendData: BackendData): Promise<void>;
+  deleteFile(uuid: string, backendData: string | null): Promise<void>;
+
+  /**
+   * Get a publicly accessible URL of a file from the backend
+   * @param uuid Unique identifier of the uploaded file
+   * @param backendData Internal backend data
+   * @throws {MediaBackendError} - there was an error getting the file
+   * @return Public accessible URL of the file
+   */
+  getFileUrl(uuid: string, backendData: string | null): Promise<string>;
 }
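
To make the reshaped contract concrete, here is a hypothetical in-memory backend implementing the three methods; it is illustration only and not part of the commit:

import { FileTypeResult } from 'file-type';
import { MediaBackend } from './media-backend.interface';

export class InMemoryBackend implements MediaBackend {
  private readonly files = new Map<string, Buffer>();

  saveFile(
    uuid: string,
    buffer: Buffer,
    fileType?: FileTypeResult,
  ): Promise<string | null> {
    this.files.set(uuid, buffer);
    // Everything needed to serve or delete the file later is serialized into the backend data string.
    return Promise.resolve(JSON.stringify({ ext: fileType?.ext ?? 'bin' }));
  }

  deleteFile(uuid: string, _backendData: string | null): Promise<void> {
    this.files.delete(uuid);
    return Promise.resolve();
  }

  getFileUrl(uuid: string, backendData: string | null): Promise<string> {
    const ext = backendData
      ? (JSON.parse(backendData) as { ext: string }).ext
      : 'bin';
    // A real backend returns a pre-signed or otherwise publicly reachable URL here.
    return Promise.resolve(`memory://${uuid}.${ext}`);
  }
}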

View file

@@ -1,5 +1,5 @@
 /*
- * SPDX-FileCopyrightText: 2023 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
@@ -12,12 +12,20 @@ import { Username } from '../utils/username';
 export class MediaUploadDto extends BaseDto {
   /**
-   * The id of the media file.
-   * @example "testfile123.jpg"
+   * The uuid of the media file.
+   * @example "7697582e-0020-4188-9758-2e00207188ca"
    */
   @IsString()
   @ApiProperty()
-  id: string;
+  uuid: string;
+
+  /**
+   * The original filename of the media upload.
+   * @example "example.png"
+   */
+  @IsString()
+  @ApiProperty()
+  fileName: string;

   /**
    * The publicId of the note to which the uploaded file is linked to.
@@ -26,7 +34,7 @@ export class MediaUploadDto extends BaseDto {
   @IsString()
   @IsOptional()
   @ApiProperty()
-  notePublicId: string | null;
+  noteId: string | null;

   /**
    * The date when the upload objects was created.

View file

@@ -1,5 +1,5 @@
 /*
- * SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
@@ -15,37 +15,49 @@ import { Note } from '../notes/note.entity';
 import { User } from '../users/user.entity';
 import { BackendType } from './backends/backend-type.enum';

-export type BackendData = string | null;
-
 @Entity()
 export class MediaUpload {
+  /** The unique identifier of a media upload */
   @PrimaryColumn()
-  id: string;
+  uuid: string;

+  /**
+   * The note where a media file was uploaded, required for the media browser in the note editor.
+   * Can be set to null after creation when the note was deleted without the associated uploads
+   */
   @ManyToOne((_) => Note, (note) => note.mediaUploads, {
     nullable: true,
   })
   note: Promise<Note | null>;

+  /** The user who uploaded the media file or {@code null} if uploaded by a guest user */
   @ManyToOne((_) => User, (user) => user.mediaUploads, {
     nullable: true,
   })
   user: Promise<User | null>;

+  /** The original filename of the media upload */
+  @Column()
+  fileName: string;
+
+  /** The backend type where this upload is stored */
   @Column({
     nullable: false,
   })
   backendType: string;

-  @Column()
-  fileUrl: string;
-
+  /**
+   * Additional data, depending on the backend type, serialized as JSON.
+   * This can include for example required additional identifiers for retrieving the file from the backend or to
+   * delete the file afterward again.
+   */
   @Column({
     nullable: true,
     type: 'text',
   })
-  backendData: BackendData | null;
+  backendData: string | null;

+  /** The date when the upload was created */
   @CreateDateColumn()
   createdAt: Date;
@@ -53,30 +65,30 @@ export class MediaUpload {
   private constructor() {}

   /**
-   * Create a new media upload enity
-   * @param id the id of the upload
+   * Create a new media upload entity
+   *
+   * @param uuid the unique identifier of the upload
+   * @param fileName the original filename of the uploaded file
    * @param note the note the upload should be associated with. This is required despite the fact the note field is optional, because it's possible to delete a note without also deleting the associated media uploads, but a note is required for the initial creation.
    * @param user the user that owns the upload
-   * @param extension which file extension the upload has
    * @param backendType on which type of media backend the upload is saved
    * @param backendData the backend data returned by the media backend
-   * @param fileUrl the url where the upload can be accessed
    */
   public static create(
-    id: string,
+    uuid: string,
+    fileName: string,
     note: Note,
     user: User | null,
-    extension: string,
     backendType: BackendType,
-    fileUrl: string,
+    backendData: string | null,
   ): Omit<MediaUpload, 'createdAt'> {
     const upload = new MediaUpload();
-    upload.id = id;
+    upload.uuid = uuid;
+    upload.fileName = fileName;
     upload.note = Promise.resolve(note);
     upload.user = Promise.resolve(user);
     upload.backendType = backendType;
-    upload.backendData = null;
-    upload.fileUrl = fileUrl;
+    upload.backendData = backendData;
     return upload;
   }
 }

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -35,7 +35,7 @@ import { User } from '../users/user.entity';
import { UsersModule } from '../users/users.module'; import { UsersModule } from '../users/users.module';
import { BackendType } from './backends/backend-type.enum'; import { BackendType } from './backends/backend-type.enum';
import { FilesystemBackend } from './backends/filesystem-backend'; import { FilesystemBackend } from './backends/filesystem-backend';
import { BackendData, MediaUpload } from './media-upload.entity'; import { MediaUpload } from './media-upload.entity';
import { MediaService } from './media.service'; import { MediaService } from './media.service';
describe('MediaService', () => { describe('MediaService', () => {
@ -120,14 +120,16 @@ describe('MediaService', () => {
); );
const user = User.create('test123', 'Test 123') as User; const user = User.create('test123', 'Test 123') as User;
const uuid = 'f7d334bb-6bb6-451b-9334-bb6bb6d51b5a';
const filename = 'test.jpg';
const note = Note.create(user) as Note; const note = Note.create(user) as Note;
const mediaUpload = MediaUpload.create( const mediaUpload = MediaUpload.create(
'test', uuid,
filename,
note, note,
user, user,
'.jpg',
BackendType.FILESYSTEM, BackendType.FILESYSTEM,
'test/test', null,
) as MediaUpload; ) as MediaUpload;
const createQueryBuilder = { const createQueryBuilder = {
@ -174,40 +176,40 @@ describe('MediaService', () => {
it('works', async () => { it('works', async () => {
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
let fileId = ''; let givenUuid = '';
jest jest.spyOn(mediaRepo, 'save').mockImplementation();
.spyOn(mediaRepo, 'save')
.mockImplementationOnce(async (entry: MediaUpload) => {
fileId = entry.id;
return entry;
});
jest jest
.spyOn(service.mediaBackend, 'saveFile') .spyOn(service.mediaBackend, 'saveFile')
.mockImplementationOnce( .mockImplementationOnce(
async ( async (uuid: string, buffer: Buffer): Promise<string | null> => {
buffer: Buffer,
fileName: string,
): Promise<[string, BackendData]> => {
expect(buffer).toEqual(testImage); expect(buffer).toEqual(testImage);
return [fileName, null]; givenUuid = uuid;
return null;
}, },
); );
const upload = await service.saveFile(testImage, user, note); jest.spyOn(mediaRepo, 'save').mockImplementationOnce(async (entry) => {
expect(upload.fileUrl).toEqual(fileId); expect(entry.uuid).toEqual(givenUuid);
return entry as MediaUpload;
});
const upload = await service.saveFile('test.jpg', testImage, user, note);
expect(upload.fileName).toEqual('test.jpg');
expect(upload.uuid).toEqual(givenUuid);
await expect(upload.note).resolves.toEqual(note);
await expect(upload.user).resolves.toEqual(user);
}); });
describe('fails:', () => { describe('fails:', () => {
it('MIME type not identifiable', async () => { it('MIME type not identifiable', async () => {
await expect( await expect(
service.saveFile(Buffer.alloc(1), user, note), service.saveFile('fail.png', Buffer.alloc(1), user, note),
).rejects.toThrow(ClientError); ).rejects.toThrow(ClientError);
}); });
it('MIME type not supported', async () => { it('MIME type not supported', async () => {
const testText = await fs.readFile('test/public-api/fixtures/test.zip'); const testText = await fs.readFile('test/public-api/fixtures/test.zip');
await expect(service.saveFile(testText, user, note)).rejects.toThrow( await expect(
ClientError, service.saveFile('fail.zip', testText, user, note),
); ).rejects.toThrow(ClientError);
}); });
}); });
}); });
@ -215,7 +217,12 @@ describe('MediaService', () => {
describe('deleteFile', () => { describe('deleteFile', () => {
it('works', async () => { it('works', async () => {
const mockMediaUploadEntry = { const mockMediaUploadEntry = {
id: 'testMediaUpload', uuid: '64f260cc-e0d0-47e7-b260-cce0d097e767',
fileName: 'testFileName',
note: Promise.resolve({
id: 123,
} as Note),
backendType: BackendType.FILESYSTEM,
backendData: 'testBackendData', backendData: 'testBackendData',
user: Promise.resolve({ user: Promise.resolve({
username: 'hardcoded', username: 'hardcoded',
@ -224,8 +231,8 @@ describe('MediaService', () => {
jest jest
.spyOn(service.mediaBackend, 'deleteFile') .spyOn(service.mediaBackend, 'deleteFile')
.mockImplementationOnce( .mockImplementationOnce(
async (fileName: string, backendData: BackendData): Promise<void> => { async (uuid: string, backendData: string | null): Promise<void> => {
expect(fileName).toEqual(mockMediaUploadEntry.id); expect(uuid).toEqual(mockMediaUploadEntry.uuid);
expect(backendData).toEqual(mockMediaUploadEntry.backendData); expect(backendData).toEqual(mockMediaUploadEntry.backendData);
}, },
); );
@ -238,23 +245,49 @@ describe('MediaService', () => {
await service.deleteFile(mockMediaUploadEntry); await service.deleteFile(mockMediaUploadEntry);
}); });
}); });
describe('getFileUrl', () => {
it('works', async () => {
const mockMediaUploadEntry = {
uuid: '64f260cc-e0d0-47e7-b260-cce0d097e767',
fileName: 'testFileName',
note: Promise.resolve({
id: 123,
} as Note),
backendType: BackendType.FILESYSTEM,
backendData: '{"ext": "png"}',
user: Promise.resolve({
username: 'hardcoded',
} as User),
} as MediaUpload;
await expect(service.getFileUrl(mockMediaUploadEntry)).resolves.toEqual(
'/uploads/64f260cc-e0d0-47e7-b260-cce0d097e767.png',
);
});
});
describe('findUploadByFilename', () => { describe('findUploadByFilename', () => {
it('works', async () => { it('works', async () => {
const testFileName = 'testFilename'; const testFileName = 'testFilename';
const username = 'hardcoded'; const username = 'hardcoded';
const backendData = 'testBackendData'; const backendData = 'testBackendData';
const mockMediaUploadEntry = { const mockMediaUploadEntry = {
id: 'testMediaUpload', uuid: '64f260cc-e0d0-47e7-b260-cce0d097e767',
backendData: backendData, fileName: testFileName,
note: Promise.resolve({
id: 123,
} as Note),
backendType: BackendType.FILESYSTEM,
backendData,
user: Promise.resolve({ user: Promise.resolve({
username: username, username,
} as User), } as User),
} as MediaUpload; } as MediaUpload;
jest jest
.spyOn(mediaRepo, 'findOne') .spyOn(mediaRepo, 'findOne')
.mockResolvedValueOnce(mockMediaUploadEntry); .mockResolvedValueOnce(mockMediaUploadEntry);
const mediaUpload = await service.findUploadByFilename(testFileName); const mediaUpload = await service.findUploadByFilename(testFileName);
expect((await mediaUpload.user).username).toEqual(username); expect((await mediaUpload.user)?.username).toEqual(username);
expect(mediaUpload.backendData).toEqual(backendData); expect(mediaUpload.backendData).toEqual(backendData);
}); });
it("fails: can't find mediaUpload", async () => { it("fails: can't find mediaUpload", async () => {
@ -271,10 +304,15 @@ describe('MediaService', () => {
const username = 'hardcoded'; const username = 'hardcoded';
it('with one upload from user', async () => { it('with one upload from user', async () => {
const mockMediaUploadEntry = { const mockMediaUploadEntry = {
id: 'testMediaUpload', uuid: '64f260cc-e0d0-47e7-b260-cce0d097e767',
backendData: 'testBackendData', fileName: 'testFileName',
note: Promise.resolve({
id: 123,
} as Note),
backendType: BackendType.FILESYSTEM,
backendData: null,
user: Promise.resolve({ user: Promise.resolve({
username: username, username,
} as User), } as User),
} as MediaUpload; } as MediaUpload;
createQueryBuilderFunc.getMany = () => [mockMediaUploadEntry]; createQueryBuilderFunc.getMany = () => [mockMediaUploadEntry];
@ -304,11 +342,16 @@ describe('MediaService', () => {
describe('works', () => { describe('works', () => {
it('with one upload to note', async () => { it('with one upload to note', async () => {
const mockMediaUploadEntry = { const mockMediaUploadEntry = {
id: 'testMediaUpload', uuid: '64f260cc-e0d0-47e7-b260-cce0d097e767',
backendData: 'testBackendData', fileName: 'testFileName',
note: Promise.resolve({ note: Promise.resolve({
id: 123, id: 123,
} as Note), } as Note),
backendType: BackendType.FILESYSTEM,
backendData: null,
user: Promise.resolve({
username: 'mockUser',
} as User),
} as MediaUpload; } as MediaUpload;
const createQueryBuilder = { const createQueryBuilder = {
where: () => createQueryBuilder, where: () => createQueryBuilder,
@ -371,18 +414,18 @@ describe('MediaService', () => {
Alias.create('test', mockNote, true) as Alias, Alias.create('test', mockNote, true) as Alias,
]); ]);
const mockMediaUploadEntry = { const mockMediaUploadEntry = {
id: 'testMediaUpload', uuid: '64f260cc-e0d0-47e7-b260-cce0d097e767',
backendData: 'testBackendData', fileName: 'testFileName',
note: Promise.resolve(mockNote), note: mockNote,
backendType: BackendType.FILESYSTEM,
backendData: null,
user: Promise.resolve({ user: Promise.resolve({
username: 'hardcoded', username: 'mockUser',
} as User), } as User),
} as MediaUpload; } as unknown as MediaUpload;
jest jest.spyOn(mediaRepo, 'save').mockImplementationOnce(async (entry) => {
.spyOn(mediaRepo, 'save')
.mockImplementationOnce(async (entry: MediaUpload) => {
expect(await entry.note).toBeNull(); expect(await entry.note).toBeNull();
return entry; return entry as MediaUpload;
}); });
await service.removeNoteFromMediaUpload(mockMediaUploadEntry); await service.removeNoteFromMediaUpload(mockMediaUploadEntry);
expect(mediaRepo.save).toHaveBeenCalled(); expect(mediaRepo.save).toHaveBeenCalled();

View file

@ -1,22 +1,20 @@
/* /*
* SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
import { Inject, Injectable } from '@nestjs/common'; import { Inject, Injectable } from '@nestjs/common';
import { ModuleRef } from '@nestjs/core'; import { ModuleRef } from '@nestjs/core';
import { InjectRepository } from '@nestjs/typeorm'; import { InjectRepository } from '@nestjs/typeorm';
import crypto from 'crypto';
import * as FileType from 'file-type'; import * as FileType from 'file-type';
import { Repository } from 'typeorm'; import { Repository } from 'typeorm';
import { v4 as uuidV4 } from 'uuid';
import mediaConfiguration, { MediaConfig } from '../config/media.config'; import mediaConfiguration, { MediaConfig } from '../config/media.config';
import { ClientError, NotInDBError } from '../errors/errors'; import { ClientError, NotInDBError } from '../errors/errors';
import { ConsoleLoggerService } from '../logger/console-logger.service'; import { ConsoleLoggerService } from '../logger/console-logger.service';
import { Note } from '../notes/note.entity'; import { Note } from '../notes/note.entity';
import { NotesService } from '../notes/notes.service';
import { User } from '../users/user.entity'; import { User } from '../users/user.entity';
import { UsersService } from '../users/users.service';
import { AzureBackend } from './backends/azure-backend'; import { AzureBackend } from './backends/azure-backend';
import { BackendType } from './backends/backend-type.enum'; import { BackendType } from './backends/backend-type.enum';
import { FilesystemBackend } from './backends/filesystem-backend'; import { FilesystemBackend } from './backends/filesystem-backend';
@ -36,8 +34,6 @@ export class MediaService {
private readonly logger: ConsoleLoggerService, private readonly logger: ConsoleLoggerService,
@InjectRepository(MediaUpload) @InjectRepository(MediaUpload)
private mediaUploadRepository: Repository<MediaUpload>, private mediaUploadRepository: Repository<MediaUpload>,
private notesService: NotesService,
private usersService: UsersService,
private moduleRef: ModuleRef, private moduleRef: ModuleRef,
@Inject(mediaConfiguration.KEY) @Inject(mediaConfiguration.KEY)
private mediaConfig: MediaConfig, private mediaConfig: MediaConfig,
@ -68,15 +64,17 @@ export class MediaService {
/** /**
* @async * @async
* Save the given buffer to the configured MediaBackend and create a MediaUploadEntity to track where the file is, who uploaded it and to which note. * Save the given buffer to the configured MediaBackend and create a MediaUploadEntity to track where the file is, who uploaded it and to which note.
* @param {string} fileName - the original file name
* @param {Buffer} fileBuffer - the buffer of the file to save. * @param {Buffer} fileBuffer - the buffer of the file to save.
* @param {User} user - the user who uploaded this file * @param {User} user - the user who uploaded this file
* @param {Note} note - the note which will be associated with the new file. * @param {Note} note - the note which will be associated with the new file.
* @return {string} the url of the saved file * @return {MediaUpload} the created MediaUpload entity
* @throws {ClientError} the MIME type of the file is not supported. * @throws {ClientError} the MIME type of the file is not supported.
* @throws {NotInDBError} - the note or user is not in the database * @throws {NotInDBError} - the note or user is not in the database
* @throws {MediaBackendError} - there was an error saving the file * @throws {MediaBackendError} - there was an error saving the file
*/ */
async saveFile( async saveFile(
fileName: string,
fileBuffer: Buffer, fileBuffer: Buffer,
user: User | null, user: User | null,
note: Note, note: Note,
@ -99,19 +97,20 @@ export class MediaService {
if (!MediaService.isAllowedMimeType(fileTypeResult.mime)) { if (!MediaService.isAllowedMimeType(fileTypeResult.mime)) {
throw new ClientError('MIME type not allowed.'); throw new ClientError('MIME type not allowed.');
} }
const randomBytes = crypto.randomBytes(16); const uuid = uuidV4(); // TODO replace this with uuid-v7 in a later PR
const id = randomBytes.toString('hex') + '.' + fileTypeResult.ext; const backendData = await this.mediaBackend.saveFile(
this.logger.debug(`Generated filename: '${id}'`, 'saveFile'); uuid,
const [url, backendData] = await this.mediaBackend.saveFile(fileBuffer, id); fileBuffer,
fileTypeResult,
);
const mediaUpload = MediaUpload.create( const mediaUpload = MediaUpload.create(
id, uuid,
fileName,
note, note,
user, user,
fileTypeResult.ext,
this.mediaBackendType, this.mediaBackendType,
url, backendData,
); );
mediaUpload.backendData = backendData;
return await this.mediaUploadRepository.save(mediaUpload); return await this.mediaUploadRepository.save(mediaUpload);
} }
@ -122,10 +121,26 @@ export class MediaService {
* @throws {MediaBackendError} - there was an error deleting the file * @throws {MediaBackendError} - there was an error deleting the file
*/ */
async deleteFile(mediaUpload: MediaUpload): Promise<void> { async deleteFile(mediaUpload: MediaUpload): Promise<void> {
await this.mediaBackend.deleteFile(mediaUpload.id, mediaUpload.backendData); await this.mediaBackend.deleteFile(
mediaUpload.uuid,
mediaUpload.backendData,
);
await this.mediaUploadRepository.remove(mediaUpload); await this.mediaUploadRepository.remove(mediaUpload);
} }
/**
* @async
* Get the URL of the file.
* @param {MediaUpload} mediaUpload - the file to get the URL for.
* @return {string} the URL of the file.
* @throws {MediaBackendError} - there was an error retrieving the url
*/
async getFileUrl(mediaUpload: MediaUpload): Promise<string> {
const backendName = mediaUpload.backendType as BackendType;
const backend = this.getBackendFromType(backendName);
return await backend.getFileUrl(mediaUpload.uuid, mediaUpload.backendData);
}
/** /**
* @async * @async
* Find a file entry by its filename. * Find a file entry by its filename.
@ -136,7 +151,7 @@ export class MediaService {
*/ */
async findUploadByFilename(filename: string): Promise<MediaUpload> { async findUploadByFilename(filename: string): Promise<MediaUpload> {
const mediaUpload = await this.mediaUploadRepository.findOne({ const mediaUpload = await this.mediaUploadRepository.findOne({
where: { id: filename }, where: { fileName: filename },
relations: ['user'], relations: ['user'],
}); });
if (mediaUpload === null) { if (mediaUpload === null) {
@ -147,6 +162,24 @@ export class MediaService {
return mediaUpload; return mediaUpload;
} }
/**
* @async
* Find a file entry by its UUID.
* @param {string} uuid - The UUID of the MediaUpload entity to find.
* @returns {MediaUpload} - the MediaUpload entity if found.
* @throws {NotInDBError} - the MediaUpload entity with the provided UUID is not found in the database.
*/
async findUploadByUuid(uuid: string): Promise<MediaUpload> {
const mediaUpload = await this.mediaUploadRepository.findOne({
where: { uuid },
relations: ['user'],
});
if (mediaUpload === null) {
throw new NotInDBError(`MediaUpload with uuid '${uuid}' not found`);
}
return mediaUpload;
}
/** /**
* @async * @async
* List all uploads by a specific user * List all uploads by a specific user
@ -166,9 +199,9 @@ export class MediaService {
/** /**
* @async * @async
* List all uploads by a specific note * List all uploads to a specific note
   * @param {Note} note - the specific user    * @param {Note} note - the note to list uploads for
   * @return {MediaUpload[]} arary of media uploads owned by the user    * @return {MediaUpload[]} array of media uploads attached to the note
*/ */
async listUploadsByNote(note: Note): Promise<MediaUpload[]> { async listUploadsByNote(note: Note): Promise<MediaUpload[]> {
const mediaUploads = await this.mediaUploadRepository const mediaUploads = await this.mediaUploadRepository
@ -188,7 +221,7 @@ export class MediaService {
*/ */
async removeNoteFromMediaUpload(mediaUpload: MediaUpload): Promise<void> { async removeNoteFromMediaUpload(mediaUpload: MediaUpload): Promise<void> {
this.logger.debug( this.logger.debug(
'Setting note to null for mediaUpload: ' + mediaUpload.id, 'Setting note to null for mediaUpload: ' + mediaUpload.uuid,
'removeNoteFromMediaUpload', 'removeNoteFromMediaUpload',
); );
mediaUpload.note = Promise.resolve(null); mediaUpload.note = Promise.resolve(null);
@ -232,8 +265,9 @@ export class MediaService {
async toMediaUploadDto(mediaUpload: MediaUpload): Promise<MediaUploadDto> { async toMediaUploadDto(mediaUpload: MediaUpload): Promise<MediaUploadDto> {
const user = await mediaUpload.user; const user = await mediaUpload.user;
return { return {
id: mediaUpload.id, uuid: mediaUpload.uuid,
notePublicId: (await mediaUpload.note)?.publicId ?? null, fileName: mediaUpload.fileName,
noteId: (await mediaUpload.note)?.publicId ?? null,
createdAt: mediaUpload.createdAt, createdAt: mediaUpload.createdAt,
username: user?.username ?? null, username: user?.username ?? null,
}; };

View file

@ -1,19 +1,14 @@
/*
* SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
*
* SPDX-License-Identifier: AGPL-3.0-only
*/
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
export class Init1725266569705 implements MigrationInterface { export class Init1726084491570 implements MigrationInterface {
name = 'Init1725266569705'; name = 'Init1726084491570';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query( await queryRunner.query(
`CREATE TABLE \`history_entry\` (\`id\` int NOT NULL AUTO_INCREMENT, \`pinStatus\` tinyint NOT NULL, \`updatedAt\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), \`userId\` int NULL, \`noteId\` int NULL, UNIQUE INDEX \`IDX_928dd947355b0837366470a916\` (\`noteId\`, \`userId\`), PRIMARY KEY (\`id\`)) ENGINE=InnoDB`, `CREATE TABLE \`history_entry\` (\`id\` int NOT NULL AUTO_INCREMENT, \`pinStatus\` tinyint NOT NULL, \`updatedAt\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), \`userId\` int NULL, \`noteId\` int NULL, UNIQUE INDEX \`IDX_928dd947355b0837366470a916\` (\`noteId\`, \`userId\`), PRIMARY KEY (\`id\`)) ENGINE=InnoDB`,
); );
await queryRunner.query( await queryRunner.query(
`CREATE TABLE \`media_upload\` (\`id\` varchar(255) NOT NULL, \`backendType\` varchar(255) NOT NULL, \`fileUrl\` varchar(255) NOT NULL, \`backendData\` text NULL, \`createdAt\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), \`noteId\` int NULL, \`userId\` int NULL, PRIMARY KEY (\`id\`)) ENGINE=InnoDB`, `CREATE TABLE \`media_upload\` (\`uuid\` varchar(255) NOT NULL, \`fileName\` varchar(255) NOT NULL, \`backendType\` varchar(255) NOT NULL, \`backendData\` text NULL, \`createdAt\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), \`noteId\` int NULL, \`userId\` int NULL, PRIMARY KEY (\`uuid\`)) ENGINE=InnoDB`,
); );
await queryRunner.query( await queryRunner.query(
`CREATE TABLE \`note_group_permission\` (\`id\` int NOT NULL AUTO_INCREMENT, \`canEdit\` tinyint NOT NULL, \`groupId\` int NULL, \`noteId\` int NULL, UNIQUE INDEX \`IDX_ee1744842a9ef3ffbc05a7016a\` (\`groupId\`, \`noteId\`), PRIMARY KEY (\`id\`)) ENGINE=InnoDB`, `CREATE TABLE \`note_group_permission\` (\`id\` int NOT NULL AUTO_INCREMENT, \`canEdit\` tinyint NOT NULL, \`groupId\` int NULL, \`noteId\` int NULL, UNIQUE INDEX \`IDX_ee1744842a9ef3ffbc05a7016a\` (\`groupId\`, \`noteId\`), PRIMARY KEY (\`id\`)) ENGINE=InnoDB`,

View file

@ -1,12 +1,7 @@
/*
* SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
*
* SPDX-License-Identifier: AGPL-3.0-only
*/
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
export class Init1725266697932 implements MigrationInterface { export class Init1726084117959 implements MigrationInterface {
name = 'Init1725266697932'; name = 'Init1726084117959';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query( await queryRunner.query(
@ -16,7 +11,7 @@ export class Init1725266697932 implements MigrationInterface {
`CREATE UNIQUE INDEX "IDX_928dd947355b0837366470a916" ON "history_entry" ("noteId", "userId") `, `CREATE UNIQUE INDEX "IDX_928dd947355b0837366470a916" ON "history_entry" ("noteId", "userId") `,
); );
await queryRunner.query( await queryRunner.query(
`CREATE TABLE "media_upload" ("id" character varying NOT NULL, "backendType" character varying NOT NULL, "fileUrl" character varying NOT NULL, "backendData" text, "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "noteId" integer, "userId" integer, CONSTRAINT "PK_b406d9cee56e253dfd3b3d52706" PRIMARY KEY ("id"))`, `CREATE TABLE "media_upload" ("uuid" character varying NOT NULL, "fileName" character varying NOT NULL, "backendType" character varying NOT NULL, "backendData" text, "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "noteId" integer, "userId" integer, CONSTRAINT "PK_573c2a4f2a8f8382f2a8758444e" PRIMARY KEY ("uuid"))`,
); );
await queryRunner.query( await queryRunner.query(
`CREATE TABLE "note_group_permission" ("id" SERIAL NOT NULL, "canEdit" boolean NOT NULL, "groupId" integer, "noteId" integer, CONSTRAINT "PK_6327989190949e6a55d02a080c3" PRIMARY KEY ("id"))`, `CREATE TABLE "note_group_permission" ("id" SERIAL NOT NULL, "canEdit" boolean NOT NULL, "groupId" integer, "noteId" integer, CONSTRAINT "PK_6327989190949e6a55d02a080c3" PRIMARY KEY ("id"))`,

View file

@ -1,12 +1,7 @@
/*
* SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
*
* SPDX-License-Identifier: AGPL-3.0-only
*/
import { MigrationInterface, QueryRunner } from 'typeorm'; import { MigrationInterface, QueryRunner } from 'typeorm';
export class Init1725268109950 implements MigrationInterface { export class Init1726084595852 implements MigrationInterface {
name = 'Init1725268109950'; name = 'Init1726084595852';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query( await queryRunner.query(
@ -16,7 +11,7 @@ export class Init1725268109950 implements MigrationInterface {
`CREATE UNIQUE INDEX "IDX_928dd947355b0837366470a916" ON "history_entry" ("noteId", "userId") `, `CREATE UNIQUE INDEX "IDX_928dd947355b0837366470a916" ON "history_entry" ("noteId", "userId") `,
); );
await queryRunner.query( await queryRunner.query(
`CREATE TABLE "media_upload" ("id" varchar PRIMARY KEY NOT NULL, "backendType" varchar NOT NULL, "fileUrl" varchar NOT NULL, "backendData" text, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "noteId" integer, "userId" integer)`, `CREATE TABLE "media_upload" ("uuid" varchar PRIMARY KEY NOT NULL, "fileName" varchar NOT NULL, "backendType" varchar NOT NULL, "backendData" text, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "noteId" integer, "userId" integer)`,
); );
await queryRunner.query( await queryRunner.query(
`CREATE TABLE "note_group_permission" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "canEdit" boolean NOT NULL, "groupId" integer, "noteId" integer)`, `CREATE TABLE "note_group_permission" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "canEdit" boolean NOT NULL, "groupId" integer, "noteId" integer)`,
@ -108,10 +103,10 @@ export class Init1725268109950 implements MigrationInterface {
`CREATE UNIQUE INDEX "IDX_928dd947355b0837366470a916" ON "history_entry" ("noteId", "userId") `, `CREATE UNIQUE INDEX "IDX_928dd947355b0837366470a916" ON "history_entry" ("noteId", "userId") `,
); );
await queryRunner.query( await queryRunner.query(
`CREATE TABLE "temporary_media_upload" ("id" varchar PRIMARY KEY NOT NULL, "backendType" varchar NOT NULL, "fileUrl" varchar NOT NULL, "backendData" text, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "noteId" integer, "userId" integer, CONSTRAINT "FK_edba6d4e0f3bcf6605772f0af6b" FOREIGN KEY ("noteId") REFERENCES "note" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION, CONSTRAINT "FK_73ce66b082df1df2003e305e9ac" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`, `CREATE TABLE "temporary_media_upload" ("uuid" varchar PRIMARY KEY NOT NULL, "fileName" varchar NOT NULL, "backendType" varchar NOT NULL, "backendData" text, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "noteId" integer, "userId" integer, CONSTRAINT "FK_edba6d4e0f3bcf6605772f0af6b" FOREIGN KEY ("noteId") REFERENCES "note" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION, CONSTRAINT "FK_73ce66b082df1df2003e305e9ac" FOREIGN KEY ("userId") REFERENCES "user" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`,
); );
await queryRunner.query( await queryRunner.query(
`INSERT INTO "temporary_media_upload"("id", "backendType", "fileUrl", "backendData", "createdAt", "noteId", "userId") SELECT "id", "backendType", "fileUrl", "backendData", "createdAt", "noteId", "userId" FROM "media_upload"`, `INSERT INTO "temporary_media_upload"("uuid", "fileName", "backendType", "backendData", "createdAt", "noteId", "userId") SELECT "uuid", "fileName", "backendType", "backendData", "createdAt", "noteId", "userId" FROM "media_upload"`,
); );
await queryRunner.query(`DROP TABLE "media_upload"`); await queryRunner.query(`DROP TABLE "media_upload"`);
await queryRunner.query( await queryRunner.query(
@ -444,10 +439,10 @@ export class Init1725268109950 implements MigrationInterface {
`ALTER TABLE "media_upload" RENAME TO "temporary_media_upload"`, `ALTER TABLE "media_upload" RENAME TO "temporary_media_upload"`,
); );
await queryRunner.query( await queryRunner.query(
`CREATE TABLE "media_upload" ("id" varchar PRIMARY KEY NOT NULL, "backendType" varchar NOT NULL, "fileUrl" varchar NOT NULL, "backendData" text, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "noteId" integer, "userId" integer)`, `CREATE TABLE "media_upload" ("uuid" varchar PRIMARY KEY NOT NULL, "fileName" varchar NOT NULL, "backendType" varchar NOT NULL, "backendData" text, "createdAt" datetime NOT NULL DEFAULT (datetime('now')), "noteId" integer, "userId" integer)`,
); );
await queryRunner.query( await queryRunner.query(
`INSERT INTO "media_upload"("id", "backendType", "fileUrl", "backendData", "createdAt", "noteId", "userId") SELECT "id", "backendType", "fileUrl", "backendData", "createdAt", "noteId", "userId" FROM "temporary_media_upload"`, `INSERT INTO "media_upload"("uuid", "fileName", "backendType", "backendData", "createdAt", "noteId", "userId") SELECT "uuid", "fileName", "backendType", "backendData", "createdAt", "noteId", "userId" FROM "temporary_media_upload"`,
); );
await queryRunner.query(`DROP TABLE "temporary_media_upload"`); await queryRunner.query(`DROP TABLE "temporary_media_upload"`);
await queryRunner.query(`DROP INDEX "IDX_928dd947355b0837366470a916"`); await queryRunner.query(`DROP INDEX "IDX_928dd947355b0837366470a916"`);

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -70,16 +70,44 @@ describe('Me', () => {
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const imageIds = []; const imageIds = [];
imageIds.push( imageIds.push(
(await testSetup.mediaService.saveFile(testImage, user, note1)).id, (
await testSetup.mediaService.saveFile(
'test.png',
testImage,
user,
note1,
)
).uuid,
); );
imageIds.push( imageIds.push(
(await testSetup.mediaService.saveFile(testImage, user, note1)).id, (
await testSetup.mediaService.saveFile(
'test.png',
testImage,
user,
note1,
)
).uuid,
); );
imageIds.push( imageIds.push(
(await testSetup.mediaService.saveFile(testImage, user, note2)).id, (
await testSetup.mediaService.saveFile(
'test.png',
testImage,
user,
note2,
)
).uuid,
); );
imageIds.push( imageIds.push(
(await testSetup.mediaService.saveFile(testImage, user, note2)).id, (
await testSetup.mediaService.saveFile(
'test.png',
testImage,
user,
note2,
)
).uuid,
); );
const response = await agent const response = await agent
@ -87,10 +115,10 @@ describe('Me', () => {
.expect('Content-Type', /json/) .expect('Content-Type', /json/)
.expect(200); .expect(200);
expect(response.body).toHaveLength(4); expect(response.body).toHaveLength(4);
expect(imageIds).toContain(response.body[0].id); expect(imageIds).toContain(response.body[0].uuid);
expect(imageIds).toContain(response.body[1].id); expect(imageIds).toContain(response.body[1].uuid);
expect(imageIds).toContain(response.body[2].id); expect(imageIds).toContain(response.body[2].uuid);
expect(imageIds).toContain(response.body[3].id); expect(imageIds).toContain(response.body[3].uuid);
const mediaUploads = await testSetup.mediaService.listUploadsByUser(user); const mediaUploads = await testSetup.mediaService.listUploadsByUser(user);
for (const upload of mediaUploads) { for (const upload of mediaUploads) {
await testSetup.mediaService.deleteFile(upload); await testSetup.mediaService.deleteFile(upload);
@ -114,6 +142,7 @@ describe('Me', () => {
it('DELETE /me', async () => { it('DELETE /me', async () => {
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
user, user,
note1, note1,
@ -122,7 +151,7 @@ describe('Me', () => {
expect(dbUser).toBeInstanceOf(User); expect(dbUser).toBeInstanceOf(User);
const mediaUploads = await testSetup.mediaService.listUploadsByUser(dbUser); const mediaUploads = await testSetup.mediaService.listUploadsByUser(dbUser);
expect(mediaUploads).toHaveLength(1); expect(mediaUploads).toHaveLength(1);
expect(mediaUploads[0].id).toEqual(upload.id); expect(mediaUploads[0].uuid).toEqual(upload.uuid);
await agent.delete('/api/private/me').expect(204); await agent.delete('/api/private/me').expect(204);
await expect( await expect(
testSetup.userService.getUserByUsername('hardcoded'), testSetup.userService.getUserByUsername('hardcoded'),

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -71,17 +71,17 @@ describe('Media', () => {
.set('HedgeDoc-Note', 'test_upload_media') .set('HedgeDoc-Note', 'test_upload_media')
.expect('Content-Type', /json/) .expect('Content-Type', /json/)
.expect(201); .expect(201);
const fileName: string = uploadResponse.body.id; const uuid: string = uploadResponse.body.uuid;
const testImage = await fs.readFile( const testImage = await fs.readFile(
'test/private-api/fixtures/test.png', 'test/private-api/fixtures/test.png',
); );
const path = '/api/private/media/' + fileName; const path = '/api/private/media/' + uuid;
const apiResponse = await agent.get(path); const apiResponse = await agent.get(path);
expect(apiResponse.statusCode).toEqual(302); expect(apiResponse.statusCode).toEqual(200);
const downloadResponse = await agent.get(apiResponse.header.location); const downloadResponse = await agent.get(`/uploads/${uuid}.png`);
expect(downloadResponse.body).toEqual(testImage); expect(downloadResponse.body).toEqual(testImage);
// delete the file afterwards // delete the file afterwards
await fs.unlink(join(uploadPath, fileName)); await fs.unlink(join(uploadPath, uuid + '.png'));
}); });
it('without user', async () => { it('without user', async () => {
const agent = request.agent(testSetup.app.getHttpServer()); const agent = request.agent(testSetup.app.getHttpServer());
@ -91,17 +91,17 @@ describe('Media', () => {
.set('HedgeDoc-Note', 'test_upload_media') .set('HedgeDoc-Note', 'test_upload_media')
.expect('Content-Type', /json/) .expect('Content-Type', /json/)
.expect(201); .expect(201);
const fileName: string = uploadResponse.body.id; const uuid: string = uploadResponse.body.uuid;
const testImage = await fs.readFile( const testImage = await fs.readFile(
'test/private-api/fixtures/test.png', 'test/private-api/fixtures/test.png',
); );
const path = '/api/private/media/' + fileName; const path = '/api/private/media/' + uuid;
const apiResponse = await agent.get(path); const apiResponse = await agent.get(path);
expect(apiResponse.statusCode).toEqual(302); expect(apiResponse.statusCode).toEqual(200);
const downloadResponse = await agent.get(apiResponse.header.location); const downloadResponse = await agent.get(`/uploads/${uuid}.png`);
expect(downloadResponse.body).toEqual(testImage); expect(downloadResponse.body).toEqual(testImage);
// delete the file afterwards // delete the file afterwards
await fs.unlink(join(uploadPath, fileName)); await fs.unlink(join(uploadPath, uuid + '.png'));
}); });
}); });
describe('fails:', () => { describe('fails:', () => {
@ -158,11 +158,12 @@ describe('Media', () => {
); );
const testImage = await fs.readFile('test/private-api/fixtures/test.png'); const testImage = await fs.readFile('test/private-api/fixtures/test.png');
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
user, user,
testNote, testNote,
); );
const filename = upload.id; const uuid = upload.uuid;
// login with a different user; // login with a different user;
const agent2 = request.agent(testSetup.app.getHttpServer()); const agent2 = request.agent(testSetup.app.getHttpServer());
@ -172,15 +173,15 @@ describe('Media', () => {
.expect(201); .expect(201);
// try to delete upload with second user // try to delete upload with second user
await agent2.delete('/api/private/media/' + filename).expect(403); await agent2.delete('/api/private/media/' + uuid).expect(403);
await agent.get('/uploads/' + filename).expect(200); await agent.get(`/uploads/${uuid}.png`).expect(200);
// delete upload for real // delete upload for real
await agent.delete('/api/private/media/' + filename).expect(204); await agent.delete('/api/private/media/' + uuid).expect(204);
// Test if file is really deleted // Test if file is really deleted
await agent.get('/uploads/' + filename).expect(404); await agent.get(`/uploads/${uuid}.png`).expect(404);
}); });
it('deleting user is owner of note', async () => { it('deleting user is owner of note', async () => {
// upload a file with the default test user // upload a file with the default test user
@ -191,11 +192,12 @@ describe('Media', () => {
); );
const testImage = await fs.readFile('test/private-api/fixtures/test.png'); const testImage = await fs.readFile('test/private-api/fixtures/test.png');
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
null, null,
testNote, testNote,
); );
const filename = upload.fileUrl.split('/').pop() || ''; const uuid = upload.uuid;
// login with a different user; // login with a different user;
const agent2 = request.agent(testSetup.app.getHttpServer()); const agent2 = request.agent(testSetup.app.getHttpServer());
@ -207,18 +209,18 @@ describe('Media', () => {
const agentGuest = request.agent(testSetup.app.getHttpServer()); const agentGuest = request.agent(testSetup.app.getHttpServer());
// try to delete upload with second user // try to delete upload with second user
await agent.delete('/api/private/media/' + filename).expect(403); await agent.delete('/api/private/media/' + uuid).expect(403);
await agent.get('/uploads/' + filename).expect(200); await agent.get(`/uploads/${uuid}.png`).expect(200);
await agentGuest.delete('/api/private/media/' + filename).expect(401); await agentGuest.delete('/api/private/media/' + uuid).expect(401);
await agent.get('/uploads/' + filename).expect(200); await agent.get(`/uploads/${uuid}.png`).expect(200);
// delete upload for real // delete upload for real
await agent2.delete('/api/private/media/' + filename).expect(204); await agent2.delete('/api/private/media/' + uuid).expect(204);
// Test if file is really deleted // Test if file is really deleted
await agent.get('/uploads/' + filename).expect(404); await agent.get(`/uploads/${uuid}.png`).expect(404);
}); });
}); });
}); });

View file

@ -165,7 +165,12 @@ describe('Notes', () => {
user1, user1,
noteId, noteId,
); );
await testSetup.mediaService.saveFile(testImage, user1, note); await testSetup.mediaService.saveFile(
'test.png',
testImage,
user1,
note,
);
await agent await agent
.delete(`/api/private/notes/${noteId}`) .delete(`/api/private/notes/${noteId}`)
.set('Content-Type', 'application/json') .set('Content-Type', 'application/json')
@ -191,6 +196,7 @@ describe('Notes', () => {
noteId, noteId,
); );
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
user1, user1,
note, note,
@ -210,10 +216,8 @@ describe('Notes', () => {
expect( expect(
await testSetup.mediaService.listUploadsByUser(user1), await testSetup.mediaService.listUploadsByUser(user1),
).toHaveLength(1); ).toHaveLength(1);
// Remove /upload/ from path as we just need the filename.
const fileName = upload.fileUrl.replace('/uploads/', '');
// delete the file afterwards // delete the file afterwards
await fs.unlink(join(uploadPath, fileName)); await fs.unlink(join(uploadPath, upload.uuid + '.png'));
await fs.rmdir(uploadPath); await fs.rmdir(uploadPath);
}); });
}); });
@ -406,11 +410,13 @@ describe('Notes', () => {
const testImage = await fs.readFile('test/private-api/fixtures/test.png'); const testImage = await fs.readFile('test/private-api/fixtures/test.png');
const upload0 = await testSetup.mediaService.saveFile( const upload0 = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
user1, user1,
note1, note1,
); );
const upload1 = await testSetup.mediaService.saveFile( const upload1 = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
user1, user1,
note2, note2,
@ -421,11 +427,11 @@ describe('Notes', () => {
.expect('Content-Type', /json/) .expect('Content-Type', /json/)
.expect(200); .expect(200);
expect(responseAfter.body).toHaveLength(1); expect(responseAfter.body).toHaveLength(1);
expect(responseAfter.body[0].id).toEqual(upload0.id); expect(responseAfter.body[0].uuid).toEqual(upload0.uuid);
expect(responseAfter.body[0].id).not.toEqual(upload1.id); expect(responseAfter.body[0].uuid).not.toEqual(upload1.uuid);
for (const upload of [upload0, upload1]) { for (const upload of [upload0, upload1]) {
// delete the file afterwards // delete the file afterwards
await fs.unlink(join(uploadPath, upload.id)); await fs.unlink(join(uploadPath, upload.uuid + '.png'));
} }
await fs.rm(uploadPath, { recursive: true }); await fs.rm(uploadPath, { recursive: true });
}); });

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -201,16 +201,44 @@ describe('Me', () => {
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const imageIds = []; const imageIds = [];
imageIds.push( imageIds.push(
(await testSetup.mediaService.saveFile(testImage, user, note1)).id, (
await testSetup.mediaService.saveFile(
'test.png',
testImage,
user,
note1,
)
).uuid,
); );
imageIds.push( imageIds.push(
(await testSetup.mediaService.saveFile(testImage, user, note1)).id, (
await testSetup.mediaService.saveFile(
'test.png',
testImage,
user,
note1,
)
).uuid,
); );
imageIds.push( imageIds.push(
(await testSetup.mediaService.saveFile(testImage, user, note2)).id, (
await testSetup.mediaService.saveFile(
'test.png',
testImage,
user,
note2,
)
).uuid,
); );
imageIds.push( imageIds.push(
(await testSetup.mediaService.saveFile(testImage, user, note2)).id, (
await testSetup.mediaService.saveFile(
'test.png',
testImage,
user,
note2,
)
).uuid,
); );
const response = await request(httpServer) const response = await request(httpServer)
@ -218,13 +246,13 @@ describe('Me', () => {
.expect('Content-Type', /json/) .expect('Content-Type', /json/)
.expect(200); .expect(200);
expect(response.body).toHaveLength(4); expect(response.body).toHaveLength(4);
expect(imageIds).toContain(response.body[0].id); expect(imageIds).toContain(response.body[0].uuid);
expect(imageIds).toContain(response.body[1].id); expect(imageIds).toContain(response.body[1].uuid);
expect(imageIds).toContain(response.body[2].id); expect(imageIds).toContain(response.body[2].uuid);
expect(imageIds).toContain(response.body[3].id); expect(imageIds).toContain(response.body[3].uuid);
for (const imageId of imageIds) { for (const imageId of imageIds) {
// delete the file afterwards // delete the file afterwards
await fs.unlink(join(uploadPath, imageId)); await fs.unlink(join(uploadPath, imageId + '.png'));
} }
await fs.rm(uploadPath, { recursive: true }); await fs.rm(uploadPath, { recursive: true });
}); });

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2021 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -49,17 +49,17 @@ describe('Media', () => {
.set('HedgeDoc-Note', 'testAlias1') .set('HedgeDoc-Note', 'testAlias1')
.expect('Content-Type', /json/) .expect('Content-Type', /json/)
.expect(201); .expect(201);
const fileName = uploadResponse.body.id; const uuid = uploadResponse.body.uuid;
const path: string = '/api/v2/media/' + fileName; const path: string = '/api/v2/media/' + uuid;
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const apiResponse = await agent const apiResponse = await agent
.get(path) .get(path)
.set('Authorization', `Bearer ${testSetup.authTokens[0].secret}`); .set('Authorization', `Bearer ${testSetup.authTokens[0].secret}`);
expect(apiResponse.statusCode).toEqual(302); expect(apiResponse.statusCode).toEqual(200);
const downloadResponse = await agent.get(apiResponse.header.location); const downloadResponse = await agent.get(`/uploads/${uuid}.png`);
expect(downloadResponse.body).toEqual(testImage); expect(downloadResponse.body).toEqual(testImage);
// delete the file afterwards // delete the file afterwards
await fs.unlink(join(uploadPath, fileName)); await fs.unlink(join(uploadPath, uuid + '.png'));
}); });
describe('fails:', () => { describe('fails:', () => {
beforeEach(async () => { beforeEach(async () => {
@ -114,26 +114,26 @@ describe('Media', () => {
it('successfully deletes an uploaded file', async () => { it('successfully deletes an uploaded file', async () => {
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
testSetup.users[0], testSetup.users[0],
testSetup.ownedNotes[0], testSetup.ownedNotes[0],
); );
const filename = upload.fileUrl.split('/').pop() || '';
await request(testSetup.app.getHttpServer()) await request(testSetup.app.getHttpServer())
.delete('/api/v2/media/' + filename) .delete('/api/v2/media/' + upload.uuid)
.set('Authorization', `Bearer ${testSetup.authTokens[0].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[0].secret}`)
.expect(204); .expect(204);
}); });
it('returns an error if the user does not own the file', async () => { it('returns an error if the user does not own the file', async () => {
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
testSetup.users[0], testSetup.users[0],
testSetup.ownedNotes[0], testSetup.ownedNotes[0],
); );
const filename = upload.fileUrl.split('/').pop() || '';
await request(testSetup.app.getHttpServer()) await request(testSetup.app.getHttpServer())
.delete('/api/v2/media/' + filename) .delete('/api/v2/media/' + upload.uuid)
.set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`)
.expect(403); .expect(403);
}); });
@ -146,34 +146,34 @@ describe('Media', () => {
); );
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
testSetup.users[0], testSetup.users[0],
testNote, testNote,
); );
const filename = upload.fileUrl.split('/').pop() || '';
const agent2 = request.agent(testSetup.app.getHttpServer()); const agent2 = request.agent(testSetup.app.getHttpServer());
// try to delete upload with second user // try to delete upload with second user
await agent2 await agent2
.delete('/api/v2/media/' + filename) .delete('/api/v2/media/' + upload.uuid)
.set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`)
.expect(403); .expect(403);
await agent2 await agent2
.get('/uploads/' + filename) .get(`/uploads/${upload.uuid}.png`)
.set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`)
.expect(200); .expect(200);
// delete upload for real // delete upload for real
await agent2 await agent2
.delete('/api/v2/media/' + filename) .delete('/api/v2/media/' + upload.uuid)
.set('Authorization', `Bearer ${testSetup.authTokens[0].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[0].secret}`)
.expect(204); .expect(204);
// Test if file is really deleted // Test if file is really deleted
await agent2 await agent2
.get('/uploads/' + filename) .get(`/uploads/${upload.uuid}.png`)
.set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`)
.expect(404); .expect(404);
}); });
@ -186,33 +186,33 @@ describe('Media', () => {
); );
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
testSetup.users[0], testSetup.users[0],
testNote, testNote,
); );
const filename = upload.fileUrl.split('/').pop() || '';
const agent2 = request.agent(testSetup.app.getHttpServer()); const agent2 = request.agent(testSetup.app.getHttpServer());
// try to delete upload with second user // try to delete upload with second user
await agent2 await agent2
.delete('/api/v2/media/' + filename) .delete('/api/v2/media/' + upload.uuid)
.set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`)
.expect(403); .expect(403);
await agent2 await agent2
.get('/uploads/' + filename) .get(`/uploads/${upload.uuid}.png`)
.set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`)
.expect(200); .expect(200);
// delete upload for real // delete upload for real
await agent2 await agent2
.delete('/api/v2/media/' + filename) .delete('/api/v2/media/' + upload.uuid)
.set('Authorization', `Bearer ${testSetup.authTokens[2].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[2].secret}`)
.expect(204); .expect(204);
// Test if file is really deleted // Test if file is really deleted
await agent2 await agent2
.get('/uploads/' + filename) .get(`/uploads/${upload.uuid}.png`)
.set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`) .set('Authorization', `Bearer ${testSetup.authTokens[1].secret}`)
.expect(404); .expect(404);
}); });

View file

@ -158,6 +158,7 @@ describe('Notes', () => {
noteId, noteId,
); );
await testSetup.mediaService.saveFile( await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
testSetup.users[0], testSetup.users[0],
note, note,
@ -187,6 +188,7 @@ describe('Notes', () => {
noteId, noteId,
); );
const upload = await testSetup.mediaService.saveFile( const upload = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
testSetup.users[0], testSetup.users[0],
note, note,
@ -207,10 +209,8 @@ describe('Notes', () => {
expect( expect(
await testSetup.mediaService.listUploadsByUser(testSetup.users[0]), await testSetup.mediaService.listUploadsByUser(testSetup.users[0]),
).toHaveLength(1); ).toHaveLength(1);
// Remove /upload/ from path as we just need the filename.
const fileName = upload.fileUrl.replace('/uploads/', '');
// delete the file afterwards // delete the file afterwards
await fs.unlink(join(uploadPath, fileName)); await fs.unlink(join(uploadPath, upload.uuid + '.png'));
}); });
}); });
it('works with an existing alias with permissions', async () => { it('works with an existing alias with permissions', async () => {
@ -326,7 +326,6 @@ describe('Notes', () => {
expect(metadata.body.editedBy).toEqual([]); expect(metadata.body.editedBy).toEqual([]);
expect(metadata.body.permissions.owner).toEqual('testuser1'); expect(metadata.body.permissions.owner).toEqual('testuser1');
expect(metadata.body.permissions.sharedToUsers).toEqual([]); expect(metadata.body.permissions.sharedToUsers).toEqual([]);
expect(metadata.body.permissions.sharedToUsers).toEqual([]);
expect(metadata.body.tags).toEqual([]); expect(metadata.body.tags).toEqual([]);
expect(typeof metadata.body.updatedAt).toEqual('string'); expect(typeof metadata.body.updatedAt).toEqual('string');
expect(typeof metadata.body.updateUsername).toEqual('string'); expect(typeof metadata.body.updateUsername).toEqual('string');
@ -489,11 +488,13 @@ describe('Notes', () => {
const testImage = await fs.readFile('test/public-api/fixtures/test.png'); const testImage = await fs.readFile('test/public-api/fixtures/test.png');
const upload0 = await testSetup.mediaService.saveFile( const upload0 = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
testSetup.users[0], testSetup.users[0],
note1, note1,
); );
const upload1 = await testSetup.mediaService.saveFile( const upload1 = await testSetup.mediaService.saveFile(
'test.png',
testImage, testImage,
testSetup.users[0], testSetup.users[0],
note2, note2,
@ -505,11 +506,11 @@ describe('Notes', () => {
.expect('Content-Type', /json/) .expect('Content-Type', /json/)
.expect(200); .expect(200);
expect(responseAfter.body).toHaveLength(1); expect(responseAfter.body).toHaveLength(1);
expect(responseAfter.body[0].id).toEqual(upload0.id); expect(responseAfter.body[0].uuid).toEqual(upload0.uuid);
expect(responseAfter.body[0].id).not.toEqual(upload1.id); expect(responseAfter.body[0].uuid).not.toEqual(upload1.uuid);
for (const upload of [upload0, upload1]) { for (const upload of [upload0, upload1]) {
// delete the file afterwards // delete the file afterwards
await fs.unlink(join(uploadPath, upload.id)); await fs.unlink(join(uploadPath, upload.uuid + '.png'));
} }
await fs.rm(uploadPath, { recursive: true }); await fs.rm(uploadPath, { recursive: true });
}); });

View file

@ -16,4 +16,5 @@ reverse_proxy /realtime http://localhost:{$HD_BACKEND_PORT:3000}
reverse_proxy /api/* http://localhost:{$HD_BACKEND_PORT:3000} reverse_proxy /api/* http://localhost:{$HD_BACKEND_PORT:3000}
reverse_proxy /public/* http://localhost:{$HD_BACKEND_PORT:3000} reverse_proxy /public/* http://localhost:{$HD_BACKEND_PORT:3000}
reverse_proxy /uploads/* http://localhost:{$HD_BACKEND_PORT:3000} reverse_proxy /uploads/* http://localhost:{$HD_BACKEND_PORT:3000}
reverse_proxy /media/* http://localhost:{$HD_BACKEND_PORT:3000}
reverse_proxy /* http://localhost:{$HD_FRONTEND_PORT:3001} reverse_proxy /* http://localhost:{$HD_FRONTEND_PORT:3001}

View file

@ -11,6 +11,12 @@ background information and explanations. They are especially useful for contribu
<span>Notes</span> <span>Notes</span>
</div> </div>
</a> </a>
<a href='/concepts/media/'>
<div class='topic'>
<span>📸</span>
<span>Media</span>
</div>
</a>
<a href='/concepts/user-profiles/'> <a href='/concepts/user-profiles/'>
<div class='topic'> <div class='topic'>
<span>🙎</span> <span>🙎</span>

View file

@ -0,0 +1,23 @@
# Media
!!! info "Design Document"
This is a design document, explaining the design and vision for a HedgeDoc 2
feature. It is not a user guide and may or may not be fully implemented.
Media is the term for uploads associated with a note in HedgeDoc.
Currently, there's only support for images.
Media files can be saved to different storage backends like the local filesystem, S3, Azure Blob
storage, generic WebDAV shares, or imgur.
Each storage backend needs to implement an interface with three methods:
- `saveFile(uuid, buffer, fileType)` should store a given file and may return stringified metadata
to store in the database for this upload. The metadata does not need to follow a specific format,
and will only be used inside the storage backend.
- `deleteFile(uuid, metadata)` should delete a file with the given UUID. The stored metadata can
be used for example to identify the file on the storage platform.
- `getFileUrl(uuid, metadata)` should return a URL to the file with the given UUID. The stored
metadata can be used to identify the file on the storage platform.
The returned URL may be temporary.
The MIME type of each upload is checked against an allow-list; uploads whose type is not on the list are rejected.
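
A minimal TypeScript sketch of this contract, based on the backend calls visible in this commit, might look as follows. The interface name `MediaBackend` and the `FileTypeResult` type from `file-type` are assumptions for illustration, not the authoritative definition:

```typescript
import { FileTypeResult } from 'file-type';

// Sketch of the storage backend contract described above.
// Name and exact types are illustrative assumptions.
export interface MediaBackend {
  // Stores the file; may return stringified metadata that is persisted
  // in the database and later passed back to deleteFile/getFileUrl.
  saveFile(
    uuid: string,
    buffer: Buffer,
    fileType: FileTypeResult,
  ): Promise<string | null>;

  // Deletes the file identified by the UUID; the stored metadata can help
  // locate it on the storage platform.
  deleteFile(uuid: string, backendData: string | null): Promise<void>;

  // Returns a URL for the file; for S3 or Azure this may be a temporary
  // pre-signed URL.
  getFileUrl(uuid: string, backendData: string | null): Promise<string>;
}
```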

View file

@ -31,7 +31,7 @@ in your `docker-compose.yml`:
- hedgedoc_uploads:/usr/src/app/backend/uploads - hedgedoc_uploads:/usr/src/app/backend/uploads
labels: labels:
traefik.enable: "true" traefik.enable: "true"
traefik.http.routers.hedgedoc_2_backend.rule: "Host(`md.example.com`) && (PathPrefix(`/realtime`) || PathPrefix(`/api`) || PathPrefix(`/public`))" traefik.http.routers.hedgedoc_2_backend.rule: "Host(`md.example.com`) && (PathPrefix(`/realtime`) || PathPrefix(`/api`) || PathPrefix(`/public`) || PathPrefix(`/uploads`) || PathPrefix(`/media`))"
traefik.http.routers.hedgedoc_2_backend.tls: "true" traefik.http.routers.hedgedoc_2_backend.tls: "true"
traefik.http.routers.hedgedoc_2_backend.tls.certresolver: "letsencrypt" traefik.http.routers.hedgedoc_2_backend.tls.certresolver: "letsencrypt"
traefik.http.services.hedgedoc_2_backend.loadbalancer.server.port: "3000" traefik.http.services.hedgedoc_2_backend.loadbalancer.server.port: "3000"
@ -113,7 +113,7 @@ Here is an example configuration for [nginx][nginx].
server { server {
server_name md.example.com; server_name md.example.com;
location ~ ^/(api|public|uploads)/ { location ~ ^/(api|public|uploads|media)/ {
proxy_pass http://127.0.0.1:3000; proxy_pass http://127.0.0.1:3000;
proxy_set_header X-Forwarded-Host $host; proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
@ -173,6 +173,8 @@ Here is an example config snippet for [Apache][apache]:
ProxyPassReverse /api http://127.0.0.1:3000/ ProxyPassReverse /api http://127.0.0.1:3000/
ProxyPassReverse /public http://127.0.0.1:3000/ ProxyPassReverse /public http://127.0.0.1:3000/
ProxyPassReverse /uploads http://127.0.0.1:3000/
ProxyPassReverse /media http://127.0.0.1:3000/
ProxyPassReverse /realtime http://127.0.0.1:3000/ ProxyPassReverse /realtime http://127.0.0.1:3000/
ProxyPass / http://127.0.0.1:3001/ ProxyPass / http://127.0.0.1:3001/
@ -200,6 +202,7 @@ Here is a list of things your reverse proxy needs to do to let HedgeDoc work:
- Passing `/api/*` to <http://localhost:3000> - Passing `/api/*` to <http://localhost:3000>
- Passing `/public/*` to <http://localhost:3000> - Passing `/public/*` to <http://localhost:3000>
- Passing `/uploads/*` to <http://localhost:3000> - Passing `/uploads/*` to <http://localhost:3000>
- Passing `/media/*` to <http://localhost:3000>
- Passing `/*` to <http://localhost:3001> - Passing `/*` to <http://localhost:3001>
- Set the `X-Forwarded-Proto` header - Set the `X-Forwarded-Proto` header

View file

@ -7,7 +7,7 @@ Your S3 bucket must be configured to be writeable.
You just add the following lines to your configuration: You just add the following lines to your configuration:
(with the appropriate substitution for `<ACCESS_KEY>`, `<SECRET_KEY>`, (with the appropriate substitution for `<ACCESS_KEY>`, `<SECRET_KEY>`,
`<BUCKET>`, and `<ENDPOINT>` of course) `<BUCKET>`, `<REGION>`, and `<ENDPOINT>` of course)
```dotenv ```dotenv
HD_MEDIA_BACKEND="s3" HD_MEDIA_BACKEND="s3"
@ -15,11 +15,16 @@ HD_MEDIA_BACKEND_S3_ACCESS_KEY="<ACCESS_KEY>"
HD_MEDIA_BACKEND_S3_SECRET_KEY="<SECRET_KEY>" HD_MEDIA_BACKEND_S3_SECRET_KEY="<SECRET_KEY>"
HD_MEDIA_BACKEND_S3_BUCKET="<BUCKET>" HD_MEDIA_BACKEND_S3_BUCKET="<BUCKET>"
HD_MEDIA_BACKEND_S3_ENDPOINT="<ENDPOINT>" HD_MEDIA_BACKEND_S3_ENDPOINT="<ENDPOINT>"
HD_MEDIA_BACKEND_S3_REGION="<REGION>"
HD_MEDIA_BACKEND_S3_PATH_STYLE="<true|false>"
``` ```
`<ENDPOINT>` should be a URL and contain the protocol, the domain, and, if necessary, the port. `<ENDPOINT>` should be a URL and contain the protocol, the domain, and, if necessary, the port.
For example: `https://s3.example.org` or `http://s3.example.org:9000` For example: `https://s3.example.org` or `http://s3.example.org:9000`
`<PATH_STYLE>` should be set to `true` if you are using an S3-compatible storage like MinIO that
uses path-style URLs.
If you use Amazon S3, `<ENDPOINT>` should contain your [Amazon Region][amazon-region]. If you use Amazon S3, `<ENDPOINT>` should contain your [Amazon Region][amazon-region].
For example: If your Amazon Region is `us-east-2`, your endpoint `<ENDPOINT>` For example: If your Amazon Region is `us-east-2`, your endpoint `<ENDPOINT>`
should be `https://s3.us-east-2.amazonaws.com`. should be `https://s3.us-east-2.amazonaws.com`.
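
As an illustration of why the region and path-style settings matter for temporary URLs, here is a
hedged sketch of generating a pre-signed download URL with the AWS SDK v3. The placeholder values and
the object key are assumptions mirroring the configuration above; this is not necessarily how HedgeDoc
wires it up internally.

```typescript
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

// Values mirror the configuration placeholders above
const client = new S3Client({
  endpoint: '<ENDPOINT>', // e.g. https://s3.example.org
  region: '<REGION>', // e.g. us-east-2
  forcePathStyle: true, // analogous to HD_MEDIA_BACKEND_S3_PATH_STYLE
  credentials: {
    accessKeyId: '<ACCESS_KEY>',
    secretAccessKey: '<SECRET_KEY>',
  },
});

// Generate a temporary (pre-signed) URL for one stored object; the key is hypothetical
const url = await getSignedUrl(
  client,
  new GetObjectCommand({ Bucket: '<BUCKET>', Key: 'some-upload-uuid' }),
  { expiresIn: 3600 } // URL stays valid for one hour
);
console.log(url);
```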

View file

@ -3,8 +3,7 @@
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
const fakeUuid = '77fdcf1c-35fa-4a65-bdcf-1c35fa8a65d5'
const imageId = 'non-existing.png'
describe('File upload', () => { describe('File upload', () => {
beforeEach(() => { beforeEach(() => {
@ -22,7 +21,8 @@ describe('File upload', () => {
{ {
statusCode: 201, statusCode: 201,
body: { body: {
id: imageId uuid: fakeUuid,
fileName: 'demo.png'
} }
} }
) )
@ -38,7 +38,7 @@ describe('File upload', () => {
}, },
{ force: true } { force: true }
) )
cy.get('.cm-line').contains(`![demo.png](http://127.0.0.1:3001/api/private/media/${imageId})`) cy.get('.cm-line').contains(`![demo.png](http://127.0.0.1:3001/media/${fakeUuid})`)
}) })
it('via paste', () => { it('via paste', () => {
@ -51,7 +51,7 @@ describe('File upload', () => {
} }
} }
cy.get('.cm-content').trigger('paste', pasteEvent) cy.get('.cm-content').trigger('paste', pasteEvent)
cy.get('.cm-line').contains(`![](http://127.0.0.1:3001/api/private/media/${imageId})`) cy.get('.cm-line').contains(`![](http://127.0.0.1:3001/media/${fakeUuid})`)
}) })
}) })
@ -65,7 +65,7 @@ describe('File upload', () => {
}, },
{ action: 'drag-drop', force: true } { action: 'drag-drop', force: true }
) )
cy.get('.cm-line').contains(`![demo.png](http://127.0.0.1:3001/api/private/media/${imageId})`) cy.get('.cm-line').contains(`![demo.png](http://127.0.0.1:3001/media/${fakeUuid})`)
}) })
}) })

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -31,7 +31,7 @@ export const getProxiedUrl = async (imageUrl: string): Promise<ImageProxyRespons
* @return The URL of the uploaded media object. * @return The URL of the uploaded media object.
* @throws {Error} when the api request wasn't successful. * @throws {Error} when the api request wasn't successful.
*/ */
export const uploadFile = async (noteIdOrAlias: string, media: Blob): Promise<MediaUpload> => { export const uploadFile = async (noteIdOrAlias: string, media: File): Promise<MediaUpload> => {
const postData = new FormData() const postData = new FormData()
postData.append('file', media) postData.append('file', media)
const response = await new PostApiRequestBuilder<MediaUpload, void>('media') const response = await new PostApiRequestBuilder<MediaUpload, void>('media')

View file

@ -4,10 +4,11 @@
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
export interface MediaUpload { export interface MediaUpload {
id: string uuid: string
fileName: string
noteId: string | null noteId: string | null
createdAt: string createdAt: string
username: string username: string | null
} }
export interface ImageProxyResponse { export interface ImageProxyResponse {

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -60,8 +60,8 @@ export const useHandleUpload = (): handleUploadSignature => {
return replaceSelection(cursorSelection ?? currentSelection, uploadPlaceholder, false) return replaceSelection(cursorSelection ?? currentSelection, uploadPlaceholder, false)
}) })
uploadFile(noteId, file) uploadFile(noteId, file)
.then(({ id }) => { .then(({ uuid }) => {
const fullUrl = `${baseUrl}api/private/media/${id}` const fullUrl = `${baseUrl}media/${uuid}`
const replacement = `![${description ?? file.name ?? ''}](${fullUrl}${additionalUrlText ?? ''})` const replacement = `![${description ?? file.name ?? ''}](${fullUrl}${additionalUrlText ?? ''})`
changeContent(({ markdownContent }) => [ changeContent(({ markdownContent }) => [
replaceInContent(markdownContent, uploadPlaceholder, replacement), replaceInContent(markdownContent, uploadPlaceholder, replacement),

View file

@ -49,7 +49,7 @@ export const MediaBrowserSidebarMenu: React.FC<SpecificSidebarMenuProps> = ({
if (loading || error || !value) { if (loading || error || !value) {
return [] return []
} }
return value.map((entry) => <MediaEntry entry={entry} key={entry.id} onDelete={setMediaEntryForDeletion} />) return value.map((entry) => <MediaEntry entry={entry} key={entry.uuid} onDelete={setMediaEntryForDeletion} />)
}, [value, loading, error, setMediaEntryForDeletion]) }, [value, loading, error, setMediaEntryForDeletion])
const cancelDeletion = useCallback(() => { const cancelDeletion = useCallback(() => {

View file

@ -25,7 +25,7 @@ export const MediaEntryDeletionModal: React.FC<MediaEntryDeletionModalProps> = (
const { showErrorNotification, dispatchUiNotification } = useUiNotifications() const { showErrorNotification, dispatchUiNotification } = useUiNotifications()
const handleDelete = useCallback(() => { const handleDelete = useCallback(() => {
deleteUploadedMedia(entry.id) deleteUploadedMedia(entry.uuid)
.then(() => { .then(() => {
dispatchUiNotification('common.success', 'editor.mediaBrowser.mediaDeleted', {}) dispatchUiNotification('common.success', 'editor.mediaBrowser.mediaDeleted', {})
}) })

View file

@ -0,0 +1,11 @@
/*
* SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
*
* SPDX-License-Identifier: AGPL-3.0-only
*/
.preview {
max-width: 100%;
max-height: 150px;
height: auto;
width: auto;
}

View file

@ -11,13 +11,15 @@ import {
Trash as IconTrash, Trash as IconTrash,
FileRichtextFill as IconFileRichtextFill, FileRichtextFill as IconFileRichtextFill,
Person as IconPerson, Person as IconPerson,
Clock as IconClock Clock as IconClock,
FileText as IconFileText
} from 'react-bootstrap-icons' } from 'react-bootstrap-icons'
import { useIsOwner } from '../../../../../hooks/common/use-is-owner' import { useIsOwner } from '../../../../../hooks/common/use-is-owner'
import { useApplicationState } from '../../../../../hooks/common/use-application-state' import { useApplicationState } from '../../../../../hooks/common/use-application-state'
import { UserAvatarForUsername } from '../../../../common/user-avatar/user-avatar-for-username' import { UserAvatarForUsername } from '../../../../common/user-avatar/user-avatar-for-username'
import { useChangeEditorContentCallback } from '../../../change-content-context/use-change-editor-content-callback' import { useChangeEditorContentCallback } from '../../../change-content-context/use-change-editor-content-callback'
import { replaceSelection } from '../../../editor-pane/tool-bar/formatters/replace-selection' import { replaceSelection } from '../../../editor-pane/tool-bar/formatters/replace-selection'
import styles from './media-entry.module.css'
export interface MediaEntryProps { export interface MediaEntryProps {
entry: MediaUpload entry: MediaUpload
@ -37,7 +39,7 @@ export const MediaEntry: React.FC<MediaEntryProps> = ({ entry, onDelete }) => {
const isOwner = useIsOwner() const isOwner = useIsOwner()
const imageUrl = useMemo(() => { const imageUrl = useMemo(() => {
return `${baseUrl}api/private/media/${entry.id}` return `${baseUrl}media/${entry.uuid}`
}, [entry, baseUrl]) }, [entry, baseUrl])
const textCreatedTime = useMemo(() => { const textCreatedTime = useMemo(() => {
return new Date(entry.createdAt).toLocaleString() return new Date(entry.createdAt).toLocaleString()
@ -47,7 +49,7 @@ export const MediaEntry: React.FC<MediaEntryProps> = ({ entry, onDelete }) => {
changeEditorContent?.(({ currentSelection }) => { changeEditorContent?.(({ currentSelection }) => {
return replaceSelection( return replaceSelection(
{ from: currentSelection.to ?? currentSelection.from }, { from: currentSelection.to ?? currentSelection.from },
`![${entry.id}](${imageUrl})`, `![${entry.fileName}](${imageUrl})`,
true true
) )
}) })
@ -61,10 +63,15 @@ export const MediaEntry: React.FC<MediaEntryProps> = ({ entry, onDelete }) => {
<div className={'p-2 border-bottom border-opacity-50'}> <div className={'p-2 border-bottom border-opacity-50'}>
<a href={imageUrl} target={'_blank'} rel={'noreferrer'} className={'text-center d-block mb-2'}> <a href={imageUrl} target={'_blank'} rel={'noreferrer'} className={'text-center d-block mb-2'}>
{/* eslint-disable-next-line @next/next/no-img-element */} {/* eslint-disable-next-line @next/next/no-img-element */}
<img src={imageUrl} alt={`Upload ${entry.id}`} height={100} className={'mw-100'} /> <img src={imageUrl} alt={`Upload ${entry.fileName}`} className={styles.preview} />
</a> </a>
<div className={'w-100 d-flex flex-row align-items-center justify-content-between'}> <div className={'w-100 d-flex flex-row align-items-center justify-content-between'}>
<div> <div>
<small>
<IconFileText className={'me-1'} />
{entry.fileName}
</small>
<br />
<small className={'d-inline-flex flex-row align-items-center'}> <small className={'d-inline-flex flex-row align-items-center'}>
<IconPerson className={'me-1'} /> <IconPerson className={'me-1'} />
<UserAvatarForUsername username={entry.username} size={'sm'} /> <UserAvatarForUsername username={entry.username} size={'sm'} />

View file

@ -12,13 +12,15 @@ const handler = (req: NextApiRequest, res: NextApiResponse) => {
{ {
username: 'tilman', username: 'tilman',
createdAt: '2022-03-20T20:36:32Z', createdAt: '2022-03-20T20:36:32Z',
id: 'dummy.png', uuid: '5355ed83-7e12-4db0-95ed-837e124db08c',
fileName: 'dummy.png',
noteId: 'features' noteId: 'features'
}, },
{ {
username: 'tilman', username: 'tilman',
createdAt: '2022-03-20T20:36:57+0000', createdAt: '2022-03-20T20:36:57+0000',
id: 'dummy.png', uuid: '656745ab-fbf9-47f1-a745-abfbf9a7f10c',
fileName: 'dummy2.png',
noteId: null noteId: null
} }
]) ])

View file

@ -1,5 +1,5 @@
/* /*
* SPDX-FileCopyrightText: 2022 The HedgeDoc developers (see AUTHORS file) * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
* *
* SPDX-License-Identifier: AGPL-3.0-only * SPDX-License-Identifier: AGPL-3.0-only
*/ */
@ -20,7 +20,8 @@ const handler = async (req: NextApiRequest, res: NextApiResponse): Promise<void>
req, req,
res, res,
{ {
id: '/public/img/avatar.png', uuid: 'e81f57cd-5866-4253-9f57-cd5866a253ca',
fileName: 'avatar.png',
noteId: null, noteId: null,
username: 'test', username: 'test',
createdAt: '2022-02-27T21:54:23.856Z' createdAt: '2022-02-27T21:54:23.856Z'