Mirror of https://github.com/immich-app/immich.git
Merge: remote-tracking branch 'origin/main' into feat/integrity-checks-izzy
@@ -45,14 +45,14 @@
    "@nestjs/websockets": "^11.0.4",
    "@opentelemetry/api": "^1.9.0",
    "@opentelemetry/context-async-hooks": "^2.0.0",
-   "@opentelemetry/exporter-prometheus": "^0.208.0",
-   "@opentelemetry/instrumentation-http": "^0.208.0",
-   "@opentelemetry/instrumentation-ioredis": "^0.57.0",
-   "@opentelemetry/instrumentation-nestjs-core": "^0.55.0",
-   "@opentelemetry/instrumentation-pg": "^0.61.0",
+   "@opentelemetry/exporter-prometheus": "^0.210.0",
+   "@opentelemetry/instrumentation-http": "^0.210.0",
+   "@opentelemetry/instrumentation-ioredis": "^0.58.0",
+   "@opentelemetry/instrumentation-nestjs-core": "^0.56.0",
+   "@opentelemetry/instrumentation-pg": "^0.62.0",
    "@opentelemetry/resources": "^2.0.1",
    "@opentelemetry/sdk-metrics": "^2.0.1",
-   "@opentelemetry/sdk-node": "^0.208.0",
+   "@opentelemetry/sdk-node": "^0.210.0",
    "@opentelemetry/semantic-conventions": "^1.34.0",
    "@react-email/components": "^0.5.0",
    "@react-email/render": "^1.1.2",
@@ -135,7 +135,7 @@
    "@types/luxon": "^3.6.2",
    "@types/mock-fs": "^4.13.1",
    "@types/multer": "^2.0.0",
-   "@types/node": "^24.10.4",
+   "@types/node": "^24.10.8",
    "@types/nodemailer": "^7.0.0",
    "@types/picomatch": "^4.0.0",
    "@types/pngjs": "^6.0.5",
@@ -10,6 +10,7 @@ import { IWorker } from 'src/constants';
import { controllers } from 'src/controllers';
import { ImmichWorker } from 'src/enum';
import { MaintenanceAuthGuard } from 'src/maintenance/maintenance-auth.guard';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
import { MaintenanceWorkerController } from 'src/maintenance/maintenance-worker.controller';
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
@@ -21,8 +22,11 @@ import { LoggingInterceptor } from 'src/middleware/logging.interceptor';
import { repositories } from 'src/repositories';
import { AppRepository } from 'src/repositories/app.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemetry.repository';
import { WebsocketRepository } from 'src/repositories/websocket.repository';
@@ -103,8 +107,12 @@ export class ApiModule extends BaseModule {}
  providers: [
    ConfigRepository,
    LoggingRepository,
    StorageRepository,
    ProcessRepository,
    DatabaseRepository,
    SystemMetadataRepository,
    AppRepository,
    MaintenanceHealthRepository,
    MaintenanceWebsocketRepository,
    MaintenanceWorkerService,
    ...commonMiddleware,
@@ -116,9 +124,14 @@ export class MaintenanceModule {
  constructor(
    @Inject(IWorker) private worker: ImmichWorker,
    logger: LoggingRepository,
    private maintenanceWorkerService: MaintenanceWorkerService,
  ) {
    logger.setAppName(this.worker);
  }

  async onModuleInit() {
    await this.maintenanceWorkerService.init();
  }
}

@Module({
@@ -141,6 +141,7 @@ export const endpointTags: Record<ApiTag, string> = {
  [ApiTag.Assets]: 'An asset is an image or video that has been uploaded to Immich.',
  [ApiTag.Authentication]: 'Endpoints related to user authentication, including OAuth.',
  [ApiTag.AuthenticationAdmin]: 'Administrative endpoints related to authentication.',
  [ApiTag.DatabaseBackups]: 'Manage backups of the Immich database.',
  [ApiTag.Deprecated]: 'Deprecated endpoints that are planned for removal in the next major release.',
  [ApiTag.Download]: 'Endpoints for downloading assets or collections of assets.',
  [ApiTag.Duplicates]: 'Endpoints for managing and identifying duplicate assets.',
server/src/controllers/database-backup.controller.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
import { Body, Controller, Delete, Get, Next, Param, Post, Res, UploadedFile, UseInterceptors } from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import { ApiBody, ApiConsumes, ApiTags } from '@nestjs/swagger';
import { NextFunction, Response } from 'express';
import { Endpoint, HistoryBuilder } from 'src/decorators';
import {
  DatabaseBackupDeleteDto,
  DatabaseBackupListResponseDto,
  DatabaseBackupUploadDto,
} from 'src/dtos/database-backup.dto';
import { ApiTag, ImmichCookie, Permission } from 'src/enum';
import { Authenticated, FileResponse, GetLoginDetails } from 'src/middleware/auth.guard';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { LoginDetails } from 'src/services/auth.service';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { MaintenanceService } from 'src/services/maintenance.service';
import { sendFile } from 'src/utils/file';
import { respondWithCookie } from 'src/utils/response';
import { FilenameParamDto } from 'src/validation';

@ApiTags(ApiTag.DatabaseBackups)
@Controller('admin/database-backups')
export class DatabaseBackupController {
  constructor(
    private logger: LoggingRepository,
    private service: DatabaseBackupService,
    private maintenanceService: MaintenanceService,
  ) {}

  @Get()
  @Endpoint({
    summary: 'List database backups',
    description: 'Get the list of successful and failed backups',
    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
  })
  @Authenticated({ permission: Permission.Maintenance, admin: true })
  listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
    return this.service.listBackups();
  }

  @Get(':filename')
  @FileResponse()
  @Endpoint({
    summary: 'Download database backup',
    description: 'Downloads the database backup file',
    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
  })
  @Authenticated({ permission: Permission.BackupDownload, admin: true })
  async downloadDatabaseBackup(
    @Param() { filename }: FilenameParamDto,
    @Res() res: Response,
    @Next() next: NextFunction,
  ): Promise<void> {
    await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
  }

  @Delete()
  @Endpoint({
    summary: 'Delete database backup',
    description: 'Delete a backup by its filename',
    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
  })
  @Authenticated({ permission: Permission.BackupDelete, admin: true })
  async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
    return this.service.deleteBackup(dto.backups);
  }

  @Post('start-restore')
  @Endpoint({
    summary: 'Start database backup restore flow',
    description: 'Put Immich into maintenance mode to restore a backup (Immich must not be configured)',
    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
  })
  async startDatabaseRestoreFlow(
    @GetLoginDetails() loginDetails: LoginDetails,
    @Res({ passthrough: true }) res: Response,
  ): Promise<void> {
    const { jwt } = await this.maintenanceService.startRestoreFlow();
    return respondWithCookie(res, undefined, {
      isSecure: loginDetails.isSecure,
      values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
    });
  }

  @Post('upload')
  @Authenticated({ permission: Permission.BackupUpload, admin: true })
  @ApiConsumes('multipart/form-data')
  @ApiBody({ description: 'Backup Upload', type: DatabaseBackupUploadDto })
  @Endpoint({
    summary: 'Upload database backup',
    description: 'Uploads a .sql/.sql.gz file to restore a backup from',
    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
  })
  @UseInterceptors(FileInterceptor('file'))
  uploadDatabaseBackup(
    @UploadedFile()
    file: Express.Multer.File,
  ): Promise<void> {
    return this.service.uploadBackup(file);
  }
}
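For orientation, here is a minimal sketch of how a client might drive the restore flow wired up above: `start-restore` sets the maintenance-token cookie via `respondWithCookie`, after which the client polls the maintenance status endpoint served by the maintenance worker. The `/api` prefix and response handling are assumptions for illustration; only the routes and DTO fields appear in this diff.

```typescript
// Hypothetical client-side sketch of the restore flow; not code from this diff.
type MaintenanceStatus = {
  active: boolean;
  action: string;
  progress?: number;
  task?: string;
  error?: string;
};

async function startRestoreAndWait(baseUrl: string): Promise<void> {
  // Sets the maintenance-token cookie; credentials: 'include' retains it.
  const start = await fetch(`${baseUrl}/api/admin/database-backups/start-restore`, {
    method: 'POST',
    credentials: 'include',
  });
  if (!start.ok) {
    throw new Error(`start-restore failed: ${start.status}`);
  }

  // Poll the maintenance status endpoint until the action completes.
  for (;;) {
    const res = await fetch(`${baseUrl}/api/admin/maintenance/status`, { credentials: 'include' });
    const status: MaintenanceStatus = await res.json();
    if (status.error) {
      throw new Error(status.error);
    }
    if (!status.active) {
      return; // maintenance mode ended, restore finished
    }
    await new Promise((resolve) => setTimeout(resolve, 1000));
  }
}
```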
@@ -6,6 +6,7 @@ import { AssetMediaController } from 'src/controllers/asset-media.controller';
import { AssetController } from 'src/controllers/asset.controller';
import { AuthAdminController } from 'src/controllers/auth-admin.controller';
import { AuthController } from 'src/controllers/auth.controller';
import { DatabaseBackupController } from 'src/controllers/database-backup.controller';
import { DownloadController } from 'src/controllers/download.controller';
import { DuplicateController } from 'src/controllers/duplicate.controller';
import { FaceController } from 'src/controllers/face.controller';
@@ -47,6 +48,7 @@ export const controllers = [
  AssetMediaController,
  AuthController,
  AuthAdminController,
  DatabaseBackupController,
  DownloadController,
  DuplicateController,
  FaceController,

@@ -1,9 +1,15 @@
-import { BadRequestException, Body, Controller, Post, Res } from '@nestjs/common';
+import { BadRequestException, Body, Controller, Get, Post, Res } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { Response } from 'express';
import { Endpoint, HistoryBuilder } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
-import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
+import {
+  MaintenanceAuthDto,
+  MaintenanceDetectInstallResponseDto,
+  MaintenanceLoginDto,
+  MaintenanceStatusResponseDto,
+  SetMaintenanceModeDto,
+} from 'src/dtos/maintenance.dto';
import { ApiTag, ImmichCookie, MaintenanceAction, Permission } from 'src/enum';
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
import { LoginDetails } from 'src/services/auth.service';
@@ -15,6 +21,27 @@ import { respondWithCookie } from 'src/utils/response';
export class MaintenanceController {
  constructor(private service: MaintenanceService) {}

  @Get('status')
  @Endpoint({
    summary: 'Get maintenance mode status',
    description: 'Fetch information about the currently running maintenance action.',
    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
  })
  getMaintenanceStatus(): MaintenanceStatusResponseDto {
    return this.service.getMaintenanceStatus();
  }

  @Get('detect-install')
  @Endpoint({
    summary: 'Detect existing install',
    description: 'Collect integrity checks and other heuristics about local data.',
    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
  })
  @Authenticated({ permission: Permission.Maintenance, admin: true })
  detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
    return this.service.detectPriorInstall();
  }

  @Post('login')
  @Endpoint({
    summary: 'Log into maintenance mode',
@@ -38,8 +65,8 @@ export class MaintenanceController {
    @GetLoginDetails() loginDetails: LoginDetails,
    @Res({ passthrough: true }) res: Response,
  ): Promise<void> {
-    if (dto.action === MaintenanceAction.Start) {
-      const { jwt } = await this.service.startMaintenance(auth.user.name);
+    if (dto.action !== MaintenanceAction.End) {
+      const { jwt } = await this.service.startMaintenance(dto, auth.user.name);
      return respondWithCookie(res, undefined, {
        isSecure: loginDetails.isSecure,
        values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],

@@ -1,7 +1,15 @@
import { randomUUID } from 'node:crypto';
import { dirname, join, resolve } from 'node:path';
import { StorageAsset } from 'src/database';
-import { AssetFileType, AssetPathType, ImageFormat, PathType, PersonPathType, StorageFolder } from 'src/enum';
+import {
+  AssetFileType,
+  AssetPathType,
+  ImageFormat,
+  PathType,
+  PersonPathType,
+  RawExtractedFormat,
+  StorageFolder,
+} from 'src/enum';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
@@ -24,15 +32,6 @@ export interface MoveRequest {
  };
}

-export type GeneratedImageType =
-  | AssetPathType.Preview
-  | AssetPathType.Thumbnail
-  | AssetPathType.FullSize
-  | AssetPathType.EditedPreview
-  | AssetPathType.EditedThumbnail
-  | AssetPathType.EditedFullSize;
-export type GeneratedAssetType = GeneratedImageType | AssetPathType.EncodedVideo;

export type ThumbnailPathEntity = { id: string; ownerId: string };

let instance: StorageCore | null;
@@ -111,8 +110,19 @@ export class StorageCore {
    return StorageCore.getNestedPath(StorageFolder.Thumbnails, person.ownerId, `${person.id}.jpeg`);
  }

-  static getImagePath(asset: ThumbnailPathEntity, type: GeneratedImageType, format: 'jpeg' | 'webp') {
-    return StorageCore.getNestedPath(StorageFolder.Thumbnails, asset.ownerId, `${asset.id}-${type}.${format}`);
+  static getImagePath(
+    asset: ThumbnailPathEntity,
+    {
+      fileType,
+      format,
+      isEdited,
+    }: { fileType: AssetFileType; format: ImageFormat | RawExtractedFormat; isEdited: boolean },
+  ) {
+    return StorageCore.getNestedPath(
+      StorageFolder.Thumbnails,
+      asset.ownerId,
+      `${asset.id}_${fileType}${isEdited ? '_edited' : ''}.${format}`,
+    );
  }

  static getEncodedVideoPath(asset: ThumbnailPathEntity) {
@@ -137,14 +147,14 @@ export class StorageCore {
    return normalizedPath.startsWith(normalizedAppMediaLocation);
  }

-  async moveAssetImage(asset: StorageAsset, pathType: GeneratedImageType, format: ImageFormat) {
+  async moveAssetImage(asset: StorageAsset, fileType: AssetFileType, format: ImageFormat) {
    const { id: entityId, files } = asset;
-    const oldFile = getAssetFile(files, pathType);
+    const oldFile = getAssetFile(files, fileType, { isEdited: false });
    return this.moveFile({
      entityId,
-      pathType,
+      pathType: fileType,
      oldPath: oldFile?.path || null,
-      newPath: StorageCore.getImagePath(asset, pathType, format),
+      newPath: StorageCore.getImagePath(asset, { fileType, format, isEdited: false }),
    });
  }
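The replacement `getImagePath` folds the generated-image variants that previously lived in `AssetPathType` (and the removed `*Edited` members of `AssetFileType`) into a `{ fileType, format, isEdited }` triple encoded directly in the filename. A standalone sketch of just the basename template, with made-up values; the nested `getNestedPath` directory layout is elided:

```typescript
// Illustration of the naming scheme above; IDs and the type union are
// simplified stand-ins for AssetFileType/ImageFormat enum values.
type ImageName = {
  id: string;
  fileType: 'fullsize' | 'preview' | 'thumbnail';
  isEdited: boolean;
  format: string;
};

const imageBasename = ({ id, fileType, isEdited, format }: ImageName): string =>
  `${id}_${fileType}${isEdited ? '_edited' : ''}.${format}`;

// imageBasename({ id: 'abc', fileType: 'preview', isEdited: true, format: 'webp' })
// => 'abc_preview_edited.webp'
```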
@@ -298,19 +308,19 @@ export class StorageCore {
      case AssetPathType.Original: {
        return this.assetRepository.update({ id, originalPath: newPath });
      }
-      case AssetPathType.FullSize: {
+      case AssetFileType.FullSize: {
        return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.FullSize, path: newPath });
      }
-      case AssetPathType.Preview: {
+      case AssetFileType.Preview: {
        return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Preview, path: newPath });
      }
-      case AssetPathType.Thumbnail: {
+      case AssetFileType.Thumbnail: {
        return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Thumbnail, path: newPath });
      }
      case AssetPathType.EncodedVideo: {
        return this.assetRepository.update({ id, encodedVideoPath: newPath });
      }
-      case AssetPathType.Sidecar: {
+      case AssetFileType.Sidecar: {
        return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: newPath });
      }
      case PersonPathType.Face: {

@@ -39,6 +39,7 @@ export type AssetFile = {
  id: string;
  type: AssetFileType;
  path: string;
  isEdited: boolean;
};

export type Library = {
@@ -344,7 +345,7 @@ export const columns = {
    'asset.width',
    'asset.height',
  ],
-  assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type'],
+  assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type', 'asset_file.isEdited'],
  authUser: ['user.id', 'user.name', 'user.email', 'user.isAdmin', 'user.quotaUsageInBytes', 'user.quotaSizeInBytes'],
  authApiKey: ['api_key.id', 'api_key.permissions'],
  authSession: ['session.id', 'session.updatedAt', 'session.pinExpiresAt', 'session.appVersion'],
@@ -395,6 +396,7 @@ export const columns = {
    'asset.libraryId',
    'asset.width',
    'asset.height',
    'asset.isEdited',
  ],
  syncAlbumUser: ['album_user.albumId as albumId', 'album_user.userId as userId', 'album_user.role'],
  syncStack: ['stack.id', 'stack.createdAt', 'stack.updatedAt', 'stack.primaryAssetId', 'stack.ownerId'],
@@ -456,6 +458,7 @@ export const columns = {
    'asset_exif.projectionType',
    'asset_exif.rating',
    'asset_exif.state',
    'asset_exif.tags',
    'asset_exif.timeZone',
  ],
  plugin: [
@@ -479,4 +482,5 @@ export const lockableProperties = [
  'longitude',
  'rating',
  'timeZone',
  'tags',
] as const;

@@ -98,6 +98,8 @@ export class AssetResponseDto extends SanitizedAssetResponseDto {

  @Property({ history: new HistoryBuilder().added('v1').deprecated('v1.113.0') })
  resized?: boolean;

  @Property({ history: new HistoryBuilder().added('v2.5.0').beta('v2.5.0') })
  isEdited!: boolean;
}

export type MapAsset = {
@@ -137,6 +139,7 @@ export type MapAsset = {
  type: AssetType;
  width: number | null;
  height: number | null;
  isEdited: boolean;
};

export class AssetStackResponseDto {
@@ -245,5 +248,6 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
    resized: true,
    width: entity.width,
    height: entity.height,
    isEdited: entity.isEdited,
  };
}
server/src/dtos/database-backup.dto.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
import { ApiProperty } from '@nestjs/swagger';
import { IsString } from 'class-validator';

export class DatabaseBackupDto {
  filename!: string;
  filesize!: number;
}

export class DatabaseBackupListResponseDto {
  backups!: DatabaseBackupDto[];
}

export class DatabaseBackupUploadDto {
  @ApiProperty({ type: 'string', format: 'binary', required: false })
  file?: any;
}

export class DatabaseBackupDeleteDto {
  @IsString({ each: true })
  backups!: string[];
}
@@ -1,9 +1,14 @@
-import { MaintenanceAction } from 'src/enum';
+import { ValidateIf } from 'class-validator';
+import { MaintenanceAction, StorageFolder } from 'src/enum';
import { ValidateEnum, ValidateString } from 'src/validation';

export class SetMaintenanceModeDto {
  @ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
  action!: MaintenanceAction;

  @ValidateIf((o) => o.action === MaintenanceAction.RestoreDatabase)
  @ValidateString()
  restoreBackupFilename?: string;
}

export class MaintenanceLoginDto {
@@ -14,3 +19,26 @@ export class MaintenanceLoginDto {
export class MaintenanceAuthDto {
  username!: string;
}

export class MaintenanceStatusResponseDto {
  active!: boolean;

  @ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
  action!: MaintenanceAction;

  progress?: number;
  task?: string;
  error?: string;
}

export class MaintenanceDetectInstallStorageFolderDto {
  @ValidateEnum({ enum: StorageFolder, name: 'StorageFolder' })
  folder!: StorageFolder;
  readable!: boolean;
  writable!: boolean;
  files!: number;
}

export class MaintenanceDetectInstallResponseDto {
  storage!: MaintenanceDetectInstallStorageFolderDto[];
}
@@ -121,6 +121,8 @@ export class SyncAssetV1 {
  width!: number | null;
  @ApiProperty({ type: 'integer' })
  height!: number | null;
  @ApiProperty({ type: 'boolean' })
  isEdited!: boolean;
}

@ExtraModel()
@@ -45,9 +45,6 @@ export enum AssetFileType {
  Preview = 'preview',
  Thumbnail = 'thumbnail',
  Sidecar = 'sidecar',
-  FullSizeEdited = 'fullsize_edited',
-  PreviewEdited = 'preview_edited',
-  ThumbnailEdited = 'thumbnail_edited',
}

export enum AlbumUserRole {
@@ -136,6 +133,11 @@ export enum Permission {

  ArchiveRead = 'archive.read',

  BackupList = 'backup.list',
  BackupDownload = 'backup.download',
  BackupUpload = 'backup.upload',
  BackupDelete = 'backup.delete',

  DuplicateRead = 'duplicate.read',
  DuplicateDelete = 'duplicate.delete',

@@ -380,14 +382,7 @@ export enum ManualJobName {

export enum AssetPathType {
  Original = 'original',
-  FullSize = 'fullsize',
-  Preview = 'preview',
-  EditedFullSize = 'edited_fullsize',
-  EditedPreview = 'edited_preview',
-  EditedThumbnail = 'edited_thumbnail',
-  Thumbnail = 'thumbnail',
  EncodedVideo = 'encoded_video',
-  Sidecar = 'sidecar',
}

export enum PersonPathType {
@@ -398,7 +393,7 @@ export enum UserPathType {
  Profile = 'profile',
}

-export type PathType = AssetPathType | PersonPathType | UserPathType;
+export type PathType = AssetFileType | AssetPathType | PersonPathType | UserPathType;

export enum TranscodePolicy {
  All = 'all',
@@ -726,6 +721,7 @@ export enum DatabaseLock {
  MediaLocation = 700,
  GetSystemConfig = 69,
  BackupDatabase = 42,
  MaintenanceOperation = 621,
  MemoryCreation = 777,
  IntegrityCheck = 67,
}
@@ -733,6 +729,8 @@ export enum DatabaseLock {

export enum MaintenanceAction {
  Start = 'start',
  End = 'end',
  SelectDatabaseRestore = 'select_database_restore',
  RestoreDatabase = 'restore_database',
}

export enum ExitCode {
@@ -879,6 +877,7 @@ export enum ApiTag {
  Authentication = 'Authentication',
  AuthenticationAdmin = 'Authentication (admin)',
  Assets = 'Assets',
  DatabaseBackups = 'Database Backups (admin)',
  Deprecated = 'Deprecated',
  Download = 'Download',
  Duplicates = 'Duplicates',
@@ -1,11 +1,11 @@
-import { Kysely } from 'kysely';
+import { Kysely, sql } from 'kysely';
import { CommandFactory } from 'nest-commander';
import { ChildProcess, fork } from 'node:child_process';
import { dirname, join } from 'node:path';
import { Worker } from 'node:worker_threads';
import { PostgresError } from 'postgres';
import { ImmichAdminModule } from 'src/app.module';
-import { ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
+import { DatabaseLock, ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { type DB } from 'src/schema';
@@ -35,19 +35,18 @@ class Workers {
    if (isMaintenanceMode) {
      this.startWorker(ImmichWorker.Maintenance);
    } else {
      await this.waitForFreeLock();

      for (const worker of workers) {
        this.startWorker(worker);
      }
    }
  }

  /**
   * Initialise a short-lived Nest application to build configuration
   * @returns System configuration
   */
  private async isMaintenanceMode(): Promise<boolean> {
    const { database } = new ConfigRepository().getEnv();
-    const kysely = new Kysely<DB>(getKyselyConfig(database.config));
+    const { log: _, ...kyselyConfig } = getKyselyConfig(database.config);
+    const kysely = new Kysely<DB>(kyselyConfig);
    const systemMetadataRepository = new SystemMetadataRepository(kysely);

    try {
@@ -65,6 +64,32 @@ class Workers {
    }
  }

  private async waitForFreeLock() {
    const { database } = new ConfigRepository().getEnv();
    const kysely = new Kysely<DB>(getKyselyConfig(database.config));

    let locked = false;
    while (!locked) {
      locked = await kysely.connection().execute(async (conn) => {
        const { rows } = await sql<{
          pg_try_advisory_lock: boolean;
        }>`SELECT pg_try_advisory_lock(${DatabaseLock.MaintenanceOperation})`.execute(conn);

        const isLocked = rows[0].pg_try_advisory_lock;

        if (isLocked) {
          await sql`SELECT pg_advisory_unlock(${DatabaseLock.MaintenanceOperation})`.execute(conn);
        }

        return isLocked;
      });

      await new Promise((resolve) => setTimeout(resolve, 1000));
    }

    await kysely.destroy();
  }
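`waitForFreeLock` polls `pg_try_advisory_lock` once per second and immediately releases the lock on success, so it only observes that no maintenance operation currently holds `DatabaseLock.MaintenanceOperation`. A blocking variant is also possible; the sketch below is an illustration of that alternative, not code from this diff, using PostgreSQL's `pg_advisory_lock`, which suspends the session until the lock is free:

```typescript
import { Kysely, sql } from 'kysely';
import { type DB } from 'src/schema';

// Hypothetical blocking alternative: pg_advisory_lock waits server-side
// until the lock is free, so no one-second poll loop is needed. Advisory
// locks are session-scoped, so release the lock (or close the connection)
// once acquisition has proven the coast is clear.
async function waitForFreeLockBlocking(kysely: Kysely<DB>, lockId: number): Promise<void> {
  await kysely.connection().execute(async (conn) => {
    await sql`SELECT pg_advisory_lock(${lockId})`.execute(conn);
    // Holding the lock proves no maintenance operation is running; release
    // it right away, mirroring the try-lock/unlock pattern above.
    await sql`SELECT pg_advisory_unlock(${lockId})`.execute(conn);
  });
}
```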

  /**
   * Start an individual worker
   * @param name Worker
server/src/maintenance/maintenance-health.repository.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
import { Injectable } from '@nestjs/common';
import { fork } from 'node:child_process';
import { dirname, join } from 'node:path';

@Injectable()
export class MaintenanceHealthRepository {
  checkApiHealth(): Promise<void> {
    return new Promise<void>((resolve, reject) => {
      // eslint-disable-next-line unicorn/prefer-module
      const basePath = dirname(__filename);
      const workerFile = join(basePath, '..', 'workers', `api.js`);

      const worker = fork(workerFile, [], {
        execArgv: process.execArgv.filter((arg) => !arg.startsWith('--inspect')),
        env: {
          ...process.env,
          IMMICH_HOST: '127.0.0.1',
          IMMICH_PORT: '33001',
        },
        stdio: ['ignore', 'pipe', 'ignore', 'ipc'],
      });

      async function checkHealth() {
        try {
          const response = await fetch('http://127.0.0.1:33001/api/server/config');
          const { isOnboarded } = await response.json();
          if (isOnboarded) {
            resolve();
          } else {
            reject(new Error('Server health check failed, no admin exists.'));
          }
        } catch (error) {
          reject(error);
        } finally {
          if (worker.exitCode === null) {
            worker.kill('SIGTERM');
          }
        }
      }

      let output = '',
        alive = false;

      worker.stdout?.on('data', (data) => {
        if (alive) {
          return;
        }

        output += data;

        if (output.includes('Immich Server is listening')) {
          alive = true;
          void checkHealth();
        }
      });

      worker.on('exit', reject);
      worker.on('error', reject);

      setTimeout(() => {
        if (worker.exitCode === null) {
          worker.kill('SIGTERM');
        }
      }, 20_000);
    });
  }
}
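`checkApiHealth` forks a throwaway API worker on 127.0.0.1:33001 and probes `/api/server/config`; during a database restore the service uses it to decide whether to keep or roll back the restored database (see the rollback tests further down). A hedged sketch of that consuming pattern, with hypothetical `restore`/`rollback` helpers; the real logic lives in `MaintenanceWorkerService.runAction`:

```typescript
// Illustrative consumer of checkApiHealth(); names other than
// checkApiHealth are made up for this sketch.
async function restoreWithHealthGate(
  health: { checkApiHealth(): Promise<void> },
  restore: () => Promise<void>,
  rollback: () => Promise<void>,
): Promise<void> {
  await restore();
  try {
    // Boots a temporary API worker and verifies that /api/server/config
    // reports an onboarded install against the restored database.
    await health.checkApiHealth();
  } catch (error) {
    await rollback(); // restored database is unusable; put the old one back
    throw error;
  }
}
```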
@@ -7,17 +7,24 @@ import {
  WebSocketServer,
} from '@nestjs/websockets';
import { Server, Socket } from 'socket.io';
import { MaintenanceAuthDto, MaintenanceStatusResponseDto } from 'src/dtos/maintenance.dto';
import { AppRepository } from 'src/repositories/app.repository';
import { AppRestartEvent, ArgsOf } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';

-export const serverEvents = ['AppRestart'] as const;
-export type ServerEvents = (typeof serverEvents)[number];
-
-export interface ClientEventMap {
-  AppRestartV1: [AppRestartEvent];
+interface ServerEventMap {
+  AppRestart: [AppRestartEvent];
+  MaintenanceStatus: [MaintenanceStatusResponseDto];
+}
+
+interface ClientEventMap {
+  AppRestartV1: [AppRestartEvent];
+  MaintenanceStatusV1: [MaintenanceStatusResponseDto];
}

type AuthFn = (client: Socket) => Promise<MaintenanceAuthDto>;
type StatusUpdateFn = (status: MaintenanceStatusResponseDto) => void;

@WebSocketGateway({
  cors: true,
  path: '/api/socket.io',
@@ -25,8 +32,11 @@ export interface ClientEventMap {
})
@Injectable()
export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGatewayDisconnect, OnGatewayInit {
  private authFn?: AuthFn;
  private statusUpdateFn?: StatusUpdateFn;

  @WebSocketServer()
-  private websocketServer?: Server;
+  private server?: Server;

  constructor(
    private logger: LoggingRepository,
@@ -35,10 +45,10 @@ export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGa
    this.logger.setContext(MaintenanceWebsocketRepository.name);
  }

-  afterInit(websocketServer: Server) {
+  afterInit(server: Server) {
    this.logger.log('Initialized websocket server');

-    websocketServer.on('AppRestart', (event: ArgsOf<'AppRestart'>, ack?: (ok: 'ok') => void) => {
+    server.on('MaintenanceStatus', (status) => this.statusUpdateFn?.(status));
+    server.on('AppRestart', (event: ArgsOf<'AppRestart'>, ack?: (ok: 'ok') => void) => {
      this.logger.log(`Restarting due to event... ${JSON.stringify(event)}`);

      ack?.('ok');
@@ -46,20 +56,40 @@ export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGa
    });
  }

  clientSend<T extends keyof ClientEventMap>(event: T, room: string, ...data: ClientEventMap[T]) {
    this.server?.to(room).emit(event, ...data);
  }

  clientBroadcast<T extends keyof ClientEventMap>(event: T, ...data: ClientEventMap[T]) {
-    this.websocketServer?.emit(event, ...data);
+    this.server?.emit(event, ...data);
  }

-  serverSend<T extends ServerEvents>(event: T, ...args: ArgsOf<T>): void {
+  serverSend<T extends keyof ServerEventMap>(event: T, ...args: ServerEventMap[T]): void {
    this.logger.debug(`Server event: ${event} (send)`);
-    this.websocketServer?.serverSideEmit(event, ...args);
+    this.server?.serverSideEmit(event, ...args);
  }

-  handleConnection(client: Socket) {
-    this.logger.log(`Websocket Connect: ${client.id}`);
+  async handleConnection(client: Socket) {
+    try {
+      await this.authFn!(client);
+      await client.join('private');
+      this.logger.log(`Websocket Connect: ${client.id} (private)`);
+    } catch {
+      await client.join('public');
+      this.logger.log(`Websocket Connect: ${client.id} (public)`);
+    }
  }

-  handleDisconnect(client: Socket) {
+  async handleDisconnect(client: Socket) {
    this.logger.log(`Websocket Disconnect: ${client.id}`);
+    await Promise.allSettled([client.leave('private'), client.leave('public')]);
  }

  setAuthFn(fn: (client: Socket) => Promise<MaintenanceAuthDto>) {
    this.authFn = fn;
  }

  setStatusUpdateFn(fn: (status: MaintenanceStatusResponseDto) => void) {
    this.statusUpdateFn = fn;
  }
}
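Clients connecting to this gateway land in either the `private` or `public` room depending on whether the maintenance-token auth succeeds, and then receive `MaintenanceStatusV1` events. A hedged sketch of a browser-side listener using `socket.io-client`; the event name and `path` come from this diff, while the cookie handling and same-origin URL are assumptions:

```typescript
import { io } from 'socket.io-client';

type MaintenanceStatusV1 = {
  active: boolean;
  action: string;
  progress?: number;
  task?: string;
  error?: string;
};

// Same-origin connection; withCredentials sends the maintenance-token
// cookie so the gateway can place this client in the 'private' room.
const socket = io({ path: '/api/socket.io', withCredentials: true });

socket.on('MaintenanceStatusV1', (status: MaintenanceStatusV1) => {
  // Public-room clients receive a redacted error ('Something went wrong, see logs!').
  console.log(`maintenance: ${status.action} ${status.task ?? ''}`, status.progress, status.error);
});
```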
@@ -1,23 +1,114 @@
-import { Body, Controller, Get, Post, Req, Res } from '@nestjs/common';
-import { Request, Response } from 'express';
-import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
-import { ServerConfigDto } from 'src/dtos/server.dto';
-import { ImmichCookie, MaintenanceAction } from 'src/enum';
+import {
+  Body,
+  Controller,
+  Delete,
+  Get,
+  Next,
+  Param,
+  Post,
+  Req,
+  Res,
+  UploadedFile,
+  UseInterceptors,
+} from '@nestjs/common';
+import { FileInterceptor } from '@nestjs/platform-express';
+import { NextFunction, Request, Response } from 'express';
+import {
+  MaintenanceAuthDto,
+  MaintenanceDetectInstallResponseDto,
+  MaintenanceLoginDto,
+  MaintenanceStatusResponseDto,
+  SetMaintenanceModeDto,
+} from 'src/dtos/maintenance.dto';
+import { ServerConfigDto, ServerVersionResponseDto } from 'src/dtos/server.dto';
+import { ImmichCookie } from 'src/enum';
import { MaintenanceRoute } from 'src/maintenance/maintenance-auth.guard';
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
import { GetLoginDetails } from 'src/middleware/auth.guard';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { LoginDetails } from 'src/services/auth.service';
import { sendFile } from 'src/utils/file';
import { respondWithCookie } from 'src/utils/response';
import { FilenameParamDto } from 'src/validation';

import type { DatabaseBackupController as _DatabaseBackupController } from 'src/controllers/database-backup.controller';
import type { ServerController as _ServerController } from 'src/controllers/server.controller';
import { DatabaseBackupDeleteDto, DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';

@Controller()
export class MaintenanceWorkerController {
-  constructor(private service: MaintenanceWorkerService) {}
+  constructor(
+    private logger: LoggingRepository,
+    private service: MaintenanceWorkerService,
+  ) {}

  /**
   * {@link _ServerController.getServerConfig }
   */
  @Get('server/config')
-  getServerConfig(): Promise<ServerConfigDto> {
+  getServerConfig(): ServerConfigDto {
    return this.service.getSystemConfig();
  }

  @Get('server/version')
  getServerVersion(): ServerVersionResponseDto {
    return this.service.getVersion();
  }

  /**
   * {@link _DatabaseBackupController.listDatabaseBackups}
   */
  @Get('admin/database-backups')
  @MaintenanceRoute()
  listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
    return this.service.listBackups();
  }

  /**
   * {@link _DatabaseBackupController.downloadDatabaseBackup}
   */
  @Get('admin/database-backups/:filename')
  @MaintenanceRoute()
  async downloadDatabaseBackup(
    @Param() { filename }: FilenameParamDto,
    @Res() res: Response,
    @Next() next: NextFunction,
  ) {
    await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
  }

  /**
   * {@link _DatabaseBackupController.deleteDatabaseBackup}
   */
  @Delete('admin/database-backups')
  @MaintenanceRoute()
  async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
    return this.service.deleteBackup(dto.backups);
  }

  /**
   * {@link _DatabaseBackupController.uploadDatabaseBackup}
   */
  @Post('admin/database-backups/upload')
  @MaintenanceRoute()
  @UseInterceptors(FileInterceptor('file'))
  uploadDatabaseBackup(
    @UploadedFile()
    file: Express.Multer.File,
  ): Promise<void> {
    return this.service.uploadBackup(file);
  }

  @Get('admin/maintenance/status')
  maintenanceStatus(@Req() request: Request): Promise<MaintenanceStatusResponseDto> {
    return this.service.status(request.cookies[ImmichCookie.MaintenanceToken]);
  }

  @Get('admin/maintenance/detect-install')
  detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
    return this.service.detectPriorInstall();
  }

  @Post('admin/maintenance/login')
  async maintenanceLogin(
    @Req() request: Request,
@@ -35,9 +126,7 @@ export class MaintenanceWorkerController {

  @Post('admin/maintenance')
  @MaintenanceRoute()
-  async setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): Promise<void> {
-    if (dto.action === MaintenanceAction.End) {
-      await this.service.endMaintenance();
-    }
+  setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): void {
+    void this.service.setAction(dto);
  }
}
@@ -1,25 +1,51 @@
-import { UnauthorizedException } from '@nestjs/common';
+import { BadRequestException, UnauthorizedException } from '@nestjs/common';
import { SignJWT } from 'jose';
-import { SystemMetadataKey } from 'src/enum';
+import { DateTime } from 'luxon';
+import { PassThrough, Readable } from 'node:stream';
+import { StorageCore } from 'src/cores/storage.core';
+import { MaintenanceAction, StorageFolder, SystemMetadataKey } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
-import { automock, getMocks, ServiceMocks } from 'test/utils';
+import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';

function* mockData() {
  yield '';
}

describe(MaintenanceWorkerService.name, () => {
  let sut: MaintenanceWorkerService;
  let mocks: ServiceMocks;
-  let maintenanceWorkerRepositoryMock: MaintenanceWebsocketRepository;
+  let maintenanceWebsocketRepositoryMock: AutoMocked<MaintenanceWebsocketRepository>;
+  let maintenanceHealthRepositoryMock: AutoMocked<MaintenanceHealthRepository>;

  beforeEach(() => {
    mocks = getMocks();
-    maintenanceWorkerRepositoryMock = automock(MaintenanceWebsocketRepository, { args: [mocks.logger], strict: false });
+    maintenanceWebsocketRepositoryMock = automock(MaintenanceWebsocketRepository, {
+      args: [mocks.logger],
+      strict: false,
+    });
+    maintenanceHealthRepositoryMock = automock(MaintenanceHealthRepository, {
+      args: [mocks.logger],
+      strict: false,
+    });

    sut = new MaintenanceWorkerService(
      mocks.logger as never,
      mocks.app,
      mocks.config,
      mocks.systemMetadata as never,
-      maintenanceWorkerRepositoryMock,
+      maintenanceWebsocketRepositoryMock,
+      maintenanceHealthRepositoryMock,
      mocks.storage as never,
      mocks.process,
      mocks.database as never,
    );

    sut.mock({
      active: true,
      action: MaintenanceAction.Start,
    });
  });

  it('should work', () => {
@@ -27,14 +53,43 @@ describe(MaintenanceWorkerService.name, () => {
  });

  describe('getSystemConfig', () => {
-    it('should respond the server is in maintenance mode', async () => {
-      await expect(sut.getSystemConfig()).resolves.toMatchObject(
+    it('should respond the server is in maintenance mode', () => {
+      expect(sut.getSystemConfig()).toMatchObject(
        expect.objectContaining({
          maintenanceMode: true,
        }),
      );

-      expect(mocks.systemMetadata.get).toHaveBeenCalled();
+      expect(mocks.systemMetadata.get).toHaveBeenCalledTimes(0);
    });
  });

  describe.skip('ssr');
  describe.skip('detectMediaLocation');

  describe('setStatus', () => {
    it('should broadcast status', () => {
      sut.setStatus({
        active: true,
        action: MaintenanceAction.Start,
        task: 'abc',
        error: 'def',
      });

      expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalled();
      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledTimes(2);
      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: 'start',
        task: 'abc',
        error: 'def',
      });
      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'public', {
        active: true,
        action: 'start',
        task: 'abc',
        error: 'Something went wrong, see logs!',
      });
    });
  });

@@ -42,7 +97,14 @@ describe(MaintenanceWorkerService.name, () => {
  const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;

  it('should log a valid login URL', async () => {
-    mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
+    mocks.systemMetadata.get.mockResolvedValue({
+      isMaintenanceMode: true,
+      secret: 'secret',
+      action: {
+        action: MaintenanceAction.Start,
+      },
+    });

    await expect(sut.logSecret()).resolves.toBeUndefined();
    expect(mocks.logger.log).toHaveBeenCalledWith(expect.stringMatching(RE_LOGIN_URL));

@@ -63,7 +125,13 @@ describe(MaintenanceWorkerService.name, () => {
  });

  it('should parse cookie properly', async () => {
-    mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
+    mocks.systemMetadata.get.mockResolvedValue({
+      isMaintenanceMode: true,
+      secret: 'secret',
+      action: {
+        action: MaintenanceAction.Start,
+      },
+    });

    await expect(
      sut.authenticate({
@@ -73,13 +141,102 @@ describe(MaintenanceWorkerService.name, () => {
    });
  });

  describe('status', () => {
    beforeEach(() => {
      sut.mock({
        active: true,
        action: MaintenanceAction.Start,
        error: 'secret value!',
      });
    });

    it('generates private status', async () => {
      const jwt = await new SignJWT({ _mockValue: true })
        .setProtectedHeader({ alg: 'HS256' })
        .setIssuedAt()
        .setExpirationTime('4h')
        .sign(new TextEncoder().encode('secret'));

      await expect(sut.status(jwt)).resolves.toEqual(
        expect.objectContaining({
          error: 'secret value!',
        }),
      );
    });

    it('generates public status', async () => {
      await expect(sut.status()).resolves.toEqual(
        expect.objectContaining({
          error: 'Something went wrong, see logs!',
        }),
      );
    });
  });
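These tests mint tokens with jose's `SignJWT`; the service side presumably validates them with the matching `jwtVerify` call. A minimal round-trip sketch under that assumption, using the same HS256 shared-secret pattern as the tests; the secret and claim names are illustrative:

```typescript
import { SignJWT, jwtVerify } from 'jose';

// Round-trip sketch of the HS256 maintenance token used in the tests above.
async function mintAndVerify(secret: string): Promise<void> {
  const key = new TextEncoder().encode(secret);

  const jwt = await new SignJWT({ username: 'admin' })
    .setProtectedHeader({ alg: 'HS256' })
    .setIssuedAt()
    .setExpirationTime('4h')
    .sign(key);

  // Throws if the signature is invalid or the token is expired, which is
  // the behavior the 'should fail with expired JWT' case exercises.
  const { payload } = await jwtVerify(jwt, key);
  console.log(payload.username); // 'admin'
}
```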
  describe('detectPriorInstall', () => {
    it('generates a report about a prior installation', async () => {
      mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
      mocks.storage.readFile.mockResolvedValue(undefined as never);
      mocks.storage.overwriteFile.mockRejectedValue(undefined as never);

      await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
        {
          "storage": [
            {
              "files": 2,
              "folder": "encoded-video",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "library",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "upload",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "profile",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "thumbs",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "backups",
              "readable": true,
              "writable": false,
            },
          ],
        }
      `);
    });
  });

  describe('login', () => {
    it('should fail without token', async () => {
      await expect(sut.login()).rejects.toThrowError(new UnauthorizedException('Missing JWT Token'));
    });

    it('should fail with expired JWT', async () => {
-      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
+      mocks.systemMetadata.get.mockResolvedValue({
+        isMaintenanceMode: true,
+        secret: 'secret',
+        action: {
+          action: MaintenanceAction.Start,
+        },
+      });

      const jwt = await new SignJWT({})
        .setProtectedHeader({ alg: 'HS256' })
@@ -91,7 +248,13 @@ describe(MaintenanceWorkerService.name, () => {
    });

    it('should succeed with valid JWT', async () => {
-      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
+      mocks.systemMetadata.get.mockResolvedValue({
+        isMaintenanceMode: true,
+        secret: 'secret',
+        action: {
+          action: MaintenanceAction.Start,
+        },
+      });

      const jwt = await new SignJWT({ _mockValue: true })
        .setProtectedHeader({ alg: 'HS256' })
@@ -107,22 +270,275 @@ describe(MaintenanceWorkerService.name, () => {
    });
  });

-  describe('endMaintenance', () => {
+  describe.skip('setAction'); // just calls setStatus+runAction

  /**
   * Actions
   */

  describe('action: start', () => {
    it('should not do anything', async () => {
      await sut.runAction({
        action: MaintenanceAction.Start,
      });

      expect(mocks.logger.log).toHaveBeenCalledTimes(0);
    });
  });

  describe('action: end', () => {
    it('should set maintenance mode', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });
-      await expect(sut.endMaintenance()).resolves.toBeUndefined();
+      await sut.runAction({
+        action: MaintenanceAction.End,
+      });

      expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
        isMaintenanceMode: false,
      });

-      expect(maintenanceWorkerRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
+      expect(maintenanceWebsocketRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
        isMaintenanceMode: false,
      });

-      expect(maintenanceWorkerRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
+      expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
        isMaintenanceMode: false,
      });
    });
  });

  describe('action: restore database', () => {
    beforeEach(() => {
      mocks.database.tryLock.mockResolvedValueOnce(true);

      mocks.storage.readdir.mockResolvedValue([]);
      mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
      mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
      mocks.process.fork.mockImplementation(() => mockSpawn(0, 'Immich Server is listening', ''));
      mocks.storage.rename.mockResolvedValue();
      mocks.storage.unlink.mockResolvedValue();
      mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
      mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
      mocks.storage.createGzip.mockReturnValue(new PassThrough());
      mocks.storage.createGunzip.mockReturnValue(new PassThrough());
    });

    it('should update maintenance mode state', async () => {
      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'filename',
      });

      expect(mocks.database.tryLock).toHaveBeenCalled();
      expect(mocks.logger.log).toHaveBeenCalledWith('Running maintenance action restore_database');

      expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
        isMaintenanceMode: true,
        secret: 'secret',
        action: {
          action: 'start',
        },
      });
    });

    it('should fail to restore invalid backup', async () => {
      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'filename',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        error: 'Error: Invalid backup file format!',
        task: 'error',
      });
    });

    it('should successfully run a backup', async () => {
      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'development-filename.sql',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith(
        'MaintenanceStatusV1',
        expect.any(String),
        {
          active: true,
          action: MaintenanceAction.RestoreDatabase,
          task: 'ready',
          progress: expect.any(Number),
        },
      );

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
        'MaintenanceStatusV1',
        expect.any(String),
        {
          active: true,
          action: 'end',
        },
      );

      expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
      expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);
    });

    it('should fail if backup creation fails', async () => {
      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));

      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'development-filename.sql',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        error: 'Error: pg_dump non-zero exit code (1)\nerror',
        task: 'error',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
        'MaintenanceStatusV1',
        expect.any(String),
        expect.objectContaining({
          task: 'error',
        }),
      );
    });

    it('should fail if restore itself fails', async () => {
      mocks.process.spawnDuplexStream
        .mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''))
        .mockReturnValueOnce(mockDuplex('gzip', 0, 'data', ''))
        .mockReturnValueOnce(mockDuplex('psql', 1, '', 'error'));

      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'development-filename.sql',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        error: 'Error: psql non-zero exit code (1)\nerror',
        task: 'error',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
        'MaintenanceStatusV1',
        expect.any(String),
        expect.objectContaining({
          task: 'error',
        }),
      );
    });

    it('should rollback if database migrations fail', async () => {
      mocks.database.runMigrations.mockRejectedValue(new Error('Migrations Error'));

      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'development-filename.sql',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        error: 'Error: Migrations Error',
        task: 'error',
      });

      expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalledTimes(0);
      expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
    });

    it('should rollback if API healthcheck fails', async () => {
      maintenanceHealthRepositoryMock.checkApiHealth.mockRejectedValue(new Error('Health Error'));

      await sut.runAction({
        action: MaintenanceAction.RestoreDatabase,
        restoreBackupFilename: 'development-filename.sql',
      });

      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        error: 'Error: Health Error',
        task: 'error',
      });

      expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
      expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
    });
  });

  /**
   * Backups
   */

  describe('listBackups', () => {
    it('should give us all backups', async () => {
      mocks.storage.readdir.mockResolvedValue([
        `immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
        `immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
        'immich-db-backup-1753789649000.sql.gz',
        `immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
      ]);
      mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);

      await expect(sut.listBackups()).resolves.toMatchObject({
        backups: [
          { filename: 'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
          { filename: 'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
          { filename: 'immich-db-backup-1753789649000.sql.gz', filesize: 1024 },
        ],
      });
    });
  });
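The `listBackups` fixtures above encode the backup naming convention: `immich-db-backup-<yyyyLLdd'T'HHmmss>-v<version>-pg<pgversion>.sql.gz`, with an older epoch-millisecond form still accepted and `.tmp` in-progress dumps excluded. A hedged sketch of a parser for that convention; the regex is inferred from these fixtures, not taken from the implementation in `src/utils/database-backups`:

```typescript
// Inferred from the test fixtures above; the real parsing may differ.
type ParsedBackup = { filename: string; createdAt: Date };

const BACKUP_RE = /^immich-db-backup-(\d{8}T\d{6}|\d{13})(?:-v[^-]+-pg[\d.]+)?\.sql\.gz$/;

function parseBackupFilename(filename: string): ParsedBackup | null {
  const match = BACKUP_RE.exec(filename);
  if (!match) {
    return null; // e.g. '.tmp' partial dumps fall out here
  }
  const stamp = match[1];
  const createdAt =
    stamp.length === 13
      ? new Date(Number(stamp)) // legacy epoch-milliseconds form
      : new Date(
          `${stamp.slice(0, 4)}-${stamp.slice(4, 6)}-${stamp.slice(6, 8)}` +
            `T${stamp.slice(9, 11)}:${stamp.slice(11, 13)}:${stamp.slice(13, 15)}Z`,
        );
  return { filename, createdAt };
}

// parseBackupFilename('immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz')
// => { filename: '…', createdAt: 2025-07-29T11:01:16.000Z }
```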
|
||||
|
||||
describe('deleteBackup', () => {
|
||||
it('should reject invalid file names', async () => {
|
||||
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
|
||||
new BadRequestException('Invalid backup name!'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should unlink the target file', async () => {
|
||||
await sut.deleteBackup(['filename.sql']);
|
||||
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.storage.unlink).toHaveBeenCalledWith(
|
||||
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('uploadBackup', () => {
|
||||
it('should reject invalid file names', async () => {
|
||||
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
|
||||
new BadRequestException('Invalid backup name!'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should write file', async () => {
|
||||
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
|
||||
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
|
||||
});
|
||||
});
|
||||
|
||||
describe('downloadBackup', () => {
|
||||
it('should reject invalid file names', () => {
|
||||
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
|
||||
});
|
||||
|
||||
it('should get backup path', () => {
|
||||
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
|
||||
expect.objectContaining({
|
||||
path: '/data/backups/hello.sql.gz',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,19 +4,41 @@ import { NextFunction, Request, Response } from 'express';
import { jwtVerify } from 'jose';
import { readFileSync } from 'node:fs';
import { IncomingHttpHeaders } from 'node:http';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { ImmichCookie, SystemMetadataKey } from 'src/enum';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import {
  MaintenanceAuthDto,
  MaintenanceDetectInstallResponseDto,
  MaintenanceStatusResponseDto,
  SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { ServerConfigDto, ServerVersionResponseDto } from 'src/dtos/server.dto';
import { DatabaseLock, ImmichCookie, MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
import { AppRepository } from 'src/repositories/app.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { type ApiService as _ApiService } from 'src/services/api.service';
import { type BaseService as _BaseService } from 'src/services/base.service';
import { type DatabaseBackupService as _DatabaseBackupService } from 'src/services/database-backup.service';
import { type ServerService as _ServerService } from 'src/services/server.service';
import { type VersionService as _VersionService } from 'src/services/version.service';
import { MaintenanceModeState } from 'src/types';
import { getConfig } from 'src/utils/config';
import { createMaintenanceLoginUrl } from 'src/utils/maintenance';
import {
  deleteDatabaseBackup,
  downloadDatabaseBackup,
  listDatabaseBackups,
  restoreDatabaseBackup,
  uploadDatabaseBackup,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';
import { createMaintenanceLoginUrl, detectPriorInstall } from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';

/**
@@ -24,16 +46,51 @@ import { getExternalDomain } from 'src/utils/misc';
 */
@Injectable()
export class MaintenanceWorkerService {
  #secret: string | null = null;
  #status: MaintenanceStatusResponseDto = {
    active: true,
    action: MaintenanceAction.Start,
  };

  constructor(
    protected logger: LoggingRepository,
    private appRepository: AppRepository,
    private configRepository: ConfigRepository,
    private systemMetadataRepository: SystemMetadataRepository,
    private maintenanceWorkerRepository: MaintenanceWebsocketRepository,
    private maintenanceWebsocketRepository: MaintenanceWebsocketRepository,
    private maintenanceHealthRepository: MaintenanceHealthRepository,
    private storageRepository: StorageRepository,
    private processRepository: ProcessRepository,
    private databaseRepository: DatabaseRepository,
  ) {
    this.logger.setContext(this.constructor.name);
  }

  mock(status: MaintenanceStatusResponseDto) {
    this.#secret = 'secret';
    this.#status = status;
  }

  async init() {
    const state = (await this.systemMetadataRepository.get(
      SystemMetadataKey.MaintenanceMode,
    )) as MaintenanceModeState & { isMaintenanceMode: true };

    this.#secret = state.secret;
    this.#status = {
      active: true,
      action: state.action.action,
    };

    StorageCore.setMediaLocation(this.detectMediaLocation());

    this.maintenanceWebsocketRepository.setAuthFn(async (client) => this.authenticate(client.request.headers));
    this.maintenanceWebsocketRepository.setStatusUpdateFn((status) => (this.#status = status));

    await this.logSecret();
    void this.runAction(state.action);
  }

  /**
   * {@link _BaseService.configRepos}
   */
@@ -55,22 +112,17 @@ export class MaintenanceWorkerService {
  /**
   * {@link _ServerService.getSystemConfig}
   */
  async getSystemConfig() {
    const config = await this.getConfig({ withCache: false });

  getSystemConfig() {
    return {
      loginPageMessage: config.server.loginPageMessage,
      trashDays: config.trash.days,
      userDeleteDelay: config.user.deleteDelay,
      oauthButtonText: config.oauth.buttonText,
      isInitialized: true,
      isOnboarded: true,
      externalDomain: config.server.externalDomain,
      publicUsers: config.server.publicUsers,
      mapDarkStyleUrl: config.map.darkStyle,
      mapLightStyleUrl: config.map.lightStyle,
      maintenanceMode: true,
    };
    } as ServerConfigDto;
  }

  /**
   * {@link _VersionService.getVersion}
   */
  getVersion() {
    return ServerVersionResponseDto.fromSemVer(serverVersion);
  }

  /**
@@ -106,12 +158,99 @@ export class MaintenanceWorkerService {
    };
  }

  private async secret(): Promise<string> {
    const state = (await this.systemMetadataRepository.get(SystemMetadataKey.MaintenanceMode)) as {
      secret: string;
    };
  /**
   * {@link _StorageService.detectMediaLocation}
   */
  detectMediaLocation(): string {
    const envData = this.configRepository.getEnv();
    if (envData.storage.mediaLocation) {
      return envData.storage.mediaLocation;
    }

    return state.secret;
    const targets: string[] = [];
    const candidates = ['/data', '/usr/src/app/upload'];

    for (const candidate of candidates) {
      const exists = this.storageRepository.existsSync(candidate);
      if (exists) {
        targets.push(candidate);
      }
    }

    if (targets.length === 1) {
      return targets[0];
    }

    return '/usr/src/app/upload';
  }

  /**
   * {@link _DatabaseBackupService.listBackups}
   */
  async listBackups(): Promise<{ backups: { filename: string; filesize: number }[] }> {
    const backups = await listDatabaseBackups(this.backupRepos);
    return { backups };
  }

  /**
   * {@link _DatabaseBackupService.deleteBackup}
   */
  async deleteBackup(files: string[]): Promise<void> {
    return deleteDatabaseBackup(this.backupRepos, files);
  }

  /**
   * {@link _DatabaseBackupService.uploadBackup}
   */
  async uploadBackup(file: Express.Multer.File): Promise<void> {
    return uploadDatabaseBackup(this.backupRepos, file);
  }

  /**
   * {@link _DatabaseBackupService.downloadBackup}
   */
  downloadBackup(fileName: string): ImmichFileResponse {
    return downloadDatabaseBackup(fileName);
  }

  private get secret() {
    if (!this.#secret) {
      throw new Error('Secret is not initialised yet.');
    }

    return this.#secret;
  }

  private get backupRepos() {
    return {
      logger: this.logger,
      storage: this.storageRepository,
      config: this.configRepository,
      process: this.processRepository,
      database: this.databaseRepository,
      health: this.maintenanceHealthRepository,
    };
  }

  private getStatus(): MaintenanceStatusResponseDto {
    return this.#status;
  }

  private getPublicStatus(): MaintenanceStatusResponseDto {
    const state = structuredClone(this.#status);

    if (state.error) {
      state.error = 'Something went wrong, see logs!';
    }

    return state;
  }

  setStatus(status: MaintenanceStatusResponseDto): void {
    this.#status = status;
    this.maintenanceWebsocketRepository.serverSend('MaintenanceStatus', status);
    this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'private', status);
    this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'public', this.getPublicStatus());
  }

  async logSecret(): Promise<void> {
@@ -123,7 +262,7 @@ export class MaintenanceWorkerService {
      {
        username: 'immich-admin',
      },
      await this.secret(),
      this.secret,
    );

    this.logger.log(`\n\n🚧 Immich is in maintenance mode, you can log in using the following URL:\n${url}\n`);
@@ -134,28 +273,115 @@ export class MaintenanceWorkerService {
    return this.login(jwtToken);
  }

  async status(potentiallyJwt?: string): Promise<MaintenanceStatusResponseDto> {
    try {
      await this.login(potentiallyJwt);
      return this.getStatus();
    } catch {
      return this.getPublicStatus();
    }
  }

  detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
    return detectPriorInstall(this.storageRepository);
  }

  async login(jwt?: string): Promise<MaintenanceAuthDto> {
    if (!jwt) {
      throw new UnauthorizedException('Missing JWT Token');
    }

    const secret = await this.secret();

    try {
      const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(secret));
      const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(this.secret));
      return result.payload;
    } catch {
      throw new UnauthorizedException('Invalid JWT Token');
    }
  }

  async endMaintenance(): Promise<void> {
  async setAction(action: SetMaintenanceModeDto) {
    this.setStatus({
      active: true,
      action: action.action,
    });

    await this.runAction(action);
  }

  async runAction(action: SetMaintenanceModeDto) {
    switch (action.action) {
      case MaintenanceAction.Start: {
        return;
      }
      case MaintenanceAction.End: {
        return this.endMaintenance();
      }
      case MaintenanceAction.SelectDatabaseRestore: {
        return;
      }
    }

    const lock = await this.databaseRepository.tryLock(DatabaseLock.MaintenanceOperation);
    if (!lock) {
      return;
    }

    this.logger.log(`Running maintenance action ${action.action}`);

    await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
      isMaintenanceMode: true,
      secret: this.secret,
      action: {
        action: MaintenanceAction.Start,
      },
    });

    try {
      if (!action.restoreBackupFilename) {
        throw new Error("Expected restoreBackupFilename but it's missing!");
      }

      await this.restoreBackup(action.restoreBackupFilename);
    } catch (error) {
      this.logger.error(`Encountered error running action: ${error}`);
      this.setStatus({
        active: true,
        action: action.action,
        task: 'error',
        error: '' + error,
      });
    }
  }

  private async restoreBackup(filename: string): Promise<void> {
    this.setStatus({
      active: true,
      action: MaintenanceAction.RestoreDatabase,
      task: 'ready',
      progress: 0,
    });

    await restoreDatabaseBackup(this.backupRepos, filename, (task, progress) =>
      this.setStatus({
        active: true,
        action: MaintenanceAction.RestoreDatabase,
        progress,
        task,
      }),
    );

    await this.setAction({
      action: MaintenanceAction.End,
    });
  }

  private async endMaintenance(): Promise<void> {
    const state: MaintenanceModeState = { isMaintenanceMode: false as const };
    await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, state);

    // => corresponds to notification.service.ts#onAppRestart
    this.maintenanceWorkerRepository.clientBroadcast('AppRestartV1', state);
    this.maintenanceWorkerRepository.serverSend('AppRestart', state);
    this.maintenanceWebsocketRepository.clientBroadcast('AppRestartV1', state);
    this.maintenanceWebsocketRepository.serverSend('AppRestart', state);
    this.appRepository.exitApp();
  }
}
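
A note on the diff above: the async secret() lookup, which re-read system metadata on every call, is replaced by a #secret field that init() populates once and a throwing getter exposes. The signing side of the maintenance JWT does not appear in this diff; presumably createMaintenanceLoginUrl signs with the same shared secret that login() verifies. A minimal sketch of that round trip with jose (the payload shape here is illustrative, not the actual DTO):

import { SignJWT, jwtVerify } from 'jose';

const key = new TextEncoder().encode('secret');

// sign, roughly what a login-URL helper would do
const jwt = await new SignJWT({ username: 'immich-admin' })
  .setProtectedHeader({ alg: 'HS256' })
  .sign(key);

// verify, as login() does above
const { payload } = await jwtVerify(jwt, key);
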
@@ -29,7 +29,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -37,20 +38,6 @@ select
        and "asset_file"."type" = $1
    ) as agg
  ) as "files",
  (
    select
      coalesce(json_agg(agg), '[]')
    from
      (
        select
          "tag"."value"
        from
          "tag"
          inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
        where
          "asset"."id" = "tag_asset"."assetId"
      ) as agg
  ) as "tags",
  to_json("asset_exif") as "exifInfo"
from
  "asset"
@@ -72,7 +59,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -99,7 +87,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -145,7 +134,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -174,7 +164,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -244,7 +235,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -269,7 +261,8 @@ where
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -318,7 +311,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -357,7 +351,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -444,7 +439,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -536,7 +532,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where
@@ -575,7 +572,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where

@@ -286,7 +286,8 @@ select
      select
        "asset_file"."id",
        "asset_file"."path",
        "asset_file"."type"
        "asset_file"."type",
        "asset_file"."isEdited"
      from
        "asset_file"
      where

@@ -43,6 +43,7 @@ select
  "asset_exif"."projectionType",
  "asset_exif"."rating",
  "asset_exif"."state",
  "asset_exif"."tags",
  "asset_exif"."timeZone"
from
  "asset_exif"
@@ -127,6 +128,7 @@ select
  "asset_exif"."projectionType",
  "asset_exif"."rating",
  "asset_exif"."state",
  "asset_exif"."tags",
  "asset_exif"."timeZone"
from
  "asset_exif"

@@ -71,6 +71,7 @@ select
  "asset"."libraryId",
  "asset"."width",
  "asset"."height",
  "asset"."isEdited",
  "album_asset"."updateId"
from
  "album_asset" as "album_asset"
@@ -103,6 +104,7 @@ select
  "asset"."libraryId",
  "asset"."width",
  "asset"."height",
  "asset"."isEdited",
  "asset"."updateId"
from
  "asset" as "asset"
@@ -140,7 +142,8 @@ select
  "asset"."stackId",
  "asset"."libraryId",
  "asset"."width",
  "asset"."height"
  "asset"."height",
  "asset"."isEdited"
from
  "album_asset" as "album_asset"
  inner join "asset" on "asset"."id" = "album_asset"."assetId"
@@ -456,6 +459,7 @@ select
  "asset"."libraryId",
  "asset"."width",
  "asset"."height",
  "asset"."isEdited",
  "asset"."updateId"
from
  "asset" as "asset"
@@ -751,6 +755,7 @@ select
  "asset"."libraryId",
  "asset"."width",
  "asset"."height",
  "asset"."isEdited",
  "asset"."updateId"
from
  "asset" as "asset"
@@ -802,6 +807,7 @@ select
  "asset"."libraryId",
  "asset"."width",
  "asset"."height",
  "asset"."isEdited",
  "asset"."updateId"
from
  "asset" as "asset"

@@ -1,6 +1,5 @@
import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { Asset, columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
@@ -42,15 +41,6 @@ export class AssetJobRepository {
      .where('asset.id', '=', asUuid(id))
      .select(['id', 'originalPath'])
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .select((eb) =>
        jsonArrayFrom(
          eb
            .selectFrom('tag')
            .select(['tag.value'])
            .innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
            .whereRef('asset.id', '=', 'tag_asset.assetId'),
        ).as('tags'),
      )
      .$call(withExifInner)
      .limit(1)
      .executeTakeFirst();

@@ -178,6 +178,7 @@ export class AssetRepository {
      bitsPerSample: ref('bitsPerSample'),
      rating: ref('rating'),
      fps: ref('fps'),
      tags: ref('tags'),
      lockedProperties:
        lockedPropertiesBehavior === 'append'
          ? distinctLocked(eb, exif.lockedProperties ?? null)
@@ -903,20 +904,22 @@ export class AssetRepository {
      .execute();
  }

  async upsertFile(file: Pick<Insertable<AssetFileTable>, 'assetId' | 'path' | 'type'>): Promise<void> {
  async upsertFile(file: Pick<Insertable<AssetFileTable>, 'assetId' | 'path' | 'type' | 'isEdited'>): Promise<void> {
    const value = { ...file, assetId: asUuid(file.assetId) };
    await this.db
      .insertInto('asset_file')
      .values(value)
      .onConflict((oc) =>
        oc.columns(['assetId', 'type']).doUpdateSet((eb) => ({
        oc.columns(['assetId', 'type', 'isEdited']).doUpdateSet((eb) => ({
          path: eb.ref('excluded.path'),
        })),
      )
      .execute();
  }

  async upsertFiles(files: Pick<Insertable<AssetFileTable>, 'assetId' | 'path' | 'type'>[]): Promise<void> {
  async upsertFiles(
    files: Pick<Insertable<AssetFileTable>, 'assetId' | 'path' | 'type' | 'isEdited'>[],
  ): Promise<void> {
    if (files.length === 0) {
      return;
    }
@@ -926,7 +929,7 @@ export class AssetRepository {
      .insertInto('asset_file')
      .values(values)
      .onConflict((oc) =>
        oc.columns(['assetId', 'type']).doUpdateSet((eb) => ({
        oc.columns(['assetId', 'type', 'isEdited']).doUpdateSet((eb) => ({
          path: eb.ref('excluded.path'),
        })),
      )
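
The widened onConflict target above is load-bearing: Postgres only accepts an ON CONFLICT column list that exactly matches a unique index, and a migration later in this diff changes the unique constraint on asset_file from ("assetId", "type") to ("assetId", "type", "isEdited"). A rough sketch of the SQL Kysely emits for the upsert (values illustrative):

// insert into "asset_file" ("assetId", "path", "type", "isEdited")
// values ($1, $2, $3, $4)
// on conflict ("assetId", "type", "isEdited")
// do update set "path" = "excluded"."path"
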
server/src/repositories/process.repository.spec.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
import { ChildProcessWithoutNullStreams } from 'node:child_process';
import { Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { ProcessRepository } from 'src/repositories/process.repository';

function* data() {
  yield 'Hello, world!';
}

describe(ProcessRepository.name, () => {
  let sut: ProcessRepository;
  let sink: Writable;

  beforeAll(() => {
    sut = new ProcessRepository();
  });

  beforeEach(() => {
    sink = new Writable({
      write(_chunk, _encoding, callback) {
        callback();
      },

      final(callback) {
        callback();
      },
    });
  });

  describe('createSpawnDuplexStream', () => {
    it('should work (drain to stdout)', async () => {
      const process = sut.spawnDuplexStream('bash', ['-c', 'exit 0']);
      await pipeline(process, sink);
    });

    it('should throw on non-zero exit code', async () => {
      const process = sut.spawnDuplexStream('bash', ['-c', 'echo "error message" >&2; exit 1']);
      await expect(pipeline(process, sink)).rejects.toThrowErrorMatchingInlineSnapshot(`
        [Error: bash non-zero exit code (1)
        error message
        ]
      `);
    });

    it('should accept stdin / output stdout', async () => {
      let output = '';
      const sink = new Writable({
        write(chunk, _encoding, callback) {
          output += chunk;
          callback();
        },

        final(callback) {
          callback();
        },
      });

      const echoProcess = sut.spawnDuplexStream('cat');
      await pipeline(Readable.from(data()), echoProcess, sink);
      expect(output).toBe('Hello, world!');
    });

    it('should drain stdin on process exit', async () => {
      let resolve1: () => void;
      let resolve2: () => void;
      const promise1 = new Promise<void>((r) => (resolve1 = r));
      const promise2 = new Promise<void>((r) => (resolve2 = r));

      async function* data() {
        yield 'Hello, world!';
        await promise1;
        await promise2;
        yield 'Write after stdin close / process exit!';
      }

      const process = sut.spawnDuplexStream('bash', ['-c', 'exit 0']);

      const realProcess = (process as never as { _process: ChildProcessWithoutNullStreams })._process;
      realProcess.on('close', () => setImmediate(() => resolve1()));
      realProcess.stdin.on('close', () => setImmediate(() => resolve2()));

      await pipeline(Readable.from(data()), process);
    });
  });
});

@@ -1,9 +1,110 @@
import { Injectable } from '@nestjs/common';
import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
import { ChildProcessWithoutNullStreams, fork, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
import { Duplex } from 'node:stream';

@Injectable()
export class ProcessRepository {
  spawn(command: string, args: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
  spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
    return spawn(command, args, options);
  }

  spawnDuplexStream(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): Duplex {
    let stdinClosed = false;
    let drainCallback: undefined | (() => void);

    const process = this.spawn(command, args, options);
    const duplex = new Duplex({
      // duplex -> stdin
      write(chunk, encoding, callback) {
        // drain the input if process dies
        if (stdinClosed) {
          return callback();
        }

        // handle stream backpressure
        if (process.stdin.write(chunk, encoding)) {
          callback();
        } else {
          drainCallback = callback;
          process.stdin.once('drain', () => {
            drainCallback = undefined;
            callback();
          });
        }
      },

      read() {
        // no-op
      },

      final(callback) {
        if (stdinClosed) {
          callback();
        } else {
          process.stdin.end(callback);
        }
      },
    });

    // stdout -> duplex
    process.stdout.on('data', (chunk) => {
      // handle stream backpressure
      if (!duplex.push(chunk)) {
        process.stdout.pause();
      }
    });

    duplex.on('resume', () => process.stdout.resume());

    // end handling
    let stdoutClosed = false;
    function close(error?: Error) {
      stdinClosed = true;

      if (error) {
        duplex.destroy(error);
      } else if (stdoutClosed && typeof process.exitCode === 'number') {
        duplex.push(null);
      }
    }

    process.stdout.on('close', () => {
      stdoutClosed = true;
      close();
    });

    // error handling
    process.on('error', close);
    process.stdout.on('error', close);
    process.stdin.on('error', (error) => {
      if ((error as { code?: 'EPIPE' })?.code === 'EPIPE') {
        try {
          drainCallback!();
        } catch (error) {
          close(error as Error);
        }
      } else {
        close(error);
      }
    });

    let stderr = '';
    process.stderr.on('data', (chunk) => (stderr += chunk));

    process.on('exit', (code) => {
      console.info(`${command} exited (${code})`);

      if (code === 0) {
        close();
      } else {
        close(new Error(`${command} non-zero exit code (${code})\n${stderr}`));
      }
    });

    return Object.assign(duplex, { _process: process });
  }

  fork(...args: Parameters<typeof fork>): ReturnType<typeof fork> {
    return fork(...args);
  }
}
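
The Duplex wrapper above lets callers treat a child process as one stage of a stream pipeline: writes go to stdin with backpressure, stdout is re-emitted with backpressure, and a non-zero exit code surfaces as a destroyed stream. A minimal usage sketch mirroring how the backup code appears to use it (command and paths illustrative):

import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';

async function dumpDatabase(processRepository: ProcessRepository) {
  // pg_dump stdout -> gzip -> file; pipeline rejects if either
  // child exits non-zero, because close() destroys the duplex.
  await pipeline(
    processRepository.spawnDuplexStream('pg_dump', ['--dbname', 'postgresql://...']),
    processRepository.spawnDuplexStream('gzip', ['--rsyncable']),
    createWriteStream('/data/backups/backup.sql.gz.tmp'),
  );
}
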
@@ -5,7 +5,8 @@ import { escapePath, glob, globStream } from 'fast-glob';
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync, ReadOptionsWithBuffer } from 'node:fs';
import fs from 'node:fs/promises';
import path from 'node:path';
import { Readable, Writable } from 'node:stream';
import { PassThrough, Readable, Writable } from 'node:stream';
import { createGunzip, createGzip } from 'node:zlib';
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { mimeTypes } from 'src/utils/mime-types';
@@ -93,6 +94,14 @@ export class StorageRepository {
    return { stream: archive, addFile, finalize };
  }

  createGzip(): PassThrough {
    return createGzip();
  }

  createGunzip(): PassThrough {
    return createGunzip();
  }

  createPlainReadStream(filepath: string): Readable {
    return createReadStream(filepath);
  }
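
A plausible reading of these additions: services obtain gzip/gunzip streams from the repository instead of importing node:zlib directly, which keeps them mockable in unit tests and lets them compose with spawnDuplexStream. A sketch of a decompress-and-restore flow under that assumption (command and wiring illustrative):

import { pipeline } from 'node:stream/promises';

async function restoreBackup(storage: StorageRepository, proc: ProcessRepository, file: string) {
  await pipeline(
    storage.createPlainReadStream(file), // backup .sql.gz from disk
    storage.createGunzip(),              // .gz -> plain SQL
    proc.spawnDuplexStream('psql', ['--dbname', 'postgresql://...']), // feed SQL to psql
  );
}
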
@@ -37,7 +37,7 @@ export interface ClientEventMap {

  AssetUploadReadyV1: [{ asset: SyncAssetV1; exif: SyncAssetExifV1 }];
  AppRestartV1: [AppRestartEvent];
  AssetEditReadyV1: [{ assetId: string }];
  AssetEditReadyV1: [{ asset: SyncAssetV1 }];
}

export type AuthFn = (client: Socket) => Promise<AuthDto>;

@@ -255,3 +255,34 @@ export const asset_face_audit = registerFunction({
  RETURN NULL;
END`,
});

export const asset_edit_insert = registerFunction({
  name: 'asset_edit_insert',
  returnType: 'TRIGGER',
  language: 'PLPGSQL',
  body: `
BEGIN
  UPDATE asset
  SET "isEdited" = true
  FROM inserted_edit
  WHERE asset.id = inserted_edit."assetId" AND NOT asset."isEdited";
  RETURN NULL;
END
`,
});

export const asset_edit_delete = registerFunction({
  name: 'asset_edit_delete',
  returnType: 'TRIGGER',
  language: 'PLPGSQL',
  body: `
BEGIN
  UPDATE asset
  SET "isEdited" = false
  FROM deleted_edit
  WHERE asset.id = deleted_edit."assetId" AND asset."isEdited"
    AND NOT EXISTS (SELECT FROM asset_edit edit WHERE edit."assetId" = asset.id);
  RETURN NULL;
END
`,
});
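
Both functions reference transition tables (inserted_edit / deleted_edit), which only exist for AFTER ... FOR EACH STATEMENT triggers declared with REFERENCING clauses; the pg_trigger_depth() = 0 guard keeps the delete trigger from re-firing inside cascades initiated by other triggers. For reference, the DDL these registrations correspond to (as in the migration later in this diff):

// CREATE OR REPLACE TRIGGER "asset_edit_insert"
//   AFTER INSERT ON "asset_edit"
//   REFERENCING NEW TABLE AS "inserted_edit"
//   FOR EACH STATEMENT
//   EXECUTE FUNCTION asset_edit_insert();
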
@@ -0,0 +1,53 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`CREATE OR REPLACE FUNCTION asset_edit_insert()
  RETURNS TRIGGER
  LANGUAGE PLPGSQL
  AS $$
  BEGIN
    UPDATE asset
    SET "editCount" = "editCount" + 1
    WHERE "id" = NEW."assetId";
    RETURN NULL;
  END
  $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION asset_edit_delete()
  RETURNS TRIGGER
  LANGUAGE PLPGSQL
  AS $$
  BEGIN
    UPDATE asset
    SET "editCount" = "editCount" - 1
    WHERE "id" = OLD."assetId";
    RETURN NULL;
  END
  $$;`.execute(db);
  await sql`ALTER TABLE "asset" ADD "editCount" integer NOT NULL DEFAULT 0;`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_delete"
  AFTER DELETE ON "asset_edit"
  REFERENCING OLD TABLE AS "old"
  FOR EACH ROW
  WHEN (pg_trigger_depth() = 0)
  EXECUTE FUNCTION asset_edit_delete();`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_insert"
  AFTER INSERT ON "asset_edit"
  FOR EACH ROW
  EXECUTE FUNCTION asset_edit_insert();`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_asset_edit_insert', '{"type":"function","name":"asset_edit_insert","sql":"CREATE OR REPLACE FUNCTION asset_edit_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"editCount\\" = \\"editCount\\" + 1\\n WHERE \\"id\\" = NEW.\\"assetId\\";\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_asset_edit_delete', '{"type":"function","name":"asset_edit_delete","sql":"CREATE OR REPLACE FUNCTION asset_edit_delete()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"editCount\\" = \\"editCount\\" - 1\\n WHERE \\"id\\" = OLD.\\"assetId\\";\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_edit_delete', '{"type":"trigger","name":"asset_edit_delete","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_delete\\"\\n AFTER DELETE ON \\"asset_edit\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH ROW\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION asset_edit_delete();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_edit_insert', '{"type":"trigger","name":"asset_edit_insert","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_insert\\"\\n AFTER INSERT ON \\"asset_edit\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION asset_edit_insert();"}'::jsonb);`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP TRIGGER "asset_edit_delete" ON "asset_edit";`.execute(db);
  await sql`DROP TRIGGER "asset_edit_insert" ON "asset_edit";`.execute(db);
  await sql`ALTER TABLE "asset" DROP COLUMN "editCount";`.execute(db);
  await sql`DROP FUNCTION asset_edit_insert;`.execute(db);
  await sql`DROP FUNCTION asset_edit_delete;`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'function_asset_edit_insert';`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'function_asset_edit_delete';`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'trigger_asset_edit_delete';`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'trigger_asset_edit_insert';`.execute(db);
}

@@ -0,0 +1,89 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`CREATE OR REPLACE FUNCTION asset_edit_insert()
  RETURNS TRIGGER
  LANGUAGE PLPGSQL
  AS $$
  BEGIN
    UPDATE asset
    SET "isEdited" = true
    FROM inserted_edit
    WHERE asset.id = inserted_edit."assetId" AND NOT asset."isEdited";
    RETURN NULL;
  END
  $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION asset_edit_delete()
  RETURNS TRIGGER
  LANGUAGE PLPGSQL
  AS $$
  BEGIN
    UPDATE asset
    SET "isEdited" = false
    FROM deleted_edit
    WHERE asset.id = deleted_edit."assetId" AND asset."isEdited"
      AND NOT EXISTS (SELECT FROM asset_edit edit WHERE edit."assetId" = asset.id);
    RETURN NULL;
  END
  $$;`.execute(db);
  await sql`ALTER TABLE "asset" ADD "isEdited" boolean NOT NULL DEFAULT false;`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_delete"
  AFTER DELETE ON "asset_edit"
  REFERENCING OLD TABLE AS "deleted_edit"
  FOR EACH STATEMENT
  WHEN (pg_trigger_depth() = 0)
  EXECUTE FUNCTION asset_edit_delete();`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_insert"
  AFTER INSERT ON "asset_edit"
  REFERENCING NEW TABLE AS "inserted_edit"
  FOR EACH STATEMENT
  EXECUTE FUNCTION asset_edit_insert();`.execute(db);
  await sql`ALTER TABLE "asset" DROP COLUMN "editCount";`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"asset_edit_insert","sql":"CREATE OR REPLACE FUNCTION asset_edit_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"isEdited\\" = true\\n FROM inserted_edit\\n WHERE asset.id = inserted_edit.\\"assetId\\" AND NOT asset.\\"isEdited\\";\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_asset_edit_insert';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"asset_edit_delete","sql":"CREATE OR REPLACE FUNCTION asset_edit_delete()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"isEdited\\" = false\\n FROM deleted_edit\\n WHERE asset.id = deleted_edit.\\"assetId\\" AND asset.\\"isEdited\\" \\n AND NOT EXISTS (SELECT FROM asset_edit edit WHERE edit.\\"assetId\\" = asset.id);\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_asset_edit_delete';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"type":"trigger","name":"asset_edit_delete","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_delete\\"\\n AFTER DELETE ON \\"asset_edit\\"\\n REFERENCING OLD TABLE AS \\"deleted_edit\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION asset_edit_delete();"}'::jsonb WHERE "name" = 'trigger_asset_edit_delete';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"type":"trigger","name":"asset_edit_insert","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_insert\\"\\n AFTER INSERT ON \\"asset_edit\\"\\n REFERENCING NEW TABLE AS \\"inserted_edit\\"\\n FOR EACH STATEMENT\\n EXECUTE FUNCTION asset_edit_insert();"}'::jsonb WHERE "name" = 'trigger_asset_edit_insert';`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`CREATE OR REPLACE FUNCTION public.asset_edit_insert()
  RETURNS trigger
  LANGUAGE plpgsql
  AS $function$
  BEGIN
    UPDATE asset
    SET "editCount" = "editCount" + 1
    WHERE "id" = NEW."assetId";
    RETURN NULL;
  END
  $function$
  `.execute(db);
  await sql`CREATE OR REPLACE FUNCTION public.asset_edit_delete()
  RETURNS trigger
  LANGUAGE plpgsql
  AS $function$
  BEGIN
    UPDATE asset
    SET "editCount" = "editCount" - 1
    WHERE "id" = OLD."assetId";
    RETURN NULL;
  END
  $function$
  `.execute(db);
  await sql`ALTER TABLE "asset" ADD "editCount" integer NOT NULL DEFAULT 0;`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_delete"
  AFTER DELETE ON "asset_edit"
  REFERENCING OLD TABLE AS "old"
  FOR EACH ROW
  WHEN ((pg_trigger_depth() = 0))
  EXECUTE FUNCTION asset_edit_delete();`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_insert"
  AFTER INSERT ON "asset_edit"
  FOR EACH ROW
  EXECUTE FUNCTION asset_edit_insert();`.execute(db);
  await sql`ALTER TABLE "asset" DROP COLUMN "isEdited";`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"sql":"CREATE OR REPLACE FUNCTION asset_edit_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"editCount\\" = \\"editCount\\" + 1\\n WHERE \\"id\\" = NEW.\\"assetId\\";\\n RETURN NULL;\\n END\\n $$;","name":"asset_edit_insert","type":"function"}'::jsonb WHERE "name" = 'function_asset_edit_insert';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"sql":"CREATE OR REPLACE FUNCTION asset_edit_delete()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"editCount\\" = \\"editCount\\" - 1\\n WHERE \\"id\\" = OLD.\\"assetId\\";\\n RETURN NULL;\\n END\\n $$;","name":"asset_edit_delete","type":"function"}'::jsonb WHERE "name" = 'function_asset_edit_delete';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_delete\\"\\n AFTER DELETE ON \\"asset_edit\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH ROW\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION asset_edit_delete();","name":"asset_edit_delete","type":"trigger"}'::jsonb WHERE "name" = 'trigger_asset_edit_delete';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_insert\\"\\n AFTER INSERT ON \\"asset_edit\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION asset_edit_insert();","name":"asset_edit_insert","type":"trigger"}'::jsonb WHERE "name" = 'trigger_asset_edit_insert';`.execute(db);
}

@@ -0,0 +1,13 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset_file" DROP CONSTRAINT "asset_file_assetId_type_uq";`.execute(db);
  await sql`ALTER TABLE "asset_file" ADD "isEdited" boolean NOT NULL DEFAULT false;`.execute(db);
  await sql`ALTER TABLE "asset_file" ADD CONSTRAINT "asset_file_assetId_type_isEdited_uq" UNIQUE ("assetId", "type", "isEdited");`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset_file" DROP CONSTRAINT "asset_file_assetId_type_isEdited_uq";`.execute(db);
  await sql`ALTER TABLE "asset_file" ADD CONSTRAINT "asset_file_assetId_type_uq" UNIQUE ("assetId", "type");`.execute(db);
  await sql`ALTER TABLE "asset_file" DROP COLUMN "isEdited";`.execute(db);
}

@@ -0,0 +1,9 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset_exif" ADD "tags" character varying[];`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset_exif" DROP COLUMN "tags";`.execute(db);
}

@@ -1,7 +1,24 @@
import { AssetEditAction, AssetEditActionParameter } from 'src/dtos/editing.dto';
import { asset_edit_delete, asset_edit_insert } from 'src/schema/functions';
import { AssetTable } from 'src/schema/tables/asset.table';
import { Column, ForeignKeyColumn, Generated, PrimaryGeneratedColumn } from 'src/sql-tools';
import {
  AfterDeleteTrigger,
  AfterInsertTrigger,
  Column,
  ForeignKeyColumn,
  Generated,
  PrimaryGeneratedColumn,
  Table,
} from 'src/sql-tools';

@Table('asset_edit')
@AfterInsertTrigger({ scope: 'statement', function: asset_edit_insert, referencingNewTableAs: 'inserted_edit' })
@AfterDeleteTrigger({
  scope: 'statement',
  function: asset_edit_delete,
  referencingOldTableAs: 'deleted_edit',
  when: 'pg_trigger_depth() = 0',
})
export class AssetEditTable<T extends AssetEditAction = AssetEditAction> {
  @PrimaryGeneratedColumn()
  id!: Generated<string>;

@@ -93,6 +93,9 @@ export class AssetExifTable {
  @Column({ type: 'integer', nullable: true })
  rating!: number | null;

  @Column({ type: 'character varying', array: true, nullable: true })
  tags!: string[] | null;

  @UpdateDateColumn({ default: () => 'clock_timestamp()' })
  updatedAt!: Generated<Date>;

@@ -14,7 +14,7 @@ import {
} from 'src/sql-tools';

@Table('asset_file')
@Unique({ columns: ['assetId', 'type'] })
@Unique({ columns: ['assetId', 'type', 'isEdited'] })
@UpdatedAtTrigger('asset_file_updatedAt')
export class AssetFileTable {
  @PrimaryGeneratedColumn()
@@ -37,4 +37,7 @@ export class AssetFileTable {

  @UpdateIdColumn({ index: true })
  updateId!: Generated<string>;

  @Column({ type: 'boolean', default: false })
  isEdited!: Generated<boolean>;
}

@@ -143,4 +143,7 @@ export class AssetTable {

  @Column({ type: 'integer', nullable: true })
  height!: number | null;

  @Column({ type: 'boolean', default: false })
  isEdited!: Generated<boolean>;
}

@@ -107,6 +107,78 @@ describe(ApiKeyService.name, () => {
        permissions: newPermissions,
      });
    });

    describe('api key auth', () => {
      it('should prevent adding Permission.all', async () => {
        const permissions = [Permission.ApiKeyCreate, Permission.ApiKeyUpdate, Permission.AssetRead];
        const auth = factory.auth({ apiKey: { permissions } });
        const apiKey = factory.apiKey({ userId: auth.user.id, permissions });

        mocks.apiKey.getById.mockResolvedValue(apiKey);

        await expect(sut.update(auth, apiKey.id, { permissions: [Permission.All] })).rejects.toThrow(
          'Cannot grant permissions you do not have',
        );

        expect(mocks.apiKey.update).not.toHaveBeenCalled();
      });

      it('should prevent adding a new permission', async () => {
        const permissions = [Permission.ApiKeyCreate, Permission.ApiKeyUpdate, Permission.AssetRead];
        const auth = factory.auth({ apiKey: { permissions } });
        const apiKey = factory.apiKey({ userId: auth.user.id, permissions });

        mocks.apiKey.getById.mockResolvedValue(apiKey);

        await expect(sut.update(auth, apiKey.id, { permissions: [Permission.AssetCopy] })).rejects.toThrow(
          'Cannot grant permissions you do not have',
        );

        expect(mocks.apiKey.update).not.toHaveBeenCalled();
      });

      it('should allow removing permissions', async () => {
        const auth = factory.auth({ apiKey: { permissions: [Permission.ApiKeyUpdate, Permission.AssetRead] } });
        const apiKey = factory.apiKey({
          userId: auth.user.id,
          permissions: [Permission.AssetRead, Permission.AssetDelete],
        });

        mocks.apiKey.getById.mockResolvedValue(apiKey);
        mocks.apiKey.update.mockResolvedValue(apiKey);

        // remove Permission.AssetDelete
        await sut.update(auth, apiKey.id, { permissions: [Permission.AssetRead] });

        expect(mocks.apiKey.update).toHaveBeenCalledWith(
          auth.user.id,
          apiKey.id,
          expect.objectContaining({ permissions: [Permission.AssetRead] }),
        );
      });

      it('should allow adding new permissions', async () => {
        const auth = factory.auth({
          apiKey: { permissions: [Permission.ApiKeyUpdate, Permission.AssetRead, Permission.AssetUpdate] },
        });
        const apiKey = factory.apiKey({ userId: auth.user.id, permissions: [Permission.AssetRead] });

        mocks.apiKey.getById.mockResolvedValue(apiKey);
        mocks.apiKey.update.mockResolvedValue(apiKey);

        // add Permission.AssetUpdate
        await sut.update(auth, apiKey.id, {
          name: apiKey.name,
          permissions: [Permission.AssetRead, Permission.AssetUpdate],
        });

        expect(mocks.apiKey.update).toHaveBeenCalledWith(
          auth.user.id,
          apiKey.id,
          expect.objectContaining({ permissions: [Permission.AssetRead, Permission.AssetUpdate] }),
        );
      });
    });
  });

  describe('delete', () => {

@@ -32,6 +32,14 @@ export class ApiKeyService extends BaseService {
      throw new BadRequestException('API Key not found');
    }

    if (
      auth.apiKey &&
      dto.permissions &&
      !isGranted({ requested: dto.permissions, current: auth.apiKey.permissions })
    ) {
      throw new BadRequestException('Cannot grant permissions you do not have');
    }

    const key = await this.apiKeyRepository.update(auth.user.id, id, { name: dto.name, permissions: dto.permissions });

    return this.map(key);
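
The guard above means an API key can only scope other keys down, never up: every requested permission must already be held by the calling key. isGranted itself is not shown in this diff; assuming it is a subset check in which Permission.All is only satisfiable by a key that itself holds Permission.All, it behaves roughly like this (an illustration, not the actual implementation):

const isGrantedSketch = ({ requested, current }: { requested: Permission[]; current: Permission[] }) =>
  current.includes(Permission.All) ||
  requested.every((permission) => permission !== Permission.All && current.includes(permission));
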
@@ -529,9 +529,10 @@ describe(AssetMediaService.name, () => {
          ...assetStub.withCropEdit.files,
          {
            id: 'edited-file',
            type: AssetFileType.FullSizeEdited,
            type: AssetFileType.FullSize,
            path: '/uploads/user-id/fullsize/edited.jpg',
          } as AssetFile,
            isEdited: true,
          },
        ],
      };
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
@@ -554,9 +555,10 @@ describe(AssetMediaService.name, () => {
          ...assetStub.withCropEdit.files,
          {
            id: 'edited-file',
            type: AssetFileType.FullSizeEdited,
            type: AssetFileType.FullSize,
            path: '/uploads/user-id/fullsize/edited.jpg',
          } as AssetFile,
            isEdited: true,
          },
        ],
      };
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
@@ -579,9 +581,10 @@ describe(AssetMediaService.name, () => {
          ...assetStub.withCropEdit.files,
          {
            id: 'edited-file',
            type: AssetFileType.FullSizeEdited,
            type: AssetFileType.FullSize,
            path: '/uploads/user-id/fullsize/edited.jpg',
          } as AssetFile,
            isEdited: true,
          },
        ],
      };
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
@@ -656,6 +659,7 @@ describe(AssetMediaService.name, () => {
            id: '42',
            path: '/path/to/preview',
            type: AssetFileType.Thumbnail,
            isEdited: false,
          },
        ],
      });
@@ -673,6 +677,7 @@ describe(AssetMediaService.name, () => {
            id: '42',
            path: '/path/to/preview.jpg',
            type: AssetFileType.Preview,
            isEdited: false,
          },
        ],
      });

@@ -4,7 +4,7 @@ import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { AssetFile } from 'src/database';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import { AssetResponseDto, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {
  AssetBulkDeleteDto,
  AssetBulkUpdateDto,
@@ -112,7 +112,7 @@ export class AssetService extends BaseService {
    const { description, dateTimeOriginal, latitude, longitude, rating, ...rest } = dto;
    const repos = { asset: this.assetRepository, event: this.eventRepository };

    let previousMotion: MapAsset | null = null;
    let previousMotion: { id: string } | null = null;
    if (rest.livePhotoVideoId) {
      await onBeforeLink(repos, { userId: auth.user.id, livePhotoVideoId: rest.livePhotoVideoId });
    } else if (rest.livePhotoVideoId === null) {

@@ -5,7 +5,7 @@ import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
import { BackupService } from 'src/services/backup.service';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { mockDuplex, mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { describe } from 'vitest';

describe(BackupService.name, () => {
@@ -147,6 +147,7 @@ describe(BackupService.name, () => {
    beforeEach(() => {
      mocks.storage.readdir.mockResolvedValue([]);
      mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
      mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
      mocks.storage.rename.mockResolvedValue();
      mocks.storage.unlink.mockResolvedValue();
      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
@@ -165,7 +166,7 @@ describe(BackupService.name, () => {
      ({ sut, mocks } = newTestService(BackupService, { config: configMock }));

      mocks.storage.readdir.mockResolvedValue([]);
      mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
      mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
      mocks.storage.rename.mockResolvedValue();
      mocks.storage.unlink.mockResolvedValue();
      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
@@ -174,14 +175,16 @@ describe(BackupService.name, () => {

      await sut.handleBackupDatabase();

      expect(mocks.process.spawn).toHaveBeenCalled();
      const call = mocks.process.spawn.mock.calls[0];
      expect(mocks.process.spawnDuplexStream).toHaveBeenCalled();
      const call = mocks.process.spawnDuplexStream.mock.calls[0];
      const args = call[1] as string[];
      // ['--dbname', '<url>', '--clean', '--if-exists']
      expect(args[0]).toBe('--dbname');
      const passedUrl = args[1];
      expect(passedUrl).not.toContain('uselibpqcompat');
      expect(passedUrl).toContain('sslmode=require');
      expect(args).toMatchInlineSnapshot(`
        [
          "postgresql://postgres:pwd@host:5432/immich?sslmode=require",
          "--clean",
          "--if-exists",
        ]
      `);
    });

    it('should run a database backup successfully', async () => {
@@ -196,21 +199,21 @@ describe(BackupService.name, () => {
      expect(mocks.storage.rename).toHaveBeenCalled();
    });

    it('should fail if pg_dumpall fails', async () => {
      mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
    it('should fail if pg_dump fails', async () => {
      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
    });

    it('should not rename file if pgdump fails and gzip succeeds', async () => {
      mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
      expect(mocks.storage.rename).not.toHaveBeenCalled();
    });

    it('should fail if gzip fails', async () => {
      mocks.process.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
      mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      await expect(sut.handleBackupDatabase()).rejects.toThrow('Gzip failed with code 1');
      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
      await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
    });

    it('should fail if write stream fails', async () => {
@@ -226,9 +229,9 @@ describe(BackupService.name, () => {
    });

    it('should ignore unlink failing and still return failed job status', async () => {
      mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
      mocks.storage.unlink.mockRejectedValue(new Error('error'));
      await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
      expect(mocks.storage.unlink).toHaveBeenCalled();
    });

@@ -242,12 +245,12 @@ describe(BackupService.name, () => {
      ${'17.15.1'} | ${17}
      ${'18.0.0'} | ${18}
    `(
      `should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
      `should use pg_dump $expectedVersion with postgres version $postgresVersion`,
      async ({ postgresVersion, expectedVersion }) => {
        mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
        await sut.handleBackupDatabase();
        expect(mocks.process.spawn).toHaveBeenCalledWith(
          `/usr/lib/postgresql/${expectedVersion}/bin/pg_dumpall`,
        expect(mocks.process.spawnDuplexStream).toHaveBeenCalledWith(
          `/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
          expect.any(Array),
          expect.any(Object),
        );
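
mockDuplex comes from test/utils and its source is not part of this diff; judging from the call sites, its arguments are (command, exitCode, stdout, stderr) and it must honor spawnDuplexStream's contract of failing with "<command> non-zero exit code (<code>)". A stand-in consistent with that contract could look like this (an assumption, not the actual helper):

import { Duplex } from 'node:stream';

function mockDuplexSketch(command: string, exitCode: number, stdout: string, stderr: string): Duplex {
  return new Duplex({
    write(_chunk, _encoding, callback) {
      callback(); // swallow stdin, like a process that ignores input
    },
    read() {
      if (exitCode === 0) {
        this.push(stdout); // canned stdout, then clean EOF
        this.push(null);
      } else {
        this.destroy(new Error(`${command} non-zero exit code (${exitCode})\n${stderr}`));
      }
    },
  });
}
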
@@ -1,13 +1,16 @@
import { Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import path from 'node:path';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import {
  createDatabaseBackup,
  isFailedDatabaseBackupName,
  isValidDatabaseRoutineBackupName,
  UnsupportedPostgresError,
} from 'src/utils/database-backups';
import { handlePromiseError } from 'src/utils/misc';

@Injectable()
@@ -53,16 +56,11 @@ export class BackupService extends BaseService {

    const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
    const files = await this.storageRepository.readdir(backupsFolder);
    const failedBackups = files.filter((file) => file.match(/immich-db-backup-.*\.sql\.gz\.tmp$/));
    const backups = files
      .filter((file) => {
        const oldBackupStyle = file.match(/immich-db-backup-\d+\.sql\.gz$/);
        //immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
        const newBackupStyle = file.match(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
        return oldBackupStyle || newBackupStyle;
      })
      .filter((filename) => isValidDatabaseRoutineBackupName(filename))
      .toSorted()
      .toReversed();
    const failedBackups = files.filter((filename) => isFailedDatabaseBackupName(filename));

    const toDelete = backups.slice(config.keepLastAmount);
    toDelete.push(...failedBackups);
@@ -75,123 +73,27 @@

  @OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
  async handleBackupDatabase(): Promise<JobStatus> {
    this.logger.debug(`Database Backup Started`);
    const { database } = this.configRepository.getEnv();
    const config = database.config;

    const isUrlConnection = config.connectionType === 'url';

    let connectionUrl: string = isUrlConnection ? config.url : '';
    if (URL.canParse(connectionUrl)) {
      // remove known bad url parameters for pg_dumpall
      const url = new URL(connectionUrl);
      url.searchParams.delete('uselibpqcompat');
      connectionUrl = url.toString();
    }

    const databaseParams = isUrlConnection
      ? ['--dbname', connectionUrl]
      : [
          '--username',
          config.username,
          '--host',
          config.host,
          '--port',
          `${config.port}`,
          '--database',
          config.database,
        ];

    databaseParams.push('--clean', '--if-exists');
    const databaseVersion = await this.databaseRepository.getPostgresVersion();
    const backupFilePath = path.join(
      StorageCore.getBaseFolder(StorageFolder.Backups),
      `immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
    );
    const databaseSemver = semver.coerce(databaseVersion);
    const databaseMajorVersion = databaseSemver?.major;

    if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
      this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
      return JobStatus.Failed;
    }

    this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

    try {
      await new Promise<void>((resolve, reject) => {
        const pgdump = this.processRepository.spawn(
          `/usr/lib/postgresql/${databaseMajorVersion}/bin/pg_dumpall`,
          databaseParams,
          {
            env: {
              PATH: process.env.PATH,
              PGPASSWORD: isUrlConnection ? new URL(connectionUrl).password : config.password,
            },
          },
        );

        // NOTE: `--rsyncable` is only supported in GNU gzip
        const gzip = this.processRepository.spawn(`gzip`, ['--rsyncable']);
        pgdump.stdout.pipe(gzip.stdin);

        const fileStream = this.storageRepository.createWriteStream(backupFilePath);

        gzip.stdout.pipe(fileStream);

        pgdump.on('error', (err) => {
          this.logger.error(`Backup failed with error: ${err}`);
          reject(err);
        });

        gzip.on('error', (err) => {
          this.logger.error(`Gzip failed with error: ${err}`);
          reject(err);
        });

        let pgdumpLogs = '';
        let gzipLogs = '';

        pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
        gzip.stderr.on('data', (data) => (gzipLogs += data));

        pgdump.on('exit', (code) => {
          if (code !== 0) {
            this.logger.error(`Backup failed with code ${code}`);
            reject(`Backup failed with code ${code}`);
            this.logger.error(pgdumpLogs);
            return;
          }
          if (pgdumpLogs) {
            this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
          }
        });

        gzip.on('exit', (code) => {
          if (code !== 0) {
            this.logger.error(`Gzip failed with code ${code}`);
            reject(`Gzip failed with code ${code}`);
            this.logger.error(gzipLogs);
            return;
          }
          if (pgdump.exitCode !== 0) {
            this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
            return;
          }
          resolve();
        });
      });
      await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
      await createDatabaseBackup(this.backupRepos);
    } catch (error) {
      this.logger.error(`Database Backup Failure: ${error}`);
      await this.storageRepository
        .unlink(backupFilePath)
        .catch((error) => this.logger.error(`Failed to delete failed backup file: ${error}`));
      if (error instanceof UnsupportedPostgresError) {
        return JobStatus.Failed;
      }

      throw error;
    }

    this.logger.log(`Database Backup Success`);
    await this.cleanupDatabaseBackups();
    return JobStatus.Success;
  }

  private get backupRepos() {
    return {
      logger: this.logger,
      storage: this.storageRepository,
      config: this.configRepository,
      process: this.processRepository,
      database: this.databaseRepository,
    };
  }
}

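The removed promise wrapper above hand-wires error propagation across three pipes. For comparison, a minimal sketch of the same dump-compress-write shape using only built-in Node APIs; zlib stands in for the external `gzip --rsyncable` process, so this approximates, rather than reproduces, the new createDatabaseBackup() helper:

import { spawn } from 'node:child_process';
import { createWriteStream } from 'node:fs';
import { createGzip } from 'node:zlib';
import { pipeline } from 'node:stream/promises';

// Hedged sketch: dump -> gzip -> file as a single awaitable pipeline.
async function backupToFile(pgDumpPath: string, args: string[], outPath: string): Promise<void> {
  const dump = spawn(pgDumpPath, args, { env: { PATH: process.env.PATH } });
  const exit = new Promise<void>((resolve, reject) => {
    dump.on('error', reject);
    dump.on('exit', (code) => (code === 0 ? resolve() : reject(new Error(`pg_dump non-zero exit code (${code})`))));
  });
  // pipeline() destroys all streams and rejects on the first error,
  // replacing the manual 'error'/'exit' bookkeeping of the old handler.
  await Promise.all([pipeline(dump.stdout, createGzip(), createWriteStream(outPath)), exit]);
}
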
@@ -1,5 +1,5 @@
import { jwtVerify } from 'jose';
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { CliService } from 'src/services/cli.service';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
@@ -95,7 +95,14 @@ describe(CliService.name, () => {
    });

    it('should disable maintenance mode', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
      mocks.systemMetadata.get.mockResolvedValue({
        isMaintenanceMode: true,
        secret: 'secret',
        action: {
          action: MaintenanceAction.Start,
        },
      });

      await expect(sut.disableMaintenanceMode()).resolves.toEqual({
        alreadyDisabled: false,
      });
@@ -109,7 +116,14 @@ describe(CliService.name, () => {

  describe('enableMaintenanceMode', () => {
    it('should not do anything if in maintenance mode', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
      mocks.systemMetadata.get.mockResolvedValue({
        isMaintenanceMode: true,
        secret: 'secret',
        action: {
          action: MaintenanceAction.Start,
        },
      });

      await expect(sut.enableMaintenanceMode()).resolves.toEqual(
        expect.objectContaining({
          alreadyEnabled: true,
@@ -133,13 +147,22 @@ describe(CliService.name, () => {
      expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
        isMaintenanceMode: true,
        secret: expect.stringMatching(/^\w{128}$/),
        action: {
          action: 'start',
        },
      });
    });

    const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;

    it('should return a valid login URL', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
      mocks.systemMetadata.get.mockResolvedValue({
        isMaintenanceMode: true,
        secret: 'secret',
        action: {
          action: MaintenanceAction.Start,
        },
      });

      const result = await sut.enableMaintenanceMode();


@@ -3,7 +3,7 @@ import { isAbsolute } from 'node:path';
import { SALT_ROUNDS } from 'src/constants';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { UserAdminResponseDto, mapUserAdmin } from 'src/dtos/user.dto';
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { createMaintenanceLoginUrl, generateMaintenanceSecret } from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';
@@ -86,6 +86,9 @@ export class CliService extends BaseService {
    await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
      isMaintenanceMode: true,
      secret,
      action: {
        action: MaintenanceAction.Start,
      },
    });

    await this.appRepository.sendOneShotAppRestart({
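
Across these hunks, the stored MaintenanceMode metadata consistently gains an `action` field. Reconstructed from the call sites above (not copied from src/types), the shape is roughly:

import { MaintenanceAction } from 'src/enum';

// An inference from usage, not the authoritative MaintenanceModeState definition.
interface MaintenanceModeStateSketch {
  isMaintenanceMode: boolean;
  secret?: string; // 128-character secret, per /^\w{128}$/ in the specs
  action?: {
    action: MaintenanceAction; // e.g. MaintenanceAction.Start
  };
}
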
server/src/services/database-backup.service.spec.ts (new file, +83 lines)
@@ -0,0 +1,83 @@
import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { StorageCore } from 'src/cores/storage.core';
import { StorageFolder } from 'src/enum';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { newTestService, ServiceMocks } from 'test/utils';

describe(DatabaseBackupService.name, () => {
  let sut: DatabaseBackupService;
  let mocks: ServiceMocks;

  beforeEach(() => {
    ({ sut, mocks } = newTestService(DatabaseBackupService));
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('listBackups', () => {
    it('should give us all backups', async () => {
      mocks.storage.readdir.mockResolvedValue([
        `immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
        `immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
        'immich-db-backup-1753789649000.sql.gz',
        `immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
      ]);
      mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);

      await expect(sut.listBackups()).resolves.toMatchObject({
        backups: [
          { filename: 'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
          { filename: 'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
          { filename: 'immich-db-backup-1753789649000.sql.gz', filesize: 1024 },
        ],
      });
    });
  });

  describe('deleteBackup', () => {
    it('should reject invalid file names', async () => {
      await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
        new BadRequestException('Invalid backup name!'),
      );
    });

    it('should unlink the target file', async () => {
      await sut.deleteBackup(['filename.sql']);
      expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
      expect(mocks.storage.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
      );
    });
  });

  describe('uploadBackup', () => {
    it('should reject invalid file names', async () => {
      await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
        new BadRequestException('Invalid backup name!'),
      );
    });

    it('should write file', async () => {
      await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
      expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
    });
  });

  describe('downloadBackup', () => {
    it('should reject invalid file names', () => {
      expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
    });

    it('should get backup path', () => {
      expect(sut.downloadBackup('hello.sql.gz')).toEqual(
        expect.objectContaining({
          path: '/data/backups/hello.sql.gz',
        }),
      );
    });
  });
});

server/src/services/database-backup.service.ts (new file, +43 lines)
@@ -0,0 +1,43 @@
import { Injectable } from '@nestjs/common';
import { DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
import { BaseService } from 'src/services/base.service';
import {
  deleteDatabaseBackup,
  downloadDatabaseBackup,
  listDatabaseBackups,
  uploadDatabaseBackup,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';

/**
 * This service is available outside of maintenance mode to manage database backups
 */
@Injectable()
export class DatabaseBackupService extends BaseService {
  async listBackups(): Promise<DatabaseBackupListResponseDto> {
    const backups = await listDatabaseBackups(this.backupRepos);
    return { backups };
  }

  deleteBackup(files: string[]): Promise<void> {
    return deleteDatabaseBackup(this.backupRepos, files);
  }

  async uploadBackup(file: Express.Multer.File): Promise<void> {
    return uploadDatabaseBackup(this.backupRepos, file);
  }

  downloadBackup(fileName: string): ImmichFileResponse {
    return downloadDatabaseBackup(fileName);
  }

  private get backupRepos() {
    return {
      logger: this.logger,
      storage: this.storageRepository,
      config: this.configRepository,
      process: this.processRepository,
      database: this.databaseRepository,
    };
  }
}

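Both BackupService and DatabaseBackupService expose the same private backupRepos getter, so the shared helpers in src/utils/database-backups stay free of NestJS injection. A hedged sketch of what such a helper's contract might look like; the repos shape mirrors the getter above, while the body is purely illustrative:

// Illustrative only: the helper names exist in src/utils/database-backups,
// but this signature and body are guesses based on the specs.
interface BackupRepos {
  logger: { log: (msg: string) => void };
  storage: { readdir: (folder: string) => Promise<string[]>; stat: (path: string) => Promise<{ size: number }> };
  config: unknown;
  process: unknown;
  database: unknown;
}

async function listDatabaseBackupsSketch(repos: BackupRepos, folder: string) {
  const files = await repos.storage.readdir(folder);
  // stat each candidate so the response can include a filesize, as the spec expects
  return Promise.all(
    files.map(async (filename) => ({
      filename,
      filesize: (await repos.storage.stat(`${folder}/${filename}`)).size,
    })),
  );
}
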
@@ -9,6 +9,7 @@ import { AuthAdminService } from 'src/services/auth-admin.service';
import { AuthService } from 'src/services/auth.service';
import { BackupService } from 'src/services/backup.service';
import { CliService } from 'src/services/cli.service';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { DatabaseService } from 'src/services/database.service';
import { DownloadService } from 'src/services/download.service';
import { DuplicateService } from 'src/services/duplicate.service';
@@ -60,6 +61,7 @@ export const services = [
  AuthAdminService,
  BackupService,
  CliService,
  DatabaseBackupService,
  DatabaseService,
  DownloadService,
  DuplicateService,

@@ -136,7 +136,29 @@ export class JobService extends BaseService {
        const asset = await this.assetRepository.getById(item.data.id);

        if (asset) {
          this.websocketRepository.clientSend('AssetEditReadyV1', asset.ownerId, { assetId: item.data.id });
          this.websocketRepository.clientSend('AssetEditReadyV1', asset.ownerId, {
            asset: {
              id: asset.id,
              ownerId: asset.ownerId,
              originalFileName: asset.originalFileName,
              thumbhash: asset.thumbhash ? hexOrBufferToBase64(asset.thumbhash) : null,
              checksum: hexOrBufferToBase64(asset.checksum),
              fileCreatedAt: asset.fileCreatedAt,
              fileModifiedAt: asset.fileModifiedAt,
              localDateTime: asset.localDateTime,
              duration: asset.duration,
              type: asset.type,
              deletedAt: asset.deletedAt,
              isFavorite: asset.isFavorite,
              visibility: asset.visibility,
              livePhotoVideoId: asset.livePhotoVideoId,
              stackId: asset.stackId,
              libraryId: asset.libraryId,
              width: asset.width,
              height: asset.height,
              isEdited: asset.isEdited,
            },
          });
        }

        break;
@@ -189,6 +211,7 @@ export class JobService extends BaseService {
            libraryId: asset.libraryId,
            width: asset.width,
            height: asset.height,
            isEdited: asset.isEdited,
          },
          exif: {
            assetId: exif.assetId,

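The AssetEditReadyV1 event now carries a full asset payload instead of a bare assetId. Inferred from the two hunks above (the declared event type lives elsewhere in the codebase, and some field types below are guesses):

// Reconstructed from the clientSend call sites; an assumption, not the declared type.
interface AssetEditReadyV1Payload {
  asset: {
    id: string;
    ownerId: string;
    originalFileName: string;
    thumbhash: string | null; // base64, via hexOrBufferToBase64
    checksum: string; // base64
    fileCreatedAt: Date;
    fileModifiedAt: Date;
    localDateTime: Date;
    duration: string | null;
    type: string;
    deletedAt: Date | null;
    isFavorite: boolean;
    visibility: string;
    livePhotoVideoId: string | null;
    stackId: string | null;
    libraryId: string | null;
    width: number | null;
    height: number | null;
    isEdited: boolean;
  };
}
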
@@ -1,4 +1,4 @@
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { MaintenanceService } from 'src/services/maintenance.service';
import { newTestService, ServiceMocks } from 'test/utils';

@@ -36,28 +36,96 @@ describe(MaintenanceService.name, () => {
    });

    it('should return true if enabled', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: '' });
      mocks.systemMetadata.get.mockResolvedValue({
        isMaintenanceMode: true,
        secret: '',
        action: { action: MaintenanceAction.Start },
      });

      await expect(sut.getMaintenanceMode()).resolves.toEqual({
        isMaintenanceMode: true,
        secret: '',
        action: {
          action: 'start',
        },
      });

      expect(mocks.systemMetadata.get).toHaveBeenCalled();
    });
  });

  describe('integrityCheck', () => {
    it('generate integrity report', async () => {
      mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
      mocks.storage.readFile.mockResolvedValue(undefined as never);
      mocks.storage.overwriteFile.mockRejectedValue(undefined as never);

      await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
        {
          "storage": [
            {
              "files": 2,
              "folder": "encoded-video",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "library",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "upload",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "profile",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "thumbs",
              "readable": true,
              "writable": false,
            },
            {
              "files": 2,
              "folder": "backups",
              "readable": true,
              "writable": false,
            },
          ],
        }
      `);
    });
  });

  describe('startMaintenance', () => {
    it('should set maintenance mode and return a secret', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });

      await expect(sut.startMaintenance('admin')).resolves.toMatchObject({
      await expect(
        sut.startMaintenance(
          {
            action: MaintenanceAction.Start,
          },
          'admin',
        ),
      ).resolves.toMatchObject({
        jwt: expect.any(String),
      });

      expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
        isMaintenanceMode: true,
        secret: expect.stringMatching(/^\w{128}$/),
        action: {
          action: 'start',
        },
      });

      expect(mocks.event.emit).toHaveBeenCalledWith('AppRestart', {
@@ -78,7 +146,13 @@ describe(MaintenanceService.name, () => {
    });

    it('should generate a login url with JWT', async () => {
      mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
      mocks.systemMetadata.get.mockResolvedValue({
        isMaintenanceMode: true,
        secret: 'secret',
        action: {
          action: MaintenanceAction.Start,
        },
      });

      await expect(
        sut.createLoginUrl({

@@ -1,11 +1,21 @@
import { Injectable } from '@nestjs/common';
import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent } from 'src/decorators';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { SystemMetadataKey } from 'src/enum';
import {
  MaintenanceAuthDto,
  MaintenanceDetectInstallResponseDto,
  MaintenanceStatusResponseDto,
  SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { MaintenanceModeState } from 'src/types';
import { createMaintenanceLoginUrl, generateMaintenanceSecret, signMaintenanceJwt } from 'src/utils/maintenance';
import {
  createMaintenanceLoginUrl,
  detectPriorInstall,
  generateMaintenanceSecret,
  signMaintenanceJwt,
} from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';

/**
@@ -19,9 +29,25 @@ export class MaintenanceService extends BaseService {
      .then((state) => state ?? { isMaintenanceMode: false });
  }

  async startMaintenance(username: string): Promise<{ jwt: string }> {
  getMaintenanceStatus(): MaintenanceStatusResponseDto {
    return {
      active: false,
      action: MaintenanceAction.End,
    };
  }

  detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
    return detectPriorInstall(this.storageRepository);
  }

  async startMaintenance(action: SetMaintenanceModeDto, username: string): Promise<{ jwt: string }> {
    const secret = generateMaintenanceSecret();
    await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, { isMaintenanceMode: true, secret });
    await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
      isMaintenanceMode: true,
      secret,
      action,
    });

    await this.eventRepository.emit('AppRestart', { isMaintenanceMode: true });

    return {
@@ -31,6 +57,20 @@ export class MaintenanceService extends BaseService {
    };
  }

  async startRestoreFlow(): Promise<{ jwt: string }> {
    const adminUser = await this.userRepository.getAdmin();
    if (adminUser) {
      throw new BadRequestException('The server already has an admin');
    }

    return this.startMaintenance(
      {
        action: MaintenanceAction.SelectDatabaseRestore,
      },
      'admin',
    );
  }

  @OnEvent({ name: 'AppRestart', server: true })
  onRestart(event: ArgOf<'AppRestart'>, ack?: (ok: 'ok') => void): void {
    this.logger.log(`Restarting due to event... ${JSON.stringify(event)}`);

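The CLI spec expects login URLs of the form /maintenance?token=<JWT>, signed from the stored 128-character secret. A hedged sketch of that flow with jose; the helper names exist in src/utils/maintenance, but these bodies are assumptions:

import { randomBytes } from 'node:crypto';
import { SignJWT } from 'jose';

// Assumption: the secret is 128 word characters, matching /^\w{128}$/ in the specs.
const generateMaintenanceSecretSketch = () => randomBytes(64).toString('hex'); // 128 hex chars

// Assumption: HS256 over the shared secret; the real claims may differ.
const signMaintenanceJwtSketch = (secret: string, username: string) =>
  new SignJWT({ username })
    .setProtectedHeader({ alg: 'HS256' })
    .setIssuedAt()
    .sign(new TextEncoder().encode(secret));

const createMaintenanceLoginUrlSketch = async (baseUrl: string, secret: string, username: string) =>
  `${baseUrl}/maintenance?token=${await signMaintenanceJwtSketch(secret, username)}`;
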
@@ -241,21 +241,21 @@ describe(MediaService.name, () => {
      await expect(sut.handleAssetMigration({ id: assetStub.image.id })).resolves.toBe(JobStatus.Success);
      expect(mocks.move.create).toHaveBeenCalledWith({
        entityId: assetStub.image.id,
        pathType: AssetPathType.FullSize,
        pathType: AssetFileType.FullSize,
        oldPath: '/uploads/user-id/fullsize/path.webp',
        newPath: expect.stringContaining('/data/thumbs/user-id/as/se/asset-id-fullsize.jpeg'),
        newPath: expect.stringContaining('/data/thumbs/user-id/as/se/asset-id_fullsize.jpeg'),
      });
      expect(mocks.move.create).toHaveBeenCalledWith({
        entityId: assetStub.image.id,
        pathType: AssetPathType.Preview,
        pathType: AssetFileType.Preview,
        oldPath: '/uploads/user-id/thumbs/path.jpg',
        newPath: expect.stringContaining('/data/thumbs/user-id/as/se/asset-id-preview.jpeg'),
        newPath: expect.stringContaining('/data/thumbs/user-id/as/se/asset-id_preview.jpeg'),
      });
      expect(mocks.move.create).toHaveBeenCalledWith({
        entityId: assetStub.image.id,
        pathType: AssetPathType.Thumbnail,
        pathType: AssetFileType.Thumbnail,
        oldPath: '/uploads/user-id/webp/path.ext',
        newPath: expect.stringContaining('/data/thumbs/user-id/as/se/asset-id-thumbnail.webp'),
        newPath: expect.stringContaining('/data/thumbs/user-id/as/se/asset-id_thumbnail.webp'),
      });
      expect(mocks.move.create).toHaveBeenCalledTimes(3);
    });
@@ -385,11 +385,13 @@ describe(MediaService.name, () => {
          assetId: 'asset-id',
          type: AssetFileType.Preview,
          path: expect.any(String),
          isEdited: false,
        },
        {
          assetId: 'asset-id',
          type: AssetFileType.Thumbnail,
          path: expect.any(String),
          isEdited: false,
        },
      ]);
      expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'asset-id', thumbhash: thumbhashBuffer });
@@ -421,11 +423,13 @@
          assetId: 'asset-id',
          type: AssetFileType.Preview,
          path: expect.any(String),
          isEdited: false,
        },
        {
          assetId: 'asset-id',
          type: AssetFileType.Thumbnail,
          path: expect.any(String),
          isEdited: false,
        },
      ]);
    });
@@ -456,11 +460,13 @@
          assetId: 'asset-id',
          type: AssetFileType.Preview,
          path: expect.any(String),
          isEdited: false,
        },
        {
          assetId: 'asset-id',
          type: AssetFileType.Thumbnail,
          path: expect.any(String),
          isEdited: false,
        },
      ]);
    });
@@ -548,8 +554,8 @@
      mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image);
      const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
      mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
      const previewPath = `/data/thumbs/user-id/as/se/asset-id-preview.${format}`;
      const thumbnailPath = `/data/thumbs/user-id/as/se/asset-id-thumbnail.webp`;
      const previewPath = `/data/thumbs/user-id/as/se/asset-id_preview.${format}`;
      const thumbnailPath = `/data/thumbs/user-id/as/se/asset-id_thumbnail.webp`;

      await sut.handleGenerateThumbnails({ id: assetStub.image.id });

@@ -595,8 +601,8 @@
      mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.image);
      const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
      mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
      const previewPath = expect.stringContaining(`/data/thumbs/user-id/as/se/asset-id-preview.jpeg`);
      const thumbnailPath = expect.stringContaining(`/data/thumbs/user-id/as/se/asset-id-thumbnail.${format}`);
      const previewPath = expect.stringContaining(`/data/thumbs/user-id/as/se/asset-id_preview.jpeg`);
      const thumbnailPath = expect.stringContaining(`/data/thumbs/user-id/as/se/asset-id_thumbnail.${format}`);

      await sut.handleGenerateThumbnails({ id: assetStub.image.id });

@@ -1026,9 +1032,9 @@

      expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
        expect.arrayContaining([
          expect.objectContaining({ type: AssetFileType.FullSizeEdited }),
          expect.objectContaining({ type: AssetFileType.PreviewEdited }),
          expect.objectContaining({ type: AssetFileType.ThumbnailEdited }),
          expect.objectContaining({ type: AssetFileType.FullSize, isEdited: true }),
          expect.objectContaining({ type: AssetFileType.Preview, isEdited: true }),
          expect.objectContaining({ type: AssetFileType.Thumbnail, isEdited: true }),
        ]),
      );
    });
@@ -1098,17 +1104,17 @@
      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
        rawBuffer,
        expect.anything(),
        expect.stringContaining('edited_preview.jpeg'),
        expect.stringContaining('preview_edited.jpeg'),
      );
      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
        rawBuffer,
        expect.anything(),
        expect.stringContaining('edited_thumbnail.webp'),
        expect.stringContaining('thumbnail_edited.webp'),
      );
      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
        rawBuffer,
        expect.anything(),
        expect.stringContaining('edited_fullsize.jpeg'),
        expect.stringContaining('fullsize_edited.jpeg'),
      );
    });

@@ -3254,13 +3260,13 @@
      };

      await sut['syncFiles'](asset, [
        { type: AssetFileType.Preview, newPath: '/new/preview.jpg' },
        { type: AssetFileType.Thumbnail, newPath: '/new/thumbnail.jpg' },
        { type: AssetFileType.Preview, newPath: '/new/preview.jpg', isEdited: false },
        { type: AssetFileType.Thumbnail, newPath: '/new/thumbnail.jpg', isEdited: false },
      ]);

      expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([
        { assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview },
        { assetId: 'asset-id', path: '/new/thumbnail.jpg', type: AssetFileType.Thumbnail },
        { assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview, isEdited: false },
        { assetId: 'asset-id', path: '/new/thumbnail.jpg', type: AssetFileType.Thumbnail, isEdited: false },
      ]);
      expect(mocks.asset.deleteFiles).not.toHaveBeenCalled();
      expect(mocks.job.queue).not.toHaveBeenCalled();
@@ -3270,19 +3276,31 @@
      const asset = {
        id: 'asset-id',
        files: [
          { id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' },
          { id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
          {
            id: 'file-1',
            assetId: 'asset-id',
            type: AssetFileType.Preview,
            path: '/old/preview.jpg',
            isEdited: false,
          },
          {
            id: 'file-2',
            assetId: 'asset-id',
            type: AssetFileType.Thumbnail,
            path: '/old/thumbnail.jpg',
            isEdited: false,
          },
        ],
      };

      await sut['syncFiles'](asset, [
        { type: AssetFileType.Preview, newPath: '/new/preview.jpg' },
        { type: AssetFileType.Thumbnail, newPath: '/new/thumbnail.jpg' },
        { type: AssetFileType.Preview, newPath: '/new/preview.jpg', isEdited: false },
        { type: AssetFileType.Thumbnail, newPath: '/new/thumbnail.jpg', isEdited: false },
      ]);

      expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([
        { assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview },
        { assetId: 'asset-id', path: '/new/thumbnail.jpg', type: AssetFileType.Thumbnail },
        { assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview, isEdited: false },
        { assetId: 'asset-id', path: '/new/thumbnail.jpg', type: AssetFileType.Thumbnail, isEdited: false },
      ]);
      expect(mocks.asset.deleteFiles).not.toHaveBeenCalled();
      expect(mocks.job.queue).toHaveBeenCalledWith({
@@ -3295,17 +3313,38 @@
      const asset = {
        id: 'asset-id',
        files: [
          { id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' },
          { id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
          {
            id: 'file-1',
            assetId: 'asset-id',
            type: AssetFileType.Preview,
            path: '/old/preview.jpg',
            isEdited: false,
          },
          {
            id: 'file-2',
            assetId: 'asset-id',
            type: AssetFileType.Thumbnail,
            path: '/old/thumbnail.jpg',
            isEdited: false,
          },
        ],
      };

      await sut['syncFiles'](asset, [{ type: AssetFileType.Preview }, { type: AssetFileType.Thumbnail }]);
      await sut['syncFiles'](asset, [
        { type: AssetFileType.Preview, isEdited: false },
        { type: AssetFileType.Thumbnail, isEdited: false },
      ]);

      expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
      expect(mocks.asset.deleteFiles).toHaveBeenCalledWith([
        { id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' },
        { id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
        { id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg', isEdited: false },
        {
          id: 'file-2',
          assetId: 'asset-id',
          type: AssetFileType.Thumbnail,
          path: '/old/thumbnail.jpg',
          isEdited: false,
        },
      ]);
      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.FileDelete,
@@ -3317,14 +3356,26 @@
      const asset = {
        id: 'asset-id',
        files: [
          { id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/same/preview.jpg' },
          { id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/same/thumbnail.jpg' },
          {
            id: 'file-1',
            assetId: 'asset-id',
            type: AssetFileType.Preview,
            path: '/same/preview.jpg',
            isEdited: false,
          },
          {
            id: 'file-2',
            assetId: 'asset-id',
            type: AssetFileType.Thumbnail,
            path: '/same/thumbnail.jpg',
            isEdited: false,
          },
        ],
      };

      await sut['syncFiles'](asset, [
        { type: AssetFileType.Preview, newPath: '/same/preview.jpg' },
        { type: AssetFileType.Thumbnail, newPath: '/same/thumbnail.jpg' },
        { type: AssetFileType.Preview, newPath: '/same/preview.jpg', isEdited: false },
        { type: AssetFileType.Thumbnail, newPath: '/same/thumbnail.jpg', isEdited: false },
      ]);

      expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
@@ -3336,23 +3387,41 @@
      const asset = {
        id: 'asset-id',
        files: [
          { id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' },
          { id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
          {
            id: 'file-1',
            assetId: 'asset-id',
            type: AssetFileType.Preview,
            path: '/old/preview.jpg',
            isEdited: false,
          },
          {
            id: 'file-2',
            assetId: 'asset-id',
            type: AssetFileType.Thumbnail,
            path: '/old/thumbnail.jpg',
            isEdited: false,
          },
        ],
      };

      await sut['syncFiles'](asset, [
        { type: AssetFileType.Preview, newPath: '/new/preview.jpg' }, // replace
        { type: AssetFileType.Thumbnail }, // delete
        { type: AssetFileType.FullSize, newPath: '/new/fullsize.jpg' }, // new
        { type: AssetFileType.Preview, newPath: '/new/preview.jpg', isEdited: false }, // replace
        { type: AssetFileType.Thumbnail, isEdited: false }, // delete
        { type: AssetFileType.FullSize, newPath: '/new/fullsize.jpg', isEdited: false }, // new
      ]);

      expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([
        { assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview },
        { assetId: 'asset-id', path: '/new/fullsize.jpg', type: AssetFileType.FullSize },
        { assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview, isEdited: false },
        { assetId: 'asset-id', path: '/new/fullsize.jpg', type: AssetFileType.FullSize, isEdited: false },
      ]);
      expect(mocks.asset.deleteFiles).toHaveBeenCalledWith([
        { id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
        {
          id: 'file-2',
          assetId: 'asset-id',
          type: AssetFileType.Thumbnail,
          path: '/old/thumbnail.jpg',
          isEdited: false,
        },
      ]);
      expect(mocks.job.queue).toHaveBeenCalledWith({
        name: JobName.FileDelete,
@@ -3376,11 +3445,19 @@
    it('should delete non-existent file types when newPath is not provided', async () => {
      const asset = {
        id: 'asset-id',
        files: [{ id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' }],
        files: [
          {
            id: 'file-1',
            assetId: 'asset-id',
            type: AssetFileType.Preview,
            path: '/old/preview.jpg',
            isEdited: false,
          },
        ],
      };

      await sut['syncFiles'](asset, [
        { type: AssetFileType.Thumbnail }, // file doesn't exist, newPath not provided
        { type: AssetFileType.Thumbnail, isEdited: false }, // file doesn't exist, newPath not provided
      ]);

      expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();

@@ -8,7 +8,6 @@ import { AssetEditAction, CropParameters } from 'src/dtos/editing.dto';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import {
  AssetFileType,
  AssetPathType,
  AssetType,
  AssetVisibility,
  AudioCodec,
@@ -50,6 +49,7 @@ interface UpsertFileOptions {
  assetId: string;
  type: AssetFileType;
  path: string;
  isEdited: boolean;
}

type ThumbnailAsset = NonNullable<Awaited<ReturnType<AssetJobRepository['getForGenerateThumbnailJob']>>>;
@@ -160,9 +160,9 @@ export class MediaService extends BaseService {
      return JobStatus.Failed;
    }

    await this.storageCore.moveAssetImage(asset, AssetPathType.FullSize, image.fullsize.format);
    await this.storageCore.moveAssetImage(asset, AssetPathType.Preview, image.preview.format);
    await this.storageCore.moveAssetImage(asset, AssetPathType.Thumbnail, image.thumbnail.format);
    await this.storageCore.moveAssetImage(asset, AssetFileType.FullSize, image.fullsize.format);
    await this.storageCore.moveAssetImage(asset, AssetFileType.Preview, image.preview.format);
    await this.storageCore.moveAssetImage(asset, AssetFileType.Thumbnail, image.thumbnail.format);
    await this.storageCore.moveAssetVideo(asset);

    return JobStatus.Success;
@@ -236,9 +236,9 @@ export class MediaService extends BaseService {
    }

    await this.syncFiles(asset, [
      { type: AssetFileType.Preview, newPath: generated.previewPath },
      { type: AssetFileType.Thumbnail, newPath: generated.thumbnailPath },
      { type: AssetFileType.FullSize, newPath: generated.fullsizePath },
      { type: AssetFileType.Preview, newPath: generated.previewPath, isEdited: false },
      { type: AssetFileType.Thumbnail, newPath: generated.thumbnailPath, isEdited: false },
      { type: AssetFileType.FullSize, newPath: generated.fullsizePath, isEdited: false },
    ]);

    const editiedGenerated = await this.generateEditedThumbnails(asset);
@@ -307,16 +307,16 @@

  private async generateImageThumbnails(asset: ThumbnailAsset, useEdits: boolean = false) {
    const { image } = await this.getConfig({ withCache: true });
    const previewPath = StorageCore.getImagePath(
      asset,
      useEdits ? AssetPathType.EditedPreview : AssetPathType.Preview,
      image.preview.format,
    );
    const thumbnailPath = StorageCore.getImagePath(
      asset,
      useEdits ? AssetPathType.EditedThumbnail : AssetPathType.Thumbnail,
      image.thumbnail.format,
    );
    const previewPath = StorageCore.getImagePath(asset, {
      fileType: AssetFileType.Preview,
      isEdited: useEdits,
      format: image.preview.format,
    });
    const thumbnailPath = StorageCore.getImagePath(asset, {
      fileType: AssetFileType.Thumbnail,
      isEdited: useEdits,
      format: image.thumbnail.format,
    });
    this.storageCore.ensureFolders(previewPath);

    // Handle embedded preview extraction for RAW files
@@ -343,11 +343,11 @@

    if (convertFullsize) {
      // convert a new fullsize image from the same source as the thumbnail
      fullsizePath = StorageCore.getImagePath(
        asset,
        useEdits ? AssetPathType.EditedFullSize : AssetPathType.FullSize,
        image.fullsize.format,
      );
      fullsizePath = StorageCore.getImagePath(asset, {
        fileType: AssetFileType.FullSize,
        isEdited: useEdits,
        format: image.fullsize.format,
      });
      const fullsizeOptions = {
        format: image.fullsize.format,
        quality: image.fullsize.quality,
@@ -355,7 +355,11 @@
      };
      promises.push(this.mediaRepository.generateThumbnail(data, fullsizeOptions, fullsizePath));
    } else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.Jpeg) {
      fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, extracted.format);
      fullsizePath = StorageCore.getImagePath(asset, {
        fileType: AssetFileType.FullSize,
        format: extracted.format,
        isEdited: false,
      });
      this.storageCore.ensureFolders(fullsizePath);

      // Write the buffer to disk with essential EXIF data
@@ -489,8 +493,16 @@

  private async generateVideoThumbnails(asset: ThumbnailPathEntity & { originalPath: string }) {
    const { image, ffmpeg } = await this.getConfig({ withCache: true });
    const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format);
    const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format);
    const previewPath = StorageCore.getImagePath(asset, {
      fileType: AssetFileType.Preview,
      format: image.preview.format,
      isEdited: false,
    });
    const thumbnailPath = StorageCore.getImagePath(asset, {
      fileType: AssetFileType.Thumbnail,
      format: image.thumbnail.format,
      isEdited: false,
    });
    this.storageCore.ensureFolders(previewPath);

    const { format, audioStreams, videoStreams } = await this.mediaRepository.probe(asset.originalPath);
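
The positional (asset, pathType, format) signature becomes an options object keyed by file type and edit state. Judging by the expected paths in the specs, the generated name is roughly `<assetId>_<fileType>[_edited].<ext>` under a two-level shard of the asset id; a sketch under those assumptions (the real StorageCore.getImagePath may differ):

import path from 'node:path';

enum AssetFileTypeSketch {
  FullSize = 'fullsize',
  Preview = 'preview',
  Thumbnail = 'thumbnail',
}

// Assumption: reconstructs paths like /data/thumbs/user-id/as/se/asset-id_preview.jpeg
// and .../asset-id_preview_edited.jpeg seen in the specs above.
const getImagePathSketch = (
  asset: { id: string; ownerId: string },
  { fileType, isEdited, format }: { fileType: AssetFileTypeSketch; isEdited: boolean; format: string },
) =>
  path.join(
    '/data/thumbs',
    asset.ownerId,
    asset.id.slice(0, 2), // first shard level, e.g. 'as'
    asset.id.slice(2, 4), // second shard level, e.g. 'se'
    `${asset.id}_${fileType}${isEdited ? '_edited' : ''}.${format}`,
  );
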
@@ -779,18 +791,18 @@

  private async syncFiles(
    asset: { id: string; files: AssetFile[] },
    files: { type: AssetFileType; newPath?: string }[],
    files: { type: AssetFileType; newPath?: string; isEdited: boolean }[],
  ) {
    const toUpsert: UpsertFileOptions[] = [];
    const pathsToDelete: string[] = [];
    const toDelete: AssetFile[] = [];

    for (const { type, newPath } of files) {
      const existingFile = asset.files.find((file) => file.type === type);
    for (const { type, newPath, isEdited } of files) {
      const existingFile = asset.files.find((file) => file.type === type && file.isEdited === isEdited);

      // upsert new file path
      if (newPath && existingFile?.path !== newPath) {
        toUpsert.push({ assetId: asset.id, path: newPath, type });
        toUpsert.push({ assetId: asset.id, path: newPath, type, isEdited });

        // delete old file from disk
        if (existingFile) {
@@ -829,9 +841,9 @@
    const generated = asset.edits.length > 0 ? await this.generateImageThumbnails(asset, true) : undefined;

    await this.syncFiles(asset, [
      { type: AssetFileType.PreviewEdited, newPath: generated?.previewPath },
      { type: AssetFileType.ThumbnailEdited, newPath: generated?.thumbnailPath },
      { type: AssetFileType.FullSizeEdited, newPath: generated?.fullsizePath },
      { type: AssetFileType.Preview, newPath: generated?.previewPath, isEdited: true },
      { type: AssetFileType.Thumbnail, newPath: generated?.thumbnailPath, isEdited: true },
      { type: AssetFileType.FullSize, newPath: generated?.fullsizePath, isEdited: true },
    ]);

    const crop = asset.edits.find((e) => e.action === AssetEditAction.Crop);

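syncFiles is effectively a reconciliation keyed on (type, isEdited): upsert when a new path arrives, drop the row when no path is desired, and collect replaced paths for disk cleanup. A self-contained sketch of that decision table, simplified from the hunk above (AssetFileSketch is a local stand-in for the real AssetFile type):

interface AssetFileSketch {
  id: string;
  assetId: string;
  type: string;
  path: string;
  isEdited: boolean;
}

// Simplified reconciliation mirroring the diff: match existing rows on
// (type, isEdited), upsert changed paths, and collect rows/paths to delete.
function reconcileFiles(
  asset: { id: string; files: AssetFileSketch[] },
  desired: { type: string; newPath?: string; isEdited: boolean }[],
) {
  const toUpsert: Omit<AssetFileSketch, 'id'>[] = [];
  const toDelete: AssetFileSketch[] = [];
  const pathsToDelete: string[] = [];

  for (const { type, newPath, isEdited } of desired) {
    const existing = asset.files.find((file) => file.type === type && file.isEdited === isEdited);
    if (newPath && existing?.path !== newPath) {
      toUpsert.push({ assetId: asset.id, path: newPath, type, isEdited });
      if (existing) {
        pathsToDelete.push(existing.path); // old file on disk is now orphaned
      }
    } else if (!newPath && existing) {
      toDelete.push(existing); // no desired path: drop the row and the file
      pathsToDelete.push(existing.path);
    }
  }

  return { toUpsert, toDelete, pathsToDelete };
}
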
@@ -35,7 +35,7 @@ const forSidecarJob = (
  asset: {
    id?: string;
    originalPath?: string;
    files?: { id: string; type: AssetFileType; path: string }[];
    files?: { id: string; type: AssetFileType; path: string; isEdited: boolean }[];
  } = {},
) => {
  return {
@@ -387,6 +387,7 @@ describe(MetadataService.name, () => {

    it('should extract tags from TagsList', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent'] } } as any);
      mockReadTags({ TagsList: ['Parent'] });
      mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -397,6 +398,7 @@

    it('should extract hierarchy from TagsList', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child'] } } as any);
      mockReadTags({ TagsList: ['Parent/Child'] });
      mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
      mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -417,6 +419,7 @@

    it('should extract tags from Keywords as a string', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent'] } } as any);
      mockReadTags({ Keywords: 'Parent' });
      mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -427,6 +430,7 @@

    it('should extract tags from Keywords as a list', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent'] } } as any);
      mockReadTags({ Keywords: ['Parent'] });
      mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -437,6 +441,7 @@

    it('should extract tags from Keywords as a list with a number', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent', '2024'] } } as any);
      mockReadTags({ Keywords: ['Parent', 2024] });
      mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -448,6 +453,7 @@

    it('should extract hierarchal tags from Keywords', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child'] } } as any);
      mockReadTags({ Keywords: 'Parent/Child' });
      mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -467,6 +473,7 @@

    it('should ignore Keywords when TagsList is present', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child', 'Child'] } } as any);
      mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
      mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -486,6 +493,7 @@

    it('should extract hierarchy from HierarchicalSubject', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child', 'TagA'] } } as any);
      mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
      mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
      mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -507,6 +515,7 @@

    it('should extract tags from HierarchicalSubject as a list with a number', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent', '2024'] } } as any);
      mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
      mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -518,6 +527,7 @@

    it('should extract ignore / characters in a HierarchicalSubject tag', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Mom|Dad'] } } as any);
      mockReadTags({ HierarchicalSubject: ['Mom/Dad'] });
      mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);

@@ -532,6 +542,7 @@

    it('should ignore HierarchicalSubject when TagsList is present', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child', 'Parent2/Child2'] } } as any);
      mockReadTags({ HierarchicalSubject: ['Parent2|Child2'], TagsList: ['Parent/Child'] });
      mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

@@ -896,6 +907,7 @@
      ProfileDescription: 'extensive description',
      ProjectionType: 'equirectangular',
      tz: 'UTC-11:30',
      TagsList: ['parent/child'],
      Rating: 3,
    };

@@ -935,6 +947,7 @@
        country: null,
        state: null,
        city: null,
        tags: ['parent/child'],
      },
      { lockedPropertiesBehavior: 'skip' },
    );
@@ -1084,6 +1097,7 @@
          id: 'some-id',
          type: AssetFileType.Sidecar,
          path: '/path/to/something',
          isEdited: false,
        },
      ],
    });
@@ -1691,7 +1705,7 @@
    it('should unset sidecar path if file no longer exist', async () => {
      const asset = forSidecarJob({
        originalPath: '/path/to/IMG_123.jpg',
        files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
        files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar, isEdited: false }],
      });
      mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);
      mocks.storage.checkFileExists.mockResolvedValue(false);
@@ -1704,7 +1718,7 @@
    it('should do nothing if the sidecar file still exists', async () => {
      const asset = forSidecarJob({
        originalPath: '/path/to/IMG_123.jpg',
        files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar }],
        files: [{ id: 'sidecar', path: '/path/to/IMG_123.jpg.xmp', type: AssetFileType.Sidecar, isEdited: false }],
      });

      mocks.assetJob.getForSidecarCheckJob.mockResolvedValue(asset);

@@ -254,6 +254,8 @@ export class MetadataService extends BaseService {
      }
    }

    const tags = this.getTagList(exifTags);

    const exifData: Insertable<AssetExifTable> = {
      assetId: asset.id,

@@ -296,6 +298,8 @@
      // grouping
      livePhotoCID: (exifTags.ContentIdentifier || exifTags.MediaGroupUUID) ?? null,
      autoStackId: this.getAutoStackId(exifTags),

      tags: tags.length > 0 ? tags : null,
    };

    const isSidewards = exifTags.Orientation && this.isOrientationSidewards(exifTags.Orientation);
@@ -316,9 +320,10 @@
        width: asset.width == null ? assetWidth : undefined,
        height: asset.height == null ? assetHeight : undefined,
      }),
      this.applyTagList(asset, exifTags),
    ];

    await this.applyTagList(asset);

    if (this.isMotionPhoto(asset, exifTags)) {
      promises.push(this.applyMotionPhotos(asset, exifTags, dates, stats));
    }
@@ -405,35 +410,35 @@

  @OnEvent({ name: 'AssetTag' })
  async handleTagAsset({ assetId }: ArgOf<'AssetTag'>) {
    await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId, tags: true } });
    await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId } });
  }

  @OnEvent({ name: 'AssetUntag' })
  async handleUntagAsset({ assetId }: ArgOf<'AssetUntag'>) {
    await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId, tags: true } });
    await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId } });
  }

  @OnJob({ name: JobName.SidecarWrite, queue: QueueName.Sidecar })
  async handleSidecarWrite(job: JobOf<JobName.SidecarWrite>): Promise<JobStatus> {
    const { id, tags } = job;
    const { id } = job;
    const asset = await this.assetJobRepository.getForSidecarWriteJob(id);
    if (!asset) {
      return JobStatus.Failed;
    }

    const lockedProperties = await this.assetJobRepository.getLockedPropertiesForMetadataExtraction(id);
    const tagsList = (asset.tags || []).map((tag) => tag.value);

    const { sidecarFile } = getAssetFiles(asset.files);
    const sidecarPath = sidecarFile?.path || `${asset.originalPath}.xmp`;

    const { description, dateTimeOriginal, latitude, longitude, rating } = _.pick(
    const { description, dateTimeOriginal, latitude, longitude, rating, tags } = _.pick(
      {
        description: asset.exifInfo.description,
        dateTimeOriginal: asset.exifInfo.dateTimeOriginal,
        latitude: asset.exifInfo.latitude,
        longitude: asset.exifInfo.longitude,
        rating: asset.exifInfo.rating,
        tags: asset.exifInfo.tags,
      },
      lockedProperties,
    );
@@ -446,7 +451,7 @@
      GPSLatitude: latitude,
      GPSLongitude: longitude,
      Rating: rating,
      TagsList: tags ? tagsList : undefined,
      TagsList: tags?.length ? tags : undefined,
    },
    _.isUndefined,
  );
@@ -560,11 +565,14 @@
    return tags;
  }

  private async applyTagList(asset: { id: string; ownerId: string }, exifTags: ImmichTags) {
    const tags = this.getTagList(exifTags);
    const results = await upsertTags(this.tagRepository, { userId: asset.ownerId, tags });
  private async applyTagList({ id, ownerId }: { id: string; ownerId: string }) {
    const asset = await this.assetRepository.getById(id, { exifInfo: true });
    const results = await upsertTags(this.tagRepository, {
      userId: ownerId,
      tags: asset?.exifInfo?.tags ?? [],
    });
    await this.tagRepository.replaceAssetTags(
      asset.id,
      id,
      results.map((tag) => tag.id),
    );
  }

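handleSidecarWrite now routes every writable field through a single _.pick(values, lockedProperties) call and strips the undefined entries before handing the object to the sidecar writer. A small standalone illustration of that lodash pattern, with made-up values:

import _ from 'lodash';

// Illustrative values only: pick keeps just the locked keys,
// omitBy then removes entries that ended up undefined.
const values = { description: 'hello', rating: undefined, tags: ['a/b'] };
const lockedProperties = ['description', 'rating'];

const toWrite = _.omitBy(_.pick(values, lockedProperties), _.isUndefined);
// => { description: 'hello' } (rating was undefined, tags was not locked)
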
@@ -372,7 +372,7 @@ describe(NotificationService.name, () => {
      mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
      mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
      mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([
        { id: '1', type: AssetFileType.Thumbnail, path: 'path-to-thumb.jpg' },
        { id: '1', type: AssetFileType.Thumbnail, path: 'path-to-thumb.jpg', isEdited: false },
      ]);

      await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);

@@ -403,7 +403,7 @@ describe(NotificationService.name, () => {
      mocks.systemMetadata.get.mockResolvedValue({ server: {} });
      mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
      mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
      mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([assetStub.image.files[2]]);
      mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([{ ...assetStub.image.files[2], isEdited: false }]);

      await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
      expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith(
@@ -117,7 +117,7 @@ export class SmartInfoService extends BaseService {

    const newConfig = await this.getConfig({ withCache: true });
    if (machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName) {
      // Skip the job if the the model has changed since the embedding was generated.
      // Skip the job if the model has changed since the embedding was generated.
      return JobStatus.Skipped;
    }
@@ -240,11 +240,11 @@ export class StorageTemplateService extends BaseService {
        assetInfo: { sizeInBytes: fileSizeInByte, checksum },
      });

      const sidecarPath = getAssetFile(asset.files, AssetFileType.Sidecar)?.path;
      const sidecarPath = getAssetFile(asset.files, AssetFileType.Sidecar, { isEdited: false })?.path;
      if (sidecarPath) {
        await this.storageCore.moveFile({
          entityId: id,
          pathType: AssetPathType.Sidecar,
          pathType: AssetFileType.Sidecar,
          oldPath: sidecarPath,
          newPath: `${newPath}.xmp`,
        });
@@ -191,6 +191,7 @@ describe(TagService.name, () => {
    it('should upsert records', async () => {
      mocks.access.tag.checkOwnerAccess.mockResolvedValue(new Set(['tag-1', 'tag-2']));
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2', 'asset-3']));
      mocks.asset.getById.mockResolvedValue({ tags: [{ value: 'tag-1' }, { value: 'tag-2' }] } as any);
      mocks.tag.upsertAssetIds.mockResolvedValue([
        { tagId: 'tag-1', assetId: 'asset-1' },
        { tagId: 'tag-1', assetId: 'asset-2' },

@@ -204,6 +205,18 @@ describe(TagService.name, () => {
      ).resolves.toEqual({
        count: 6,
      });
      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
        { assetId: 'asset-1', tags: ['tag-1', 'tag-2'] },
        { lockedPropertiesBehavior: 'append' },
      );
      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
        { assetId: 'asset-2', tags: ['tag-1', 'tag-2'] },
        { lockedPropertiesBehavior: 'append' },
      );
      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
        { assetId: 'asset-3', tags: ['tag-1', 'tag-2'] },
        { lockedPropertiesBehavior: 'append' },
      );
      expect(mocks.tag.upsertAssetIds).toHaveBeenCalledWith([
        { tagId: 'tag-1', assetId: 'asset-1' },
        { tagId: 'tag-1', assetId: 'asset-2' },

@@ -229,6 +242,7 @@ describe(TagService.name, () => {
      mocks.tag.get.mockResolvedValue(tagStub.tag);
      mocks.tag.getAssetIds.mockResolvedValue(new Set(['asset-1']));
      mocks.tag.addAssetIds.mockResolvedValue();
      mocks.asset.getById.mockResolvedValue({ tags: [{ value: 'tag-1' }] } as any);
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-2']));

      await expect(

@@ -240,6 +254,14 @@ describe(TagService.name, () => {
        { id: 'asset-2', success: true },
      ]);

      expect(mocks.asset.upsertExif).not.toHaveBeenCalledWith(
        { assetId: 'asset-1', tags: ['tag-1'] },
        { lockedPropertiesBehavior: 'append' },
      );
      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
        { assetId: 'asset-2', tags: ['tag-1'] },
        { lockedPropertiesBehavior: 'append' },
      );
      expect(mocks.tag.getAssetIds).toHaveBeenCalledWith('tag-1', ['asset-1', 'asset-2']);
      expect(mocks.tag.addAssetIds).toHaveBeenCalledWith('tag-1', ['asset-2']);
    });
@@ -90,6 +90,7 @@ export class TagService extends BaseService {

    const results = await this.tagRepository.upsertAssetIds(items);
    for (const assetId of new Set(results.map((item) => item.assetId))) {
      await this.updateTags(assetId);
      await this.eventRepository.emit('AssetTag', { assetId });
    }

@@ -107,6 +108,7 @@ export class TagService extends BaseService {

    for (const { id: assetId, success } of results) {
      if (success) {
        await this.updateTags(assetId);
        await this.eventRepository.emit('AssetTag', { assetId });
      }
    }

@@ -125,6 +127,7 @@ export class TagService extends BaseService {

    for (const { id: assetId, success } of results) {
      if (success) {
        await this.updateTags(assetId);
        await this.eventRepository.emit('AssetUntag', { assetId });
      }
    }

@@ -145,4 +148,12 @@ export class TagService extends BaseService {
    }
    return tag;
  }

  private async updateTags(assetId: string) {
    const asset = await this.assetRepository.getById(assetId, { tags: true });
    await this.assetRepository.upsertExif(
      { assetId, tags: asset?.tags?.map(({ value }) => value) ?? [] },
      { lockedPropertiesBehavior: 'append' },
    );
  }
}
@@ -4,6 +4,7 @@ import { Asset, AssetFile } from 'src/database';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
import {
  AssetOrder,
  AssetType,

@@ -357,7 +358,7 @@ export type JobItem =
  // Sidecar Scanning
  | { name: JobName.SidecarQueueAll; data: IBaseJob }
  | { name: JobName.SidecarCheck; data: IEntityJob }
  | { name: JobName.SidecarWrite; data: ISidecarWriteJob }
  | { name: JobName.SidecarWrite; data: IEntityJob }

  // Facial Recognition
  | { name: JobName.AssetDetectFacesQueueAll; data: IBaseJob }

@@ -533,7 +534,9 @@ export interface MemoryData {

export type VersionCheckMetadata = { checkedAt: string; releaseVersion: string };
export type SystemFlags = { mountChecks: Record<StorageFolder, boolean> };
export type MaintenanceModeState = { isMaintenanceMode: true; secret: string } | { isMaintenanceMode: false };
export type MaintenanceModeState =
  | { isMaintenanceMode: true; secret: string; action: SetMaintenanceModeDto }
  | { isMaintenanceMode: false };
export type MemoriesState = {
  /** memories have already been created through this date */
  lastOnThisDayDate: string;
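Note: the widened MaintenanceModeState union above now carries the originating action alongside the secret. A minimal sketch of how a consumer narrows it (the function name is illustrative, not part of this diff):

function describeState(state: MaintenanceModeState): string {
  if (state.isMaintenanceMode) {
    // narrowing on the discriminant exposes `secret` and `action` on this branch only
    return `maintenance active, requested action: ${JSON.stringify(state.action)}`;
  }
  return 'running normally';
}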
@@ -1,5 +1,5 @@
import { BadRequestException } from '@nestjs/common';
import { GeneratedImageType, StorageCore } from 'src/cores/storage.core';
import { StorageCore } from 'src/cores/storage.core';
import { AssetFile, Exif } from 'src/database';
import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto';
import { UploadFieldName } from 'src/dtos/asset-media.dto';

@@ -14,19 +14,19 @@ import { PartnerRepository } from 'src/repositories/partner.repository';
import { IBulkAsset, ImmichFile, UploadFile, UploadRequest } from 'src/types';
import { checkAccess } from 'src/utils/access';

export const getAssetFile = (files: AssetFile[], type: AssetFileType | GeneratedImageType) => {
  return files.find((file) => file.type === type);
export const getAssetFile = (files: AssetFile[], type: AssetFileType, { isEdited }: { isEdited: boolean }) => {
  return files.find((file) => file.type === type && file.isEdited === isEdited);
};

export const getAssetFiles = (files: AssetFile[]) => ({
  fullsizeFile: getAssetFile(files, AssetFileType.FullSize),
  previewFile: getAssetFile(files, AssetFileType.Preview),
  thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail),
  sidecarFile: getAssetFile(files, AssetFileType.Sidecar),
  fullsizeFile: getAssetFile(files, AssetFileType.FullSize, { isEdited: false }),
  previewFile: getAssetFile(files, AssetFileType.Preview, { isEdited: false }),
  thumbnailFile: getAssetFile(files, AssetFileType.Thumbnail, { isEdited: false }),
  sidecarFile: getAssetFile(files, AssetFileType.Sidecar, { isEdited: false }),

  editedFullsizeFile: getAssetFile(files, AssetFileType.FullSizeEdited),
  editedPreviewFile: getAssetFile(files, AssetFileType.PreviewEdited),
  editedThumbnailFile: getAssetFile(files, AssetFileType.ThumbnailEdited),
  editedFullsizeFile: getAssetFile(files, AssetFileType.FullSize, { isEdited: true }),
  editedPreviewFile: getAssetFile(files, AssetFileType.Preview, { isEdited: true }),
  editedThumbnailFile: getAssetFile(files, AssetFileType.Thumbnail, { isEdited: true }),
});

export const addAssets = async (
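Note: with the isEdited flag replacing the dedicated *Edited file types, callers pick a variant explicitly and can fall back from the edited file to the original. A small usage sketch built on the helpers above (assumes an asset with its files relation loaded):

const { previewFile, editedPreviewFile } = getAssetFiles(asset.files);
// prefer the edited preview when one exists, otherwise show the original
const displayPath = editedPreviewFile?.path ?? previewFile?.path;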
server/src/utils/database-backups.ts (new file, 494 lines)
@@ -0,0 +1,494 @@
import { BadRequestException } from '@nestjs/common';
import { debounce } from 'lodash';
import { DateTime } from 'luxon';
import path, { basename, join } from 'node:path';
import { PassThrough, Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { CacheControl, StorageFolder } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';

export function isValidDatabaseBackupName(filename: string) {
  return filename.match(/^[\d\w-.]+\.sql(?:\.gz)?$/);
}

export function isValidDatabaseRoutineBackupName(filename: string) {
  const oldBackupStyle = filename.match(/^immich-db-backup-\d+\.sql\.gz$/);
  // immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
  const newBackupStyle = filename.match(/^immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
  return oldBackupStyle || newBackupStyle;
}

export function isFailedDatabaseBackupName(filename: string) {
  return filename.match(/^immich-db-backup-.*\.sql\.gz\.tmp$/);
}

export function findVersion(filename: string) {
  return /-v(.*)-/.exec(filename)?.[1];
}
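Note: the validators above partition backup filenames by shape; sketched against the sample name from the comment in the code:

isValidDatabaseBackupName('immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz'); // match
isValidDatabaseRoutineBackupName('immich-db-backup-1690000000000.sql.gz'); // match (old style)
isFailedDatabaseBackupName('immich-db-backup-interrupted.sql.gz.tmp'); // match (leftover temp file)
findVersion('immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz'); // '1.136.0'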
type BackupRepos = {
  logger: LoggingRepository;
  storage: StorageRepository;
  config: ConfigRepository;
  process: ProcessRepository;
  database: DatabaseRepository;
  health: MaintenanceHealthRepository;
};

export class UnsupportedPostgresError extends Error {
  constructor(databaseVersion: string) {
    super(`Unsupported PostgreSQL version: ${databaseVersion}`);
  }
}

export async function buildPostgresLaunchArguments(
  { logger, config, database }: Pick<BackupRepos, 'logger' | 'config' | 'database'>,
  bin: 'pg_dump' | 'pg_dumpall' | 'psql',
  options: {
    singleTransaction?: boolean;
    username?: string;
  } = {},
): Promise<{
  bin: string;
  args: string[];
  databasePassword: string;
  databaseVersion: string;
  databaseMajorVersion?: number;
}> {
  const {
    database: { config: databaseConfig },
  } = config.getEnv();
  const isUrlConnection = databaseConfig.connectionType === 'url';

  const databaseVersion = await database.getPostgresVersion();
  const databaseSemver = semver.coerce(databaseVersion);
  const databaseMajorVersion = databaseSemver?.major;

  const args: string[] = [];

  if (isUrlConnection) {
    if (bin !== 'pg_dump') {
      args.push('--dbname');
    }

    let url = databaseConfig.url;
    if (URL.canParse(databaseConfig.url)) {
      const parsedUrl = new URL(databaseConfig.url);
      // remove known bad parameters
      parsedUrl.searchParams.delete('uselibpqcompat');

      if (options.username) {
        parsedUrl.username = options.username;
      }

      url = parsedUrl.toString();
    }

    args.push(url);
  } else {
    args.push(
      '--username',
      options.username ?? databaseConfig.username,
      '--host',
      databaseConfig.host,
      '--port',
      databaseConfig.port.toString(),
    );

    switch (bin) {
      case 'pg_dumpall': {
        args.push('--database');
        break;
      }
      case 'psql': {
        args.push('--dbname');
        break;
      }
    }

    args.push(databaseConfig.database);
  }

  switch (bin) {
    case 'pg_dump':
    case 'pg_dumpall': {
      args.push('--clean', '--if-exists');
      break;
    }
    case 'psql': {
      if (options.singleTransaction) {
        args.push(
          // don't commit any transaction on failure
          '--single-transaction',
          // exit with non-zero code on error
          '--set',
          'ON_ERROR_STOP=on',
        );
      }

      args.push(
        // used for progress monitoring
        '--echo-all',
        '--output=/dev/null',
      );
      break;
    }
  }

  if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
    logger.error(`Database Restore Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
    throw new UnsupportedPostgresError(databaseVersion);
  }

  return {
    bin: `/usr/lib/postgresql/${databaseMajorVersion}/bin/${bin}`,
    args,
    databasePassword: isUrlConnection ? new URL(databaseConfig.url).password : databaseConfig.password,
    databaseVersion,
    databaseMajorVersion,
  };
}

export async function createDatabaseBackup(
  { logger, storage, process: processRepository, ...pgRepos }: Omit<BackupRepos, 'health'>,
  filenamePrefix: string = '',
): Promise<string> {
  logger.debug(`Database Backup Started`);

  const { bin, args, databasePassword, databaseVersion, databaseMajorVersion } = await buildPostgresLaunchArguments(
    { logger, ...pgRepos },
    'pg_dump',
  );

  logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

  const filename = `${filenamePrefix}immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz`;
  const backupFilePath = join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
  const temporaryFilePath = `${backupFilePath}.tmp`;

  try {
    const pgdump = processRepository.spawnDuplexStream(bin, args, {
      env: {
        PATH: process.env.PATH,
        PGPASSWORD: databasePassword,
      },
    });

    const gzip = processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
    const fileStream = storage.createWriteStream(temporaryFilePath);

    await pipeline(pgdump, gzip, fileStream);
    await storage.rename(temporaryFilePath, backupFilePath);
  } catch (error) {
    logger.error(`Database Backup Failure: ${error}`);
    await storage
      .unlink(temporaryFilePath)
      .catch((error) => logger.error(`Failed to delete failed backup file: ${error}`));
    throw error;
  }

  logger.log(`Database Backup Success`);
  return backupFilePath;
}
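Note: stripped of the repository wrappers, createDatabaseBackup is a three-stage stream pipeline with a temp-file rename for atomicity. A standalone sketch of the same pattern (database name and password are placeholders, and zlib stands in for the external `gzip --rsyncable` process used above):

import { spawn } from 'node:child_process';
import { createWriteStream } from 'node:fs';
import { rename } from 'node:fs/promises';
import { pipeline } from 'node:stream/promises';
import { createGzip } from 'node:zlib';

async function dumpDatabase(backupPath: string): Promise<void> {
  const temporaryPath = `${backupPath}.tmp`;
  const pgDump = spawn('pg_dump', ['--clean', '--if-exists', 'immich'], {
    env: { PATH: process.env.PATH, PGPASSWORD: 'placeholder' },
  });
  // pg_dump stdout -> gzip -> temp file; rename only after a clean finish
  await pipeline(pgDump.stdout, createGzip(), createWriteStream(temporaryPath));
  await rename(temporaryPath, backupPath);
}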
const SQL_DROP_CONNECTIONS = `
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
  AND pid <> pg_backend_pid();
`;

const SQL_RESET_SCHEMA = `
-- re-create the default schema
DROP SCHEMA public CASCADE;
CREATE SCHEMA public;

-- restore access to schema
GRANT ALL ON SCHEMA public TO postgres;
GRANT ALL ON SCHEMA public TO public;
`;

async function* sql(inputStream: Readable, isPgClusterDump: boolean) {
  yield SQL_DROP_CONNECTIONS;
  yield isPgClusterDump
    ? String.raw`
\c postgres
`
    : SQL_RESET_SCHEMA;

  for await (const chunk of inputStream) {
    yield chunk;
  }
}

async function* sqlRollback(inputStream: Readable, isPgClusterDump: boolean) {
  yield SQL_DROP_CONNECTIONS;

  if (isPgClusterDump) {
    yield String.raw`
-- try to create database
-- may fail but script will continue running
CREATE DATABASE immich;

-- switch to database / newly created database
\c immich
`;
  }

  yield SQL_RESET_SCHEMA;

  for await (const chunk of inputStream) {
    yield chunk;
  }
}
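Note: both generators lean on Readable.from accepting an async generator, which prepends the preamble SQL to the dump stream without buffering the dump itself. A tiny self-contained illustration:

import { Readable } from 'node:stream';

async function* withPreamble(body: Readable): AsyncGenerator<string | Buffer> {
  yield '-- preamble statements run first\n';
  for await (const chunk of body) {
    yield chunk; // pass the dump through unchanged
  }
}

// Readable.from() turns the generator into a stream psql can consume.
const combined = Readable.from(withPreamble(Readable.from(['SELECT 1;\n'])));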
export async function restoreDatabaseBackup(
  { logger, storage, process: processRepository, database: databaseRepository, health, ...pgRepos }: BackupRepos,
  filename: string,
  progressCb?: (action: 'backup' | 'restore' | 'migrations' | 'rollback', progress: number) => void,
): Promise<void> {
  logger.debug(`Database Restore Started`);

  let complete = false;
  try {
    if (!isValidDatabaseBackupName(filename)) {
      throw new Error('Invalid backup file format!');
    }

    const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
    await storage.stat(backupFilePath); // => check file exists

    let isPgClusterDump = false;
    const version = findVersion(filename);
    if (version && semver.satisfies(version, '<= 2.4')) {
      isPgClusterDump = true;
    }

    const { bin, args, databasePassword, databaseMajorVersion } = await buildPostgresLaunchArguments(
      { logger, database: databaseRepository, ...pgRepos },
      'psql',
      {
        singleTransaction: !isPgClusterDump,
        username: isPgClusterDump ? 'postgres' : undefined,
      },
    );

    progressCb?.('backup', 0.05);

    const restorePointFilePath = await createDatabaseBackup(
      { logger, storage, process: processRepository, database: databaseRepository, ...pgRepos },
      'restore-point-',
    );

    logger.log(`Database Restore Starting. Database Version: ${databaseMajorVersion}`);

    let inputStream: Readable;
    if (backupFilePath.endsWith('.gz')) {
      const fileStream = storage.createPlainReadStream(backupFilePath);
      const gunzip = storage.createGunzip();
      fileStream.pipe(gunzip);
      inputStream = gunzip;
    } else {
      inputStream = storage.createPlainReadStream(backupFilePath);
    }

    const sqlStream = Readable.from(sql(inputStream, isPgClusterDump));
    const psql = processRepository.spawnDuplexStream(bin, args, {
      env: {
        PATH: process.env.PATH,
        PGPASSWORD: databasePassword,
      },
    });

    const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
      if (complete) {
        return;
      }

      logger.log(`Restore progress ~ ${(progress * 100).toFixed(2)}%`);
      progressCb?.('restore', progress);
    });

    await pipeline(sqlStream, progressSource, psql, progressSink);

    try {
      progressCb?.('migrations', 0.9);
      await databaseRepository.runMigrations();
      await health.checkApiHealth();
    } catch (error) {
      progressCb?.('rollback', 0);

      const fileStream = storage.createPlainReadStream(restorePointFilePath);
      const gunzip = storage.createGunzip();
      fileStream.pipe(gunzip);
      inputStream = gunzip;

      const sqlStream = Readable.from(sqlRollback(inputStream, isPgClusterDump));
      const psql = processRepository.spawnDuplexStream(bin, args, {
        env: {
          PATH: process.env.PATH,
          PGPASSWORD: databasePassword,
        },
      });

      const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
        if (complete) {
          return;
        }

        logger.log(`Rollback progress ~ ${(progress * 100).toFixed(2)}%`);
        progressCb?.('rollback', progress);
      });

      await pipeline(sqlStream, progressSource, psql, progressSink);

      throw error;
    }
  } catch (error) {
    logger.error(`Database Restore Failure: ${error}`);
    throw error;
  } finally {
    complete = true;
  }

  logger.log(`Database Restore Success`);
}
export async function deleteDatabaseBackup({ storage }: Pick<BackupRepos, 'storage'>, files: string[]): Promise<void> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);

  if (files.some((filename) => !isValidDatabaseBackupName(filename))) {
    throw new BadRequestException('Invalid backup name!');
  }

  await Promise.all(files.map((filename) => storage.unlink(path.join(backupsFolder, filename))));
}

export async function listDatabaseBackups({
  storage,
}: Pick<BackupRepos, 'storage'>): Promise<{ filename: string; filesize: number }[]> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
  const files = await storage.readdir(backupsFolder);

  const validFiles = files
    .filter((fn) => isValidDatabaseBackupName(fn))
    .toSorted((a, b) => (a.startsWith('uploaded-') === b.startsWith('uploaded-') ? a.localeCompare(b) : 1))
    .toReversed();

  const backups = await Promise.all(
    validFiles.map(async (filename) => {
      const stats = await storage.stat(path.join(backupsFolder, filename));
      return { filename, filesize: stats.size };
    }),
  );

  return backups;
}

export async function uploadDatabaseBackup(
  { storage }: Pick<BackupRepos, 'storage'>,
  file: Express.Multer.File,
): Promise<void> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
  const fn = basename(file.originalname);
  if (!isValidDatabaseBackupName(fn)) {
    throw new BadRequestException('Invalid backup name!');
  }

  const path = join(backupsFolder, `uploaded-${fn}`);
  await storage.createOrOverwriteFile(path, file.buffer);
}

export function downloadDatabaseBackup(fileName: string) {
  if (!isValidDatabaseBackupName(fileName)) {
    throw new BadRequestException('Invalid backup name!');
  }

  const path = join(StorageCore.getBaseFolder(StorageFolder.Backups), fileName);

  return {
    path,
    fileName,
    cacheControl: CacheControl.PrivateWithoutCache,
    contentType: fileName.endsWith('.gz') ? 'application/gzip' : 'application/sql',
  };
}
function createSqlProgressStreams(cb: (progress: number) => void) {
  const STDIN_START_MARKER = new TextEncoder().encode('FROM stdin');
  const STDIN_END_MARKER = new TextEncoder().encode(String.raw`\.`);

  let readingStdin = false;
  let sequenceIdx = 0;

  let linesSent = 0;
  let linesProcessed = 0;

  const startedAt = +Date.now();
  const cbDebounced = debounce(
    () => {
      const progress = source.writableEnded
        ? Math.min(1, linesProcessed / linesSent)
        : // progress simulation while we're in an indeterminate state
          Math.min(0.3, 0.1 + (Date.now() - startedAt) / 1e4);
      cb(progress);
    },
    100,
    {
      maxWait: 100,
    },
  );

  let lastByte = -1;
  const source = new PassThrough({
    transform(chunk, _encoding, callback) {
      for (const byte of chunk) {
        if (!readingStdin && byte === 10 && lastByte !== 10) {
          linesSent += 1;
        }

        lastByte = byte;

        const sequence = readingStdin ? STDIN_END_MARKER : STDIN_START_MARKER;
        if (sequence[sequenceIdx] === byte) {
          sequenceIdx += 1;

          if (sequence.length === sequenceIdx) {
            sequenceIdx = 0;
            readingStdin = !readingStdin;
          }
        } else {
          sequenceIdx = 0;
        }
      }

      cbDebounced();
      this.push(chunk);
      callback();
    },
  });

  const sink = new Writable({
    write(chunk, _encoding, callback) {
      for (const byte of chunk) {
        if (byte === 10) {
          linesProcessed++;
        }
      }

      cbDebounced();
      callback();
    },
  });

  return [source, sink];
}
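Note: the source side counts statement lines while a byte-level scanner skips COPY ... FROM stdin data blocks (delimited by the \. terminator), and the sink counts the lines psql echoes back under --echo-all. A hedged usage sketch, with a PassThrough standing in for the spawned psql process:

import { PassThrough, Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

const psqlLike = new PassThrough(); // placeholder for processRepository.spawnDuplexStream(bin, args)
const [source, sink] = createSqlProgressStreams((progress) => {
  console.log(`restore ~${(progress * 100).toFixed(0)}%`);
});
// statements flow through `source` into psql; echoed output drains into `sink`
await pipeline(Readable.from(['SELECT 1;\nSELECT 2;\n']), source, psqlLike, sink);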
@@ -42,7 +42,7 @@ const cacheControlHeaders: Record<CacheControl, string | null> = {
export const sendFile = async (
  res: Response,
  next: NextFunction,
  handler: () => Promise<ImmichFileResponse>,
  handler: () => Promise<ImmichFileResponse> | ImmichFileResponse,
  logger: LoggingRepository,
): Promise<void> => {
  // promisified version of 'res.sendFile' for cleaner async handling
@@ -1,6 +1,59 @@
import { createAdapter } from '@socket.io/redis-adapter';
import Redis from 'ioredis';
import { SignJWT } from 'jose';
import { randomBytes } from 'node:crypto';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { join } from 'node:path';
import { Server as SocketIO } from 'socket.io';
import { StorageCore } from 'src/cores/storage.core';
import { MaintenanceAuthDto, MaintenanceDetectInstallResponseDto } from 'src/dtos/maintenance.dto';
import { StorageFolder } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { AppRestartEvent } from 'src/repositories/event.repository';
import { StorageRepository } from 'src/repositories/storage.repository';

export function sendOneShotAppRestart(state: AppRestartEvent): void {
  const server = new SocketIO();
  const { redis } = new ConfigRepository().getEnv();
  const pubClient = new Redis(redis);
  const subClient = pubClient.duplicate();
  server.adapter(createAdapter(pubClient, subClient));

  /**
   * Keep trying until we manage to stop Immich.
   *
   * Sometimes there appear to be communication
   * issues with the other servers.
   *
   * This issue only occurs with this method.
   */
  async function tryTerminate() {
    while (true) {
      try {
        const responses = await server.serverSideEmitWithAck('AppRestart', state);
        if (responses.length > 0) {
          return;
        }
      } catch (error) {
        console.error(error);
        console.error('Encountered an error while telling Immich to stop.');
      }

      console.info(
        "\nIt doesn't appear that Immich stopped, trying again in a moment.\nIf Immich is already not running, you can ignore this error.",
      );

      await new Promise((r) => setTimeout(r, 1e3));
    }
  }

  // => corresponds to notification.service.ts#onAppRestart
  server.emit('AppRestartV1', state, () => {
    void tryTerminate().finally(() => {
      pubClient.disconnect();
      subClient.disconnect();
    });
  });
}
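Note: the one-shot emitter above relies on a socket.io Server that never listens on a port; attaching the Redis adapter is enough for emits to fan out to the running workers. Reduced to its core (connection options are deployment-specific placeholders):

import { createAdapter } from '@socket.io/redis-adapter';
import Redis from 'ioredis';
import { Server } from 'socket.io';

const pub = new Redis();
const sub = pub.duplicate();
const emitter = new Server(); // no HTTP listener needed
emitter.adapter(createAdapter(pub, sub));
emitter.emit('AppRestartV1', { reason: 'example' }); // broadcast through Redis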
export async function createMaintenanceLoginUrl(
  baseUrl: string,

@@ -23,3 +76,37 @@ export async function signMaintenanceJwt(secret: string, data: MaintenanceAuthDt
export function generateMaintenanceSecret(): string {
  return randomBytes(64).toString('hex');
}

export async function detectPriorInstall(
  storageRepository: StorageRepository,
): Promise<MaintenanceDetectInstallResponseDto> {
  return {
    storage: await Promise.all(
      Object.values(StorageFolder).map(async (folder) => {
        const path = StorageCore.getBaseFolder(folder);
        const files = await storageRepository.readdir(path);
        const filename = join(StorageCore.getBaseFolder(folder), '.immich');

        let readable = false,
          writable = false;

        try {
          await storageRepository.readFile(filename);
          readable = true;

          await storageRepository.overwriteFile(filename, Buffer.from(`${Date.now()}`));
          writable = true;
        } catch {
          // no-op
        }

        return {
          folder,
          readable,
          writable,
          files: files.filter((fn) => fn !== '.immich').length,
        };
      }),
    ),
  };
}
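Note: detectPriorInstall probes each storage folder with a read-then-overwrite of the .immich marker, so a failure partway through leaves the flags it never reached as false. The probe in isolation, rewritten against plain node:fs for illustration:

import { readFile, writeFile } from 'node:fs/promises';

async function probeMarker(filename: string): Promise<{ readable: boolean; writable: boolean }> {
  let readable = false;
  let writable = false;
  try {
    await readFile(filename);
    readable = true;
    await writeFile(filename, Buffer.from(`${Date.now()}`));
    writable = true;
  } catch {
    // whichever step failed leaves its flag (and any later one) false
  }
  return { readable, writable };
}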
@@ -153,6 +153,16 @@ export class IntegrityReportTypeParamDto {
  type!: IntegrityReportType;
}

export class FilenameParamDto {
  @IsNotEmpty()
  @IsString()
  @ApiProperty({ format: 'string' })
  @Matches(/^[a-zA-Z0-9_\-.]+$/, {
    message: 'Filename contains invalid characters',
  })
  filename!: string;
}

type PinCodeOptions = { optional?: boolean } & OptionalOptions;
export const PinCode = (options?: PinCodeOptions & ApiPropertyOptions) => {
  const { optional, nullable, emptyToNull, ...apiPropertyOptions } = {
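Note: the @Matches pattern on FilenameParamDto allows only word characters, dashes, and dots; path separators are rejected outright, so traversal attempts fail validation before any filesystem access. For instance:

const FILENAME_PATTERN = /^[a-zA-Z0-9_\-.]+$/;
FILENAME_PATTERN.test('immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz'); // true
FILENAME_PATTERN.test('../../etc/passwd'); // false - slashes are not permitted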
@@ -12,12 +12,11 @@ async function bootstrap() {

  const app = await NestFactory.create<NestExpressApplication>(MaintenanceModule, { bufferLogs: true });
  app.get(AppRepository).setCloseFn(() => app.close());

  void configureExpress(app, {
    permitSwaggerWrite: false,
    ssr: MaintenanceWorkerService,
  });

  void app.get(MaintenanceWorkerService).logSecret();
}

bootstrap().catch((error) => {
server/test/fixtures/asset.stub.ts (vendored, 37 lines changed)
@@ -31,18 +31,21 @@ const sidecarFileWithoutExt = factory.assetFile({
});

const editedPreviewFile = factory.assetFile({
  type: AssetFileType.PreviewEdited,
  type: AssetFileType.Preview,
  path: '/uploads/user-id/preview/path_edited.jpg',
  isEdited: true,
});

const editedThumbnailFile = factory.assetFile({
  type: AssetFileType.ThumbnailEdited,
  type: AssetFileType.Thumbnail,
  path: '/uploads/user-id/thumbnail/path_edited.jpg',
  isEdited: true,
});

const editedFullsizeFile = factory.assetFile({
  type: AssetFileType.FullSizeEdited,
  type: AssetFileType.FullSize,
  path: '/uploads/user-id/fullsize/path_edited.jpg',
  isEdited: true,
});

const files: AssetFile[] = [fullsizeFile, previewFile, thumbnailFile];

@@ -86,6 +89,7 @@ export const assetStub = {
    make: 'FUJIFILM',
    model: 'X-T50',
    lensModel: 'XF27mm F2.8 R WR',
    isEdited: false,
    ...asset,
  }),
  noResizePath: Object.freeze({

@@ -125,6 +129,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  noWebpPath: Object.freeze({

@@ -166,6 +171,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  noThumbhash: Object.freeze({

@@ -204,6 +210,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  primaryImage: Object.freeze({

@@ -252,6 +259,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  image: Object.freeze({

@@ -298,6 +306,7 @@ export const assetStub = {
    width: null,
    visibility: AssetVisibility.Timeline,
    edits: [],
    isEdited: false,
  }),

  trashed: Object.freeze({

@@ -341,6 +350,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  trashedOffline: Object.freeze({

@@ -384,6 +394,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),
  archived: Object.freeze({
    id: 'asset-id',

@@ -426,6 +437,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  external: Object.freeze({

@@ -468,6 +480,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  image1: Object.freeze({

@@ -510,6 +523,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  imageFrom2015: Object.freeze({

@@ -551,6 +565,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  video: Object.freeze({

@@ -594,6 +609,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  livePhotoMotionAsset: Object.freeze({

@@ -614,6 +630,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [] as AssetEditActionItem[],
    isEdited: false,
  } as MapAsset & { faces: AssetFace[]; files: AssetFile[]; exifInfo: Exif; edits: AssetEditActionItem[] }),

  livePhotoStillAsset: Object.freeze({

@@ -635,6 +652,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [] as AssetEditActionItem[],
    isEdited: false,
  } as MapAsset & { faces: AssetFace[]; files: AssetFile[]; edits: AssetEditActionItem[] }),

  livePhotoWithOriginalFileName: Object.freeze({

@@ -658,6 +676,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [] as AssetEditActionItem[],
    isEdited: false,
  } as MapAsset & { faces: AssetFace[]; files: AssetFile[]; edits: AssetEditActionItem[] }),

  withLocation: Object.freeze({

@@ -705,6 +724,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  sidecar: Object.freeze({

@@ -743,6 +763,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  sidecarWithoutExt: Object.freeze({

@@ -778,6 +799,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  hasEncodedVideo: Object.freeze({

@@ -820,6 +842,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  hasFileExtension: Object.freeze({

@@ -859,6 +882,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  imageDng: Object.freeze({

@@ -902,6 +926,7 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  imageHif: Object.freeze({

@@ -945,7 +970,9 @@ export const assetStub = {
    width: null,
    height: null,
    edits: [],
    isEdited: false,
  }),

  panoramaTif: Object.freeze({
    id: 'asset-id',
    status: AssetStatus.Active,

@@ -988,6 +1015,7 @@ export const assetStub = {
    height: null,
    edits: [],
  }),

  withCropEdit: Object.freeze({
    id: 'asset-id',
    status: AssetStatus.Active,

@@ -1043,7 +1071,9 @@ export const assetStub = {
        },
      },
    ] as AssetEditActionItem[],
    isEdited: true,
  }),

  withoutEdits: Object.freeze({
    id: 'asset-id',
    status: AssetStatus.Active,

@@ -1089,5 +1119,6 @@ export const assetStub = {
    width: 2160,
    visibility: AssetVisibility.Timeline,
    edits: [],
    isEdited: false,
  }),
};
server/test/fixtures/shared-link.stub.ts (vendored, 2 lines changed)
@@ -147,6 +147,7 @@ export const sharedLinkStub = {
        visibility: AssetVisibility.Timeline,
        width: 500,
        height: 500,
        tags: [],
      },
      sharedLinks: [],
      faces: [],

@@ -159,6 +160,7 @@ export const sharedLinkStub = {
        visibility: AssetVisibility.Timeline,
        width: 500,
        height: 500,
        isEdited: false,
      },
    ],
    albumId: null,
@@ -19,6 +19,7 @@ import { AccessRepository } from 'src/repositories/access.repository';
import { ActivityRepository } from 'src/repositories/activity.repository';
import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';

@@ -384,6 +385,7 @@ const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
    case AlbumUserRepository:
    case ActivityRepository:
    case AssetRepository:
    case AssetEditRepository:
    case AssetJobRepository:
    case MemoryRepository:
    case NotificationRepository:

@@ -535,6 +537,7 @@ const assetInsert = (asset: Partial<Insertable<AssetTable>> = {}) => {
    fileModifiedAt: now,
    localDateTime: now,
    visibility: AssetVisibility.Timeline,
    isEdited: false,
  };

  return {

@@ -0,0 +1,115 @@
import { Kysely } from 'kysely';
import { AssetEditAction, MirrorAxis } from 'src/dtos/editing.dto';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { DB } from 'src/schema';
import { BaseService } from 'src/services/base.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';

let defaultDatabase: Kysely<DB>;

const setup = (db?: Kysely<DB>) => {
  const { ctx } = newMediumService(BaseService, {
    database: db || defaultDatabase,
    real: [],
    mock: [LoggingRepository],
  });
  return { ctx, sut: ctx.get(AssetEditRepository) };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});

describe(AssetEditRepository.name, () => {
  describe('replaceAll', () => {
    it('should set isEdited on insert', async () => {
      const { ctx, sut } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: false });

      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
      ]);

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: true });
    });

    it('should set isEdited when inserting multiple edits', async () => {
      const { ctx, sut } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: false });

      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: true });
    });

    it('should keep isEdited when removing some edits', async () => {
      const { ctx, sut } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: false });

      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: true });

      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
      ]);

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: true });
    });

    it('should set isEdited to false if all edits are deleted', async () => {
      const { ctx, sut } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: false });

      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);

      await sut.replaceAll(asset.id, []);

      await expect(
        ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
      ).resolves.toEqual({ isEdited: false });
    });
  });
});
@@ -83,6 +83,7 @@ describe(SyncRequestType.AlbumAssetsV1, () => {
          libraryId: asset.libraryId,
          width: asset.width,
          height: asset.height,
          isEdited: asset.isEdited,
        },
        type: SyncEntityType.AlbumAssetCreateV1,
      },

@@ -64,6 +64,7 @@ describe(SyncEntityType.AssetV1, () => {
          libraryId: asset.libraryId,
          width: asset.width,
          height: asset.height,
          isEdited: asset.isEdited,
        },
        type: 'AssetV1',
      },

@@ -63,6 +63,7 @@ describe(SyncRequestType.PartnerAssetsV1, () => {
          type: asset.type,
          visibility: asset.visibility,
          duration: asset.duration,
          isEdited: asset.isEdited,
          stackId: null,
          livePhotoVideoId: null,
          libraryId: asset.libraryId,
@@ -50,6 +50,9 @@ export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRe
  createZipStream: vitest.fn(),
  createPlainReadStream: vitest.fn(),
  createReadStream: vitest.fn(),
  createPlainReadStream: vitest.fn(),
  createGzip: vitest.fn(),
  createGunzip: vitest.fn(),
  readFile: vitest.fn(),
  readTextFile: vitest.fn(),
  createFile: vitest.fn(),
@@ -253,6 +253,7 @@ const assetFactory = (asset: Partial<MapAsset> = {}) => ({
  visibility: AssetVisibility.Timeline,
  width: null,
  height: null,
  isEdited: false,
  ...asset,
});

@@ -334,6 +335,7 @@ const assetSidecarWriteFactory = () => {
      id: newUuid(),
      path: '/path/to/original-path.jpg.xmp',
      type: AssetFileType.Sidecar,
      isEdited: false,
    },
  ],
  exifInfo: {

@@ -385,6 +387,7 @@ const assetFileFactory = (file: Partial<AssetFile> = {}): AssetFile => ({
  id: newUuid(),
  type: AssetFileType.Preview,
  path: '/uploads/user-id/thumbs/path.jpg',
  isEdited: false,
  ...file,
});
@@ -7,7 +7,7 @@ import { NextFunction } from 'express';
import { Kysely } from 'kysely';
import multer from 'multer';
import { ChildProcessWithoutNullStreams } from 'node:child_process';
import { Readable, Writable } from 'node:stream';
import { Duplex, Readable, Writable } from 'node:stream';
import { PNG } from 'pngjs';
import postgres from 'postgres';
import { UploadFieldName } from 'src/dtos/asset-media.dto';

@@ -500,6 +500,74 @@ export const mockSpawn = vitest.fn((exitCode: number, stdout: string, stderr: st
  } as unknown as ChildProcessWithoutNullStreams;
});

export const mockDuplex = vitest.fn(
  (command: string, exitCode: number, stdout: string, stderr: string, error?: unknown) => {
    const duplex = new Duplex({
      write(_chunk, _encoding, callback) {
        callback();
      },

      read() {},

      final(callback) {
        callback();
      },
    });

    setImmediate(() => {
      if (error) {
        duplex.destroy(error as Error);
      } else if (exitCode === 0) {
        /* eslint-disable unicorn/prefer-single-call */
        duplex.push(stdout);
        duplex.push(null);
        /* eslint-enable unicorn/prefer-single-call */
      } else {
        duplex.destroy(new Error(`${command} non-zero exit code (${exitCode})\n${stderr}`));
      }
    });

    return duplex;
  },
);

export const mockFork = vitest.fn((exitCode: number, stdout: string, stderr: string, error?: unknown) => {
  const stdoutStream = new Readable({
    read() {
      this.push(stdout); // write mock data to stdout
      this.push(null); // end stream
    },
  });

  return {
    stdout: stdoutStream,
    stderr: new Readable({
      read() {
        this.push(stderr); // write mock data to stderr
        this.push(null); // end stream
      },
    }),
    stdin: new Writable({
      write(chunk, encoding, callback) {
        callback();
      },
    }),
    exitCode,
    on: vitest.fn((event, callback: any) => {
      if (event === 'close') {
        stdoutStream.once('end', () => callback(0));
      }
      if (event === 'error' && error) {
        stdoutStream.once('end', () => callback(error));
      }
      if (event === 'exit') {
        stdoutStream.once('end', () => callback(exitCode));
      }
    }),
    kill: vitest.fn(),
  } as unknown as ChildProcessWithoutNullStreams;
});

export async function* makeStream<T>(items: T[] = []): AsyncIterableIterator<T> {
  for (const item of items) {
    await Promise.resolve();
Block a user