Compare commits

9 Commits

Author SHA1 Message Date
Luis Nachtigall
354dd3cc3c feat(mobile): enhance album sorting functionality with order handling (#24816)
* feat: enhance album sorting functionality with effective order handling

* mobile: formatting

* test: align album sorting order in unit tests with defaultSortOrder

* test(mobile): add reverse order validation for album sorting

* chore(PR): remove OppositeSortOrder Extension and move it directly into SortOrder enum

* refactor: return sorted list directly in album sorting function

* refactor: remove sort_order_extensions.dart
2026-02-07 10:11:37 +05:30
shenlong
57483a1e7f fix: user profile refetched each time on opening app dialog (#25992)
fix: user profile on opening app dialog

Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2026-02-07 10:10:17 +05:30
Romo
bcea64875f fix: image and video download complete notification shows "file_name" (#25975)
* fix: image and video download complete notification shows "file_name"

* fix lint

---------

Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2026-02-07 02:56:14 +00:00
Matthew Momjian
84e30abe5d feat(docs): version policy (#25979)
version policy
2026-02-06 20:31:57 -05:00
Jason Rasmussen
e3e243fa2b refactor: tests (#25987) 2026-02-07 00:47:54 +01:00
Jason Rasmussen
b3820c259e refactor: test factories (#25977) 2026-02-06 16:32:50 -05:00
Daniel Dietzler
a356497d96 refactor: album service small tests (#25640) 2026-02-06 08:56:46 -05:00
Michel Heusschen
16fe828913 fix: revert "fix(web): Ensure profile picture is cropped to 1:1 ratio (#25892)" (#25956)
Revert "fix(web): Ensure profile picture is cropped to 1:1 ratio (#25892)"

This reverts commit 3c77c724c5.
2026-02-06 08:21:56 -05:00
Michel Heusschen
211dc3c056 fix: add missing translations for image editor (#25957) 2026-02-06 13:08:24 +01:00
50 changed files with 2630 additions and 2684 deletions

View File

@@ -26,6 +26,16 @@ docker image prune
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
[releases]: https://github.com/immich-app/immich/releases
## Versioning Policy
Immich follows [semantic versioning][semver], which tags releases in the format `<major>.<minor>.<patch>`. We intend for breaking changes to be limited to major version releases.
You can configure your Docker image to point to the current major version by using a metatag, such as `:v2`.
Currently, we have no plans to backport patches to earlier versions. We encourage all users to run the most recent release of Immich.
Switching back to an earlier version, even within the same minor release tag, is not supported.
[semver]: https://semver.org/
## Migrating to VectorChord
:::info
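The `:v2` metatag described above resolves to the latest release within the v2 major line. A minimal compose sketch of that pinning, assuming the standard `ghcr.io/immich-app/immich-server` image name:

```yaml
# docker-compose.yml (sketch): track the v2 major line.
# `docker compose pull` picks up minor/patch releases; moving to v3
# requires an explicit tag change, so breaking changes stay opt-in.
services:
  immich-server:
    image: ghcr.io/immich-app/immich-server:v2
```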

View File

@@ -997,6 +997,11 @@
"editor_close_without_save_prompt": "The changes will not be saved",
"editor_close_without_save_title": "Close editor?",
"editor_confirm_reset_all_changes": "Are you sure you want to reset all changes?",
"editor_discard_edits_confirm": "Discard edits",
"editor_discard_edits_prompt": "You have unsaved edits. Are you sure you want to discard them?",
"editor_discard_edits_title": "Discard edits?",
"editor_edits_applied_error": "Failed to apply edits",
"editor_edits_applied_success": "Edits applied successfully",
"editor_flip_horizontal": "Flip horizontal",
"editor_flip_vertical": "Flip vertical",
"editor_orientation": "Orientation",

View File

@@ -1,4 +1,11 @@
-enum SortOrder { asc, desc }
+enum SortOrder {
+  asc,
+  desc;
+
+  SortOrder reverse() {
+    return this == SortOrder.asc ? SortOrder.desc : SortOrder.asc;
+  }
+}
enum TextSearchType { context, filename, description, ocr }

View File

@@ -1,6 +1,7 @@
import 'dart:async';
import 'package:collection/collection.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/user.model.dart';
@@ -36,6 +37,7 @@ class RemoteAlbumService {
AlbumSortMode sortMode, {
bool isReverse = false,
}) async {
    // list of albums sorted in ascending order according to the selected sort mode
final List<RemoteAlbum> sorted = switch (sortMode) {
AlbumSortMode.created => albums.sortedBy((album) => album.createdAt),
AlbumSortMode.title => albums.sortedBy((album) => album.name),
@@ -44,8 +46,9 @@ class RemoteAlbumService {
AlbumSortMode.mostRecent => await _sortByNewestAsset(albums),
AlbumSortMode.mostOldest => await _sortByOldestAsset(albums),
};
-    return (isReverse ? sorted.reversed : sorted).toList();
+    final effectiveOrder = isReverse ? sortMode.defaultOrder.reverse() : sortMode.defaultOrder;
+    return (effectiveOrder == SortOrder.asc ? sorted : sorted.reversed).toList();
}
List<RemoteAlbum> searchAlbums(
@@ -209,6 +212,6 @@ class RemoteAlbumService {
return aDate.compareTo(bDate);
});
-    return sorted.reversed.toList();
+    return sorted;
}
}
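The pattern in these hunks: each sort mode declares a natural default direction, and the user's reverse toggle is applied relative to that default rather than to a fixed ascending order. A rough TypeScript transcription of the same logic (the per-mode `defaultOrder` values mirror the Dart enum below):

```ts
type SortOrder = 'asc' | 'desc';

const reverse = (o: SortOrder): SortOrder => (o === 'asc' ? 'desc' : 'asc');

// Sort handlers always produce an ascending list; the effective order then
// decides whether that list is returned as-is or reversed.
function applyOrder<T>(sortedAsc: T[], defaultOrder: SortOrder, isReverse: boolean): T[] {
  const effectiveOrder = isReverse ? reverse(defaultOrder) : defaultOrder;
  return effectiveOrder === 'asc' ? sortedAsc : [...sortedAsc].reverse();
}

// "created" defaults to desc, so the newest album comes first until the user flips it.
const byCreatedAt = ['2024-01-01', '2025-06-15', '2026-02-01']; // ascending input
console.log(applyOrder(byCreatedAt, 'desc', false)); // ['2026-02-01', '2025-06-15', '2024-01-01']
console.log(applyOrder(byCreatedAt, 'desc', true)); // ['2024-01-01', '2025-06-15', '2026-02-01']
```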

View File

@@ -5,6 +5,7 @@ import 'package:auto_route/auto_route.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/domain/models/album/album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
@@ -281,6 +282,8 @@ class _SortButtonState extends ConsumerState<_SortButton> {
setState(() {
albumSortOption = sortMode;
isSorting = true;
// reset sort order to default state when switching option
albumSortIsReverse = false;
});
}
@@ -293,6 +296,7 @@ class _SortButtonState extends ConsumerState<_SortButton> {
@override
Widget build(BuildContext context) {
final effectiveOrder = albumSortOption.effectiveOrder(albumSortIsReverse);
return MenuAnchor(
controller: widget.controller,
style: MenuStyle(
@@ -307,7 +311,7 @@ class _SortButtonState extends ConsumerState<_SortButton> {
.map(
(sortMode) => MenuItemButton(
leadingIcon: albumSortOption == sortMode
-                      ? albumSortIsReverse
+                      ? effectiveOrder == SortOrder.desc
? Icon(
Icons.keyboard_arrow_down,
color: albumSortOption == sortMode
@@ -355,7 +359,7 @@ class _SortButtonState extends ConsumerState<_SortButton> {
children: [
Padding(
padding: const EdgeInsets.only(right: 5),
-                    child: albumSortIsReverse
+                    child: effectiveOrder == SortOrder.desc
? Icon(Icons.keyboard_arrow_down, color: context.colorScheme.onSurface)
: Icon(Icons.keyboard_arrow_up_rounded, color: context.colorScheme.onSurface),
),

View File

@@ -1,4 +1,5 @@
import 'package:collection/collection.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:immich_mobile/entities/album.entity.dart';
@@ -73,18 +74,21 @@ class _AlbumSortHandlers {
// Store index allows us to re-arrange the values without affecting the saved prefs
enum AlbumSortMode {
title(1, "library_page_sort_title", _AlbumSortHandlers.title),
assetCount(4, "library_page_sort_asset_count", _AlbumSortHandlers.assetCount),
lastModified(3, "library_page_sort_last_modified", _AlbumSortHandlers.lastModified),
created(0, "library_page_sort_created", _AlbumSortHandlers.created),
mostRecent(2, "sort_recent", _AlbumSortHandlers.mostRecent),
mostOldest(5, "sort_oldest", _AlbumSortHandlers.mostOldest);
title(1, "library_page_sort_title", _AlbumSortHandlers.title, SortOrder.asc),
assetCount(4, "library_page_sort_asset_count", _AlbumSortHandlers.assetCount, SortOrder.desc),
lastModified(3, "library_page_sort_last_modified", _AlbumSortHandlers.lastModified, SortOrder.desc),
created(0, "library_page_sort_created", _AlbumSortHandlers.created, SortOrder.desc),
mostRecent(2, "sort_recent", _AlbumSortHandlers.mostRecent, SortOrder.desc),
mostOldest(5, "sort_oldest", _AlbumSortHandlers.mostOldest, SortOrder.asc);
final int storeIndex;
final String label;
final AlbumSortFn sortFn;
final SortOrder defaultOrder;
const AlbumSortMode(this.storeIndex, this.label, this.sortFn);
const AlbumSortMode(this.storeIndex, this.label, this.sortFn, this.defaultOrder);
SortOrder effectiveOrder(bool isReverse) => isReverse ? defaultOrder.reverse() : defaultOrder;
}
@riverpod

View File

@@ -27,19 +27,17 @@ import 'package:isar/isar.dart';
import 'package:path_provider/path_provider.dart';
void configureFileDownloaderNotifications() {
-  final fileName = 'file_name'.t(args: {'file_name': '{filename}'});
  FileDownloader().configureNotificationForGroup(
    kDownloadGroupImage,
-    running: TaskNotification('downloading_media'.t(), fileName),
-    complete: TaskNotification('download_finished'.t(), fileName),
+    running: TaskNotification('downloading_media'.t(), '${'file_name_text'.t()}: {filename}'),
+    complete: TaskNotification('download_finished'.t(), '${'file_name_text'.t()}: {filename}'),
    progressBar: true,
  );
  FileDownloader().configureNotificationForGroup(
    kDownloadGroupVideo,
-    running: TaskNotification('downloading_media'.t(), fileName),
-    complete: TaskNotification('download_finished'.t(), fileName),
+    running: TaskNotification('downloading_media'.t(), '${'file_name_text'.t()}: {filename}'),
+    complete: TaskNotification('download_finished'.t(), '${'file_name_text'.t()}: {filename}'),
progressBar: true,
);
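The fix works because there are two substitution stages: the app's translator runs once at configure time, while the background_downloader plugin substitutes its `{filename}` token later, each time a notification fires. The old code routed `{filename}` through the translator's argument substitution for the key `file_name`, which ended up rendering the literal key. A rough TypeScript sketch of the two stages (hypothetical helper names; the real app uses easy_localization and background_downloader in Dart):

```ts
// Stage 1 helper: a toy translator whose missing keys fall through as the raw
// key — the same failure mode that showed "file_name" in the notification.
const translations: Record<string, string> = { file_name_text: 'File name' };
const t = (key: string) => translations[key] ?? key;

// Configure time: translate only our prefix, keep the plugin's token intact.
const template = `${t('file_name_text')}: {filename}`; // -> "File name: {filename}"

// Stage 2, notification time (done by the downloader): substitute the token.
const render = (tpl: string, filename: string) => tpl.replace('{filename}', filename);
console.log(render(template, 'IMG_0001.jpg')); // -> "File name: IMG_0001.jpg"
```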

View File

@@ -1,5 +1,3 @@
-import 'dart:math';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
@@ -20,7 +18,7 @@ class UserCircleAvatar extends ConsumerWidget {
Widget build(BuildContext context, WidgetRef ref) {
final userAvatarColor = user.avatarColor.toColor();
final profileImageUrl =
-        '${Store.get(StoreKey.serverEndpoint)}/users/${user.id}/profile-image?d=${Random().nextInt(1024)}';
+        '${Store.get(StoreKey.serverEndpoint)}/users/${user.id}/profile-image?d=${user.profileChangedAt.millisecondsSinceEpoch}';
final textIcon = DefaultTextStyle(
style: TextStyle(
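The old URL appended a random number, so every widget rebuild produced a distinct URL and the avatar bypassed the image cache and was refetched. Keying the query parameter to `profileChangedAt` keeps the URL stable until the picture actually changes. The idea, sketched in TypeScript (types are illustrative):

```ts
interface User {
  id: string;
  profileChangedAt: Date;
}

// Deterministic cache-buster: the URL only changes when the profile does,
// so the HTTP cache can serve the avatar on every other request.
function profileImageUrl(serverEndpoint: string, user: User): string {
  const cacheKey = user.profileChangedAt.getTime(); // stable until the picture changes
  return `${serverEndpoint}/users/${user.id}/profile-image?d=${cacheKey}`;
}

const user: User = { id: '42', profileChangedAt: new Date('2026-02-01T00:00:00Z') };
console.log(profileImageUrl('https://immich.example', user)); // same URL on every call
```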

View File

@@ -85,35 +85,47 @@ void main() {
final albums = [albumB, albumA];
final result = await sut.sortAlbums(albums, AlbumSortMode.created);
-      expect(result, [albumA, albumB]);
+      expect(result, [albumB, albumA]);
});
test('should sort correctly based on updatedAt', () async {
final albums = [albumB, albumA];
final result = await sut.sortAlbums(albums, AlbumSortMode.lastModified);
-      expect(result, [albumA, albumB]);
+      expect(result, [albumB, albumA]);
});
test('should sort correctly based on assetCount', () async {
final albums = [albumB, albumA];
final result = await sut.sortAlbums(albums, AlbumSortMode.assetCount);
-      expect(result, [albumA, albumB]);
+      expect(result, [albumB, albumA]);
});
test('should sort correctly based on newestAssetTimestamp', () async {
final albums = [albumB, albumA];
final result = await sut.sortAlbums(albums, AlbumSortMode.mostRecent);
-      expect(result, [albumA, albumB]);
+      expect(result, [albumB, albumA]);
});
test('should sort correctly based on oldestAssetTimestamp', () async {
final albums = [albumB, albumA];
final result = await sut.sortAlbums(albums, AlbumSortMode.mostOldest);
-      expect(result, [albumB, albumA]);
+      expect(result, [albumA, albumB]);
});
test('should flip order when isReverse is true for all modes', () async {
final albums = [albumB, albumA];
for (final mode in AlbumSortMode.values) {
final normal = await sut.sortAlbums(albums, mode, isReverse: false);
final reversed = await sut.sortAlbums(albums, mode, isReverse: true);
// reversed should be the exact inverse of normal
expect(reversed, normal.reversed.toList(), reason: 'Mode: $mode');
}
});
});
}

View File

@@ -33,7 +33,6 @@ import { WebsocketRepository } from 'src/repositories/websocket.repository';
import { services } from 'src/services';
import { AuthService } from 'src/services/auth.service';
import { CliService } from 'src/services/cli.service';
-import { DatabaseBackupService } from 'src/services/database-backup.service';
import { QueueService } from 'src/services/queue.service';
import { getKyselyConfig } from 'src/utils/database';
@@ -115,7 +114,6 @@ export class ApiModule extends BaseModule {}
AppRepository,
MaintenanceHealthRepository,
MaintenanceWebsocketRepository,
-    DatabaseBackupService,
MaintenanceWorkerService,
...commonMiddleware,
{ provide: APP_GUARD, useClass: MaintenanceAuthGuard },

View File

@@ -1,15 +1,18 @@
import { mapAlbum } from 'src/dtos/album.dto';
-import { albumStub } from 'test/fixtures/album.stub';
+import { AlbumFactory } from 'test/factories/album.factory';
describe('mapAlbum', () => {
it('should set start and end dates', () => {
-    const dto = mapAlbum(albumStub.twoAssets, false);
-    expect(dto.startDate).toEqual(new Date('2020-12-31T23:59:00.000Z'));
-    expect(dto.endDate).toEqual(new Date('2025-01-01T01:02:03.456Z'));
+    const startDate = new Date('2023-02-22T05:06:29.716Z');
+    const endDate = new Date('2025-01-01T01:02:03.456Z');
+    const album = AlbumFactory.from().asset({ localDateTime: endDate }).asset({ localDateTime: startDate }).build();
+    const dto = mapAlbum(album, false);
+    expect(dto.startDate).toEqual(startDate);
+    expect(dto.endDate).toEqual(endDate);
});
it('should not set start and end dates for empty assets', () => {
-    const dto = mapAlbum(albumStub.empty, false);
+    const dto = mapAlbum(AlbumFactory.create(), false);
expect(dto.startDate).toBeUndefined();
expect(dto.endDate).toBeUndefined();
});
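The refactor swaps shared fixture stubs for per-test factories, so each test constructs exactly the data it asserts on instead of depending on distant stub values. A minimal sketch of the builder pattern the new tests lean on (field names and defaults are illustrative, not the project's actual factory API):

```ts
interface Asset {
  localDateTime: Date;
}
interface Album {
  name: string;
  assets: Asset[];
}

// Builder-style factory: sensible defaults, chainable overrides, explicit build().
class AlbumBuilder {
  private album: Album = { name: 'Test Album', assets: [] };

  asset(overrides: Partial<Asset> = {}): this {
    this.album.assets.push({ localDateTime: new Date(0), ...overrides });
    return this;
  }

  build(): Album {
    return this.album;
  }
}

const AlbumFactory = {
  from: () => new AlbumBuilder(),
  create: (): Album => new AlbumBuilder().build(), // empty album by default
};

// Usage mirrors the test above: the dates live inline, next to the assertions.
const album = AlbumFactory.from().asset({ localDateTime: new Date('2025-01-01') }).build();
console.log(album.assets.length); // 1
```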

View File

@@ -34,14 +34,12 @@ import { FilenameParamDto } from 'src/validation';
import type { DatabaseBackupController as _DatabaseBackupController } from 'src/controllers/database-backup.controller';
import type { ServerController as _ServerController } from 'src/controllers/server.controller';
import { DatabaseBackupDeleteDto, DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
-import { DatabaseBackupService } from 'src/services/database-backup.service';
@Controller()
export class MaintenanceWorkerController {
constructor(
private logger: LoggingRepository,
private service: MaintenanceWorkerService,
-    private databaseBackupService: DatabaseBackupService,
) {}
/**
@@ -63,7 +61,7 @@ export class MaintenanceWorkerController {
@Get('admin/database-backups')
@MaintenanceRoute()
listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
-    return this.databaseBackupService.listBackups();
+    return this.service.listBackups();
}
/**
@@ -76,7 +74,7 @@ export class MaintenanceWorkerController {
@Res() res: Response,
@Next() next: NextFunction,
) {
-    await sendFile(res, next, () => this.databaseBackupService.downloadBackup(filename), this.logger);
+    await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
}
/**
@@ -85,7 +83,7 @@ export class MaintenanceWorkerController {
@Delete('admin/database-backups')
@MaintenanceRoute()
async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
-    return this.databaseBackupService.deleteBackup(dto.backups);
+    return this.service.deleteBackup(dto.backups);
}
/**
@@ -98,7 +96,7 @@ export class MaintenanceWorkerController {
@UploadedFile()
file: Express.Multer.File,
): Promise<void> {
-    return this.databaseBackupService.uploadBackup(file);
+    return this.service.uploadBackup(file);
}
@Get('admin/maintenance/status')

View File

@@ -1,18 +1,23 @@
-import { UnauthorizedException } from '@nestjs/common';
+import { BadRequestException, UnauthorizedException } from '@nestjs/common';
import { SignJWT } from 'jose';
-import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
+import { DateTime } from 'luxon';
+import { PassThrough, Readable } from 'node:stream';
+import { StorageCore } from 'src/cores/storage.core';
+import { MaintenanceAction, StorageFolder, SystemMetadataKey } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
-import { DatabaseBackupService } from 'src/services/database-backup.service';
-import { automock, AutoMocked, getMocks, ServiceMocks } from 'test/utils';
+import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';
function* mockData() {
yield '';
}
describe(MaintenanceWorkerService.name, () => {
let sut: MaintenanceWorkerService;
let mocks: ServiceMocks;
let maintenanceWebsocketRepositoryMock: AutoMocked<MaintenanceWebsocketRepository>;
let maintenanceHealthRepositoryMock: AutoMocked<MaintenanceHealthRepository>;
-  let databaseBackupServiceMock: AutoMocked<DatabaseBackupService>;
beforeEach(() => {
mocks = getMocks();
@@ -24,20 +29,6 @@ describe(MaintenanceWorkerService.name, () => {
args: [mocks.logger],
strict: false,
});
-    databaseBackupServiceMock = automock(DatabaseBackupService, {
-      args: [
-        mocks.logger,
-        mocks.storage,
-        mocks.config,
-        mocks.systemMetadata,
-        mocks.process,
-        mocks.database,
-        mocks.cron,
-        mocks.job,
-        maintenanceHealthRepositoryMock,
-      ],
-      strict: false,
-    });
sut = new MaintenanceWorkerService(
mocks.logger as never,
@@ -49,7 +40,6 @@ describe(MaintenanceWorkerService.name, () => {
mocks.storage as never,
mocks.process,
mocks.database as never,
-      databaseBackupServiceMock,
);
sut.mock({
@@ -320,6 +310,17 @@ describe(MaintenanceWorkerService.name, () => {
describe('action: restore database', () => {
beforeEach(() => {
mocks.database.tryLock.mockResolvedValueOnce(true);
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
mocks.process.fork.mockImplementation(() => mockSpawn(0, 'Immich Server is listening', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
mocks.storage.createGzip.mockReturnValue(new PassThrough());
mocks.storage.createGunzip.mockReturnValue(new PassThrough());
});
it('should update maintenance mode state', async () => {
@@ -340,7 +341,21 @@ describe(MaintenanceWorkerService.name, () => {
});
});
-    it('should defer to database backup service', async () => {
+    it('should fail to restore invalid backup', async () => {
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'filename',
});
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
error: 'Error: Invalid backup file format!',
task: 'error',
});
});
it('should successfully run a backup', async () => {
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
@@ -365,10 +380,13 @@ describe(MaintenanceWorkerService.name, () => {
action: 'end',
},
);
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);
});
-    it('should forward errors from database backup service', async () => {
-      databaseBackupServiceMock.restoreDatabaseBackup.mockRejectedValue('Sample error');
+    it('should fail if backup creation fails', async () => {
+      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
@@ -378,16 +396,149 @@ describe(MaintenanceWorkerService.name, () => {
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
-        error: 'Sample error',
+        error: 'Error: pg_dump non-zero exit code (1)\nerror',
task: 'error',
});
-      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'public', {
+      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
'MaintenanceStatusV1',
expect.any(String),
expect.objectContaining({
task: 'error',
}),
);
});
it('should fail if restore itself fails', async () => {
mocks.process.spawnDuplexStream
.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''))
.mockReturnValueOnce(mockDuplex('gzip', 0, 'data', ''))
.mockReturnValueOnce(mockDuplex('psql', 1, '', 'error'));
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
});
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
-        error: 'Something went wrong, see logs!',
+        error: 'Error: psql non-zero exit code (1)\nerror',
task: 'error',
});
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
'MaintenanceStatusV1',
expect.any(String),
expect.objectContaining({
task: 'error',
}),
);
});
it('should rollback if database migrations fail', async () => {
mocks.database.runMigrations.mockRejectedValue(new Error('Migrations Error'));
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
});
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
error: 'Error: Migrations Error',
task: 'error',
});
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalledTimes(0);
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
});
it('should rollback if API healthcheck fails', async () => {
maintenanceHealthRepositoryMock.checkApiHealth.mockRejectedValue(new Error('Health Error'));
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
});
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
error: 'Error: Health Error',
task: 'error',
});
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
});
});
/**
* Backups
*/
describe('listBackups', () => {
it('should give us all backups', async () => {
mocks.storage.readdir.mockResolvedValue([
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
'immich-db-backup-1753789649000.sql.gz',
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
]);
mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
await expect(sut.listBackups()).resolves.toMatchObject({
backups: [
{ filename: 'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
{ filename: 'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
{ filename: 'immich-db-backup-1753789649000.sql.gz', filesize: 1024 },
],
});
});
});
describe('deleteBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});
it('should unlink the target file', async () => {
await sut.deleteBackup(['filename.sql']);
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
);
});
});
describe('uploadBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});
it('should write file', async () => {
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
});
});
describe('downloadBackup', () => {
it('should reject invalid file names', () => {
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
});
it('should get backup path', () => {
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
expect.objectContaining({
path: '/data/backups/hello.sql.gz',
}),
);
});
});
});

View File

@@ -25,11 +25,19 @@ import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { type ApiService as _ApiService } from 'src/services/api.service';
import { type BaseService as _BaseService } from 'src/services/base.service';
-import { DatabaseBackupService } from 'src/services/database-backup.service';
+import { type DatabaseBackupService as _DatabaseBackupService } from 'src/services/database-backup.service';
import { type ServerService as _ServerService } from 'src/services/server.service';
import { type VersionService as _VersionService } from 'src/services/version.service';
import { MaintenanceModeState } from 'src/types';
import { getConfig } from 'src/utils/config';
import {
deleteDatabaseBackup,
downloadDatabaseBackup,
listDatabaseBackups,
restoreDatabaseBackup,
uploadDatabaseBackup,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';
import { createMaintenanceLoginUrl, detectPriorInstall } from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';
@@ -54,7 +62,6 @@ export class MaintenanceWorkerService {
private storageRepository: StorageRepository,
private processRepository: ProcessRepository,
private databaseRepository: DatabaseRepository,
-    private databaseBackupService: DatabaseBackupService,
) {
this.logger.setContext(this.constructor.name);
}
@@ -180,6 +187,35 @@ export class MaintenanceWorkerService {
return '/usr/src/app/upload';
}
/**
* {@link _DatabaseBackupService.listBackups}
*/
async listBackups(): Promise<{ backups: { filename: string; filesize: number }[] }> {
const backups = await listDatabaseBackups(this.backupRepos);
return { backups };
}
/**
* {@link _DatabaseBackupService.deleteBackup}
*/
async deleteBackup(files: string[]): Promise<void> {
return deleteDatabaseBackup(this.backupRepos, files);
}
/**
* {@link _DatabaseBackupService.uploadBackup}
*/
async uploadBackup(file: Express.Multer.File): Promise<void> {
return uploadDatabaseBackup(this.backupRepos, file);
}
/**
* {@link _DatabaseBackupService.downloadBackup}
*/
downloadBackup(fileName: string): ImmichFileResponse {
return downloadDatabaseBackup(fileName);
}
private get secret() {
if (!this.#secret) {
throw new Error('Secret is not initialised yet.');
@@ -328,7 +364,7 @@ export class MaintenanceWorkerService {
progress: 0,
});
-    await this.databaseBackupService.restoreDatabaseBackup(filename, (task, progress) =>
+    await restoreDatabaseBackup(this.backupRepos, filename, (task, progress) =>
this.setStatus({
active: true,
action: MaintenanceAction.RestoreDatabase,

View File

@@ -58,7 +58,7 @@ select
from
(
select
-        *
+        "shared_link".*
from
"shared_link"
where
@@ -243,7 +243,7 @@ select
from
(
select
-        *
+        "shared_link".*
from
"shared_link"
where
@@ -316,7 +316,7 @@ select
from
(
select
-        *
+        "shared_link".*
from
"shared_link"
where

View File

@@ -17,8 +17,6 @@ set
where
"userId" = $2
and "albumId" = $3
-returning
-  *
-- AlbumUserRepository.delete
delete from "album_user"

View File

@@ -25,14 +25,13 @@ export class AlbumUserRepository {
}
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }, { role: AlbumUserRole.Viewer }] })
-  update({ userId, albumId }: AlbumPermissionId, dto: Updateable<AlbumUserTable>) {
-    return this.db
+  async update({ userId, albumId }: AlbumPermissionId, dto: Updateable<AlbumUserTable>) {
+    await this.db
.updateTable('album_user')
.set(dto)
.where('userId', '=', userId)
.where('albumId', '=', albumId)
-      .returningAll()
-      .executeTakeFirstOrThrow();
+      .execute();
}
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })

View File

@@ -44,9 +44,9 @@ const withAlbumUsers = (eb: ExpressionBuilder<DB, 'album'>) => {
};
const withSharedLink = (eb: ExpressionBuilder<DB, 'album'>) => {
-  return jsonArrayFrom(eb.selectFrom('shared_link').selectAll().whereRef('shared_link.albumId', '=', 'album.id')).as(
-    'sharedLinks',
-  );
+  return jsonArrayFrom(
+    eb.selectFrom('shared_link').selectAll('shared_link').whereRef('shared_link.albumId', '=', 'album.id'),
+  ).as('sharedLinks');
};
const withAssets = (eb: ExpressionBuilder<DB, 'album'>) => {
@@ -283,7 +283,7 @@ export class AlbumRepository {
return tx
.selectFrom('album')
-      .selectAll()
+      .selectAll('album')
.where('id', '=', newAlbum.id)
.select(withOwner)
.select(withAssets)

View File

@@ -260,7 +260,7 @@ export class SharedLinkRepository {
.selectAll('asset')
.innerJoinLateral(
(eb) =>
-          eb.selectFrom('asset_exif').whereRef('asset_exif.assetId', '=', 'asset.id').selectAll().as('exif'),
+          eb.selectFrom('asset_exif').whereRef('asset_exif.assetId', '=', 'asset.id').selectAll().as('exifInfo'),
(join) => join.onTrue(),
)
.as('assets'),
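The `selectAll()` to `selectAll('shared_link')` / `selectAll('album')` changes, and the regenerated SQL above, are all the same fix: in a correlated subquery or lateral join, Kysely's unqualified `selectAll()` can project columns from every table in scope, so columns from the outer or joined tables can collide with or shadow the ones the caller expects. Passing the table name pins the projection, emitting `select "shared_link".*`. The `.as('exif')` to `.as('exifInfo')` rename separately aligns the lateral alias with the field name the mapper reads. A sketch of the qualified form, mirroring the `withSharedLink` change (schema is illustrative):

```ts
import { ExpressionBuilder } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';

// Minimal schema for illustration only.
interface DB {
  album: { id: string; updatedAt: string };
  shared_link: { id: string; albumId: string };
}

// Correlated subquery: the outer `album` row is in scope here, so an
// unqualified selectAll() may pick up more than shared_link's columns.
// selectAll('shared_link') compiles to `select "shared_link".*` instead.
const withSharedLink = (eb: ExpressionBuilder<DB, 'album'>) =>
  jsonArrayFrom(
    eb.selectFrom('shared_link').selectAll('shared_link').whereRef('shared_link.albumId', '=', 'album.id'),
  ).as('sharedLinks');
```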

File diff suppressed because it is too large

View File

@@ -6,9 +6,10 @@ import { AssetEditAction } from 'src/dtos/editing.dto';
import { AssetMetadataKey, AssetStatus, AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum';
import { AssetStats } from 'src/repositories/asset.repository';
import { AssetService } from 'src/services/asset.service';
+import { AssetFactory } from 'test/factories/asset.factory';
+import { AuthFactory } from 'test/factories/auth.factory';
import { assetStub } from 'test/fixtures/asset.stub';
-import { authStub } from 'test/fixtures/auth.stub';
import { userStub } from 'test/fixtures/user.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
@@ -45,35 +46,33 @@ describe(AssetService.name, () => {
describe('getStatistics', () => {
it('should get the statistics for a user, excluding archived assets', async () => {
+      const auth = AuthFactory.create();
      mocks.asset.getStatistics.mockResolvedValue(stats);

-      await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.Timeline })).resolves.toEqual(
-        statResponse,
-      );
-      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, {
-        visibility: AssetVisibility.Timeline,
-      });
+      await expect(sut.getStatistics(auth, { visibility: AssetVisibility.Timeline })).resolves.toEqual(statResponse);
+      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(auth.user.id, { visibility: AssetVisibility.Timeline });
});
it('should get the statistics for a user for archived assets', async () => {
+      const auth = AuthFactory.create();
      mocks.asset.getStatistics.mockResolvedValue(stats);

-      await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.Archive })).resolves.toEqual(
-        statResponse,
-      );
-      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, {
+      await expect(sut.getStatistics(auth, { visibility: AssetVisibility.Archive })).resolves.toEqual(statResponse);
+      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(auth.user.id, {
visibility: AssetVisibility.Archive,
});
});
it('should get the statistics for a user for favorite assets', async () => {
+      const auth = AuthFactory.create();
      mocks.asset.getStatistics.mockResolvedValue(stats);

-      await expect(sut.getStatistics(authStub.admin, { isFavorite: true })).resolves.toEqual(statResponse);
-      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, { isFavorite: true });
+      await expect(sut.getStatistics(auth, { isFavorite: true })).resolves.toEqual(statResponse);
+      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(auth.user.id, { isFavorite: true });
});
it('should get the statistics for a user for all assets', async () => {
+      const auth = AuthFactory.create();
      mocks.asset.getStatistics.mockResolvedValue(stats);

-      await expect(sut.getStatistics(authStub.admin, {})).resolves.toEqual(statResponse);
-      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, {});
+      await expect(sut.getStatistics(auth, {})).resolves.toEqual(statResponse);
+      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(auth.user.id, {});
});
});
@@ -249,10 +248,11 @@ describe(AssetService.name, () => {
});
it('should fail linking a live video if the motion part could not be found', async () => {
+      const auth = AuthFactory.create();
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
await expect(
-        sut.update(authStub.admin, assetStub.livePhotoStillAsset.id, {
+        sut.update(auth, assetStub.livePhotoStillAsset.id, {
livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
}),
).rejects.toBeInstanceOf(BadRequestException);
@@ -267,11 +267,12 @@ describe(AssetService.name, () => {
});
expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
+        userId: auth.user.id,
});
});
it('should fail linking a live video if the motion part is not a video', async () => {
+      const auth = AuthFactory.create();
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
mocks.asset.getById.mockResolvedValue(assetStub.livePhotoStillAsset);
@@ -291,16 +292,17 @@ describe(AssetService.name, () => {
});
expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
+        userId: auth.user.id,
});
});
it('should fail linking a live video if the motion part has a different owner', async () => {
+      const auth = AuthFactory.create();
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
mocks.asset.getById.mockResolvedValue(assetStub.livePhotoMotionAsset);
await expect(
-        sut.update(authStub.admin, assetStub.livePhotoStillAsset.id, {
+        sut.update(auth, assetStub.livePhotoStillAsset.id, {
livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
}),
).rejects.toBeInstanceOf(BadRequestException);
@@ -315,52 +317,41 @@ describe(AssetService.name, () => {
});
expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
+        userId: auth.user.id,
});
});
it('should link a live video', async () => {
-      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
-      mocks.asset.getById.mockResolvedValueOnce({
-        ...assetStub.livePhotoMotionAsset,
-        ownerId: authStub.admin.user.id,
-        visibility: AssetVisibility.Timeline,
-      });
-      mocks.asset.getById.mockResolvedValueOnce(assetStub.image);
-      mocks.asset.update.mockResolvedValue(assetStub.image);
+      const motionAsset = AssetFactory.create({ type: AssetType.Video, visibility: AssetVisibility.Timeline });
+      const stillAsset = AssetFactory.create();
+      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([stillAsset.id]));
+      mocks.asset.getById.mockResolvedValueOnce(motionAsset);
+      mocks.asset.getById.mockResolvedValueOnce(stillAsset);
+      mocks.asset.update.mockResolvedValue(stillAsset);
+      const auth = AuthFactory.from(motionAsset.owner).build();

-      await sut.update(authStub.admin, assetStub.livePhotoStillAsset.id, {
-        livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
-      });
+      await sut.update(auth, stillAsset.id, { livePhotoVideoId: motionAsset.id });

-      expect(mocks.asset.update).toHaveBeenCalledWith({
-        id: assetStub.livePhotoMotionAsset.id,
-        visibility: AssetVisibility.Hidden,
-      });
-      expect(mocks.event.emit).toHaveBeenCalledWith('AssetHide', {
-        assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
-      });
-      expect(mocks.asset.update).toHaveBeenCalledWith({
-        id: assetStub.livePhotoStillAsset.id,
-        livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
-      });
+      expect(mocks.asset.update).toHaveBeenCalledWith({ id: motionAsset.id, visibility: AssetVisibility.Hidden });
+      expect(mocks.event.emit).toHaveBeenCalledWith('AssetHide', { assetId: motionAsset.id, userId: auth.user.id });
+      expect(mocks.asset.update).toHaveBeenCalledWith({ id: stillAsset.id, livePhotoVideoId: motionAsset.id });
});
it('should throw an error if asset could not be found after update', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
-      await expect(sut.update(authStub.admin, 'asset-1', { isFavorite: true })).rejects.toBeInstanceOf(
+      await expect(sut.update(AuthFactory.create(), 'asset-1', { isFavorite: true })).rejects.toBeInstanceOf(
BadRequestException,
);
});
it('should unlink a live video', async () => {
+      const auth = AuthFactory.create();
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
mocks.asset.getById.mockResolvedValueOnce(assetStub.livePhotoStillAsset);
mocks.asset.getById.mockResolvedValueOnce(assetStub.livePhotoMotionAsset);
mocks.asset.update.mockResolvedValueOnce(assetStub.image);
-      await sut.update(authStub.admin, assetStub.livePhotoStillAsset.id, { livePhotoVideoId: null });
+      await sut.update(auth, assetStub.livePhotoStillAsset.id, { livePhotoVideoId: null });
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.livePhotoStillAsset.id,
@@ -372,7 +363,7 @@ describe(AssetService.name, () => {
});
expect(mocks.event.emit).toHaveBeenCalledWith('AssetShow', {
assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
+        userId: auth.user.id,
});
});
@@ -392,17 +383,15 @@ describe(AssetService.name, () => {
describe('updateAll', () => {
it('should require asset write access for all ids', async () => {
-      await expect(
-        sut.updateAll(authStub.admin, {
-          ids: ['asset-1'],
-        }),
-      ).rejects.toBeInstanceOf(BadRequestException);
+      const auth = AuthFactory.create();
+      await expect(sut.updateAll(auth, { ids: ['asset-1'] })).rejects.toBeInstanceOf(BadRequestException);
});
it('should update all assets', async () => {
+      const auth = AuthFactory.create();
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));

-      await sut.updateAll(authStub.admin, { ids: ['asset-1', 'asset-2'], visibility: AssetVisibility.Archive });
+      await sut.updateAll(auth, { ids: ['asset-1', 'asset-2'], visibility: AssetVisibility.Archive });
expect(mocks.asset.updateAll).toHaveBeenCalledWith(['asset-1', 'asset-2'], {
visibility: AssetVisibility.Archive,
@@ -410,9 +399,10 @@ describe(AssetService.name, () => {
});
it('should not update Assets table if no relevant fields are provided', async () => {
+      const auth = AuthFactory.create();
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
-      await sut.updateAll(authStub.admin, {
+      await sut.updateAll(auth, {
ids: ['asset-1'],
latitude: 0,
longitude: 0,

View File

@@ -0,0 +1,270 @@
import { DateTime } from 'luxon';
import { PassThrough } from 'node:stream';
import { defaults, SystemConfig } from 'src/config';
import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
import { BackupService } from 'src/services/backup.service';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { mockDuplex, mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { describe } from 'vitest';
describe(BackupService.name, () => {
let sut: BackupService;
let mocks: ServiceMocks;
beforeEach(() => {
({ sut, mocks } = newTestService(BackupService));
});
it('should work', () => {
expect(sut).toBeDefined();
});
describe('onBootstrapEvent', () => {
it('should init cron job and handle config changes', async () => {
mocks.database.tryLock.mockResolvedValue(true);
mocks.cron.create.mockResolvedValue();
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(mocks.cron.create).toHaveBeenCalled();
});
it('should not initialize backup database cron job when lock is taken', async () => {
mocks.database.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(mocks.cron.create).not.toHaveBeenCalled();
});
it('should not initialise backup database job when running on microservices', async () => {
mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(mocks.cron.create).not.toHaveBeenCalled();
});
});
describe('onConfigUpdateEvent', () => {
beforeEach(async () => {
mocks.database.tryLock.mockResolvedValue(true);
mocks.cron.create.mockResolvedValue();
await sut.onConfigInit({ newConfig: defaults });
});
it('should update cron job if backup is enabled', () => {
mocks.cron.update.mockResolvedValue();
sut.onConfigUpdate({
oldConfig: defaults,
newConfig: {
backup: {
database: {
enabled: true,
cronExpression: '0 1 * * *',
},
},
} as SystemConfig,
});
expect(mocks.cron.update).toHaveBeenCalledWith({ name: 'backupDatabase', expression: '0 1 * * *', start: true });
expect(mocks.cron.update).toHaveBeenCalled();
});
it('should do nothing if instance does not have the backup database lock', async () => {
mocks.database.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: defaults });
sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
expect(mocks.cron.update).not.toHaveBeenCalled();
});
});
describe('cleanupDatabaseBackups', () => {
it('should do nothing if not reached keepLastAmount', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz']);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).not.toHaveBeenCalled();
});
it('should remove failed backup files', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
//`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
mocks.storage.readdir.mockResolvedValue([
'immich-db-backup-123.sql.gz.tmp',
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
]);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(3);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-123.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz.tmp`,
);
});
it('should remove old backup files over keepLastAmount', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz']);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1.sql.gz`,
);
});
it('should remove old backup files over keepLastAmount and failed backups', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue([
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
'immich-db-backup-1753789649000.sql.gz',
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
]);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(3);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1753789649000.sql.gz`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz`,
);
});
});
describe('handleBackupDatabase', () => {
beforeEach(() => {
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
});
it('should sanitize DB_URL (remove uselibpqcompat) before calling pg_dumpall', async () => {
// create a service instance with a URL connection that includes libpqcompat
const dbUrl = 'postgresql://postgres:pwd@host:5432/immich?sslmode=require&uselibpqcompat=true';
const configMock = {
getEnv: () => ({ database: { config: { connectionType: 'url', url: dbUrl }, skipMigrations: false } }),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;
({ sut, mocks } = newTestService(BackupService, { config: configMock }));
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
mocks.database.getPostgresVersion.mockResolvedValue('14.10');
await sut.handleBackupDatabase();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalled();
const call = mocks.process.spawnDuplexStream.mock.calls[0];
const args = call[1] as string[];
expect(args).toMatchInlineSnapshot(`
[
"postgresql://postgres:pwd@host:5432/immich?sslmode=require",
"--clean",
"--if-exists",
]
`);
});
it('should run a database backup successfully', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.Success);
expect(mocks.storage.createWriteStream).toHaveBeenCalled();
});
it('should rename file on success', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.Success);
expect(mocks.storage.rename).toHaveBeenCalled();
});
it('should fail if pg_dump fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
});
it('should not rename file if pgdump fails and gzip succeeds', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
expect(mocks.storage.rename).not.toHaveBeenCalled();
});
it('should fail if gzip fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
});
it('should fail if write stream fails', async () => {
mocks.storage.createWriteStream.mockImplementation(() => {
throw new Error('error');
});
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
});
it('should fail if rename fails', async () => {
mocks.storage.rename.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
});
it('should ignore unlink failing and still return failed job status', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
mocks.storage.unlink.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
expect(mocks.storage.unlink).toHaveBeenCalled();
});
it.each`
postgresVersion | expectedVersion
${'14.10'} | ${14}
${'14.10.3'} | ${14}
${'14.10 (Debian 14.10-1.pgdg120+1)'} | ${14}
${'15.3.3'} | ${15}
${'16.4.2'} | ${16}
${'17.15.1'} | ${17}
${'18.0.0'} | ${18}
`(
`should use pg_dump $expectedVersion with postgres version $postgresVersion`,
async ({ postgresVersion, expectedVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
await sut.handleBackupDatabase();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
expect.any(Array),
expect.any(Object),
);
},
);
it.each`
postgresVersion
${'13.99.99'}
${'19.0.0'}
`(`should fail if postgres version $postgresVersion is not supported`, async ({ postgresVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
const result = await sut.handleBackupDatabase();
expect(mocks.process.spawn).not.toHaveBeenCalled();
expect(result).toBe(JobStatus.Failed);
});
});
});
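The version table at the end encodes the selection rule: reduce whatever version string Postgres reports to its major number and use the matching bundled `pg_dump`, failing outright for majors outside the supported range rather than guessing. A sketch of that rule (the 14-18 range is inferred from these tests, not from the service code):

```ts
// "14.10 (Debian 14.10-1.pgdg120+1)" -> 14; "19.0.0" -> undefined (unsupported).
function pgDumpPath(postgresVersion: string): string | undefined {
  const major = Number.parseInt(postgresVersion, 10); // parses the leading integer
  if (Number.isNaN(major) || major < 14 || major > 18) {
    return undefined; // the job fails instead of running a mismatched binary
  }
  return `/usr/lib/postgresql/${major}/bin/pg_dump`;
}

console.log(pgDumpPath('14.10 (Debian 14.10-1.pgdg120+1)')); // .../14/bin/pg_dump
console.log(pgDumpPath('19.0.0')); // undefined
```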

View File

@@ -0,0 +1,99 @@
import { Injectable } from '@nestjs/common';
import path from 'node:path';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import {
createDatabaseBackup,
isFailedDatabaseBackupName,
isValidDatabaseRoutineBackupName,
UnsupportedPostgresError,
} from 'src/utils/database-backups';
import { handlePromiseError } from 'src/utils/misc';
@Injectable()
export class BackupService extends BaseService {
private backupLock = false;
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
async onConfigInit({
newConfig: {
backup: { database },
},
}: ArgOf<'ConfigInit'>) {
this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);
if (this.backupLock) {
this.cronRepository.create({
name: 'backupDatabase',
expression: database.cronExpression,
onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.DatabaseBackup }), this.logger),
start: database.enabled,
});
}
}
@OnEvent({ name: 'ConfigUpdate', server: true })
onConfigUpdate({ newConfig: { backup } }: ArgOf<'ConfigUpdate'>) {
if (!this.backupLock) {
return;
}
this.cronRepository.update({
name: 'backupDatabase',
expression: backup.database.cronExpression,
start: backup.database.enabled,
});
}
async cleanupDatabaseBackups() {
this.logger.debug(`Database Backup Cleanup Started`);
const {
backup: { database: config },
} = await this.getConfig({ withCache: false });
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);
const backups = files
.filter((filename) => isValidDatabaseRoutineBackupName(filename))
.toSorted()
.toReversed();
const failedBackups = files.filter((filename) => isFailedDatabaseBackupName(filename));
const toDelete = backups.slice(config.keepLastAmount);
toDelete.push(...failedBackups);
for (const file of toDelete) {
await this.storageRepository.unlink(path.join(backupsFolder, file));
}
this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
}
@OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
async handleBackupDatabase(): Promise<JobStatus> {
try {
await createDatabaseBackup(this.backupRepos);
} catch (error) {
if (error instanceof UnsupportedPostgresError) {
return JobStatus.Failed;
}
throw error;
}
await this.cleanupDatabaseBackups();
return JobStatus.Success;
}
private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
};
}
}
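The `backupRepos` getter is the seam of this refactor: the backup logic now lives in plain functions in `src/utils/database-backups` that accept a bag of repositories, so both `BackupService` and the maintenance worker can call them without injecting a shared Nest service. A minimal sketch of the pattern (interfaces and the filter are illustrative; only the `/data/backups` path appears in the diffs above):

```ts
// Repository bag: just the capabilities the util functions need.
interface BackupRepos {
  logger: { log(msg: string): void };
  storage: { readdir(dir: string): Promise<string[]> };
}

// A free function instead of a service method: trivially callable from any
// worker that can assemble the bag, with no module wiring or DI cycles.
async function listDatabaseBackups(repos: BackupRepos): Promise<string[]> {
  const files = await repos.storage.readdir('/data/backups');
  repos.logger.log(`found ${files.length} files in the backups folder`);
  return files.filter((f) => f.endsWith('.sql.gz')); // completed backups only
}
```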

View File

@@ -1,594 +1,23 @@
import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { PassThrough, Readable } from 'node:stream';
import { defaults, SystemConfig } from 'src/config';
import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { StorageFolder } from 'src/enum';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';
import { MaintenanceService } from 'src/services/maintenance.service';
import { newTestService, ServiceMocks } from 'test/utils';
-describe(DatabaseBackupService.name, () => {
+describe(MaintenanceService.name, () => {
let sut: DatabaseBackupService;
let mocks: ServiceMocks;
let maintenanceHealthRepositoryMock: AutoMocked<MaintenanceHealthRepository>;
beforeEach(() => {
mocks = getMocks();
maintenanceHealthRepositoryMock = automock(MaintenanceHealthRepository, {
args: [mocks.logger],
strict: false,
});
sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
mocks.config,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
maintenanceHealthRepositoryMock as never,
);
({ sut, mocks } = newTestService(DatabaseBackupService));
});
it('should work', () => {
expect(sut).toBeDefined();
});
describe('onBootstrapEvent', () => {
it('should init cron job and handle config changes', async () => {
mocks.database.tryLock.mockResolvedValue(true);
mocks.cron.create.mockResolvedValue();
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(mocks.cron.create).toHaveBeenCalled();
});
it('should not initialize backup database cron job when lock is taken', async () => {
mocks.database.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(mocks.cron.create).not.toHaveBeenCalled();
});
it('should not initialise backup database job when running on microservices', async () => {
mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
expect(mocks.cron.create).not.toHaveBeenCalled();
});
});
describe('onConfigUpdateEvent', () => {
beforeEach(async () => {
mocks.database.tryLock.mockResolvedValue(true);
mocks.cron.create.mockResolvedValue();
await sut.onConfigInit({ newConfig: defaults });
});
it('should update cron job if backup is enabled', () => {
mocks.cron.update.mockResolvedValue();
sut.onConfigUpdate({
oldConfig: defaults,
newConfig: {
backup: {
database: {
enabled: true,
cronExpression: '0 1 * * *',
},
},
} as SystemConfig,
});
expect(mocks.cron.update).toHaveBeenCalledWith({ name: 'backupDatabase', expression: '0 1 * * *', start: true });
expect(mocks.cron.update).toHaveBeenCalled();
});
it('should do nothing if instance does not have the backup database lock', async () => {
mocks.database.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: defaults });
sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
expect(mocks.cron.update).not.toHaveBeenCalled();
});
});
describe('cleanupDatabaseBackups', () => {
it('should do nothing if not reached keepLastAmount', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz']);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).not.toHaveBeenCalled();
});
it('should remove failed backup files', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
//`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
mocks.storage.readdir.mockResolvedValue([
'immich-db-backup-123.sql.gz.tmp',
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
]);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(3);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-123.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz.tmp`,
);
});
it('should remove old backup files over keepLastAmount', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz']);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1.sql.gz`,
);
});
it('should remove old backup files over keepLastAmount and failed backups', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue([
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
'immich-db-backup-1753789649000.sql.gz',
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
]);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(3);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1753789649000.sql.gz`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz`,
);
});
});
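The retention rule these cases pin down is small enough to sketch. A minimal stand-in mirroring `cleanupDatabaseBackups`, assuming the name predicates from `src/utils/database-backups` (`planCleanup` itself is a hypothetical helper, not part of the codebase):

```ts
import { isFailedDatabaseBackupName, isValidDatabaseRoutineBackupName } from 'src/utils/database-backups';

// Keep the newest `keepLastAmount` routine backups; everything older, plus any
// failed (*.tmp) leftovers, is scheduled for deletion.
const planCleanup = (files: string[], keepLastAmount: number): string[] => {
  const routine = files.filter((file) => isValidDatabaseRoutineBackupName(file)).toSorted().toReversed();
  const failed = files.filter((file) => isFailedDatabaseBackupName(file));
  return [...routine.slice(keepLastAmount), ...failed];
};
```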
describe('handleBackupDatabase / createDatabaseBackup', () => {
beforeEach(() => {
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex()('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
});
it('should sanitize DB_URL (remove uselibpqcompat) before calling pg_dumpall', async () => {
// create a service instance with a URL connection that includes libpqcompat
const dbUrl = 'postgresql://postgres:pwd@host:5432/immich?sslmode=require&uselibpqcompat=true';
const configMock = {
getEnv: () => ({ database: { config: { connectionType: 'url', url: dbUrl }, skipMigrations: false } }),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;
sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
void 0 as never,
);
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex()('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
mocks.database.getPostgresVersion.mockResolvedValue('14.10');
await sut.handleBackupDatabase();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalled();
const call = mocks.process.spawnDuplexStream.mock.calls[0];
const args = call[1] as string[];
expect(args).toMatchInlineSnapshot(`
[
"postgresql://postgres:pwd@host:5432/immich?sslmode=require",
"--clean",
"--if-exists",
]
`);
});
it('should run a database backup successfully', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.Success);
expect(mocks.storage.createWriteStream).toHaveBeenCalled();
});
it('should rename file on success', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.Success);
expect(mocks.storage.rename).toHaveBeenCalled();
});
it('should fail if pg_dump fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
});
it('should not rename file if pg_dump fails and gzip succeeds', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
expect(mocks.storage.rename).not.toHaveBeenCalled();
});
it('should fail if gzip fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('gzip', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
});
it('should fail if write stream fails', async () => {
mocks.storage.createWriteStream.mockImplementation(() => {
throw new Error('error');
});
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
});
it('should fail if rename fails', async () => {
mocks.storage.rename.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
});
it('should ignore unlink failing and still return failed job status', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 1, '', 'error'));
mocks.storage.unlink.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
expect(mocks.storage.unlink).toHaveBeenCalled();
});
it.each`
postgresVersion | expectedVersion
${'14.10'} | ${14}
${'14.10.3'} | ${14}
${'14.10 (Debian 14.10-1.pgdg120+1)'} | ${14}
${'15.3.3'} | ${15}
${'16.4.2'} | ${16}
${'17.15.1'} | ${17}
${'18.0.0'} | ${18}
`(
`should use pg_dump $expectedVersion with postgres version $postgresVersion`,
async ({ postgresVersion, expectedVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
await sut.handleBackupDatabase();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
expect.any(Array),
expect.any(Object),
);
},
);
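Each of these rows relies on coercing whatever version string the server reports down to a major release; a one-line sketch of that mapping, using `semver` the same way the service code below does (`pgMajorVersion` is an illustrative name):

```ts
import semver from 'semver';

// "14.10 (Debian 14.10-1.pgdg120+1)" -> 14, "18.0.0" -> 18; undefined when unparseable
const pgMajorVersion = (version: string): number | undefined => semver.coerce(version)?.major;
```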
it.each`
postgresVersion
${'13.99.99'}
${'19.0.0'}
`(`should fail if postgres version $postgresVersion is not supported`, async ({ postgresVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
const result = await sut.handleBackupDatabase();
expect(mocks.process.spawn).not.toHaveBeenCalled();
expect(result).toBe(JobStatus.Failed);
});
});
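For orientation, the happy path under test is a three-stage stream. A condensed sketch, assuming `processRepository`, `storageRepository`, `bin`, `args`, `databasePassword`, and `backupFilePath` are bound as in the service:

```ts
import { pipeline } from 'node:stream/promises';

// pg_dump stdout -> gzip -> temporary file; the .tmp suffix is only dropped
// (via rename) once every stage has exited cleanly, so a crash mid-backup
// leaves a recognizably failed file behind for cleanup.
const dump = processRepository.spawnDuplexStream(bin, args, {
  env: { PATH: process.env.PATH, PGPASSWORD: databasePassword },
});
const gzip = processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
await pipeline(dump, gzip, storageRepository.createWriteStream(`${backupFilePath}.tmp`));
await storageRepository.rename(`${backupFilePath}.tmp`, backupFilePath);
```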
describe('buildPostgresLaunchArguments', () => {
describe('default config', () => {
it('should generate pg_dump arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('pg_dump')).resolves.toMatchInlineSnapshot(`
{
"args": [
"--username",
"postgres",
"--host",
"database",
"--port",
"5432",
"immich",
"--clean",
"--if-exists",
],
"bin": "/usr/lib/postgresql/14/bin/pg_dump",
"databaseMajorVersion": 14,
"databasePassword": "postgres",
"databaseUsername": "postgres",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
it('should generate psql arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('psql')).resolves.toMatchInlineSnapshot(`
{
"args": [
"--username",
"postgres",
"--host",
"database",
"--port",
"5432",
"--dbname",
"immich",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "postgres",
"databaseUsername": "postgres",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
it('should generate psql (single transaction) arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('psql', { singleTransaction: true })).resolves
.toMatchInlineSnapshot(`
{
"args": [
"--username",
"postgres",
"--host",
"database",
"--port",
"5432",
"--dbname",
"immich",
"--single-transaction",
"--set",
"ON_ERROR_STOP=on",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "postgres",
"databaseUsername": "postgres",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
});
describe('using custom parts', () => {
beforeEach(() => {
const configMock = {
getEnv: () => ({
database: {
config: {
connectionType: 'parts',
host: 'myhost',
port: 1234,
username: 'mypg',
password: 'mypwd',
database: 'myimmich',
},
skipMigrations: false,
},
}),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;
sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
void 0 as never,
);
});
it('should generate pg_dump arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('pg_dump')).resolves.toMatchInlineSnapshot(`
{
"args": [
"--username",
"mypg",
"--host",
"myhost",
"--port",
"1234",
"myimmich",
"--clean",
"--if-exists",
],
"bin": "/usr/lib/postgresql/14/bin/pg_dump",
"databaseMajorVersion": 14,
"databasePassword": "mypwd",
"databaseUsername": "mypg",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
it('should generate psql (single transaction) arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('psql', { singleTransaction: true })).resolves
.toMatchInlineSnapshot(`
{
"args": [
"--username",
"mypg",
"--host",
"myhost",
"--port",
"1234",
"--dbname",
"myimmich",
"--single-transaction",
"--set",
"ON_ERROR_STOP=on",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "mypwd",
"databaseUsername": "mypg",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
});
describe('using URL', () => {
beforeEach(() => {
const dbUrl = 'postgresql://mypg:mypwd@myhost:1234/myimmich?sslmode=require&uselibpqcompat=true';
const configMock = {
getEnv: () => ({ database: { config: { connectionType: 'url', url: dbUrl }, skipMigrations: false } }),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;
sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
void 0 as never,
);
});
it('should generate pg_dump arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('pg_dump')).resolves.toMatchInlineSnapshot(`
{
"args": [
"postgresql://mypg:mypwd@myhost:1234/myimmich?sslmode=require",
"--clean",
"--if-exists",
],
"bin": "/usr/lib/postgresql/14/bin/pg_dump",
"databaseMajorVersion": 14,
"databasePassword": "mypwd",
"databaseUsername": "mypg",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
it('should generate psql (single transaction) arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('psql', { singleTransaction: true })).resolves
.toMatchInlineSnapshot(`
{
"args": [
"--dbname",
"postgresql://mypg:mypwd@myhost:1234/myimmich?sslmode=require",
"--single-transaction",
"--set",
"ON_ERROR_STOP=on",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "mypwd",
"databaseUsername": "mypg",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
});
describe('using bad URL', () => {
beforeEach(() => {
const dbUrl = 'post://gresql://mypg:myp@wd@myhos:t:1234/myimmich?sslmode=require&uselibpqcompat=true';
const configMock = {
getEnv: () => ({ database: { config: { connectionType: 'url', url: dbUrl }, skipMigrations: false } }),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;
sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
void 0 as never,
);
});
it('should fallback to reasonable defaults', async () => {
await expect(sut.buildPostgresLaunchArguments('psql')).resolves.toMatchInlineSnapshot(`
{
"args": [
"--dbname",
"post://gresql//mypg:myp@wd@myhos:t:1234/myimmich?sslmode=require",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "",
"databaseUsername": "",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
});
});
describe('uploadBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});
it('should write file', async () => {
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
});
});
describe('downloadBackup', () => {
it('should reject invalid file names', () => {
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
});
it('should get backup path', () => {
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
expect.objectContaining({
path: '/data/backups/hello.sql.gz',
}),
);
});
});
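Both endpoints gate on the same filename check before touching storage. A hypothetical stand-in for `isValidDatabaseBackupName` (the real predicate lives in `src/utils/database-backups` and may differ):

```ts
// Hypothetical: accept plain *.sql / *.sql.gz basenames only, so names with
// spaces or path separators (like 'invalid backup') are rejected up front.
const isValidDatabaseBackupName = (name: string): boolean => /^[\w.-]+\.sql(\.gz)?$/.test(name);
```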
describe('listBackups', () => {
it('should give us all backups', async () => {
mocks.storage.readdir.mockResolvedValue([
@@ -625,233 +54,30 @@ describe(DatabaseBackupService.name, () => {
});
});
describe('restoreDatabaseBackup', () => {
beforeEach(() => {
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.fork.mockImplementation(() => mockSpawn(0, 'Immich Server is listening', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
mocks.storage.createGzip.mockReturnValue(new PassThrough());
mocks.storage.createGunzip.mockReturnValue(new PassThrough());
const configMock = {
getEnv: () => ({
database: {
config: {
connectionType: 'parts',
host: 'myhost',
port: 1234,
username: 'mypg',
password: 'mypwd',
database: 'myimmich',
},
skipMigrations: false,
},
}),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;
sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
maintenanceHealthRepositoryMock,
);
});
it('should fail to restore invalid backup', async () => {
await expect(sut.restoreDatabaseBackup('filename')).rejects.toThrowErrorMatchingInlineSnapshot(
`[Error: Invalid backup file format!]`,
);
});
it('should successfully restore a backup', async () => {
let writtenToPsql = '';
mocks.process.spawnDuplexStream.mockImplementationOnce(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementationOnce(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementationOnce(() => {
return mockDuplex((chunk) => (writtenToPsql += chunk))('command', 0, 'data', '');
});
const progress = vitest.fn();
await sut.restoreDatabaseBackup('development-filename.sql', progress);
expect(progress).toHaveBeenCalledWith('backup', 0.05);
expect(progress).toHaveBeenCalledWith('migrations', 0.9);
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);
expect(mocks.process.spawnDuplexStream).toHaveBeenLastCalledWith(
expect.stringMatching('/bin/psql'),
[
'--username',
'mypg',
'--host',
'myhost',
'--port',
'1234',
'--dbname',
'myimmich',
'--single-transaction',
'--set',
'ON_ERROR_STOP=on',
'--echo-all',
'--output=/dev/null',
],
expect.objectContaining({
env: expect.objectContaining({
PATH: expect.any(String),
PGPASSWORD: 'mypwd',
}),
}),
);
expect(writtenToPsql).toMatchInlineSnapshot(`
"
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
AND pid <> pg_backend_pid();
-- re-create the default schema
DROP SCHEMA public CASCADE;
CREATE SCHEMA public;
-- restore access to schema
GRANT ALL ON SCHEMA public TO "mypg";
GRANT ALL ON SCHEMA public TO public;
SELECT 1;"
`);
});
it('should generate pg_dumpall specific SQL instructions', async () => {
let writtenToPsql = '';
mocks.process.spawnDuplexStream.mockImplementationOnce(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementationOnce(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementationOnce(() => {
return mockDuplex((chunk) => (writtenToPsql += chunk))('command', 0, 'data', '');
});
const progress = vitest.fn();
await sut.restoreDatabaseBackup('development-v2.4.0-.sql', progress);
expect(progress).toHaveBeenCalledWith('backup', 0.05);
expect(progress).toHaveBeenCalledWith('migrations', 0.9);
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);
expect(mocks.process.spawnDuplexStream).toHaveBeenLastCalledWith(
expect.stringMatching('/bin/psql'),
[
'--username',
'mypg',
'--host',
'myhost',
'--port',
'1234',
'--dbname',
'myimmich',
'--echo-all',
'--output=/dev/null',
],
expect.objectContaining({
env: expect.objectContaining({
PATH: expect.any(String),
PGPASSWORD: 'mypwd',
}),
}),
);
expect(writtenToPsql).toMatchInlineSnapshot(String.raw`
"
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
AND pid <> pg_backend_pid();
\c postgres
SELECT 1;"
`);
});
it('should fail if backup creation fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 1, '', 'error'));
const progress = vitest.fn();
await expect(sut.restoreDatabaseBackup('development-filename.sql', progress)).rejects
.toThrowErrorMatchingInlineSnapshot(`
[Error: pg_dump non-zero exit code (1)
error]
`);
expect(progress).toHaveBeenCalledWith('backup', 0.05);
});
it('should fail if restore itself fails', async () => {
mocks.process.spawnDuplexStream
.mockReturnValueOnce(mockDuplex()('pg_dump', 0, 'data', ''))
.mockReturnValueOnce(mockDuplex()('gzip', 0, 'data', ''))
.mockReturnValueOnce(mockDuplex()('psql', 1, '', 'error'));
const progress = vitest.fn();
await expect(sut.restoreDatabaseBackup('development-filename.sql', progress)).rejects
.toThrowErrorMatchingInlineSnapshot(`
[Error: psql non-zero exit code (1)
error]
`);
expect(progress).toHaveBeenCalledWith('backup', 0.05);
});
it('should rollback if database migrations fail', async () => {
mocks.database.runMigrations.mockRejectedValue(new Error('Migrations Error'));
const progress = vitest.fn();
await expect(
sut.restoreDatabaseBackup('development-filename.sql', progress),
).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Migrations Error]`);
expect(progress).toHaveBeenCalledWith('backup', 0.05);
expect(progress).toHaveBeenCalledWith('migrations', 0.9);
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalledTimes(0);
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
});
it('should rollback if API healthcheck fails', async () => {
maintenanceHealthRepositoryMock.checkApiHealth.mockRejectedValue(new Error('Health Error'));
const progress = vitest.fn();
await expect(
sut.restoreDatabaseBackup('development-filename.sql', progress),
).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Health Error]`);
expect(progress).toHaveBeenCalledWith('backup', 0.05);
expect(progress).toHaveBeenCalledWith('migrations', 0.9);
expect(progress).toHaveBeenCalledWith('rollback', 0);
expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
});
});
});
function* mockData() {
yield 'SELECT 1;';
}

View File

@@ -1,560 +1,43 @@
import { BadRequestException, Injectable, Optional } from '@nestjs/common';
import { debounce } from 'lodash';
import { DateTime } from 'luxon';
import path, { basename } from 'node:path';
import { PassThrough, Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { Injectable } from '@nestjs/common';
import { DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
import { CacheControl, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CronRepository } from 'src/repositories/cron.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { ArgOf } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { getConfig } from 'src/utils/config';
import { BaseService } from 'src/services/base.service';
import {
findDatabaseBackupVersion,
isFailedDatabaseBackupName,
isValidDatabaseBackupName,
isValidDatabaseRoutineBackupName,
UnsupportedPostgresError,
deleteDatabaseBackup,
downloadDatabaseBackup,
listDatabaseBackups,
uploadDatabaseBackup,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';
import { handlePromiseError } from 'src/utils/misc';
/**
* This service is available outside of maintenance mode to manage maintenance mode
*/
@Injectable()
export class DatabaseBackupService {
constructor(
private readonly logger: LoggingRepository,
private readonly storageRepository: StorageRepository,
private readonly configRepository: ConfigRepository,
private readonly systemMetadataRepository: SystemMetadataRepository,
private readonly processRepository: ProcessRepository,
private readonly databaseRepository: DatabaseRepository,
@Optional()
private readonly cronRepository: CronRepository,
@Optional()
private readonly jobRepository: JobRepository,
@Optional()
private readonly maintenanceHealthRepository: MaintenanceHealthRepository,
) {
this.logger.setContext(this.constructor.name);
}
export class DatabaseBackupService extends BaseService {
async listBackups(): Promise<DatabaseBackupListResponseDto> {
const backups = await listDatabaseBackups(this.backupRepos);
return { backups };
}
private backupLock = false;
@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
async onConfigInit({
newConfig: {
backup: { database },
},
}: ArgOf<'ConfigInit'>) {
if (!this.cronRepository || !this.jobRepository) {
return;
}
this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);
if (this.backupLock) {
this.cronRepository.create({
name: 'backupDatabase',
expression: database.cronExpression,
onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.DatabaseBackup }), this.logger),
start: database.enabled,
});
}
}
@OnEvent({ name: 'ConfigUpdate', server: true })
onConfigUpdate({ newConfig: { backup } }: ArgOf<'ConfigUpdate'>) {
if (!this.cronRepository || !this.jobRepository || !this.backupLock) {
return;
}
this.cronRepository.update({
name: 'backupDatabase',
expression: backup.database.cronExpression,
start: backup.database.enabled,
});
}
@OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
async handleBackupDatabase(): Promise<JobStatus> {
try {
await this.createDatabaseBackup();
} catch (error) {
if (error instanceof UnsupportedPostgresError) {
return JobStatus.Failed;
}
throw error;
}
await this.cleanupDatabaseBackups();
return JobStatus.Success;
}
async buildPostgresLaunchArguments(
bin: 'pg_dump' | 'pg_dumpall' | 'psql',
options: {
singleTransaction?: boolean;
} = {},
): Promise<{
bin: string;
args: string[];
databaseUsername: string;
databasePassword: string;
databaseVersion: string;
databaseMajorVersion?: number;
}> {
const {
database: { config: databaseConfig },
} = this.configRepository.getEnv();
const isUrlConnection = databaseConfig.connectionType === 'url';
const databaseVersion = await this.databaseRepository.getPostgresVersion();
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;
const args: string[] = [];
let databaseUsername;
if (isUrlConnection) {
if (bin !== 'pg_dump') {
args.push('--dbname');
}
let url = databaseConfig.url;
if (URL.canParse(databaseConfig.url)) {
const parsedUrl = new URL(databaseConfig.url);
// remove known bad parameters
parsedUrl.searchParams.delete('uselibpqcompat');
databaseUsername = parsedUrl.username;
url = parsedUrl.toString();
}
// assume typical values if we can't parse URL or not present
databaseUsername ??= 'postgres';
args.push(url);
} else {
databaseUsername = databaseConfig.username;
args.push(
'--username',
databaseUsername,
'--host',
databaseConfig.host,
'--port',
databaseConfig.port.toString(),
);
switch (bin) {
case 'pg_dumpall': {
args.push('--database');
break;
}
case 'psql': {
args.push('--dbname');
break;
}
}
args.push(databaseConfig.database);
}
switch (bin) {
case 'pg_dump':
case 'pg_dumpall': {
args.push('--clean', '--if-exists');
break;
}
case 'psql': {
if (options.singleTransaction) {
args.push(
// don't commit any transaction on failure
'--single-transaction',
// exit with non-zero code on error
'--set',
'ON_ERROR_STOP=on',
);
}
args.push(
// used for progress monitoring
'--echo-all',
'--output=/dev/null',
);
break;
}
}
if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
this.logger.error(`Database Restore Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
throw new UnsupportedPostgresError(databaseVersion);
}
return {
bin: `/usr/lib/postgresql/${databaseMajorVersion}/bin/${bin}`,
args,
databaseUsername,
databasePassword: isUrlConnection ? new URL(databaseConfig.url).password : databaseConfig.password,
databaseVersion,
databaseMajorVersion,
};
}
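The URL branch above is what the `uselibpqcompat` test exercises. A standalone sketch of that sanitization using Node's WHATWG `URL` API, with the same pass-through fallback for unparseable strings (`sanitizeDatabaseUrl` is an illustrative name):

```ts
// Strip query parameters the Postgres CLI tools do not understand, keeping the
// rest of the connection string intact; unparseable URLs pass through as-is.
const sanitizeDatabaseUrl = (raw: string): string => {
  if (!URL.canParse(raw)) {
    return raw;
  }
  const url = new URL(raw);
  url.searchParams.delete('uselibpqcompat');
  return url.toString();
};

// 'postgresql://postgres:pwd@host:5432/immich?sslmode=require&uselibpqcompat=true'
// -> 'postgresql://postgres:pwd@host:5432/immich?sslmode=require'
```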
async createDatabaseBackup(filenamePrefix: string = ''): Promise<string> {
this.logger.debug(`Database Backup Started`);
const { bin, args, databasePassword, databaseVersion, databaseMajorVersion } =
await this.buildPostgresLaunchArguments('pg_dump');
this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);
const filename = `${filenamePrefix}immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz`;
const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
const temporaryFilePath = `${backupFilePath}.tmp`;
try {
const pgdump = this.processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});
const gzip = this.processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
const fileStream = this.storageRepository.createWriteStream(temporaryFilePath);
await pipeline(pgdump, gzip, fileStream);
await this.storageRepository.rename(temporaryFilePath, backupFilePath);
} catch (error) {
this.logger.error(`Database Backup Failure: ${error}`);
await this.storageRepository
.unlink(temporaryFilePath)
.catch((error) => this.logger.error(`Failed to delete failed backup file: ${error}`));
throw error;
}
this.logger.log(`Database Backup Success`);
return backupFilePath;
}
async uploadBackup(file: Express.Multer.File): Promise<void> {
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const fn = basename(file.originalname);
if (!isValidDatabaseBackupName(fn)) {
throw new BadRequestException('Invalid backup name!');
}
const filePath = path.join(backupsFolder, `uploaded-${fn}`);
await this.storageRepository.createOrOverwriteFile(filePath, file.buffer);
}
downloadBackup(fileName: string): ImmichFileResponse {
if (!isValidDatabaseBackupName(fileName)) {
throw new BadRequestException('Invalid backup name!');
}
const filePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), fileName);
return {
path: filePath,
fileName,
cacheControl: CacheControl.PrivateWithoutCache,
contentType: fileName.endsWith('.gz') ? 'application/gzip' : 'application/sql',
};
}
deleteBackup(files: string[]): Promise<void> {
return deleteDatabaseBackup(this.backupRepos, files);
}
async uploadBackup(file: Express.Multer.File): Promise<void> {
return uploadDatabaseBackup(this.backupRepos, file);
}
downloadBackup(fileName: string): ImmichFileResponse {
return downloadDatabaseBackup(fileName);
}
private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
};
}
async listBackups(): Promise<DatabaseBackupListResponseDto> {
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);
const validFiles = files
.filter((fn) => isValidDatabaseBackupName(fn))
.toSorted((a, b) => (a.startsWith('uploaded-') === b.startsWith('uploaded-') ? a.localeCompare(b) : 1))
.toReversed();
const backups = await Promise.all(
validFiles.map(async (filename) => {
const stats = await this.storageRepository.stat(path.join(backupsFolder, filename));
return { filename, filesize: stats.size };
}),
);
return {
backups,
};
}
async deleteBackup(files: string[]): Promise<void> {
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
if (files.some((filename) => !isValidDatabaseBackupName(filename))) {
throw new BadRequestException('Invalid backup name!');
}
await Promise.all(files.map((filename) => this.storageRepository.unlink(path.join(backupsFolder, filename))));
}
async cleanupDatabaseBackups() {
this.logger.debug(`Database Backup Cleanup Started`);
const {
backup: { database: config },
} = await getConfig(
{
configRepo: this.configRepository,
metadataRepo: this.systemMetadataRepository,
logger: this.logger,
},
{
withCache: false,
},
);
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);
const backups = files
.filter((filename) => isValidDatabaseRoutineBackupName(filename))
.toSorted()
.toReversed();
const failedBackups = files.filter((filename) => isFailedDatabaseBackupName(filename));
const toDelete = backups.slice(config.keepLastAmount);
toDelete.push(...failedBackups);
for (const file of toDelete) {
await this.storageRepository.unlink(path.join(backupsFolder, file));
}
this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
}
async restoreDatabaseBackup(
filename: string,
progressCb?: (action: 'backup' | 'restore' | 'migrations' | 'rollback', progress: number) => void,
): Promise<void> {
this.logger.debug(`Database Restore Started`);
let complete = false;
try {
if (!isValidDatabaseBackupName(filename)) {
throw new Error('Invalid backup file format!');
}
const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
await this.storageRepository.stat(backupFilePath); // => check file exists
let isPgClusterDump = false;
const version = findDatabaseBackupVersion(filename);
if (version && semver.satisfies(version, '<= 2.4')) {
isPgClusterDump = true;
}
const { bin, args, databaseUsername, databasePassword, databaseMajorVersion } =
await this.buildPostgresLaunchArguments('psql', {
singleTransaction: !isPgClusterDump,
});
progressCb?.('backup', 0.05);
const restorePointFilePath = await this.createDatabaseBackup('restore-point-');
this.logger.log(`Database Restore Starting. Database Version: ${databaseMajorVersion}`);
let inputStream: Readable;
if (backupFilePath.endsWith('.gz')) {
const fileStream = this.storageRepository.createPlainReadStream(backupFilePath);
const gunzip = this.storageRepository.createGunzip();
fileStream.pipe(gunzip);
inputStream = gunzip;
} else {
inputStream = this.storageRepository.createPlainReadStream(backupFilePath);
}
const sqlStream = Readable.from(sql(inputStream, databaseUsername, isPgClusterDump));
const psql = this.processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});
const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
if (complete) {
return;
}
this.logger.log(`Restore progress ~ ${(progress * 100).toFixed(2)}%`);
progressCb?.('restore', progress);
});
await pipeline(sqlStream, progressSource, psql, progressSink);
try {
progressCb?.('migrations', 0.9);
await this.databaseRepository.runMigrations();
await this.maintenanceHealthRepository.checkApiHealth();
} catch (error) {
progressCb?.('rollback', 0);
const fileStream = this.storageRepository.createPlainReadStream(restorePointFilePath);
const gunzip = this.storageRepository.createGunzip();
fileStream.pipe(gunzip);
inputStream = gunzip;
const sqlStream = Readable.from(sqlRollback(inputStream, databaseUsername));
const psql = this.processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});
const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
if (complete) {
return;
}
this.logger.log(`Rollback progress ~ ${(progress * 100).toFixed(2)}%`);
progressCb?.('rollback', progress);
});
await pipeline(sqlStream, progressSource, psql, progressSink);
throw error;
}
} catch (error) {
this.logger.error(`Database Restore Failure: ${error}`);
throw error;
} finally {
complete = true;
}
this.logger.log(`Database Restore Success`);
}
}
const SQL_DROP_CONNECTIONS = `
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
AND pid <> pg_backend_pid();
`;
const SQL_RESET_SCHEMA = (username: string) => `
-- re-create the default schema
DROP SCHEMA public CASCADE;
CREATE SCHEMA public;
-- restore access to schema
GRANT ALL ON SCHEMA public TO "${username}";
GRANT ALL ON SCHEMA public TO public;
`;
async function* sql(inputStream: Readable, databaseUsername: string, isPgClusterDump: boolean) {
yield SQL_DROP_CONNECTIONS;
yield isPgClusterDump
? // the dump likely contains SQL that tries to drop the currently active
// database to ensure a fresh slate; if the `postgres` database exists,
// switch to it before continuing, otherwise this will just silently fail
String.raw`
\c postgres
`
: SQL_RESET_SCHEMA(databaseUsername);
for await (const chunk of inputStream) {
yield chunk;
}
}
async function* sqlRollback(inputStream: Readable, databaseUsername: string) {
yield SQL_DROP_CONNECTIONS;
yield SQL_RESET_SCHEMA(databaseUsername);
for await (const chunk of inputStream) {
yield chunk;
}
}
function createSqlProgressStreams(cb: (progress: number) => void) {
const STDIN_START_MARKER = new TextEncoder().encode('FROM stdin');
const STDIN_END_MARKER = new TextEncoder().encode(String.raw`\.`);
let readingStdin = false;
let sequenceIdx = 0;
let linesSent = 0;
let linesProcessed = 0;
const startedAt = +Date.now();
const cbDebounced = debounce(
() => {
const progress = source.writableEnded
? Math.min(1, linesProcessed / linesSent)
: // progress simulation while we're in an indeterminate state
Math.min(0.3, 0.1 + (Date.now() - startedAt) / 1e4);
cb(progress);
},
100,
{
maxWait: 100,
},
);
let lastByte = -1;
const source = new PassThrough({
transform(chunk, _encoding, callback) {
for (const byte of chunk) {
if (!readingStdin && byte === 10 && lastByte !== 10) {
linesSent += 1;
}
lastByte = byte;
const sequence = readingStdin ? STDIN_END_MARKER : STDIN_START_MARKER;
if (sequence[sequenceIdx] === byte) {
sequenceIdx += 1;
if (sequence.length === sequenceIdx) {
sequenceIdx = 0;
readingStdin = !readingStdin;
}
} else {
sequenceIdx = 0;
}
}
cbDebounced();
this.push(chunk);
callback();
},
});
const sink = new Writable({
write(chunk, _encoding, callback) {
for (const byte of chunk) {
if (byte === 10) {
linesProcessed++;
}
}
cbDebounced();
callback();
},
});
return [source, sink];
}
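Usage-wise, the pair wraps the psql process on both sides; a sketch matching how `restoreDatabaseBackup` wires it up, assuming `sqlStream` and `psql` as built there:

```ts
import { pipeline } from 'node:stream/promises';

// linesSent is counted on the way into psql, linesProcessed on the way out of
// its --echo-all output, so the ratio approximates restore progress.
const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
  console.log(`restore ~ ${(progress * 100).toFixed(2)}%`);
});
await pipeline(sqlStream, progressSource, psql, progressSink);
```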

View File

@@ -2,6 +2,7 @@ import { BadRequestException } from '@nestjs/common';
import { Readable } from 'node:stream';
import { DownloadResponseDto } from 'src/dtos/download.dto';
import { DownloadService } from 'src/services/download.service';
import { AssetFactory } from 'test/factories/asset.factory';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
@@ -60,22 +61,22 @@ describe(DownloadService.name, () => {
stream: new Readable(),
};
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
const asset1 = AssetFactory.create();
const asset2 = AssetFactory.create();
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([asset1.id, asset2.id]));
mocks.storage.realpath.mockRejectedValue(new Error('Could not read file'));
mocks.asset.getByIds.mockResolvedValue([
{ ...assetStub.noResizePath, id: 'asset-1' },
{ ...assetStub.noWebpPath, id: 'asset-2' },
]);
mocks.asset.getByIds.mockResolvedValue([asset1, asset2]);
mocks.storage.createZipStream.mockReturnValue(archiveMock);
await expect(sut.downloadArchive(authStub.admin, { assetIds: ['asset-1', 'asset-2'] })).resolves.toEqual({
await expect(sut.downloadArchive(authStub.admin, { assetIds: [asset1.id, asset2.id] })).resolves.toEqual({
stream: archiveMock.stream,
});
expect(mocks.logger.warn).toHaveBeenCalledTimes(2);
expect(archiveMock.addFile).toHaveBeenCalledTimes(2);
expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, '/data/library/IMG_123.jpg', 'IMG_123.jpg');
expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, '/data/library/IMG_456.jpg', 'IMG_456.jpg');
expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, asset1.originalPath, asset1.originalFileName);
expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, asset2.originalPath, asset2.originalFileName);
});
it('should download an archive', async () => {
@@ -85,20 +86,20 @@ describe(DownloadService.name, () => {
stream: new Readable(),
};
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
mocks.asset.getByIds.mockResolvedValue([
{ ...assetStub.noResizePath, id: 'asset-1' },
{ ...assetStub.noWebpPath, id: 'asset-2' },
]);
const asset1 = AssetFactory.create();
const asset2 = AssetFactory.create();
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([asset1.id, asset2.id]));
mocks.asset.getByIds.mockResolvedValue([asset1, asset2]);
mocks.storage.createZipStream.mockReturnValue(archiveMock);
await expect(sut.downloadArchive(authStub.admin, { assetIds: ['asset-1', 'asset-2'] })).resolves.toEqual({
await expect(sut.downloadArchive(authStub.admin, { assetIds: [asset1.id, asset2.id] })).resolves.toEqual({
stream: archiveMock.stream,
});
expect(archiveMock.addFile).toHaveBeenCalledTimes(2);
expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, '/data/library/IMG_123.jpg', 'IMG_123.jpg');
expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, '/data/library/IMG_456.jpg', 'IMG_456.jpg');
expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, asset1.originalPath, asset1.originalFileName);
expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, asset2.originalPath, asset2.originalFileName);
});
it('should handle duplicate file names', async () => {
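The refactor swaps fixed stubs for factory-built assets so every assertion derives from the generated data rather than hard-coded ids and paths. A hypothetical sketch of the builder's shape (the real one lives in `test/factories/asset.factory` and may differ; only methods used in these diffs are shown):

```ts
import { randomUUID } from 'node:crypto';

type Overrides = Record<string, unknown>;

// Hypothetical: each call yields a fresh asset with unique id/paths, with
// chainable overrides applied before build().
class AssetFactory {
  private asset: Overrides = {
    id: randomUUID(),
    originalFileName: 'IMG.jpg',
    originalPath: `/data/library/${randomUUID()}.jpg`,
  };
  static create(overrides: Overrides = {}) {
    return AssetFactory.from(overrides).build();
  }
  static from(overrides: Overrides = {}) {
    const builder = new AssetFactory();
    Object.assign(builder.asset, overrides);
    return builder;
  }
  exif(overrides: Overrides = {}) {
    this.asset.exifInfo = { ...overrides };
    return this;
  }
  build() {
    return this.asset;
  }
}
```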

View File

@@ -7,6 +7,7 @@ import { AssetService } from 'src/services/asset.service';
import { AuditService } from 'src/services/audit.service';
import { AuthAdminService } from 'src/services/auth-admin.service';
import { AuthService } from 'src/services/auth.service';
import { BackupService } from 'src/services/backup.service';
import { CliService } from 'src/services/cli.service';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { DatabaseService } from 'src/services/database.service';
@@ -57,6 +58,7 @@ export const services = [
AuditService,
AuthService,
AuthAdminService,
BackupService,
CliService,
DatabaseBackupService,
DatabaseService,

View File

@@ -18,6 +18,7 @@ import {
} from 'src/enum';
import { MediaService } from 'src/services/media.service';
import { JobCounts, RawImageInfo } from 'src/types';
import { AssetFactory } from 'test/factories/asset.factory';
import { assetStub, previewFile } from 'test/fixtures/asset.stub';
import { faceStub } from 'test/fixtures/face.stub';
import { probeStub } from 'test/fixtures/media.stub';
@@ -139,33 +140,30 @@ describe(MediaService.name, () => {
expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
});
it('should queue all assets with missing webp path', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.noWebpPath]));
it('should queue all assets with missing preview', async () => {
const asset = AssetFactory.create();
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([asset]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false });
expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith({ force: false, fullsizeEnabled: false });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.AssetGenerateThumbnails,
data: { id: assetStub.image.id },
},
{ name: JobName.AssetGenerateThumbnails, data: { id: asset.id } },
]);
expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
});
it('should queue all assets with missing thumbhash', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.noThumbhash]));
const asset = AssetFactory.from({ thumbhash: null })
.files([AssetFileType.Thumbnail, AssetFileType.Preview])
.build();
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([asset]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false });
expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith({ force: false, fullsizeEnabled: false });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.AssetGenerateThumbnails,
data: { id: assetStub.image.id },
},
{ name: JobName.AssetGenerateThumbnails, data: { id: asset.id } },
]);
expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
@@ -1052,12 +1050,19 @@ describe(MediaService.name, () => {
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
bitsPerSample: 14,
})
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.imageHif.originalPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
});
@@ -1107,12 +1112,19 @@ describe(MediaService.name, () => {
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.media.copyTagGroup.mockResolvedValue(true);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.panoramaTif);
const asset = AssetFactory.from({ originalFileName: 'panorama.tif' })
.exif({
fileSizeInByte: 5000,
projectionType: 'EQUIRECTANGULAR',
})
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.panoramaTif.originalPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
colorspace: Colorspace.Srgb,
orientation: undefined,
processInvalidImages: false,
@@ -1135,11 +1147,7 @@ describe(MediaService.name, () => {
);
expect(mocks.media.copyTagGroup).toHaveBeenCalledTimes(2);
expect(mocks.media.copyTagGroup).toHaveBeenCalledWith(
'XMP-GPano',
assetStub.panoramaTif.originalPath,
expect.any(String),
);
expect(mocks.media.copyTagGroup).toHaveBeenCalledWith('XMP-GPano', asset.originalPath, expect.any(String));
});
it('should respect encoding options when generating full-size preview', async () => {
@@ -1149,12 +1157,19 @@ describe(MediaService.name, () => {
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
bitsPerSample: 14,
})
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.imageHif.originalPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
});
@@ -1181,9 +1196,16 @@ describe(MediaService.name, () => {
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
bitsPerSample: 14,
})
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
@@ -1263,30 +1285,25 @@ describe(MediaService.name, () => {
});
it('should clean up edited files if an asset has no edits', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withoutEdits,
});
const asset = AssetFactory.from({ thumbhash: factory.buffer() })
.exif()
.files([
{ type: AssetFileType.Preview, path: 'edited1.jpg', isEdited: true },
{ type: AssetFileType.Thumbnail, path: 'edited2.jpg', isEdited: true },
{ type: AssetFileType.FullSize, path: 'edited3.jpg', isEdited: true },
])
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
const status = await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
const status = await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: {
files: expect.arrayContaining([
'/uploads/user-id/fullsize/path_edited.jpg',
'/uploads/user-id/preview/path_edited.jpg',
'/uploads/user-id/thumbnail/path_edited.jpg',
]),
files: expect.arrayContaining(['edited1.jpg', 'edited2.jpg', 'edited3.jpg']),
},
});
expect(mocks.asset.deleteFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ path: '/uploads/user-id/preview/path_edited.jpg' }),
expect.objectContaining({ path: '/uploads/user-id/thumbnail/path_edited.jpg' }),
expect.objectContaining({ path: '/uploads/user-id/fullsize/path_edited.jpg' }),
]),
);
expect(status).toBe(JobStatus.Success);
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
@@ -1320,11 +1337,9 @@ describe(MediaService.name, () => {
});
it('should generate the original thumbhash if no edits exist', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withoutEdits,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
const asset = AssetFactory.from().exif().build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
mocks.media.generateThumbhash.mockResolvedValue(factory.buffer());
await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id, source: 'upload' });
@@ -1335,18 +1350,14 @@ describe(MediaService.name, () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
const thumbhashBuffer = factory.buffer();
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
thumbhash: thumbhashBuffer,
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith(expect.objectContaining({ thumbhash: thumbhashBuffer }));
});
});

View File

@@ -185,7 +185,7 @@ export class MediaService extends BaseService {
const generated = await this.generateEditedThumbnails(asset, config);
await this.syncFiles(
asset.files.filter((asset) => asset.isEdited),
asset.files.filter((file) => file.isEdited),
generated?.files ?? [],
);
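The rename here fixes a shadowed parameter rather than behavior: inside the old callback, `asset` referred to each file, not the outer asset. A minimal illustration:

```ts
type AssetFile = { isEdited: boolean };
const asset = { files: [{ isEdited: true }, { isEdited: false }] as AssetFile[] };

// Both filters return the same result; the old one shadowed `asset`, which
// made the predicate read as if it inspected the asset itself.
const before = asset.files.filter((asset) => asset.isEdited);
const after = asset.files.filter((file) => file.isEdited);
```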

View File

@@ -16,6 +16,7 @@ import {
} from 'src/enum';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
import { AssetFactory } from 'test/factories/asset.factory';
import { assetStub } from 'test/fixtures/asset.stub';
import { fileStub } from 'test/fixtures/file.stub';
import { probeStub } from 'test/fixtures/media.stub';
@@ -24,13 +25,6 @@ import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
const removeNonSidecarFiles = (asset: any) => {
return {
...asset,
files: asset.files.filter((file: any) => file.type === AssetFileType.Sidecar),
};
};
const forSidecarJob = (
asset: {
id?: string;
@@ -182,17 +176,18 @@ describe(MetadataService.name, () => {
it('should handle a date in a sidecar file', async () => {
const originalDate = new Date('2023-11-21T16:13:17.517Z');
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
const asset = AssetFactory.from().file({ type: AssetFileType.Sidecar }).build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.sidecar.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate }), {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
id: assetStub.image.id,
id: asset.id,
duration: null,
fileCreatedAt: sidecarDate,
localDateTime: sidecarDate,
@@ -203,7 +198,8 @@ describe(MetadataService.name, () => {
it('should take the file modification date when missing exif and earlier than creation date', async () => {
const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -212,14 +208,14 @@ describe(MetadataService.name, () => {
} as Stats);
mockReadTags();
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ dateTimeOriginal: fileModifiedAt }),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
id: asset.id,
duration: null,
fileCreatedAt: fileModifiedAt,
fileModifiedAt,
@@ -232,7 +228,8 @@ describe(MetadataService.name, () => {
it('should take the file creation date when missing exif and earlier than modification date', async () => {
const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -241,14 +238,14 @@ describe(MetadataService.name, () => {
} as Stats);
mockReadTags();
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ dateTimeOriginal: fileCreatedAt }),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
id: asset.id,
duration: null,
fileCreatedAt,
fileModifiedAt,
@@ -260,10 +257,11 @@ describe(MetadataService.name, () => {
it('should determine dateTimeOriginal regardless of the server time zone', async () => {
process.env.TZ = 'America/Los_Angeles';
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
dateTimeOriginal: new Date('2022-01-01T00:00:00.000Z'),
@@ -279,16 +277,15 @@ describe(MetadataService.name, () => {
});
it('should handle lists of numbers', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.image.fileModifiedAt,
mtimeMs: assetStub.image.fileModifiedAt.valueOf(),
birthtimeMs: assetStub.image.fileCreatedAt.valueOf(),
mtime: asset.fileModifiedAt,
mtimeMs: asset.fileModifiedAt.valueOf(),
birthtimeMs: asset.fileCreatedAt.valueOf(),
} as Stats);
mockReadTags({
ISO: [160],
});
mockReadTags({ ISO: [160] });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
@@ -296,11 +293,11 @@ describe(MetadataService.name, () => {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
id: asset.id,
duration: null,
fileCreatedAt: assetStub.image.fileCreatedAt,
fileModifiedAt: assetStub.image.fileCreatedAt,
localDateTime: assetStub.image.fileCreatedAt,
fileCreatedAt: asset.fileCreatedAt,
fileModifiedAt: asset.fileCreatedAt,
localDateTime: asset.fileCreatedAt,
width: null,
height: null,
});
@@ -308,77 +305,77 @@ describe(MetadataService.name, () => {
it('should not delete latitude and longitude without reverse geocode', async () => {
// regression test for issue 17511
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
const asset = AssetFactory.from().exif().build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: false } });
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.withLocation.fileModifiedAt,
mtimeMs: assetStub.withLocation.fileModifiedAt.valueOf(),
birthtimeMs: assetStub.withLocation.fileCreatedAt.valueOf(),
mtime: asset.fileModifiedAt,
mtimeMs: asset.fileModifiedAt.valueOf(),
birthtimeMs: asset.fileCreatedAt.valueOf(),
} as Stats);
mockReadTags({
GPSLatitude: assetStub.withLocation.exifInfo!.latitude!,
GPSLongitude: assetStub.withLocation.exifInfo!.longitude!,
GPSLatitude: asset.exifInfo.latitude!,
GPSLongitude: asset.exifInfo.longitude!,
});
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: null, state: null, country: null }),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.withLocation.id,
id: asset.id,
duration: null,
fileCreatedAt: assetStub.withLocation.fileCreatedAt,
fileModifiedAt: assetStub.withLocation.fileModifiedAt,
localDateTime: new Date('2023-02-22T05:06:29.716Z'),
fileCreatedAt: asset.fileCreatedAt,
fileModifiedAt: asset.fileModifiedAt,
localDateTime: asset.localDateTime,
width: null,
height: null,
});
});
it('should apply reverse geocoding', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
const asset = AssetFactory.from().exif({ latitude: 10, longitude: 20 }).build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.withLocation.fileModifiedAt,
mtimeMs: assetStub.withLocation.fileModifiedAt.valueOf(),
birthtimeMs: assetStub.withLocation.fileCreatedAt.valueOf(),
mtime: asset.fileModifiedAt,
mtimeMs: asset.fileModifiedAt.valueOf(),
birthtimeMs: asset.fileCreatedAt.valueOf(),
} as Stats);
mockReadTags({
GPSLatitude: assetStub.withLocation.exifInfo!.latitude!,
GPSLongitude: assetStub.withLocation.exifInfo!.longitude!,
});
mockReadTags({ GPSLatitude: 10, GPSLongitude: 20 });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.withLocation.id,
id: asset.id,
duration: null,
fileCreatedAt: assetStub.withLocation.fileCreatedAt,
fileModifiedAt: assetStub.withLocation.fileModifiedAt,
localDateTime: new Date('2023-02-22T05:06:29.716Z'),
fileCreatedAt: asset.fileCreatedAt,
fileModifiedAt: asset.fileModifiedAt,
localDateTime: asset.localDateTime,
width: null,
height: null,
});
});
it('should discard latitude and longitude on null island', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mockReadTags({
GPSLatitude: 0,
GPSLongitude: 0,
});
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ latitude: null, longitude: null }),
{ lockedPropertiesBehavior: 'skip' },
@@ -386,19 +383,25 @@ describe(MetadataService.name, () => {
});
it('should extract tags from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ TagsList: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
});
it('should extract hierarchy from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent/Child'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent/Child'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -406,135 +409,147 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(1, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent',
parentId: undefined,
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(2, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent/Child',
parentId: 'tag-parent',
});
});
it('should extract tags from Keywords as a string', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: 'Parent' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
});
it('should extract tags from Keywords as a list', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
});
it('should extract tags from Keywords as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent', '2024'] }),
});
const asset = AssetFactory.from()
.exif({ tags: ['Parent', '2024'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: '2024', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: '2024', parent: undefined });
});
it('should extract hierarchical tags from Keywords', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent/Child'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent/Child'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: 'Parent/Child' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(1, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent',
parentId: undefined,
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(2, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent/Child',
parentId: 'tag-parent',
});
});
it('should ignore Keywords when TagsList is present', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent/Child', 'Child'] }),
});
const asset = AssetFactory.from()
.exif({ tags: ['Parent/Child', 'Child'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(1, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent',
parentId: undefined,
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(2, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent/Child',
parentId: 'tag-parent',
});
});
it('should extract hierarchy from HierarchicalSubject', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent/Child', 'TagA'] }),
});
const asset = AssetFactory.from()
.exif({ tags: ['Parent/Child', 'TagA'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(1, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent',
parentId: undefined,
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(2, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent/Child',
parentId: 'tag-parent',
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(3, { userId: 'user-id', value: 'TagA', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(3, {
userId: asset.ownerId,
value: 'TagA',
parent: undefined,
});
});
it('should extract tags from HierarchicalSubject as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent', '2024'] }),
});
const asset = AssetFactory.from()
.exif({ tags: ['Parent', '2024'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: '2024', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: '2024', parent: undefined });
});
it('should ignore / characters in a HierarchicalSubject tag', async () => {
@@ -1646,31 +1661,23 @@ describe(MetadataService.name, () => {
describe('handleQueueSidecar', () => {
it('should queue assets with sidecar files', async () => {
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
const asset = AssetFactory.create();
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([asset]));
await sut.handleQueueSidecar({ force: true });
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarCheck,
data: { id: assetStub.sidecar.id },
},
]);
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.SidecarCheck, data: { id: asset.id } }]);
});
it('should queue assets without sidecar files', async () => {
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
const asset = AssetFactory.create();
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([asset]));
await sut.handleQueueSidecar({ force: false });
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarCheck,
data: { id: assetStub.image.id },
},
]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.SidecarCheck, data: { id: asset.id } }]);
});
});

View File

@@ -2,6 +2,7 @@ import { BadRequestException } from '@nestjs/common';
import { StackService } from 'src/services/stack.service';
import { assetStub, stackStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { newUuid } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
describe(StackService.name, () => {
@@ -204,9 +205,9 @@ describe(StackService.name, () => {
mocks.access.stack.checkOwnerAccess.mockResolvedValue(new Set(['stack-id']));
mocks.stack.getForAssetRemoval.mockResolvedValue({ id: null, primaryAssetId: null });
await expect(
sut.removeAsset(authStub.admin, { id: 'stack-id', assetId: assetStub.imageFrom2015.id }),
).rejects.toBeInstanceOf(BadRequestException);
await expect(sut.removeAsset(authStub.admin, { id: 'stack-id', assetId: newUuid() })).rejects.toBeInstanceOf(
BadRequestException,
);
expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.event.emit).not.toHaveBeenCalled();

View File

@@ -1,3 +1,20 @@
import { BadRequestException } from '@nestjs/common';
import { debounce } from 'lodash';
import { DateTime } from 'luxon';
import path, { basename, join } from 'node:path';
import { PassThrough, Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { CacheControl, StorageFolder } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
export function isValidDatabaseBackupName(filename: string) {
return filename.match(/^[\d\w-.]+\.sql(?:\.gz)?$/);
}
@@ -13,12 +30,453 @@ export function isFailedDatabaseBackupName(filename: string) {
return filename.match(/^immich-db-backup-.*\.sql\.gz\.tmp$/);
}
export function findDatabaseBackupVersion(filename: string) {
export function findVersion(filename: string) {
return /-v(.*)-/.exec(filename)?.[1];
}
type BackupRepos = {
logger: LoggingRepository;
storage: StorageRepository;
config: ConfigRepository;
process: ProcessRepository;
database: DatabaseRepository;
health: MaintenanceHealthRepository;
};
export class UnsupportedPostgresError extends Error {
constructor(databaseVersion: string) {
super(`Unsupported PostgreSQL version: ${databaseVersion}`);
}
}
export async function buildPostgresLaunchArguments(
{ logger, config, database }: Pick<BackupRepos, 'logger' | 'config' | 'database'>,
bin: 'pg_dump' | 'pg_dumpall' | 'psql',
options: {
singleTransaction?: boolean;
username?: string;
} = {},
): Promise<{
bin: string;
args: string[];
databaseUsername: string;
databasePassword: string;
databaseVersion: string;
databaseMajorVersion?: number;
}> {
const {
database: { config: databaseConfig },
} = config.getEnv();
const isUrlConnection = databaseConfig.connectionType === 'url';
const databaseVersion = await database.getPostgresVersion();
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;
const args: string[] = [];
let databaseUsername;
if (isUrlConnection) {
if (bin !== 'pg_dump') {
args.push('--dbname');
}
let url = databaseConfig.url;
if (URL.canParse(databaseConfig.url)) {
const parsedUrl = new URL(databaseConfig.url);
// remove known bad parameters
parsedUrl.searchParams.delete('uselibpqcompat');
databaseUsername = parsedUrl.username;
url = parsedUrl.toString();
}
// assume a typical value if the URL can't be parsed or has no username
databaseUsername ??= 'postgres';
args.push(url);
} else {
databaseUsername = databaseConfig.username;
args.push('--username', databaseUsername, '--host', databaseConfig.host, '--port', databaseConfig.port.toString());
switch (bin) {
case 'pg_dumpall': {
args.push('--database');
break;
}
case 'psql': {
args.push('--dbname');
break;
}
}
args.push(databaseConfig.database);
}
switch (bin) {
case 'pg_dump':
case 'pg_dumpall': {
args.push('--clean', '--if-exists');
break;
}
case 'psql': {
if (options.singleTransaction) {
args.push(
// don't commit any transaction on failure
'--single-transaction',
// exit with non-zero code on error
'--set',
'ON_ERROR_STOP=on',
);
}
args.push(
// used for progress monitoring
'--echo-all',
'--output=/dev/null',
);
break;
}
}
if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
logger.error(`Database Restore Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
throw new UnsupportedPostgresError(databaseVersion);
}
return {
bin: `/usr/lib/postgresql/${databaseMajorVersion}/bin/${bin}`,
args,
databaseUsername,
databasePassword: isUrlConnection ? new URL(databaseConfig.url).password : databaseConfig.password,
databaseVersion,
databaseMajorVersion,
};
}
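// Illustrative sketch, not part of the change: for a URL-type connection and
// bin 'pg_dump', the branches above yield a result shaped roughly like this
// (all concrete values below are assumed examples):
//
//   {
//     bin: '/usr/lib/postgresql/16/bin/pg_dump',
//     args: ['postgres://immich:pw@db:5432/immich', '--clean', '--if-exists'],
//     databaseUsername: 'immich',
//     databasePassword: 'pw',
//     databaseVersion: '16.4',
//     databaseMajorVersion: 16,
//   }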
export async function createDatabaseBackup(
{ logger, storage, process: processRepository, ...pgRepos }: Omit<BackupRepos, 'health'>,
filenamePrefix: string = '',
): Promise<string> {
logger.debug(`Database Backup Started`);
const { bin, args, databasePassword, databaseVersion, databaseMajorVersion } = await buildPostgresLaunchArguments(
{ logger, ...pgRepos },
'pg_dump',
);
logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);
const filename = `${filenamePrefix}immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz`;
const backupFilePath = join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
const temporaryFilePath = `${backupFilePath}.tmp`;
try {
const pgdump = processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});
const gzip = processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
const fileStream = storage.createWriteStream(temporaryFilePath);
await pipeline(pgdump, gzip, fileStream);
await storage.rename(temporaryFilePath, backupFilePath);
} catch (error) {
logger.error(`Database Backup Failure: ${error}`);
await storage
.unlink(temporaryFilePath)
.catch((error) => logger.error(`Failed to delete failed backup file: ${error}`));
throw error;
}
logger.log(`Database Backup Success`);
return backupFilePath;
}
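// Worked example (assumed values): with no prefix, server v2.3.1, and
// PostgreSQL 16.4, the template above produces a name like the following,
// from which findVersion() recovers '2.3.1'; the restore path uses that to
// flag dumps from v2.4 and earlier as full cluster dumps.
const exampleBackupName = 'immich-db-backup-20260207T101137-v2.3.1-pg16.4.sql.gz';
findVersion(exampleBackupName); // => '2.3.1'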
const SQL_DROP_CONNECTIONS = `
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
AND pid <> pg_backend_pid();
`;
const SQL_RESET_SCHEMA = (username: string) => `
-- re-create the default schema
DROP SCHEMA public CASCADE;
CREATE SCHEMA public;
-- restore access to schema
GRANT ALL ON SCHEMA public TO "${username}";
GRANT ALL ON SCHEMA public TO public;
`;
async function* sql(inputStream: Readable, databaseUsername: string, isPgClusterDump: boolean) {
yield SQL_DROP_CONNECTIONS;
yield isPgClusterDump
? // the dump likely contains SQL that tries to drop the currently active
// database to ensure a fresh slate; if the `postgres` database exists,
// prefer to switch to it before continuing, otherwise this will just silently fail
String.raw`
\c postgres
`
: SQL_RESET_SCHEMA(databaseUsername);
for await (const chunk of inputStream) {
yield chunk;
}
}
async function* sqlRollback(inputStream: Readable, databaseUsername: string) {
yield SQL_DROP_CONNECTIONS;
yield SQL_RESET_SCHEMA(databaseUsername);
for await (const chunk of inputStream) {
yield chunk;
}
}
export async function restoreDatabaseBackup(
{ logger, storage, process: processRepository, database: databaseRepository, health, ...pgRepos }: BackupRepos,
filename: string,
progressCb?: (action: 'backup' | 'restore' | 'migrations' | 'rollback', progress: number) => void,
): Promise<void> {
logger.debug(`Database Restore Started`);
let complete = false;
try {
if (!isValidDatabaseBackupName(filename)) {
throw new Error('Invalid backup file format!');
}
const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
await storage.stat(backupFilePath); // => check file exists
let isPgClusterDump = false;
const version = findVersion(filename);
if (version && semver.satisfies(version, '<= 2.4')) {
isPgClusterDump = true;
}
const { bin, args, databaseUsername, databasePassword, databaseMajorVersion } = await buildPostgresLaunchArguments(
{ logger, database: databaseRepository, ...pgRepos },
'psql',
{
singleTransaction: !isPgClusterDump,
},
);
progressCb?.('backup', 0.05);
const restorePointFilePath = await createDatabaseBackup(
{ logger, storage, process: processRepository, database: databaseRepository, ...pgRepos },
'restore-point-',
);
logger.log(`Database Restore Starting. Database Version: ${databaseMajorVersion}`);
let inputStream: Readable;
if (backupFilePath.endsWith('.gz')) {
const fileStream = storage.createPlainReadStream(backupFilePath);
const gunzip = storage.createGunzip();
fileStream.pipe(gunzip);
inputStream = gunzip;
} else {
inputStream = storage.createPlainReadStream(backupFilePath);
}
const sqlStream = Readable.from(sql(inputStream, databaseUsername, isPgClusterDump));
const psql = processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});
const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
if (complete) {
return;
}
logger.log(`Restore progress ~ ${(progress * 100).toFixed(2)}%`);
progressCb?.('restore', progress);
});
await pipeline(sqlStream, progressSource, psql, progressSink);
try {
progressCb?.('migrations', 0.9);
await databaseRepository.runMigrations();
await health.checkApiHealth();
} catch (error) {
progressCb?.('rollback', 0);
const fileStream = storage.createPlainReadStream(restorePointFilePath);
const gunzip = storage.createGunzip();
fileStream.pipe(gunzip);
inputStream = gunzip;
const sqlStream = Readable.from(sqlRollback(inputStream, databaseUsername));
const psql = processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});
const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
if (complete) {
return;
}
logger.log(`Rollback progress ~ ${(progress * 100).toFixed(2)}%`);
progressCb?.('rollback', progress);
});
await pipeline(sqlStream, progressSource, psql, progressSink);
throw error;
}
} catch (error) {
logger.error(`Database Restore Failure: ${error}`);
throw error;
} finally {
complete = true;
}
logger.log(`Database Restore Success`);
}
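// Caller sketch (hypothetical, not part of the change): the progress callback
// fires roughly in this order: 'backup' at 0.05 while the restore point is
// created, 'restore' climbing toward 1 as psql echoes statements back,
// 'migrations' at 0.9, and 'rollback' from 0 to 1 only if migrations or the
// health check fail. `repos` stands in for the wired-up repositories.
//
// await restoreDatabaseBackup(repos, 'immich-db-backup-20260207T101137-v2.3.1-pg16.4.sql.gz', (action, progress) =>
//   console.log(`${action}: ${(progress * 100).toFixed(0)}%`),
// );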
export async function deleteDatabaseBackup({ storage }: Pick<BackupRepos, 'storage'>, files: string[]): Promise<void> {
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
if (files.some((filename) => !isValidDatabaseBackupName(filename))) {
throw new BadRequestException('Invalid backup name!');
}
await Promise.all(files.map((filename) => storage.unlink(path.join(backupsFolder, filename))));
}
export async function listDatabaseBackups({
storage,
}: Pick<BackupRepos, 'storage'>): Promise<{ filename: string; filesize: number }[]> {
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await storage.readdir(backupsFolder);
const validFiles = files
.filter((fn) => isValidDatabaseBackupName(fn))
.toSorted((a, b) =>
a.startsWith('uploaded-') === b.startsWith('uploaded-') ? a.localeCompare(b) : a.startsWith('uploaded-') ? 1 : -1,
)
.toReversed();
const backups = await Promise.all(
validFiles.map(async (filename) => {
const stats = await storage.stat(path.join(backupsFolder, filename));
return { filename, filesize: stats.size };
}),
);
return backups;
}
export async function uploadDatabaseBackup(
{ storage }: Pick<BackupRepos, 'storage'>,
file: Express.Multer.File,
): Promise<void> {
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const fn = basename(file.originalname);
if (!isValidDatabaseBackupName(fn)) {
throw new BadRequestException('Invalid backup name!');
}
const path = join(backupsFolder, `uploaded-${fn}`);
await storage.createOrOverwriteFile(path, file.buffer);
}
export function downloadDatabaseBackup(fileName: string) {
if (!isValidDatabaseBackupName(fileName)) {
throw new BadRequestException('Invalid backup name!');
}
const path = join(StorageCore.getBaseFolder(StorageFolder.Backups), fileName);
return {
path,
fileName,
cacheControl: CacheControl.PrivateWithoutCache,
contentType: fileName.endsWith('.gz') ? 'application/gzip' : 'application/sql',
};
}
function createSqlProgressStreams(cb: (progress: number) => void) {
const STDIN_START_MARKER = new TextEncoder().encode('FROM stdin');
const STDIN_END_MARKER = new TextEncoder().encode(String.raw`\.`);
let readingStdin = false;
let sequenceIdx = 0;
let linesSent = 0;
let linesProcessed = 0;
const startedAt = Date.now();
const cbDebounced = debounce(
() => {
const progress = source.writableEnded
? Math.min(1, linesProcessed / linesSent)
: // progress simulation while we're in an indeterminate state
Math.min(0.3, 0.1 + (Date.now() - startedAt) / 1e4);
cb(progress);
},
100,
{
maxWait: 100,
},
);
let lastByte = -1;
const source = new PassThrough({
transform(chunk, _encoding, callback) {
for (const byte of chunk) {
if (!readingStdin && byte === 10 && lastByte !== 10) {
linesSent += 1;
}
lastByte = byte;
const sequence = readingStdin ? STDIN_END_MARKER : STDIN_START_MARKER;
if (sequence[sequenceIdx] === byte) {
sequenceIdx += 1;
if (sequence.length === sequenceIdx) {
sequenceIdx = 0;
readingStdin = !readingStdin;
}
} else {
sequenceIdx = 0;
}
}
cbDebounced();
this.push(chunk);
callback();
},
});
const sink = new Writable({
write(chunk, _encoding, callback) {
for (const byte of chunk) {
if (byte === 10) {
linesProcessed++;
}
}
cbDebounced();
callback();
},
});
return [source, sink];
}
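// Minimal usage sketch (assumed): the pair wraps the psql process so that
// progress is linesProcessed / linesSent once the input side has ended, with
// a time-based estimate capped at 0.3 before that.
//
// const [source, sink] = createSqlProgressStreams((p) => console.log(p));
// await pipeline(sqlStream, source, psqlProcess, sink); // psqlProcess: a spawned duplex stream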

View File

@@ -0,0 +1,54 @@
import { Selectable } from 'kysely';
import { AlbumUserRole } from 'src/enum';
import { AlbumUserTable } from 'src/schema/tables/album-user.table';
import { AlbumFactory } from 'test/factories/album.factory';
import { build } from 'test/factories/builder.factory';
import { AlbumUserLike, FactoryBuilder, UserLike } from 'test/factories/types';
import { UserFactory } from 'test/factories/user.factory';
import { newDate, newUuid, newUuidV7 } from 'test/small.factory';
export class AlbumUserFactory {
#user!: UserFactory;
private constructor(private readonly value: Selectable<AlbumUserTable>) {
value.userId ??= newUuid();
this.#user = UserFactory.from({ id: value.userId });
}
static create(dto: AlbumUserLike = {}) {
return AlbumUserFactory.from(dto).build();
}
static from(dto: AlbumUserLike = {}) {
return new AlbumUserFactory({
albumId: newUuid(),
userId: newUuid(),
role: AlbumUserRole.Editor,
createId: newUuidV7(),
createdAt: newDate(),
updateId: newUuidV7(),
updatedAt: newDate(),
...dto,
});
}
album(dto: AlbumUserLike = {}, builder?: FactoryBuilder<AlbumFactory>) {
const album = build(AlbumFactory.from(dto), builder);
this.value.albumId = album.build().id;
return this;
}
user(dto: UserLike = {}, builder?: FactoryBuilder<UserFactory>) {
const user = build(UserFactory.from(dto), builder);
this.value.userId = user.build().id;
this.#user = user;
return this;
}
build() {
return {
...this.value,
user: this.#user.build(),
};
}
}

View File

@@ -0,0 +1,87 @@
import { Selectable } from 'kysely';
import { AssetOrder } from 'src/enum';
import { AlbumTable } from 'src/schema/tables/album.table';
import { SharedLinkTable } from 'src/schema/tables/shared-link.table';
import { AlbumUserFactory } from 'test/factories/album-user.factory';
import { AssetFactory } from 'test/factories/asset.factory';
import { build } from 'test/factories/builder.factory';
import { AlbumLike, AlbumUserLike, AssetLike, FactoryBuilder, UserLike } from 'test/factories/types';
import { UserFactory } from 'test/factories/user.factory';
import { newDate, newUuid, newUuidV7 } from 'test/small.factory';
export class AlbumFactory {
#owner: UserFactory;
#sharedLinks: Selectable<SharedLinkTable>[] = [];
#albumUsers: AlbumUserFactory[] = [];
#assets: AssetFactory[] = [];
private constructor(private readonly value: Selectable<AlbumTable>) {
value.ownerId ??= newUuid();
this.#owner = UserFactory.from({ id: value.ownerId });
}
static create(dto: AlbumLike = {}) {
return AlbumFactory.from(dto).build();
}
static from(dto: AlbumLike = {}) {
return new AlbumFactory({
id: newUuid(),
ownerId: newUuid(),
albumName: 'My Album',
albumThumbnailAssetId: null,
createdAt: newDate(),
deletedAt: null,
description: 'Album description',
isActivityEnabled: false,
order: AssetOrder.Desc,
updatedAt: newDate(),
updateId: newUuidV7(),
...dto,
}).owner();
}
owner(dto: UserLike = {}, builder?: FactoryBuilder<UserFactory>) {
this.#owner = build(UserFactory.from(dto), builder);
this.value.ownerId = this.#owner.build().id;
return this;
}
sharedLinks() {
this.#sharedLinks = [];
return this;
}
albumUser(dto: AlbumUserLike = {}, builder?: FactoryBuilder<AlbumUserFactory>) {
const albumUser = build(AlbumUserFactory.from(dto).album(this.value), builder);
this.#albumUsers.push(albumUser);
return this;
}
asset(dto: AssetLike = {}, builder?: FactoryBuilder<AssetFactory>) {
const asset = build(AssetFactory.from(dto), builder);
// use album owner by default
if (!dto.ownerId) {
asset.owner(this.#owner.build());
}
this.#assets.push(asset);
return this;
}
build() {
return {
...this.value,
owner: this.#owner.build(),
assets: this.#assets.map((asset) => asset.build()),
albumUsers: this.#albumUsers.map((albumUser) => albumUser.build()),
sharedLinks: this.#sharedLinks,
};
}
}
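// Usage sketch (assumed, mirroring the tests in this PR): an album with one
// shared user and one asset; per the comment above, the asset's owner
// defaults to the album owner.
//
// const album = AlbumFactory.from({ albumName: 'Trip' })
//   .albumUser({ role: AlbumUserRole.Viewer })
//   .asset()
//   .build();
// album.assets[0].ownerId === album.ownerId; // true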

View File

@@ -0,0 +1,38 @@
import { Selectable } from 'kysely';
import { AssetEditAction } from 'src/dtos/editing.dto';
import { AssetEditTable } from 'src/schema/tables/asset-edit.table';
import { AssetFactory } from 'test/factories/asset.factory';
import { build } from 'test/factories/builder.factory';
import { AssetEditLike, AssetLike, FactoryBuilder } from 'test/factories/types';
import { newUuid } from 'test/small.factory';
export class AssetEditFactory {
private constructor(private readonly value: Selectable<AssetEditTable>) {}
static create(dto: AssetEditLike = {}) {
return AssetEditFactory.from(dto).build();
}
static from(dto: AssetEditLike = {}) {
const id = dto.id ?? newUuid();
return new AssetEditFactory({
id,
assetId: newUuid(),
action: AssetEditAction.Crop,
parameters: { x: 5, y: 6, width: 200, height: 100 },
sequence: 1,
...dto,
});
}
asset(dto: AssetLike = {}, builder?: FactoryBuilder<AssetFactory>) {
const asset = build(AssetFactory.from(dto), builder);
this.value.assetId = asset.build().id;
return this;
}
build() {
return { ...this.value } as Selectable<AssetEditTable<AssetEditAction.Crop>>;
}
}

View File

@@ -0,0 +1,55 @@
import { Selectable } from 'kysely';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetExifLike } from 'test/factories/types';
import { factory } from 'test/small.factory';
export class AssetExifFactory {
private constructor(private readonly value: Selectable<AssetExifTable>) {}
static create(dto: AssetExifLike = {}) {
return AssetExifFactory.from(dto).build();
}
static from(dto: AssetExifLike = {}) {
return new AssetExifFactory({
updatedAt: factory.date(),
updateId: factory.uuid(),
assetId: factory.uuid(),
autoStackId: null,
bitsPerSample: null,
city: 'Austin',
colorspace: null,
country: 'United States of America',
dateTimeOriginal: factory.date(),
description: '',
exifImageHeight: 420,
exifImageWidth: 42,
exposureTime: null,
fileSizeInByte: 69,
fNumber: 1.7,
focalLength: 4.38,
fps: null,
iso: 947,
latitude: 30.267_334_570_570_195,
longitude: -97.789_833_534_282_07,
lensModel: null,
livePhotoCID: null,
make: 'Google',
model: 'Pixel 7',
modifyDate: factory.date(),
orientation: '1',
profileDescription: null,
projectionType: null,
rating: 4,
lockedProperties: [],
state: 'Texas',
tags: ['parent/child'],
timeZone: 'UTC-6',
...dto,
});
}
build() {
return { ...this.value };
}
}

View File

@@ -0,0 +1,43 @@
import { Selectable } from 'kysely';
import { AssetFileType } from 'src/enum';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetFactory } from 'test/factories/asset.factory';
import { build } from 'test/factories/builder.factory';
import { AssetFileLike, AssetLike, FactoryBuilder } from 'test/factories/types';
import { newDate, newUuid, newUuidV7 } from 'test/small.factory';
export class AssetFileFactory {
private constructor(private readonly value: Selectable<AssetFileTable>) {}
static create(dto: AssetFileLike = {}) {
return AssetFileFactory.from(dto).build();
}
static from(dto: AssetFileLike = {}) {
const id = dto.id ?? newUuid();
const isEdited = dto.isEdited ?? false;
return new AssetFileFactory({
id,
assetId: newUuid(),
createdAt: newDate(),
updatedAt: newDate(),
type: AssetFileType.Thumbnail,
path: `/data/12/34/thumbs/${id.slice(0, 2)}/${id.slice(2, 4)}/${id}${isEdited ? '_edited' : ''}.jpg`,
updateId: newUuidV7(),
isProgressive: false,
isEdited,
...dto,
});
}
asset(dto: AssetLike = {}, builder?: FactoryBuilder<AssetFactory>) {
const asset = build(AssetFactory.from(dto), builder);
this.value.assetId = asset.build().id;
return this;
}
build() {
return { ...this.value };
}
}

View File

@@ -0,0 +1,126 @@
import { Selectable } from 'kysely';
import { AssetFileType, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { AssetEditFactory } from 'test/factories/asset-edit.factory';
import { AssetExifFactory } from 'test/factories/asset-exif.factory';
import { AssetFileFactory } from 'test/factories/asset-file.factory';
import { build } from 'test/factories/builder.factory';
import { AssetEditLike, AssetExifLike, AssetFileLike, AssetLike, FactoryBuilder, UserLike } from 'test/factories/types';
import { UserFactory } from 'test/factories/user.factory';
import { newDate, newSha1, newUuid, newUuidV7 } from 'test/small.factory';
export class AssetFactory {
#owner!: UserFactory;
#assetExif?: AssetExifFactory;
#files: AssetFileFactory[] = [];
#edits: AssetEditFactory[] = [];
private constructor(private readonly value: Selectable<AssetTable>) {
value.ownerId ??= newUuid();
this.#owner = UserFactory.from({ id: value.ownerId });
}
static create(dto: AssetLike = {}) {
return AssetFactory.from(dto).build();
}
static from(dto: AssetLike = {}) {
const id = dto.id ?? newUuid();
const originalFileName = dto.originalFileName ?? `IMG_${id}.jpg`;
return new AssetFactory({
id,
createdAt: newDate(),
updatedAt: newDate(),
deletedAt: null,
updateId: newUuidV7(),
status: AssetStatus.Active,
checksum: newSha1(),
deviceAssetId: '',
deviceId: '',
duplicateId: null,
duration: null,
encodedVideoPath: null,
fileCreatedAt: newDate(),
fileModifiedAt: newDate(),
isExternal: false,
isFavorite: false,
isOffline: false,
libraryId: null,
livePhotoVideoId: null,
localDateTime: newDate(),
originalFileName,
originalPath: `/data/library/${originalFileName}`,
ownerId: newUuid(),
stackId: null,
thumbhash: null,
type: AssetType.Image,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
isEdited: false,
...dto,
});
}
owner(dto: UserLike = {}, builder?: FactoryBuilder<UserFactory>) {
this.#owner = build(UserFactory.from(dto), builder);
this.value.ownerId = this.#owner.build().id;
return this;
}
exif(dto: AssetExifLike = {}, builder?: FactoryBuilder<AssetExifFactory>) {
this.#assetExif = build(AssetExifFactory.from(dto), builder);
return this;
}
edit(dto: AssetEditLike = {}, builder?: FactoryBuilder<AssetEditFactory>) {
this.#edits.push(build(AssetEditFactory.from(dto).asset(this.value), builder));
this.value.isEdited = true;
return this;
}
file(dto: AssetFileLike = {}, builder?: FactoryBuilder<AssetFileFactory>) {
this.#files.push(build(AssetFileFactory.from(dto).asset(this.value), builder));
return this;
}
files(dto?: 'edits'): AssetFactory;
files(items: AssetFileLike[], builder?: FactoryBuilder<AssetFileFactory>): AssetFactory;
files(items: AssetFileType[], builder?: FactoryBuilder<AssetFileFactory>): AssetFactory;
files(dto?: 'edits' | AssetFileLike[] | AssetFileType[], builder?: FactoryBuilder<AssetFileFactory>): AssetFactory {
const items: AssetFileLike[] = [];
if (dto === undefined || dto === 'edits') {
items.push(...Object.values(AssetFileType).map((type) => ({ type })));
if (dto === 'edits') {
items.push(...Object.values(AssetFileType).map((type) => ({ type, isEdited: true })));
}
} else {
for (const item of dto) {
items.push(typeof item === 'string' ? { type: item as AssetFileType } : item);
}
}
for (const item of items) {
this.file(item, builder);
}
return this;
}
build() {
const exif = this.#assetExif?.build();
return {
...this.value,
owner: this.#owner.build(),
exifInfo: exif as NonNullable<typeof exif>,
files: this.#files.map((file) => file.build()),
edits: this.#edits.map((edit) => edit.build()),
faces: [] as Selectable<AssetFaceTable>[],
};
}
}
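// Usage sketch (assumed, matching calls visible in the updated tests):
//
// const asset = AssetFactory.from()
//   .exif({ latitude: 10, longitude: 20 })
//   .files('edits') // one file per AssetFileType, plus edited variants
//   .build();
// asset.exifInfo.latitude === 10; // true; exif() attaches the AssetExifFactory result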

View File

@@ -0,0 +1,48 @@
import { AuthDto } from 'src/dtos/auth.dto';
import { build } from 'test/factories/builder.factory';
import { SharedLinkFactory } from 'test/factories/shared-link.factory';
import { FactoryBuilder, SharedLinkLike, UserLike } from 'test/factories/types';
import { UserFactory } from 'test/factories/user.factory';
export class AuthFactory {
#user: UserFactory;
#sharedLink?: SharedLinkFactory;
private constructor(user: UserFactory) {
this.#user = user;
}
static create(dto: UserLike = {}) {
return AuthFactory.from(dto).build();
}
static from(dto: UserLike = {}) {
return new AuthFactory(UserFactory.from(dto));
}
apiKey() {
// TODO
return this;
}
sharedLink(dto: SharedLinkLike = {}, builder?: FactoryBuilder<SharedLinkFactory>) {
this.#sharedLink = build(SharedLinkFactory.from(dto), builder);
return this;
}
build(): AuthDto {
const { id, isAdmin, name, email, quotaUsageInBytes, quotaSizeInBytes } = this.#user.build();
return {
user: {
id,
isAdmin,
name,
email,
quotaUsageInBytes,
quotaSizeInBytes,
},
sharedLink: this.#sharedLink?.build(),
};
}
}

View File

@@ -0,0 +1,5 @@
import { FactoryBuilder } from 'test/factories/types';
export const build = <T>(factory: T, builder?: FactoryBuilder<T>) => {
return builder ? builder(factory) : factory;
};
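// Sketch of the builder hook (assumed usage): the optional second argument to
// factory methods lets a test customize the nested factory before it is built, e.g.
//
// AlbumFactory.from().albumUser({}, (b) => b.user({ name: 'Alice' })).build();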

View File

@@ -0,0 +1,63 @@
import { Selectable } from 'kysely';
import { SharedLinkType } from 'src/enum';
import { SharedLinkTable } from 'src/schema/tables/shared-link.table';
import { AlbumFactory } from 'test/factories/album.factory';
import { build } from 'test/factories/builder.factory';
import { AlbumLike, FactoryBuilder, SharedLinkLike, UserLike } from 'test/factories/types';
import { UserFactory } from 'test/factories/user.factory';
import { factory, newDate, newUuid } from 'test/small.factory';
export class SharedLinkFactory {
#owner: UserFactory;
#album?: AlbumFactory;
private constructor(private readonly value: Selectable<SharedLinkTable>) {
value.userId ??= newUuid();
this.#owner = UserFactory.from({ id: value.userId });
}
static create(dto: SharedLinkLike = {}) {
return SharedLinkFactory.from(dto).build();
}
static from(dto: SharedLinkLike = {}) {
const type = dto.type ?? SharedLinkType.Individual;
const albumId = dto.albumId ?? (type === SharedLinkType.Album ? newUuid() : null);
return new SharedLinkFactory({
id: factory.uuid(),
description: 'Shared link description',
userId: newUuid(),
key: factory.buffer(),
type,
albumId,
createdAt: newDate(),
expiresAt: null,
allowUpload: true,
allowDownload: true,
showExif: true,
password: null,
slug: null,
...dto,
});
}
owner(dto: UserLike = {}, builder?: FactoryBuilder<UserFactory>): SharedLinkFactory {
this.#owner = build(UserFactory.from(dto), builder);
return this;
}
album(dto: AlbumLike = {}, builder?: FactoryBuilder<AlbumFactory>) {
this.#album = build(AlbumFactory.from(dto), builder);
return this;
}
build() {
return {
...this.value,
owner: this.#owner.build(),
album: this.#album?.build(),
assets: [],
};
}
}

View File

@@ -0,0 +1,20 @@
import { Selectable } from 'kysely';
import { AlbumUserTable } from 'src/schema/tables/album-user.table';
import { AlbumTable } from 'src/schema/tables/album.table';
import { AssetEditTable } from 'src/schema/tables/asset-edit.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { SharedLinkTable } from 'src/schema/tables/shared-link.table';
import { UserTable } from 'src/schema/tables/user.table';
export type FactoryBuilder<T, R extends T = T> = (builder: T) => R;
export type AssetLike = Partial<Selectable<AssetTable>>;
export type AssetExifLike = Partial<Selectable<AssetExifTable>>;
export type AssetEditLike = Partial<Selectable<AssetEditTable>>;
export type AssetFileLike = Partial<Selectable<AssetFileTable>>;
export type AlbumLike = Partial<Selectable<AlbumTable>>;
export type AlbumUserLike = Partial<Selectable<AlbumUserTable>>;
export type SharedLinkLike = Partial<Selectable<SharedLinkTable>>;
export type UserLike = Partial<Selectable<UserTable>>;

View File

@@ -0,0 +1,46 @@
import { Selectable } from 'kysely';
import { UserStatus } from 'src/enum';
import { UserMetadataTable } from 'src/schema/tables/user-metadata.table';
import { UserTable } from 'src/schema/tables/user.table';
import { UserLike } from 'test/factories/types';
import { newDate, newUuid, newUuidV7 } from 'test/small.factory';
export class UserFactory {
private constructor(private value: Selectable<UserTable>) {}
static create(dto: UserLike = {}) {
return UserFactory.from(dto).build();
}
static from(dto: UserLike = {}) {
return new UserFactory({
id: newUuid(),
email: 'test@immich.cloud',
password: '',
pinCode: null,
createdAt: newDate(),
profileImagePath: '',
isAdmin: false,
shouldChangePassword: false,
avatarColor: null,
deletedAt: null,
oauthId: '',
updatedAt: newDate(),
storageLabel: null,
name: 'Test User',
quotaSizeInBytes: null,
quotaUsageInBytes: 0,
status: UserStatus.Active,
profileChangedAt: newDate(),
updateId: newUuidV7(),
...dto,
});
}
build() {
return {
...this.value,
metadata: [] as UserMetadataTable[],
};
}
}

View File

@@ -45,56 +45,6 @@ export const albumStub = {
order: AssetOrder.Desc,
updateId: '42',
}),
sharedWithMultiple: Object.freeze({
id: 'album-3',
albumName: 'Empty album shared with users',
description: '',
ownerId: authStub.admin.user.id,
owner: userStub.admin,
assets: [],
albumThumbnailAsset: null,
albumThumbnailAssetId: null,
createdAt: new Date(),
updatedAt: new Date(),
deletedAt: null,
sharedLinks: [],
albumUsers: [
{
user: userStub.user1,
role: AlbumUserRole.Editor,
},
{
user: userStub.user2,
role: AlbumUserRole.Editor,
},
],
isActivityEnabled: true,
order: AssetOrder.Desc,
updateId: '42',
}),
sharedWithAdmin: Object.freeze({
id: 'album-3',
albumName: 'Empty album shared with admin',
description: '',
ownerId: authStub.user1.user.id,
owner: userStub.user1,
assets: [],
albumThumbnailAsset: null,
albumThumbnailAssetId: null,
createdAt: new Date(),
updatedAt: new Date(),
deletedAt: null,
sharedLinks: [],
albumUsers: [
{
user: userStub.admin,
role: AlbumUserRole.Editor,
},
],
isActivityEnabled: true,
order: AssetOrder.Desc,
updateId: '42',
}),
oneAsset: Object.freeze({
id: 'album-4',
albumName: 'Album with one asset',
@@ -113,24 +63,6 @@ export const albumStub = {
order: AssetOrder.Desc,
updateId: '42',
}),
twoAssets: Object.freeze({
id: 'album-4a',
albumName: 'Album with two assets',
description: '',
ownerId: authStub.admin.user.id,
owner: userStub.admin,
assets: [assetStub.image, assetStub.withLocation],
albumThumbnailAsset: assetStub.image,
albumThumbnailAssetId: assetStub.image.id,
createdAt: new Date(),
updatedAt: new Date(),
deletedAt: null,
sharedLinks: [],
albumUsers: [],
isActivityEnabled: true,
order: AssetOrder.Desc,
updateId: '42',
}),
emptyWithValidThumbnail: Object.freeze({
id: 'album-5',
albumName: 'Empty album with valid thumbnail',

View File

@@ -20,45 +20,8 @@ const fullsizeFile = factory.assetFile({
path: '/uploads/user-id/fullsize/path.webp',
});
const sidecarFileWithExt = factory.assetFile({
type: AssetFileType.Sidecar,
path: '/original/path.ext.xmp',
});
const sidecarFileWithoutExt = factory.assetFile({
type: AssetFileType.Sidecar,
path: '/original/path.xmp',
});
const editedPreviewFile = factory.assetFile({
type: AssetFileType.Preview,
path: '/uploads/user-id/preview/path_edited.jpg',
isEdited: true,
});
const editedThumbnailFile = factory.assetFile({
type: AssetFileType.Thumbnail,
path: '/uploads/user-id/thumbnail/path_edited.jpg',
isEdited: true,
});
const editedFullsizeFile = factory.assetFile({
type: AssetFileType.FullSize,
path: '/uploads/user-id/fullsize/path_edited.jpg',
isEdited: true,
});
const files = [fullsizeFile, previewFile, thumbnailFile];
const editedFiles = [
fullsizeFile,
previewFile,
thumbnailFile,
editedFullsizeFile,
editedPreviewFile,
editedThumbnailFile,
];
export const stackStub = (stackId: string, assets: (MapAsset & { exifInfo: Exif })[]) => {
return {
id: stackId,
@@ -132,87 +95,6 @@ export const assetStub = {
isEdited: false,
}),
noWebpPath: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/data/library/IMG_456.jpg',
files: [previewFile],
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2023-02-23T05:06:29.716Z'),
isFavorite: true,
duration: null,
livePhotoVideo: null,
livePhotoVideoId: null,
sharedLinks: [],
originalFileName: 'IMG_456.jpg',
faces: [],
isExternal: false,
exifInfo: {
fileSizeInByte: 123_000,
} as Exif,
deletedAt: null,
duplicateId: null,
isOffline: false,
libraryId: null,
stackId: null,
updateId: '42',
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
isEdited: false,
}),
noThumbhash: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.ext',
files,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
thumbhash: null,
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2023-02-23T05:06:29.716Z'),
isFavorite: true,
duration: null,
isExternal: false,
livePhotoVideo: null,
livePhotoVideoId: null,
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
deletedAt: null,
duplicateId: null,
isOffline: false,
libraryId: null,
stackId: null,
updateId: '42',
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
isEdited: false,
}),
primaryImage: Object.freeze({
id: 'primary-asset-id',
status: AssetStatus.Active,
@@ -526,48 +408,6 @@ export const assetStub = {
isEdited: false,
}),
imageFrom2015: Object.freeze({
id: 'asset-id-2015',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2015-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2015-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.ext',
files,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2015-02-23T05:06:29.716Z'),
updatedAt: new Date('2015-02-23T05:06:29.716Z'),
localDateTime: new Date('2015-02-23T05:06:29.716Z'),
isFavorite: true,
isExternal: false,
duration: null,
livePhotoVideo: null,
livePhotoVideoId: null,
updateId: 'foo',
libraryId: null,
stackId: null,
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
deletedAt: null,
duplicateId: null,
isOffline: false,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
isEdited: false,
}),
video: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
@@ -736,81 +576,6 @@ export const assetStub = {
isEdited: false,
}),
sidecar: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.ext',
thumbhash: null,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files: [previewFile, sidecarFileWithExt],
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2023-02-23T05:06:29.716Z'),
isFavorite: true,
isExternal: false,
duration: null,
livePhotoVideo: null,
livePhotoVideoId: null,
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
deletedAt: null,
duplicateId: null,
isOffline: false,
updateId: 'foo',
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
isEdited: false,
}),
sidecarWithoutExt: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.ext',
thumbhash: null,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files: [previewFile, sidecarFileWithoutExt],
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2023-02-23T05:06:29.716Z'),
isFavorite: true,
isExternal: false,
duration: null,
livePhotoVideo: null,
livePhotoVideoId: null,
sharedLinks: [],
originalFileName: 'asset-id.ext',
faces: [],
deletedAt: null,
duplicateId: null,
isOffline: false,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
isEdited: false,
}),
hasEncodedVideo: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
@@ -854,46 +619,6 @@ export const assetStub = {
isEdited: false,
}),
hasFileExtension: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/data/user1/photo.jpg',
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2023-02-23T05:06:29.716Z'),
isFavorite: true,
isExternal: true,
duration: null,
livePhotoVideo: null,
livePhotoVideoId: null,
libraryId: 'library-id',
sharedLinks: [],
originalFileName: 'photo.jpg',
faces: [],
deletedAt: null,
exifInfo: {
fileSizeInByte: 5000,
} as Exif,
duplicateId: null,
isOffline: false,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
isEdited: false,
}),
imageDng: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
@@ -938,93 +663,6 @@ export const assetStub = {
isEdited: false,
}),
imageHif: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.hif',
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2023-02-23T05:06:29.716Z'),
isFavorite: true,
duration: null,
isExternal: false,
livePhotoVideo: null,
livePhotoVideoId: null,
sharedLinks: [],
originalFileName: 'asset-id.hif',
faces: [],
deletedAt: null,
exifInfo: {
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
bitsPerSample: 14,
} as Exif,
duplicateId: null,
isOffline: false,
updateId: '42',
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
isEdited: false,
}),
panoramaTif: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.tif',
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
files,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2023-02-23T05:06:29.716Z'),
isFavorite: true,
duration: null,
isExternal: false,
livePhotoVideo: null,
livePhotoVideoId: null,
sharedLinks: [],
originalFileName: 'asset-id.tif',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
projectionType: 'EQUIRECTANGULAR',
} as Exif,
duplicateId: null,
isOffline: false,
updateId: '42',
libraryId: null,
stackId: null,
visibility: AssetVisibility.Timeline,
width: null,
height: null,
edits: [],
}),
withCropEdit: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
@@ -1082,52 +720,4 @@ export const assetStub = {
] as AssetEditActionItem[],
isEdited: true,
}),
withoutEdits: Object.freeze({
id: 'asset-id',
status: AssetStatus.Active,
deviceAssetId: 'device-asset-id',
fileModifiedAt: new Date('2023-02-23T05:06:29.716Z'),
fileCreatedAt: new Date('2023-02-23T05:06:29.716Z'),
owner: userStub.user1,
ownerId: 'user-id',
deviceId: 'device-id',
originalPath: '/original/path.jpg',
files: editedFiles,
checksum: Buffer.from('file hash', 'utf8'),
type: AssetType.Image,
thumbhash: Buffer.from('blablabla', 'base64'),
encodedVideoPath: null,
createdAt: new Date('2023-02-23T05:06:29.716Z'),
updatedAt: new Date('2023-02-23T05:06:29.716Z'),
localDateTime: new Date('2025-01-01T01:02:03.456Z'),
isFavorite: true,
duration: null,
isExternal: false,
livePhotoVideo: null,
livePhotoVideoId: null,
updateId: 'foo',
libraryId: null,
stackId: null,
sharedLinks: [],
originalFileName: 'asset-id.jpg',
faces: [],
deletedAt: null,
sidecarPath: null,
exifInfo: {
fileSizeInByte: 5000,
exifImageHeight: 3840,
exifImageWidth: 2160,
} as Exif,
duplicateId: null,
isOffline: false,
stack: null,
orientation: '',
projectionType: null,
height: 3840,
width: 2160,
visibility: AssetVisibility.Timeline,
edits: [],
isEdited: false,
}),
};
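Each stub above is wrapped in Object.freeze so one test cannot mutate shared fixture state and leak it into the next; a minimal sketch of the failure mode this guards against (the stub shape here is illustrative, not one of the fixtures above):

// Illustrative only: frozen fixtures reject mutation instead of leaking state
const stub = Object.freeze({ isFavorite: true });
// stub.isFavorite = false; // TypeError under strict mode; silently ignored otherwise
const perTestCopy = { ...stub, isFavorite: false }; // per-test overrides go through a copy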

View File

@@ -38,21 +38,4 @@ export const userStub = {
quotaSizeInBytes: null,
quotaUsageInBytes: 0,
},
user2: <UserAdmin>{
...authStub.user2.user,
status: UserStatus.Active,
profileChangedAt: new Date('2021-01-01'),
metadata: [],
name: 'immich_name',
storageLabel: null,
oauthId: '',
shouldChangePassword: false,
avatarColor: null,
profileImagePath: '',
createdAt: new Date('2021-01-01'),
deletedAt: null,
updatedAt: new Date('2021-01-01'),
quotaSizeInBytes: null,
quotaUsageInBytes: 0,
},
};

View File

@@ -1,5 +1,6 @@
import {
Activity,
Album,
ApiKey,
AssetFace,
AssetFile,
@@ -23,6 +24,7 @@ import { AssetEditAction, AssetEditActionItem, MirrorAxis } from 'src/dtos/editi
import { QueueStatisticsDto } from 'src/dtos/queue.dto';
import {
AssetFileType,
AssetOrder,
AssetStatus,
AssetType,
AssetVisibility,
@@ -506,6 +508,24 @@ const personFactory = (person?: Partial<Person>): Person => ({
...person,
});
const albumFactory = (album?: Partial<Omit<Album, 'assets'>>) => ({
albumName: 'My Album',
albumThumbnailAssetId: null,
albumUsers: [],
assets: [],
createdAt: newDate(),
deletedAt: null,
description: 'Album description',
id: newUuid(),
isActivityEnabled: false,
order: AssetOrder.Desc,
ownerId: newUuid(),
sharedLinks: [],
updatedAt: newDate(),
updateId: newUuidV7(),
...album,
});
export const factory = {
activity: activityFactory,
apiKey: apiKeyFactory,
@@ -532,6 +552,7 @@ export const factory = {
person: personFactory,
assetEdit: assetEditFactory,
tag: tagFactory,
album: albumFactory,
uuid: newUuid,
buffer: () => Buffer.from('this is a fake buffer'),
date: newDate,
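The new albumFactory follows the same defaults-plus-overrides pattern as the other factories registered above; a minimal usage sketch (the import paths and test body are illustrative, only factory.album and its defaults come from this file):

import { AssetOrder } from 'src/enum'; // illustrative import path
import { factory } from 'test/small.factory'; // illustrative import path

// Defaults come from albumFactory; any field can be overridden per test
const album = factory.album({ albumName: 'Vacation' });
// Each call mints a fresh id and updateId, so tests never collide
// album.order === AssetOrder.Desc and album.albumName === 'Vacation'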

View File

@@ -496,12 +496,10 @@ export const mockSpawn = vitest.fn((exitCode: number, stdout: string, stderr: st
} as unknown as ChildProcessWithoutNullStreams;
});
-export const mockDuplex =
-(chunkCb?: (chunk: Buffer) => void) =>
+export const mockDuplex = vitest.fn(
(command: string, exitCode: number, stdout: string, stderr: string, error?: unknown) => {
const duplex = new Duplex({
-write(chunk, _encoding, callback) {
-chunkCb?.(chunk);
+write(_chunk, _encoding, callback) {
callback();
},
@@ -526,7 +524,8 @@ export const mockDuplex =
});
return duplex;
-};
+},
+);
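The rework above turns mockDuplex into a vitest.fn factory that threads the command through as its first argument, which lets tests assert on what was spawned; a minimal usage sketch (the command and output strings are illustrative):

// Illustrative: build a fake duplex stream for a command that exits cleanly
const duplex = mockDuplex('ffmpeg -version', 0, 'ffmpeg version 7.0', '');
duplex.write(Buffer.from('input'), () => {}); // writes are accepted and discarded
expect(mockDuplex).toHaveBeenCalledWith('ffmpeg -version', 0, 'ffmpeg version 7.0', '');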
export const mockFork = vitest.fn((exitCode: number, stdout: string, stderr: string, error?: unknown) => {
const stdoutStream = new Readable({

View File

@@ -2,6 +2,7 @@ import TransformTool from '$lib/components/asset-viewer/editor/transform-tool/tr
import { transformManager } from '$lib/managers/edit/transform-manager.svelte';
import { eventManager } from '$lib/managers/event-manager.svelte';
import { waitForWebsocketEvent } from '$lib/stores/websocket';
+import { getFormatter } from '$lib/utils/i18n';
import { editAsset, removeAssetEdits, type AssetEditsDto, type AssetResponseDto } from '@immich/sdk';
import { ConfirmModal, modalManager, toastManager } from '@immich/ui';
import { mdiCropRotate } from '@mdi/js';
@@ -63,10 +64,12 @@ export class EditManager {
this.isShowingConfirmDialog = true;
+const t = await getFormatter();
const confirmed = await modalManager.show(ConfirmModal, {
-title: 'Discard Edits?',
-prompt: 'You have unsaved edits. Are you sure you want to discard them?',
-confirmText: 'Discard Edits',
+title: t('editor_discard_edits_title'),
+prompt: t('editor_discard_edits_prompt'),
+confirmText: t('editor_discard_edits_confirm'),
});
this.isShowingConfirmDialog = false;
@@ -120,6 +123,7 @@ export class EditManager {
}
const assetId = this.currentAsset.id;
+const t = await getFormatter();
try {
// Setup the websocket listener before sending the edit request
@@ -138,12 +142,12 @@ export class EditManager {
eventManager.emit('AssetEditsApplied', assetId);
-toastManager.success('Edits applied successfully');
+toastManager.success(t('editor_edits_applied_success'));
this.hasAppliedEdits = true;
return true;
} catch {
-toastManager.danger('Failed to apply edits');
+toastManager.danger(t('editor_edits_applied_error'));
return false;
} finally {
this.isApplyingEdits = false;
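One detail worth calling out in the try block above: the comment notes that the websocket listener is set up before the edit request is sent, so a confirmation that arrives quickly is never missed. A minimal sketch of that ordering (the event name and request shape are assumptions, not the actual API):

// Illustrative: subscribe first, then trigger, then await
const confirmation = waitForWebsocketEvent('on_asset_update', assetId); // event name assumed
await editAsset({ id: assetId, assetEditsDto: { edits } }); // request shape assumed
await confirmation; // resolves even if the server replied before this line ran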

View File

@@ -16,7 +16,6 @@
let { asset, onClose }: Props = $props();
let imgElement: HTMLDivElement | undefined = $state();
-let cropContainer: HTMLDivElement | undefined = $state();
onMount(() => {
if (!imgElement) {
@@ -52,23 +51,16 @@
};
const onSubmit = async () => {
-if (!cropContainer) {
+if (!imgElement) {
return;
}
try {
-// Get the container dimensions (which is always square due to aspect-square class)
-const containerSize = cropContainer.offsetWidth;
-// Capture the crop container which maintains 1:1 aspect ratio
-// Override border-radius and border to avoid transparent corners from rounded-full
-const blob = await domtoimage.toBlob(cropContainer, {
-width: containerSize,
-height: containerSize,
-style: {
-borderRadius: '0',
-border: 'none',
-},
+const imgElementHeight = imgElement.offsetHeight;
+const imgElementWidth = imgElement.offsetWidth;
+const blob = await domtoimage.toBlob(imgElement, {
+width: imgElementWidth,
+height: imgElementHeight,
});
if (await hasTransparentPixels(blob)) {
@@ -91,7 +83,6 @@
<FormModal size="small" title={$t('set_profile_picture')} {onClose} {onSubmit}>
<div class="flex place-items-center items-center justify-center">
<div
-bind:this={cropContainer}
class="relative flex aspect-square w-62.5 overflow-hidden rounded-full border-4 border-immich-primary bg-immich-dark-primary dark:border-immich-dark-primary dark:bg-immich-primary"
>
<PhotoViewer bind:element={imgElement} cursor={{ current: asset }} />
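For readers wondering about the hasTransparentPixels guard in onSubmit above: it rejects captures that still contain see-through pixels, e.g. leftovers from the rounded-full mask. The helper itself is not shown in this diff; a minimal sketch of one way such a check can be implemented:

// Illustrative implementation: scan the alpha channel of the captured blob
async function hasAnyTransparency(blob: Blob): Promise<boolean> {
  const bitmap = await createImageBitmap(blob);
  const canvas = document.createElement('canvas');
  canvas.width = bitmap.width;
  canvas.height = bitmap.height;
  const context = canvas.getContext('2d');
  if (!context) {
    return false;
  }
  context.drawImage(bitmap, 0, 0);
  const { data } = context.getImageData(0, 0, canvas.width, canvas.height);
  for (let index = 3; index < data.length; index += 4) {
    if (data[index] < 255) {
      return true; // any alpha below 255 means a transparent pixel slipped through
    }
  }
  return false;
}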