Mirror of https://github.com/immich-app/immich.git (synced 2026-03-25 11:08:56 +03:00)

Merge remote-tracking branch 'origin/main' into feat/integrity-checks-izzy
@@ -33,6 +33,7 @@ import { WebsocketRepository } from 'src/repositories/websocket.repository';
 import { services } from 'src/services';
 import { AuthService } from 'src/services/auth.service';
 import { CliService } from 'src/services/cli.service';
+import { DatabaseBackupService } from 'src/services/database-backup.service';
 import { QueueService } from 'src/services/queue.service';
 import { getKyselyConfig } from 'src/utils/database';
@@ -114,6 +115,7 @@ export class ApiModule extends BaseModule {}
     AppRepository,
     MaintenanceHealthRepository,
     MaintenanceWebsocketRepository,
+    DatabaseBackupService,
     MaintenanceWorkerService,
     ...commonMiddleware,
     { provide: APP_GUARD, useClass: MaintenanceAuthGuard },
@@ -1,15 +1,18 @@
 import { mapAlbum } from 'src/dtos/album.dto';
-import { albumStub } from 'test/fixtures/album.stub';
+import { AlbumFactory } from 'test/factories/album.factory';

 describe('mapAlbum', () => {
   it('should set start and end dates', () => {
-    const dto = mapAlbum(albumStub.twoAssets, false);
-    expect(dto.startDate).toEqual(new Date('2020-12-31T23:59:00.000Z'));
-    expect(dto.endDate).toEqual(new Date('2025-01-01T01:02:03.456Z'));
+    const startDate = new Date('2023-02-22T05:06:29.716Z');
+    const endDate = new Date('2025-01-01T01:02:03.456Z');
+    const album = AlbumFactory.from().asset({ localDateTime: endDate }).asset({ localDateTime: startDate }).build();
+    const dto = mapAlbum(album, false);
+    expect(dto.startDate).toEqual(startDate);
+    expect(dto.endDate).toEqual(endDate);
   });

   it('should not set start and end dates for empty assets', () => {
-    const dto = mapAlbum(albumStub.empty, false);
+    const dto = mapAlbum(AlbumFactory.create(), false);
     expect(dto.startDate).toBeUndefined();
     expect(dto.endDate).toBeUndefined();
   });
@@ -34,12 +34,14 @@ import { FilenameParamDto } from 'src/validation';
+import type { DatabaseBackupController as _DatabaseBackupController } from 'src/controllers/database-backup.controller';
 import type { ServerController as _ServerController } from 'src/controllers/server.controller';
+import { DatabaseBackupDeleteDto, DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
+import { DatabaseBackupService } from 'src/services/database-backup.service';

 @Controller()
 export class MaintenanceWorkerController {
   constructor(
     private logger: LoggingRepository,
     private service: MaintenanceWorkerService,
+    private databaseBackupService: DatabaseBackupService,
   ) {}

   /**
@@ -61,7 +63,7 @@ export class MaintenanceWorkerController {
   @Get('admin/database-backups')
   @MaintenanceRoute()
   listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
-    return this.service.listBackups();
+    return this.databaseBackupService.listBackups();
   }

   /**
@@ -74,7 +76,7 @@ export class MaintenanceWorkerController {
     @Res() res: Response,
     @Next() next: NextFunction,
   ) {
-    await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
+    await sendFile(res, next, () => this.databaseBackupService.downloadBackup(filename), this.logger);
   }

   /**
@@ -83,7 +85,7 @@ export class MaintenanceWorkerController {
   @Delete('admin/database-backups')
   @MaintenanceRoute()
   async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
-    return this.service.deleteBackup(dto.backups);
+    return this.databaseBackupService.deleteBackup(dto.backups);
   }

   /**
@@ -96,7 +98,7 @@ export class MaintenanceWorkerController {
     @UploadedFile()
     file: Express.Multer.File,
   ): Promise<void> {
-    return this.service.uploadBackup(file);
+    return this.databaseBackupService.uploadBackup(file);
   }

   @Get('admin/maintenance/status')
@@ -1,23 +1,18 @@
-import { BadRequestException, UnauthorizedException } from '@nestjs/common';
+import { UnauthorizedException } from '@nestjs/common';
 import { SignJWT } from 'jose';
-import { DateTime } from 'luxon';
-import { PassThrough, Readable } from 'node:stream';
-import { StorageCore } from 'src/cores/storage.core';
-import { MaintenanceAction, StorageFolder, SystemMetadataKey } from 'src/enum';
+import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
 import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
 import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
 import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
-import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';
-
-function* mockData() {
-  yield '';
-}
+import { DatabaseBackupService } from 'src/services/database-backup.service';
+import { automock, AutoMocked, getMocks, ServiceMocks } from 'test/utils';

 describe(MaintenanceWorkerService.name, () => {
   let sut: MaintenanceWorkerService;
   let mocks: ServiceMocks;
   let maintenanceWebsocketRepositoryMock: AutoMocked<MaintenanceWebsocketRepository>;
   let maintenanceHealthRepositoryMock: AutoMocked<MaintenanceHealthRepository>;
+  let databaseBackupServiceMock: AutoMocked<DatabaseBackupService>;

   beforeEach(() => {
     mocks = getMocks();
@@ -29,6 +24,20 @@ describe(MaintenanceWorkerService.name, () => {
       args: [mocks.logger],
       strict: false,
     });
+    databaseBackupServiceMock = automock(DatabaseBackupService, {
+      args: [
+        mocks.logger,
+        mocks.storage,
+        mocks.config,
+        mocks.systemMetadata,
+        mocks.process,
+        mocks.database,
+        mocks.cron,
+        mocks.job,
+        maintenanceHealthRepositoryMock,
+      ],
+      strict: false,
+    });

     sut = new MaintenanceWorkerService(
       mocks.logger as never,
@@ -40,6 +49,7 @@ describe(MaintenanceWorkerService.name, () => {
       mocks.storage as never,
       mocks.process,
       mocks.database as never,
+      databaseBackupServiceMock,
     );

     sut.mock({
@@ -310,17 +320,6 @@ describe(MaintenanceWorkerService.name, () => {
   describe('action: restore database', () => {
     beforeEach(() => {
       mocks.database.tryLock.mockResolvedValueOnce(true);
-
-      mocks.storage.readdir.mockResolvedValue([]);
-      mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
-      mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
-      mocks.process.fork.mockImplementation(() => mockSpawn(0, 'Immich Server is listening', ''));
-      mocks.storage.rename.mockResolvedValue();
-      mocks.storage.unlink.mockResolvedValue();
-      mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
-      mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
-      mocks.storage.createGzip.mockReturnValue(new PassThrough());
-      mocks.storage.createGunzip.mockReturnValue(new PassThrough());
     });

     it('should update maintenance mode state', async () => {
@@ -341,21 +340,7 @@ describe(MaintenanceWorkerService.name, () => {
       });
     });

-    it('should fail to restore invalid backup', async () => {
-      await sut.runAction({
-        action: MaintenanceAction.RestoreDatabase,
-        restoreBackupFilename: 'filename',
-      });
-
-      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
-        active: true,
-        action: MaintenanceAction.RestoreDatabase,
-        error: 'Error: Invalid backup file format!',
-        task: 'error',
-      });
-    });
-
-    it('should successfully run a backup', async () => {
+    it('should defer to database backup service', async () => {
       await sut.runAction({
         action: MaintenanceAction.RestoreDatabase,
         restoreBackupFilename: 'development-filename.sql',
@@ -380,13 +365,10 @@ describe(MaintenanceWorkerService.name, () => {
           action: 'end',
         },
       );
-
-      expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
-      expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);
     });

-    it('should fail if backup creation fails', async () => {
-      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
+    it('should forward errors from database backup service', async () => {
+      databaseBackupServiceMock.restoreDatabaseBackup.mockRejectedValue('Sample error');

       await sut.runAction({
         action: MaintenanceAction.RestoreDatabase,
@@ -396,149 +378,16 @@ describe(MaintenanceWorkerService.name, () => {
       expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
         active: true,
         action: MaintenanceAction.RestoreDatabase,
-        error: 'Error: pg_dump non-zero exit code (1)\nerror',
+        error: 'Sample error',
         task: 'error',
       });

-      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
-        'MaintenanceStatusV1',
-        expect.any(String),
-        expect.objectContaining({
-          task: 'error',
-        }),
-      );
-    });
-
-    it('should fail if restore itself fails', async () => {
-      mocks.process.spawnDuplexStream
-        .mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''))
-        .mockReturnValueOnce(mockDuplex('gzip', 0, 'data', ''))
-        .mockReturnValueOnce(mockDuplex('psql', 1, '', 'error'));
-
-      await sut.runAction({
-        action: MaintenanceAction.RestoreDatabase,
-        restoreBackupFilename: 'development-filename.sql',
-      });
-
-      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
+      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'public', {
         active: true,
         action: MaintenanceAction.RestoreDatabase,
-        error: 'Error: psql non-zero exit code (1)\nerror',
+        error: 'Something went wrong, see logs!',
         task: 'error',
       });
-
-      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
-        'MaintenanceStatusV1',
-        expect.any(String),
-        expect.objectContaining({
-          task: 'error',
-        }),
-      );
     });
-
-    it('should rollback if database migrations fail', async () => {
-      mocks.database.runMigrations.mockRejectedValue(new Error('Migrations Error'));
-
-      await sut.runAction({
-        action: MaintenanceAction.RestoreDatabase,
-        restoreBackupFilename: 'development-filename.sql',
-      });
-
-      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
-        active: true,
-        action: MaintenanceAction.RestoreDatabase,
-        error: 'Error: Migrations Error',
-        task: 'error',
-      });
-
-      expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalledTimes(0);
-      expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
-    });
-
-    it('should rollback if API healthcheck fails', async () => {
-      maintenanceHealthRepositoryMock.checkApiHealth.mockRejectedValue(new Error('Health Error'));
-
-      await sut.runAction({
-        action: MaintenanceAction.RestoreDatabase,
-        restoreBackupFilename: 'development-filename.sql',
-      });
-
-      expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
-        active: true,
-        action: MaintenanceAction.RestoreDatabase,
-        error: 'Error: Health Error',
-        task: 'error',
-      });
-
-      expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
-      expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
-    });
   });

-  /**
-   * Backups
-   */
-
-  describe('listBackups', () => {
-    it('should give us all backups', async () => {
-      mocks.storage.readdir.mockResolvedValue([
-        `immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
-        `immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
-        'immich-db-backup-1753789649000.sql.gz',
-        `immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
-      ]);
-      mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
-
-      await expect(sut.listBackups()).resolves.toMatchObject({
-        backups: [
-          { filename: 'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
-          { filename: 'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
-          { filename: 'immich-db-backup-1753789649000.sql.gz', filesize: 1024 },
-        ],
-      });
-    });
-  });
-
-  describe('deleteBackup', () => {
-    it('should reject invalid file names', async () => {
-      await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
-        new BadRequestException('Invalid backup name!'),
-      );
-    });
-
-    it('should unlink the target file', async () => {
-      await sut.deleteBackup(['filename.sql']);
-      expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
-      expect(mocks.storage.unlink).toHaveBeenCalledWith(
-        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
-      );
-    });
-  });
-
-  describe('uploadBackup', () => {
-    it('should reject invalid file names', async () => {
-      await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
-        new BadRequestException('Invalid backup name!'),
-      );
-    });
-
-    it('should write file', async () => {
-      await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
-      expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
-    });
-  });
-
-  describe('downloadBackup', () => {
-    it('should reject invalid file names', () => {
-      expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
-    });
-
-    it('should get backup path', () => {
-      expect(sut.downloadBackup('hello.sql.gz')).toEqual(
-        expect.objectContaining({
-          path: '/data/backups/hello.sql.gz',
-        }),
-      );
-    });
-  });
 });
@@ -25,19 +25,11 @@ import { StorageRepository } from 'src/repositories/storage.repository';
 import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
 import { type ApiService as _ApiService } from 'src/services/api.service';
 import { type BaseService as _BaseService } from 'src/services/base.service';
-import { type DatabaseBackupService as _DatabaseBackupService } from 'src/services/database-backup.service';
+import { DatabaseBackupService } from 'src/services/database-backup.service';
 import { type ServerService as _ServerService } from 'src/services/server.service';
 import { type VersionService as _VersionService } from 'src/services/version.service';
 import { MaintenanceModeState } from 'src/types';
 import { getConfig } from 'src/utils/config';
-import {
-  deleteDatabaseBackup,
-  downloadDatabaseBackup,
-  listDatabaseBackups,
-  restoreDatabaseBackup,
-  uploadDatabaseBackup,
-} from 'src/utils/database-backups';
-import { ImmichFileResponse } from 'src/utils/file';
 import { createMaintenanceLoginUrl, detectPriorInstall } from 'src/utils/maintenance';
 import { getExternalDomain } from 'src/utils/misc';
@@ -62,6 +54,7 @@ export class MaintenanceWorkerService {
     private storageRepository: StorageRepository,
     private processRepository: ProcessRepository,
     private databaseRepository: DatabaseRepository,
+    private databaseBackupService: DatabaseBackupService,
   ) {
     this.logger.setContext(this.constructor.name);
   }
@@ -187,35 +180,6 @@ export class MaintenanceWorkerService {
     return '/usr/src/app/upload';
   }

-  /**
-   * {@link _DatabaseBackupService.listBackups}
-   */
-  async listBackups(): Promise<{ backups: { filename: string; filesize: number }[] }> {
-    const backups = await listDatabaseBackups(this.backupRepos);
-    return { backups };
-  }
-
-  /**
-   * {@link _DatabaseBackupService.deleteBackup}
-   */
-  async deleteBackup(files: string[]): Promise<void> {
-    return deleteDatabaseBackup(this.backupRepos, files);
-  }
-
-  /**
-   * {@link _DatabaseBackupService.uploadBackup}
-   */
-  async uploadBackup(file: Express.Multer.File): Promise<void> {
-    return uploadDatabaseBackup(this.backupRepos, file);
-  }
-
-  /**
-   * {@link _DatabaseBackupService.downloadBackup}
-   */
-  downloadBackup(fileName: string): ImmichFileResponse {
-    return downloadDatabaseBackup(fileName);
-  }
-
   private get secret() {
     if (!this.#secret) {
       throw new Error('Secret is not initialised yet.');
@@ -364,7 +328,7 @@ export class MaintenanceWorkerService {
       progress: 0,
     });

-    await restoreDatabaseBackup(this.backupRepos, filename, (task, progress) =>
+    await this.databaseBackupService.restoreDatabaseBackup(filename, (task, progress) =>
       this.setStatus({
         active: true,
         action: MaintenanceAction.RestoreDatabase,
@@ -58,7 +58,7 @@ select
 from
   (
     select
-      *
+      "shared_link".*
     from
       "shared_link"
     where
@@ -243,7 +243,7 @@ select
 from
   (
     select
-      *
+      "shared_link".*
     from
       "shared_link"
     where
@@ -316,7 +316,7 @@ select
 from
   (
     select
-      *
+      "shared_link".*
     from
       "shared_link"
     where
@@ -78,43 +78,13 @@ limit
 -- AssetJobRepository.streamForThumbnailJob
 select
   "asset"."id",
-  "asset"."thumbhash",
-  (
-    select
-      coalesce(json_agg(agg), '[]')
-    from
-      (
-        select
-          "asset_file"."id",
-          "asset_file"."path",
-          "asset_file"."type",
-          "asset_file"."isEdited"
-        from
-          "asset_file"
-        where
-          "asset_file"."assetId" = "asset"."id"
-      ) as agg
-  ) as "files",
-  (
-    select
-      coalesce(json_agg(agg), '[]')
-    from
-      (
-        select
-          "asset_edit"."action",
-          "asset_edit"."parameters"
-        from
-          "asset_edit"
-        where
-          "asset_edit"."assetId" = "asset"."id"
-      ) as agg
-  ) as "edits"
+  "asset"."isEdited"
 from
   "asset"
   inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id"
 where
   "asset"."deletedAt" is null
-  and "asset"."visibility" != $1
+  and "asset"."visibility" != 'hidden'
   and (
     not exists (
       select
@@ -122,7 +92,7 @@ where
         "asset_file"
       where
         "assetId" = "asset"."id"
-        and "asset_file"."type" = $2
+        and "type" = 'thumbnail'
     )
     or not exists (
       select
@@ -130,17 +100,75 @@ where
        "asset_file"
      where
        "assetId" = "asset"."id"
-        and "asset_file"."type" = $3
+        and "type" = 'preview'
    )
-    or not exists (
-      select
-      from
-        "asset_file"
-      where
-        "assetId" = "asset"."id"
-        and "asset_file"."type" = $4
-    )
+    or (
+      "asset"."isEdited" = true
+      and not exists (
+        select
+        from
+          "asset_file"
+        where
+          "assetId" = "asset"."id"
+          and "type" = 'fullsize'
+          and "asset_file"."isEdited" = true
+      )
+    )
    or "asset"."thumbhash" is null
+    or (
+      not exists (
+        select
+        from
+          "asset_file"
+        where
+          "assetId" = "asset"."id"
+          and "type" = 'fullsize'
+      )
+      and f_unaccent (asset."originalFileName") like any (
+        array[
+          '%.3fr',
+          '%.ari',
+          '%.arw',
+          '%.cap',
+          '%.cin',
+          '%.cr2',
+          '%.cr3',
+          '%.crw',
+          '%.dcr',
+          '%.dng',
+          '%.erf',
+          '%.fff',
+          '%.iiq',
+          '%.k25',
+          '%.kdc',
+          '%.mrw',
+          '%.nef',
+          '%.nrw',
+          '%.orf',
+          '%.ori',
+          '%.pef',
+          '%.psd',
+          '%.raf',
+          '%.raw',
+          '%.rw2',
+          '%.rwl',
+          '%.sr2',
+          '%.srf',
+          '%.srw',
+          '%.x3f',
+          '%.heic',
+          '%.heif',
+          '%.hif',
+          '%.insp',
+          '%.jp2',
+          '%.jpe',
+          '%.jxl',
+          '%.svg',
+          '%.tif',
+          '%.tiff'
+        ]::text[]
+      )
+    )
  )

 -- AssetJobRepository.getForMigrationJob
@@ -176,6 +176,7 @@ select
     where
       "asset_file"."assetId" = "asset"."id"
       and "asset_file"."type" = 'preview'
+      and "asset_file"."isEdited" = $1
   ) as "previewPath"
 from
   "person"
@@ -183,7 +184,7 @@ from
   inner join "asset" on "asset_face"."assetId" = "asset"."id"
   left join "asset_exif" on "asset_exif"."assetId" = "asset"."id"
 where
-  "person"."id" = $1
+  "person"."id" = $2
   and "asset_face"."deletedAt" is null

 -- PersonRepository.reassignFace
@@ -44,9 +44,9 @@ const withAlbumUsers = (eb: ExpressionBuilder<DB, 'album'>) => {
 };

 const withSharedLink = (eb: ExpressionBuilder<DB, 'album'>) => {
-  return jsonArrayFrom(eb.selectFrom('shared_link').selectAll().whereRef('shared_link.albumId', '=', 'album.id')).as(
-    'sharedLinks',
-  );
+  return jsonArrayFrom(
+    eb.selectFrom('shared_link').selectAll('shared_link').whereRef('shared_link.albumId', '=', 'album.id'),
+  ).as('sharedLinks');
 };

 const withAssets = (eb: ExpressionBuilder<DB, 'album'>) => {
@@ -283,7 +283,7 @@ export class AlbumRepository {

     return tx
       .selectFrom('album')
-      .selectAll()
+      .selectAll('album')
       .where('id', '=', newAlbum.id)
       .select(withOwner)
       .select(withAssets)
@@ -18,6 +18,7 @@ import {
   withFilePath,
   withFiles,
 } from 'src/utils/database';
+import { mimeTypes } from 'src/utils/mime-types';

 @Injectable()
 export class AssetJobRepository {
@@ -61,51 +62,40 @@ export class AssetJobRepository {
   streamForThumbnailJob(options: { force: boolean | undefined; fullsizeEnabled: boolean }) {
     return this.db
       .selectFrom('asset')
-      .select(['asset.id', 'asset.thumbhash'])
-      .select(withFiles)
-      .select(withEdits)
+      .select(['asset.id', 'asset.isEdited'])
       .where('asset.deletedAt', 'is', null)
-      .where('asset.visibility', '!=', AssetVisibility.Hidden)
+      .where('asset.visibility', '!=', sql.lit(AssetVisibility.Hidden))
       .$if(!options.force, (qb) =>
         qb
           // If there aren't any entries, metadata extraction hasn't run yet which is required for thumbnails
           .innerJoin('asset_job_status', 'asset_job_status.assetId', 'asset.id')
-          .where((eb) => {
+          .where(({ and, eb, exists, not, or, selectFrom }) => {
+            const file = (type: AssetFileType) =>
+              selectFrom('asset_file').whereRef('assetId', '=', 'asset.id').where('type', '=', sql.lit(type));
+
             const conditions = [
-              eb.not((eb) =>
-                eb.exists((qb) =>
-                  qb
-                    .selectFrom('asset_file')
-                    .whereRef('assetId', '=', 'asset.id')
-                    .where('asset_file.type', '=', AssetFileType.Preview),
-                ),
-              ),
-              eb.not((eb) =>
-                eb.exists((qb) =>
-                  qb
-                    .selectFrom('asset_file')
-                    .whereRef('assetId', '=', 'asset.id')
-                    .where('asset_file.type', '=', AssetFileType.Thumbnail),
-                ),
-              ),
+              not(exists(file(AssetFileType.Thumbnail))),
+              not(exists(file(AssetFileType.Preview))),
+              and([
+                eb('asset.isEdited', '=', sql.lit(true)),
+                not(exists(file(AssetFileType.FullSize).where('asset_file.isEdited', '=', sql.lit(true)))),
+              ]),
+              eb('asset.thumbhash', 'is', null),
             ];

             if (options.fullsizeEnabled) {
               const isWebUnsupported = sql.join(
                 Object.keys(mimeTypes.webUnsupportedImage).map((ext) => sql.lit(`%${ext}`)),
               );
               conditions.push(
-                eb.not((eb) =>
-                  eb.exists((qb) =>
-                    qb
-                      .selectFrom('asset_file')
-                      .whereRef('assetId', '=', 'asset.id')
-                      .where('asset_file.type', '=', AssetFileType.FullSize),
-                  ),
-                ),
+                and([
+                  not(exists(file(AssetFileType.FullSize))),
+                  eb(sql`f_unaccent(asset."originalFileName")`, 'like', sql`any(array[${isWebUnsupported}]::text[])`),
+                ]),
               );
             }

-            conditions.push(eb('asset.thumbhash', 'is', null));
-
-            return eb.or(conditions);
+            return or(conditions);
           }),
       )
       .stream();
@@ -288,6 +288,7 @@ export class PersonRepository {
           .select('asset_file.path')
           .whereRef('asset_file.assetId', '=', 'asset.id')
           .where('asset_file.type', '=', sql.lit(AssetFileType.Preview))
+          .where('asset_file.isEdited', '=', false)
           .as('previewPath'),
       )
       .where('person.id', '=', id)
@@ -260,7 +260,7 @@ export class SharedLinkRepository {
           .selectAll('asset')
           .innerJoinLateral(
             (eb) =>
-              eb.selectFrom('asset_exif').whereRef('asset_exif.assetId', '=', 'asset.id').selectAll().as('exif'),
+              eb.selectFrom('asset_exif').whereRef('asset_exif.assetId', '=', 'asset.id').selectAll().as('exifInfo'),
             (join) => join.onTrue(),
           )
           .as('assets'),
File diff suppressed because it is too large
@@ -6,9 +6,10 @@ import { AssetEditAction } from 'src/dtos/editing.dto';
 import { AssetMetadataKey, AssetStatus, AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum';
 import { AssetStats } from 'src/repositories/asset.repository';
 import { AssetService } from 'src/services/asset.service';
+import { AssetFactory } from 'test/factories/asset.factory';
+import { AuthFactory } from 'test/factories/auth.factory';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
-import { userStub } from 'test/fixtures/user.stub';
 import { factory } from 'test/small.factory';
 import { makeStream, newTestService, ServiceMocks } from 'test/utils';
@@ -45,35 +46,33 @@ describe(AssetService.name, () => {

   describe('getStatistics', () => {
     it('should get the statistics for a user, excluding archived assets', async () => {
+      const auth = AuthFactory.create();
       mocks.asset.getStatistics.mockResolvedValue(stats);
-      await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.Timeline })).resolves.toEqual(
-        statResponse,
-      );
-      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, {
-        visibility: AssetVisibility.Timeline,
-      });
+      await expect(sut.getStatistics(auth, { visibility: AssetVisibility.Timeline })).resolves.toEqual(statResponse);
+      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(auth.user.id, { visibility: AssetVisibility.Timeline });
     });

     it('should get the statistics for a user for archived assets', async () => {
+      const auth = AuthFactory.create();
       mocks.asset.getStatistics.mockResolvedValue(stats);
-      await expect(sut.getStatistics(authStub.admin, { visibility: AssetVisibility.Archive })).resolves.toEqual(
-        statResponse,
-      );
-      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, {
+      await expect(sut.getStatistics(auth, { visibility: AssetVisibility.Archive })).resolves.toEqual(statResponse);
+      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(auth.user.id, {
         visibility: AssetVisibility.Archive,
       });
     });

     it('should get the statistics for a user for favorite assets', async () => {
+      const auth = AuthFactory.create();
       mocks.asset.getStatistics.mockResolvedValue(stats);
-      await expect(sut.getStatistics(authStub.admin, { isFavorite: true })).resolves.toEqual(statResponse);
-      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, { isFavorite: true });
+      await expect(sut.getStatistics(auth, { isFavorite: true })).resolves.toEqual(statResponse);
+      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(auth.user.id, { isFavorite: true });
     });

     it('should get the statistics for a user for all assets', async () => {
+      const auth = AuthFactory.create();
       mocks.asset.getStatistics.mockResolvedValue(stats);
-      await expect(sut.getStatistics(authStub.admin, {})).resolves.toEqual(statResponse);
-      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(authStub.admin.user.id, {});
+      await expect(sut.getStatistics(auth, {})).resolves.toEqual(statResponse);
+      expect(mocks.asset.getStatistics).toHaveBeenCalledWith(auth.user.id, {});
     });
   });
@@ -249,10 +248,11 @@ describe(AssetService.name, () => {
     });

     it('should fail linking a live video if the motion part could not be found', async () => {
+      const auth = AuthFactory.create();
       mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));

       await expect(
-        sut.update(authStub.admin, assetStub.livePhotoStillAsset.id, {
+        sut.update(auth, assetStub.livePhotoStillAsset.id, {
           livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
         }),
       ).rejects.toBeInstanceOf(BadRequestException);
@@ -267,11 +267,12 @@ describe(AssetService.name, () => {
       });
       expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
         assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
+        userId: auth.user.id,
       });
     });

     it('should fail linking a live video if the motion part is not a video', async () => {
+      const auth = AuthFactory.create();
       mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
       mocks.asset.getById.mockResolvedValue(assetStub.livePhotoStillAsset);
@@ -291,16 +292,17 @@ describe(AssetService.name, () => {
       });
       expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
         assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
+        userId: auth.user.id,
       });
     });

     it('should fail linking a live video if the motion part has a different owner', async () => {
+      const auth = AuthFactory.create();
       mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
       mocks.asset.getById.mockResolvedValue(assetStub.livePhotoMotionAsset);

       await expect(
-        sut.update(authStub.admin, assetStub.livePhotoStillAsset.id, {
+        sut.update(auth, assetStub.livePhotoStillAsset.id, {
           livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
         }),
       ).rejects.toBeInstanceOf(BadRequestException);
@@ -315,52 +317,41 @@ describe(AssetService.name, () => {
       });
       expect(mocks.event.emit).not.toHaveBeenCalledWith('AssetShow', {
         assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
+        userId: auth.user.id,
       });
     });

     it('should link a live video', async () => {
-      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
-      mocks.asset.getById.mockResolvedValueOnce({
-        ...assetStub.livePhotoMotionAsset,
-        ownerId: authStub.admin.user.id,
-        visibility: AssetVisibility.Timeline,
-      });
-      mocks.asset.getById.mockResolvedValueOnce(assetStub.image);
-      mocks.asset.update.mockResolvedValue(assetStub.image);
+      const motionAsset = AssetFactory.create({ type: AssetType.Video, visibility: AssetVisibility.Timeline });
+      const stillAsset = AssetFactory.create();
+      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([stillAsset.id]));
+      mocks.asset.getById.mockResolvedValueOnce(motionAsset);
+      mocks.asset.getById.mockResolvedValueOnce(stillAsset);
+      mocks.asset.update.mockResolvedValue(stillAsset);
+      const auth = AuthFactory.from(motionAsset.owner).build();

-      await sut.update(authStub.admin, assetStub.livePhotoStillAsset.id, {
-        livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
-      });
+      await sut.update(auth, stillAsset.id, { livePhotoVideoId: motionAsset.id });

-      expect(mocks.asset.update).toHaveBeenCalledWith({
-        id: assetStub.livePhotoMotionAsset.id,
-        visibility: AssetVisibility.Hidden,
-      });
-      expect(mocks.event.emit).toHaveBeenCalledWith('AssetHide', {
-        assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
-      });
-      expect(mocks.asset.update).toHaveBeenCalledWith({
-        id: assetStub.livePhotoStillAsset.id,
-        livePhotoVideoId: assetStub.livePhotoMotionAsset.id,
-      });
+      expect(mocks.asset.update).toHaveBeenCalledWith({ id: motionAsset.id, visibility: AssetVisibility.Hidden });
+      expect(mocks.event.emit).toHaveBeenCalledWith('AssetHide', { assetId: motionAsset.id, userId: auth.user.id });
+      expect(mocks.asset.update).toHaveBeenCalledWith({ id: stillAsset.id, livePhotoVideoId: motionAsset.id });
     });

     it('should throw an error if asset could not be found after update', async () => {
       mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
-      await expect(sut.update(authStub.admin, 'asset-1', { isFavorite: true })).rejects.toBeInstanceOf(
+      await expect(sut.update(AuthFactory.create(), 'asset-1', { isFavorite: true })).rejects.toBeInstanceOf(
         BadRequestException,
       );
     });

     it('should unlink a live video', async () => {
+      const auth = AuthFactory.create();
       mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([assetStub.livePhotoStillAsset.id]));
       mocks.asset.getById.mockResolvedValueOnce(assetStub.livePhotoStillAsset);
       mocks.asset.getById.mockResolvedValueOnce(assetStub.livePhotoMotionAsset);
       mocks.asset.update.mockResolvedValueOnce(assetStub.image);

-      await sut.update(authStub.admin, assetStub.livePhotoStillAsset.id, { livePhotoVideoId: null });
+      await sut.update(auth, assetStub.livePhotoStillAsset.id, { livePhotoVideoId: null });

       expect(mocks.asset.update).toHaveBeenCalledWith({
         id: assetStub.livePhotoStillAsset.id,
@@ -372,7 +363,7 @@ describe(AssetService.name, () => {
       });
       expect(mocks.event.emit).toHaveBeenCalledWith('AssetShow', {
         assetId: assetStub.livePhotoMotionAsset.id,
-        userId: userStub.admin.id,
+        userId: auth.user.id,
       });
     });
@@ -392,17 +383,15 @@ describe(AssetService.name, () => {

   describe('updateAll', () => {
     it('should require asset write access for all ids', async () => {
-      await expect(
-        sut.updateAll(authStub.admin, {
-          ids: ['asset-1'],
-        }),
-      ).rejects.toBeInstanceOf(BadRequestException);
+      const auth = AuthFactory.create();
+      await expect(sut.updateAll(auth, { ids: ['asset-1'] })).rejects.toBeInstanceOf(BadRequestException);
     });

     it('should update all assets', async () => {
+      const auth = AuthFactory.create();
       mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));

-      await sut.updateAll(authStub.admin, { ids: ['asset-1', 'asset-2'], visibility: AssetVisibility.Archive });
+      await sut.updateAll(auth, { ids: ['asset-1', 'asset-2'], visibility: AssetVisibility.Archive });

       expect(mocks.asset.updateAll).toHaveBeenCalledWith(['asset-1', 'asset-2'], {
         visibility: AssetVisibility.Archive,
@@ -410,9 +399,10 @@ describe(AssetService.name, () => {
     });

     it('should not update Assets table if no relevant fields are provided', async () => {
+      const auth = AuthFactory.create();
       mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));

-      await sut.updateAll(authStub.admin, {
+      await sut.updateAll(auth, {
         ids: ['asset-1'],
         latitude: 0,
         longitude: 0,
@@ -1,270 +0,0 @@
-import { DateTime } from 'luxon';
-import { PassThrough } from 'node:stream';
-import { defaults, SystemConfig } from 'src/config';
-import { StorageCore } from 'src/cores/storage.core';
-import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
-import { BackupService } from 'src/services/backup.service';
-import { systemConfigStub } from 'test/fixtures/system-config.stub';
-import { mockDuplex, mockSpawn, newTestService, ServiceMocks } from 'test/utils';
-import { describe } from 'vitest';
-
-describe(BackupService.name, () => {
-  let sut: BackupService;
-  let mocks: ServiceMocks;
-
-  beforeEach(() => {
-    ({ sut, mocks } = newTestService(BackupService));
-  });
-
-  it('should work', () => {
-    expect(sut).toBeDefined();
-  });
-
-  describe('onBootstrapEvent', () => {
-    it('should init cron job and handle config changes', async () => {
-      mocks.database.tryLock.mockResolvedValue(true);
-      mocks.cron.create.mockResolvedValue();
-
-      await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
-
-      expect(mocks.cron.create).toHaveBeenCalled();
-    });
-
-    it('should not initialize backup database cron job when lock is taken', async () => {
-      mocks.database.tryLock.mockResolvedValue(false);
-
-      await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
-
-      expect(mocks.cron.create).not.toHaveBeenCalled();
-    });
-
-    it('should not initialise backup database job when running on microservices', async () => {
-      mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
-      await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
-
-      expect(mocks.cron.create).not.toHaveBeenCalled();
-    });
-  });
-
-  describe('onConfigUpdateEvent', () => {
-    beforeEach(async () => {
-      mocks.database.tryLock.mockResolvedValue(true);
-      mocks.cron.create.mockResolvedValue();
-
-      await sut.onConfigInit({ newConfig: defaults });
-    });
-
-    it('should update cron job if backup is enabled', () => {
-      mocks.cron.update.mockResolvedValue();
-
-      sut.onConfigUpdate({
-        oldConfig: defaults,
-        newConfig: {
-          backup: {
-            database: {
-              enabled: true,
-              cronExpression: '0 1 * * *',
-            },
-          },
-        } as SystemConfig,
-      });
-
-      expect(mocks.cron.update).toHaveBeenCalledWith({ name: 'backupDatabase', expression: '0 1 * * *', start: true });
-      expect(mocks.cron.update).toHaveBeenCalled();
-    });
-
-    it('should do nothing if instance does not have the backup database lock', async () => {
-      mocks.database.tryLock.mockResolvedValue(false);
-      await sut.onConfigInit({ newConfig: defaults });
-      sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
-      expect(mocks.cron.update).not.toHaveBeenCalled();
-    });
-  });
-
-  describe('cleanupDatabaseBackups', () => {
-    it('should do nothing if not reached keepLastAmount', async () => {
-      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
-      mocks.storage.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz']);
-      await sut.cleanupDatabaseBackups();
-      expect(mocks.storage.unlink).not.toHaveBeenCalled();
-    });
-
-    it('should remove failed backup files', async () => {
-      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
-      //`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
-      mocks.storage.readdir.mockResolvedValue([
-        'immich-db-backup-123.sql.gz.tmp',
-        `immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
-        `immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
-        `immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
-      ]);
-      await sut.cleanupDatabaseBackups();
-      expect(mocks.storage.unlink).toHaveBeenCalledTimes(3);
-      expect(mocks.storage.unlink).toHaveBeenCalledWith(
-        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-123.sql.gz.tmp`,
-      );
-      expect(mocks.storage.unlink).toHaveBeenCalledWith(
-        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp`,
-      );
-      expect(mocks.storage.unlink).toHaveBeenCalledWith(
-        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz.tmp`,
-      );
-    });
-
-    it('should remove old backup files over keepLastAmount', async () => {
-      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
-      mocks.storage.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz']);
-      await sut.cleanupDatabaseBackups();
-      expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
-      expect(mocks.storage.unlink).toHaveBeenCalledWith(
-        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1.sql.gz`,
-      );
-    });
-
-    it('should remove old backup files over keepLastAmount and failed backups', async () => {
-      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
-      mocks.storage.readdir.mockResolvedValue([
-        `immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
-        `immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
-        'immich-db-backup-1753789649000.sql.gz',
-        `immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
-      ]);
-      await sut.cleanupDatabaseBackups();
-      expect(mocks.storage.unlink).toHaveBeenCalledTimes(3);
-      expect(mocks.storage.unlink).toHaveBeenCalledWith(
-        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1753789649000.sql.gz`,
-      );
-      expect(mocks.storage.unlink).toHaveBeenCalledWith(
-        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp`,
-      );
-      expect(mocks.storage.unlink).toHaveBeenCalledWith(
-        `${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz`,
-      );
-    });
-  });
-
-  describe('handleBackupDatabase', () => {
-    beforeEach(() => {
-      mocks.storage.readdir.mockResolvedValue([]);
-      mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
-      mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
-      mocks.storage.rename.mockResolvedValue();
-      mocks.storage.unlink.mockResolvedValue();
-      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
-      mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
-    });
-
-    it('should sanitize DB_URL (remove uselibpqcompat) before calling pg_dumpall', async () => {
-      // create a service instance with a URL connection that includes libpqcompat
-      const dbUrl = 'postgresql://postgres:pwd@host:5432/immich?sslmode=require&uselibpqcompat=true';
-      const configMock = {
-        getEnv: () => ({ database: { config: { connectionType: 'url', url: dbUrl }, skipMigrations: false } }),
-        getWorker: () => ImmichWorker.Api,
-        isDev: () => false,
-      } as unknown as any;
-
-      ({ sut, mocks } = newTestService(BackupService, { config: configMock }));
-
-      mocks.storage.readdir.mockResolvedValue([]);
-      mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
-      mocks.storage.rename.mockResolvedValue();
-      mocks.storage.unlink.mockResolvedValue();
-      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
-      mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
-      mocks.database.getPostgresVersion.mockResolvedValue('14.10');
-
-      await sut.handleBackupDatabase();
-
-      expect(mocks.process.spawnDuplexStream).toHaveBeenCalled();
-      const call = mocks.process.spawnDuplexStream.mock.calls[0];
-      const args = call[1] as string[];
-      expect(args).toMatchInlineSnapshot(`
-        [
-          "postgresql://postgres:pwd@host:5432/immich?sslmode=require",
-          "--clean",
-          "--if-exists",
-        ]
-      `);
-    });
-
-    it('should run a database backup successfully', async () => {
-      const result = await sut.handleBackupDatabase();
-      expect(result).toBe(JobStatus.Success);
-      expect(mocks.storage.createWriteStream).toHaveBeenCalled();
-    });
-
-    it('should rename file on success', async () => {
-      const result = await sut.handleBackupDatabase();
-      expect(result).toBe(JobStatus.Success);
-      expect(mocks.storage.rename).toHaveBeenCalled();
-    });
-
-    it('should fail if pg_dump fails', async () => {
-      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
-    });
-
-    it('should not rename file if pgdump fails and gzip succeeds', async () => {
-      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
-      expect(mocks.storage.rename).not.toHaveBeenCalled();
-    });
-
-    it('should fail if gzip fails', async () => {
-      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
-      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
-    });
-
-    it('should fail if write stream fails', async () => {
-      mocks.storage.createWriteStream.mockImplementation(() => {
-        throw new Error('error');
-      });
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
-    });
-
-    it('should fail if rename fails', async () => {
-      mocks.storage.rename.mockRejectedValue(new Error('error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
-    });
-
-    it('should ignore unlink failing and still return failed job status', async () => {
-      mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
-      mocks.storage.unlink.mockRejectedValue(new Error('error'));
-      await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
-      expect(mocks.storage.unlink).toHaveBeenCalled();
-    });
-
-    it.each`
-      postgresVersion | expectedVersion
-      ${'14.10'} | ${14}
-      ${'14.10.3'} | ${14}
-      ${'14.10 (Debian 14.10-1.pgdg120+1)'} | ${14}
-      ${'15.3.3'} | ${15}
-      ${'16.4.2'} | ${16}
-      ${'17.15.1'} | ${17}
-      ${'18.0.0'} | ${18}
-    `(
-      `should use pg_dump $expectedVersion with postgres version $postgresVersion`,
-      async ({ postgresVersion, expectedVersion }) => {
-        mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
-        await sut.handleBackupDatabase();
-        expect(mocks.process.spawnDuplexStream).toHaveBeenCalledWith(
-          `/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
-          expect.any(Array),
-          expect.any(Object),
-        );
-      },
-    );
-    it.each`
-      postgresVersion
-      ${'13.99.99'}
-      ${'19.0.0'}
-    `(`should fail if postgres version $postgresVersion is not supported`, async ({ postgresVersion }) => {
-      mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
-      const result = await sut.handleBackupDatabase();
-      expect(mocks.process.spawn).not.toHaveBeenCalled();
-      expect(result).toBe(JobStatus.Failed);
-    });
-  });
-});
@@ -1,99 +0,0 @@
-import { Injectable } from '@nestjs/common';
-import path from 'node:path';
-import { StorageCore } from 'src/cores/storage.core';
-import { OnEvent, OnJob } from 'src/decorators';
-import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
-import { ArgOf } from 'src/repositories/event.repository';
-import { BaseService } from 'src/services/base.service';
-import {
-  createDatabaseBackup,
-  isFailedDatabaseBackupName,
-  isValidDatabaseRoutineBackupName,
-  UnsupportedPostgresError,
-} from 'src/utils/database-backups';
-import { handlePromiseError } from 'src/utils/misc';
-
-@Injectable()
-export class BackupService extends BaseService {
-  private backupLock = false;
-
-  @OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
-  async onConfigInit({
-    newConfig: {
-      backup: { database },
-    },
-  }: ArgOf<'ConfigInit'>) {
-    this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);
-
-    if (this.backupLock) {
-      this.cronRepository.create({
-        name: 'backupDatabase',
-        expression: database.cronExpression,
-        onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.DatabaseBackup }), this.logger),
-        start: database.enabled,
-      });
-    }
-  }
-
-  @OnEvent({ name: 'ConfigUpdate', server: true })
-  onConfigUpdate({ newConfig: { backup } }: ArgOf<'ConfigUpdate'>) {
-    if (!this.backupLock) {
-      return;
-    }
-
-    this.cronRepository.update({
-      name: 'backupDatabase',
-      expression: backup.database.cronExpression,
-      start: backup.database.enabled,
-    });
-  }
-
-  async cleanupDatabaseBackups() {
-    this.logger.debug(`Database Backup Cleanup Started`);
-    const {
-      backup: { database: config },
-    } = await this.getConfig({ withCache: false });
-
-    const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
-    const files = await this.storageRepository.readdir(backupsFolder);
-    const backups = files
-      .filter((filename) => isValidDatabaseRoutineBackupName(filename))
-      .toSorted()
-      .toReversed();
-    const failedBackups = files.filter((filename) => isFailedDatabaseBackupName(filename));
-
-    const toDelete = backups.slice(config.keepLastAmount);
-    toDelete.push(...failedBackups);
-
-    for (const file of toDelete) {
-      await this.storageRepository.unlink(path.join(backupsFolder, file));
-    }
-    this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
-  }
-
-  @OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
-  async handleBackupDatabase(): Promise<JobStatus> {
-    try {
-      await createDatabaseBackup(this.backupRepos);
-    } catch (error) {
-      if (error instanceof UnsupportedPostgresError) {
-        return JobStatus.Failed;
-      }
-
-      throw error;
-    }
-
-    await this.cleanupDatabaseBackups();
-    return JobStatus.Success;
-  }
-
-  private get backupRepos() {
-    return {
-      logger: this.logger,
-      storage: this.storageRepository,
-      config: this.configRepository,
-      process: this.processRepository,
-      database: this.databaseRepository,
-    };
-  }
-}
@@ -1,23 +1,594 @@
|
||||
import { BadRequestException } from '@nestjs/common';
|
||||
import { DateTime } from 'luxon';
|
||||
import { PassThrough, Readable } from 'node:stream';
|
||||
import { defaults, SystemConfig } from 'src/config';
|
||||
import { StorageCore } from 'src/cores/storage.core';
|
||||
import { StorageFolder } from 'src/enum';
|
||||
import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
|
||||
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
|
||||
import { DatabaseBackupService } from 'src/services/database-backup.service';
|
||||
import { MaintenanceService } from 'src/services/maintenance.service';
|
||||
import { newTestService, ServiceMocks } from 'test/utils';
|
||||
import { systemConfigStub } from 'test/fixtures/system-config.stub';
|
||||
import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';
|
||||
|
||||
describe(MaintenanceService.name, () => {
|
||||
describe(DatabaseBackupService.name, () => {
|
||||
let sut: DatabaseBackupService;
|
||||
let mocks: ServiceMocks;
|
||||
let maintenanceHealthRepositoryMock: AutoMocked<MaintenanceHealthRepository>;
|
||||
|
||||
beforeEach(() => {
|
||||
({ sut, mocks } = newTestService(DatabaseBackupService));
|
||||
mocks = getMocks();
|
||||
maintenanceHealthRepositoryMock = automock(MaintenanceHealthRepository, {
|
||||
args: [mocks.logger],
|
||||
strict: false,
|
||||
});
|
||||
sut = new DatabaseBackupService(
|
||||
mocks.logger as never,
|
||||
mocks.storage as never,
|
||||
mocks.config,
|
||||
mocks.systemMetadata as never,
|
||||
mocks.process,
|
||||
mocks.database as never,
|
||||
mocks.cron as never,
|
||||
mocks.job as never,
|
||||
maintenanceHealthRepositoryMock as never,
|
||||
);
|
||||
});
|
||||
|
||||
it('should work', () => {
|
||||
expect(sut).toBeDefined();
|
||||
});
|
||||
|
||||
describe('onBootstrapEvent', () => {
|
||||
it('should init cron job and handle config changes', async () => {
|
||||
mocks.database.tryLock.mockResolvedValue(true);
|
||||
mocks.cron.create.mockResolvedValue();
|
||||
|
||||
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
|
||||
|
||||
expect(mocks.cron.create).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not initialize backup database cron job when lock is taken', async () => {
|
||||
mocks.database.tryLock.mockResolvedValue(false);
|
||||
|
||||
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
|
||||
|
||||
expect(mocks.cron.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not initialise backup database job when running on microservices', async () => {
mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
await sut.onConfigInit({ newConfig: systemConfigStub.backupEnabled as SystemConfig });

expect(mocks.cron.create).not.toHaveBeenCalled();
});
});

describe('onConfigUpdateEvent', () => {
beforeEach(async () => {
mocks.database.tryLock.mockResolvedValue(true);
mocks.cron.create.mockResolvedValue();

await sut.onConfigInit({ newConfig: defaults });
});

it('should update cron job if backup is enabled', () => {
mocks.cron.update.mockResolvedValue();

sut.onConfigUpdate({
oldConfig: defaults,
newConfig: {
backup: {
database: {
enabled: true,
cronExpression: '0 1 * * *',
},
},
} as SystemConfig,
});

expect(mocks.cron.update).toHaveBeenCalledWith({ name: 'backupDatabase', expression: '0 1 * * *', start: true });
expect(mocks.cron.update).toHaveBeenCalled();
});

it('should do nothing if instance does not have the backup database lock', async () => {
mocks.database.tryLock.mockResolvedValue(false);
await sut.onConfigInit({ newConfig: defaults });
sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
expect(mocks.cron.update).not.toHaveBeenCalled();
});
});

describe('cleanupDatabaseBackups', () => {
it('should do nothing if not reached keepLastAmount', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz']);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).not.toHaveBeenCalled();
});

it('should remove failed backup files', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
//`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
mocks.storage.readdir.mockResolvedValue([
'immich-db-backup-123.sql.gz.tmp',
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
]);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(3);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-123.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz.tmp`,
);
});

it('should remove old backup files over keepLastAmount', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz']);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1.sql.gz`,
);
});

it('should remove old backup files over keepLastAmount and failed backups', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.readdir.mockResolvedValue([
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
'immich-db-backup-1753789649000.sql.gz',
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
]);
await sut.cleanupDatabaseBackups();
expect(mocks.storage.unlink).toHaveBeenCalledTimes(3);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-1753789649000.sql.gz`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250725T110216-v1.234.5-pg14.5.sql.gz.tmp`,
);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz`,
);
});
});

describe('handleBackupDatabase / createDatabaseBackup', () => {
beforeEach(() => {
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex()('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
});

it('should sanitize DB_URL (remove uselibpqcompat) before calling pg_dump', async () => {
// create a service instance with a URL connection that includes uselibpqcompat
const dbUrl = 'postgresql://postgres:pwd@host:5432/immich?sslmode=require&uselibpqcompat=true';
const configMock = {
getEnv: () => ({ database: { config: { connectionType: 'url', url: dbUrl }, skipMigrations: false } }),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;

sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
void 0 as never,
);

mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex()('command', 0, 'data', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
mocks.database.getPostgresVersion.mockResolvedValue('14.10');

await sut.handleBackupDatabase();

expect(mocks.process.spawnDuplexStream).toHaveBeenCalled();
const call = mocks.process.spawnDuplexStream.mock.calls[0];
const args = call[1] as string[];
expect(args).toMatchInlineSnapshot(`
[
"postgresql://postgres:pwd@host:5432/immich?sslmode=require",
"--clean",
"--if-exists",
]
`);
});

it('should run a database backup successfully', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.Success);
expect(mocks.storage.createWriteStream).toHaveBeenCalled();
});

it('should rename file on success', async () => {
const result = await sut.handleBackupDatabase();
expect(result).toBe(JobStatus.Success);
expect(mocks.storage.rename).toHaveBeenCalled();
});

it('should fail if pg_dump fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
});

it('should not rename file if pg_dump fails and gzip succeeds', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
expect(mocks.storage.rename).not.toHaveBeenCalled();
});

it('should fail if gzip fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('gzip', 1, '', 'error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
});

it('should fail if write stream fails', async () => {
mocks.storage.createWriteStream.mockImplementation(() => {
throw new Error('error');
});
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
});

it('should fail if rename fails', async () => {
mocks.storage.rename.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('error');
});

it('should ignore unlink failing and still return failed job status', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 1, '', 'error'));
mocks.storage.unlink.mockRejectedValue(new Error('error'));
await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
expect(mocks.storage.unlink).toHaveBeenCalled();
});

it.each`
postgresVersion | expectedVersion
${'14.10'} | ${14}
${'14.10.3'} | ${14}
${'14.10 (Debian 14.10-1.pgdg120+1)'} | ${14}
${'15.3.3'} | ${15}
${'16.4.2'} | ${16}
${'17.15.1'} | ${17}
${'18.0.0'} | ${18}
`(
`should use pg_dump $expectedVersion with postgres version $postgresVersion`,
async ({ postgresVersion, expectedVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
await sut.handleBackupDatabase();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledWith(
`/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
expect.any(Array),
expect.any(Object),
);
},
);
it.each`
postgresVersion
${'13.99.99'}
${'19.0.0'}
`(`should fail if postgres version $postgresVersion is not supported`, async ({ postgresVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
const result = await sut.handleBackupDatabase();
expect(mocks.process.spawn).not.toHaveBeenCalled();
expect(result).toBe(JobStatus.Failed);
});
});

describe('buildPostgresLaunchArguments', () => {
describe('default config', () => {
it('should generate pg_dump arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('pg_dump')).resolves.toMatchInlineSnapshot(`
{
"args": [
"--username",
"postgres",
"--host",
"database",
"--port",
"5432",
"immich",
"--clean",
"--if-exists",
],
"bin": "/usr/lib/postgresql/14/bin/pg_dump",
"databaseMajorVersion": 14,
"databasePassword": "postgres",
"databaseUsername": "postgres",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});

it('should generate psql arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('psql')).resolves.toMatchInlineSnapshot(`
{
"args": [
"--username",
"postgres",
"--host",
"database",
"--port",
"5432",
"--dbname",
"immich",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "postgres",
"databaseUsername": "postgres",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});

it('should generate psql (single transaction) arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('psql', { singleTransaction: true })).resolves
.toMatchInlineSnapshot(`
{
"args": [
"--username",
"postgres",
"--host",
"database",
"--port",
"5432",
"--dbname",
"immich",
"--single-transaction",
"--set",
"ON_ERROR_STOP=on",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "postgres",
"databaseUsername": "postgres",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
});

describe('using custom parts', () => {
beforeEach(() => {
const configMock = {
getEnv: () => ({
database: {
config: {
connectionType: 'parts',
host: 'myhost',
port: 1234,
username: 'mypg',
password: 'mypwd',
database: 'myimmich',
},
skipMigrations: false,
},
}),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;

sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
void 0 as never,
);
});

it('should generate pg_dump arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('pg_dump')).resolves.toMatchInlineSnapshot(`
{
"args": [
"--username",
"mypg",
"--host",
"myhost",
"--port",
"1234",
"myimmich",
"--clean",
"--if-exists",
],
"bin": "/usr/lib/postgresql/14/bin/pg_dump",
"databaseMajorVersion": 14,
"databasePassword": "mypwd",
"databaseUsername": "mypg",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});

it('should generate psql (single transaction) arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('psql', { singleTransaction: true })).resolves
.toMatchInlineSnapshot(`
{
"args": [
"--username",
"mypg",
"--host",
"myhost",
"--port",
"1234",
"--dbname",
"myimmich",
"--single-transaction",
"--set",
"ON_ERROR_STOP=on",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "mypwd",
"databaseUsername": "mypg",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
});

describe('using URL', () => {
beforeEach(() => {
const dbUrl = 'postgresql://mypg:mypwd@myhost:1234/myimmich?sslmode=require&uselibpqcompat=true';
const configMock = {
getEnv: () => ({ database: { config: { connectionType: 'url', url: dbUrl }, skipMigrations: false } }),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;

sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
void 0 as never,
);
});

it('should generate pg_dump arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('pg_dump')).resolves.toMatchInlineSnapshot(`
{
"args": [
"postgresql://mypg:mypwd@myhost:1234/myimmich?sslmode=require",
"--clean",
"--if-exists",
],
"bin": "/usr/lib/postgresql/14/bin/pg_dump",
"databaseMajorVersion": 14,
"databasePassword": "mypwd",
"databaseUsername": "mypg",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});

it('should generate psql (single transaction) arguments', async () => {
await expect(sut.buildPostgresLaunchArguments('psql', { singleTransaction: true })).resolves
.toMatchInlineSnapshot(`
{
"args": [
"--dbname",
"postgresql://mypg:mypwd@myhost:1234/myimmich?sslmode=require",
"--single-transaction",
"--set",
"ON_ERROR_STOP=on",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "mypwd",
"databaseUsername": "mypg",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
});

describe('using bad URL', () => {
beforeEach(() => {
const dbUrl = 'post://gresql://mypg:myp@wd@myhos:t:1234/myimmich?sslmode=require&uselibpqcompat=true';
const configMock = {
getEnv: () => ({ database: { config: { connectionType: 'url', url: dbUrl }, skipMigrations: false } }),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;

sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
void 0 as never,
);
});

it('should fallback to reasonable defaults', async () => {
await expect(sut.buildPostgresLaunchArguments('psql')).resolves.toMatchInlineSnapshot(`
{
"args": [
"--dbname",
"post://gresql//mypg:myp@wd@myhos:t:1234/myimmich?sslmode=require",
"--echo-all",
"--output=/dev/null",
],
"bin": "/usr/lib/postgresql/14/bin/psql",
"databaseMajorVersion": 14,
"databasePassword": "",
"databaseUsername": "",
"databaseVersion": "14.10 (Debian 14.10-1.pgdg120+1)",
}
`);
});
});
});

describe('uploadBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should write file', async () => {
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
});
});

describe('downloadBackup', () => {
it('should reject invalid file names', () => {
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
});

it('should get backup path', () => {
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
expect.objectContaining({
path: '/data/backups/hello.sql.gz',
}),
);
});
});

describe('listBackups', () => {
it('should give us all backups', async () => {
mocks.storage.readdir.mockResolvedValue([
@@ -54,30 +625,233 @@ describe(MaintenanceService.name, () => {
});
});

describe('uploadBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
describe('restoreDatabaseBackup', () => {
beforeEach(() => {
mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.fork.mockImplementation(() => mockSpawn(0, 'Immich Server is listening', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
mocks.storage.createGzip.mockReturnValue(new PassThrough());
mocks.storage.createGunzip.mockReturnValue(new PassThrough());

const configMock = {
getEnv: () => ({
database: {
config: {
connectionType: 'parts',
host: 'myhost',
port: 1234,
username: 'mypg',
password: 'mypwd',
database: 'myimmich',
},
skipMigrations: false,
},
}),
getWorker: () => ImmichWorker.Api,
isDev: () => false,
} as unknown as any;

sut = new DatabaseBackupService(
mocks.logger as never,
mocks.storage as never,
configMock as never,
mocks.systemMetadata as never,
mocks.process,
mocks.database as never,
mocks.cron as never,
mocks.job as never,
maintenanceHealthRepositoryMock,
);
});

it('should write file', async () => {
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
});
});

describe('downloadBackup', () => {
it('should reject invalid file names', () => {
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
it('should fail to restore invalid backup', async () => {
await expect(sut.restoreDatabaseBackup('filename')).rejects.toThrowErrorMatchingInlineSnapshot(
`[Error: Invalid backup file format!]`,
);
});

it('should get backup path', () => {
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
it('should successfully restore a backup', async () => {
let writtenToPsql = '';

mocks.process.spawnDuplexStream.mockImplementationOnce(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementationOnce(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementationOnce(() => {
return mockDuplex((chunk) => (writtenToPsql += chunk))('command', 0, 'data', '');
});

const progress = vitest.fn();
await sut.restoreDatabaseBackup('development-filename.sql', progress);

expect(progress).toHaveBeenCalledWith('backup', 0.05);
expect(progress).toHaveBeenCalledWith('migrations', 0.9);

expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);

expect(mocks.process.spawnDuplexStream).toHaveBeenLastCalledWith(
expect.stringMatching('/bin/psql'),
[
'--username',
'mypg',
'--host',
'myhost',
'--port',
'1234',
'--dbname',
'myimmich',
'--single-transaction',
'--set',
'ON_ERROR_STOP=on',
'--echo-all',
'--output=/dev/null',
],
expect.objectContaining({
path: '/data/backups/hello.sql.gz',
env: expect.objectContaining({
PATH: expect.any(String),
PGPASSWORD: 'mypwd',
}),
}),
);

expect(writtenToPsql).toMatchInlineSnapshot(`
"
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
AND pid <> pg_backend_pid();

-- re-create the default schema
DROP SCHEMA public CASCADE;
CREATE SCHEMA public;

-- restore access to schema
GRANT ALL ON SCHEMA public TO "mypg";
GRANT ALL ON SCHEMA public TO public;
SELECT 1;"
`);
});

it('should generate pg_dumpall specific SQL instructions', async () => {
let writtenToPsql = '';

mocks.process.spawnDuplexStream.mockImplementationOnce(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementationOnce(() => mockDuplex()('command', 0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementationOnce(() => {
return mockDuplex((chunk) => (writtenToPsql += chunk))('command', 0, 'data', '');
});

const progress = vitest.fn();
await sut.restoreDatabaseBackup('development-v2.4.0-.sql', progress);

expect(progress).toHaveBeenCalledWith('backup', 0.05);
expect(progress).toHaveBeenCalledWith('migrations', 0.9);

expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);

expect(mocks.process.spawnDuplexStream).toHaveBeenLastCalledWith(
expect.stringMatching('/bin/psql'),
[
'--username',
'mypg',
'--host',
'myhost',
'--port',
'1234',
'--dbname',
'myimmich',
'--echo-all',
'--output=/dev/null',
],
expect.objectContaining({
env: expect.objectContaining({
PATH: expect.any(String),
PGPASSWORD: 'mypwd',
}),
}),
);

expect(writtenToPsql).toMatchInlineSnapshot(String.raw`
"
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
AND pid <> pg_backend_pid();

\c postgres
SELECT 1;"
`);
});

it('should fail if backup creation fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex()('pg_dump', 1, '', 'error'));

const progress = vitest.fn();
await expect(sut.restoreDatabaseBackup('development-filename.sql', progress)).rejects
.toThrowErrorMatchingInlineSnapshot(`
[Error: pg_dump non-zero exit code (1)
error]
`);

expect(progress).toHaveBeenCalledWith('backup', 0.05);
});

it('should fail if restore itself fails', async () => {
mocks.process.spawnDuplexStream
.mockReturnValueOnce(mockDuplex()('pg_dump', 0, 'data', ''))
.mockReturnValueOnce(mockDuplex()('gzip', 0, 'data', ''))
.mockReturnValueOnce(mockDuplex()('psql', 1, '', 'error'));

const progress = vitest.fn();
await expect(sut.restoreDatabaseBackup('development-filename.sql', progress)).rejects
.toThrowErrorMatchingInlineSnapshot(`
[Error: psql non-zero exit code (1)
error]
`);

expect(progress).toHaveBeenCalledWith('backup', 0.05);
});

it('should rollback if database migrations fail', async () => {
mocks.database.runMigrations.mockRejectedValue(new Error('Migrations Error'));

const progress = vitest.fn();
await expect(
sut.restoreDatabaseBackup('development-filename.sql', progress),
).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Migrations Error]`);

expect(progress).toHaveBeenCalledWith('backup', 0.05);
expect(progress).toHaveBeenCalledWith('migrations', 0.9);

expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalledTimes(0);
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
});

it('should rollback if API healthcheck fails', async () => {
maintenanceHealthRepositoryMock.checkApiHealth.mockRejectedValue(new Error('Health Error'));

const progress = vitest.fn();
await expect(
sut.restoreDatabaseBackup('development-filename.sql', progress),
).rejects.toThrowErrorMatchingInlineSnapshot(`[Error: Health Error]`);

expect(progress).toHaveBeenCalledWith('backup', 0.05);
expect(progress).toHaveBeenCalledWith('migrations', 0.9);
expect(progress).toHaveBeenCalledWith('rollback', 0);

expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
});
});
});

function* mockData() {
yield 'SELECT 1;';
}

@@ -1,43 +1,560 @@
import { Injectable } from '@nestjs/common';
import { BadRequestException, Injectable, Optional } from '@nestjs/common';
import { debounce } from 'lodash';
import { DateTime } from 'luxon';
import path, { basename } from 'node:path';
import { PassThrough, Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
import { BaseService } from 'src/services/base.service';
import { CacheControl, DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CronRepository } from 'src/repositories/cron.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { ArgOf } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { getConfig } from 'src/utils/config';
import {
deleteDatabaseBackup,
downloadDatabaseBackup,
listDatabaseBackups,
uploadDatabaseBackup,
findDatabaseBackupVersion,
isFailedDatabaseBackupName,
isValidDatabaseBackupName,
isValidDatabaseRoutineBackupName,
UnsupportedPostgresError,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';
import { handlePromiseError } from 'src/utils/misc';

/**
 * This service is available outside of maintenance mode to manage maintenance mode
 */
@Injectable()
export class DatabaseBackupService extends BaseService {
async listBackups(): Promise<DatabaseBackupListResponseDto> {
const backups = await listDatabaseBackups(this.backupRepos);
return { backups };
export class DatabaseBackupService {
constructor(
private readonly logger: LoggingRepository,
private readonly storageRepository: StorageRepository,
private readonly configRepository: ConfigRepository,
private readonly systemMetadataRepository: SystemMetadataRepository,
private readonly processRepository: ProcessRepository,
private readonly databaseRepository: DatabaseRepository,
@Optional()
private readonly cronRepository: CronRepository,
@Optional()
private readonly jobRepository: JobRepository,
@Optional()
private readonly maintenanceHealthRepository: MaintenanceHealthRepository,
) {
this.logger.setContext(this.constructor.name);
}

deleteBackup(files: string[]): Promise<void> {
return deleteDatabaseBackup(this.backupRepos, files);
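// tracks whether this instance holds the BackupDatabase lock; only the lock
// holder registers and updates the backup cron job, so concurrent workers do
// not schedule duplicate backups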
private backupLock = false;

@OnEvent({ name: 'ConfigInit', workers: [ImmichWorker.Microservices] })
async onConfigInit({
newConfig: {
backup: { database },
},
}: ArgOf<'ConfigInit'>) {
if (!this.cronRepository || !this.jobRepository) {
return;
}

this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);

if (this.backupLock) {
this.cronRepository.create({
name: 'backupDatabase',
expression: database.cronExpression,
onTick: () => handlePromiseError(this.jobRepository.queue({ name: JobName.DatabaseBackup }), this.logger),
start: database.enabled,
});
}
}

@OnEvent({ name: 'ConfigUpdate', server: true })
onConfigUpdate({ newConfig: { backup } }: ArgOf<'ConfigUpdate'>) {
if (!this.cronRepository || !this.jobRepository || !this.backupLock) {
return;
}

this.cronRepository.update({
name: 'backupDatabase',
expression: backup.database.cronExpression,
start: backup.database.enabled,
});
}

@OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
async handleBackupDatabase(): Promise<JobStatus> {
try {
await this.createDatabaseBackup();
} catch (error) {
if (error instanceof UnsupportedPostgresError) {
return JobStatus.Failed;
}

throw error;
}

await this.cleanupDatabaseBackups();
return JobStatus.Success;
}

async buildPostgresLaunchArguments(
bin: 'pg_dump' | 'pg_dumpall' | 'psql',
options: {
singleTransaction?: boolean;
} = {},
): Promise<{
bin: string;
args: string[];
databaseUsername: string;
databasePassword: string;
databaseVersion: string;
databaseMajorVersion?: number;
}> {
const {
database: { config: databaseConfig },
} = this.configRepository.getEnv();
const isUrlConnection = databaseConfig.connectionType === 'url';

const databaseVersion = await this.databaseRepository.getPostgresVersion();
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;

const args: string[] = [];
let databaseUsername;

if (isUrlConnection) {
if (bin !== 'pg_dump') {
args.push('--dbname');
}

let url = databaseConfig.url;
if (URL.canParse(databaseConfig.url)) {
const parsedUrl = new URL(databaseConfig.url);
// remove known bad parameters
parsedUrl.searchParams.delete('uselibpqcompat');
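// (assumption: `uselibpqcompat` is understood by the Node database driver but
// not by the PostgreSQL CLI tools, so it is stripped before the URL is handed
// to pg_dump/psql)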

databaseUsername = parsedUrl.username;
url = parsedUrl.toString();
}

// assume typical values if we can't parse URL or not present
databaseUsername ??= 'postgres';

args.push(url);
} else {
databaseUsername = databaseConfig.username;

args.push(
'--username',
databaseUsername,
'--host',
databaseConfig.host,
'--port',
databaseConfig.port.toString(),
);

switch (bin) {
case 'pg_dumpall': {
args.push('--database');
break;
}
case 'psql': {
args.push('--dbname');
break;
}
}

args.push(databaseConfig.database);
}

switch (bin) {
case 'pg_dump':
case 'pg_dumpall': {
args.push('--clean', '--if-exists');
break;
}
case 'psql': {
if (options.singleTransaction) {
args.push(
// don't commit any transaction on failure
'--single-transaction',
// exit with non-zero code on error
'--set',
'ON_ERROR_STOP=on',
);
}

args.push(
// used for progress monitoring
'--echo-all',
'--output=/dev/null',
);
break;
}
}

if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
this.logger.error(`Database Restore Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
throw new UnsupportedPostgresError(databaseVersion);
}

return {
bin: `/usr/lib/postgresql/${databaseMajorVersion}/bin/${bin}`,
args,
databaseUsername,
databasePassword: isUrlConnection ? new URL(databaseConfig.url).password : databaseConfig.password,
databaseVersion,
databaseMajorVersion,
};
}

async createDatabaseBackup(filenamePrefix: string = ''): Promise<string> {
this.logger.debug(`Database Backup Started`);

const { bin, args, databasePassword, databaseVersion, databaseMajorVersion } =
await this.buildPostgresLaunchArguments('pg_dump');

this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

const filename = `${filenamePrefix}immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz`;
const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
const temporaryFilePath = `${backupFilePath}.tmp`;
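// the dump is written to a `.tmp` file and renamed only after the pipeline
// succeeds; interrupted runs leave the `.tmp` file behind, which
// cleanupDatabaseBackups() treats as a failed backup and deletes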

try {
const pgdump = this.processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});

const gzip = this.processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
const fileStream = this.storageRepository.createWriteStream(temporaryFilePath);

await pipeline(pgdump, gzip, fileStream);
await this.storageRepository.rename(temporaryFilePath, backupFilePath);
} catch (error) {
this.logger.error(`Database Backup Failure: ${error}`);
await this.storageRepository
.unlink(temporaryFilePath)
.catch((error) => this.logger.error(`Failed to delete failed backup file: ${error}`));
throw error;
}

this.logger.log(`Database Backup Success`);
return backupFilePath;
}

async uploadBackup(file: Express.Multer.File): Promise<void> {
return uploadDatabaseBackup(this.backupRepos, file);
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const fn = basename(file.originalname);
if (!isValidDatabaseBackupName(fn)) {
throw new BadRequestException('Invalid backup name!');
}

const filePath = path.join(backupsFolder, `uploaded-${fn}`);
await this.storageRepository.createOrOverwriteFile(filePath, file.buffer);
}

downloadBackup(fileName: string): ImmichFileResponse {
return downloadDatabaseBackup(fileName);
}
if (!isValidDatabaseBackupName(fileName)) {
throw new BadRequestException('Invalid backup name!');
}

const filePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), fileName);

private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
path: filePath,
fileName,
cacheControl: CacheControl.PrivateWithoutCache,
contentType: fileName.endsWith('.gz') ? 'application/gzip' : 'application/sql',
};
}

async listBackups(): Promise<DatabaseBackupListResponseDto> {
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);

const validFiles = files
.filter((fn) => isValidDatabaseBackupName(fn))
.toSorted((a, b) => (a.startsWith('uploaded-') === b.startsWith('uploaded-') ? a.localeCompare(b) : 1))
.toReversed();

const backups = await Promise.all(
validFiles.map(async (filename) => {
const stats = await this.storageRepository.stat(path.join(backupsFolder, filename));
return { filename, filesize: stats.size };
}),
);

return {
backups,
};
}

async deleteBackup(files: string[]): Promise<void> {
const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);

if (files.some((filename) => !isValidDatabaseBackupName(filename))) {
throw new BadRequestException('Invalid backup name!');
}

await Promise.all(files.map((filename) => this.storageRepository.unlink(path.join(backupsFolder, filename))));
}

async cleanupDatabaseBackups() {
this.logger.debug(`Database Backup Cleanup Started`);
const {
backup: { database: config },
} = await getConfig(
{
configRepo: this.configRepository,
metadataRepo: this.systemMetadataRepository,
logger: this.logger,
},
{
withCache: false,
},
);

const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);
const backups = files
.filter((filename) => isValidDatabaseRoutineBackupName(filename))
.toSorted()
.toReversed();
const failedBackups = files.filter((filename) => isFailedDatabaseBackupName(filename));

const toDelete = backups.slice(config.keepLastAmount);
toDelete.push(...failedBackups);

for (const file of toDelete) {
await this.storageRepository.unlink(path.join(backupsFolder, file));
}

this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
}

async restoreDatabaseBackup(
filename: string,
progressCb?: (action: 'backup' | 'restore' | 'migrations' | 'rollback', progress: number) => void,
): Promise<void> {
this.logger.debug(`Database Restore Started`);

let complete = false;
try {
if (!isValidDatabaseBackupName(filename)) {
throw new Error('Invalid backup file format!');
}

const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
await this.storageRepository.stat(backupFilePath); // => check file exists

let isPgClusterDump = false;
const version = findDatabaseBackupVersion(filename);
if (version && semver.satisfies(version, '<= 2.4')) {
isPgClusterDump = true;
}

const { bin, args, databaseUsername, databasePassword, databaseMajorVersion } =
await this.buildPostgresLaunchArguments('psql', {
singleTransaction: !isPgClusterDump,
});

progressCb?.('backup', 0.05);

const restorePointFilePath = await this.createDatabaseBackup('restore-point-');
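// a safety backup taken immediately before the restore; if migrations or the
// API health check fail afterwards, this file is replayed to roll back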

this.logger.log(`Database Restore Starting. Database Version: ${databaseMajorVersion}`);

let inputStream: Readable;
if (backupFilePath.endsWith('.gz')) {
const fileStream = this.storageRepository.createPlainReadStream(backupFilePath);
const gunzip = this.storageRepository.createGunzip();
fileStream.pipe(gunzip);
inputStream = gunzip;
} else {
inputStream = this.storageRepository.createPlainReadStream(backupFilePath);
}

const sqlStream = Readable.from(sql(inputStream, databaseUsername, isPgClusterDump));
const psql = this.processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});

const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
if (complete) {
return;
}

this.logger.log(`Restore progress ~ ${(progress * 100).toFixed(2)}%`);
progressCb?.('restore', progress);
});

await pipeline(sqlStream, progressSource, psql, progressSink);

try {
progressCb?.('migrations', 0.9);
await this.databaseRepository.runMigrations();
await this.maintenanceHealthRepository.checkApiHealth();
} catch (error) {
progressCb?.('rollback', 0);

const fileStream = this.storageRepository.createPlainReadStream(restorePointFilePath);
const gunzip = this.storageRepository.createGunzip();
fileStream.pipe(gunzip);
inputStream = gunzip;

const sqlStream = Readable.from(sqlRollback(inputStream, databaseUsername));
const psql = this.processRepository.spawnDuplexStream(bin, args, {
env: {
PATH: process.env.PATH,
PGPASSWORD: databasePassword,
},
});

const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
if (complete) {
return;
}

this.logger.log(`Rollback progress ~ ${(progress * 100).toFixed(2)}%`);
progressCb?.('rollback', progress);
});

await pipeline(sqlStream, progressSource, psql, progressSink);

throw error;
}
} catch (error) {
this.logger.error(`Database Restore Failure: ${error}`);
throw error;
} finally {
complete = true;
}

this.logger.log(`Database Restore Success`);
}
}

const SQL_DROP_CONNECTIONS = `
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
AND pid <> pg_backend_pid();
`;

const SQL_RESET_SCHEMA = (username: string) => `
-- re-create the default schema
DROP SCHEMA public CASCADE;
CREATE SCHEMA public;

-- restore access to schema
GRANT ALL ON SCHEMA public TO "${username}";
GRANT ALL ON SCHEMA public TO public;
`;
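// dropping and re-creating the `public` schema gives the restore a clean slate
// without dropping the database itself, which Postgres cannot do from the
// session that is connected to it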

async function* sql(inputStream: Readable, databaseUsername: string, isPgClusterDump: boolean) {
yield SQL_DROP_CONNECTIONS;
yield isPgClusterDump
? // it is likely the dump contains SQL to try to drop the currently active
// database to ensure we have a fresh slate; if the `postgres` database exists
// then prefer to switch before continuing otherwise this will just silently fail
String.raw`
\c postgres
`
: SQL_RESET_SCHEMA(databaseUsername);

for await (const chunk of inputStream) {
yield chunk;
}
}

async function* sqlRollback(inputStream: Readable, databaseUsername: string) {
yield SQL_DROP_CONNECTIONS;
yield SQL_RESET_SCHEMA(databaseUsername);

for await (const chunk of inputStream) {
yield chunk;
}
}

function createSqlProgressStreams(cb: (progress: number) => void) {
const STDIN_START_MARKER = new TextEncoder().encode('FROM stdin');
const STDIN_END_MARKER = new TextEncoder().encode(String.raw`\.`);
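// pg_dump emits bulk table data as `COPY ... FROM stdin` blocks terminated by a
// lone `\.`; rows inside those blocks are data, not SQL statements, so they are
// excluded from the line counts used to estimate progress below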
|
||||
|
||||
let readingStdin = false;
|
||||
let sequenceIdx = 0;
|
||||
|
||||
let linesSent = 0;
|
||||
let linesProcessed = 0;
|
||||
|
||||
const startedAt = +Date.now();
|
||||
const cbDebounced = debounce(
|
||||
() => {
|
||||
const progress = source.writableEnded
|
||||
? Math.min(1, linesProcessed / linesSent)
|
||||
: // progress simulation while we're in an indeterminate state
|
||||
Math.min(0.3, 0.1 + (Date.now() - startedAt) / 1e4);
|
||||
cb(progress);
|
||||
},
|
||||
100,
|
||||
{
|
||||
maxWait: 100,
|
||||
},
|
||||
);
|
||||
|
||||
let lastByte = -1;
|
||||
const source = new PassThrough({
|
||||
transform(chunk, _encoding, callback) {
|
||||
for (const byte of chunk) {
|
||||
if (!readingStdin && byte === 10 && lastByte !== 10) {
|
||||
linesSent += 1;
|
||||
}
|
||||
|
||||
lastByte = byte;
|
||||
|
||||
const sequence = readingStdin ? STDIN_END_MARKER : STDIN_START_MARKER;
|
||||
if (sequence[sequenceIdx] === byte) {
|
||||
sequenceIdx += 1;
|
||||
|
||||
if (sequence.length === sequenceIdx) {
|
||||
sequenceIdx = 0;
|
||||
readingStdin = !readingStdin;
|
||||
}
|
||||
} else {
|
||||
sequenceIdx = 0;
|
||||
}
|
||||
}
|
||||
|
||||
cbDebounced();
|
||||
this.push(chunk);
|
||||
callback();
|
||||
},
|
||||
});
|
||||
|
||||
const sink = new Writable({
|
||||
write(chunk, _encoding, callback) {
|
||||
for (const byte of chunk) {
|
||||
if (byte === 10) {
|
||||
linesProcessed++;
|
||||
}
|
||||
}
|
||||
|
||||
cbDebounced();
|
||||
callback();
|
||||
},
|
||||
});
|
||||
|
||||
return [source, sink];
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { BadRequestException } from '@nestjs/common';
|
||||
import { Readable } from 'node:stream';
|
||||
import { DownloadResponseDto } from 'src/dtos/download.dto';
|
||||
import { DownloadService } from 'src/services/download.service';
|
||||
import { AssetFactory } from 'test/factories/asset.factory';
|
||||
import { assetStub } from 'test/fixtures/asset.stub';
|
||||
import { authStub } from 'test/fixtures/auth.stub';
|
||||
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
|
||||
@@ -60,22 +61,22 @@ describe(DownloadService.name, () => {
|
||||
stream: new Readable(),
|
||||
};
|
||||
|
||||
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
|
||||
const asset1 = AssetFactory.create();
|
||||
const asset2 = AssetFactory.create();
|
||||
|
||||
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([asset1.id, asset2.id]));
|
||||
mocks.storage.realpath.mockRejectedValue(new Error('Could not read file'));
|
||||
mocks.asset.getByIds.mockResolvedValue([
|
||||
{ ...assetStub.noResizePath, id: 'asset-1' },
|
||||
{ ...assetStub.noWebpPath, id: 'asset-2' },
|
||||
]);
|
||||
mocks.asset.getByIds.mockResolvedValue([asset1, asset2]);
|
||||
mocks.storage.createZipStream.mockReturnValue(archiveMock);
|
||||
|
||||
await expect(sut.downloadArchive(authStub.admin, { assetIds: ['asset-1', 'asset-2'] })).resolves.toEqual({
|
||||
await expect(sut.downloadArchive(authStub.admin, { assetIds: [asset1.id, asset2.id] })).resolves.toEqual({
|
||||
stream: archiveMock.stream,
|
||||
});
|
||||
|
||||
expect(mocks.logger.warn).toHaveBeenCalledTimes(2);
|
||||
expect(archiveMock.addFile).toHaveBeenCalledTimes(2);
|
||||
expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, '/data/library/IMG_123.jpg', 'IMG_123.jpg');
|
||||
expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, '/data/library/IMG_456.jpg', 'IMG_456.jpg');
|
||||
expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, asset1.originalPath, asset1.originalFileName);
|
||||
expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, asset2.originalPath, asset2.originalFileName);
|
||||
});
|
||||
|
||||
it('should download an archive', async () => {
|
||||
@@ -85,20 +86,20 @@ describe(DownloadService.name, () => {
|
||||
stream: new Readable(),
|
||||
};
|
||||
|
||||
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2']));
|
||||
mocks.asset.getByIds.mockResolvedValue([
|
||||
{ ...assetStub.noResizePath, id: 'asset-1' },
|
||||
{ ...assetStub.noWebpPath, id: 'asset-2' },
|
||||
]);
|
||||
const asset1 = AssetFactory.create();
|
||||
const asset2 = AssetFactory.create();
|
||||
|
||||
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([asset1.id, asset2.id]));
|
||||
mocks.asset.getByIds.mockResolvedValue([asset1, asset2]);
|
||||
mocks.storage.createZipStream.mockReturnValue(archiveMock);
|
||||
|
||||
await expect(sut.downloadArchive(authStub.admin, { assetIds: ['asset-1', 'asset-2'] })).resolves.toEqual({
|
||||
await expect(sut.downloadArchive(authStub.admin, { assetIds: [asset1.id, asset2.id] })).resolves.toEqual({
|
||||
stream: archiveMock.stream,
|
||||
});
|
||||
|
||||
expect(archiveMock.addFile).toHaveBeenCalledTimes(2);
|
||||
expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, '/data/library/IMG_123.jpg', 'IMG_123.jpg');
|
||||
expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, '/data/library/IMG_456.jpg', 'IMG_456.jpg');
|
||||
expect(archiveMock.addFile).toHaveBeenNthCalledWith(1, asset1.originalPath, asset1.originalFileName);
|
||||
expect(archiveMock.addFile).toHaveBeenNthCalledWith(2, asset2.originalPath, asset2.originalFileName);
|
||||
});
|
||||
|
||||
it('should handle duplicate file names', async () => {
|
||||
|
||||
@@ -7,7 +7,6 @@ import { AssetService } from 'src/services/asset.service';
|
||||
import { AuditService } from 'src/services/audit.service';
|
||||
import { AuthAdminService } from 'src/services/auth-admin.service';
|
||||
import { AuthService } from 'src/services/auth.service';
|
||||
import { BackupService } from 'src/services/backup.service';
|
||||
import { CliService } from 'src/services/cli.service';
|
||||
import { DatabaseBackupService } from 'src/services/database-backup.service';
|
||||
import { DatabaseService } from 'src/services/database.service';
|
||||
@@ -59,7 +58,6 @@ export const services = [
|
||||
AuditService,
|
||||
AuthService,
|
||||
AuthAdminService,
|
||||
BackupService,
|
||||
CliService,
|
||||
DatabaseBackupService,
|
||||
DatabaseService,
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { MapService } from 'src/services/map.service';
|
||||
import { albumStub } from 'test/fixtures/album.stub';
|
||||
import { AlbumFactory } from 'test/factories/album.factory';
|
||||
import { assetStub } from 'test/fixtures/asset.stub';
|
||||
import { authStub } from 'test/fixtures/auth.stub';
|
||||
import { userStub } from 'test/fixtures/user.stub';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { newTestService, ServiceMocks } from 'test/utils';
|
||||
|
||||
@@ -72,8 +73,8 @@ describe(MapService.name, () => {
|
||||
};
|
||||
mocks.partner.getAll.mockResolvedValue([]);
|
||||
mocks.map.getMapMarkers.mockResolvedValue([marker]);
|
||||
mocks.album.getOwned.mockResolvedValue([albumStub.empty]);
|
||||
mocks.album.getShared.mockResolvedValue([albumStub.sharedWithUser]);
|
||||
mocks.album.getOwned.mockResolvedValue([AlbumFactory.create()]);
|
||||
mocks.album.getShared.mockResolvedValue([AlbumFactory.from().albumUser({ userId: userStub.user1.id }).build()]);
|
||||
|
||||
const markers = await sut.getMapMarkers(authStub.user1, { withSharedAlbums: true });
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
} from 'src/enum';
|
||||
import { MediaService } from 'src/services/media.service';
|
||||
import { JobCounts, RawImageInfo } from 'src/types';
|
||||
import { AssetFactory } from 'test/factories/asset.factory';
|
||||
import { assetStub, previewFile } from 'test/fixtures/asset.stub';
|
||||
import { faceStub } from 'test/fixtures/face.stub';
|
||||
import { probeStub } from 'test/fixtures/media.stub';
|
||||
@@ -26,11 +27,6 @@ import { systemConfigStub } from 'test/fixtures/system-config.stub';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
|
||||
|
||||
const filesNoFullsize = [
|
||||
factory.assetFile({ type: AssetFileType.Preview }),
|
||||
factory.assetFile({ type: AssetFileType.Thumbnail }),
|
||||
];
|
||||
|
||||
const fullsizeBuffer = Buffer.from('embedded image data');
|
||||
const rawBuffer = Buffer.from('raw image data');
|
||||
const extractedBuffer = Buffer.from('embedded image file');
|
||||
@@ -139,33 +135,30 @@ describe(MediaService.name, () => {
expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
});

it('should queue all assets with missing webp path', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.noWebpPath]));
it('should queue all assets with missing preview', async () => {
const asset = AssetFactory.create();
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([asset]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false });

expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith({ force: false, fullsizeEnabled: false });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.AssetGenerateThumbnails,
data: { id: assetStub.image.id },
},
{ name: JobName.AssetGenerateThumbnails, data: { id: asset.id } },
]);

expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
});

it('should queue all assets with missing thumbhash', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.noThumbhash]));
const asset = AssetFactory.from({ thumbhash: null })
.files([AssetFileType.Thumbnail, AssetFileType.Preview])
.build();
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([asset]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false });

expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith({ force: false, fullsizeEnabled: false });
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.AssetGenerateThumbnails,
data: { id: assetStub.image.id },
},
{ name: JobName.AssetGenerateThumbnails, data: { id: asset.id } },
]);

expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
@@ -173,7 +166,7 @@ describe(MediaService.name, () => {

it('should queue all assets with missing fullsize when feature is enabled', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
const asset = { id: factory.uuid(), thumbhash: factory.buffer(), edits: [], files: filesNoFullsize };
const asset = { id: factory.uuid(), isEdited: false };
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([asset]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false });
@@ -191,7 +184,7 @@ describe(MediaService.name, () => {

it('should not queue assets with missing fullsize when feature is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: false } } });
const asset = { id: factory.uuid(), thumbhash: factory.buffer(), edits: [], files: filesNoFullsize };
const asset = { id: factory.uuid(), isEdited: false };
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([asset]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false });
@@ -232,7 +225,7 @@ describe(MediaService.name, () => {

it('should queue assets with missing fullsize when force is true, regardless of setting', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: false } } });
const asset = { id: factory.uuid(), thumbhash: Buffer.from('thumbhash'), edits: [], files: filesNoFullsize };
const asset = { id: factory.uuid(), isEdited: false };
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([asset]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: true });
@@ -1052,12 +1045,19 @@ describe(MediaService.name, () => {
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF images taken by cameras are not web-friendly and have only limited support in Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
bitsPerSample: 14,
})
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);

await sut.handleGenerateThumbnails({ id: assetStub.image.id });
await sut.handleGenerateThumbnails({ id: asset.id });

expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.imageHif.originalPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
});
@@ -1107,12 +1107,19 @@ describe(MediaService.name, () => {
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.media.copyTagGroup.mockResolvedValue(true);

mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.panoramaTif);
const asset = AssetFactory.from({ originalFileName: 'panorama.tif' })
.exif({
fileSizeInByte: 5000,
projectionType: 'EQUIRECTANGULAR',
})
.build();

mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);

await sut.handleGenerateThumbnails({ id: assetStub.image.id });

expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.panoramaTif.originalPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
colorspace: Colorspace.Srgb,
orientation: undefined,
processInvalidImages: false,
@@ -1135,11 +1142,7 @@ describe(MediaService.name, () => {
);

expect(mocks.media.copyTagGroup).toHaveBeenCalledTimes(2);
expect(mocks.media.copyTagGroup).toHaveBeenCalledWith(
'XMP-GPano',
assetStub.panoramaTif.originalPath,
expect.any(String),
);
expect(mocks.media.copyTagGroup).toHaveBeenCalledWith('XMP-GPano', asset.originalPath, expect.any(String));
});

it('should respect encoding options when generating full-size preview', async () => {
@@ -1149,12 +1152,19 @@ describe(MediaService.name, () => {
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
// HEIF/HIF images taken by cameras are not web-friendly and have only limited support in Safari.
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
bitsPerSample: 14,
})
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);

await sut.handleGenerateThumbnails({ id: assetStub.image.id });

expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.imageHif.originalPath, {
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
colorspace: Colorspace.P3,
processInvalidImages: false,
});
@@ -1181,9 +1191,16 @@ describe(MediaService.name, () => {
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.imageHif);
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
fileSizeInByte: 5000,
profileDescription: 'Adobe RGB',
bitsPerSample: 14,
})
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);

await sut.handleGenerateThumbnails({ id: assetStub.image.id });
await sut.handleGenerateThumbnails({ id: asset.id });

expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
@@ -1263,30 +1280,25 @@ describe(MediaService.name, () => {
});

it('should clean up edited files if an asset has no edits', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withoutEdits,
});
const asset = AssetFactory.from({ thumbhash: factory.buffer() })
.exif()
.files([
{ type: AssetFileType.Preview, path: 'edited1.jpg', isEdited: true },
{ type: AssetFileType.Thumbnail, path: 'edited2.jpg', isEdited: true },
{ type: AssetFileType.FullSize, path: 'edited3.jpg', isEdited: true },
])
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);

const status = await sut.handleAssetEditThumbnailGeneration({ id: asset.id });

const status = await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: {
files: expect.arrayContaining([
'/uploads/user-id/fullsize/path_edited.jpg',
'/uploads/user-id/preview/path_edited.jpg',
'/uploads/user-id/thumbnail/path_edited.jpg',
]),
files: expect.arrayContaining(['edited1.jpg', 'edited2.jpg', 'edited3.jpg']),
},
});

expect(mocks.asset.deleteFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ path: '/uploads/user-id/preview/path_edited.jpg' }),
expect.objectContaining({ path: '/uploads/user-id/thumbnail/path_edited.jpg' }),
expect.objectContaining({ path: '/uploads/user-id/fullsize/path_edited.jpg' }),
]),
);

expect(status).toBe(JobStatus.Success);
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
@@ -1320,11 +1332,9 @@ describe(MediaService.name, () => {
});

it('should generate the original thumbhash if no edits exist', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withoutEdits,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
const asset = AssetFactory.from().exif().build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
mocks.media.generateThumbhash.mockResolvedValue(factory.buffer());

await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id, source: 'upload' });

@@ -1335,18 +1345,14 @@ describe(MediaService.name, () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
const thumbhashBuffer = factory.buffer();
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);

await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });

expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
thumbhash: thumbhashBuffer,
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith(expect.objectContaining({ thumbhash: thumbhashBuffer }));
});
});
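(Note: the specs above replace shared stubs with a fluent test-data builder. Its implementation is not part of this diff; the following is a minimal sketch, inferred purely from the call sites — AssetFactory.create(), AssetFactory.from({ ... }).exif({ ... }).files([ ... ]).build() — with all defaults and field names assumed.)

// Hypothetical sketch of the builder these specs use; not the real factory.
import { randomUUID } from 'node:crypto';

type AnyRecord = Record<string, unknown>;

class AssetBuilder {
  private readonly asset: AnyRecord;

  constructor(overrides: AnyRecord = {}) {
    // defaults let each test state only what it cares about
    this.asset = { id: randomUUID(), ownerId: randomUUID(), files: [], isEdited: false, ...overrides };
  }

  exif(overrides: AnyRecord = {}): this {
    this.asset.exifInfo = { ...overrides };
    return this;
  }

  files(files: unknown[]): this {
    this.asset.files = files;
    return this;
  }

  build(): AnyRecord {
    return this.asset;
  }
}

export const AssetFactory = {
  from: (overrides?: AnyRecord) => new AssetBuilder(overrides),
  create: (overrides?: AnyRecord) => new AssetBuilder(overrides).build(),
};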
@@ -39,7 +39,7 @@ import {
VideoInterfaces,
VideoStreamInfo,
} from 'src/types';
import { getAssetFiles, getDimensions } from 'src/utils/asset.util';
import { getDimensions } from 'src/utils/asset.util';
import { checkFaceVisibility, checkOcrVisibility } from 'src/utils/editor';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
@@ -78,17 +78,11 @@ export class MediaService extends BaseService {

const fullsizeEnabled = config.image.fullsize.enabled;
for await (const asset of this.assetJobRepository.streamForThumbnailJob({ force, fullsizeEnabled })) {
const { previewFile, thumbnailFile, fullsizeFile, editedPreviewFile, editedThumbnailFile, editedFullsizeFile } =
getAssetFiles(asset.files);

if (force || !previewFile || !thumbnailFile || !asset.thumbhash || (fullsizeEnabled && !fullsizeFile)) {
if (force || !asset.isEdited) {
jobs.push({ name: JobName.AssetGenerateThumbnails, data: { id: asset.id } });
}

if (
asset.edits.length > 0 &&
(force || !editedPreviewFile || !editedThumbnailFile || (fullsizeEnabled && !editedFullsizeFile))
) {
if (asset.isEdited) {
jobs.push({ name: JobName.AssetEditThumbnailGeneration, data: { id: asset.id } });
}
}
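(The refactor above collapses several per-file presence checks into a single isEdited flag, so the streamed row can carry a much smaller payload — the updated tests construct bare { id, isEdited } objects. A sketch of the resulting queueing rule, restated outside the service for clarity; the type name is invented.)

// Invented type name; the shape is taken from the updated tests above.
type ThumbnailJobRow = { id: string; isEdited: boolean };

function planThumbnailJobs(rows: ThumbnailJobRow[], force: boolean) {
  const jobs: Array<{ name: string; data: { id: string } }> = [];
  for (const row of rows) {
    // originals are (re)generated unless the asset has edits, or always when forced
    if (force || !row.isEdited) {
      jobs.push({ name: 'AssetGenerateThumbnails', data: { id: row.id } });
    }
    // edited assets additionally get the edit-thumbnail job
    if (row.isEdited) {
      jobs.push({ name: 'AssetEditThumbnailGeneration', data: { id: row.id } });
    }
  }
  return jobs;
}

// planThumbnailJobs([{ id: 'a', isEdited: false }, { id: 'b', isEdited: true }], false)
// => AssetGenerateThumbnails for 'a', AssetEditThumbnailGeneration for 'b'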
@@ -185,7 +179,7 @@ export class MediaService extends BaseService {

const generated = await this.generateEditedThumbnails(asset, config);
await this.syncFiles(
asset.files.filter((asset) => asset.isEdited),
asset.files.filter((file) => file.isEdited),
generated?.files ?? [],
);
@@ -16,6 +16,7 @@ import {
} from 'src/enum';
import { ImmichTags } from 'src/repositories/metadata.repository';
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
import { AssetFactory } from 'test/factories/asset.factory';
import { assetStub } from 'test/fixtures/asset.stub';
import { fileStub } from 'test/fixtures/file.stub';
import { probeStub } from 'test/fixtures/media.stub';
@@ -24,13 +25,6 @@ import { tagStub } from 'test/fixtures/tag.stub';
import { factory } from 'test/small.factory';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';

const removeNonSidecarFiles = (asset: any) => {
return {
...asset,
files: asset.files.filter((file: any) => file.type === AssetFileType.Sidecar),
};
};

const forSidecarJob = (
asset: {
id?: string;
@@ -182,17 +176,18 @@ describe(MetadataService.name, () => {
it('should handle a date in a sidecar file', async () => {
const originalDate = new Date('2023-11-21T16:13:17.517Z');
const sidecarDate = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
const asset = AssetFactory.from().file({ type: AssetFileType.Sidecar }).build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mockReadTags({ CreationDate: originalDate.toISOString() }, { CreationDate: sidecarDate.toISOString() });

await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.sidecar.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate }), {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
id: assetStub.image.id,
id: asset.id,
duration: null,
fileCreatedAt: sidecarDate,
localDateTime: sidecarDate,
@@ -203,7 +198,8 @@ describe(MetadataService.name, () => {
it('should take the file modification date when missing exif and earlier than creation date', async () => {
const fileCreatedAt = new Date('2022-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2021-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -212,14 +208,14 @@ describe(MetadataService.name, () => {
} as Stats);
mockReadTags();

await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ dateTimeOriginal: fileModifiedAt }),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
id: asset.id,
duration: null,
fileCreatedAt: fileModifiedAt,
fileModifiedAt,
@@ -232,7 +228,8 @@ describe(MetadataService.name, () => {
it('should take the file creation date when missing exif and earlier than modification date', async () => {
const fileCreatedAt = new Date('2021-01-01T00:00:00.000Z');
const fileModifiedAt = new Date('2022-01-01T00:00:00.000Z');
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: fileModifiedAt,
@@ -241,14 +238,14 @@ describe(MetadataService.name, () => {
} as Stats);
mockReadTags();

await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ dateTimeOriginal: fileCreatedAt }),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
id: asset.id,
duration: null,
fileCreatedAt,
fileModifiedAt,
@@ -260,10 +257,11 @@ describe(MetadataService.name, () => {

it('should determine dateTimeOriginal regardless of the server time zone', async () => {
process.env.TZ = 'America/Los_Angeles';
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.sidecar));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });

await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
dateTimeOriginal: new Date('2022-01-01T00:00:00.000Z'),
@@ -279,16 +277,15 @@ describe(MetadataService.name, () => {
});

it('should handle lists of numbers', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.image.fileModifiedAt,
mtimeMs: assetStub.image.fileModifiedAt.valueOf(),
birthtimeMs: assetStub.image.fileCreatedAt.valueOf(),
mtime: asset.fileModifiedAt,
mtimeMs: asset.fileModifiedAt.valueOf(),
birthtimeMs: asset.fileCreatedAt.valueOf(),
} as Stats);
mockReadTags({
ISO: [160],
});
mockReadTags({ ISO: [160] });

await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
@@ -296,11 +293,11 @@ describe(MetadataService.name, () => {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.image.id,
id: asset.id,
duration: null,
fileCreatedAt: assetStub.image.fileCreatedAt,
fileModifiedAt: assetStub.image.fileCreatedAt,
localDateTime: assetStub.image.fileCreatedAt,
fileCreatedAt: asset.fileCreatedAt,
fileModifiedAt: asset.fileCreatedAt,
localDateTime: asset.fileCreatedAt,
width: null,
height: null,
});
@@ -308,77 +305,77 @@ describe(MetadataService.name, () => {

it('should not delete latitude and longitude without reverse geocode', async () => {
// regression test for issue 17511
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.withLocation);
const asset = AssetFactory.from().exif().build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: false } });
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.withLocation.fileModifiedAt,
mtimeMs: assetStub.withLocation.fileModifiedAt.valueOf(),
birthtimeMs: assetStub.withLocation.fileCreatedAt.valueOf(),
mtime: asset.fileModifiedAt,
mtimeMs: asset.fileModifiedAt.valueOf(),
birthtimeMs: asset.fileCreatedAt.valueOf(),
} as Stats);
mockReadTags({
GPSLatitude: assetStub.withLocation.exifInfo!.latitude!,
GPSLongitude: assetStub.withLocation.exifInfo!.longitude!,
GPSLatitude: asset.exifInfo.latitude!,
GPSLongitude: asset.exifInfo.longitude!,
});

await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: null, state: null, country: null }),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.withLocation.id,
id: asset.id,
duration: null,
fileCreatedAt: assetStub.withLocation.fileCreatedAt,
fileModifiedAt: assetStub.withLocation.fileModifiedAt,
localDateTime: new Date('2023-02-22T05:06:29.716Z'),
fileCreatedAt: asset.fileCreatedAt,
fileModifiedAt: asset.fileModifiedAt,
localDateTime: asset.localDateTime,
width: null,
height: null,
});
});

it('should apply reverse geocoding', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
const asset = AssetFactory.from().exif({ latitude: 10, longitude: 20 }).build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.systemMetadata.get.mockResolvedValue({ reverseGeocoding: { enabled: true } });
mocks.map.reverseGeocode.mockResolvedValue({ city: 'City', state: 'State', country: 'Country' });
mocks.storage.stat.mockResolvedValue({
size: 123_456,
mtime: assetStub.withLocation.fileModifiedAt,
mtimeMs: assetStub.withLocation.fileModifiedAt.valueOf(),
birthtimeMs: assetStub.withLocation.fileCreatedAt.valueOf(),
mtime: asset.fileModifiedAt,
mtimeMs: asset.fileModifiedAt.valueOf(),
birthtimeMs: asset.fileCreatedAt.valueOf(),
} as Stats);
mockReadTags({
GPSLatitude: assetStub.withLocation.exifInfo!.latitude!,
GPSLongitude: assetStub.withLocation.exifInfo!.longitude!,
});
mockReadTags({ GPSLatitude: 10, GPSLongitude: 20 });

await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: assetStub.withLocation.id,
id: asset.id,
duration: null,
fileCreatedAt: assetStub.withLocation.fileCreatedAt,
fileModifiedAt: assetStub.withLocation.fileModifiedAt,
localDateTime: new Date('2023-02-22T05:06:29.716Z'),
fileCreatedAt: asset.fileCreatedAt,
fileModifiedAt: asset.fileModifiedAt,
localDateTime: asset.localDateTime,
width: null,
height: null,
});
});
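(A compact view of the GPS handling these tests pin down: coordinates are kept even when reverse geocoding is disabled, resolved to place names when it is enabled, and (0, 0) is treated as missing. A sketch of that guard, reconstructed from the test expectations rather than copied from the service:)

// Assumed logic, reconstructed from the expectations above; not the service source.
type Place = { city: string; state: string; country: string };
type ReverseGeocode = (lat: number, lon: number) => Promise<Place>;

async function resolveGps(lat: number | undefined, lon: number | undefined, enabled: boolean, reverseGeocode: ReverseGeocode) {
  // (0, 0) — "null island" — is treated as missing GPS data
  if (lat === undefined || lon === undefined || (lat === 0 && lon === 0)) {
    return { latitude: null, longitude: null, city: null, state: null, country: null };
  }
  if (!enabled) {
    // keep coordinates, but leave place names empty (regression: issue 17511)
    return { latitude: lat, longitude: lon, city: null, state: null, country: null };
  }
  return { latitude: lat, longitude: lon, ...(await reverseGeocode(lat, lon)) };
}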

it('should discard latitude and longitude on null island', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.withLocation));
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mockReadTags({
GPSLatitude: 0,
GPSLongitude: 0,
});

await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(assetStub.image.id);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ latitude: null, longitude: null }),
{ lockedPropertiesBehavior: 'skip' },
@@ -386,19 +383,25 @@ describe(MetadataService.name, () => {
});

it('should extract tags from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ TagsList: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });

expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
});

it('should extract hierarchy from TagsList', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent/Child'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent/Child'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -406,135 +409,147 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.image.id });

expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(1, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent',
parentId: undefined,
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(2, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent/Child',
parentId: 'tag-parent',
});
});

it('should extract tags from Keywords as a string', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: 'Parent' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });

expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
});

it('should extract tags from Keywords as a list', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: ['Parent'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });

expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
});

it('should extract tags from Keywords as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent', '2024'] }),
});
const asset = AssetFactory.from()
.exif({ tags: ['Parent', '2024'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });

expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: '2024', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: '2024', parent: undefined });
});

it('should extract hierarchal tags from Keywords', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({ ...factory.asset(), exifInfo: factory.exif({ tags: ['Parent/Child'] }) });
const asset = AssetFactory.from()
.exif({ tags: ['Parent/Child'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: 'Parent/Child' });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

await sut.handleMetadataExtraction({ id: assetStub.image.id });

await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(1, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent',
parentId: undefined,
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(2, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent/Child',
parentId: 'tag-parent',
});
});

it('should ignore Keywords when TagsList is present', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent/Child', 'Child'] }),
});
const asset = AssetFactory.from()
.exif({ tags: ['Parent/Child', 'Child'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });

expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(1, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent',
parentId: undefined,
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(2, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent/Child',
parentId: 'tag-parent',
});
});

it('should extract hierarchy from HierarchicalSubject', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent/Child', 'TagA'] }),
});
const asset = AssetFactory.from()
.exif({ tags: ['Parent/Child', 'TagA'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);

await sut.handleMetadataExtraction({ id: assetStub.image.id });
await sut.handleMetadataExtraction({ id: asset.id });

expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(1, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent',
parentId: undefined,
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(2, {
userId: 'user-id',
userId: asset.ownerId,
value: 'Parent/Child',
parentId: 'tag-parent',
});
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(3, { userId: 'user-id', value: 'TagA', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenNthCalledWith(3, {
userId: asset.ownerId,
value: 'TagA',
parent: undefined,
});
});

it('should extract tags from HierarchicalSubject as a list with a number', async () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
mocks.asset.getById.mockResolvedValue({
...factory.asset(),
exifInfo: factory.exif({ tags: ['Parent', '2024'] }),
});
const asset = AssetFactory.from()
.exif({ tags: ['Parent', '2024'] })
.build();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(asset);
mocks.asset.getById.mockResolvedValue(asset);
mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

await sut.handleMetadataExtraction({ id: assetStub.image.id });

expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: '2024', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: 'Parent', parent: undefined });
expect(mocks.tag.upsertValue).toHaveBeenCalledWith({ userId: asset.ownerId, value: '2024', parent: undefined });
});
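(These cases all reduce to the same parsing rule: TagsList wins over Keywords, HierarchicalSubject uses `|` as its separator while the others use `/`, and each path segment is upserted parent-first. A sketch of that walk, assumed from the expectations above rather than taken from the service:)

// Assumed tag-upsert walk, reconstructed from the test expectations above.
type UpsertTag = (args: { userId: string; value: string; parentId?: string }) => Promise<{ id: string }>;

async function upsertTagPath(userId: string, path: string, separator: string, upsert: UpsertTag) {
  let parentId: string | undefined;
  let value = '';
  for (const segment of path.split(separator)) {
    // tag values accumulate the full path using '/' regardless of the source separator
    value = value ? `${value}/${segment}` : segment;
    const tag = await upsert({ userId, value, parentId });
    parentId = tag.id;
  }
}

// e.g. HierarchicalSubject 'Parent|Child' upserts 'Parent' and then 'Parent/Child'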

it('should ignore / characters in a HierarchicalSubject tag', async () => {
@@ -1646,31 +1661,23 @@ describe(MetadataService.name, () => {

describe('handleQueueSidecar', () => {
it('should queue assets with sidecar files', async () => {
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
const asset = AssetFactory.create();
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([asset]));

await sut.handleQueueSidecar({ force: true });
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(true);

expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarCheck,
data: { id: assetStub.sidecar.id },
},
]);
expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.SidecarCheck, data: { id: asset.id } }]);
});

it('should queue assets without sidecar files', async () => {
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
const asset = AssetFactory.create();
mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([asset]));

await sut.handleQueueSidecar({ force: false });

expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.SidecarCheck,
data: { id: assetStub.image.id },
},
]);
expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.SidecarCheck, data: { id: asset.id } }]);
});
});
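(The makeStream helper these queue tests rely on is imported from test/utils; its implementation is not shown in this diff, but from its usage — wrapping an array so it can stand in for a repository's streaming result — it is presumably something like:)

// A plausible sketch of makeStream, assuming repository streams are async iterables.
async function* makeStream<T>(items: T[] = []): AsyncGenerator<T> {
  for (const item of items) {
    yield item;
  }
}

// usage: mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([asset]));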
@@ -1,14 +1,15 @@
import { plainToInstance } from 'class-transformer';
import { defaults, SystemConfig } from 'src/config';
import { AlbumUser } from 'src/database';
import { SystemConfigDto } from 'src/dtos/system-config.dto';
import { AssetFileType, JobName, JobStatus, UserMetadataKey } from 'src/enum';
import { NotificationService } from 'src/services/notification.service';
import { INotifyAlbumUpdateJob } from 'src/types';
import { albumStub } from 'test/fixtures/album.stub';
import { assetStub } from 'test/fixtures/asset.stub';
import { AlbumFactory } from 'test/factories/album.factory';
import { AssetFileFactory } from 'test/factories/asset-file.factory';
import { UserFactory } from 'test/factories/user.factory';
import { notificationStub } from 'test/fixtures/notification.stub';
import { userStub } from 'test/fixtures/user.stub';
import { newUuid } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';

const configs = {
@@ -267,14 +268,14 @@ describe(NotificationService.name, () => {
});

it('should skip if recipient could not be found', async () => {
mocks.album.getById.mockResolvedValue(albumStub.empty);
mocks.album.getById.mockResolvedValue(AlbumFactory.create());

await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
expect(mocks.job.queue).not.toHaveBeenCalled();
});

it('should skip if the recipient has email notifications disabled', async () => {
mocks.album.getById.mockResolvedValue(albumStub.empty);
mocks.album.getById.mockResolvedValue(AlbumFactory.create());
mocks.user.get.mockResolvedValue({
...userStub.user1,
metadata: [
@@ -290,7 +291,7 @@ describe(NotificationService.name, () => {
});

it('should skip if the recipient has email notifications for album invite disabled', async () => {
mocks.album.getById.mockResolvedValue(albumStub.empty);
mocks.album.getById.mockResolvedValue(AlbumFactory.create());
mocks.user.get.mockResolvedValue({
...userStub.user1,
metadata: [
@@ -306,7 +307,7 @@ describe(NotificationService.name, () => {
});

it('should send invite email', async () => {
mocks.album.getById.mockResolvedValue(albumStub.empty);
mocks.album.getById.mockResolvedValue(AlbumFactory.create());
mocks.user.get.mockResolvedValue({
...userStub.user1,
metadata: [
@@ -328,7 +329,8 @@ describe(NotificationService.name, () => {
});

it('should send invite email without album thumbnail if thumbnail asset does not exist', async () => {
mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail);
const album = AlbumFactory.create({ albumThumbnailAssetId: newUuid() });
mocks.album.getById.mockResolvedValue(album);
mocks.user.get.mockResolvedValue({
...userStub.user1,
metadata: [
@@ -345,7 +347,7 @@ describe(NotificationService.name, () => {

await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith(
albumStub.emptyWithValidThumbnail.albumThumbnailAssetId,
album.albumThumbnailAssetId,
AssetFileType.Thumbnail,
);
expect(mocks.job.queue).toHaveBeenCalledWith({
@@ -358,7 +360,9 @@ describe(NotificationService.name, () => {
});

it('should send invite email with album thumbnail as jpeg', async () => {
mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail);
const assetFile = AssetFileFactory.create({ type: AssetFileType.Thumbnail });
const album = AlbumFactory.create({ albumThumbnailAssetId: assetFile.assetId });
mocks.album.getById.mockResolvedValue(album);
mocks.user.get.mockResolvedValue({
...userStub.user1,
metadata: [
@@ -371,13 +375,11 @@ describe(NotificationService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([
{ id: '1', type: AssetFileType.Thumbnail, path: 'path-to-thumb.jpg', isEdited: false },
]);
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([assetFile]);

await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith(
albumStub.emptyWithValidThumbnail.albumThumbnailAssetId,
album.albumThumbnailAssetId,
AssetFileType.Thumbnail,
);
expect(mocks.job.queue).toHaveBeenCalledWith({
@@ -390,7 +392,9 @@ describe(NotificationService.name, () => {
});

it('should send invite email with album thumbnail and arbitrary extension', async () => {
mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail);
const assetFile = AssetFileFactory.create({ path: 'some-thumb.ext', type: AssetFileType.Thumbnail });
const album = AlbumFactory.create({ albumThumbnailAssetId: assetFile.assetId });
mocks.album.getById.mockResolvedValue(album);
mocks.user.get.mockResolvedValue({
...userStub.user1,
metadata: [
@@ -403,11 +407,11 @@ describe(NotificationService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([{ ...assetStub.image.files[2], isEdited: false }]);
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([assetFile]);

await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
expect(mocks.assetJob.getAlbumThumbnailFiles).toHaveBeenCalledWith(
albumStub.emptyWithValidThumbnail.albumThumbnailAssetId,
album.albumThumbnailAssetId,
AssetFileType.Thumbnail,
);
expect(mocks.job.queue).toHaveBeenCalledWith({
@@ -427,85 +431,74 @@ describe(NotificationService.name, () => {
});

it('should skip if owner could not be found', async () => {
mocks.album.getById.mockResolvedValue(albumStub.emptyWithValidThumbnail);
mocks.album.getById.mockResolvedValue(AlbumFactory.create({ ownerId: 'non-existent' }));

await expect(sut.handleAlbumUpdate({ id: '', recipientId: '1' })).resolves.toBe(JobStatus.Skipped);
expect(mocks.systemMetadata.get).not.toHaveBeenCalled();
});

it('should skip recipient that could not be looked up', async () => {
mocks.album.getById.mockResolvedValue({
...albumStub.emptyWithValidThumbnail,
albumUsers: [{ user: { id: userStub.user1.id } } as AlbumUser],
});
mocks.user.get.mockResolvedValueOnce(userStub.user1);
const album = AlbumFactory.from().albumUser({ userId: 'non-existent' }).build();
mocks.album.getById.mockResolvedValue(album);
mocks.user.get.mockResolvedValueOnce(album.owner);
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
await sut.handleAlbumUpdate({ id: '', recipientId: 'non-existent' });
expect(mocks.user.get).toHaveBeenCalledWith('non-existent', { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});

it('should skip recipient with disabled email notifications', async () => {
mocks.album.getById.mockResolvedValue({
...albumStub.emptyWithValidThumbnail,
albumUsers: [{ user: { id: userStub.user1.id } } as AlbumUser],
});
mocks.user.get.mockResolvedValue({
...userStub.user1,
metadata: [
{
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: false, albumUpdate: true } },
},
],
});
const user = UserFactory.from()
.metadata({
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: false, albumUpdate: true } },
})
.build();
const album = AlbumFactory.from().albumUser({ userId: user.id }).build();
mocks.album.getById.mockResolvedValue(album);
mocks.user.get.mockResolvedValue(user);
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
await sut.handleAlbumUpdate({ id: '', recipientId: user.id });
expect(mocks.user.get).toHaveBeenCalledWith(user.id, { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});

it('should skip recipient with disabled email notifications for the album update event', async () => {
mocks.album.getById.mockResolvedValue({
...albumStub.emptyWithValidThumbnail,
albumUsers: [{ user: { id: userStub.user1.id } } as AlbumUser],
});
mocks.user.get.mockResolvedValue({
...userStub.user1,
metadata: [
{
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: true, albumUpdate: false } },
},
],
});
const user = UserFactory.from()
.metadata({
key: UserMetadataKey.Preferences,
value: { emailNotifications: { enabled: true, albumUpdate: false } },
})
.build();
const album = AlbumFactory.from().albumUser({ userId: user.id }).build();
mocks.album.getById.mockResolvedValue(album);
mocks.user.get.mockResolvedValue(user);
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
await sut.handleAlbumUpdate({ id: '', recipientId: user.id });
expect(mocks.user.get).toHaveBeenCalledWith(user.id, { withDeleted: false });
expect(mocks.email.renderEmail).not.toHaveBeenCalled();
});

it('should send email', async () => {
mocks.album.getById.mockResolvedValue({
...albumStub.emptyWithValidThumbnail,
albumUsers: [{ user: { id: userStub.user1.id } } as AlbumUser],
});
mocks.user.get.mockResolvedValue(userStub.user1);
const user = UserFactory.create();
const album = AlbumFactory.from().albumUser({ userId: user.id }).build();
mocks.album.getById.mockResolvedValue(album);
mocks.user.get.mockResolvedValue(user);
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

await sut.handleAlbumUpdate({ id: '', recipientId: userStub.user1.id });
expect(mocks.user.get).toHaveBeenCalledWith(userStub.user1.id, { withDeleted: false });
await sut.handleAlbumUpdate({ id: '', recipientId: user.id });
expect(mocks.user.get).toHaveBeenCalledWith(user.id, { withDeleted: false });
expect(mocks.email.renderEmail).toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalled();
});
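(The skip cases above all hinge on the same user-metadata lookup. A condensed sketch of the check these tests imply — reconstructed from the expectations; the real service code is not part of this diff:)

// Assumed check, reconstructed from the fixtures above; not the service source.
type EmailPrefs = { enabled: boolean; albumUpdate: boolean };
type MetadataItem = { key: unknown; value: { emailNotifications?: EmailPrefs } };

function shouldSendAlbumUpdateEmail(metadata: MetadataItem[], preferencesKey: unknown): boolean {
  const prefs = metadata.find((item) => item.key === preferencesKey)?.value.emailNotifications;
  // no stored preference (e.g. UserFactory.create()) defaults to sending
  return prefs === undefined || (prefs.enabled && prefs.albumUpdate);
}

// shouldSendAlbumUpdateEmail([], UserMetadataKey.Preferences)  // => true  (email sent)
// with { enabled: false, albumUpdate: true }                   // => false (skipped)
// with { enabled: true, albumUpdate: false }                   // => false (skipped)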
@@ -677,8 +677,9 @@ export class PersonService extends BaseService {
};

// now coordinates are in original image space
dto.imageHeight = asset.exifInfo.exifImageHeight;
dto.imageWidth = asset.exifInfo.exifImageWidth;
const originalDimensions = getDimensions(asset.exifInfo);
dto.imageWidth = originalDimensions.width;
dto.imageHeight = originalDimensions.height;
}

await this.personRepository.createAssetFace({
@@ -3,7 +3,7 @@ import _ from 'lodash';
import { AssetIdErrorReason } from 'src/dtos/asset-ids.response.dto';
import { SharedLinkType } from 'src/enum';
import { SharedLinkService } from 'src/services/shared-link.service';
import { albumStub } from 'test/fixtures/album.stub';
import { AlbumFactory } from 'test/factories/album.factory';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { sharedLinkResponseStub, sharedLinkStub } from 'test/fixtures/shared-link.stub';
@@ -120,19 +120,17 @@ describe(SharedLinkService.name, () => {
});

it('should create an album shared link', async () => {
mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set([albumStub.oneAsset.id]));
const album = AlbumFactory.from().asset().build();
mocks.access.album.checkOwnerAccess.mockResolvedValue(new Set([album.id]));
mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.valid);

await sut.create(authStub.admin, { type: SharedLinkType.Album, albumId: albumStub.oneAsset.id });
await sut.create(authStub.admin, { type: SharedLinkType.Album, albumId: album.id });

expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith(
authStub.admin.user.id,
new Set([albumStub.oneAsset.id]),
);
expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set([album.id]));
expect(mocks.sharedLink.create).toHaveBeenCalledWith({
type: SharedLinkType.Album,
userId: authStub.admin.user.id,
albumId: albumStub.oneAsset.id,
albumId: album.id,
allowDownload: true,
allowUpload: true,
description: null,
@@ -2,6 +2,7 @@ import { BadRequestException } from '@nestjs/common';
import { StackService } from 'src/services/stack.service';
import { assetStub, stackStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { newUuid } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';

describe(StackService.name, () => {
@@ -204,9 +205,9 @@ describe(StackService.name, () => {
mocks.access.stack.checkOwnerAccess.mockResolvedValue(new Set(['stack-id']));
mocks.stack.getForAssetRemoval.mockResolvedValue({ id: null, primaryAssetId: null });

await expect(
sut.removeAsset(authStub.admin, { id: 'stack-id', assetId: assetStub.imageFrom2015.id }),
).rejects.toBeInstanceOf(BadRequestException);
await expect(sut.removeAsset(authStub.admin, { id: 'stack-id', assetId: newUuid() })).rejects.toBeInstanceOf(
BadRequestException,
);

expect(mocks.asset.update).not.toHaveBeenCalled();
expect(mocks.event.emit).not.toHaveBeenCalled();
@@ -2,7 +2,7 @@ import { Stats } from 'node:fs';
import { defaults, SystemConfig } from 'src/config';
import { AssetPathType, JobStatus } from 'src/enum';
import { StorageTemplateService } from 'src/services/storage-template.service';
import { albumStub } from 'test/fixtures/album.stub';
import { AlbumFactory } from 'test/factories/album.factory';
import { assetStub } from 'test/fixtures/asset.stub';
import { userStub } from 'test/fixtures/user.stub';
import { factory } from 'test/small.factory';
@@ -143,7 +143,7 @@ describe(StorageTemplateService.name, () => {
it('should use handlebar if condition for album', async () => {
const asset = assetStub.storageAsset();
const user = userStub.user1;
const album = albumStub.oneAsset;
const album = AlbumFactory.from().asset().build();
const config = structuredClone(defaults);
config.storageTemplate.template = '{{y}}/{{#if album}}{{album}}{{else}}other/{{MM}}{{/if}}/{{filename}}';
@@ -191,7 +191,7 @@ describe(StorageTemplateService.name, () => {
it('should handle album startDate', async () => {
const asset = assetStub.storageAsset();
const user = userStub.user1;
const album = albumStub.oneAsset;
const album = AlbumFactory.from().asset().build();
const config = structuredClone(defaults);
config.storageTemplate.template =
'{{#if album}}{{album-startDate-y}}/{{album-startDate-MM}} - {{album}}{{else}}{{y}}/{{MM}}/{{/if}}/{{filename}}';
|
||||
|
||||
@@ -1,20 +1,3 @@
import { BadRequestException } from '@nestjs/common';
import { debounce } from 'lodash';
import { DateTime } from 'luxon';
import path, { basename, join } from 'node:path';
import { PassThrough, Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { CacheControl, StorageFolder } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';

export function isValidDatabaseBackupName(filename: string) {
  return filename.match(/^[\d\w-.]+\.sql(?:\.gz)?$/);
}
@@ -30,453 +13,12 @@ export function isFailedDatabaseBackupName(filename: string) {
  return filename.match(/^immich-db-backup-.*\.sql\.gz\.tmp$/);
}

export function findVersion(filename: string) {
export function findDatabaseBackupVersion(filename: string) {
  return /-v(.*)-/.exec(filename)?.[1];
}

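For reference, a quick sketch of how the two name helpers above behave; the sample filename is made up but follows the immich-db-backup-<timestamp>-v<server version>-pg<pg version>.sql.gz pattern generated further down:

// Illustrative only; assumes the helpers above are in scope.
const ok = 'immich-db-backup-20250101T120000-v1.122.0-pg16.4.sql.gz';
console.log(Boolean(isValidDatabaseBackupName(ok))); // true
console.log(findDatabaseBackupVersion(ok)); // '1.122.0' — greedy match up to the last '-'
console.log(Boolean(isValidDatabaseBackupName('../escape.sql'))); // false: '/' never matches the name pattern
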
type BackupRepos = {
  logger: LoggingRepository;
  storage: StorageRepository;
  config: ConfigRepository;
  process: ProcessRepository;
  database: DatabaseRepository;
  health: MaintenanceHealthRepository;
};

export class UnsupportedPostgresError extends Error {
  constructor(databaseVersion: string) {
    super(`Unsupported PostgreSQL version: ${databaseVersion}`);
  }
}

export async function buildPostgresLaunchArguments(
  { logger, config, database }: Pick<BackupRepos, 'logger' | 'config' | 'database'>,
  bin: 'pg_dump' | 'pg_dumpall' | 'psql',
  options: {
    singleTransaction?: boolean;
    username?: string;
  } = {},
): Promise<{
  bin: string;
  args: string[];
  databaseUsername: string;
  databasePassword: string;
  databaseVersion: string;
  databaseMajorVersion?: number;
}> {
  const {
    database: { config: databaseConfig },
  } = config.getEnv();
  const isUrlConnection = databaseConfig.connectionType === 'url';

  const databaseVersion = await database.getPostgresVersion();
  const databaseSemver = semver.coerce(databaseVersion);
  const databaseMajorVersion = databaseSemver?.major;

  const args: string[] = [];
  let databaseUsername;

  if (isUrlConnection) {
    if (bin !== 'pg_dump') {
      args.push('--dbname');
    }

    let url = databaseConfig.url;
    if (URL.canParse(databaseConfig.url)) {
      const parsedUrl = new URL(databaseConfig.url);
      // remove known bad parameters
      parsedUrl.searchParams.delete('uselibpqcompat');

      databaseUsername = parsedUrl.username;
      url = parsedUrl.toString();
    }

    // fall back to the typical default when the URL cannot be parsed or carries no username
    databaseUsername ??= 'postgres';

    args.push(url);
  } else {
    databaseUsername = databaseConfig.username;

    args.push('--username', databaseUsername, '--host', databaseConfig.host, '--port', databaseConfig.port.toString());

    switch (bin) {
      case 'pg_dumpall': {
        args.push('--database');
        break;
      }
      case 'psql': {
        args.push('--dbname');
        break;
      }
    }

    args.push(databaseConfig.database);
  }

  switch (bin) {
    case 'pg_dump':
    case 'pg_dumpall': {
      args.push('--clean', '--if-exists');
      break;
    }
    case 'psql': {
      if (options.singleTransaction) {
        args.push(
          // don't commit any transaction on failure
          '--single-transaction',
          // exit with a non-zero code on error
          '--set',
          'ON_ERROR_STOP=on',
        );
      }

      args.push(
        // used for progress monitoring
        '--echo-all',
        '--output=/dev/null',
      );
      break;
    }
  }

  if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
    logger.error(`Database Restore Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
    throw new UnsupportedPostgresError(databaseVersion);
  }

  return {
    bin: `/usr/lib/postgresql/${databaseMajorVersion}/bin/${bin}`,
    args,
    databaseUsername,
    databasePassword: isUrlConnection ? new URL(databaseConfig.url).password : databaseConfig.password,
    databaseVersion,
    databaseMajorVersion,
  };
}

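The supported-version gate above leans on semver.coerce to normalize whatever the server reports; a minimal sketch of that behavior, reusing the semver import at the top of the file (the version strings are illustrative):

// Only PostgreSQL 14.x through 18.x pass the gate.
for (const reported of ['13.11', '14.10', '16.4 (Debian 16.4-1.pgdg120+1)', '19.0']) {
  const coerced = semver.coerce(reported); // extracts '16.4' out of the Debian banner, for example
  console.log(reported, '->', Boolean(coerced && semver.satisfies(coerced, '>=14.0.0 <19.0.0')));
}
// '13.11' and '19.0' print false; the two middle versions print true
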
export async function createDatabaseBackup(
  { logger, storage, process: processRepository, ...pgRepos }: Omit<BackupRepos, 'health'>,
  filenamePrefix: string = '',
): Promise<string> {
  logger.debug(`Database Backup Started`);

  const { bin, args, databasePassword, databaseVersion, databaseMajorVersion } = await buildPostgresLaunchArguments(
    { logger, ...pgRepos },
    'pg_dump',
  );

  logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

  const filename = `${filenamePrefix}immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz`;
  const backupFilePath = join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
  const temporaryFilePath = `${backupFilePath}.tmp`;

  try {
    const pgdump = processRepository.spawnDuplexStream(bin, args, {
      env: {
        PATH: process.env.PATH,
        PGPASSWORD: databasePassword,
      },
    });

    const gzip = processRepository.spawnDuplexStream('gzip', ['--rsyncable']);
    const fileStream = storage.createWriteStream(temporaryFilePath);

    await pipeline(pgdump, gzip, fileStream);
    await storage.rename(temporaryFilePath, backupFilePath);
  } catch (error) {
    logger.error(`Database Backup Failure: ${error}`);
    await storage
      .unlink(temporaryFilePath)
      .catch((error) => logger.error(`Failed to delete failed backup file: ${error}`));
    throw error;
  }

  logger.log(`Database Backup Success`);
  return backupFilePath;
}

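Stripped of the repository wrappers, the pipeline amounts to pg_dump piped through gzip into a temporary file that is only renamed into place on success. A rough plain-Node approximation (connection details are placeholders, and node:zlib stands in for the `gzip --rsyncable` subprocess, a flag zlib itself does not support):

import { spawn } from 'node:child_process';
import { createWriteStream, promises as fs } from 'node:fs';
import { createGzip } from 'node:zlib';
import { pipeline } from 'node:stream/promises';

async function dumpToFile(outPath: string): Promise<void> {
  const pgdump = spawn('pg_dump', ['--clean', '--if-exists', '--host', 'localhost', '--username', 'postgres', 'immich'], {
    env: { PATH: process.env.PATH, PGPASSWORD: 'placeholder' },
  });
  // write to a .tmp path first so a failed dump never looks like a valid backup
  await pipeline(pgdump.stdout, createGzip(), createWriteStream(`${outPath}.tmp`));
  await fs.rename(`${outPath}.tmp`, outPath);
}
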
const SQL_DROP_CONNECTIONS = `
-- drop all other database connections
SELECT pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname = current_database()
AND pid <> pg_backend_pid();
`;

const SQL_RESET_SCHEMA = (username: string) => `
-- re-create the default schema
DROP SCHEMA public CASCADE;
CREATE SCHEMA public;

-- restore access to schema
GRANT ALL ON SCHEMA public TO "${username}";
GRANT ALL ON SCHEMA public TO public;
`;

async function* sql(inputStream: Readable, databaseUsername: string, isPgClusterDump: boolean) {
  yield SQL_DROP_CONNECTIONS;
  yield isPgClusterDump
    ? // the dump likely contains SQL that tries to drop the currently active
      // database to ensure a fresh slate; if the `postgres` database exists,
      // prefer to switch to it before continuing, otherwise the drop silently fails
      String.raw`
\c postgres
`
    : SQL_RESET_SCHEMA(databaseUsername);

  for await (const chunk of inputStream) {
    yield chunk;
  }
}

async function* sqlRollback(inputStream: Readable, databaseUsername: string) {
  yield SQL_DROP_CONNECTIONS;
  yield SQL_RESET_SCHEMA(databaseUsername);

  for await (const chunk of inputStream) {
    yield chunk;
  }
}

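Both generators use the same trick: yield the setup SQL first, then hand the rest of the stream through untouched, so psql sees one continuous script. A self-contained sketch of the pattern, with toy statements and the Readable import above:

async function* withPreamble(preamble: string, input: Readable) {
  yield preamble; // emitted before any byte of the original stream
  for await (const chunk of input) {
    yield chunk;
  }
}

const toyScript = Readable.from(withPreamble('SELECT 1;\n', Readable.from(['SELECT 2;\n'])));
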
export async function restoreDatabaseBackup(
  { logger, storage, process: processRepository, database: databaseRepository, health, ...pgRepos }: BackupRepos,
  filename: string,
  progressCb?: (action: 'backup' | 'restore' | 'migrations' | 'rollback', progress: number) => void,
): Promise<void> {
  logger.debug(`Database Restore Started`);

  let complete = false;
  try {
    if (!isValidDatabaseBackupName(filename)) {
      throw new Error('Invalid backup file format!');
    }

    const backupFilePath = path.join(StorageCore.getBaseFolder(StorageFolder.Backups), filename);
    await storage.stat(backupFilePath); // => check file exists

    let isPgClusterDump = false;
    const version = findDatabaseBackupVersion(filename);
    if (version && semver.satisfies(version, '<= 2.4')) {
      isPgClusterDump = true;
    }

    const { bin, args, databaseUsername, databasePassword, databaseMajorVersion } = await buildPostgresLaunchArguments(
      { logger, database: databaseRepository, ...pgRepos },
      'psql',
      {
        singleTransaction: !isPgClusterDump,
      },
    );

    progressCb?.('backup', 0.05);

    const restorePointFilePath = await createDatabaseBackup(
      { logger, storage, process: processRepository, database: databaseRepository, ...pgRepos },
      'restore-point-',
    );

    logger.log(`Database Restore Starting. Database Version: ${databaseMajorVersion}`);

    let inputStream: Readable;
    if (backupFilePath.endsWith('.gz')) {
      const fileStream = storage.createPlainReadStream(backupFilePath);
      const gunzip = storage.createGunzip();
      fileStream.pipe(gunzip);
      inputStream = gunzip;
    } else {
      inputStream = storage.createPlainReadStream(backupFilePath);
    }

    const sqlStream = Readable.from(sql(inputStream, databaseUsername, isPgClusterDump));
    const psql = processRepository.spawnDuplexStream(bin, args, {
      env: {
        PATH: process.env.PATH,
        PGPASSWORD: databasePassword,
      },
    });

    const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
      if (complete) {
        return;
      }

      logger.log(`Restore progress ~ ${(progress * 100).toFixed(2)}%`);
      progressCb?.('restore', progress);
    });

    await pipeline(sqlStream, progressSource, psql, progressSink);

    try {
      progressCb?.('migrations', 0.9);
      await databaseRepository.runMigrations();
      await health.checkApiHealth();
    } catch (error) {
      progressCb?.('rollback', 0);

      const fileStream = storage.createPlainReadStream(restorePointFilePath);
      const gunzip = storage.createGunzip();
      fileStream.pipe(gunzip);
      inputStream = gunzip;

      const sqlStream = Readable.from(sqlRollback(inputStream, databaseUsername));
      const psql = processRepository.spawnDuplexStream(bin, args, {
        env: {
          PATH: process.env.PATH,
          PGPASSWORD: databasePassword,
        },
      });

      const [progressSource, progressSink] = createSqlProgressStreams((progress) => {
        if (complete) {
          return;
        }

        logger.log(`Rollback progress ~ ${(progress * 100).toFixed(2)}%`);
        progressCb?.('rollback', progress);
      });

      await pipeline(sqlStream, progressSource, psql, progressSink);

      throw error;
    }
  } catch (error) {
    logger.error(`Database Restore Failure: ${error}`);
    throw error;
  } finally {
    complete = true;
  }

  logger.log(`Database Restore Success`);
}

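The optional callback gives callers a simple four-phase contract: backup, then restore, then migrations, with rollback replacing the tail only on failure; each phase reports a 0–1 fraction. A hypothetical caller-side handler:

const logRestoreProgress = (action: 'backup' | 'restore' | 'migrations' | 'rollback', progress: number) =>
  console.log(`[restore:${action}] ${(progress * 100).toFixed(0)}%`);
// e.g. restoreDatabaseBackup(repos, filename, logRestoreProgress) — `repos` standing in
// for the injected repositories; this wiring is illustrative, not taken from the diff.
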
export async function deleteDatabaseBackup({ storage }: Pick<BackupRepos, 'storage'>, files: string[]): Promise<void> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);

  if (files.some((filename) => !isValidDatabaseBackupName(filename))) {
    throw new BadRequestException('Invalid backup name!');
  }

  await Promise.all(files.map((filename) => storage.unlink(path.join(backupsFolder, filename))));
}

export async function listDatabaseBackups({
  storage,
}: Pick<BackupRepos, 'storage'>): Promise<{ filename: string; filesize: number }[]> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
  const files = await storage.readdir(backupsFolder);

  const validFiles = files
    .filter((fn) => isValidDatabaseBackupName(fn))
    .toSorted((a, b) => (a.startsWith('uploaded-') === b.startsWith('uploaded-') ? a.localeCompare(b) : 1))
    .toReversed();

  const backups = await Promise.all(
    validFiles.map(async (filename) => {
      const stats = await storage.stat(path.join(backupsFolder, filename));
      return { filename, filesize: stats.size };
    }),
  );

  return backups;
}

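The ordering works because the generated names embed a zero-padded timestamp, so plain string comparison is chronological. A simplified single-group illustration with made-up names (the real sort additionally separates uploaded- files from automatic ones):

const sample = [
  'immich-db-backup-20250101T000000-v1.0.0-pg16.0.sql.gz',
  'immich-db-backup-20250301T000000-v1.0.0-pg16.0.sql.gz',
  'immich-db-backup-20250201T000000-v1.0.0-pg16.0.sql.gz',
];
console.log(sample.toSorted((a, b) => a.localeCompare(b)).toReversed()[0]);
// -> the 20250301 backup, i.e. newest first after the reverse
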
export async function uploadDatabaseBackup(
  { storage }: Pick<BackupRepos, 'storage'>,
  file: Express.Multer.File,
): Promise<void> {
  const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
  const fn = basename(file.originalname);
  if (!isValidDatabaseBackupName(fn)) {
    throw new BadRequestException('Invalid backup name!');
  }

  const path = join(backupsFolder, `uploaded-${fn}`);
  await storage.createOrOverwriteFile(path, file.buffer);
}

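Worth noting: basename (imported above) strips any client-supplied directory components before the name check runs, so a crafted originalname cannot traverse out of the backups folder:

console.log(basename('../../etc/cron.d/evil.sql')); // 'evil.sql' — path segments are discarded
console.log(Boolean(isValidDatabaseBackupName('../../etc/cron.d/evil.sql'))); // false anyway: '/' never matches
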
export function downloadDatabaseBackup(fileName: string) {
  if (!isValidDatabaseBackupName(fileName)) {
    throw new BadRequestException('Invalid backup name!');
  }

  const path = join(StorageCore.getBaseFolder(StorageFolder.Backups), fileName);

  return {
    path,
    fileName,
    cacheControl: CacheControl.PrivateWithoutCache,
    contentType: fileName.endsWith('.gz') ? 'application/gzip' : 'application/sql',
  };
}

function createSqlProgressStreams(cb: (progress: number) => void) {
  const STDIN_START_MARKER = new TextEncoder().encode('FROM stdin');
  const STDIN_END_MARKER = new TextEncoder().encode(String.raw`\.`);

  let readingStdin = false;
  let sequenceIdx = 0;

  let linesSent = 0;
  let linesProcessed = 0;

  const startedAt = +Date.now();
  const cbDebounced = debounce(
    () => {
      const progress = source.writableEnded
        ? Math.min(1, linesProcessed / linesSent)
        : // progress simulation while we're in an indeterminate state
          Math.min(0.3, 0.1 + (Date.now() - startedAt) / 1e4);
      cb(progress);
    },
    100,
    {
      maxWait: 100,
    },
  );

  let lastByte = -1;
  const source = new PassThrough({
    transform(chunk, _encoding, callback) {
      for (const byte of chunk) {
        if (!readingStdin && byte === 10 && lastByte !== 10) {
          linesSent += 1;
        }

        lastByte = byte;

        const sequence = readingStdin ? STDIN_END_MARKER : STDIN_START_MARKER;
        if (sequence[sequenceIdx] === byte) {
          sequenceIdx += 1;

          if (sequence.length === sequenceIdx) {
            sequenceIdx = 0;
            readingStdin = !readingStdin;
          }
        } else {
          sequenceIdx = 0;
        }
      }

      cbDebounced();
      this.push(chunk);
      callback();
    },
  });

  const sink = new Writable({
    write(chunk, _encoding, callback) {
      for (const byte of chunk) {
        if (byte === 10) {
          linesProcessed++;
        }
      }

      cbDebounced();
      callback();
    },
  });

  return [source, sink];
}

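In short: the source stream counts newline-terminated statement lines flowing into psql (skipping COPY payload between a 'FROM stdin' marker and the terminating '\.'), the sink counts echoed lines coming back out, and their ratio is the progress. A hypothetical standalone exercise of the pair, using toy input where real usage pipes through psql instead of the PassThrough:

async function demoProgress() {
  const [source, sink] = createSqlProgressStreams((p) => console.log(`~${(p * 100).toFixed(0)}%`));
  await pipeline(Readable.from(['SELECT 1;\n', 'SELECT 2;\n']), source, new PassThrough(), sink);
}
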
@@ -102,6 +102,10 @@ export const getKyselyConfig = (
  }),
  log(event) {
    if (event.level === 'error') {
      if (isAssetChecksumConstraint(event.error)) {
        return;
      }

      console.error('Query failed :', {
        durationMs: event.queryDurationMillis,
        error: event.error,

@@ -1,7 +1,7 @@
import { extname } from 'node:path';
import { AssetType } from 'src/enum';

const raw: Record<string, string[]> = {
const raw = {
  '.3fr': ['image/3fr', 'image/x-hasselblad-3fr'],
  '.ari': ['image/ari', 'image/x-arriflex-ari'],
  '.arw': ['image/arw', 'image/x-sony-arw'],
@@ -41,6 +41,7 @@ const raw: Record<string, string[]> = {
 **/
const webSupportedImage = {
  '.avif': ['image/avif'],
  '.bmp': ['image/bmp'],
  '.gif': ['image/gif'],
  '.jpeg': ['image/jpeg'],
  '.jpg': ['image/jpeg'],
@@ -48,10 +49,8 @@ const webSupportedImage = {
  '.webp': ['image/webp'],
};

const image: Record<string, string[]> = {
const webUnsupportedImage = {
  ...raw,
  ...webSupportedImage,
  '.bmp': ['image/bmp'],
  '.heic': ['image/heic'],
  '.heif': ['image/heif'],
  '.hif': ['image/hif'],
@@ -64,6 +63,11 @@ const image: Record<string, string[]> = {
  '.tiff': ['image/tiff'],
};

const image: Record<string, string[]> = {
  ...webSupportedImage,
  ...webUnsupportedImage,
};

const possiblyAnimatedImageExtensions = new Set(['.avif', '.gif', '.heic', '.heif', '.jxl', '.png', '.webp']);
const possiblyAnimatedImage: Record<string, string[]> = Object.fromEntries(
  Object.entries(image).filter(([key]) => possiblyAnimatedImageExtensions.has(key)),
@@ -120,6 +124,7 @@ export const mimeTypes = {
  sidecar,
  video,
  raw,
  webUnsupportedImage,

  isAsset: (filename: string) => isType(filename, image) || isType(filename, video),
  isImage: (filename: string) => isType(filename, image),