Merge branch 'main' of https://github.com/immich-app/immich into feat/sidecar-asset-files

This commit is contained in:
Jonathan Jogenfors
2025-10-30 23:19:18 +01:00
739 changed files with 22252 additions and 13205 deletions

View File

@@ -79,12 +79,17 @@ export class AlbumService extends BaseService {
const album = await this.findOrFail(id, { withAssets });
const [albumMetadataForIds] = await this.albumRepository.getMetadataForIds([album.id]);
const hasSharedUsers = album.albumUsers && album.albumUsers.length > 0;
const hasSharedLink = album.sharedLinks && album.sharedLinks.length > 0;
const isShared = hasSharedUsers || hasSharedLink;
return {
...mapAlbum(album, withAssets, auth),
startDate: albumMetadataForIds?.startDate ?? undefined,
endDate: albumMetadataForIds?.endDate ?? undefined,
assetCount: albumMetadataForIds?.assetCount ?? 0,
lastModifiedAssetTimestamp: albumMetadataForIds?.lastModifiedAssetTimestamp ?? undefined,
contributorCounts: isShared ? await this.albumRepository.getContributorCounts(album.id) : undefined,
};
}

View File

@@ -700,6 +700,42 @@ describe(AssetService.name, () => {
});
});
// Unit tests for AssetService.getOcr: permission gating and pass-through of repository results.
describe('getOcr', () => {
// Access control: a caller without owner access on the asset is rejected
// and the OCR repository must never be queried.
it('should require asset read permission', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set());
await expect(sut.getOcr(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(BadRequestException);
expect(mocks.ocr.getByAssetId).not.toHaveBeenCalled();
});
// Happy path: OCR rows are returned verbatim, and the access check receives
// the caller's user id plus the requested asset id.
it('should return OCR data for an asset', async () => {
const ocr1 = factory.assetOcr({ text: 'Hello World' });
const ocr2 = factory.assetOcr({ text: 'Test Image' });
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
mocks.ocr.getByAssetId.mockResolvedValue([ocr1, ocr2]);
await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([ocr1, ocr2]);
expect(mocks.access.asset.checkOwnerAccess).toHaveBeenCalledWith(
authStub.admin.user.id,
new Set(['asset-1']),
undefined,
);
expect(mocks.ocr.getByAssetId).toHaveBeenCalledWith('asset-1');
});
// An asset with no OCR rows resolves to an empty array rather than an error.
it('should return empty array when no OCR data exists', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
mocks.ocr.getByAssetId.mockResolvedValue([]);
await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([]);
expect(mocks.ocr.getByAssetId).toHaveBeenCalledWith('asset-1');
});
});
describe('run', () => {
it('should run the refresh faces job', async () => {
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));

View File

@@ -2,11 +2,13 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { AssetFile } from 'src/database';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {
AssetBulkDeleteDto,
AssetBulkUpdateDto,
AssetCopyDto,
AssetJobName,
AssetJobsDto,
AssetMetadataResponseDto,
@@ -16,7 +18,17 @@ import {
mapStats,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetMetadataKey, AssetStatus, AssetVisibility, JobName, JobStatus, Permission, QueueName } from 'src/enum';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import {
AssetFileType,
AssetMetadataKey,
AssetStatus,
AssetVisibility,
JobName,
JobStatus,
Permission,
QueueName,
} from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
import { requireElevatedPermission } from 'src/utils/access';
@@ -182,6 +194,92 @@ export class AssetService extends BaseService {
}
}
/**
 * Copies selected attributes (album membership, shared links, stack
 * membership, favorite flag, and the sidecar file) between two existing
 * assets. Each attribute group can be toggled off via the DTO flags
 * (all default to true).
 *
 * @throws BadRequestException when source and target ids are identical,
 *   or when either asset does not exist.
 */
async copy(
  auth: AuthDto,
  {
    sourceId,
    targetId,
    albums = true,
    sidecar = true,
    sharedLinks = true,
    stack = true,
    favorite = true,
  }: AssetCopyDto,
) {
  // Validate the cheap invariant first, before the access check and any database reads.
  if (sourceId === targetId) {
    throw new BadRequestException('Source and target id must be distinct');
  }

  await this.requireAccess({ auth, permission: Permission.AssetCopy, ids: [sourceId, targetId] });

  // The two lookups are independent, so fetch them in parallel.
  const [sourceAsset, targetAsset] = await Promise.all([
    this.assetRepository.getById(sourceId, { files: true }),
    this.assetRepository.getById(targetId, { files: true }),
  ]);
  if (!sourceAsset || !targetAsset) {
    throw new BadRequestException('Both assets must exist');
  }

  if (albums) {
    await this.albumRepository.copyAlbums({ sourceAssetId: sourceId, targetAssetId: targetId });
  }

  if (sharedLinks) {
    await this.sharedLinkAssetRepository.copySharedLinks({ sourceAssetId: sourceId, targetAssetId: targetId });
  }

  if (stack) {
    await this.copyStack(sourceAsset, targetAsset);
  }

  if (favorite) {
    await this.assetRepository.update({ id: targetId, isFavorite: sourceAsset.isFavorite });
  }

  if (sidecar) {
    await this.copySidecar(sourceAsset, targetAsset);
  }
}
/**
 * Propagates stack membership from the source asset to the target asset.
 *
 * No-op when the source asset is not stacked. When both assets are stacked,
 * the source stack is merged into the target stack and the source stack is
 * then deleted; otherwise the target asset simply joins the source's stack.
 */
private async copyStack(
  sourceAsset: { id: string; stackId: string | null },
  targetAsset: { id: string; stackId: string | null },
) {
  const sourceStackId = sourceAsset.stackId;
  if (!sourceStackId) {
    return;
  }

  const targetStackId = targetAsset.stackId;
  if (!targetStackId) {
    // Target is unstacked: attach it directly to the source's stack.
    await this.assetRepository.update({ id: targetAsset.id, stackId: sourceStackId });
    return;
  }

  // Both are stacked: fold the source stack into the target stack, then remove the source stack.
  await this.stackRepository.merge({ sourceId: sourceStackId, targetId: targetStackId });
  await this.stackRepository.delete(sourceStackId);
}
/**
 * Copies the target asset's sidecar file onto the source asset, replacing any
 * sidecar the source already had, and queues metadata extraction for the source.
 *
 * NOTE(review): data flows target -> source here, the opposite direction of the
 * other helpers used by copy() — confirm this is intentional.
 */
private async copySidecar(
sourceAsset: { id: string; files: AssetFile[] | undefined; originalPath: string },
targetAsset: { files: AssetFile[] | undefined },
) {
// Nothing to copy if the target has no sidecar file.
const targetSidecarFile = getAssetFiles(targetAsset.files).sidecarFile;
if (!targetSidecarFile) {
return;
}
// Remove the source's existing sidecar from disk before it is replaced.
const sourceSidecarFile = getAssetFiles(sourceAsset.files).sidecarFile;
if (sourceSidecarFile) {
await this.storageRepository.unlink(sourceSidecarFile.path);
}
// Place the copied sidecar next to the source original, register it as the
// source's Sidecar file, and re-run metadata extraction so the source picks
// up the copied sidecar's tags.
await this.storageRepository.copyFile(targetSidecarFile.path, `${sourceAsset.originalPath}.xmp`);
await this.assetRepository.upsertFile({
assetId: sourceAsset.id,
type: AssetFileType.Sidecar,
path: `${sourceAsset.originalPath}.xmp`,
});
await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: { id: sourceAsset.id } });
}
@OnJob({ name: JobName.AssetDeleteCheck, queue: QueueName.BackgroundTask })
async handleAssetDeletionCheck(): Promise<JobStatus> {
const config = await this.getConfig({ withCache: false });
@@ -289,6 +387,11 @@ export class AssetService extends BaseService {
return this.assetRepository.getMetadata(id);
}
/** Returns all OCR results for an asset, after verifying the caller may read it. */
async getOcr(auth: AuthDto, id: string): Promise<AssetOcrResponseDto[]> {
  const ids = [id];
  await this.requireAccess({ auth, ids, permission: Permission.AssetRead });
  return this.ocrRepository.getByAssetId(id);
}
async upsertMetadata(auth: AuthDto, id: string, dto: AssetMetadataUpsertDto): Promise<AssetMetadataResponseDto[]> {
await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: [id] });
return this.assetRepository.upsertMetadata(id, dto.items);

View File

@@ -41,6 +41,7 @@ const loginDetails = {
clientIp: '127.0.0.1',
deviceOS: '',
deviceType: '',
appVersion: null,
};
const fixtures = {
@@ -123,6 +124,11 @@ describe(AuthService.name, () => {
expect(mocks.user.getForChangePassword).toHaveBeenCalledWith(user.id);
expect(mocks.crypto.compareBcrypt).toHaveBeenCalledWith('old-password', 'hash-password');
expect(mocks.event.emit).toHaveBeenCalledWith('AuthChangePassword', {
userId: user.id,
currentSessionId: auth.session?.id,
shouldLogoutSessions: undefined,
});
});
it('should throw when password does not match existing password', async () => {
@@ -146,6 +152,25 @@ describe(AuthService.name, () => {
await expect(sut.changePassword(auth, dto)).rejects.toBeInstanceOf(BadRequestException);
});
// Verifies that invalidateSessions: true from the DTO is forwarded on the
// AuthChangePassword event (so other sessions can be logged out) along with
// the current session id.
it('should change the password and logout other sessions', async () => {
const user = factory.userAdmin();
const auth = factory.auth({ user });
const dto = { password: 'old-password', newPassword: 'new-password', invalidateSessions: true };
mocks.user.getForChangePassword.mockResolvedValue({ id: user.id, password: 'hash-password' });
mocks.user.update.mockResolvedValue(user);
await sut.changePassword(auth, dto);
expect(mocks.user.getForChangePassword).toHaveBeenCalledWith(user.id);
expect(mocks.crypto.compareBcrypt).toHaveBeenCalledWith('old-password', 'hash-password');
expect(mocks.event.emit).toHaveBeenCalledWith('AuthChangePassword', {
userId: user.id,
invalidateSessions: true,
currentSessionId: auth.session?.id,
});
});
});
describe('logout', () => {
@@ -243,6 +268,7 @@ describe(AuthService.name, () => {
updatedAt: session.updatedAt,
user: factory.authUser(),
pinExpiresAt: null,
appVersion: null,
};
mocks.session.getByToken.mockResolvedValue(sessionWithToken);
@@ -408,6 +434,7 @@ describe(AuthService.name, () => {
updatedAt: session.updatedAt,
user: factory.authUser(),
pinExpiresAt: null,
appVersion: null,
};
mocks.session.getByToken.mockResolvedValue(sessionWithToken);
@@ -435,6 +462,7 @@ describe(AuthService.name, () => {
user: factory.authUser(),
isPendingSyncReset: false,
pinExpiresAt: null,
appVersion: null,
};
mocks.session.getByToken.mockResolvedValue(sessionWithToken);
@@ -456,6 +484,7 @@ describe(AuthService.name, () => {
user: factory.authUser(),
isPendingSyncReset: false,
pinExpiresAt: null,
appVersion: null,
};
mocks.session.getByToken.mockResolvedValue(sessionWithToken);

View File

@@ -29,11 +29,13 @@ import { BaseService } from 'src/services/base.service';
import { isGranted } from 'src/utils/access';
import { HumanReadableSize } from 'src/utils/bytes';
import { mimeTypes } from 'src/utils/mime-types';
import { getUserAgentDetails } from 'src/utils/request';
/** Client metadata captured at login time and persisted on the new session. */
export interface LoginDetails {
/** True when the request arrived over a secure connection. */
isSecure: boolean;
/** IP address the login request originated from. */
clientIp: string;
/** Device type string — presumably derived from the user agent; confirm against callers. */
deviceType: string;
/** Device operating system string — presumably derived from the user agent; confirm against callers. */
deviceOS: string;
/** Client application version, or null when the client did not report one. */
appVersion: string | null;
}
interface ClaimOptions<T> {
@@ -102,6 +104,12 @@ export class AuthService extends BaseService {
const updatedUser = await this.userRepository.update(user.id, { password: hashedPassword });
await this.eventRepository.emit('AuthChangePassword', {
userId: user.id,
currentSessionId: auth.session?.id,
invalidateSessions: dto.invalidateSessions,
});
return mapUserAdmin(updatedUser);
}
@@ -218,7 +226,7 @@ export class AuthService extends BaseService {
}
if (session) {
return this.validateSession(session);
return this.validateSession(session, headers);
}
if (apiKey) {
@@ -463,15 +471,22 @@ export class AuthService extends BaseService {
return this.cryptoRepository.compareBcrypt(inputSecret, existingHash);
}
private async validateSession(tokenValue: string): Promise<AuthDto> {
private async validateSession(tokenValue: string, headers: IncomingHttpHeaders): Promise<AuthDto> {
const hashedToken = this.cryptoRepository.hashSha256(tokenValue);
const session = await this.sessionRepository.getByToken(hashedToken);
if (session?.user) {
const { appVersion, deviceOS, deviceType } = getUserAgentDetails(headers);
const now = DateTime.now();
const updatedAt = DateTime.fromJSDate(session.updatedAt);
const diff = now.diff(updatedAt, ['hours']);
if (diff.hours > 1) {
await this.sessionRepository.update(session.id, { id: session.id, updatedAt: new Date() });
if (diff.hours > 1 || appVersion != session.appVersion) {
await this.sessionRepository.update(session.id, {
id: session.id,
updatedAt: new Date(),
appVersion,
deviceOS,
deviceType,
});
}
// Pin check
@@ -529,6 +544,7 @@ export class AuthService extends BaseService {
token: tokenHashed,
deviceOS: loginDetails.deviceOS,
deviceType: loginDetails.deviceType,
appVersion: loginDetails.appVersion,
userId: user.id,
});

View File

@@ -209,6 +209,7 @@ describe(BackupService.name, () => {
${'15.3.3'} | ${15}
${'16.4.2'} | ${16}
${'17.15.1'} | ${17}
${'18.0.0'} | ${18}
`(
`should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
async ({ postgresVersion, expectedVersion }) => {
@@ -224,7 +225,7 @@ describe(BackupService.name, () => {
it.each`
postgresVersion
${'13.99.99'}
${'18.0.0'}
${'19.0.0'}
`(`should fail if postgres version $postgresVersion is not supported`, async ({ postgresVersion }) => {
mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
const result = await sut.handleBackupDatabase();

View File

@@ -103,7 +103,7 @@ export class BackupService extends BaseService {
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;
if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <18.0.0')) {
if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
return JobStatus.Failed;
}

View File

@@ -32,12 +32,14 @@ import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
@@ -50,6 +52,7 @@ import { TrashRepository } from 'src/repositories/trash.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { ViewRepository } from 'src/repositories/view-repository';
import { WebsocketRepository } from 'src/repositories/websocket.repository';
import { UserTable } from 'src/schema/tables/user.table';
import { AccessRequest, checkAccess, requireAccess } from 'src/utils/access';
import { getConfig, updateConfig } from 'src/utils/config';
@@ -82,6 +85,7 @@ export const BASE_SERVICE_DEPENDENCIES = [
MoveRepository,
NotificationRepository,
OAuthRepository,
OcrRepository,
PartnerRepository,
PersonRepository,
ProcessRepository,
@@ -89,6 +93,7 @@ export const BASE_SERVICE_DEPENDENCIES = [
ServerInfoRepository,
SessionRepository,
SharedLinkRepository,
SharedLinkAssetRepository,
StackRepository,
StorageRepository,
SyncRepository,
@@ -134,6 +139,7 @@ export class BaseService {
protected moveRepository: MoveRepository,
protected notificationRepository: NotificationRepository,
protected oauthRepository: OAuthRepository,
protected ocrRepository: OcrRepository,
protected partnerRepository: PartnerRepository,
protected personRepository: PersonRepository,
protected processRepository: ProcessRepository,
@@ -141,6 +147,7 @@ export class BaseService {
protected serverInfoRepository: ServerInfoRepository,
protected sessionRepository: SessionRepository,
protected sharedLinkRepository: SharedLinkRepository,
protected sharedLinkAssetRepository: SharedLinkAssetRepository,
protected stackRepository: StackRepository,
protected storageRepository: StorageRepository,
protected syncRepository: SyncRepository,
@@ -152,6 +159,7 @@ export class BaseService {
protected userRepository: UserRepository,
protected versionRepository: VersionHistoryRepository,
protected viewRepository: ViewRepository,
protected websocketRepository: WebsocketRepository,
) {
this.logger.setContext(this.constructor.name);
this.storageCore = StorageCore.create(
@@ -195,8 +203,8 @@ export class BaseService {
}
async createUser(dto: Insertable<UserTable> & { email: string }): Promise<UserAdmin> {
const user = await this.userRepository.getByEmail(dto.email);
if (user) {
const exists = await this.userRepository.getByEmail(dto.email);
if (exists) {
throw new BadRequestException('User exists');
}
@@ -215,6 +223,10 @@ export class BaseService {
payload.storageLabel = sanitize(payload.storageLabel.replaceAll('.', ''));
}
return this.userRepository.create(payload);
const user = await this.userRepository.create(payload);
await this.eventRepository.emit('UserCreate', user);
return user;
}
}

View File

@@ -20,6 +20,7 @@ import { MemoryService } from 'src/services/memory.service';
import { MetadataService } from 'src/services/metadata.service';
import { NotificationAdminService } from 'src/services/notification-admin.service';
import { NotificationService } from 'src/services/notification.service';
import { OcrService } from 'src/services/ocr.service';
import { PartnerService } from 'src/services/partner.service';
import { PersonService } from 'src/services/person.service';
import { SearchService } from 'src/services/search.service';
@@ -34,6 +35,7 @@ import { SyncService } from 'src/services/sync.service';
import { SystemConfigService } from 'src/services/system-config.service';
import { SystemMetadataService } from 'src/services/system-metadata.service';
import { TagService } from 'src/services/tag.service';
import { TelemetryService } from 'src/services/telemetry.service';
import { TimelineService } from 'src/services/timeline.service';
import { TrashService } from 'src/services/trash.service';
import { UserAdminService } from 'src/services/user-admin.service';
@@ -64,6 +66,7 @@ export const services = [
MetadataService,
NotificationService,
NotificationAdminService,
OcrService,
PartnerService,
PersonService,
SearchService,
@@ -78,6 +81,7 @@ export const services = [
SystemConfigService,
SystemMetadataService,
TagService,
TelemetryService,
TimelineService,
TrashService,
UserAdminService,

View File

@@ -24,7 +24,7 @@ describe(JobService.name, () => {
it('should update concurrency', () => {
sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });
expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(15);
expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(16);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FacialRecognition, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DuplicateDetection, 1);
expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BackgroundTask, 5);
@@ -98,6 +98,7 @@ describe(JobService.name, () => {
[QueueName.Library]: expectedJobStatus,
[QueueName.Notification]: expectedJobStatus,
[QueueName.BackupDatabase]: expectedJobStatus,
[QueueName.Ocr]: expectedJobStatus,
});
});
});
@@ -222,18 +223,16 @@ describe(JobService.name, () => {
});
});
describe('onJobStart', () => {
describe('onJobRun', () => {
it('should process a successful job', async () => {
mocks.job.run.mockResolvedValue(JobStatus.Success);
await sut.onJobStart(QueueName.BackgroundTask, {
name: JobName.FileDelete,
data: { files: ['path/to/file'] },
});
const job: JobItem = { name: JobName.FileDelete, data: { files: ['path/to/file'] } };
await sut.onJobRun(QueueName.BackgroundTask, job);
expect(mocks.telemetry.jobs.addToGauge).toHaveBeenCalledWith('immich.queues.background_task.active', 1);
expect(mocks.telemetry.jobs.addToGauge).toHaveBeenCalledWith('immich.queues.background_task.active', -1);
expect(mocks.telemetry.jobs.addToCounter).toHaveBeenCalledWith('immich.jobs.file_delete.success', 1);
expect(mocks.event.emit).toHaveBeenCalledWith('JobStart', QueueName.BackgroundTask, job);
expect(mocks.event.emit).toHaveBeenCalledWith('JobSuccess', { job, response: JobStatus.Success });
expect(mocks.event.emit).toHaveBeenCalledWith('JobComplete', QueueName.BackgroundTask, job);
expect(mocks.logger.error).not.toHaveBeenCalled();
});
@@ -270,12 +269,12 @@ describe(JobService.name, () => {
},
{
item: { name: JobName.AssetGenerateThumbnails, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.SmartSearch, JobName.AssetDetectFaces],
jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.Ocr],
stub: [assetStub.livePhotoStillAsset],
},
{
item: { name: JobName.AssetGenerateThumbnails, data: { id: 'asset-1', source: 'upload' } },
jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.AssetEncodeVideo],
jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.Ocr, JobName.AssetEncodeVideo],
stub: [assetStub.video],
},
{
@@ -300,7 +299,7 @@ describe(JobService.name, () => {
mocks.job.run.mockResolvedValue(JobStatus.Success);
await sut.onJobStart(QueueName.BackgroundTask, item);
await sut.onJobRun(QueueName.BackgroundTask, item);
if (jobs.length > 1) {
expect(mocks.job.queueAll).toHaveBeenCalledWith(
@@ -317,7 +316,7 @@ describe(JobService.name, () => {
it(`should not queue any jobs when ${item.name} fails`, async () => {
mocks.job.run.mockResolvedValue(JobStatus.Failed);
await sut.onJobStart(QueueName.BackgroundTask, item);
await sut.onJobRun(QueueName.BackgroundTask, item);
expect(mocks.job.queueAll).not.toHaveBeenCalled();
});

View File

@@ -1,6 +1,5 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { ClassConstructor } from 'class-transformer';
import { snakeCase } from 'lodash';
import { SystemConfig } from 'src/config';
import { OnEvent } from 'src/decorators';
import { mapAsset } from 'src/dtos/asset-response.dto';
@@ -186,7 +185,7 @@ export class JobService extends BaseService {
throw new BadRequestException(`Job is already running`);
}
this.telemetryRepository.jobs.addToCounter(`immich.queues.${snakeCase(name)}.started`, 1);
await this.eventRepository.emit('QueueStart', { name });
switch (name) {
case QueueName.VideoConversion: {
@@ -237,27 +236,29 @@ export class JobService extends BaseService {
return this.jobRepository.queue({ name: JobName.DatabaseBackup, data: { force } });
}
case QueueName.Ocr: {
return this.jobRepository.queue({ name: JobName.OcrQueueAll, data: { force } });
}
default: {
throw new BadRequestException(`Invalid job name: ${name}`);
}
}
}
@OnEvent({ name: 'JobStart' })
async onJobStart(...[queueName, job]: ArgsOf<'JobStart'>) {
const queueMetric = `immich.queues.${snakeCase(queueName)}.active`;
this.telemetryRepository.jobs.addToGauge(queueMetric, 1);
@OnEvent({ name: 'JobRun' })
async onJobRun(...[queueName, job]: ArgsOf<'JobRun'>) {
try {
const status = await this.jobRepository.run(job);
const jobMetric = `immich.jobs.${snakeCase(job.name)}.${status}`;
this.telemetryRepository.jobs.addToCounter(jobMetric, 1);
if (status === JobStatus.Success || status == JobStatus.Skipped) {
await this.eventRepository.emit('JobStart', queueName, job);
const response = await this.jobRepository.run(job);
await this.eventRepository.emit('JobSuccess', { job, response });
if (response && typeof response === 'string' && [JobStatus.Success, JobStatus.Skipped].includes(response)) {
await this.onDone(job);
}
} catch (error: Error | any) {
await this.eventRepository.emit('JobFailed', { job, error });
await this.eventRepository.emit('JobError', { job, error });
} finally {
this.telemetryRepository.jobs.addToGauge(queueMetric, -1);
await this.eventRepository.emit('JobComplete', queueName, job);
}
}
@@ -334,7 +335,7 @@ export class JobService extends BaseService {
const { id } = item.data;
const person = await this.personRepository.getById(id);
if (person) {
this.eventRepository.clientSend('on_person_thumbnail', person.ownerId, person.id);
this.websocketRepository.clientSend('on_person_thumbnail', person.ownerId, person.id);
}
break;
}
@@ -353,6 +354,7 @@ export class JobService extends BaseService {
const jobs: JobItem[] = [
{ name: JobName.SmartSearch, data: item.data },
{ name: JobName.AssetDetectFaces, data: item.data },
{ name: JobName.Ocr, data: item.data },
];
if (asset.type === AssetType.Video) {
@@ -361,10 +363,10 @@ export class JobService extends BaseService {
await this.jobRepository.queueAll(jobs);
if (asset.visibility === AssetVisibility.Timeline || asset.visibility === AssetVisibility.Archive) {
this.eventRepository.clientSend('on_upload_success', asset.ownerId, mapAsset(asset));
this.websocketRepository.clientSend('on_upload_success', asset.ownerId, mapAsset(asset));
if (asset.exifInfo) {
const exif = asset.exifInfo;
this.eventRepository.clientSend('AssetUploadReadyV1', asset.ownerId, {
this.websocketRepository.clientSend('AssetUploadReadyV1', asset.ownerId, {
// TODO remove `on_upload_success` and then modify the query to select only the required fields)
asset: {
id: asset.id,
@@ -424,11 +426,6 @@ export class JobService extends BaseService {
}
break;
}
case JobName.UserDelete: {
this.eventRepository.clientBroadcast('on_user_delete', item.data.id);
break;
}
}
}
}

View File

@@ -445,6 +445,7 @@ describe(MediaService.name, () => {
}),
);
});
it('should not skip intra frames for MTS file', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamMTS);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video);
@@ -462,6 +463,25 @@ describe(MediaService.name, () => {
);
});
// A video stream carrying "reserved" color metadata must be transcoded with an
// hevc_metadata bitstream filter that overrides primaries/matrix/transfer.
it('should override reserved color metadata', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStreamReserved);
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video);
await sut.handleGenerateThumbnails({ id: assetStub.video.id });
expect(mocks.media.transcode).toHaveBeenCalledWith(
'/original/path.ext',
expect.any(String),
expect.objectContaining({
inputOptions: expect.arrayContaining([
'-bsf:v hevc_metadata=colour_primaries=1:matrix_coefficients=1:transfer_characteristics=1',
]),
outputOptions: expect.any(Array),
progress: expect.any(Object),
twoPass: false,
}),
);
});
it('should use scaling divisible by 2 even when using quick sync', async () => {
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } });
@@ -841,6 +861,37 @@ describe(MediaService.name, () => {
);
});
// Non-web-friendly panoramas must still get a full-size preview (3 thumbnail
// generations total) even when fullsize generation is disabled in config.
// NOTE(review): the call uses assetStub.image.id while the stub returns
// panoramaTif — works because the mock ignores its argument, but consider
// using assetStub.panoramaTif.id for clarity.
it('should always generate full-size preview from non-web-friendly panoramas', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: false } } });
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.panoramaTif);
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.panoramaTif.originalPath, {
colorspace: Colorspace.Srgb,
orientation: undefined,
processInvalidImages: false,
size: undefined,
});
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
{
colorspace: Colorspace.Srgb,
format: ImageFormat.Jpeg,
quality: 80,
processInvalidImages: false,
raw: rawInfo,
},
expect.any(String),
);
});
it('should respect encoding options when generating full-size preview', async () => {
mocks.systemMetadata.get.mockResolvedValue({
image: { fullsize: { enabled: true, format: ImageFormat.Webp, quality: 90 } },

View File

@@ -271,7 +271,9 @@ export class MediaService extends BaseService {
// Handle embedded preview extraction for RAW files
const extractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
const extracted = extractEmbedded ? await this.extractImage(asset.originalPath, image.preview.size) : null;
const generateFullsize = image.fullsize.enabled && !mimeTypes.isWebSupportedImage(asset.originalPath);
const generateFullsize =
(image.fullsize.enabled || asset.exifInfo.projectionType == 'EQUIRECTANGULAR') &&
!mimeTypes.isWebSupportedImage(asset.originalPath);
const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(` .${extracted.format}`));
const { info, data, colorspace } = await this.decodeImage(

View File

@@ -30,6 +30,7 @@ import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { PersonTable } from 'src/schema/tables/person.table';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { isFaceImportEnabled } from 'src/utils/misc';
import { upsertTags } from 'src/utils/tag';
@@ -454,14 +455,17 @@ export class MetadataService extends BaseService {
* For RAW images in the CR2 or RAF format, the "ImageSize" value seems to be correct,
* but ImageWidth and ImageHeight are not correct (they contain the dimensions of the preview image).
*/
let [width, height] = exifTags.ImageSize?.split('x').map((dim) => Number.parseInt(dim) || undefined) || [];
let [width, height] =
exifTags.ImageSize?.toString()
?.split('x')
?.map((dim) => Number.parseInt(dim) || undefined) ?? [];
if (!width || !height) {
[width, height] = [exifTags.ImageWidth, exifTags.ImageHeight];
}
return { width, height };
}
private getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
if (asset.type === AssetType.Image) {
const hasSidecar = asset.files?.some(({ type }) => type === AssetFileType.Sidecar);
@@ -470,21 +474,15 @@ export class MetadataService extends BaseService {
}
}
return this.mergeExifTags(asset);
}
private async mergeExifTags(asset: {
originalPath: string;
files: AssetFile[];
type: AssetType;
}): Promise<ImmichTags> {
if (asset.files && asset.files.length > 1) {
throw new Error(`Asset ${asset.originalPath} has multiple sidecar files`);
}
const sidecarFile = getAssetFiles(asset.files).sidecarFile;
const [mediaTags, sidecarTags, videoTags] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
asset.files && asset.files.length > 0 ? this.metadataRepository.readTags(asset.files[0].path) : null,
sidecarFile ? this.metadataRepository.readTags(sidecarFile.path) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
]);

View File

@@ -14,6 +14,7 @@ const smtpTransport = Object.freeze<SystemConfig>({
ignoreCert: false,
host: 'localhost',
port: 587,
secure: false,
username: 'test',
password: 'test',
},

View File

@@ -7,6 +7,7 @@ import { NotificationService } from 'src/services/notification.service';
import { INotifyAlbumUpdateJob } from 'src/types';
import { albumStub } from 'test/fixtures/album.stub';
import { assetStub } from 'test/fixtures/asset.stub';
import { notificationStub } from 'test/fixtures/notification.stub';
import { userStub } from 'test/fixtures/user.stub';
import { newTestService, ServiceMocks } from 'test/utils';
@@ -39,6 +40,7 @@ const configs = {
ignoreCert: false,
host: 'localhost',
port: 587,
secure: false,
username: 'test',
password: 'test',
},
@@ -63,8 +65,8 @@ describe(NotificationService.name, () => {
it('should emit client and server events', () => {
const update = { oldConfig: defaults, newConfig: defaults };
expect(sut.onConfigUpdate(update)).toBeUndefined();
expect(mocks.event.clientBroadcast).toHaveBeenCalledWith('on_config_update');
expect(mocks.event.serverSend).toHaveBeenCalledWith('ConfigUpdate', update);
expect(mocks.websocket.clientBroadcast).toHaveBeenCalledWith('on_config_update');
expect(mocks.websocket.serverSend).toHaveBeenCalledWith('ConfigUpdate', update);
});
});
@@ -123,7 +125,7 @@ describe(NotificationService.name, () => {
describe('onAssetHide', () => {
it('should send connected clients an event', () => {
sut.onAssetHide({ assetId: 'asset-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_hidden', 'user-id', 'asset-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_hidden', 'user-id', 'asset-id');
});
});
@@ -176,67 +178,67 @@ describe(NotificationService.name, () => {
it('should send a on_session_delete client event', () => {
vi.useFakeTimers();
sut.onSessionDelete({ sessionId: 'id' });
expect(mocks.event.clientSend).not.toHaveBeenCalled();
expect(mocks.websocket.clientSend).not.toHaveBeenCalled();
vi.advanceTimersByTime(500);
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_session_delete', 'id', 'id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_session_delete', 'id', 'id');
});
});
describe('onAssetTrash', () => {
it('should send connected clients an event', () => {
it('should send connected clients an websocket', () => {
sut.onAssetTrash({ assetId: 'asset-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_trash', 'user-id', ['asset-id']);
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_trash', 'user-id', ['asset-id']);
});
});
describe('onAssetDelete', () => {
it('should send connected clients an event', () => {
sut.onAssetDelete({ assetId: 'asset-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_delete', 'user-id', 'asset-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_delete', 'user-id', 'asset-id');
});
});
describe('onAssetsTrash', () => {
it('should send connected clients an event', () => {
sut.onAssetsTrash({ assetIds: ['asset-id'], userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_trash', 'user-id', ['asset-id']);
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_trash', 'user-id', ['asset-id']);
});
});
describe('onAssetsRestore', () => {
it('should send connected clients an event', () => {
sut.onAssetsRestore({ assetIds: ['asset-id'], userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_restore', 'user-id', ['asset-id']);
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_restore', 'user-id', ['asset-id']);
});
});
describe('onStackCreate', () => {
it('should send connected clients an event', () => {
sut.onStackCreate({ stackId: 'stack-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
});
});
describe('onStackUpdate', () => {
it('should send connected clients an event', () => {
sut.onStackUpdate({ stackId: 'stack-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
});
});
describe('onStackDelete', () => {
it('should send connected clients an event', () => {
sut.onStackDelete({ stackId: 'stack-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
});
});
describe('onStacksDelete', () => {
it('should send connected clients an event', () => {
sut.onStacksDelete({ stackIds: ['stack-id'], userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
});
});
@@ -282,6 +284,7 @@ describe(NotificationService.name, () => {
},
],
});
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
});
@@ -297,6 +300,7 @@ describe(NotificationService.name, () => {
},
],
});
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
});
@@ -313,6 +317,7 @@ describe(NotificationService.name, () => {
],
});
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);
@@ -334,6 +339,7 @@ describe(NotificationService.name, () => {
],
});
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
@@ -363,6 +369,7 @@ describe(NotificationService.name, () => {
],
});
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([
{ id: '1', type: AssetFileType.Thumbnail, path: 'path-to-thumb.jpg' },
@@ -394,6 +401,7 @@ describe(NotificationService.name, () => {
],
});
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([assetStub.image.files[2]]);
@@ -431,6 +439,7 @@ describe(NotificationService.name, () => {
albumUsers: [{ user: { id: userStub.user1.id } } as AlbumUser],
});
mocks.user.get.mockResolvedValueOnce(userStub.user1);
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
@@ -453,6 +462,7 @@ describe(NotificationService.name, () => {
},
],
});
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
@@ -475,6 +485,7 @@ describe(NotificationService.name, () => {
},
],
});
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);
@@ -489,6 +500,7 @@ describe(NotificationService.name, () => {
albumUsers: [{ user: { id: userStub.user1.id } } as AlbumUser],
});
mocks.user.get.mockResolvedValue(userStub.user1);
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

View File

@@ -1,5 +1,6 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent, OnJob } from 'src/decorators';
import { MapAlbumDto } from 'src/dtos/album.dto';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
@@ -77,8 +78,8 @@ export class NotificationService extends BaseService {
await this.notificationRepository.cleanup();
}
@OnEvent({ name: 'JobFailed' })
async onJobFailed({ job, error }: ArgOf<'JobFailed'>) {
@OnEvent({ name: 'JobError' })
async onJobError({ job, error }: ArgOf<'JobError'>) {
const admin = await this.userRepository.getAdmin();
if (!admin) {
return;
@@ -97,7 +98,7 @@ export class NotificationService extends BaseService {
description: `Job ${[job.name]} failed with error: ${errorMessage}`,
});
this.eventRepository.clientSend('on_notification', admin.id, mapNotification(item));
this.websocketRepository.clientSend('on_notification', admin.id, mapNotification(item));
break;
}
@@ -109,8 +110,8 @@ export class NotificationService extends BaseService {
@OnEvent({ name: 'ConfigUpdate' })
onConfigUpdate({ oldConfig, newConfig }: ArgOf<'ConfigUpdate'>) {
this.eventRepository.clientBroadcast('on_config_update');
this.eventRepository.serverSend('ConfigUpdate', { oldConfig, newConfig });
this.websocketRepository.clientBroadcast('on_config_update');
this.websocketRepository.serverSend('ConfigUpdate', { oldConfig, newConfig });
}
@OnEvent({ name: 'ConfigValidate', priority: -100 })
@@ -130,7 +131,7 @@ export class NotificationService extends BaseService {
@OnEvent({ name: 'AssetHide' })
onAssetHide({ assetId, userId }: ArgOf<'AssetHide'>) {
this.eventRepository.clientSend('on_asset_hidden', userId, assetId);
this.websocketRepository.clientSend('on_asset_hidden', userId, assetId);
}
@OnEvent({ name: 'AssetShow' })
@@ -140,17 +141,17 @@ export class NotificationService extends BaseService {
@OnEvent({ name: 'AssetTrash' })
onAssetTrash({ assetId, userId }: ArgOf<'AssetTrash'>) {
this.eventRepository.clientSend('on_asset_trash', userId, [assetId]);
this.websocketRepository.clientSend('on_asset_trash', userId, [assetId]);
}
@OnEvent({ name: 'AssetDelete' })
onAssetDelete({ assetId, userId }: ArgOf<'AssetDelete'>) {
this.eventRepository.clientSend('on_asset_delete', userId, assetId);
this.websocketRepository.clientSend('on_asset_delete', userId, assetId);
}
@OnEvent({ name: 'AssetTrashAll' })
onAssetsTrash({ assetIds, userId }: ArgOf<'AssetTrashAll'>) {
this.eventRepository.clientSend('on_asset_trash', userId, assetIds);
this.websocketRepository.clientSend('on_asset_trash', userId, assetIds);
}
@OnEvent({ name: 'AssetMetadataExtracted' })
@@ -161,33 +162,37 @@ export class NotificationService extends BaseService {
const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([assetId]);
if (asset) {
this.eventRepository.clientSend('on_asset_update', userId, mapAsset(asset));
this.websocketRepository.clientSend(
'on_asset_update',
userId,
mapAsset(asset, { auth: { user: { id: userId } } as AuthDto }),
);
}
}
@OnEvent({ name: 'AssetRestoreAll' })
onAssetsRestore({ assetIds, userId }: ArgOf<'AssetRestoreAll'>) {
this.eventRepository.clientSend('on_asset_restore', userId, assetIds);
this.websocketRepository.clientSend('on_asset_restore', userId, assetIds);
}
@OnEvent({ name: 'StackCreate' })
onStackCreate({ userId }: ArgOf<'StackCreate'>) {
this.eventRepository.clientSend('on_asset_stack_update', userId);
this.websocketRepository.clientSend('on_asset_stack_update', userId);
}
@OnEvent({ name: 'StackUpdate' })
onStackUpdate({ userId }: ArgOf<'StackUpdate'>) {
this.eventRepository.clientSend('on_asset_stack_update', userId);
this.websocketRepository.clientSend('on_asset_stack_update', userId);
}
@OnEvent({ name: 'StackDelete' })
onStackDelete({ userId }: ArgOf<'StackDelete'>) {
this.eventRepository.clientSend('on_asset_stack_update', userId);
this.websocketRepository.clientSend('on_asset_stack_update', userId);
}
@OnEvent({ name: 'StackDeleteAll' })
onStacksDelete({ userId }: ArgOf<'StackDeleteAll'>) {
this.eventRepository.clientSend('on_asset_stack_update', userId);
this.websocketRepository.clientSend('on_asset_stack_update', userId);
}
@OnEvent({ name: 'UserSignup' })
@@ -197,6 +202,11 @@ export class NotificationService extends BaseService {
}
}
@OnEvent({ name: 'UserDelete' })
onUserDelete({ id }: ArgOf<'UserDelete'>) {
this.websocketRepository.clientBroadcast('on_user_delete', id);
}
@OnEvent({ name: 'AlbumUpdate' })
async onAlbumUpdate({ id, recipientId }: ArgOf<'AlbumUpdate'>) {
await this.jobRepository.removeJob(JobName.NotifyAlbumUpdate, `${id}/${recipientId}`);
@@ -214,7 +224,7 @@ export class NotificationService extends BaseService {
@OnEvent({ name: 'SessionDelete' })
onSessionDelete({ sessionId }: ArgOf<'SessionDelete'>) {
// after the response is sent
setTimeout(() => this.eventRepository.clientSend('on_session_delete', sessionId, sessionId), 500);
setTimeout(() => this.websocketRepository.clientSend('on_session_delete', sessionId, sessionId), 500);
}
async sendTestEmail(id: string, dto: SystemConfigSmtpDto, tempTemplate?: string) {
@@ -295,6 +305,8 @@ export class NotificationService extends BaseService {
return JobStatus.Skipped;
}
await this.sendAlbumLocalNotification(album, recipientId, NotificationType.AlbumInvite, album.owner.name);
const { emailNotifications } = getPreferences(recipient.metadata);
if (!emailNotifications.enabled || !emailNotifications.albumInvite) {
@@ -344,6 +356,8 @@ export class NotificationService extends BaseService {
return JobStatus.Skipped;
}
await this.sendAlbumLocalNotification(album, recipientId, NotificationType.AlbumUpdate);
const attachment = await this.getAlbumThumbnailAttachment(album);
const { server, templates } = await this.getConfig({ withCache: false });
@@ -431,4 +445,25 @@ export class NotificationService extends BaseService {
cid: 'album-thumbnail',
};
}
/**
 * Creates an in-app notification for an album invite or update and pushes it
 * to the recipient over the websocket.
 *
 * @param album album the notification refers to (name is embedded in the text)
 * @param userId recipient of the notification
 * @param type whether this is an invite or an update notification
 * @param senderName display name of the inviter; only used for invites
 */
private async sendAlbumLocalNotification(
  album: MapAlbumDto,
  userId: string,
  type: NotificationType.AlbumInvite | NotificationType.AlbumUpdate,
  senderName?: string,
) {
  let level: NotificationLevel;
  let title: string;
  let description: string;
  if (type === NotificationType.AlbumInvite) {
    level = NotificationLevel.Success;
    title = 'Shared Album Invitation';
    description = `${senderName} shared an album (${album.albumName}) with you`;
  } else {
    level = NotificationLevel.Info;
    title = 'Shared Album Update';
    description = `New media has been added to the album (${album.albumName})`;
  }

  const notification = await this.notificationRepository.create({
    userId,
    type,
    level,
    title,
    description,
    // The client uses the album id to deep-link into the album.
    data: JSON.stringify({ albumId: album.id }),
  });

  this.websocketRepository.clientSend('on_notification', userId, mapNotification(notification));
}
}

View File

@@ -0,0 +1,177 @@
import { AssetVisibility, ImmichWorker, JobName, JobStatus } from 'src/enum';
import { OcrService } from 'src/services/ocr.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
// Unit tests for OcrService: OCR job queueing and per-asset OCR extraction.
describe(OcrService.name, () => {
  let sut: OcrService;
  let mocks: ServiceMocks;

  beforeEach(() => {
    ({ sut, mocks } = newTestService(OcrService));
    // OCR jobs are processed on the microservices worker.
    mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('handleQueueOcr', () => {
    it('should do nothing if machine learning is disabled', async () => {
      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);

      await sut.handleQueueOcr({ force: false });

      // NOTE(review): asserting on setDimensionSize looks carried over from the smart-search
      // spec; checking that streamForOcrJob/queueAll were not called would be more direct — confirm.
      expect(mocks.database.setDimensionSize).not.toHaveBeenCalled();
    });

    it('should queue the assets without ocr', async () => {
      mocks.assetJob.streamForOcrJob.mockReturnValue(makeStream([assetStub.image]));

      await sut.handleQueueOcr({ force: false });

      expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.Ocr, data: { id: assetStub.image.id } }]);
      // force=false streams only assets that have not been processed yet
      expect(mocks.assetJob.streamForOcrJob).toHaveBeenCalledWith(false);
    });

    it('should queue all the assets', async () => {
      mocks.assetJob.streamForOcrJob.mockReturnValue(makeStream([assetStub.image]));

      await sut.handleQueueOcr({ force: true });

      expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.Ocr, data: { id: assetStub.image.id } }]);
      // force=true streams every asset for re-processing
      expect(mocks.assetJob.streamForOcrJob).toHaveBeenCalledWith(true);
    });
  });

  describe('handleOcr', () => {
    it('should do nothing if machine learning is disabled', async () => {
      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);

      expect(await sut.handleOcr({ id: '123' })).toEqual(JobStatus.Skipped);

      expect(mocks.asset.getByIds).not.toHaveBeenCalled();
      // NOTE(review): encodeImage is the smart-search ML call; asserting on
      // mocks.machineLearning.ocr would match this service more closely — confirm.
      expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled();
    });

    it('should skip assets without a resize path', async () => {
      // A missing preview file means there is no image to run OCR against.
      mocks.assetJob.getForOcr.mockResolvedValue({ visibility: AssetVisibility.Timeline, previewFile: null });

      expect(await sut.handleOcr({ id: assetStub.noResizePath.id })).toEqual(JobStatus.Failed);

      expect(mocks.ocr.upsert).not.toHaveBeenCalled();
      expect(mocks.machineLearning.ocr).not.toHaveBeenCalled();
    });

    it('should save the returned objects', async () => {
      // Two detections: the box array is flat, 8 coordinates (4 corner points) per detection.
      mocks.machineLearning.ocr.mockResolvedValue({
        box: [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160],
        boxScore: [0.9, 0.8],
        text: ['One Two Three', 'Four Five'],
        textScore: [0.95, 0.85],
      });
      mocks.assetJob.getForOcr.mockResolvedValue({
        visibility: AssetVisibility.Timeline,
        previewFile: assetStub.image.files[1].path,
      });

      expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Success);

      // Default OCR model configuration is forwarded to the ML service.
      expect(mocks.machineLearning.ocr).toHaveBeenCalledWith(
        '/uploads/user-id/thumbs/path.jpg',
        expect.objectContaining({
          modelName: 'PP-OCRv5_mobile',
          minDetectionScore: 0.5,
          minRecognitionScore: 0.8,
          maxResolution: 736,
        }),
      );
      expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, [
        {
          assetId: assetStub.image.id,
          boxScore: 0.9,
          text: 'One Two Three',
          textScore: 0.95,
          x1: 10,
          y1: 20,
          x2: 30,
          y2: 40,
          x3: 50,
          y3: 60,
          x4: 70,
          y4: 80,
        },
        {
          assetId: assetStub.image.id,
          boxScore: 0.8,
          text: 'Four Five',
          textScore: 0.85,
          x1: 90,
          y1: 100,
          x2: 110,
          y2: 120,
          x3: 130,
          y3: 140,
          x4: 150,
          y4: 160,
        },
      ]);
    });

    it('should apply config settings', async () => {
      mocks.systemMetadata.get.mockResolvedValue({
        machineLearning: {
          enabled: true,
          ocr: {
            modelName: 'PP-OCRv5_server',
            enabled: true,
            minDetectionScore: 0.8,
            minRecognitionScore: 0.9,
            maxResolution: 1500,
          },
        },
      });
      // Empty OCR response still results in an upsert (clears stale rows).
      mocks.machineLearning.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
      mocks.assetJob.getForOcr.mockResolvedValue({
        visibility: AssetVisibility.Timeline,
        previewFile: assetStub.image.files[1].path,
      });

      expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Success);

      expect(mocks.machineLearning.ocr).toHaveBeenCalledWith(
        '/uploads/user-id/thumbs/path.jpg',
        expect.objectContaining({
          modelName: 'PP-OCRv5_server',
          minDetectionScore: 0.8,
          minRecognitionScore: 0.9,
          maxResolution: 1500,
        }),
      );
      expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, []);
    });

    it('should skip invisible assets', async () => {
      mocks.assetJob.getForOcr.mockResolvedValue({
        visibility: AssetVisibility.Hidden,
        previewFile: assetStub.image.files[1].path,
      });

      expect(await sut.handleOcr({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.Skipped);

      expect(mocks.machineLearning.ocr).not.toHaveBeenCalled();
      expect(mocks.ocr.upsert).not.toHaveBeenCalled();
    });

    it('should fail if asset could not be found', async () => {
      mocks.assetJob.getForOcr.mockResolvedValue(void 0);

      expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Failed);

      expect(mocks.machineLearning.ocr).not.toHaveBeenCalled();
      expect(mocks.ocr.upsert).not.toHaveBeenCalled();
    });
  });
});

View File

@@ -0,0 +1,86 @@
import { Injectable } from '@nestjs/common';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { OnJob } from 'src/decorators';
import { AssetVisibility, JobName, JobStatus, QueueName } from 'src/enum';
import { OCR } from 'src/repositories/machine-learning.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { isOcrEnabled } from 'src/utils/misc';
@Injectable()
export class OcrService extends BaseService {
  /**
   * Queues an OCR job for every eligible asset. With force=true, all existing
   * OCR rows are dropped first and every asset is re-queued; otherwise only
   * assets without OCR data are streamed.
   */
  @OnJob({ name: JobName.OcrQueueAll, queue: QueueName.Ocr })
  async handleQueueOcr({ force }: JobOf<JobName.OcrQueueAll>): Promise<JobStatus> {
    const { machineLearning } = await this.getConfig({ withCache: false });
    if (!isOcrEnabled(machineLearning)) {
      return JobStatus.Skipped;
    }

    if (force) {
      await this.ocrRepository.deleteAll();
    }

    // Queue in batches so a large library does not buffer every job in memory.
    let batch: JobItem[] = [];
    for await (const asset of this.assetJobRepository.streamForOcrJob(force)) {
      batch.push({ name: JobName.Ocr, data: { id: asset.id } });
      if (batch.length >= JOBS_ASSET_PAGINATION_SIZE) {
        await this.jobRepository.queueAll(batch);
        batch = [];
      }
    }
    // Flush whatever remains after the stream ends.
    await this.jobRepository.queueAll(batch);

    return JobStatus.Success;
  }

  /**
   * Runs OCR on a single asset's preview image and persists the detected text
   * regions. Skips hidden assets and fails when no preview file exists.
   */
  @OnJob({ name: JobName.Ocr, queue: QueueName.Ocr })
  async handleOcr({ id }: JobOf<JobName.Ocr>): Promise<JobStatus> {
    const { machineLearning } = await this.getConfig({ withCache: true });
    if (!isOcrEnabled(machineLearning)) {
      return JobStatus.Skipped;
    }

    const asset = await this.assetJobRepository.getForOcr(id);
    if (!asset || !asset.previewFile) {
      // OCR runs against the preview image, so there is nothing to process.
      return JobStatus.Failed;
    }
    if (asset.visibility === AssetVisibility.Hidden) {
      return JobStatus.Skipped;
    }

    const ocrResults = await this.machineLearningRepository.ocr(asset.previewFile, machineLearning.ocr);
    await this.ocrRepository.upsert(id, this.parseOcrResults(id, ocrResults));
    // Record when OCR last ran so the non-forced queue job skips this asset.
    await this.assetRepository.upsertJobStatus({ assetId: id, ocrAt: new Date() });
    this.logger.debug(`Processed ${ocrResults.text.length} OCR result(s) for ${id}`);

    return JobStatus.Success;
  }

  // Converts the ML response (parallel arrays; 8 flat box coordinates per
  // detection, i.e. 4 corner points) into one row per detected text region.
  private parseOcrResults(id: string, { box, boxScore, text, textScore }: OCR) {
    return text.map((value, index) => {
      const offset = index * 8;
      return {
        assetId: id,
        x1: box[offset],
        y1: box[offset + 1],
        x2: box[offset + 2],
        y2: box[offset + 3],
        x3: box[offset + 4],
        y3: box[offset + 5],
        x4: box[offset + 6],
        y4: box[offset + 7],
        boxScore: boxScore[index],
        textScore: textScore[index],
        text: value,
      };
    });
  }
}

View File

@@ -179,6 +179,26 @@ describe(SearchService.name, () => {
).resolves.toEqual(['Fujifilm X100VI', null]);
expect(mocks.search.getCameraModels).toHaveBeenCalledWith([authStub.user1.user.id], expect.anything());
});
it('should return search suggestions for camera lens model', async () => {
mocks.search.getCameraLensModels.mockResolvedValue(['10-24mm']);
mocks.partner.getAll.mockResolvedValue([]);
await expect(
sut.getSearchSuggestions(authStub.user1, { includeNull: false, type: SearchSuggestionType.CAMERA_LENS_MODEL }),
).resolves.toEqual(['10-24mm']);
expect(mocks.search.getCameraLensModels).toHaveBeenCalledWith([authStub.user1.user.id], expect.anything());
});
it('should return search suggestions for camera lens model (including null)', async () => {
mocks.search.getCameraLensModels.mockResolvedValue(['10-24mm']);
mocks.partner.getAll.mockResolvedValue([]);
await expect(
sut.getSearchSuggestions(authStub.user1, { includeNull: true, type: SearchSuggestionType.CAMERA_LENS_MODEL }),
).resolves.toEqual(['10-24mm', null]);
expect(mocks.search.getCameraLensModels).toHaveBeenCalledWith([authStub.user1.user.id], expect.anything());
});
});
describe('searchSmart', () => {

View File

@@ -177,6 +177,9 @@ export class SearchService extends BaseService {
case SearchSuggestionType.CAMERA_MODEL: {
return this.searchRepository.getCameraModels(userIds, dto);
}
case SearchSuggestionType.CAMERA_LENS_MODEL: {
return this.searchRepository.getCameraLensModels(userIds, dto);
}
default: {
return Promise.resolve([]);
}

View File

@@ -141,6 +141,7 @@ describe(ServerService.name, () => {
reverseGeocoding: true,
oauth: false,
oauthAutoLaunch: false,
ocr: true,
passwordLogin: true,
search: true,
sidecar: true,

View File

@@ -19,7 +19,12 @@ import { UserStatsQueryResponse } from 'src/repositories/user.repository';
import { BaseService } from 'src/services/base.service';
import { asHumanReadable } from 'src/utils/bytes';
import { mimeTypes } from 'src/utils/mime-types';
import { isDuplicateDetectionEnabled, isFacialRecognitionEnabled, isSmartSearchEnabled } from 'src/utils/misc';
import {
isDuplicateDetectionEnabled,
isFacialRecognitionEnabled,
isOcrEnabled,
isSmartSearchEnabled,
} from 'src/utils/misc';
@Injectable()
export class ServerService extends BaseService {
@@ -97,6 +102,7 @@ export class ServerService extends BaseService {
trash: trash.enabled,
oauth: oauth.enabled,
oauthAutoLaunch: oauth.autoLaunch,
ocr: isOcrEnabled(machineLearning),
passwordLogin: passwordLogin.enabled,
configFile: !!configFile,
email: notifications.smtp.enabled,

View File

@@ -43,17 +43,13 @@ describe('SessionService', () => {
describe('logoutDevices', () => {
it('should logout all devices', async () => {
const currentSession = factory.session();
const otherSession = factory.session();
const auth = factory.auth({ session: currentSession });
mocks.session.getByUserId.mockResolvedValue([currentSession, otherSession]);
mocks.session.delete.mockResolvedValue();
mocks.session.invalidate.mockResolvedValue();
await sut.deleteAll(auth);
expect(mocks.session.getByUserId).toHaveBeenCalledWith(auth.user.id);
expect(mocks.session.delete).toHaveBeenCalledWith(otherSession.id);
expect(mocks.session.delete).not.toHaveBeenCalledWith(currentSession.id);
expect(mocks.session.invalidate).toHaveBeenCalledWith({ userId: auth.user.id, excludeId: currentSession.id });
});
});

View File

@@ -1,6 +1,6 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { OnJob } from 'src/decorators';
import { OnEvent, OnJob } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import {
SessionCreateDto,
@@ -10,6 +10,7 @@ import {
mapSession,
} from 'src/dtos/session.dto';
import { JobName, JobStatus, Permission, QueueName } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
@Injectable()
@@ -69,18 +70,19 @@ export class SessionService extends BaseService {
await this.sessionRepository.delete(id);
}
async deleteAll(auth: AuthDto): Promise<void> {
const userId = auth.user.id;
const currentSessionId = auth.session?.id;
await this.sessionRepository.invalidate({ userId, excludeId: currentSessionId });
}
async lock(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.SessionLock, ids: [id] });
await this.sessionRepository.update(id, { pinExpiresAt: null });
}
async deleteAll(auth: AuthDto): Promise<void> {
const sessions = await this.sessionRepository.getByUserId(auth.user.id);
for (const session of sessions) {
if (session.id === auth.session?.id) {
continue;
}
await this.sessionRepository.delete(session.id);
}
@OnEvent({ name: 'AuthChangePassword' })
async onAuthChangePassword({ userId, currentSessionId }: ArgOf<'AuthChangePassword'>): Promise<void> {
await this.sessionRepository.invalidate({ userId, excludeId: currentSessionId });
}
}

View File

@@ -300,6 +300,7 @@ describe(SharedLinkService.name, () => {
mocks.sharedLink.get.mockResolvedValue(_.cloneDeep(sharedLinkStub.individual));
mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.individual);
mocks.sharedLink.update.mockResolvedValue(sharedLinkStub.individual);
mocks.sharedLinkAsset.remove.mockResolvedValue([assetStub.image.id]);
await expect(
sut.removeAssets(authStub.admin, 'link-1', { assetIds: [assetStub.image.id, 'asset-2'] }),
@@ -308,6 +309,7 @@ describe(SharedLinkService.name, () => {
{ assetId: 'asset-2', success: false, error: AssetIdErrorReason.NOT_FOUND },
]);
expect(mocks.sharedLinkAsset.remove).toHaveBeenCalledWith('link-1', [assetStub.image.id, 'asset-2']);
expect(mocks.sharedLink.update).toHaveBeenCalledWith({ ...sharedLinkStub.individual, assets: [] });
});
});

View File

@@ -175,10 +175,12 @@ export class SharedLinkService extends BaseService {
throw new BadRequestException('Invalid shared link type');
}
const removedAssetIds = await this.sharedLinkAssetRepository.remove(id, dto.assetIds);
const results: AssetIdsResponseDto[] = [];
for (const assetId of dto.assetIds) {
const hasAsset = sharedLink.assets.find((asset) => asset.id === assetId);
if (!hasAsset) {
const wasRemoved = removedAssetIds.find((id) => id === assetId);
if (!wasRemoved) {
results.push({ assetId, success: false, error: AssetIdErrorReason.NOT_FOUND });
continue;
}

View File

@@ -115,7 +115,7 @@ export class StorageService extends BaseService {
if (!path.startsWith(previous)) {
throw new Error(
'Detected an inconsistent media location. For more information, see https://immich.app/errors#inconsistent-media-location',
'Detected an inconsistent media location. For more information, see https://docs.immich.app/errors#inconsistent-media-location',
);
}

View File

@@ -39,6 +39,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
[QueueName.ThumbnailGeneration]: { concurrency: 3 },
[QueueName.VideoConversion]: { concurrency: 1 },
[QueueName.Notification]: { concurrency: 5 },
[QueueName.Ocr]: { concurrency: 1 },
},
backup: {
database: {
@@ -102,6 +103,13 @@ const updatedConfig = Object.freeze<SystemConfig>({
maxDistance: 0.5,
minFaces: 3,
},
ocr: {
enabled: true,
modelName: 'PP-OCRv5_mobile',
minDetectionScore: 0.5,
minRecognitionScore: 0.8,
maxResolution: 736,
},
},
map: {
enabled: true,
@@ -197,6 +205,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
transport: {
host: '',
port: 587,
secure: false,
username: '',
password: '',
ignoreCert: false,

View File

@@ -0,0 +1,59 @@
import { snakeCase } from 'lodash';
import { OnEvent } from 'src/decorators';
import { ImmichWorker, JobStatus } from 'src/enum';
import { ArgOf, ArgsOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
/** Records application metrics (user counts, queue and job activity) in response to system events. */
export class TelemetryService extends BaseService {
  @OnEvent({ name: 'AppBootstrap', workers: [ImmichWorker.Api] })
  async onBootstrap(): Promise<void> {
    // Seed the user-count gauge with the current total when the API worker starts.
    const total = await this.userRepository.getCount();
    this.telemetryRepository.api.addToGauge('immich.users.total', total);
  }

  @OnEvent({ name: 'UserCreate' })
  onUserCreate() {
    this.adjustUserTotal(1);
  }

  @OnEvent({ name: 'UserTrash' })
  onUserTrash() {
    this.adjustUserTotal(-1);
  }

  @OnEvent({ name: 'UserRestore' })
  onUserRestore() {
    this.adjustUserTotal(1);
  }

  @OnEvent({ name: 'JobStart' })
  onJobStart(...[queueName]: ArgsOf<'JobStart'>) {
    this.adjustActiveJobs(queueName, 1);
  }

  @OnEvent({ name: 'JobSuccess' })
  onJobSuccess({ job, response }: ArgOf<'JobSuccess'>) {
    // Only count responses that are actual JobStatus values; jobs may return other payloads.
    if (response && Object.values(JobStatus).includes(response as JobStatus)) {
      this.telemetryRepository.jobs.addToCounter(`immich.jobs.${snakeCase(job.name)}.${response}`, 1);
    }
  }

  @OnEvent({ name: 'JobError' })
  onJobError({ job }: ArgOf<'JobError'>) {
    // Errors are always tallied under the failed status.
    this.telemetryRepository.jobs.addToCounter(`immich.jobs.${snakeCase(job.name)}.${JobStatus.Failed}`, 1);
  }

  @OnEvent({ name: 'JobComplete' })
  onJobComplete(...[queueName]: ArgsOf<'JobComplete'>) {
    this.adjustActiveJobs(queueName, -1);
  }

  @OnEvent({ name: 'QueueStart' })
  onQueueStart({ name }: ArgOf<'QueueStart'>) {
    this.telemetryRepository.jobs.addToCounter(`immich.queues.${snakeCase(name)}.started`, 1);
  }

  // Moves the global user-count gauge by the given delta.
  private adjustUserTotal(delta: number) {
    this.telemetryRepository.api.addToGauge(`immich.users.total`, delta);
  }

  // Moves the per-queue active-job gauge by the given delta.
  private adjustActiveJobs(queueName: string, delta: number) {
    this.telemetryRepository.jobs.addToGauge(`immich.queues.${snakeCase(queueName)}.active`, delta);
  }
}

View File

@@ -36,10 +36,14 @@ describe(TimelineService.name, () => {
);
expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['album-id']));
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
timeBucket: 'bucket',
albumId: 'album-id',
});
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
{
timeBucket: 'bucket',
albumId: 'album-id',
},
authStub.admin,
);
});
it('should return the assets for a archive time bucket if user has archive.read', async () => {
@@ -60,6 +64,7 @@ describe(TimelineService.name, () => {
visibility: AssetVisibility.Archive,
userIds: [authStub.admin.user.id],
}),
authStub.admin,
);
});
@@ -76,12 +81,16 @@ describe(TimelineService.name, () => {
withPartners: true,
}),
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
timeBucket: 'bucket',
visibility: AssetVisibility.Timeline,
withPartners: true,
userIds: [authStub.admin.user.id],
});
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
{
timeBucket: 'bucket',
visibility: AssetVisibility.Timeline,
withPartners: true,
userIds: [authStub.admin.user.id],
},
authStub.admin,
);
});
it('should check permissions to read tag', async () => {
@@ -96,11 +105,15 @@ describe(TimelineService.name, () => {
tagId: 'tag-123',
}),
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
tagId: 'tag-123',
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
});
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
{
tagId: 'tag-123',
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
},
authStub.admin,
);
});
it('should return the assets for a library time bucket if user has library.read', async () => {
@@ -119,6 +132,7 @@ describe(TimelineService.name, () => {
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
}),
authStub.admin,
);
});

View File

@@ -21,7 +21,7 @@ export class TimelineService extends BaseService {
const timeBucketOptions = await this.buildTimeBucketOptions(auth, { ...dto });
// TODO: use id cursor for pagination
const bucket = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
const bucket = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions, auth);
return bucket.assets;
}

View File

@@ -2,6 +2,7 @@ import { BadRequestException, ForbiddenException, Injectable } from '@nestjs/com
import { SALT_ROUNDS } from 'src/constants';
import { AssetStatsDto, AssetStatsResponseDto, mapStats } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { SessionResponseDto, mapSession } from 'src/dtos/session.dto';
import { UserPreferencesResponseDto, UserPreferencesUpdateDto, mapPreferences } from 'src/dtos/user-preferences.dto';
import {
UserAdminCreateDto,
@@ -103,6 +104,8 @@ export class UserAdminService extends BaseService {
const status = force ? UserStatus.Removing : UserStatus.Deleted;
const user = await this.userRepository.update(id, { status, deletedAt: new Date() });
await this.eventRepository.emit('UserTrash', user);
if (force) {
await this.jobRepository.queue({ name: JobName.UserDelete, data: { id: user.id, force } });
}
@@ -114,9 +117,15 @@ export class UserAdminService extends BaseService {
await this.findOrFail(id, { withDeleted: true });
await this.albumRepository.restoreAll(id);
const user = await this.userRepository.restore(id);
await this.eventRepository.emit('UserRestore', user);
return mapUserAdmin(user);
}
async getSessions(auth: AuthDto, id: string): Promise<SessionResponseDto[]> {
const sessions = await this.sessionRepository.getByUserId(id);
return sessions.map((session) => mapSession(session));
}
async getStatistics(auth: AuthDto, id: string, dto: AssetStatsDto): Promise<AssetStatsResponseDto> {
const stats = await this.assetRepository.getStatistics(id, dto);
return mapStats(stats);

View File

@@ -228,17 +228,17 @@ export class UserService extends BaseService {
}
@OnJob({ name: JobName.UserDelete, queue: QueueName.BackgroundTask })
async handleUserDelete({ id, force }: JobOf<JobName.UserDelete>): Promise<JobStatus> {
async handleUserDelete({ id, force }: JobOf<JobName.UserDelete>) {
const config = await this.getConfig({ withCache: false });
const user = await this.userRepository.get(id, { withDeleted: true });
if (!user) {
return JobStatus.Failed;
return;
}
// just for extra protection here
if (!force && !this.isReadyForDeletion(user, config.user.deleteDelay)) {
this.logger.warn(`Skipped user that was not ready for deletion: id=${id}`);
return JobStatus.Skipped;
return;
}
this.logger.log(`Deleting user: ${user.id}`);
@@ -260,7 +260,7 @@ export class UserService extends BaseService {
await this.albumRepository.deleteAll(user.id);
await this.userRepository.delete(user, true);
return JobStatus.Success;
await this.eventRepository.emit('UserDelete', user);
}
private isReadyForDeletion(user: { id: string; deletedAt?: Date | null }, deleteDelay: number): boolean {

View File

@@ -108,7 +108,7 @@ describe(VersionService.name, () => {
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Success);
expect(mocks.systemMetadata.set).toHaveBeenCalled();
expect(mocks.logger.log).toHaveBeenCalled();
expect(mocks.event.clientBroadcast).toHaveBeenCalled();
expect(mocks.websocket.clientBroadcast).toHaveBeenCalled();
});
it('should not notify if the version is equal', async () => {
@@ -118,14 +118,14 @@ describe(VersionService.name, () => {
checkedAt: expect.any(String),
releaseVersion: serverVersion.toString(),
});
expect(mocks.event.clientBroadcast).not.toHaveBeenCalled();
expect(mocks.websocket.clientBroadcast).not.toHaveBeenCalled();
});
it('should handle a github error', async () => {
mocks.serverInfo.getGitHubRelease.mockRejectedValue(new Error('GitHub is down'));
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Failed);
expect(mocks.systemMetadata.set).not.toHaveBeenCalled();
expect(mocks.event.clientBroadcast).not.toHaveBeenCalled();
expect(mocks.websocket.clientBroadcast).not.toHaveBeenCalled();
expect(mocks.logger.warn).toHaveBeenCalled();
});
});
@@ -133,15 +133,15 @@ describe(VersionService.name, () => {
describe('onWebsocketConnectionEvent', () => {
it('should send on_server_version client event', async () => {
await sut.onWebsocketConnection({ userId: '42' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_server_version', '42', expect.any(SemVer));
expect(mocks.event.clientSend).toHaveBeenCalledTimes(1);
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_server_version', '42', expect.any(SemVer));
expect(mocks.websocket.clientSend).toHaveBeenCalledTimes(1);
});
it('should also send a new release notification', async () => {
mocks.systemMetadata.get.mockResolvedValue({ checkedAt: '2024-01-01', releaseVersion: 'v1.42.0' });
await sut.onWebsocketConnection({ userId: '42' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_server_version', '42', expect.any(SemVer));
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_new_release', '42', expect.any(Object));
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_server_version', '42', expect.any(SemVer));
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_new_release', '42', expect.any(Object));
});
});
});

View File

@@ -92,7 +92,7 @@ export class VersionService extends BaseService {
if (semver.gt(releaseVersion, serverVersion)) {
this.logger.log(`Found ${releaseVersion}, released at ${new Date(publishedAt).toLocaleString()}`);
this.eventRepository.clientBroadcast('on_new_release', asNotification(metadata));
this.websocketRepository.clientBroadcast('on_new_release', asNotification(metadata));
}
} catch (error: Error | any) {
this.logger.warn(`Unable to run version check: ${error}\n${error?.stack}`);
@@ -104,10 +104,10 @@ export class VersionService extends BaseService {
@OnEvent({ name: 'WebsocketConnect' })
async onWebsocketConnection({ userId }: ArgOf<'WebsocketConnect'>) {
this.eventRepository.clientSend('on_server_version', userId, serverVersion);
this.websocketRepository.clientSend('on_server_version', userId, serverVersion);
const metadata = await this.systemMetadataRepository.get(SystemMetadataKey.VersionCheckState);
if (metadata) {
this.eventRepository.clientSend('on_new_release', userId, asNotification(metadata));
this.websocketRepository.clientSend('on_new_release', userId, asNotification(metadata));
}
}
}