Mirror of https://github.com/immich-app/immich.git (synced 2026-02-12 20:08:25 +03:00)
Merge branch 'main' of https://github.com/immich-app/immich into feat/sidecar-asset-files
@@ -1 +1 @@
-22.20.0
+24.11.0
@@ -1,39 +1,54 @@
-FROM ghcr.io/immich-app/base-server-dev:202509210934@sha256:b5ce2d7eaf379d4cf15efd4bab180d8afc8a80d20b36c9800f4091aca6ae267e AS builder
+FROM ghcr.io/immich-app/base-server-dev:202510281104@sha256:e2f94c2e92cbae5982b014e610ff29731c0fbcb4bf69022c7fe27594e40c9f83 AS builder
 ENV COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
     CI=1 \
-    COREPACK_HOME=/tmp
+    COREPACK_HOME=/tmp \
+    PNPM_HOME=/buildcache/pnpm-store \
+    PATH="/buildcache/pnpm-store:$PATH"

 RUN npm install --global corepack@latest && \
-    corepack enable pnpm
+    corepack enable pnpm && \
+    pnpm config set store-dir "$PNPM_HOME"

 FROM builder AS server

 WORKDIR /usr/src/app
-COPY ./package* ./pnpm* .pnpmfile.cjs ./
 COPY ./server ./server/
-RUN SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich --frozen-lockfile build && \
+RUN --mount=type=cache,id=pnpm-server,target=/buildcache/pnpm-store \
+    --mount=type=bind,source=package.json,target=package.json \
+    --mount=type=bind,source=.pnpmfile.cjs,target=.pnpmfile.cjs \
+    --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
+    --mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml \
+    SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich --frozen-lockfile build && \
     SHARP_FORCE_GLOBAL_LIBVIPS=true pnpm --filter immich --frozen-lockfile --prod --no-optional deploy /output/server-pruned

 FROM builder AS web

 WORKDIR /usr/src/app
-COPY ./package* ./pnpm* .pnpmfile.cjs ./
 COPY ./web ./web/
 COPY ./i18n ./i18n/
 COPY ./open-api ./open-api/
-RUN SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter @immich/sdk --filter immich-web --frozen-lockfile --force install && \
+RUN --mount=type=cache,id=pnpm-web,target=/buildcache/pnpm-store \
+    --mount=type=bind,source=package.json,target=package.json \
+    --mount=type=bind,source=.pnpmfile.cjs,target=.pnpmfile.cjs \
+    --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
+    --mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml \
+    SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter @immich/sdk --filter immich-web --frozen-lockfile --force install && \
     pnpm --filter @immich/sdk --filter immich-web build

 FROM builder AS cli

-COPY ./package* ./pnpm* .pnpmfile.cjs ./
 COPY ./cli ./cli/
 COPY ./open-api ./open-api/
-RUN pnpm --filter @immich/sdk --filter @immich/cli --frozen-lockfile install && \
+RUN --mount=type=cache,id=pnpm-cli,target=/buildcache/pnpm-store \
+    --mount=type=bind,source=package.json,target=package.json \
+    --mount=type=bind,source=.pnpmfile.cjs,target=.pnpmfile.cjs \
+    --mount=type=bind,source=pnpm-lock.yaml,target=pnpm-lock.yaml \
+    --mount=type=bind,source=pnpm-workspace.yaml,target=pnpm-workspace.yaml \
+    pnpm --filter @immich/sdk --filter @immich/cli --frozen-lockfile install && \
     pnpm --filter @immich/sdk --filter @immich/cli build && \
     pnpm --filter @immich/cli --prod --no-optional deploy /output/cli-pruned

-FROM ghcr.io/immich-app/base-server-prod:202509210934@sha256:0c7eacf0ba88ca52e1a267cfc62d20d07792ea2c604818c2cbd37dc7dcefdac9
+FROM ghcr.io/immich-app/base-server-prod:202510281104@sha256:84f8f3eb4cfafc5e624235f7db703e1222fd60831bef1d488d8d8cad2be5023d

 WORKDIR /usr/src/app
 ENV NODE_ENV=production \
@@ -1,5 +1,5 @@
 # dev build
-FROM ghcr.io/immich-app/base-server-dev:202509210934@sha256:b5ce2d7eaf379d4cf15efd4bab180d8afc8a80d20b36c9800f4091aca6ae267e AS dev
+FROM ghcr.io/immich-app/base-server-dev:202510281104@sha256:e2f94c2e92cbae5982b014e610ff29731c0fbcb4bf69022c7fe27594e40c9f83 AS dev

 ENV COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
     CI=1 \
@@ -27,8 +27,8 @@ ENTRYPOINT ["tini", "--", "/bin/bash", "-c"]
 FROM dev AS dev-container-server

 RUN apt-get update --allow-releaseinfo-change && \
-    apt-get install sudo inetutils-ping openjdk-11-jre-headless \
-    vim nano \
+    apt-get install sudo inetutils-ping openjdk-21-jre-headless \
+    vim nano curl \
     -y --no-install-recommends --fix-missing

 RUN usermod -aG sudo node && \
@@ -44,19 +44,24 @@ FROM dev-container-server AS dev-container-mobile
 USER root
 # Enable multiarch for arm64 if necessary
 RUN if [ "$(dpkg --print-architecture)" = "arm64" ]; then \
-    dpkg --add-architecture amd64 && \
-    apt-get update && \
-    apt-get install -y --no-install-recommends \
-    qemu-user-static \
-    libc6:amd64 \
-    libstdc++6:amd64 \
-    libgcc1:amd64; \
+    dpkg --add-architecture amd64 && \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    gnupg \
+    qemu-user-static \
+    libc6:amd64 \
+    libstdc++6:amd64 \
+    libgcc1:amd64; \
+    else \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    gnupg; \
     fi

 # Flutter SDK
 # https://flutter.dev/docs/development/tools/sdk/releases?tab=linux
 ENV FLUTTER_CHANNEL="stable"
-ENV FLUTTER_VERSION="3.35.4"
+ENV FLUTTER_VERSION="3.35.7"
 ENV FLUTTER_HOME=/flutter
 ENV PATH=${PATH}:${FLUTTER_HOME}/bin

@@ -65,11 +70,11 @@ RUN mkdir -p ${FLUTTER_HOME} \
     && curl -C - --output flutter.tar.xz https://storage.googleapis.com/flutter_infra_release/releases/${FLUTTER_CHANNEL}/linux/flutter_linux_${FLUTTER_VERSION}-${FLUTTER_CHANNEL}.tar.xz \
     && tar -xf flutter.tar.xz --strip-components=1 -C ${FLUTTER_HOME} \
     && rm flutter.tar.xz \
-    && chown -R node ${FLUTTER_HOME}
+    && chown -R node ${FLUTTER_HOME} \
+    && git config --global --add safe.directory ${FLUTTER_HOME}

-RUN apt-get update \
-    && wget -qO- https://dcm.dev/pgp-key.public | gpg --dearmor -o /usr/share/keyrings/dcm.gpg \
+RUN wget -qO- https://dcm.dev/pgp-key.public | gpg --dearmor -o /usr/share/keyrings/dcm.gpg \
     && echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | tee /etc/apt/sources.list.d/dart_stable.list \
     && apt-get update \
     && apt-get install dcm -y
@@ -1,6 +1,6 @@
 {
   "name": "immich",
-  "version": "2.0.1",
+  "version": "2.2.0",
   "description": "",
   "author": "",
   "private": true,
@@ -44,14 +44,14 @@
     "@nestjs/websockets": "^11.0.4",
     "@opentelemetry/api": "^1.9.0",
     "@opentelemetry/context-async-hooks": "^2.0.0",
-    "@opentelemetry/exporter-prometheus": "^0.205.0",
-    "@opentelemetry/instrumentation-http": "^0.205.0",
-    "@opentelemetry/instrumentation-ioredis": "^0.53.0",
-    "@opentelemetry/instrumentation-nestjs-core": "^0.51.0",
-    "@opentelemetry/instrumentation-pg": "^0.58.0",
+    "@opentelemetry/exporter-prometheus": "^0.207.0",
+    "@opentelemetry/instrumentation-http": "^0.207.0",
+    "@opentelemetry/instrumentation-ioredis": "^0.55.0",
+    "@opentelemetry/instrumentation-nestjs-core": "^0.54.0",
+    "@opentelemetry/instrumentation-pg": "^0.60.0",
     "@opentelemetry/resources": "^2.0.1",
     "@opentelemetry/sdk-metrics": "^2.0.1",
-    "@opentelemetry/sdk-node": "^0.205.0",
+    "@opentelemetry/sdk-node": "^0.207.0",
     "@opentelemetry/semantic-conventions": "^1.34.0",
     "@react-email/components": "^0.5.0",
     "@react-email/render": "^1.1.2",
@@ -67,25 +67,25 @@
     "compression": "^1.8.0",
     "cookie": "^1.0.2",
     "cookie-parser": "^1.4.7",
-    "cron": "4.3.0",
-    "exiftool-vendored": "^28.8.0",
+    "cron": "4.3.3",
+    "exiftool-vendored": "^31.1.0",
     "express": "^5.1.0",
     "fast-glob": "^3.3.2",
    "fluent-ffmpeg": "^2.1.2",
     "geo-tz": "^8.0.0",
     "handlebars": "^4.7.8",
     "i18n-iso-countries": "^7.6.0",
-    "ioredis": "^5.3.2",
+    "ioredis": "^5.8.2",
     "js-yaml": "^4.1.0",
     "kysely": "0.28.2",
-    "kysely-postgres-js": "^2.0.0",
+    "kysely-postgres-js": "^3.0.0",
     "lodash": "^4.17.21",
     "luxon": "^3.4.2",
     "mnemonist": "^0.40.3",
     "multer": "^2.0.2",
     "nest-commander": "^3.16.0",
     "nestjs-cls": "^5.0.0",
-    "nestjs-kysely": "^3.0.0",
+    "nestjs-kysely": "3.1.2",
     "nestjs-otel": "^7.0.0",
     "nodemailer": "^7.0.0",
     "openid-client": "^6.3.3",
@@ -101,7 +101,7 @@
     "sanitize-filename": "^1.6.3",
     "sanitize-html": "^2.14.0",
     "semver": "^7.6.2",
-    "sharp": "^0.34.3",
+    "sharp": "^0.34.4",
     "sirv": "^3.0.0",
     "socket.io": "^4.8.1",
     "tailwindcss-preset-email": "^1.4.0",
@@ -129,7 +129,7 @@
     "@types/luxon": "^3.6.2",
     "@types/mock-fs": "^4.13.1",
     "@types/multer": "^2.0.0",
-    "@types/node": "^22.18.1",
+    "@types/node": "^22.18.12",
     "@types/nodemailer": "^7.0.0",
     "@types/picomatch": "^4.0.0",
     "@types/pngjs": "^6.0.5",
@@ -161,9 +161,9 @@
     "vitest": "^3.0.0"
   },
   "volta": {
-    "node": "22.20.0"
+    "node": "24.11.0"
   },
   "overrides": {
-    "sharp": "^0.34.3"
+    "sharp": "^0.34.4"
   }
 }
@@ -19,6 +19,7 @@ import { ConfigRepository } from 'src/repositories/config.repository';
 import { EventRepository } from 'src/repositories/event.repository';
 import { LoggingRepository } from 'src/repositories/logging.repository';
 import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemetry.repository';
+import { WebsocketRepository } from 'src/repositories/websocket.repository';
 import { services } from 'src/services';
 import { AuthService } from 'src/services/auth.service';
 import { CliService } from 'src/services/cli.service';
@@ -52,6 +53,7 @@ class BaseModule implements OnModuleInit, OnModuleDestroy {
     @Inject(IWorker) private worker: ImmichWorker,
     logger: LoggingRepository,
     private eventRepository: EventRepository,
+    private websocketRepository: WebsocketRepository,
     private jobService: JobService,
     private telemetryRepository: TelemetryRepository,
     private authService: AuthService,
@@ -64,7 +66,7 @@ class BaseModule implements OnModuleInit, OnModuleDestroy {

     this.jobService.setServices(services);

-    this.eventRepository.setAuthFn(async (client) =>
+    this.websocketRepository.setAuthFn(async (client) =>
       this.authService.authenticate({
         headers: client.request.headers,
         queryParams: {},
@@ -74,6 +74,13 @@ export interface SystemConfig {
       minFaces: number;
       maxDistance: number;
     };
+    ocr: {
+      enabled: boolean;
+      modelName: string;
+      minDetectionScore: number;
+      minRecognitionScore: number;
+      maxResolution: number;
+    };
   };
   map: {
     enabled: boolean;
@@ -159,6 +166,7 @@ export interface SystemConfig {
     ignoreCert: boolean;
     host: string;
     port: number;
+    secure: boolean;
     username: string;
     password: string;
   };
@@ -226,6 +234,7 @@ export const defaults = Object.freeze<SystemConfig>({
     [QueueName.ThumbnailGeneration]: { concurrency: 3 },
     [QueueName.VideoConversion]: { concurrency: 1 },
     [QueueName.Notification]: { concurrency: 5 },
+    [QueueName.Ocr]: { concurrency: 1 },
   },
   logging: {
     enabled: true,
@@ -254,6 +263,13 @@ export const defaults = Object.freeze<SystemConfig>({
       maxDistance: 0.5,
      minFaces: 3,
     },
+    ocr: {
+      enabled: true,
+      modelName: 'PP-OCRv5_mobile',
+      minDetectionScore: 0.5,
+      minRecognitionScore: 0.8,
+      maxResolution: 736,
+    },
   },
   map: {
     enabled: true,
@@ -356,6 +372,7 @@ export const defaults = Object.freeze<SystemConfig>({
     ignoreCert: false,
     host: '',
     port: 587,
+    secure: false,
     username: '',
     password: '',
   },
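The new OCR block lives under the machineLearning section of the system config. A minimal sketch of overriding the defaults shown above (the spread-based override style is an assumption; the key names come straight from this hunk):

import { defaults, SystemConfig } from 'src/config';

// Hypothetical override of the new OCR defaults added in this diff.
const config: SystemConfig = {
  ...defaults,
  machineLearning: {
    ...defaults.machineLearning,
    ocr: {
      enabled: true,
      modelName: 'PP-OCRv5_mobile',
      minDetectionScore: 0.5,   // drop detected text boxes below this confidence
      minRecognitionScore: 0.8, // drop recognized strings below this confidence
      maxResolution: 736,       // cap on the image resolution fed to the detector
    },
  },
};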
@@ -5,7 +5,7 @@ import { SemVer } from 'semver';
 import { DatabaseExtension, ExifOrientation, VectorIndex } from 'src/enum';

 export const POSTGRES_VERSION_RANGE = '>=14.0.0';
-export const VECTORCHORD_VERSION_RANGE = '>=0.3 <0.5';
+export const VECTORCHORD_VERSION_RANGE = '>=0.3 <0.6';
 export const VECTORS_VERSION_RANGE = '>=0.2 <0.4';
 export const VECTOR_VERSION_RANGE = '>=0.5 <1';
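These ranges are standard node-semver syntax, so the widened VectorChord bound can be sanity-checked directly (semver is already a server dependency in the package.json hunk above):

import { satisfies } from 'semver';

// VectorChord 0.5.x is now accepted; 0.6.x is still rejected.
console.log(satisfies('0.5.2', '>=0.3 <0.6')); // true
console.log(satisfies('0.6.0', '>=0.3 <0.6')); // false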
@@ -5,6 +5,7 @@ import { AssetResponseDto } from 'src/dtos/asset-response.dto';
 import {
   AssetBulkDeleteDto,
   AssetBulkUpdateDto,
+  AssetCopyDto,
   AssetJobsDto,
   AssetMetadataResponseDto,
   AssetMetadataRouteParams,
@@ -16,6 +17,7 @@ import {
   UpdateAssetDto,
 } from 'src/dtos/asset.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
+import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
 import { Permission, RouteKey } from 'src/enum';
 import { Auth, Authenticated } from 'src/middleware/auth.guard';
 import { AssetService } from 'src/services/asset.service';
@@ -89,12 +91,25 @@ export class AssetController {
     return this.service.update(auth, id, dto);
   }

+  @Put('copy')
+  @Authenticated({ permission: Permission.AssetCopy })
+  @HttpCode(HttpStatus.NO_CONTENT)
+  copyAsset(@Auth() auth: AuthDto, @Body() dto: AssetCopyDto): Promise<void> {
+    return this.service.copy(auth, dto);
+  }
+
   @Get(':id/metadata')
   @Authenticated({ permission: Permission.AssetRead })
   getAssetMetadata(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<AssetMetadataResponseDto[]> {
     return this.service.getMetadata(auth, id);
   }

+  @Get(':id/ocr')
+  @Authenticated({ permission: Permission.AssetRead })
+  getAssetOcr(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<AssetOcrResponseDto[]> {
+    return this.service.getOcr(auth, id);
+  }
+
   @Put(':id/metadata')
   @Authenticated({ permission: Permission.AssetUpdate })
   updateAssetMetadata(
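A sketch of calling the new OCR endpoint from outside the server (the /api prefix and x-api-key header follow Immich's usual API conventions; the URL, asset id, and key are placeholders):

// Hypothetical client call for GET /api/assets/:id/ocr.
const assetId = '00000000-0000-4000-a000-000000000000';
const response = await fetch(`https://immich.example.com/api/assets/${assetId}/ocr`, {
  headers: { 'x-api-key': 'your-api-key' },
});
const results: { text: string; boxScore: number; textScore: number }[] = await response.json();
for (const { text, boxScore, textScore } of results) {
  console.log(`${text} (box ${boxScore.toFixed(2)}, text ${textScore.toFixed(2)})`);
}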
@@ -183,7 +183,7 @@ describe(AuthController.name, () => {
     it('should be an authenticated route', async () => {
       await request(ctx.getHttpServer())
         .post('/auth/change-password')
-        .send({ password: 'password', newPassword: 'Password1234' });
+        .send({ password: 'password', newPassword: 'Password1234', invalidateSessions: false });
       expect(ctx.authenticate).toHaveBeenCalled();
     });
   });
@@ -2,6 +2,7 @@ import { Body, Controller, Delete, Get, HttpCode, HttpStatus, Param, Post, Put,
 import { ApiTags } from '@nestjs/swagger';
 import { AssetStatsDto, AssetStatsResponseDto } from 'src/dtos/asset.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
+import { SessionResponseDto } from 'src/dtos/session.dto';
 import { UserPreferencesResponseDto, UserPreferencesUpdateDto } from 'src/dtos/user-preferences.dto';
 import {
   UserAdminCreateDto,
@@ -58,6 +59,12 @@ export class UserAdminController {
     return this.service.delete(auth, id, dto);
   }

+  @Get(':id/sessions')
+  @Authenticated({ permission: Permission.AdminSessionRead, admin: true })
+  getUserSessionsAdmin(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<SessionResponseDto[]> {
+    return this.service.getSessions(auth, id);
+  }
+
   @Get(':id/statistics')
   @Authenticated({ permission: Permission.AdminUserRead, admin: true })
   getUserStatisticsAdmin(
@@ -237,6 +237,7 @@ export type Session = {
   expiresAt: Date | null;
   deviceOS: string;
   deviceType: string;
+  appVersion: string | null;
   pinExpiresAt: Date | null;
   isPendingSyncReset: boolean;
 };
@@ -306,7 +307,7 @@ export const columns = {
   assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type'],
   authUser: ['user.id', 'user.name', 'user.email', 'user.isAdmin', 'user.quotaUsageInBytes', 'user.quotaSizeInBytes'],
   authApiKey: ['api_key.id', 'api_key.permissions'],
-  authSession: ['session.id', 'session.updatedAt', 'session.pinExpiresAt'],
+  authSession: ['session.id', 'session.updatedAt', 'session.pinExpiresAt', 'session.appVersion'],
   authSharedLink: [
     'shared_link.id',
     'shared_link.userId',
@@ -87,7 +87,7 @@ export function Chunked(

     return Promise.all(
       chunks(argument, chunkSize).map(async (chunk) => {
-        await Reflect.apply(originalMethod, this, [
+        return await Reflect.apply(originalMethod, this, [
           ...arguments_.slice(0, parameterIndex),
           chunk,
           ...arguments_.slice(parameterIndex + 1),
@@ -103,7 +103,7 @@ export function ChunkedArray(options?: { paramIndex?: number }): MethodDecorator
 }

 export function ChunkedSet(options?: { paramIndex?: number }): MethodDecorator {
-  return Chunked({ ...options, mergeFn: setUnion });
+  return Chunked({ ...options, mergeFn: (args: Set<any>[]) => setUnion(...args) });
 }

 const UUID = '00000000-0000-4000-a000-000000000000';
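The ChunkedSet change fixes a real mismatch: the merge callback receives a single array of per-chunk results, while a variadic setUnion expects each set as its own argument. A self-contained sketch of the difference (this setUnion is a stand-in for the real helper):

// Stand-in for the variadic set-union helper assumed by the diff above.
const setUnion = <T>(...sets: Set<T>[]): Set<T> => new Set(sets.flatMap((set) => [...set]));

const chunkResults: Set<string>[] = [new Set(['a']), new Set(['b'])];

// Passing setUnion directly would hand it one Set[] argument and produce
// a set of sets; the adapter spreads the chunks into separate arguments.
const merged = ((args: Set<string>[]) => setUnion(...args))(chunkResults);
console.log([...merged]); // ['a', 'b']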
@@ -128,6 +128,14 @@
   role!: AlbumUserRole;
 }

+export class ContributorCountResponseDto {
+  @ApiProperty()
+  userId!: string;
+
+  @ApiProperty({ type: 'integer' })
+  assetCount!: number;
+}
+
 export class AlbumResponseDto {
   id!: string;
   ownerId!: string;
@@ -149,6 +157,11 @@ export class AlbumResponseDto {
   isActivityEnabled!: boolean;
   @ValidateEnum({ enum: AssetOrder, name: 'AssetOrder', optional: true })
   order?: AssetOrder;
+
+  // Optional per-user contribution counts for shared albums
+  @Type(() => ContributorCountResponseDto)
+  @ApiProperty({ type: [ContributorCountResponseDto], required: false })
+  contributorCounts?: ContributorCountResponseDto[];
 }

 export type MapAlbumDto = {
@@ -211,7 +211,7 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
   fileModifiedAt: entity.fileModifiedAt,
   localDateTime: entity.localDateTime,
   updatedAt: entity.updatedAt,
-  isFavorite: options.auth?.user.id === entity.ownerId ? entity.isFavorite : false,
+  isFavorite: options.auth?.user.id === entity.ownerId && entity.isFavorite,
   isArchived: entity.visibility === AssetVisibility.Archive,
   isTrashed: !!entity.deletedAt,
   visibility: entity.visibility,
@@ -186,6 +186,29 @@ export class AssetMetadataResponseDto {
   updatedAt!: Date;
 }

+export class AssetCopyDto {
+  @ValidateUUID()
+  sourceId!: string;
+
+  @ValidateUUID()
+  targetId!: string;
+
+  @ValidateBoolean({ optional: true, default: true })
+  sharedLinks?: boolean;
+
+  @ValidateBoolean({ optional: true, default: true })
+  albums?: boolean;
+
+  @ValidateBoolean({ optional: true, default: true })
+  sidecar?: boolean;
+
+  @ValidateBoolean({ optional: true, default: true })
+  stack?: boolean;
+
+  @ValidateBoolean({ optional: true, default: true })
+  favorite?: boolean;
+}
+
 export const mapStats = (stats: AssetStats): AssetStatsResponseDto => {
   return {
     images: stats[AssetType.Image],
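Given the defaults above, a copy request only needs the two ids; the flags exist to opt out of individual behaviors. A sketch against the PUT /api/assets/copy route added in the controller hunk earlier (URL, ids, and key are placeholders):

// Hypothetical request body; omitted flags default to true.
await fetch('https://immich.example.com/api/assets/copy', {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json', 'x-api-key': 'your-api-key' },
  body: JSON.stringify({
    sourceId: '00000000-0000-4000-a000-000000000000',
    targetId: '00000000-0000-4000-a000-000000000001',
    favorite: false, // opt out: do not carry over the favorite flag
  }),
});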
@@ -4,7 +4,7 @@ import { IsEmail, IsNotEmpty, IsString, MinLength } from 'class-validator';
 import { AuthApiKey, AuthSession, AuthSharedLink, AuthUser, UserAdmin } from 'src/database';
 import { ImmichCookie, UserMetadataKey } from 'src/enum';
 import { UserMetadataItem } from 'src/types';
-import { Optional, PinCode, toEmail } from 'src/validation';
+import { Optional, PinCode, toEmail, ValidateBoolean } from 'src/validation';

 export type CookieResponse = {
   isSecure: boolean;
@@ -83,6 +83,9 @@ export class ChangePasswordDto {
   @MinLength(8)
   @ApiProperty({ example: 'password' })
   newPassword!: string;
+
+  @ValidateBoolean({ optional: true, default: false })
+  invalidateSessions?: boolean;
 }

 export class PinCodeSetupDto {
@@ -93,4 +93,7 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>

   @ApiProperty({ type: JobStatusDto })
   [QueueName.BackupDatabase]!: JobStatusDto;
+
+  @ApiProperty({ type: JobStatusDto })
+  [QueueName.Ocr]!: JobStatusDto;
 }
@@ -46,3 +46,25 @@ export class FacialRecognitionConfig extends ModelConfig {
   @ApiProperty({ type: 'integer' })
   minFaces!: number;
 }
+
+export class OcrConfig extends ModelConfig {
+  @IsNumber()
+  @Min(1)
+  @Type(() => Number)
+  @ApiProperty({ type: 'integer' })
+  maxResolution!: number;
+
+  @IsNumber()
+  @Min(0.1)
+  @Max(1)
+  @Type(() => Number)
+  @ApiProperty({ type: 'number', format: 'double' })
+  minDetectionScore!: number;
+
+  @IsNumber()
+  @Min(0.1)
+  @Max(1)
+  @Type(() => Number)
+  @ApiProperty({ type: 'number', format: 'double' })
+  minRecognitionScore!: number;
+}
server/src/dtos/ocr.dto.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
+import { ApiProperty } from '@nestjs/swagger';
+
+export class AssetOcrResponseDto {
+  @ApiProperty({ type: 'string', format: 'uuid' })
+  id!: string;
+
+  @ApiProperty({ type: 'string', format: 'uuid' })
+  assetId!: string;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Normalized x coordinate of box corner 1 (0-1)' })
+  x1!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Normalized y coordinate of box corner 1 (0-1)' })
+  y1!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Normalized x coordinate of box corner 2 (0-1)' })
+  x2!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Normalized y coordinate of box corner 2 (0-1)' })
+  y2!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Normalized x coordinate of box corner 3 (0-1)' })
+  x3!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Normalized y coordinate of box corner 3 (0-1)' })
+  y3!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Normalized x coordinate of box corner 4 (0-1)' })
+  x4!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Normalized y coordinate of box corner 4 (0-1)' })
+  y4!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Confidence score for text detection box' })
+  boxScore!: number;
+
+  @ApiProperty({ type: 'number', format: 'double', description: 'Confidence score for text recognition' })
+  textScore!: number;
+
+  @ApiProperty({ type: 'string', description: 'Recognized text' })
+  text!: string;
+}
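Because the four box corners are normalized to 0-1, a client has to scale them by the rendered image size before drawing an overlay; a small sketch using the DTO shape above:

type OcrQuad = { x1: number; y1: number; x2: number; y2: number; x3: number; y3: number; x4: number; y4: number };

// Scale one normalized quadrilateral to pixel coordinates.
const toPixels = (quad: OcrQuad, width: number, height: number) => [
  { x: quad.x1 * width, y: quad.y1 * height },
  { x: quad.x2 * width, y: quad.y2 * height },
  { x: quad.x3 * width, y: quad.y3 * height },
  { x: quad.x4 * width, y: quad.y4 * height },
];

console.log(toPixels({ x1: 0.1, y1: 0.2, x2: 0.4, y2: 0.2, x3: 0.4, y3: 0.25, x4: 0.1, y4: 0.25 }, 4000, 3000));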
@@ -101,6 +101,11 @@ class BaseSearchDto {
   @Max(5)
   @Min(-1)
   rating?: number;
+
+  @IsString()
+  @IsNotEmpty()
+  @Optional()
+  ocr?: string;
 }

 class BaseSearchWithResultsDto extends BaseSearchDto {
@@ -249,6 +254,7 @@ export enum SearchSuggestionType {
   CITY = 'city',
   CAMERA_MAKE = 'camera-make',
   CAMERA_MODEL = 'camera-model',
+  CAMERA_LENS_MODEL = 'camera-lens-model',
 }

 export class SearchSuggestionRequestDto {
@@ -271,6 +277,10 @@ export class SearchSuggestionRequestDto {
   @Optional()
   model?: string;

+  @IsString()
+  @Optional()
+  lensModel?: string;
+
   @ValidateBoolean({ optional: true })
   @PropertyLifecycle({ addedAt: 'v111.0.0' })
   includeNull?: boolean;
@@ -171,6 +171,7 @@ export class ServerFeaturesDto {
   sidecar!: boolean;
   search!: boolean;
   email!: boolean;
+  ocr!: boolean;
 }

 export interface ReleaseNotification {
@@ -34,6 +34,7 @@ export class SessionResponseDto {
   current!: boolean;
   deviceType!: string;
   deviceOS!: string;
+  appVersion!: string | null;
   isPendingSyncReset!: boolean;
 }

@@ -47,6 +48,7 @@ export const mapSession = (entity: Session, currentId?: string): SessionResponse
   updatedAt: entity.updatedAt.toISOString(),
   expiresAt: entity.expiresAt?.toISOString(),
   current: currentId === entity.id,
+  appVersion: entity.appVersion,
   deviceOS: entity.deviceOS,
   deviceType: entity.deviceType,
   isPendingSyncReset: entity.isPendingSyncReset,
@@ -15,7 +15,7 @@ import {
   ValidateNested,
 } from 'class-validator';
 import { SystemConfig } from 'src/config';
-import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig } from 'src/dtos/model-config.dto';
+import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig, OcrConfig } from 'src/dtos/model-config.dto';
 import {
   AudioCodec,
   CQMode,
@@ -201,6 +201,12 @@ class SystemConfigJobDto implements Record<ConcurrentQueueName, JobSettingsDto>
   @Type(() => JobSettingsDto)
   [QueueName.FaceDetection]!: JobSettingsDto;

+  @ApiProperty({ type: JobSettingsDto })
+  @ValidateNested()
+  @IsObject()
+  @Type(() => JobSettingsDto)
+  [QueueName.Ocr]!: JobSettingsDto;
+
   @ApiProperty({ type: JobSettingsDto })
   @ValidateNested()
   @IsObject()
@@ -296,6 +302,11 @@ class SystemConfigMachineLearningDto {
   @ValidateNested()
   @IsObject()
   facialRecognition!: FacialRecognitionConfig;
+
+  @Type(() => OcrConfig)
+  @ValidateNested()
+  @IsObject()
+  ocr!: OcrConfig;
 }

 enum MapTheme {
@@ -463,6 +474,9 @@ class SystemConfigSmtpTransportDto {
   @Max(65_535)
   port!: number;

+  @ValidateBoolean()
+  secure!: boolean;
+
   @IsString()
   username!: string;
@@ -173,6 +173,7 @@ export function mapUserAdmin(entity: UserAdmin): UserAdminResponseDto {
   const license = metadata.find(
     (item): item is UserMetadataItem<UserMetadataKey.License> => item.key === UserMetadataKey.License,
   )?.value;

   return {
     ...mapUser(entity),
     storageLabel: entity.storageLabel,
@@ -96,6 +96,7 @@ export enum Permission {
   AssetDownload = 'asset.download',
   AssetUpload = 'asset.upload',
   AssetReplace = 'asset.replace',
+  AssetCopy = 'asset.copy',

   AlbumCreate = 'album.create',
   AlbumRead = 'album.read',
@@ -237,6 +238,8 @@ export enum Permission {
   AdminUserUpdate = 'adminUser.update',
   AdminUserDelete = 'adminUser.delete',

+  AdminSessionRead = 'adminSession.read',
+
   AdminAuthUnlinkAll = 'adminAuth.unlinkAll',
 }

@@ -512,6 +515,7 @@ export enum QueueName {
   Library = 'library',
   Notification = 'notifications',
   BackupDatabase = 'backupDatabase',
+  Ocr = 'ocr',
 }

 export enum JobName {
@@ -584,6 +588,10 @@ export enum JobName {
   TagCleanup = 'TagCleanup',

   VersionCheck = 'VersionCheck',
+
+  // OCR
+  OcrQueueAll = 'OcrQueueAll',
+  Ocr = 'Ocr',
 }

 export enum JobCommand {
@@ -723,6 +731,8 @@ export enum NotificationType {
   JobFailed = 'JobFailed',
   BackupFailed = 'BackupFailed',
   SystemMessage = 'SystemMessage',
+  AlbumInvite = 'AlbumInvite',
+  AlbumUpdate = 'AlbumUpdate',
   Custom = 'Custom',
 }
@@ -13,7 +13,7 @@ import { AuthDto } from 'src/dtos/auth.dto';
 import { ApiCustomExtension, ImmichQuery, MetadataKey, Permission } from 'src/enum';
 import { LoggingRepository } from 'src/repositories/logging.repository';
 import { AuthService, LoginDetails } from 'src/services/auth.service';
-import { UAParser } from 'ua-parser-js';
+import { getUserAgentDetails } from 'src/utils/request';

 type AdminRoute = { admin?: true };
 type SharedLinkRoute = { sharedLink?: true };
@@ -56,13 +56,14 @@ export const FileResponse = () =>

 export const GetLoginDetails = createParamDecorator((data, context: ExecutionContext): LoginDetails => {
   const request = context.switchToHttp().getRequest<Request>();
-  const userAgent = UAParser(request.headers['user-agent']);
+  const { deviceType, deviceOS, appVersion } = getUserAgentDetails(request.headers);

   return {
     clientIp: request.ip ?? '',
     isSecure: request.secure,
-    deviceType: userAgent.browser.name || userAgent.device.type || (request.headers.devicemodel as string) || '',
-    deviceOS: userAgent.os.name || (request.headers.devicetype as string) || '',
+    deviceType,
+    deviceOS,
+    appVersion,
   };
 });

@@ -86,7 +87,6 @@ export class AuthGuard implements CanActivate {

   async canActivate(context: ExecutionContext): Promise<boolean> {
     const targets = [context.getHandler()];

     const options = this.reflector.getAllAndOverride<AuthenticatedOptions | undefined>(MetadataKey.AuthRoute, targets);
     if (!options) {
       return true;
@@ -71,6 +71,11 @@ where
   and "shared_link"."albumId" in ($2)

 -- AccessRepository.asset.checkAlbumAccess
+with
+  "target" as (
+    select
+      array[$1]::uuid[] as "ids"
+  )
 select
   "asset"."id",
   "asset"."livePhotoVideoId"
@@ -82,8 +87,12 @@ from
   left join "album_user" as "albumUsers" on "albumUsers"."albumsId" = "album"."id"
   left join "user" on "user"."id" = "albumUsers"."usersId"
   and "user"."deletedAt" is null
+  cross join "target"
 where
-  array["asset"."id", "asset"."livePhotoVideoId"] && array[$1]::uuid[]
+  (
+    "asset"."id" = any (target.ids)
+    or "asset"."livePhotoVideoId" = any (target.ids)
+  )
   and (
     "album"."ownerId" = $2
     or "user"."id" = $3
@@ -407,3 +407,30 @@ from
 where
   "album_asset"."albumsId" = $1
   and "album_asset"."assetsId" in ($2)
+
+-- AlbumRepository.getContributorCounts
+select
+  "asset"."ownerId" as "userId",
+  count(*) as "assetCount"
+from
+  "album_asset"
+  inner join "asset" on "asset"."id" = "assetsId"
+where
+  "asset"."deletedAt" is null
+  and "album_asset"."albumsId" = $1
+group by
+  "asset"."ownerId"
+order by
+  "assetCount" desc
+
+-- AlbumRepository.copyAlbums
+insert into
+  "album_asset"
+select
+  "album_asset"."albumsId",
+  $1 as "assetsId"
+from
+  "album_asset"
+where
+  "album_asset"."assetsId" = $2
+on conflict do nothing
@@ -330,6 +330,23 @@ from
 where
   "asset"."id" = $2

+-- AssetJobRepository.getForOcr
+select
+  "asset"."visibility",
+  (
+    select
+      "asset_file"."path"
+    from
+      "asset_file"
+    where
+      "asset_file"."assetId" = "asset"."id"
+      and "asset_file"."type" = $1
+  ) as "previewFile"
+from
+  "asset"
+where
+  "asset"."id" = $2
+
 -- AssetJobRepository.getForSyncAssets
 select
   "asset"."id",
@@ -551,6 +568,17 @@ where
 order by
   "asset"."fileCreatedAt" desc

+-- AssetJobRepository.streamForOcrJob
+select
+  "asset"."id"
+from
+  "asset"
+  inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id"
+where
+  "asset_job_status"."ocrAt" is null
+  and "asset"."deletedAt" is null
+  and "asset"."visibility" != $1
+
 -- AssetJobRepository.streamForMigrationJob
 select
   "id"
@@ -296,7 +296,8 @@ with
   "asset"."duration",
   "asset"."id",
   "asset"."visibility",
-  "asset"."isFavorite",
+  asset."isFavorite"
+  and asset."ownerId" = $1 as "isFavorite",
   asset.type = 'IMAGE' as "isImage",
   asset."deletedAt" is not null as "isTrashed",
   "asset"."livePhotoVideoId",
@@ -341,14 +342,14 @@ with
 where
   "stacked"."stackId" = "asset"."stackId"
   and "stacked"."deletedAt" is null
-  and "stacked"."visibility" = $1
+  and "stacked"."visibility" = $2
 group by
   "stacked"."stackId"
 ) as "stacked_assets" on true
 where
   "asset"."deletedAt" is null
   and "asset"."visibility" in ('archive', 'timeline')
-  and date_trunc('MONTH', "localDateTime" AT TIME ZONE 'UTC') AT TIME ZONE 'UTC' = $2
+  and date_trunc('MONTH', "localDateTime" AT TIME ZONE 'UTC') AT TIME ZONE 'UTC' = $3
   and not exists (
     select
     from
server/src/queries/ocr.repository.sql (new file, 68 lines)
@@ -0,0 +1,68 @@
+-- NOTE: This file is auto generated by ./sql-generator
+
+-- OcrRepository.getById
+select
+  "asset_ocr".*
+from
+  "asset_ocr"
+where
+  "asset_ocr"."id" = $1
+
+-- OcrRepository.getByAssetId
+select
+  "asset_ocr".*
+from
+  "asset_ocr"
+where
+  "asset_ocr"."assetId" = $1
+
+-- OcrRepository.upsert
+with
+  "deleted_ocr" as (
+    delete from "asset_ocr"
+    where
+      "assetId" = $1
+  ),
+  "inserted_ocr" as (
+    insert into
+      "asset_ocr" (
+        "assetId",
+        "x1",
+        "y1",
+        "x2",
+        "y2",
+        "x3",
+        "y3",
+        "x4",
+        "y4",
+        "text",
+        "boxScore",
+        "textScore"
+      )
+    values
+      (
+        $2,
+        $3,
+        $4,
+        $5,
+        $6,
+        $7,
+        $8,
+        $9,
+        $10,
+        $11,
+        $12,
+        $13
+      )
+  ),
+  "inserted_search" as (
+    insert into
+      "ocr_search" ("assetId", "text")
+    values
+      ($14, $15)
+    on conflict ("assetId") do update
+    set
+      "text" = "excluded"."text"
+  )
+select
+  1 as "dummy"
@@ -290,3 +290,15 @@ where
   and "visibility" = $2
   and "deletedAt" is null
   and "model" is not null
+
+-- SearchRepository.getCameraLensModels
+select distinct
+  on ("lensModel") "lensModel"
+from
+  "asset_exif"
+  inner join "asset" on "asset"."id" = "asset_exif"."assetId"
+where
+  "ownerId" = any ($1::uuid[])
+  and "visibility" = $2
+  and "deletedAt" is null
+  and "lensModel" is not null
@@ -23,6 +23,7 @@ select
   "session"."id",
   "session"."updatedAt",
   "session"."pinExpiresAt",
+  "session"."appVersion",
   (
     select
       to_json(obj)
@@ -73,6 +74,12 @@ delete from "session"
 where
   "id" = $1::uuid

+-- SessionRepository.invalidate
+delete from "session"
+where
+  "userId" = $1
+  and "id" != $2
+
 -- SessionRepository.lockAll
 update "session"
 set
server/src/queries/shared.link.asset.repository.sql (new file, 13 lines)
@@ -0,0 +1,13 @@
+-- NOTE: This file is auto generated by ./sql-generator
+
+-- SharedLinkAssetRepository.copySharedLinks
+insert into
+  "shared_link_asset"
+select
+  $1 as "assetsId",
+  "shared_link_asset"."sharedLinksId"
+from
+  "shared_link_asset"
+where
+  "shared_link_asset"."assetsId" = $2
+on conflict do nothing
@@ -153,3 +153,10 @@ from
   left join "stack" on "stack"."id" = "asset"."stackId"
 where
   "asset"."id" = $1
+
+-- StackRepository.merge
+update "asset"
+set
+  "stackId" = $1
+where
+  "asset"."stackId" = $2
@@ -363,6 +363,14 @@ group by
 order by
   "user"."createdAt" asc

+-- UserRepository.getCount
+select
+  count(*) as "count"
+from
+  "user"
+where
+  "user"."deletedAt" is null
+
 -- UserRepository.updateUsage
 update "user"
 set
@@ -136,6 +136,7 @@ class AssetAccess {
   }

   return this.db
+    .with('target', (qb) => qb.selectNoFrom(sql`array[${sql.join([...assetIds])}]::uuid[]`.as('ids')))
     .selectFrom('album')
     .innerJoin('album_asset as albumAssets', 'album.id', 'albumAssets.albumsId')
     .innerJoin('asset', (join) =>
@@ -143,11 +144,13 @@ class AssetAccess {
     )
     .leftJoin('album_user as albumUsers', 'albumUsers.albumsId', 'album.id')
     .leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.usersId').on('user.deletedAt', 'is', null))
+    .crossJoin('target')
     .select(['asset.id', 'asset.livePhotoVideoId'])
-    .where(
-      sql`array["asset"."id", "asset"."livePhotoVideoId"]`,
-      '&&',
-      sql`array[${sql.join([...assetIds])}]::uuid[] `,
+    .where((eb) =>
+      eb.or([
+        eb('asset.id', '=', sql<string>`any(target.ids)`),
+        eb('asset.livePhotoVideoId', '=', sql<string>`any(target.ids)`),
+      ]),
     )
     .where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('user.id', '=', userId)]))
     .where('album.deletedAt', 'is', null)
@@ -379,4 +379,36 @@ export class AlbumRepository {
     )
     .whereRef('album_asset.albumsId', '=', 'album.id');
   }

+  /**
+   * Get per-user asset contribution counts for a single album.
+   * Excludes deleted assets, orders by count desc.
+   */
+  @GenerateSql({ params: [DummyValue.UUID] })
+  getContributorCounts(id: string) {
+    return this.db
+      .selectFrom('album_asset')
+      .innerJoin('asset', 'asset.id', 'assetsId')
+      .where('asset.deletedAt', 'is', sql.lit(null))
+      .where('album_asset.albumsId', '=', id)
+      .select('asset.ownerId as userId')
+      .select((eb) => eb.fn.countAll<number>().as('assetCount'))
+      .groupBy('asset.ownerId')
+      .orderBy('assetCount', 'desc')
+      .execute();
+  }
+
+  @GenerateSql({ params: [{ sourceAssetId: DummyValue.UUID, targetAssetId: DummyValue.UUID }] })
+  async copyAlbums({ sourceAssetId, targetAssetId }: { sourceAssetId: string; targetAssetId: string }) {
+    return this.db
+      .insertInto('album_asset')
+      .expression((eb) =>
+        eb
+          .selectFrom('album_asset')
+          .select((eb) => ['album_asset.albumsId', eb.val(targetAssetId).as('assetsId')])
+          .where('album_asset.assetsId', '=', sourceAssetId),
+      )
+      .onConflict((oc) => oc.doNothing())
+      .execute();
+  }
 }
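A sketch of how a caller might drive the two new repository methods during an asset copy (the DI wiring is assumed; the signatures come from the hunk above, and all ids are placeholders):

import { AlbumRepository } from 'src/repositories/album.repository';

declare const albumRepository: AlbumRepository; // injected by Nest in practice

const sourceAssetId = '00000000-0000-4000-a000-000000000000';
const targetAssetId = '00000000-0000-4000-a000-000000000001';

// Link the target asset into every album that already contains the source.
await albumRepository.copyAlbums({ sourceAssetId, targetAssetId });

// Per-user contribution counts for one album, largest contributor first.
const counts = await albumRepository.getContributorCounts('00000000-0000-4000-a000-000000000002');
// => [{ userId: '...', assetCount: 42 }, ...]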
@@ -16,6 +16,7 @@ import {
   withExifInner,
   withFaces,
   withFacesAndPeople,
+  withFilePath,
   withFiles,
 } from 'src/utils/database';

@@ -195,6 +196,15 @@ export class AssetJobRepository {
     .executeTakeFirst();
   }

+  @GenerateSql({ params: [DummyValue.UUID] })
+  getForOcr(id: string) {
+    return this.db
+      .selectFrom('asset')
+      .select((eb) => ['asset.visibility', withFilePath(eb, AssetFileType.Preview).as('previewFile')])
+      .where('asset.id', '=', id)
+      .executeTakeFirst();
+  }
+
   @GenerateSql({ params: [[DummyValue.UUID]] })
   getForSyncAssets(ids: string[]) {
     return this.db
@@ -366,6 +376,21 @@ export class AssetJobRepository {
       .stream();
   }

+  @GenerateSql({ params: [], stream: true })
+  streamForOcrJob(force?: boolean) {
+    return this.db
+      .selectFrom('asset')
+      .select(['asset.id'])
+      .$if(!force, (qb) =>
+        qb
+          .innerJoin('asset_job_status', 'asset_job_status.assetId', 'asset.id')
+          .where('asset_job_status.ocrAt', 'is', null),
+      )
+      .where('asset.deletedAt', 'is', null)
+      .where('asset.visibility', '!=', AssetVisibility.Hidden)
+      .stream();
+  }
+
   @GenerateSql({ params: [DummyValue.DATE], stream: true })
   streamForMigrationJob() {
     return this.db.selectFrom('asset').select(['id']).where('asset.deletedAt', 'is', null).stream();
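streamForOcrJob returns a Kysely stream, so the queue-all handler can iterate it lazily instead of loading every id at once; a minimal sketch of the consuming side (the queue call shape is an assumption based on the JobName values added in this diff):

import { JobName } from 'src/enum';

// Hypothetical consumer; both repositories are injected by Nest in practice.
declare const assetJobRepository: { streamForOcrJob(force?: boolean): AsyncIterable<{ id: string }> };
declare const jobRepository: { queue(item: { name: JobName; data: { id: string } }): Promise<void> };

for await (const { id } of assetJobRepository.streamForOcrJob(false)) {
  await jobRepository.queue({ name: JobName.Ocr, data: { id } });
}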
@@ -4,6 +4,7 @@ import { isEmpty, isUndefined, omitBy } from 'lodash';
 import { InjectKysely } from 'nestjs-kysely';
 import { Stack } from 'src/database';
 import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
+import { AuthDto } from 'src/dtos/auth.dto';
 import { AssetFileType, AssetMetadataKey, AssetOrder, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
 import { DB } from 'src/schema';
 import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
@@ -204,6 +205,7 @@ export class AssetRepository {
     metadataExtractedAt: eb.ref('excluded.metadataExtractedAt'),
     previewAt: eb.ref('excluded.previewAt'),
     thumbnailAt: eb.ref('excluded.thumbnailAt'),
+    ocrAt: eb.ref('excluded.ocrAt'),
   },
   values[0],
 ),
@@ -589,9 +591,9 @@ export class AssetRepository {
   }

   @GenerateSql({
-    params: [DummyValue.TIME_BUCKET, { withStacked: true }],
+    params: [DummyValue.TIME_BUCKET, { withStacked: true }, { user: { id: DummyValue.UUID } }],
   })
-  getTimeBucket(timeBucket: string, options: TimeBucketOptions) {
+  getTimeBucket(timeBucket: string, options: TimeBucketOptions, auth: AuthDto) {
     const query = this.db
       .with('cte', (qb) =>
         qb
@@ -601,7 +603,7 @@ export class AssetRepository {
           'asset.duration',
           'asset.id',
           'asset.visibility',
-          'asset.isFavorite',
+          sql`asset."isFavorite" and asset."ownerId" = ${auth.user.id}`.as('isFavorite'),
           sql`asset.type = 'IMAGE'`.as('isImage'),
           sql`asset."deletedAt" is not null`.as('isTrashed'),
           'asset.livePhotoVideoId',
@@ -231,7 +231,7 @@ export class DatabaseRepository {
   }

   private async reindexVectors(indexName: VectorIndex, { lists }: { lists?: number } = {}): Promise<void> {
-    this.logger.log(`Reindexing ${indexName}`);
+    this.logger.log(`Reindexing ${indexName} (This may take a while, do not restart)`);
     const table = VECTOR_INDEX_TABLES[indexName];
     const vectorExtension = await getVectorExtension(this.db);
@@ -23,6 +23,7 @@ export type SendEmailOptions = {
 export type SmtpOptions = {
   host: string;
   port?: number;
+  secure?: boolean;
   username?: string;
   password?: string;
   ignoreCert?: boolean;
@@ -1,27 +1,15 @@
 import { Injectable } from '@nestjs/common';
 import { ModuleRef, Reflector } from '@nestjs/core';
-import {
-  OnGatewayConnection,
-  OnGatewayDisconnect,
-  OnGatewayInit,
-  WebSocketGateway,
-  WebSocketServer,
-} from '@nestjs/websockets';
 import { ClassConstructor } from 'class-transformer';
 import _ from 'lodash';
-import { Server, Socket } from 'socket.io';
+import { Socket } from 'socket.io';
 import { SystemConfig } from 'src/config';
 import { EventConfig } from 'src/decorators';
 import { AssetResponseDto } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { NotificationDto } from 'src/dtos/notification.dto';
 import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.dto';
 import { SyncAssetExifV1, SyncAssetV1 } from 'src/dtos/sync.dto';
-import { ImmichWorker, MetadataKey, QueueName } from 'src/enum';
+import { ImmichWorker, JobStatus, MetadataKey, QueueName, UserAvatarColor, UserStatus } from 'src/enum';
 import { ConfigRepository } from 'src/repositories/config.repository';
 import { LoggingRepository } from 'src/repositories/logging.repository';
 import { JobItem, JobSource } from 'src/types';
 import { handlePromiseError } from 'src/utils/misc';

 type EmitHandlers = Partial<{ [T in EmitEvent]: Array<EventItem<T>> }>;
@@ -66,8 +54,19 @@ type EventMap = {
   AssetDeleteAll: [{ assetIds: string[]; userId: string }];
   AssetRestoreAll: [{ assetIds: string[]; userId: string }];

   /** a worker receives a job and emits this event to run it */
+  JobRun: [QueueName, JobItem];
+  /** job pre-hook */
   JobStart: [QueueName, JobItem];
-  JobFailed: [{ job: JobItem; error: Error | any }];
+  /** job post-hook */
+  JobComplete: [QueueName, JobItem];
+  /** job finishes without error */
+  JobSuccess: [JobSuccessEvent];
+  /** job finishes with error */
+  JobError: [JobErrorEvent];

+  // queue events
+  QueueStart: [QueueStartEvent];

   // session events
   SessionDelete: [{ sessionId: string }];
@@ -82,38 +81,50 @@ type EventMap = {

   // user events
   UserSignup: [{ notify: boolean; id: string; password?: string }];
   UserCreate: [UserEvent];
   /** user is soft deleted */
   UserTrash: [UserEvent];
   /** user is permanently deleted */
   UserDelete: [UserEvent];
   UserRestore: [UserEvent];

+  AuthChangePassword: [{ userId: string; currentSessionId?: string; invalidateSessions?: boolean }];
+
   // websocket events
   WebsocketConnect: [{ userId: string }];
 };

-export const serverEvents = ['ConfigUpdate'] as const;
-export type ServerEvents = (typeof serverEvents)[number];
+type JobSuccessEvent = { job: JobItem; response?: JobStatus };
+type JobErrorEvent = { job: JobItem; error: Error | any };
+
+type QueueStartEvent = {
+  name: QueueName;
+};
+
+type UserEvent = {
+  name: string;
+  id: string;
+  createdAt: Date;
+  updatedAt: Date;
+  deletedAt: Date | null;
+  status: UserStatus;
+  email: string;
+  profileImagePath: string;
+  isAdmin: boolean;
+  shouldChangePassword: boolean;
+  avatarColor: UserAvatarColor | null;
+  oauthId: string;
+  storageLabel: string | null;
+  quotaSizeInBytes: number | null;
+  quotaUsageInBytes: number;
+  profileChangedAt: Date;
+};

 export type EmitEvent = keyof EventMap;
 export type EmitHandler<T extends EmitEvent> = (...args: ArgsOf<T>) => Promise<void> | void;
 export type ArgOf<T extends EmitEvent> = EventMap[T][0];
 export type ArgsOf<T extends EmitEvent> = EventMap[T];

 export interface ClientEventMap {
   on_upload_success: [AssetResponseDto];
   on_user_delete: [string];
   on_asset_delete: [string];
   on_asset_trash: [string[]];
   on_asset_update: [AssetResponseDto];
   on_asset_hidden: [string];
   on_asset_restore: [string[]];
   on_asset_stack_update: string[];
   on_person_thumbnail: [string];
   on_server_version: [ServerVersionResponseDto];
   on_config_update: [];
   on_new_release: [ReleaseNotification];
   on_notification: [NotificationDto];
   on_session_delete: [string];

   AssetUploadReadyV1: [{ asset: SyncAssetV1; exif: SyncAssetExifV1 }];
 }

 export type EventItem<T extends EmitEvent> = {
   event: T;
   handler: EmitHandler<T>;
@@ -122,18 +133,9 @@ export type EventItem<T extends EmitEvent> = {

 export type AuthFn = (client: Socket) => Promise<AuthDto>;

-@WebSocketGateway({
-  cors: true,
-  path: '/api/socket.io',
-  transports: ['websocket'],
-})
 @Injectable()
-export class EventRepository implements OnGatewayConnection, OnGatewayDisconnect, OnGatewayInit {
+export class EventRepository {
   private emitHandlers: EmitHandlers = {};
   private authFn?: AuthFn;

-  @WebSocketServer()
-  private server?: Server;
-
   constructor(
     private moduleRef: ModuleRef,
@@ -194,38 +196,6 @@ export class EventRepository implements OnGatewayConnection, OnGatewayDisconnect
     }
   }

-  afterInit(server: Server) {
-    this.logger.log('Initialized websocket server');
-
-    for (const event of serverEvents) {
-      server.on(event, (...args: ArgsOf<any>) => {
-        this.logger.debug(`Server event: ${event} (receive)`);
-        handlePromiseError(this.onEvent({ name: event, args, server: true }), this.logger);
-      });
-    }
-  }
-
-  async handleConnection(client: Socket) {
-    try {
-      this.logger.log(`Websocket Connect: ${client.id}`);
-      const auth = await this.authenticate(client);
-      await client.join(auth.user.id);
-      if (auth.session) {
-        await client.join(auth.session.id);
-      }
-      await this.onEvent({ name: 'WebsocketConnect', args: [{ userId: auth.user.id }], server: false });
-    } catch (error: Error | any) {
-      this.logger.error(`Websocket connection error: ${error}`, error?.stack);
-      client.emit('error', 'unauthorized');
-      client.disconnect();
-    }
-  }
-
-  async handleDisconnect(client: Socket) {
-    this.logger.log(`Websocket Disconnect: ${client.id}`);
-    await client.leave(client.nsp.name);
-  }
-
   private addHandler<T extends EmitEvent>(item: Item<T>): void {
     const event = item.event;
@@ -240,7 +210,7 @@ export class EventRepository implements OnGatewayConnection, OnGatewayDisconnect
     return this.onEvent({ name: event, args, server: false });
   }

-  private async onEvent<T extends EmitEvent>(event: { name: T; args: ArgsOf<T>; server: boolean }): Promise<void> {
+  async onEvent<T extends EmitEvent>(event: { name: T; args: ArgsOf<T>; server: boolean }): Promise<void> {
     const handlers = this.emitHandlers[event.name] || [];
     for (const { handler, server } of handlers) {
       // exclude handlers that ignore server events
@@ -251,29 +221,4 @@ export class EventRepository implements OnGatewayConnection, OnGatewayDisconnect
       await handler(...event.args);
     }
   }
-
-  clientSend<T extends keyof ClientEventMap>(event: T, room: string, ...data: ClientEventMap[T]) {
-    this.server?.to(room).emit(event, ...data);
-  }
-
-  clientBroadcast<T extends keyof ClientEventMap>(event: T, ...data: ClientEventMap[T]) {
-    this.server?.emit(event, ...data);
-  }
-
-  serverSend<T extends ServerEvents>(event: T, ...args: ArgsOf<T>): void {
-    this.logger.debug(`Server event: ${event} (send)`);
-    this.server?.serverSideEmit(event, ...args);
-  }
-
-  setAuthFn(fn: (client: Socket) => Promise<AuthDto>) {
-    this.authFn = fn;
-  }
-
-  private async authenticate(client: Socket) {
-    if (!this.authFn) {
-      throw new Error('Auth function not set');
-    }
-
-    return this.authFn(client);
-  }
 }
@@ -25,12 +25,14 @@ import { MetadataRepository } from 'src/repositories/metadata.repository';
 import { MoveRepository } from 'src/repositories/move.repository';
 import { NotificationRepository } from 'src/repositories/notification.repository';
 import { OAuthRepository } from 'src/repositories/oauth.repository';
+import { OcrRepository } from 'src/repositories/ocr.repository';
 import { PartnerRepository } from 'src/repositories/partner.repository';
 import { PersonRepository } from 'src/repositories/person.repository';
 import { ProcessRepository } from 'src/repositories/process.repository';
 import { SearchRepository } from 'src/repositories/search.repository';
 import { ServerInfoRepository } from 'src/repositories/server-info.repository';
 import { SessionRepository } from 'src/repositories/session.repository';
+import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
 import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
 import { StackRepository } from 'src/repositories/stack.repository';
 import { StorageRepository } from 'src/repositories/storage.repository';
@@ -43,6 +45,7 @@ import { TrashRepository } from 'src/repositories/trash.repository';
 import { UserRepository } from 'src/repositories/user.repository';
 import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
 import { ViewRepository } from 'src/repositories/view-repository';
+import { WebsocketRepository } from 'src/repositories/websocket.repository';

 export const repositories = [
   AccessRepository,
@@ -72,6 +75,7 @@ export const repositories = [
   MoveRepository,
   NotificationRepository,
   OAuthRepository,
+  OcrRepository,
   PartnerRepository,
   PersonRepository,
   ProcessRepository,
@@ -79,6 +83,7 @@ export const repositories = [
   SessionRepository,
   ServerInfoRepository,
   SharedLinkRepository,
+  SharedLinkAssetRepository,
   StackRepository,
   StorageRepository,
   SyncRepository,
@@ -90,4 +95,5 @@ export const repositories = [
   UserRepository,
   ViewRepository,
   VersionHistoryRepository,
+  WebsocketRepository,
 ];
@@ -89,7 +89,7 @@ export class JobRepository {
     this.logger.debug(`Starting worker for queue: ${queueName}`);
     this.workers[queueName] = new Worker(
       queueName,
-      (job) => this.eventRepository.emit('JobStart', queueName, job as JobItem),
+      (job) => this.eventRepository.emit('JobRun', queueName, job as JobItem),
       { ...bull.config, concurrency: 1 },
     );
   }
@@ -15,6 +15,7 @@ export interface BoundingBox {
export enum ModelTask {
  FACIAL_RECOGNITION = 'facial-recognition',
  SEARCH = 'clip',
  OCR = 'ocr',
}

export enum ModelType {
@@ -23,6 +24,7 @@ export enum ModelType {
  RECOGNITION = 'recognition',
  TEXTUAL = 'textual',
  VISUAL = 'visual',
  OCR = 'ocr',
}

export type ModelPayload = { imagePath: string } | { text: string };
@@ -30,7 +32,11 @@ export type ModelPayload = { imagePath: string } | { text: string };
type ModelOptions = { modelName: string };

export type FaceDetectionOptions = ModelOptions & { minScore: number };

export type OcrOptions = ModelOptions & {
  minDetectionScore: number;
  minRecognitionScore: number;
  maxResolution: number;
};
type VisualResponse = { imageHeight: number; imageWidth: number };
export type ClipVisualRequest = { [ModelTask.SEARCH]: { [ModelType.VISUAL]: ModelOptions } };
export type ClipVisualResponse = { [ModelTask.SEARCH]: string } & VisualResponse;
@@ -38,6 +44,21 @@ export type ClipVisualResponse = { [ModelTask.SEARCH]: string } & VisualResponse
export type ClipTextualRequest = { [ModelTask.SEARCH]: { [ModelType.TEXTUAL]: ModelOptions } };
export type ClipTextualResponse = { [ModelTask.SEARCH]: string };

export type OCR = {
  text: string[];
  box: number[];
  boxScore: number[];
  textScore: number[];
};

export type OcrRequest = {
  [ModelTask.OCR]: {
    [ModelType.DETECTION]: ModelOptions & { options: { minScore: number; maxResolution: number } };
    [ModelType.RECOGNITION]: ModelOptions & { options: { minScore: number } };
  };
};
export type OcrResponse = { [ModelTask.OCR]: OCR } & VisualResponse;

export type FacialRecognitionRequest = {
  [ModelTask.FACIAL_RECOGNITION]: {
    [ModelType.DETECTION]: ModelOptions & { options: { minScore: number } };
@@ -53,7 +74,7 @@ export interface Face {

export type FacialRecognitionResponse = { [ModelTask.FACIAL_RECOGNITION]: Face[] } & VisualResponse;
export type DetectedFaces = { faces: Face[] } & VisualResponse;
-export type MachineLearningRequest = ClipVisualRequest | ClipTextualRequest | FacialRecognitionRequest;
+export type MachineLearningRequest = ClipVisualRequest | ClipTextualRequest | FacialRecognitionRequest | OcrRequest;
export type TextEncodingOptions = ModelOptions & { language?: string };

@Injectable()
@@ -85,7 +106,7 @@ export class MachineLearningRepository {
      }
    }

-   if (!config.availabilityChecks.enabled) {
+   if (!config.enabled || !config.availabilityChecks.enabled) {
      return;
    }

@@ -197,6 +218,17 @@ export class MachineLearningRepository {
    return response[ModelTask.SEARCH];
  }

  async ocr(imagePath: string, { modelName, minDetectionScore, minRecognitionScore, maxResolution }: OcrOptions) {
    const request = {
      [ModelTask.OCR]: {
        [ModelType.DETECTION]: { modelName, options: { minScore: minDetectionScore, maxResolution } },
        [ModelType.RECOGNITION]: { modelName, options: { minScore: minRecognitionScore } },
      },
    };
    const response = await this.predict<OcrResponse>({ imagePath }, request);
    return response[ModelTask.OCR];
  }

  private async getFormData(payload: ModelPayload, config: MachineLearningRequest): Promise<FormData> {
    const formData = new FormData();
    formData.append('entries', JSON.stringify(config));
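For orientation, a minimal standalone sketch of how the `ocr()` payload above nests per task and per model type. The string values for `DETECTION`/`RECOGNITION` and the sample model name are assumptions for illustration, not taken from this diff:

```ts
// Local stand-ins that mirror the enums and options added above.
enum ModelTask { OCR = 'ocr' }
enum ModelType { DETECTION = 'detection', RECOGNITION = 'recognition' } // assumed values

type OcrOptions = {
  modelName: string;
  minDetectionScore: number;
  minRecognitionScore: number;
  maxResolution: number;
};

// Builds the nested request shape: one entry per task, with per-model-type
// options underneath, matching the OcrRequest type in the diff.
function buildOcrRequest({ modelName, minDetectionScore, minRecognitionScore, maxResolution }: OcrOptions) {
  return {
    [ModelTask.OCR]: {
      [ModelType.DETECTION]: { modelName, options: { minScore: minDetectionScore, maxResolution } },
      [ModelType.RECOGNITION]: { modelName, options: { minScore: minRecognitionScore } },
    },
  };
}

// Hypothetical model name and thresholds, purely for demonstration.
console.log(JSON.stringify(buildOcrRequest({
  modelName: 'example-ocr-model',
  minDetectionScore: 0.4,
  minRecognitionScore: 0.8,
  maxResolution: 1280,
})));
```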
@@ -203,6 +203,9 @@ export class MediaRepository {
          isHDR: stream.color_transfer === 'smpte2084' || stream.color_transfer === 'arib-std-b67',
          bitrate: this.parseInt(stream.bit_rate),
          pixelFormat: stream.pix_fmt || 'yuv420p',
          colorPrimaries: stream.color_primaries,
          colorSpace: stream.color_space,
          colorTransfer: stream.color_transfer,
        })),
        audioStreams: results.streams
          .filter((stream) => stream.codec_type === 'audio')
@@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
import { BinaryField, DefaultReadTaskOptions, ExifTool, Tags } from 'exiftool-vendored';
import geotz from 'geo-tz';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { mimeTypes } from 'src/utils/mime-types';

interface ExifDuration {
  Value: number;
@@ -84,6 +85,7 @@ export class MetadataRepository {
      numericTags: [...DefaultReadTaskOptions.numericTags, 'FocalLength', 'FileSize'],
      /* eslint unicorn/no-array-callback-reference: off, unicorn/no-array-method-this-argument: off */
      geoTz: (lat, lon) => geotz.find(lat, lon)[0],
      geolocation: true,
      // Enable exiftool LFS to parse metadata for files larger than 2GB.
      readArgs: ['-api', 'largefilesupport=1'],
      writeArgs: ['-api', 'largefilesupport=1', '-overwrite_original'],
@@ -102,7 +104,8 @@ export class MetadataRepository {
  }

  readTags(path: string): Promise<ImmichTags> {
-   return this.exiftool.read(path).catch((error) => {
+   const args = mimeTypes.isVideo(path) ? ['-ee'] : [];
+   return this.exiftool.read(path, args).catch((error) => {
      this.logger.warn(`Error reading exif data (${path}): ${error}\n${error?.stack}`);
      return {};
    }) as Promise<ImmichTags>;
server/src/repositories/ocr.repository.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DummyValue, GenerateSql } from 'src/decorators';
import { DB } from 'src/schema';
import { AssetOcrTable } from 'src/schema/tables/asset-ocr.table';

@Injectable()
export class OcrRepository {
  constructor(@InjectKysely() private db: Kysely<DB>) {}

  @GenerateSql({ params: [DummyValue.UUID] })
  getById(id: string) {
    return this.db.selectFrom('asset_ocr').selectAll('asset_ocr').where('asset_ocr.id', '=', id).executeTakeFirst();
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getByAssetId(id: string) {
    return this.db.selectFrom('asset_ocr').selectAll('asset_ocr').where('asset_ocr.assetId', '=', id).execute();
  }

  deleteAll() {
    return this.db.transaction().execute(async (trx: Kysely<DB>) => {
      await sql`truncate ${sql.table('asset_ocr')}`.execute(trx);
      await sql`truncate ${sql.table('ocr_search')}`.execute(trx);
    });
  }

  @GenerateSql({
    params: [
      DummyValue.UUID,
      [
        {
          assetId: DummyValue.UUID,
          x1: DummyValue.NUMBER,
          y1: DummyValue.NUMBER,
          x2: DummyValue.NUMBER,
          y2: DummyValue.NUMBER,
          x3: DummyValue.NUMBER,
          y3: DummyValue.NUMBER,
          x4: DummyValue.NUMBER,
          y4: DummyValue.NUMBER,
          text: DummyValue.STRING,
          boxScore: DummyValue.NUMBER,
          textScore: DummyValue.NUMBER,
        },
      ],
    ],
  })
  upsert(assetId: string, ocrDataList: Insertable<AssetOcrTable>[]) {
    let query = this.db.with('deleted_ocr', (db) => db.deleteFrom('asset_ocr').where('assetId', '=', assetId));
    if (ocrDataList.length > 0) {
      const searchText = ocrDataList.map((item) => item.text.trim()).join(' ');
      (query as any) = query
        .with('inserted_ocr', (db) => db.insertInto('asset_ocr').values(ocrDataList))
        .with('inserted_search', (db) =>
          db
            .insertInto('ocr_search')
            .values({ assetId, text: searchText })
            .onConflict((oc) => oc.column('assetId').doUpdateSet((eb) => ({ text: eb.ref('excluded.text') }))),
        );
    } else {
      (query as any) = query.with('deleted_search', (db) => db.deleteFrom('ocr_search').where('assetId', '=', assetId));
    }

    return query.selectNoFrom(sql`1`.as('dummy')).execute();
  }
}
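The `upsert` above keeps a denormalized one-row-per-asset `ocr_search` table in sync with the per-box `asset_ocr` rows, so trigram search only scans a single text column. A tiny runnable sketch of the search-text derivation it performs (sample values invented):

```ts
type AssetOcrRow = { text: string; boxScore: number; textScore: number };

// Mirrors the repository's derivation: trim each recognized string and join
// them into one space-separated document per asset.
function toSearchText(rows: AssetOcrRow[]): string {
  return rows.map((row) => row.text.trim()).join(' ');
}

console.log(toSearchText([
  { text: ' STOP ', boxScore: 0.99, textScore: 0.97 },
  { text: 'Main St', boxScore: 0.95, textScore: 0.9 },
])); // "STOP Main St"
```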
@@ -84,6 +84,10 @@ export interface SearchEmbeddingOptions {
  userIds: string[];
}

export interface SearchOcrOptions {
  ocr?: string;
}

export interface SearchPeopleOptions {
  personIds?: string[];
}
@@ -114,7 +118,8 @@ type BaseAssetSearchOptions = SearchDateOptions &
  SearchUserIdOptions &
  SearchPeopleOptions &
  SearchTagOptions &
-  SearchAlbumOptions;
+  SearchAlbumOptions &
+  SearchOcrOptions;

export type AssetSearchOptions = BaseAssetSearchOptions & SearchRelationOptions;

@@ -127,7 +132,10 @@ export type SmartSearchOptions = SearchDateOptions &
  SearchStatusOptions &
  SearchUserIdOptions &
  SearchPeopleOptions &
-  SearchTagOptions;
+  SearchTagOptions &
+  SearchOcrOptions;

export type OcrSearchOptions = SearchDateOptions & SearchOcrOptions;

export type LargeAssetSearchOptions = AssetSearchOptions & { minFileSize?: number };

@@ -160,10 +168,17 @@ export interface GetCitiesOptions extends GetStatesOptions {

export interface GetCameraModelsOptions {
  make?: string;
  lensModel?: string;
}

export interface GetCameraMakesOptions {
  model?: string;
  lensModel?: string;
}

export interface GetCameraLensModelsOptions {
  make?: string;
  model?: string;
}

@Injectable()
@@ -457,25 +472,40 @@ export class SearchRepository {
    return res.map((row) => row.city!);
  }

-  @GenerateSql({ params: [[DummyValue.UUID], DummyValue.STRING] })
-  async getCameraMakes(userIds: string[], { model }: GetCameraMakesOptions): Promise<string[]> {
+  @GenerateSql({ params: [[DummyValue.UUID], DummyValue.STRING, DummyValue.STRING] })
+  async getCameraMakes(userIds: string[], { model, lensModel }: GetCameraMakesOptions): Promise<string[]> {
    const res = await this.getExifField('make', userIds)
      .$if(!!model, (qb) => qb.where('model', '=', model!))
      .$if(!!lensModel, (qb) => qb.where('lensModel', '=', lensModel!))
      .execute();

    return res.map((row) => row.make!);
  }

-  @GenerateSql({ params: [[DummyValue.UUID], DummyValue.STRING] })
-  async getCameraModels(userIds: string[], { make }: GetCameraModelsOptions): Promise<string[]> {
+  @GenerateSql({ params: [[DummyValue.UUID], DummyValue.STRING, DummyValue.STRING] })
+  async getCameraModels(userIds: string[], { make, lensModel }: GetCameraModelsOptions): Promise<string[]> {
    const res = await this.getExifField('model', userIds)
      .$if(!!make, (qb) => qb.where('make', '=', make!))
      .$if(!!lensModel, (qb) => qb.where('lensModel', '=', lensModel!))
      .execute();

    return res.map((row) => row.model!);
  }

-  private getExifField<K extends 'city' | 'state' | 'country' | 'make' | 'model'>(field: K, userIds: string[]) {
+  @GenerateSql({ params: [[DummyValue.UUID], DummyValue.STRING] })
+  async getCameraLensModels(userIds: string[], { make, model }: GetCameraLensModelsOptions): Promise<string[]> {
+    const res = await this.getExifField('lensModel', userIds)
+      .$if(!!make, (qb) => qb.where('make', '=', make!))
+      .$if(!!model, (qb) => qb.where('model', '=', model!))
+      .execute();
+
+    return res.map((row) => row.lensModel!);
+  }
+
+  private getExifField<K extends 'city' | 'state' | 'country' | 'make' | 'model' | 'lensModel'>(
+    field: K,
+    userIds: string[],
+  ) {
    return this.db
      .selectFrom('asset_exif')
      .select(field)
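The optional filters above lean on Kysely's `$if()`, which applies the `where` clause only when the flag is truthy, so omitted parameters never reach the SQL. A plain-TypeScript sketch of the same compose-only-what-was-provided pattern (names here are illustrative, not from the PR):

```ts
type Filter = { field: string; value: string };

// Each optional input contributes a filter only when it is actually set,
// analogous to chaining .$if(!!x, qb => qb.where(...)) on a query builder.
function buildFilters({ make, model, lensModel }: { make?: string; model?: string; lensModel?: string }): Filter[] {
  const filters: Filter[] = [];
  if (make) filters.push({ field: 'make', value: make });
  if (model) filters.push({ field: 'model', value: model });
  if (lensModel) filters.push({ field: 'lensModel', value: lensModel });
  return filters;
}

console.log(buildFilters({ make: 'Canon' })); // only the provided filter is emitted
```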
@@ -101,6 +101,15 @@ export class SessionRepository {
    await this.db.deleteFrom('session').where('id', '=', asUuid(id)).execute();
  }

  @GenerateSql({ params: [{ userId: DummyValue.UUID, excludeId: DummyValue.UUID }] })
  async invalidate({ userId, excludeId }: { userId: string; excludeId?: string }) {
    await this.db
      .deleteFrom('session')
      .where('userId', '=', userId)
      .$if(!!excludeId, (qb) => qb.where('id', '!=', excludeId!))
      .execute();
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  async lockAll(userId: string) {
    await this.db.updateTable('session').set({ pinExpiresAt: null }).where('userId', '=', userId).execute();
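`invalidate()` powers the new change-password flow: drop every session for a user except, optionally, the one making the request. A runnable in-memory sketch of that semantics (sample data invented):

```ts
type Session = { id: string; userId: string };

// Mirrors DELETE FROM session WHERE userId = ? [AND id != excludeId]:
// the current session survives only when an excludeId is passed.
function invalidate(sessions: Session[], userId: string, excludeId?: string): Session[] {
  return sessions.filter((s) => s.userId !== userId || s.id === excludeId);
}

const sessions = [
  { id: 'a', userId: 'u1' },
  { id: 'b', userId: 'u1' },
  { id: 'c', userId: 'u2' },
];
console.log(invalidate(sessions, 'u1', 'a')); // keeps 'a' (current) and 'c' (other user)
```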
server/src/repositories/shared-link-asset.repository.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import { Kysely } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DummyValue, GenerateSql } from 'src/decorators';
import { DB } from 'src/schema';

export class SharedLinkAssetRepository {
  constructor(@InjectKysely() private db: Kysely<DB>) {}

  async remove(sharedLinkId: string, assetsId: string[]) {
    const deleted = await this.db
      .deleteFrom('shared_link_asset')
      .where('shared_link_asset.sharedLinksId', '=', sharedLinkId)
      .where('shared_link_asset.assetsId', 'in', assetsId)
      .returning('assetsId')
      .execute();

    return deleted.map((row) => row.assetsId);
  }

  @GenerateSql({ params: [{ sourceAssetId: DummyValue.UUID, targetAssetId: DummyValue.UUID }] })
  async copySharedLinks({ sourceAssetId, targetAssetId }: { sourceAssetId: string; targetAssetId: string }) {
    return this.db
      .insertInto('shared_link_asset')
      .expression((eb) =>
        eb
          .selectFrom('shared_link_asset')
          .select((eb) => [eb.val(targetAssetId).as('assetsId'), 'shared_link_asset.sharedLinksId'])
          .where('shared_link_asset.assetsId', '=', sourceAssetId),
      )
      .onConflict((oc) => oc.doNothing())
      .execute();
  }
}
@@ -162,4 +162,9 @@ export class StackRepository {
      .where('asset.id', '=', assetId)
      .executeTakeFirst();
  }

  @GenerateSql({ params: [{ sourceId: DummyValue.UUID, targetId: DummyValue.UUID }] })
  merge({ sourceId, targetId }: { sourceId: string; targetId: string }) {
    return this.db.updateTable('asset').set({ stackId: targetId }).where('asset.stackId', '=', sourceId).execute();
  }
}
@@ -2,7 +2,7 @@ import { Injectable } from '@nestjs/common';
import archiver from 'archiver';
import chokidar, { ChokidarOptions } from 'chokidar';
import { escapePath, glob, globStream } from 'fast-glob';
-import { constants, createReadStream, createWriteStream, existsSync, mkdirSync } from 'node:fs';
+import { constants, createReadStream, createWriteStream, existsSync, mkdirSync, ReadOptionsWithBuffer } from 'node:fs';
import fs from 'node:fs/promises';
import path from 'node:path';
import { Readable, Writable } from 'node:stream';
@@ -103,11 +103,11 @@ export class StorageRepository {
    };
  }

-  async readFile(filepath: string, options?: fs.FileReadOptions<Buffer>): Promise<Buffer> {
+  async readFile(filepath: string, options?: ReadOptionsWithBuffer<Buffer>): Promise<Buffer> {
    const file = await fs.open(filepath);
    try {
      const { buffer } = await file.read(options);
-      return buffer;
+      return buffer as Buffer;
    } finally {
      await file.close();
    }
@@ -163,22 +163,22 @@ export class TagRepository {
  }

  async deleteEmptyTags() {
-    // TODO rewrite as a single statement
-    await this.db.transaction().execute(async (tx) => {
-      const result = await tx
-        .selectFrom('asset')
-        .innerJoin('tag_asset', 'tag_asset.assetsId', 'asset.id')
-        .innerJoin('tag_closure', 'tag_closure.id_descendant', 'tag_asset.tagsId')
-        .innerJoin('tag', 'tag.id', 'tag_closure.id_descendant')
-        .select((eb) => ['tag.id', eb.fn.count<number>('asset.id').as('count')])
-        .groupBy('tag.id')
-        .execute();
+    const result = await this.db
+      .deleteFrom('tag')
+      .where(({ not, exists, selectFrom }) =>
+        not(
+          exists(
+            selectFrom('tag_closure')
+              .whereRef('tag.id', '=', 'tag_closure.id_ancestor')
+              .innerJoin('tag_asset', 'tag_closure.id_descendant', 'tag_asset.tagsId'),
+          ),
+        ),
+      )
+      .executeTakeFirst();

-      const ids = result.filter(({ count }) => count === 0).map(({ id }) => id);
-      if (ids.length > 0) {
-        await this.db.deleteFrom('tag').where('id', 'in', ids).execute();
-        this.logger.log(`Deleted ${ids.length} empty tags`);
-      }
-    });
+    const deletedRows = Number(result.numDeletedRows);
+    if (deletedRows > 0) {
+      this.logger.log(`Deleted ${deletedRows} empty tags`);
+    }
  }
}
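The rewrite folds select-count-then-delete into one `DELETE … WHERE NOT EXISTS` over the closure table. A runnable in-memory analogue of that survival rule, with invented sample data: a tag is empty only if no descendant (itself included) still has tagged assets.

```ts
type TagClosure = { ancestor: string; descendant: string };

// A tag is deletable when none of its closure descendants appear in the
// set of tags that still have asset rows — the NOT EXISTS condition above.
function emptyTagIds(tagIds: string[], closure: TagClosure[], taggedTagIds: Set<string>): string[] {
  return tagIds.filter(
    (id) => !closure.some((c) => c.ancestor === id && taggedTagIds.has(c.descendant)),
  );
}

const closure = [
  { ancestor: 'root', descendant: 'root' },
  { ancestor: 'root', descendant: 'child' },
  { ancestor: 'child', descendant: 'child' },
];
console.log(emptyTagIds(['root', 'child'], closure, new Set(['child']))); // [] — both survive
console.log(emptyTagIds(['root', 'child'], closure, new Set())); // ['root', 'child']
```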
@@ -286,6 +286,16 @@ export class UserRepository {
      .execute();
  }

  @GenerateSql()
  async getCount(): Promise<number> {
    const result = await this.db
      .selectFrom('user')
      .select((eb) => eb.fn.countAll().as('count'))
      .where('user.deletedAt', 'is', null)
      .executeTakeFirstOrThrow();
    return Number(result.count);
  }

  @GenerateSql({ params: [DummyValue.UUID, DummyValue.NUMBER] })
  async updateUsage(id: string, delta: number): Promise<void> {
    await this.db
server/src/repositories/websocket.repository.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
import { Injectable } from '@nestjs/common';
import {
  OnGatewayConnection,
  OnGatewayDisconnect,
  OnGatewayInit,
  WebSocketGateway,
  WebSocketServer,
} from '@nestjs/websockets';
import { Server, Socket } from 'socket.io';
import { AssetResponseDto } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { NotificationDto } from 'src/dtos/notification.dto';
import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.dto';
import { SyncAssetExifV1, SyncAssetV1 } from 'src/dtos/sync.dto';
import { ArgsOf, EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { handlePromiseError } from 'src/utils/misc';

export const serverEvents = ['ConfigUpdate'] as const;
export type ServerEvents = (typeof serverEvents)[number];

export interface ClientEventMap {
  on_upload_success: [AssetResponseDto];
  on_user_delete: [string];
  on_asset_delete: [string];
  on_asset_trash: [string[]];
  on_asset_update: [AssetResponseDto];
  on_asset_hidden: [string];
  on_asset_restore: [string[]];
  on_asset_stack_update: string[];
  on_person_thumbnail: [string];
  on_server_version: [ServerVersionResponseDto];
  on_config_update: [];
  on_new_release: [ReleaseNotification];
  on_notification: [NotificationDto];
  on_session_delete: [string];

  AssetUploadReadyV1: [{ asset: SyncAssetV1; exif: SyncAssetExifV1 }];
}

export type AuthFn = (client: Socket) => Promise<AuthDto>;

@WebSocketGateway({
  cors: true,
  path: '/api/socket.io',
  transports: ['websocket'],
})
@Injectable()
export class WebsocketRepository implements OnGatewayConnection, OnGatewayDisconnect, OnGatewayInit {
  private authFn?: AuthFn;

  @WebSocketServer()
  private server?: Server;

  constructor(
    private eventRepository: EventRepository,
    private logger: LoggingRepository,
  ) {
    this.logger.setContext(WebsocketRepository.name);
  }

  afterInit(server: Server) {
    this.logger.log('Initialized websocket server');

    for (const event of serverEvents) {
      server.on(event, (...args: ArgsOf<any>) => {
        this.logger.debug(`Server event: ${event} (receive)`);
        handlePromiseError(this.eventRepository.onEvent({ name: event, args, server: true }), this.logger);
      });
    }
  }

  async handleConnection(client: Socket) {
    try {
      this.logger.log(`Websocket Connect: ${client.id}`);
      const auth = await this.authenticate(client);
      await client.join(auth.user.id);
      if (auth.session) {
        await client.join(auth.session.id);
      }
      await this.eventRepository.emit('WebsocketConnect', { userId: auth.user.id });
    } catch (error: Error | any) {
      this.logger.error(`Websocket connection error: ${error}`, error?.stack);
      client.emit('error', 'unauthorized');
      client.disconnect();
    }
  }

  async handleDisconnect(client: Socket) {
    this.logger.log(`Websocket Disconnect: ${client.id}`);
    await client.leave(client.nsp.name);
  }

  clientSend<T extends keyof ClientEventMap>(event: T, room: string, ...data: ClientEventMap[T]) {
    this.server?.to(room).emit(event, ...data);
  }

  clientBroadcast<T extends keyof ClientEventMap>(event: T, ...data: ClientEventMap[T]) {
    this.server?.emit(event, ...data);
  }

  serverSend<T extends ServerEvents>(event: T, ...args: ArgsOf<T>): void {
    this.logger.debug(`Server event: ${event} (send)`);
    this.server?.serverSideEmit(event, ...args);
  }

  setAuthFn(fn: (client: Socket) => Promise<AuthDto>) {
    this.authFn = fn;
  }

  private async authenticate(client: Socket) {
    if (!this.authFn) {
      throw new Error('Auth function not set');
    }

    return this.authFn(client);
  }
}
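For consumers, a hedged sketch of connecting to this gateway from Node with socket.io-client, using the path and websocket-only transport declared above. The URL and the bearer-style header are assumptions about deployment and how the server-side auth function extracts credentials; they are not part of this diff.

```ts
import { io } from 'socket.io-client';

// Connect with the gateway options from WebsocketRepository; extraHeaders is
// honored by the Node client for the websocket transport.
const socket = io('https://immich.example.com', {
  path: '/api/socket.io',
  transports: ['websocket'],
  extraHeaders: { Authorization: 'Bearer <api-key>' }, // assumed auth scheme
});

// Event names and payload tuples follow ClientEventMap.
socket.on('on_upload_success', (asset) => console.log('uploaded', asset.id));
socket.on('on_session_delete', (sessionId: string) => console.log('session revoked', sessionId));
socket.on('error', (message: string) => console.error('socket error', message));
```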
@@ -35,6 +35,7 @@ import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
import { AssetMetadataAuditTable } from 'src/schema/tables/asset-metadata-audit.table';
import { AssetMetadataTable } from 'src/schema/tables/asset-metadata.table';
import { AssetOcrTable } from 'src/schema/tables/asset-ocr.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { AuditTable } from 'src/schema/tables/audit.table';
import { FaceSearchTable } from 'src/schema/tables/face-search.table';
@@ -47,6 +48,7 @@ import { MemoryTable } from 'src/schema/tables/memory.table';
import { MoveTable } from 'src/schema/tables/move.table';
import { NaturalEarthCountriesTable } from 'src/schema/tables/natural-earth-countries.table';
import { NotificationTable } from 'src/schema/tables/notification.table';
import { OcrSearchTable } from 'src/schema/tables/ocr-search.table';
import { PartnerAuditTable } from 'src/schema/tables/partner-audit.table';
import { PartnerTable } from 'src/schema/tables/partner.table';
import { PersonAuditTable } from 'src/schema/tables/person-audit.table';
@@ -87,6 +89,7 @@ export class ImmichDatabase {
    AssetMetadataTable,
    AssetMetadataAuditTable,
    AssetJobStatusTable,
    AssetOcrTable,
    AssetTable,
    AssetFileTable,
    AuditTable,
@@ -101,6 +104,7 @@ export class ImmichDatabase {
    MoveTable,
    NaturalEarthCountriesTable,
    NotificationTable,
    OcrSearchTable,
    PartnerAuditTable,
    PartnerTable,
    PersonTable,
@@ -174,6 +178,8 @@ export interface DB {
  asset_metadata: AssetMetadataTable;
  asset_metadata_audit: AssetMetadataAuditTable;
  asset_job_status: AssetJobStatusTable;
  asset_ocr: AssetOcrTable;
  ocr_search: OcrSearchTable;

  audit: AuditTable;
@@ -16,7 +16,9 @@ export async function up(db: Kysely<any>): Promise<void> {
      rows: [lastMigration],
    } = await lastMigrationSql.execute(db);
    if (lastMigration?.name !== 'AddMissingIndex1744910873956') {
-      throw new Error('Invalid upgrade path. For more information, see https://immich.app/errors#typeorm-upgrade');
+      throw new Error(
+        'Invalid upgrade path. For more information, see https://docs.immich.app/errors/#typeorm-upgrade',
+      );
    }
    logger.log('Database has up to date TypeORM migrations, skipping initial Kysely migration');
    return;
@@ -0,0 +1,16 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`CREATE TABLE "asset_ocr" ("id" uuid NOT NULL DEFAULT uuid_generate_v4(), "assetId" uuid NOT NULL, "x1" real NOT NULL, "y1" real NOT NULL, "x2" real NOT NULL, "y2" real NOT NULL, "x3" real NOT NULL, "y3" real NOT NULL, "x4" real NOT NULL, "y4" real NOT NULL, "boxScore" real NOT NULL, "textScore" real NOT NULL, "text" text NOT NULL);`.execute(
    db,
  );
  await sql`ALTER TABLE "asset_ocr" ADD CONSTRAINT "asset_ocr_pkey" PRIMARY KEY ("id");`.execute(db);
  await sql`ALTER TABLE "asset_ocr" ADD CONSTRAINT "asset_ocr_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(
    db,
  );
  await sql`CREATE INDEX "asset_ocr_assetId_idx" ON "asset_ocr" ("assetId")`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP TABLE "asset_ocr";`.execute(db);
}
@@ -0,0 +1,20 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`CREATE TABLE "ocr_search" ("assetId" uuid NOT NULL, "text" text NOT NULL);`.execute(db);
  await sql`ALTER TABLE "ocr_search" ADD CONSTRAINT "ocr_search_pkey" PRIMARY KEY ("assetId");`.execute(db);
  await sql`ALTER TABLE "ocr_search" ADD CONSTRAINT "ocr_search_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(
    db,
  );
  await sql`CREATE INDEX "idx_ocr_search_text" ON "ocr_search" USING gin (f_unaccent("text") gin_trgm_ops);`.execute(
    db,
  );
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('index_idx_ocr_search_text', '{"type":"index","name":"idx_ocr_search_text","sql":"CREATE INDEX \\"idx_ocr_search_text\\" ON \\"ocr_search\\" USING gin (f_unaccent(\\"text\\") gin_trgm_ops);"}'::jsonb);`.execute(
    db,
  );
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP TABLE "ocr_search";`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'index_idx_ocr_search_text';`.execute(db);
}
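The GIN index above is an expression index, so queries only benefit when they repeat the same `f_unaccent("text")` expression. A sketch of the query shape it can serve — this is illustrative SQL, not code from the PR:

```ts
// pg_trgm GIN indexes accelerate LIKE/ILIKE; matching the indexed expression
// on the left-hand side lets the planner use idx_ocr_search_text.
const ocrSearchSql = `
  SELECT "assetId"
  FROM "ocr_search"
  WHERE f_unaccent("text") ILIKE '%' || f_unaccent($1) || '%'
`;
console.log(ocrSearchSql.trim());
```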
@@ -0,0 +1,9 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset_job_status" ADD "ocrAt" timestamp with time zone;`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset_job_status" DROP COLUMN "ocrAt";`.execute(db);
}
@@ -0,0 +1,9 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "session" ADD "appVersion" character varying;`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "session" DROP COLUMN "appVersion";`.execute(db);
}
@@ -20,4 +20,7 @@ export class AssetJobStatusTable {

  @Column({ type: 'timestamp with time zone', nullable: true })
  thumbnailAt!: Timestamp | null;

  @Column({ type: 'timestamp with time zone', nullable: true })
  ocrAt!: Timestamp | null;
}
server/src/schema/tables/asset-ocr.table.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
import { AssetTable } from 'src/schema/tables/asset.table';
import { Column, ForeignKeyColumn, Generated, PrimaryGeneratedColumn, Table } from 'src/sql-tools';

@Table('asset_ocr')
export class AssetOcrTable {
  @PrimaryGeneratedColumn()
  id!: Generated<string>;

  @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
  assetId!: string;

  // box positions are normalized, with values between 0 and 1
  @Column({ type: 'real' })
  x1!: number;

  @Column({ type: 'real' })
  y1!: number;

  @Column({ type: 'real' })
  x2!: number;

  @Column({ type: 'real' })
  y2!: number;

  @Column({ type: 'real' })
  x3!: number;

  @Column({ type: 'real' })
  y3!: number;

  @Column({ type: 'real' })
  x4!: number;

  @Column({ type: 'real' })
  y4!: number;

  @Column({ type: 'real' })
  boxScore!: number;

  @Column({ type: 'real' })
  textScore!: number;

  @Column({ type: 'text' })
  text!: string;
}
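Because the corner coordinates are stored normalized (see the comment in the table above), rendering an overlay only needs the displayed image size. A small self-contained helper, assuming the corners follow the column order x1..x4 / y1..y4:

```ts
type NormalizedBox = {
  x1: number; y1: number;
  x2: number; y2: number;
  x3: number; y3: number;
  x4: number; y4: number;
};

// Scales normalized (0-1) corner points to pixel coordinates for a given
// rendered width and height.
function toPixelCorners(box: NormalizedBox, width: number, height: number): Array<[number, number]> {
  return [
    [box.x1 * width, box.y1 * height],
    [box.x2 * width, box.y2 * height],
    [box.x3 * width, box.y3 * height],
    [box.x4 * width, box.y4 * height],
  ];
}

console.log(toPixelCorners({ x1: 0.1, y1: 0.1, x2: 0.4, y2: 0.1, x3: 0.4, y3: 0.2, x4: 0.1, y4: 0.2 }, 4000, 3000));
```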
server/src/schema/tables/ocr-search.table.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import { AssetTable } from 'src/schema/tables/asset.table';
import { Column, ForeignKeyColumn, Index, Table } from 'src/sql-tools';

@Table('ocr_search')
@Index({
  name: 'idx_ocr_search_text',
  using: 'gin',
  expression: 'f_unaccent("text") gin_trgm_ops',
})
export class OcrSearchTable {
  @ForeignKeyColumn(() => AssetTable, {
    onDelete: 'CASCADE',
    onUpdate: 'CASCADE',
    primary: true,
  })
  assetId!: string;

  @Column({ type: 'text' })
  text!: string;
}
@@ -42,6 +42,9 @@ export class SessionTable {
  @Column({ default: '' })
  deviceOS!: Generated<string>;

  @Column({ nullable: true })
  appVersion!: string | null;

  @UpdateIdColumn({ index: true })
  updateId!: Generated<string>;
@@ -79,12 +79,17 @@ export class AlbumService extends BaseService {
    const album = await this.findOrFail(id, { withAssets });
    const [albumMetadataForIds] = await this.albumRepository.getMetadataForIds([album.id]);

    const hasSharedUsers = album.albumUsers && album.albumUsers.length > 0;
    const hasSharedLink = album.sharedLinks && album.sharedLinks.length > 0;
    const isShared = hasSharedUsers || hasSharedLink;

    return {
      ...mapAlbum(album, withAssets, auth),
      startDate: albumMetadataForIds?.startDate ?? undefined,
      endDate: albumMetadataForIds?.endDate ?? undefined,
      assetCount: albumMetadataForIds?.assetCount ?? 0,
      lastModifiedAssetTimestamp: albumMetadataForIds?.lastModifiedAssetTimestamp ?? undefined,
      contributorCounts: isShared ? await this.albumRepository.getContributorCounts(album.id) : undefined,
    };
  }
@@ -700,6 +700,42 @@ describe(AssetService.name, () => {
    });
  });

  describe('getOcr', () => {
    it('should require asset read permission', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set());

      await expect(sut.getOcr(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(BadRequestException);

      expect(mocks.ocr.getByAssetId).not.toHaveBeenCalled();
    });

    it('should return OCR data for an asset', async () => {
      const ocr1 = factory.assetOcr({ text: 'Hello World' });
      const ocr2 = factory.assetOcr({ text: 'Test Image' });

      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      mocks.ocr.getByAssetId.mockResolvedValue([ocr1, ocr2]);

      await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([ocr1, ocr2]);

      expect(mocks.access.asset.checkOwnerAccess).toHaveBeenCalledWith(
        authStub.admin.user.id,
        new Set(['asset-1']),
        undefined,
      );
      expect(mocks.ocr.getByAssetId).toHaveBeenCalledWith('asset-1');
    });

    it('should return empty array when no OCR data exists', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      mocks.ocr.getByAssetId.mockResolvedValue([]);

      await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([]);

      expect(mocks.ocr.getByAssetId).toHaveBeenCalledWith('asset-1');
    });
  });

  describe('run', () => {
    it('should run the refresh faces job', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
@@ -2,11 +2,13 @@ import { BadRequestException, Injectable } from '@nestjs/common';
import _ from 'lodash';
import { DateTime, Duration } from 'luxon';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { AssetFile } from 'src/database';
import { OnJob } from 'src/decorators';
import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
import {
  AssetBulkDeleteDto,
  AssetBulkUpdateDto,
  AssetCopyDto,
  AssetJobName,
  AssetJobsDto,
  AssetMetadataResponseDto,
@@ -16,7 +18,17 @@ import {
  mapStats,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
-import { AssetMetadataKey, AssetStatus, AssetVisibility, JobName, JobStatus, Permission, QueueName } from 'src/enum';
+import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
+import {
+  AssetFileType,
+  AssetMetadataKey,
+  AssetStatus,
+  AssetVisibility,
+  JobName,
+  JobStatus,
+  Permission,
+  QueueName,
+} from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
import { requireElevatedPermission } from 'src/utils/access';
@@ -182,6 +194,92 @@ export class AssetService extends BaseService {
    }
  }

  async copy(
    auth: AuthDto,
    {
      sourceId,
      targetId,
      albums = true,
      sidecar = true,
      sharedLinks = true,
      stack = true,
      favorite = true,
    }: AssetCopyDto,
  ) {
    await this.requireAccess({ auth, permission: Permission.AssetCopy, ids: [sourceId, targetId] });
    const sourceAsset = await this.assetRepository.getById(sourceId, { files: true });
    const targetAsset = await this.assetRepository.getById(targetId, { files: true });

    if (!sourceAsset || !targetAsset) {
      throw new BadRequestException('Both assets must exist');
    }

    if (sourceId === targetId) {
      throw new BadRequestException('Source and target id must be distinct');
    }

    if (albums) {
      await this.albumRepository.copyAlbums({ sourceAssetId: sourceId, targetAssetId: targetId });
    }

    if (sharedLinks) {
      await this.sharedLinkAssetRepository.copySharedLinks({ sourceAssetId: sourceId, targetAssetId: targetId });
    }

    if (stack) {
      await this.copyStack(sourceAsset, targetAsset);
    }

    if (favorite) {
      await this.assetRepository.update({ id: targetId, isFavorite: sourceAsset.isFavorite });
    }

    if (sidecar) {
      await this.copySidecar(sourceAsset, targetAsset);
    }
  }

  private async copyStack(
    sourceAsset: { id: string; stackId: string | null },
    targetAsset: { id: string; stackId: string | null },
  ) {
    if (!sourceAsset.stackId) {
      return;
    }

    if (targetAsset.stackId) {
      await this.stackRepository.merge({ sourceId: sourceAsset.stackId, targetId: targetAsset.stackId });
      await this.stackRepository.delete(sourceAsset.stackId);
    } else {
      await this.assetRepository.update({ id: targetAsset.id, stackId: sourceAsset.stackId });
    }
  }

  private async copySidecar(
    sourceAsset: { id: string; files: AssetFile[] | undefined; originalPath: string },
    targetAsset: { files: AssetFile[] | undefined },
  ) {
    const targetSidecarFile = getAssetFiles(targetAsset.files).sidecarFile;

    if (!targetSidecarFile) {
      return;
    }

    const sourceSidecarFile = getAssetFiles(sourceAsset.files).sidecarFile;

    if (sourceSidecarFile) {
      await this.storageRepository.unlink(sourceSidecarFile.path);
    }

    await this.storageRepository.copyFile(targetSidecarFile.path, `${sourceAsset.originalPath}.xmp`);
    await this.assetRepository.upsertFile({
      assetId: sourceAsset.id,
      type: AssetFileType.Sidecar,
      path: `${sourceAsset.originalPath}.xmp`,
    });
    await this.jobRepository.queue({ name: JobName.AssetExtractMetadata, data: { id: sourceAsset.id } });
  }

  @OnJob({ name: JobName.AssetDeleteCheck, queue: QueueName.BackgroundTask })
  async handleAssetDeletionCheck(): Promise<JobStatus> {
    const config = await this.getConfig({ withCache: false });
@@ -289,6 +387,11 @@ export class AssetService extends BaseService {
    return this.assetRepository.getMetadata(id);
  }

  async getOcr(auth: AuthDto, id: string): Promise<AssetOcrResponseDto[]> {
    await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });
    return this.ocrRepository.getByAssetId(id);
  }

  async upsertMetadata(auth: AuthDto, id: string, dto: AssetMetadataUpsertDto): Promise<AssetMetadataResponseDto[]> {
    await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: [id] });
    return this.assetRepository.upsertMetadata(id, dto.items);
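`copyStack` above has two outcomes: merge the source's stack into the target's existing stack, or re-point the target at the source's stack. An isolated, runnable restatement of that branch logic (the types and return strings are invented for illustration):

```ts
type StackRef = { id: string; stackId: string | null };

// Describes which action the service would take for a given source/target pair.
function planStackCopy(source: StackRef, target: StackRef): string {
  if (!source.stackId) return 'nothing to do';
  if (target.stackId) return `merge stack ${source.stackId} into ${target.stackId}, then delete ${source.stackId}`;
  return `point ${target.id} at stack ${source.stackId}`;
}

console.log(planStackCopy({ id: 'a', stackId: 's1' }, { id: 'b', stackId: null }));
console.log(planStackCopy({ id: 'a', stackId: 's1' }, { id: 'b', stackId: 's2' }));
```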
@@ -41,6 +41,7 @@ const loginDetails = {
  clientIp: '127.0.0.1',
  deviceOS: '',
  deviceType: '',
  appVersion: null,
};

const fixtures = {
@@ -123,6 +124,11 @@ describe(AuthService.name, () => {

      expect(mocks.user.getForChangePassword).toHaveBeenCalledWith(user.id);
      expect(mocks.crypto.compareBcrypt).toHaveBeenCalledWith('old-password', 'hash-password');
      expect(mocks.event.emit).toHaveBeenCalledWith('AuthChangePassword', {
        userId: user.id,
        currentSessionId: auth.session?.id,
        shouldLogoutSessions: undefined,
      });
    });

    it('should throw when password does not match existing password', async () => {
@@ -146,6 +152,25 @@ describe(AuthService.name, () => {

      await expect(sut.changePassword(auth, dto)).rejects.toBeInstanceOf(BadRequestException);
    });

    it('should change the password and logout other sessions', async () => {
      const user = factory.userAdmin();
      const auth = factory.auth({ user });
      const dto = { password: 'old-password', newPassword: 'new-password', invalidateSessions: true };

      mocks.user.getForChangePassword.mockResolvedValue({ id: user.id, password: 'hash-password' });
      mocks.user.update.mockResolvedValue(user);

      await sut.changePassword(auth, dto);

      expect(mocks.user.getForChangePassword).toHaveBeenCalledWith(user.id);
      expect(mocks.crypto.compareBcrypt).toHaveBeenCalledWith('old-password', 'hash-password');
      expect(mocks.event.emit).toHaveBeenCalledWith('AuthChangePassword', {
        userId: user.id,
        invalidateSessions: true,
        currentSessionId: auth.session?.id,
      });
    });
  });

  describe('logout', () => {
@@ -243,6 +268,7 @@ describe(AuthService.name, () => {
        updatedAt: session.updatedAt,
        user: factory.authUser(),
        pinExpiresAt: null,
        appVersion: null,
      };

      mocks.session.getByToken.mockResolvedValue(sessionWithToken);
@@ -408,6 +434,7 @@ describe(AuthService.name, () => {
        updatedAt: session.updatedAt,
        user: factory.authUser(),
        pinExpiresAt: null,
        appVersion: null,
      };

      mocks.session.getByToken.mockResolvedValue(sessionWithToken);
@@ -435,6 +462,7 @@ describe(AuthService.name, () => {
        user: factory.authUser(),
        isPendingSyncReset: false,
        pinExpiresAt: null,
        appVersion: null,
      };

      mocks.session.getByToken.mockResolvedValue(sessionWithToken);
@@ -456,6 +484,7 @@ describe(AuthService.name, () => {
        user: factory.authUser(),
        isPendingSyncReset: false,
        pinExpiresAt: null,
        appVersion: null,
      };

      mocks.session.getByToken.mockResolvedValue(sessionWithToken);
@@ -29,11 +29,13 @@ import { BaseService } from 'src/services/base.service';
import { isGranted } from 'src/utils/access';
import { HumanReadableSize } from 'src/utils/bytes';
import { mimeTypes } from 'src/utils/mime-types';
import { getUserAgentDetails } from 'src/utils/request';

export interface LoginDetails {
  isSecure: boolean;
  clientIp: string;
  deviceType: string;
  deviceOS: string;
  appVersion: string | null;
}

interface ClaimOptions<T> {
@@ -102,6 +104,12 @@ export class AuthService extends BaseService {

    const updatedUser = await this.userRepository.update(user.id, { password: hashedPassword });

    await this.eventRepository.emit('AuthChangePassword', {
      userId: user.id,
      currentSessionId: auth.session?.id,
      invalidateSessions: dto.invalidateSessions,
    });

    return mapUserAdmin(updatedUser);
  }

@@ -218,7 +226,7 @@ export class AuthService extends BaseService {
    }

    if (session) {
-      return this.validateSession(session);
+      return this.validateSession(session, headers);
    }

    if (apiKey) {
@@ -463,15 +471,22 @@ export class AuthService extends BaseService {
    return this.cryptoRepository.compareBcrypt(inputSecret, existingHash);
  }

-  private async validateSession(tokenValue: string): Promise<AuthDto> {
+  private async validateSession(tokenValue: string, headers: IncomingHttpHeaders): Promise<AuthDto> {
    const hashedToken = this.cryptoRepository.hashSha256(tokenValue);
    const session = await this.sessionRepository.getByToken(hashedToken);
    if (session?.user) {
+      const { appVersion, deviceOS, deviceType } = getUserAgentDetails(headers);
      const now = DateTime.now();
      const updatedAt = DateTime.fromJSDate(session.updatedAt);
      const diff = now.diff(updatedAt, ['hours']);
-      if (diff.hours > 1) {
-        await this.sessionRepository.update(session.id, { id: session.id, updatedAt: new Date() });
+      if (diff.hours > 1 || appVersion != session.appVersion) {
+        await this.sessionRepository.update(session.id, {
+          id: session.id,
+          updatedAt: new Date(),
+          appVersion,
+          deviceOS,
+          deviceType,
+        });
      }

      // Pin check
@@ -529,6 +544,7 @@ export class AuthService extends BaseService {
      token: tokenHashed,
      deviceOS: loginDetails.deviceOS,
      deviceType: loginDetails.deviceType,
      appVersion: loginDetails.appVersion,
      userId: user.id,
    });
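`validateSession` now refreshes `appVersion`/`deviceOS`/`deviceType` from the request headers via `getUserAgentDetails`, whose implementation is not part of this diff. A purely hypothetical sketch of a parser with that return shape — the user-agent format, header names, and device-type fallback are invented:

```ts
import { IncomingHttpHeaders } from 'node:http';

// Hypothetical sketch only: assumes the client sends a user-agent like
// "Immich/1.120.0 (Android 14)"; the real src/utils/request helper may differ.
function getUserAgentDetailsSketch(headers: IncomingHttpHeaders) {
  const userAgent = headers['user-agent'] ?? '';
  const match = /^Immich\/(\S+) \(([^)]+)\)$/.exec(userAgent);
  return {
    appVersion: match?.[1] ?? null, // null when the app version cannot be parsed
    deviceOS: match?.[2] ?? '',
    deviceType: match ? 'Mobile' : '',
  };
}

console.log(getUserAgentDetailsSketch({ 'user-agent': 'Immich/1.120.0 (Android 14)' }));
```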
@@ -209,6 +209,7 @@ describe(BackupService.name, () => {
      ${'15.3.3'} | ${15}
      ${'16.4.2'} | ${16}
      ${'17.15.1'} | ${17}
      ${'18.0.0'} | ${18}
    `(
      `should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
      async ({ postgresVersion, expectedVersion }) => {
@@ -224,7 +225,7 @@ describe(BackupService.name, () => {
    it.each`
      postgresVersion
      ${'13.99.99'}
-      ${'18.0.0'}
+      ${'19.0.0'}
    `(`should fail if postgres version $postgresVersion is not supported`, async ({ postgresVersion }) => {
      mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
      const result = await sut.handleBackupDatabase();

@@ -103,7 +103,7 @@ export class BackupService extends BaseService {
    const databaseSemver = semver.coerce(databaseVersion);
    const databaseMajorVersion = databaseSemver?.major;

-    if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <18.0.0')) {
+    if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
      this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
      return JobStatus.Failed;
    }
@@ -32,12 +32,14 @@ import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
@@ -50,6 +52,7 @@ import { TrashRepository } from 'src/repositories/trash.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { ViewRepository } from 'src/repositories/view-repository';
import { WebsocketRepository } from 'src/repositories/websocket.repository';
import { UserTable } from 'src/schema/tables/user.table';
import { AccessRequest, checkAccess, requireAccess } from 'src/utils/access';
import { getConfig, updateConfig } from 'src/utils/config';
@@ -82,6 +85,7 @@ export const BASE_SERVICE_DEPENDENCIES = [
  MoveRepository,
  NotificationRepository,
  OAuthRepository,
  OcrRepository,
  PartnerRepository,
  PersonRepository,
  ProcessRepository,
@@ -89,6 +93,7 @@ export const BASE_SERVICE_DEPENDENCIES = [
  ServerInfoRepository,
  SessionRepository,
  SharedLinkRepository,
  SharedLinkAssetRepository,
  StackRepository,
  StorageRepository,
  SyncRepository,
@@ -134,6 +139,7 @@ export class BaseService {
    protected moveRepository: MoveRepository,
    protected notificationRepository: NotificationRepository,
    protected oauthRepository: OAuthRepository,
    protected ocrRepository: OcrRepository,
    protected partnerRepository: PartnerRepository,
    protected personRepository: PersonRepository,
    protected processRepository: ProcessRepository,
@@ -141,6 +147,7 @@ export class BaseService {
    protected serverInfoRepository: ServerInfoRepository,
    protected sessionRepository: SessionRepository,
    protected sharedLinkRepository: SharedLinkRepository,
    protected sharedLinkAssetRepository: SharedLinkAssetRepository,
    protected stackRepository: StackRepository,
    protected storageRepository: StorageRepository,
    protected syncRepository: SyncRepository,
@@ -152,6 +159,7 @@ export class BaseService {
    protected userRepository: UserRepository,
    protected versionRepository: VersionHistoryRepository,
    protected viewRepository: ViewRepository,
    protected websocketRepository: WebsocketRepository,
  ) {
    this.logger.setContext(this.constructor.name);
    this.storageCore = StorageCore.create(
@@ -195,8 +203,8 @@ export class BaseService {
  }

  async createUser(dto: Insertable<UserTable> & { email: string }): Promise<UserAdmin> {
-    const user = await this.userRepository.getByEmail(dto.email);
-    if (user) {
+    const exists = await this.userRepository.getByEmail(dto.email);
+    if (exists) {
      throw new BadRequestException('User exists');
    }

@@ -215,6 +223,10 @@ export class BaseService {
      payload.storageLabel = sanitize(payload.storageLabel.replaceAll('.', ''));
    }

-    return this.userRepository.create(payload);
+    const user = await this.userRepository.create(payload);
+
+    await this.eventRepository.emit('UserCreate', user);
+
+    return user;
  }
}
@@ -20,6 +20,7 @@ import { MemoryService } from 'src/services/memory.service';
import { MetadataService } from 'src/services/metadata.service';
import { NotificationAdminService } from 'src/services/notification-admin.service';
import { NotificationService } from 'src/services/notification.service';
import { OcrService } from 'src/services/ocr.service';
import { PartnerService } from 'src/services/partner.service';
import { PersonService } from 'src/services/person.service';
import { SearchService } from 'src/services/search.service';
@@ -34,6 +35,7 @@ import { SyncService } from 'src/services/sync.service';
import { SystemConfigService } from 'src/services/system-config.service';
import { SystemMetadataService } from 'src/services/system-metadata.service';
import { TagService } from 'src/services/tag.service';
import { TelemetryService } from 'src/services/telemetry.service';
import { TimelineService } from 'src/services/timeline.service';
import { TrashService } from 'src/services/trash.service';
import { UserAdminService } from 'src/services/user-admin.service';
@@ -64,6 +66,7 @@ export const services = [
  MetadataService,
  NotificationService,
  NotificationAdminService,
  OcrService,
  PartnerService,
  PersonService,
  SearchService,
@@ -78,6 +81,7 @@ export const services = [
  SystemConfigService,
  SystemMetadataService,
  TagService,
  TelemetryService,
  TimelineService,
  TrashService,
  UserAdminService,
@@ -24,7 +24,7 @@ describe(JobService.name, () => {
  it('should update concurrency', () => {
    sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });

-    expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(15);
+    expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(16);
    expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FacialRecognition, 1);
    expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DuplicateDetection, 1);
    expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BackgroundTask, 5);
@@ -98,6 +98,7 @@ describe(JobService.name, () => {
        [QueueName.Library]: expectedJobStatus,
        [QueueName.Notification]: expectedJobStatus,
        [QueueName.BackupDatabase]: expectedJobStatus,
        [QueueName.Ocr]: expectedJobStatus,
      });
    });
  });
@@ -222,18 +223,16 @@ describe(JobService.name, () => {
    });
  });

-  describe('onJobStart', () => {
+  describe('onJobRun', () => {
    it('should process a successful job', async () => {
      mocks.job.run.mockResolvedValue(JobStatus.Success);

-      await sut.onJobStart(QueueName.BackgroundTask, {
-        name: JobName.FileDelete,
-        data: { files: ['path/to/file'] },
-      });
+      const job: JobItem = { name: JobName.FileDelete, data: { files: ['path/to/file'] } };
+      await sut.onJobRun(QueueName.BackgroundTask, job);

-      expect(mocks.telemetry.jobs.addToGauge).toHaveBeenCalledWith('immich.queues.background_task.active', 1);
-      expect(mocks.telemetry.jobs.addToGauge).toHaveBeenCalledWith('immich.queues.background_task.active', -1);
-      expect(mocks.telemetry.jobs.addToCounter).toHaveBeenCalledWith('immich.jobs.file_delete.success', 1);
+      expect(mocks.event.emit).toHaveBeenCalledWith('JobStart', QueueName.BackgroundTask, job);
+      expect(mocks.event.emit).toHaveBeenCalledWith('JobSuccess', { job, response: JobStatus.Success });
+      expect(mocks.event.emit).toHaveBeenCalledWith('JobComplete', QueueName.BackgroundTask, job);
      expect(mocks.logger.error).not.toHaveBeenCalled();
    });

@@ -270,12 +269,12 @@ describe(JobService.name, () => {
      },
      {
        item: { name: JobName.AssetGenerateThumbnails, data: { id: 'asset-1', source: 'upload' } },
-        jobs: [JobName.SmartSearch, JobName.AssetDetectFaces],
+        jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.Ocr],
        stub: [assetStub.livePhotoStillAsset],
      },
      {
        item: { name: JobName.AssetGenerateThumbnails, data: { id: 'asset-1', source: 'upload' } },
-        jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.AssetEncodeVideo],
+        jobs: [JobName.SmartSearch, JobName.AssetDetectFaces, JobName.Ocr, JobName.AssetEncodeVideo],
        stub: [assetStub.video],
      },
      {
@@ -300,7 +299,7 @@ describe(JobService.name, () => {

      mocks.job.run.mockResolvedValue(JobStatus.Success);

-      await sut.onJobStart(QueueName.BackgroundTask, item);
+      await sut.onJobRun(QueueName.BackgroundTask, item);

      if (jobs.length > 1) {
        expect(mocks.job.queueAll).toHaveBeenCalledWith(
@@ -317,7 +316,7 @@ describe(JobService.name, () => {
    it(`should not queue any jobs when ${item.name} fails`, async () => {
      mocks.job.run.mockResolvedValue(JobStatus.Failed);

-      await sut.onJobStart(QueueName.BackgroundTask, item);
+      await sut.onJobRun(QueueName.BackgroundTask, item);

      expect(mocks.job.queueAll).not.toHaveBeenCalled();
    });
@@ -1,6 +1,5 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { ClassConstructor } from 'class-transformer';
import { snakeCase } from 'lodash';
import { SystemConfig } from 'src/config';
import { OnEvent } from 'src/decorators';
import { mapAsset } from 'src/dtos/asset-response.dto';
@@ -186,7 +185,7 @@ export class JobService extends BaseService {
      throw new BadRequestException(`Job is already running`);
    }

-    this.telemetryRepository.jobs.addToCounter(`immich.queues.${snakeCase(name)}.started`, 1);
+    await this.eventRepository.emit('QueueStart', { name });

    switch (name) {
      case QueueName.VideoConversion: {
@@ -237,27 +236,29 @@ export class JobService extends BaseService {
        return this.jobRepository.queue({ name: JobName.DatabaseBackup, data: { force } });
      }

      case QueueName.Ocr: {
        return this.jobRepository.queue({ name: JobName.OcrQueueAll, data: { force } });
      }

      default: {
        throw new BadRequestException(`Invalid job name: ${name}`);
      }
    }
  }

-  @OnEvent({ name: 'JobStart' })
-  async onJobStart(...[queueName, job]: ArgsOf<'JobStart'>) {
-    const queueMetric = `immich.queues.${snakeCase(queueName)}.active`;
-    this.telemetryRepository.jobs.addToGauge(queueMetric, 1);
+  @OnEvent({ name: 'JobRun' })
+  async onJobRun(...[queueName, job]: ArgsOf<'JobRun'>) {
    try {
-      const status = await this.jobRepository.run(job);
-      const jobMetric = `immich.jobs.${snakeCase(job.name)}.${status}`;
-      this.telemetryRepository.jobs.addToCounter(jobMetric, 1);
-      if (status === JobStatus.Success || status == JobStatus.Skipped) {
+      await this.eventRepository.emit('JobStart', queueName, job);
+      const response = await this.jobRepository.run(job);
+      await this.eventRepository.emit('JobSuccess', { job, response });
+      if (response && typeof response === 'string' && [JobStatus.Success, JobStatus.Skipped].includes(response)) {
        await this.onDone(job);
      }
    } catch (error: Error | any) {
-      await this.eventRepository.emit('JobFailed', { job, error });
+      await this.eventRepository.emit('JobError', { job, error });
    } finally {
-      this.telemetryRepository.jobs.addToGauge(queueMetric, -1);
+      await this.eventRepository.emit('JobComplete', queueName, job);
    }
  }

@@ -334,7 +335,7 @@ export class JobService extends BaseService {
        const { id } = item.data;
        const person = await this.personRepository.getById(id);
        if (person) {
-          this.eventRepository.clientSend('on_person_thumbnail', person.ownerId, person.id);
+          this.websocketRepository.clientSend('on_person_thumbnail', person.ownerId, person.id);
        }
        break;
      }
@@ -353,6 +354,7 @@ export class JobService extends BaseService {
        const jobs: JobItem[] = [
          { name: JobName.SmartSearch, data: item.data },
          { name: JobName.AssetDetectFaces, data: item.data },
          { name: JobName.Ocr, data: item.data },
        ];

        if (asset.type === AssetType.Video) {
@@ -361,10 +363,10 @@ export class JobService extends BaseService {

        await this.jobRepository.queueAll(jobs);
        if (asset.visibility === AssetVisibility.Timeline || asset.visibility === AssetVisibility.Archive) {
-          this.eventRepository.clientSend('on_upload_success', asset.ownerId, mapAsset(asset));
+          this.websocketRepository.clientSend('on_upload_success', asset.ownerId, mapAsset(asset));
          if (asset.exifInfo) {
            const exif = asset.exifInfo;
-            this.eventRepository.clientSend('AssetUploadReadyV1', asset.ownerId, {
+            this.websocketRepository.clientSend('AssetUploadReadyV1', asset.ownerId, {
              // TODO remove `on_upload_success` and then modify the query to select only the required fields)
              asset: {
                id: asset.id,
@@ -424,11 +426,6 @@ export class JobService extends BaseService {
        }
        break;
      }

-      case JobName.UserDelete: {
-        this.eventRepository.clientBroadcast('on_user_delete', item.data.id);
-        break;
-      }
    }
  }
}
@@ -445,6 +445,7 @@ describe(MediaService.name, () => {
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not skip intra frames for MTS file', async () => {
|
||||
mocks.media.probe.mockResolvedValue(probeStub.videoStreamMTS);
|
||||
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video);
|
||||
@@ -462,6 +463,25 @@ describe(MediaService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should override reserved color metadata', async () => {
|
||||
mocks.media.probe.mockResolvedValue(probeStub.videoStreamReserved);
|
||||
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video);
|
||||
await sut.handleGenerateThumbnails({ id: assetStub.video.id });
|
||||
|
||||
expect(mocks.media.transcode).toHaveBeenCalledWith(
|
||||
'/original/path.ext',
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
inputOptions: expect.arrayContaining([
|
||||
'-bsf:v hevc_metadata=colour_primaries=1:matrix_coefficients=1:transfer_characteristics=1',
|
||||
]),
|
||||
outputOptions: expect.any(Array),
|
||||
progress: expect.any(Object),
|
||||
twoPass: false,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use scaling divisible by 2 even when using quick sync', async () => {
|
||||
mocks.media.probe.mockResolvedValue(probeStub.videoStream2160p);
|
||||
mocks.systemMetadata.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHardwareAcceleration.Qsv } });
|
||||
@@ -841,6 +861,37 @@ describe(MediaService.name, () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should always generate full-size preview from non-web-friendly panoramas', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: false } } });
|
||||
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
|
||||
mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
|
||||
|
||||
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.panoramaTif);
|
||||
|
||||
await sut.handleGenerateThumbnails({ id: assetStub.image.id });
|
||||
|
||||
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
|
||||
expect(mocks.media.decodeImage).toHaveBeenCalledWith(assetStub.panoramaTif.originalPath, {
|
||||
colorspace: Colorspace.Srgb,
|
||||
orientation: undefined,
|
||||
processInvalidImages: false,
|
||||
size: undefined,
|
||||
});
|
||||
|
||||
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
|
||||
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
|
||||
rawBuffer,
|
||||
{
|
||||
colorspace: Colorspace.Srgb,
|
||||
format: ImageFormat.Jpeg,
|
||||
quality: 80,
|
||||
processInvalidImages: false,
|
||||
raw: rawInfo,
|
||||
},
|
||||
expect.any(String),
|
||||
);
|
||||
});
|
||||
|
||||
it('should respect encoding options when generating full-size preview', async () => {
|
||||
mocks.systemMetadata.get.mockResolvedValue({
|
||||
image: { fullsize: { enabled: true, format: ImageFormat.Webp, quality: 90 } },
|
||||
|
||||
@@ -271,7 +271,9 @@ export class MediaService extends BaseService {
|
||||
// Handle embedded preview extraction for RAW files
|
||||
const extractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
|
||||
const extracted = extractEmbedded ? await this.extractImage(asset.originalPath, image.preview.size) : null;
|
||||
const generateFullsize = image.fullsize.enabled && !mimeTypes.isWebSupportedImage(asset.originalPath);
|
||||
const generateFullsize =
|
||||
(image.fullsize.enabled || asset.exifInfo.projectionType == 'EQUIRECTANGULAR') &&
|
||||
!mimeTypes.isWebSupportedImage(asset.originalPath);
|
||||
const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(` .${extracted.format}`));
|
||||
|
||||
const { info, data, colorspace } = await this.decodeImage(
|
||||
|
||||
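The new condition forces full-size generation for equirectangular panoramas even when the config toggle is off, since a 360° viewer needs the whole image. A hedged sketch of just that predicate; `isWebSupportedImage` below is a stand-in for the real `mimeTypes` helper:

// Sketch of the predicate only, assuming an exiftool-style projectionType tag;
// isWebSupportedImage is a stand-in for the real mimeTypes helper.
const isWebSupportedImage = (path: string): boolean => /\.(jpe?g|png|webp|gif|avif)$/i.test(path);

function shouldGenerateFullsize(
  fullsizeEnabled: boolean,
  projectionType: string | undefined,
  originalPath: string,
): boolean {
  // Equirectangular panoramas bypass the fullsize.enabled toggle; web-friendly
  // originals never need a separate full-size preview.
  return (fullsizeEnabled || projectionType === 'EQUIRECTANGULAR') && !isWebSupportedImage(originalPath);
}

// shouldGenerateFullsize(false, 'EQUIRECTANGULAR', '/photos/pano.tif') === true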
@@ -30,6 +30,7 @@ import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { PersonTable } from 'src/schema/tables/person.table';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { isFaceImportEnabled } from 'src/utils/misc';
import { upsertTags } from 'src/utils/tag';

@@ -454,14 +455,17 @@ export class MetadataService extends BaseService {
* For RAW images in the CR2 or RAF format, the "ImageSize" value seems to be correct,
* but ImageWidth and ImageHeight are not correct (they contain the dimensions of the preview image).
*/
let [width, height] = exifTags.ImageSize?.split('x').map((dim) => Number.parseInt(dim) || undefined) || [];
let [width, height] =
exifTags.ImageSize?.toString()
?.split('x')
?.map((dim) => Number.parseInt(dim) || undefined) ?? [];
if (!width || !height) {
[width, height] = [exifTags.ImageWidth, exifTags.ImageHeight];
}
return { width, height };
}

private getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
if (asset.type === AssetType.Image) {
const hasSidecar = asset.files?.some(({ type }) => type === AssetFileType.Sidecar);

@@ -470,21 +474,15 @@ export class MetadataService extends BaseService {
}
}

return this.mergeExifTags(asset);
}

private async mergeExifTags(asset: {
originalPath: string;
files: AssetFile[];
type: AssetType;
}): Promise<ImmichTags> {
if (asset.files && asset.files.length > 1) {
throw new Error(`Asset ${asset.originalPath} has multiple sidecar files`);
}

const sidecarFile = getAssetFiles(asset.files).sidecarFile;

const [mediaTags, sidecarTags, videoTags] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
asset.files && asset.files.length > 0 ? this.metadataRepository.readTags(asset.files[0].path) : null,
sidecarFile ? this.metadataRepository.readTags(sidecarFile.path) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
]);
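The rewritten dimension lookup coerces `ImageSize` to a string before splitting, so non-string exiftool values no longer break the parse, and falls back to the explicit width/height tags. A standalone sketch of that logic, assuming the usual `'4032x3024'` format:

// Sketch: parse "WxH" with fallback to explicit width/height tags.
function getDimensions(tags: { ImageSize?: string | number; ImageWidth?: number; ImageHeight?: number }) {
  let [width, height] =
    tags.ImageSize?.toString()
      .split('x')
      .map((dim) => Number.parseInt(dim) || undefined) ?? [];
  if (!width || !height) {
    // RAW previews can poison ImageWidth/ImageHeight, so ImageSize wins when present.
    [width, height] = [tags.ImageWidth, tags.ImageHeight];
  }
  return { width, height };
}

// getDimensions({ ImageSize: '4032x3024' }) => { width: 4032, height: 3024 }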
@@ -14,6 +14,7 @@ const smtpTransport = Object.freeze<SystemConfig>({
ignoreCert: false,
host: 'localhost',
port: 587,
secure: false,
username: 'test',
password: 'test',
},

@@ -7,6 +7,7 @@ import { NotificationService } from 'src/services/notification.service';
import { INotifyAlbumUpdateJob } from 'src/types';
import { albumStub } from 'test/fixtures/album.stub';
import { assetStub } from 'test/fixtures/asset.stub';
import { notificationStub } from 'test/fixtures/notification.stub';
import { userStub } from 'test/fixtures/user.stub';
import { newTestService, ServiceMocks } from 'test/utils';

@@ -39,6 +40,7 @@ const configs = {
ignoreCert: false,
host: 'localhost',
port: 587,
secure: false,
username: 'test',
password: 'test',
},

@@ -63,8 +65,8 @@ describe(NotificationService.name, () => {
it('should emit client and server events', () => {
const update = { oldConfig: defaults, newConfig: defaults };
expect(sut.onConfigUpdate(update)).toBeUndefined();
expect(mocks.event.clientBroadcast).toHaveBeenCalledWith('on_config_update');
expect(mocks.event.serverSend).toHaveBeenCalledWith('ConfigUpdate', update);
expect(mocks.websocket.clientBroadcast).toHaveBeenCalledWith('on_config_update');
expect(mocks.websocket.serverSend).toHaveBeenCalledWith('ConfigUpdate', update);
});
});

@@ -123,7 +125,7 @@ describe(NotificationService.name, () => {
describe('onAssetHide', () => {
it('should send connected clients an event', () => {
sut.onAssetHide({ assetId: 'asset-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_hidden', 'user-id', 'asset-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_hidden', 'user-id', 'asset-id');
});
});

@@ -176,67 +178,67 @@ describe(NotificationService.name, () => {
it('should send a on_session_delete client event', () => {
vi.useFakeTimers();
sut.onSessionDelete({ sessionId: 'id' });
expect(mocks.event.clientSend).not.toHaveBeenCalled();
expect(mocks.websocket.clientSend).not.toHaveBeenCalled();

vi.advanceTimersByTime(500);

expect(mocks.event.clientSend).toHaveBeenCalledWith('on_session_delete', 'id', 'id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_session_delete', 'id', 'id');
});
});

describe('onAssetTrash', () => {
it('should send connected clients an event', () => {
it('should send connected clients a websocket event', () => {
sut.onAssetTrash({ assetId: 'asset-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_trash', 'user-id', ['asset-id']);
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_trash', 'user-id', ['asset-id']);
});
});

describe('onAssetDelete', () => {
it('should send connected clients an event', () => {
sut.onAssetDelete({ assetId: 'asset-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_delete', 'user-id', 'asset-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_delete', 'user-id', 'asset-id');
});
});

describe('onAssetsTrash', () => {
it('should send connected clients an event', () => {
sut.onAssetsTrash({ assetIds: ['asset-id'], userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_trash', 'user-id', ['asset-id']);
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_trash', 'user-id', ['asset-id']);
});
});

describe('onAssetsRestore', () => {
it('should send connected clients an event', () => {
sut.onAssetsRestore({ assetIds: ['asset-id'], userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_restore', 'user-id', ['asset-id']);
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_restore', 'user-id', ['asset-id']);
});
});

describe('onStackCreate', () => {
it('should send connected clients an event', () => {
sut.onStackCreate({ stackId: 'stack-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
});
});

describe('onStackUpdate', () => {
it('should send connected clients an event', () => {
sut.onStackUpdate({ stackId: 'stack-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
});
});

describe('onStackDelete', () => {
it('should send connected clients an event', () => {
sut.onStackDelete({ stackId: 'stack-id', userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
});
});

describe('onStacksDelete', () => {
it('should send connected clients an event', () => {
sut.onStacksDelete({ stackIds: ['stack-id'], userId: 'user-id' });
expect(mocks.event.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
expect(mocks.websocket.clientSend).toHaveBeenCalledWith('on_asset_stack_update', 'user-id');
});
});

@@ -282,6 +284,7 @@ describe(NotificationService.name, () => {
},
],
});
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);

await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
});

@@ -297,6 +300,7 @@ describe(NotificationService.name, () => {
},
],
});
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);

await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Skipped);
});

@@ -313,6 +317,7 @@ describe(NotificationService.name, () => {
],
});
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });

await expect(sut.handleAlbumInvite({ id: '', recipientId: '' })).resolves.toBe(JobStatus.Success);

@@ -334,6 +339,7 @@ describe(NotificationService.name, () => {
],
});
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

@@ -363,6 +369,7 @@ describe(NotificationService.name, () => {
],
});
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([
{ id: '1', type: AssetFileType.Thumbnail, path: 'path-to-thumb.jpg' },

@@ -394,6 +401,7 @@ describe(NotificationService.name, () => {
],
});
mocks.systemMetadata.get.mockResolvedValue({ server: {} });
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([assetStub.image.files[2]]);

@@ -431,6 +439,7 @@ describe(NotificationService.name, () => {
albumUsers: [{ user: { id: userStub.user1.id } } as AlbumUser],
});
mocks.user.get.mockResolvedValueOnce(userStub.user1);
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

@@ -453,6 +462,7 @@ describe(NotificationService.name, () => {
},
],
});
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

@@ -475,6 +485,7 @@ describe(NotificationService.name, () => {
},
],
});
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

@@ -489,6 +500,7 @@ describe(NotificationService.name, () => {
albumUsers: [{ user: { id: userStub.user1.id } } as AlbumUser],
});
mocks.user.get.mockResolvedValue(userStub.user1);
mocks.notification.create.mockResolvedValue(notificationStub.albumEvent);
mocks.email.renderEmail.mockResolvedValue({ html: '', text: '' });
mocks.assetJob.getAlbumThumbnailFiles.mockResolvedValue([]);

@@ -1,5 +1,6 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent, OnJob } from 'src/decorators';
import { MapAlbumDto } from 'src/dtos/album.dto';
import { mapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {

@@ -77,8 +78,8 @@ export class NotificationService extends BaseService {
await this.notificationRepository.cleanup();
}

@OnEvent({ name: 'JobFailed' })
async onJobFailed({ job, error }: ArgOf<'JobFailed'>) {
@OnEvent({ name: 'JobError' })
async onJobError({ job, error }: ArgOf<'JobError'>) {
const admin = await this.userRepository.getAdmin();
if (!admin) {
return;

@@ -97,7 +98,7 @@ export class NotificationService extends BaseService {
description: `Job ${[job.name]} failed with error: ${errorMessage}`,
});

this.eventRepository.clientSend('on_notification', admin.id, mapNotification(item));
this.websocketRepository.clientSend('on_notification', admin.id, mapNotification(item));
break;
}

@@ -109,8 +110,8 @@ export class NotificationService extends BaseService {

@OnEvent({ name: 'ConfigUpdate' })
onConfigUpdate({ oldConfig, newConfig }: ArgOf<'ConfigUpdate'>) {
this.eventRepository.clientBroadcast('on_config_update');
this.eventRepository.serverSend('ConfigUpdate', { oldConfig, newConfig });
this.websocketRepository.clientBroadcast('on_config_update');
this.websocketRepository.serverSend('ConfigUpdate', { oldConfig, newConfig });
}

@OnEvent({ name: 'ConfigValidate', priority: -100 })

@@ -130,7 +131,7 @@ export class NotificationService extends BaseService {

@OnEvent({ name: 'AssetHide' })
onAssetHide({ assetId, userId }: ArgOf<'AssetHide'>) {
this.eventRepository.clientSend('on_asset_hidden', userId, assetId);
this.websocketRepository.clientSend('on_asset_hidden', userId, assetId);
}

@OnEvent({ name: 'AssetShow' })

@@ -140,17 +141,17 @@ export class NotificationService extends BaseService {

@OnEvent({ name: 'AssetTrash' })
onAssetTrash({ assetId, userId }: ArgOf<'AssetTrash'>) {
this.eventRepository.clientSend('on_asset_trash', userId, [assetId]);
this.websocketRepository.clientSend('on_asset_trash', userId, [assetId]);
}

@OnEvent({ name: 'AssetDelete' })
onAssetDelete({ assetId, userId }: ArgOf<'AssetDelete'>) {
this.eventRepository.clientSend('on_asset_delete', userId, assetId);
this.websocketRepository.clientSend('on_asset_delete', userId, assetId);
}

@OnEvent({ name: 'AssetTrashAll' })
onAssetsTrash({ assetIds, userId }: ArgOf<'AssetTrashAll'>) {
this.eventRepository.clientSend('on_asset_trash', userId, assetIds);
this.websocketRepository.clientSend('on_asset_trash', userId, assetIds);
}

@OnEvent({ name: 'AssetMetadataExtracted' })

@@ -161,33 +162,37 @@ export class NotificationService extends BaseService {

const [asset] = await this.assetRepository.getByIdsWithAllRelationsButStacks([assetId]);
if (asset) {
this.eventRepository.clientSend('on_asset_update', userId, mapAsset(asset));
this.websocketRepository.clientSend(
'on_asset_update',
userId,
mapAsset(asset, { auth: { user: { id: userId } } as AuthDto }),
);
}
}

@OnEvent({ name: 'AssetRestoreAll' })
onAssetsRestore({ assetIds, userId }: ArgOf<'AssetRestoreAll'>) {
this.eventRepository.clientSend('on_asset_restore', userId, assetIds);
this.websocketRepository.clientSend('on_asset_restore', userId, assetIds);
}

@OnEvent({ name: 'StackCreate' })
onStackCreate({ userId }: ArgOf<'StackCreate'>) {
this.eventRepository.clientSend('on_asset_stack_update', userId);
this.websocketRepository.clientSend('on_asset_stack_update', userId);
}

@OnEvent({ name: 'StackUpdate' })
onStackUpdate({ userId }: ArgOf<'StackUpdate'>) {
this.eventRepository.clientSend('on_asset_stack_update', userId);
this.websocketRepository.clientSend('on_asset_stack_update', userId);
}

@OnEvent({ name: 'StackDelete' })
onStackDelete({ userId }: ArgOf<'StackDelete'>) {
this.eventRepository.clientSend('on_asset_stack_update', userId);
this.websocketRepository.clientSend('on_asset_stack_update', userId);
}

@OnEvent({ name: 'StackDeleteAll' })
onStacksDelete({ userId }: ArgOf<'StackDeleteAll'>) {
this.eventRepository.clientSend('on_asset_stack_update', userId);
this.websocketRepository.clientSend('on_asset_stack_update', userId);
}

@OnEvent({ name: 'UserSignup' })

@@ -197,6 +202,11 @@ export class NotificationService extends BaseService {
}
}

@OnEvent({ name: 'UserDelete' })
onUserDelete({ id }: ArgOf<'UserDelete'>) {
this.websocketRepository.clientBroadcast('on_user_delete', id);
}

@OnEvent({ name: 'AlbumUpdate' })
async onAlbumUpdate({ id, recipientId }: ArgOf<'AlbumUpdate'>) {
await this.jobRepository.removeJob(JobName.NotifyAlbumUpdate, `${id}/${recipientId}`);

@@ -214,7 +224,7 @@ export class NotificationService extends BaseService {
@OnEvent({ name: 'SessionDelete' })
onSessionDelete({ sessionId }: ArgOf<'SessionDelete'>) {
// after the response is sent
setTimeout(() => this.eventRepository.clientSend('on_session_delete', sessionId, sessionId), 500);
setTimeout(() => this.websocketRepository.clientSend('on_session_delete', sessionId, sessionId), 500);
}

async sendTestEmail(id: string, dto: SystemConfigSmtpDto, tempTemplate?: string) {

@@ -295,6 +305,8 @@ export class NotificationService extends BaseService {
return JobStatus.Skipped;
}

await this.sendAlbumLocalNotification(album, recipientId, NotificationType.AlbumInvite, album.owner.name);

const { emailNotifications } = getPreferences(recipient.metadata);

if (!emailNotifications.enabled || !emailNotifications.albumInvite) {

@@ -344,6 +356,8 @@ export class NotificationService extends BaseService {
return JobStatus.Skipped;
}

await this.sendAlbumLocalNotification(album, recipientId, NotificationType.AlbumUpdate);

const attachment = await this.getAlbumThumbnailAttachment(album);

const { server, templates } = await this.getConfig({ withCache: false });

@@ -431,4 +445,25 @@ export class NotificationService extends BaseService {
cid: 'album-thumbnail',
};
}

private async sendAlbumLocalNotification(
album: MapAlbumDto,
userId: string,
type: NotificationType.AlbumInvite | NotificationType.AlbumUpdate,
senderName?: string,
) {
const isInvite = type === NotificationType.AlbumInvite;
const item = await this.notificationRepository.create({
userId,
type,
level: isInvite ? NotificationLevel.Success : NotificationLevel.Info,
title: isInvite ? 'Shared Album Invitation' : 'Shared Album Update',
description: isInvite
? `${senderName} shared an album (${album.albumName}) with you`
: `New media has been added to the album (${album.albumName})`,
data: JSON.stringify({ albumId: album.id }),
});

this.websocketRepository.clientSend('on_notification', userId, mapNotification(item));
}
}
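The new `sendAlbumLocalNotification` helper maps the notification type to a level, title, and description before persisting and pushing over the websocket. A small sketch of just that mapping, with the enum values inlined as string literals for the example:

// Sketch of the invite/update mapping used by sendAlbumLocalNotification;
// enum values are inlined as string literals here.
function albumNotificationContent(type: 'AlbumInvite' | 'AlbumUpdate', albumName: string, senderName?: string) {
  const isInvite = type === 'AlbumInvite';
  return {
    level: isInvite ? 'Success' : 'Info',
    title: isInvite ? 'Shared Album Invitation' : 'Shared Album Update',
    description: isInvite
      ? `${senderName} shared an album (${albumName}) with you`
      : `New media has been added to the album (${albumName})`,
  };
}

// albumNotificationContent('AlbumUpdate', 'Trip').title === 'Shared Album Update'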
server/src/services/ocr.service.spec.ts (new file, 177 lines)

@@ -0,0 +1,177 @@
import { AssetVisibility, ImmichWorker, JobName, JobStatus } from 'src/enum';
import { OcrService } from 'src/services/ocr.service';
import { assetStub } from 'test/fixtures/asset.stub';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';

describe(OcrService.name, () => {
let sut: OcrService;
let mocks: ServiceMocks;

beforeEach(() => {
({ sut, mocks } = newTestService(OcrService));

mocks.config.getWorker.mockReturnValue(ImmichWorker.Microservices);
});

it('should work', () => {
expect(sut).toBeDefined();
});

describe('handleQueueOcr', () => {
it('should do nothing if machine learning is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);

await sut.handleQueueOcr({ force: false });

expect(mocks.database.setDimensionSize).not.toHaveBeenCalled();
});

it('should queue the assets without ocr', async () => {
mocks.assetJob.streamForOcrJob.mockReturnValue(makeStream([assetStub.image]));

await sut.handleQueueOcr({ force: false });

expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.Ocr, data: { id: assetStub.image.id } }]);
expect(mocks.assetJob.streamForOcrJob).toHaveBeenCalledWith(false);
});

it('should queue all the assets', async () => {
mocks.assetJob.streamForOcrJob.mockReturnValue(makeStream([assetStub.image]));

await sut.handleQueueOcr({ force: true });

expect(mocks.job.queueAll).toHaveBeenCalledWith([{ name: JobName.Ocr, data: { id: assetStub.image.id } }]);
expect(mocks.assetJob.streamForOcrJob).toHaveBeenCalledWith(true);
});
});

describe('handleOcr', () => {
it('should do nothing if machine learning is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);

expect(await sut.handleOcr({ id: '123' })).toEqual(JobStatus.Skipped);

expect(mocks.asset.getByIds).not.toHaveBeenCalled();
expect(mocks.machineLearning.encodeImage).not.toHaveBeenCalled();
});

it('should skip assets without a resize path', async () => {
mocks.assetJob.getForOcr.mockResolvedValue({ visibility: AssetVisibility.Timeline, previewFile: null });

expect(await sut.handleOcr({ id: assetStub.noResizePath.id })).toEqual(JobStatus.Failed);

expect(mocks.ocr.upsert).not.toHaveBeenCalled();
expect(mocks.machineLearning.ocr).not.toHaveBeenCalled();
});

it('should save the returned objects', async () => {
mocks.machineLearning.ocr.mockResolvedValue({
box: [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160],
boxScore: [0.9, 0.8],
text: ['One Two Three', 'Four Five'],
textScore: [0.95, 0.85],
});
mocks.assetJob.getForOcr.mockResolvedValue({
visibility: AssetVisibility.Timeline,
previewFile: assetStub.image.files[1].path,
});

expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Success);

expect(mocks.machineLearning.ocr).toHaveBeenCalledWith(
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({
modelName: 'PP-OCRv5_mobile',
minDetectionScore: 0.5,
minRecognitionScore: 0.8,
maxResolution: 736,
}),
);
expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, [
{
assetId: assetStub.image.id,
boxScore: 0.9,
text: 'One Two Three',
textScore: 0.95,
x1: 10,
y1: 20,
x2: 30,
y2: 40,
x3: 50,
y3: 60,
x4: 70,
y4: 80,
},
{
assetId: assetStub.image.id,
boxScore: 0.8,
text: 'Four Five',
textScore: 0.85,
x1: 90,
y1: 100,
x2: 110,
y2: 120,
x3: 130,
y3: 140,
x4: 150,
y4: 160,
},
]);
});

it('should apply config settings', async () => {
mocks.systemMetadata.get.mockResolvedValue({
machineLearning: {
enabled: true,
ocr: {
modelName: 'PP-OCRv5_server',
enabled: true,
minDetectionScore: 0.8,
minRecognitionScore: 0.9,
maxResolution: 1500,
},
},
});
mocks.machineLearning.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
mocks.assetJob.getForOcr.mockResolvedValue({
visibility: AssetVisibility.Timeline,
previewFile: assetStub.image.files[1].path,
});

expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Success);

expect(mocks.machineLearning.ocr).toHaveBeenCalledWith(
'/uploads/user-id/thumbs/path.jpg',
expect.objectContaining({
modelName: 'PP-OCRv5_server',
minDetectionScore: 0.8,
minRecognitionScore: 0.9,
maxResolution: 1500,
}),
);
expect(mocks.ocr.upsert).toHaveBeenCalledWith(assetStub.image.id, []);
});

it('should skip invisible assets', async () => {
mocks.assetJob.getForOcr.mockResolvedValue({
visibility: AssetVisibility.Hidden,
previewFile: assetStub.image.files[1].path,
});

expect(await sut.handleOcr({ id: assetStub.livePhotoMotionAsset.id })).toEqual(JobStatus.Skipped);

expect(mocks.machineLearning.ocr).not.toHaveBeenCalled();
expect(mocks.ocr.upsert).not.toHaveBeenCalled();
});

it('should fail if asset could not be found', async () => {
mocks.assetJob.getForOcr.mockResolvedValue(void 0);

expect(await sut.handleOcr({ id: assetStub.image.id })).toEqual(JobStatus.Failed);

expect(mocks.machineLearning.ocr).not.toHaveBeenCalled();
expect(mocks.ocr.upsert).not.toHaveBeenCalled();
});
});
});

server/src/services/ocr.service.ts (new file, 86 lines)

@@ -0,0 +1,86 @@
import { Injectable } from '@nestjs/common';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { OnJob } from 'src/decorators';
import { AssetVisibility, JobName, JobStatus, QueueName } from 'src/enum';
import { OCR } from 'src/repositories/machine-learning.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { isOcrEnabled } from 'src/utils/misc';

@Injectable()
export class OcrService extends BaseService {
@OnJob({ name: JobName.OcrQueueAll, queue: QueueName.Ocr })
async handleQueueOcr({ force }: JobOf<JobName.OcrQueueAll>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: false });
if (!isOcrEnabled(machineLearning)) {
return JobStatus.Skipped;
}

if (force) {
await this.ocrRepository.deleteAll();
}

let jobs: JobItem[] = [];
const assets = this.assetJobRepository.streamForOcrJob(force);

for await (const asset of assets) {
jobs.push({ name: JobName.Ocr, data: { id: asset.id } });

if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
}
}

await this.jobRepository.queueAll(jobs);
return JobStatus.Success;
}

@OnJob({ name: JobName.Ocr, queue: QueueName.Ocr })
async handleOcr({ id }: JobOf<JobName.Ocr>): Promise<JobStatus> {
const { machineLearning } = await this.getConfig({ withCache: true });
if (!isOcrEnabled(machineLearning)) {
return JobStatus.Skipped;
}

const asset = await this.assetJobRepository.getForOcr(id);
if (!asset || !asset.previewFile) {
return JobStatus.Failed;
}

if (asset.visibility === AssetVisibility.Hidden) {
return JobStatus.Skipped;
}

const ocrResults = await this.machineLearningRepository.ocr(asset.previewFile, machineLearning.ocr);

await this.ocrRepository.upsert(id, this.parseOcrResults(id, ocrResults));

await this.assetRepository.upsertJobStatus({ assetId: id, ocrAt: new Date() });

this.logger.debug(`Processed ${ocrResults.text.length} OCR result(s) for ${id}`);
return JobStatus.Success;
}

private parseOcrResults(id: string, { box, boxScore, text, textScore }: OCR) {
const ocrDataList = [];
for (let i = 0; i < text.length; i++) {
const boxOffset = i * 8;
ocrDataList.push({
assetId: id,
x1: box[boxOffset],
y1: box[boxOffset + 1],
x2: box[boxOffset + 2],
y2: box[boxOffset + 3],
x3: box[boxOffset + 4],
y3: box[boxOffset + 5],
x4: box[boxOffset + 6],
y4: box[boxOffset + 7],
boxScore: boxScore[i],
textScore: textScore[i],
text: text[i],
});
}
return ocrDataList;
}
}
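The machine-learning repository returns the detections as parallel arrays, with the `box` array packing eight numbers (four corner points) per detection; `parseOcrResults` unpacks them into one row per recognized text span. A standalone sketch of that unpacking:

// Sketch of the quad unpacking performed by parseOcrResults above.
interface OcrResult {
  box: number[];
  boxScore: number[];
  text: string[];
  textScore: number[];
}

// Each detection i owns box[i*8 .. i*8+7]: the four (x, y) corners of its quad.
function unpackBoxes(assetId: string, { box, boxScore, text, textScore }: OcrResult) {
  return text.map((value, i) => {
    const o = i * 8;
    return {
      assetId,
      text: value,
      boxScore: boxScore[i],
      textScore: textScore[i],
      x1: box[o], y1: box[o + 1],
      x2: box[o + 2], y2: box[o + 3],
      x3: box[o + 4], y3: box[o + 5],
      x4: box[o + 6], y4: box[o + 7],
    };
  });
}

// unpackBoxes('a', { box: [10, 20, 30, 40, 50, 60, 70, 80], boxScore: [0.9], text: ['hi'], textScore: [0.95] })
// yields one row with corners (10,20)..(70,80), matching the spec expectations above.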
@@ -179,6 +179,26 @@ describe(SearchService.name, () => {
).resolves.toEqual(['Fujifilm X100VI', null]);
expect(mocks.search.getCameraModels).toHaveBeenCalledWith([authStub.user1.user.id], expect.anything());
});

it('should return search suggestions for camera lens model', async () => {
mocks.search.getCameraLensModels.mockResolvedValue(['10-24mm']);
mocks.partner.getAll.mockResolvedValue([]);

await expect(
sut.getSearchSuggestions(authStub.user1, { includeNull: false, type: SearchSuggestionType.CAMERA_LENS_MODEL }),
).resolves.toEqual(['10-24mm']);
expect(mocks.search.getCameraLensModels).toHaveBeenCalledWith([authStub.user1.user.id], expect.anything());
});

it('should return search suggestions for camera lens model (including null)', async () => {
mocks.search.getCameraLensModels.mockResolvedValue(['10-24mm']);
mocks.partner.getAll.mockResolvedValue([]);

await expect(
sut.getSearchSuggestions(authStub.user1, { includeNull: true, type: SearchSuggestionType.CAMERA_LENS_MODEL }),
).resolves.toEqual(['10-24mm', null]);
expect(mocks.search.getCameraLensModels).toHaveBeenCalledWith([authStub.user1.user.id], expect.anything());
});
});

describe('searchSmart', () => {

@@ -177,6 +177,9 @@ export class SearchService extends BaseService {
case SearchSuggestionType.CAMERA_MODEL: {
return this.searchRepository.getCameraModels(userIds, dto);
}
case SearchSuggestionType.CAMERA_LENS_MODEL: {
return this.searchRepository.getCameraLensModels(userIds, dto);
}
default: {
return Promise.resolve([]);
}

@@ -141,6 +141,7 @@ describe(ServerService.name, () => {
reverseGeocoding: true,
oauth: false,
oauthAutoLaunch: false,
ocr: true,
passwordLogin: true,
search: true,
sidecar: true,

@@ -19,7 +19,12 @@ import { UserStatsQueryResponse } from 'src/repositories/user.repository';
import { BaseService } from 'src/services/base.service';
import { asHumanReadable } from 'src/utils/bytes';
import { mimeTypes } from 'src/utils/mime-types';
import { isDuplicateDetectionEnabled, isFacialRecognitionEnabled, isSmartSearchEnabled } from 'src/utils/misc';
import {
isDuplicateDetectionEnabled,
isFacialRecognitionEnabled,
isOcrEnabled,
isSmartSearchEnabled,
} from 'src/utils/misc';

@Injectable()
export class ServerService extends BaseService {

@@ -97,6 +102,7 @@ export class ServerService extends BaseService {
trash: trash.enabled,
oauth: oauth.enabled,
oauthAutoLaunch: oauth.autoLaunch,
ocr: isOcrEnabled(machineLearning),
passwordLogin: passwordLogin.enabled,
configFile: !!configFile,
email: notifications.smtp.enabled,

@@ -43,17 +43,13 @@ describe('SessionService', () => {
describe('logoutDevices', () => {
it('should logout all devices', async () => {
const currentSession = factory.session();
const otherSession = factory.session();
const auth = factory.auth({ session: currentSession });

mocks.session.getByUserId.mockResolvedValue([currentSession, otherSession]);
mocks.session.delete.mockResolvedValue();
mocks.session.invalidate.mockResolvedValue();

await sut.deleteAll(auth);

expect(mocks.session.getByUserId).toHaveBeenCalledWith(auth.user.id);
expect(mocks.session.delete).toHaveBeenCalledWith(otherSession.id);
expect(mocks.session.delete).not.toHaveBeenCalledWith(currentSession.id);
expect(mocks.session.invalidate).toHaveBeenCalledWith({ userId: auth.user.id, excludeId: currentSession.id });
});
});

@@ -1,6 +1,6 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { OnJob } from 'src/decorators';
import { OnEvent, OnJob } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import {
SessionCreateDto,

@@ -10,6 +10,7 @@ import {
mapSession,
} from 'src/dtos/session.dto';
import { JobName, JobStatus, Permission, QueueName } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';

@Injectable()

@@ -69,18 +70,19 @@ export class SessionService extends BaseService {
await this.sessionRepository.delete(id);
}

async deleteAll(auth: AuthDto): Promise<void> {
const userId = auth.user.id;
const currentSessionId = auth.session?.id;
await this.sessionRepository.invalidate({ userId, excludeId: currentSessionId });
}

async lock(auth: AuthDto, id: string): Promise<void> {
await this.requireAccess({ auth, permission: Permission.SessionLock, ids: [id] });
await this.sessionRepository.update(id, { pinExpiresAt: null });
}

async deleteAll(auth: AuthDto): Promise<void> {
const sessions = await this.sessionRepository.getByUserId(auth.user.id);
for (const session of sessions) {
if (session.id === auth.session?.id) {
continue;
}
await this.sessionRepository.delete(session.id);
}
@OnEvent({ name: 'AuthChangePassword' })
async onAuthChangePassword({ userId, currentSessionId }: ArgOf<'AuthChangePassword'>): Promise<void> {
await this.sessionRepository.invalidate({ userId, excludeId: currentSessionId });
}
}
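The rewritten `deleteAll` replaces the fetch-and-loop deletion with one set-based `invalidate` call, which the new `AuthChangePassword` handler reuses. A minimal sketch of that shape, with a stubbed repository interface standing in for the real one:

// Sketch with a stubbed repository: one set-based invalidate call replaces the
// per-session delete loop, and the password-change handler reuses the same path.
interface SessionRepository {
  invalidate(options: { userId: string; excludeId?: string }): Promise<void>;
}

async function deleteAllSessions(
  sessions: SessionRepository,
  userId: string,
  currentSessionId?: string,
): Promise<void> {
  // Everything except the calling session is invalidated in a single query.
  await sessions.invalidate({ userId, excludeId: currentSessionId });
}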
@@ -300,6 +300,7 @@ describe(SharedLinkService.name, () => {
mocks.sharedLink.get.mockResolvedValue(_.cloneDeep(sharedLinkStub.individual));
mocks.sharedLink.create.mockResolvedValue(sharedLinkStub.individual);
mocks.sharedLink.update.mockResolvedValue(sharedLinkStub.individual);
mocks.sharedLinkAsset.remove.mockResolvedValue([assetStub.image.id]);

await expect(
sut.removeAssets(authStub.admin, 'link-1', { assetIds: [assetStub.image.id, 'asset-2'] }),

@@ -308,6 +309,7 @@ describe(SharedLinkService.name, () => {
{ assetId: 'asset-2', success: false, error: AssetIdErrorReason.NOT_FOUND },
]);

expect(mocks.sharedLinkAsset.remove).toHaveBeenCalledWith('link-1', [assetStub.image.id, 'asset-2']);
expect(mocks.sharedLink.update).toHaveBeenCalledWith({ ...sharedLinkStub.individual, assets: [] });
});
});

@@ -175,10 +175,12 @@ export class SharedLinkService extends BaseService {
throw new BadRequestException('Invalid shared link type');
}

const removedAssetIds = await this.sharedLinkAssetRepository.remove(id, dto.assetIds);

const results: AssetIdsResponseDto[] = [];
for (const assetId of dto.assetIds) {
const hasAsset = sharedLink.assets.find((asset) => asset.id === assetId);
if (!hasAsset) {
const wasRemoved = removedAssetIds.find((id) => id === assetId);
if (!wasRemoved) {
results.push({ assetId, success: false, error: AssetIdErrorReason.NOT_FOUND });
continue;
}
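With this change, per-asset success is derived from what `sharedLinkAssetRepository.remove` reports as actually removed instead of re-scanning the shared link's in-memory asset list. A hedged sketch of that result-building step, with stand-in names:

// Sketch: success/failure derived from what the repository reports as removed.
function toRemovalResults(requestedIds: string[], removedIds: string[]) {
  const removed = new Set(removedIds);
  return requestedIds.map((assetId) =>
    removed.has(assetId)
      ? { assetId, success: true as const }
      : { assetId, success: false as const, error: 'NOT_FOUND' as const },
  );
}

// toRemovalResults(['a1', 'a2'], ['a1']) marks a2 as NOT_FOUND, as in the spec above.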
@@ -115,7 +115,7 @@ export class StorageService extends BaseService {

if (!path.startsWith(previous)) {
throw new Error(
'Detected an inconsistent media location. For more information, see https://immich.app/errors#inconsistent-media-location',
'Detected an inconsistent media location. For more information, see https://docs.immich.app/errors#inconsistent-media-location',
);
}

@@ -39,6 +39,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
[QueueName.ThumbnailGeneration]: { concurrency: 3 },
[QueueName.VideoConversion]: { concurrency: 1 },
[QueueName.Notification]: { concurrency: 5 },
[QueueName.Ocr]: { concurrency: 1 },
},
backup: {
database: {

@@ -102,6 +103,13 @@ const updatedConfig = Object.freeze<SystemConfig>({
maxDistance: 0.5,
minFaces: 3,
},
ocr: {
enabled: true,
modelName: 'PP-OCRv5_mobile',
minDetectionScore: 0.5,
minRecognitionScore: 0.8,
maxResolution: 736,
},
},
map: {
enabled: true,

@@ -197,6 +205,7 @@ const updatedConfig = Object.freeze<SystemConfig>({
transport: {
host: '',
port: 587,
secure: false,
username: '',
password: '',
ignoreCert: false,

server/src/services/telemetry.service.ts (new file, 59 lines)

@@ -0,0 +1,59 @@
import { snakeCase } from 'lodash';
import { OnEvent } from 'src/decorators';
import { ImmichWorker, JobStatus } from 'src/enum';
import { ArgOf, ArgsOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';

export class TelemetryService extends BaseService {
@OnEvent({ name: 'AppBootstrap', workers: [ImmichWorker.Api] })
async onBootstrap(): Promise<void> {
const userCount = await this.userRepository.getCount();
this.telemetryRepository.api.addToGauge('immich.users.total', userCount);
}

@OnEvent({ name: 'UserCreate' })
onUserCreate() {
this.telemetryRepository.api.addToGauge(`immich.users.total`, 1);
}

@OnEvent({ name: 'UserTrash' })
onUserTrash() {
this.telemetryRepository.api.addToGauge(`immich.users.total`, -1);
}

@OnEvent({ name: 'UserRestore' })
onUserRestore() {
this.telemetryRepository.api.addToGauge(`immich.users.total`, 1);
}

@OnEvent({ name: 'JobStart' })
onJobStart(...[queueName]: ArgsOf<'JobStart'>) {
const queueMetric = `immich.queues.${snakeCase(queueName)}.active`;
this.telemetryRepository.jobs.addToGauge(queueMetric, 1);
}

@OnEvent({ name: 'JobSuccess' })
onJobSuccess({ job, response }: ArgOf<'JobSuccess'>) {
if (response && Object.values(JobStatus).includes(response as JobStatus)) {
const jobMetric = `immich.jobs.${snakeCase(job.name)}.${response}`;
this.telemetryRepository.jobs.addToCounter(jobMetric, 1);
}
}

@OnEvent({ name: 'JobError' })
onJobError({ job }: ArgOf<'JobError'>) {
const jobMetric = `immich.jobs.${snakeCase(job.name)}.${JobStatus.Failed}`;
this.telemetryRepository.jobs.addToCounter(jobMetric, 1);
}

@OnEvent({ name: 'JobComplete' })
onJobComplete(...[queueName]: ArgsOf<'JobComplete'>) {
const queueMetric = `immich.queues.${snakeCase(queueName)}.active`;
this.telemetryRepository.jobs.addToGauge(queueMetric, -1);
}

@OnEvent({ name: 'QueueStart' })
onQueueStart({ name }: ArgOf<'QueueStart'>) {
this.telemetryRepository.jobs.addToCounter(`immich.queues.${snakeCase(name)}.started`, 1);
}
}
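The new TelemetryService pairs `JobStart` (gauge +1) with `JobComplete` (gauge -1), so the per-queue `active` gauge reads as the number of in-flight jobs; because `JobComplete` is emitted from a `finally` block in JobService, the gauge stays balanced even when a job throws. A toy model of that invariant:

// Toy model of the gauge invariant used by onJobStart/onJobComplete above.
const active = new Map<string, number>();
const onJobStart = (queue: string) => active.set(queue, (active.get(queue) ?? 0) + 1);
const onJobComplete = (queue: string) => active.set(queue, (active.get(queue) ?? 0) - 1);

onJobStart('background_task');
onJobComplete('background_task');
console.log(active.get('background_task')); // 0, the queue is idle again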
@@ -36,10 +36,14 @@ describe(TimelineService.name, () => {
);

expect(mocks.access.album.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.user.id, new Set(['album-id']));
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
timeBucket: 'bucket',
albumId: 'album-id',
});
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
{
timeBucket: 'bucket',
albumId: 'album-id',
},
authStub.admin,
);
});

it('should return the assets for an archive time bucket if user has archive.read', async () => {

@@ -60,6 +64,7 @@ describe(TimelineService.name, () => {
visibility: AssetVisibility.Archive,
userIds: [authStub.admin.user.id],
}),
authStub.admin,
);
});

@@ -76,12 +81,16 @@ describe(TimelineService.name, () => {
withPartners: true,
}),
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
timeBucket: 'bucket',
visibility: AssetVisibility.Timeline,
withPartners: true,
userIds: [authStub.admin.user.id],
});
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
{
timeBucket: 'bucket',
visibility: AssetVisibility.Timeline,
withPartners: true,
userIds: [authStub.admin.user.id],
},
authStub.admin,
);
});

it('should check permissions to read tag', async () => {

@@ -96,11 +105,15 @@ describe(TimelineService.name, () => {
tagId: 'tag-123',
}),
).resolves.toEqual(json);
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith('bucket', {
tagId: 'tag-123',
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
});
expect(mocks.asset.getTimeBucket).toHaveBeenCalledWith(
'bucket',
{
tagId: 'tag-123',
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
},
authStub.admin,
);
});

it('should return the assets for a library time bucket if user has library.read', async () => {

@@ -119,6 +132,7 @@ describe(TimelineService.name, () => {
timeBucket: 'bucket',
userIds: [authStub.admin.user.id],
}),
authStub.admin,
);
});

@@ -21,7 +21,7 @@ export class TimelineService extends BaseService {
const timeBucketOptions = await this.buildTimeBucketOptions(auth, { ...dto });

// TODO: use id cursor for pagination
const bucket = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions);
const bucket = await this.assetRepository.getTimeBucket(dto.timeBucket, timeBucketOptions, auth);
return bucket.assets;
}

@@ -2,6 +2,7 @@ import { BadRequestException, ForbiddenException, Injectable } from '@nestjs/com
import { SALT_ROUNDS } from 'src/constants';
import { AssetStatsDto, AssetStatsResponseDto, mapStats } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { SessionResponseDto, mapSession } from 'src/dtos/session.dto';
import { UserPreferencesResponseDto, UserPreferencesUpdateDto, mapPreferences } from 'src/dtos/user-preferences.dto';
import {
UserAdminCreateDto,

@@ -103,6 +104,8 @@ export class UserAdminService extends BaseService {
const status = force ? UserStatus.Removing : UserStatus.Deleted;
const user = await this.userRepository.update(id, { status, deletedAt: new Date() });

await this.eventRepository.emit('UserTrash', user);

if (force) {
await this.jobRepository.queue({ name: JobName.UserDelete, data: { id: user.id, force } });
}

@@ -114,9 +117,15 @@ export class UserAdminService extends BaseService {
await this.findOrFail(id, { withDeleted: true });
await this.albumRepository.restoreAll(id);
const user = await this.userRepository.restore(id);
await this.eventRepository.emit('UserRestore', user);
return mapUserAdmin(user);
}

async getSessions(auth: AuthDto, id: string): Promise<SessionResponseDto[]> {
const sessions = await this.sessionRepository.getByUserId(id);
return sessions.map((session) => mapSession(session));
}

async getStatistics(auth: AuthDto, id: string, dto: AssetStatsDto): Promise<AssetStatsResponseDto> {
const stats = await this.assetRepository.getStatistics(id, dto);
return mapStats(stats);