Mirror of https://github.com/immich-app/immich.git (synced 2026-02-11 03:17:59 +03:00)

Merge branch 'main' into filter-by-person
@@ -1 +1 @@
-24.11.1
+24.13.0
@@ -1,4 +1,4 @@
-FROM ghcr.io/immich-app/base-server-dev:202511261514@sha256:cbcca5851fd11042463f09797e6d6068d94adbb108749e62aa69159df59c0591 AS builder
+FROM ghcr.io/immich-app/base-server-dev:202601131104@sha256:8d907eb3fe10dba4a1e034fd0060ea68c01854d92fcc9debc6b868b98f888ba7 AS builder
 ENV COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
     CI=1 \
     COREPACK_HOME=/tmp \
@@ -52,7 +52,7 @@ FROM builder AS plugins

 ARG TARGETPLATFORM

-COPY --from=ghcr.io/jdx/mise:2025.11.3@sha256:ac26f5978c0e2783f3e68e58ce75eddb83e41b89bf8747c503bac2aa9baf22c5 /usr/local/bin/mise /usr/local/bin/mise
+COPY --from=ghcr.io/jdx/mise:2026.1.1@sha256:a55c391f7582f34c58bce1a85090cd526596402ba77fc32b06c49b8404ef9c14 /usr/local/bin/mise /usr/local/bin/mise

 WORKDIR /usr/src/app
 COPY ./plugins/mise.toml ./plugins/
@@ -71,7 +71,7 @@ RUN --mount=type=cache,id=pnpm-plugins,target=/buildcache/pnpm-store \
     --mount=type=cache,id=mise-tools-${TARGETPLATFORM},target=/buildcache/mise \
     cd plugins && mise run build

-FROM ghcr.io/immich-app/base-server-prod:202511261514@sha256:c04c1c38dd90e53455b180aedf93c3c63474c8d20ffe2c6d7a3a61a2181e6d29
+FROM ghcr.io/immich-app/base-server-prod:202601131104@sha256:c649c5838b6348836d27db6d49cadbbc6157feae7a1a237180c3dec03577ba8f

 WORKDIR /usr/src/app
 ENV NODE_ENV=production \
@@ -1,5 +1,5 @@
 # dev build
-FROM ghcr.io/immich-app/base-server-dev:202511261514@sha256:cbcca5851fd11042463f09797e6d6068d94adbb108749e62aa69159df59c0591 AS dev
+FROM ghcr.io/immich-app/base-server-dev:202601131104@sha256:8d907eb3fe10dba4a1e034fd0060ea68c01854d92fcc9debc6b868b98f888ba7 AS dev

 ENV COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
     CI=1 \
@@ -45,14 +45,14 @@
     "@nestjs/websockets": "^11.0.4",
     "@opentelemetry/api": "^1.9.0",
     "@opentelemetry/context-async-hooks": "^2.0.0",
-    "@opentelemetry/exporter-prometheus": "^0.208.0",
-    "@opentelemetry/instrumentation-http": "^0.208.0",
-    "@opentelemetry/instrumentation-ioredis": "^0.56.0",
-    "@opentelemetry/instrumentation-nestjs-core": "^0.55.0",
-    "@opentelemetry/instrumentation-pg": "^0.61.0",
+    "@opentelemetry/exporter-prometheus": "^0.210.0",
+    "@opentelemetry/instrumentation-http": "^0.210.0",
+    "@opentelemetry/instrumentation-ioredis": "^0.58.0",
+    "@opentelemetry/instrumentation-nestjs-core": "^0.56.0",
+    "@opentelemetry/instrumentation-pg": "^0.62.0",
     "@opentelemetry/resources": "^2.0.1",
     "@opentelemetry/sdk-metrics": "^2.0.1",
-    "@opentelemetry/sdk-node": "^0.208.0",
+    "@opentelemetry/sdk-node": "^0.210.0",
     "@opentelemetry/semantic-conventions": "^1.34.0",
     "@react-email/components": "^0.5.0",
     "@react-email/render": "^1.1.2",
@@ -70,7 +70,7 @@
     "cookie": "^1.0.2",
     "cookie-parser": "^1.4.7",
     "cron": "4.3.5",
-    "exiftool-vendored": "^34.0.0",
+    "exiftool-vendored": "^34.3.0",
     "express": "^5.1.0",
     "fast-glob": "^3.3.2",
     "fluent-ffmpeg": "^2.1.2",
@@ -96,7 +96,7 @@
     "pg": "^8.11.3",
     "pg-connection-string": "^2.9.1",
     "picomatch": "^4.0.2",
-    "postgres": "3.4.7",
+    "postgres": "3.4.8",
     "react": "^19.0.0",
     "react-dom": "^19.0.0",
     "react-email": "^4.0.0",
@@ -110,6 +110,7 @@
     "socket.io": "^4.8.1",
     "tailwindcss-preset-email": "^1.4.0",
     "thumbhash": "^0.1.1",
+    "transformation-matrix": "^3.1.0",
     "ua-parser-js": "^2.0.0",
     "uuid": "^11.1.0",
     "validator": "^13.12.0"
@@ -128,13 +129,13 @@
     "@types/cookie-parser": "^1.4.8",
     "@types/express": "^5.0.0",
     "@types/fluent-ffmpeg": "^2.1.21",
-    "@types/jsonwebtoken": "^9.0.10",
+    "@types/js-yaml": "^4.0.9",
     "@types/jsonwebtoken": "^9.0.10",
     "@types/lodash": "^4.14.197",
     "@types/luxon": "^3.6.2",
     "@types/mock-fs": "^4.13.1",
     "@types/multer": "^2.0.0",
-    "@types/node": "^24.10.3",
+    "@types/node": "^24.10.8",
     "@types/nodemailer": "^7.0.0",
     "@types/picomatch": "^4.0.0",
     "@types/pngjs": "^6.0.5",
@@ -162,11 +163,11 @@
     "typescript": "^5.9.2",
     "typescript-eslint": "^8.28.0",
     "unplugin-swc": "^1.4.5",
-    "vite-tsconfig-paths": "^5.0.0",
+    "vite-tsconfig-paths": "^6.0.0",
     "vitest": "^3.0.0"
   },
   "volta": {
-    "node": "24.11.1"
+    "node": "24.13.0"
   },
   "overrides": {
     "sharp": "^0.34.5"
@@ -10,6 +10,7 @@ import { IWorker } from 'src/constants';
 import { controllers } from 'src/controllers';
 import { ImmichWorker } from 'src/enum';
 import { MaintenanceAuthGuard } from 'src/maintenance/maintenance-auth.guard';
 import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
 import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
 import { MaintenanceWorkerController } from 'src/maintenance/maintenance-worker.controller';
 import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
@@ -21,8 +22,11 @@ import { LoggingInterceptor } from 'src/middleware/logging.interceptor';
 import { repositories } from 'src/repositories';
 import { AppRepository } from 'src/repositories/app.repository';
 import { ConfigRepository } from 'src/repositories/config.repository';
 import { DatabaseRepository } from 'src/repositories/database.repository';
 import { EventRepository } from 'src/repositories/event.repository';
 import { LoggingRepository } from 'src/repositories/logging.repository';
 import { ProcessRepository } from 'src/repositories/process.repository';
 import { StorageRepository } from 'src/repositories/storage.repository';
 import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
 import { teardownTelemetry, TelemetryRepository } from 'src/repositories/telemetry.repository';
 import { WebsocketRepository } from 'src/repositories/websocket.repository';
@@ -103,8 +107,12 @@ export class ApiModule extends BaseModule {}
   providers: [
     ConfigRepository,
     LoggingRepository,
     StorageRepository,
     ProcessRepository,
     DatabaseRepository,
     SystemMetadataRepository,
     AppRepository,
     MaintenanceHealthRepository,
     MaintenanceWebsocketRepository,
     MaintenanceWorkerService,
     ...commonMiddleware,
@@ -116,9 +124,14 @@ export class MaintenanceModule {
   constructor(
     @Inject(IWorker) private worker: ImmichWorker,
     logger: LoggingRepository,
+    private maintenanceWorkerService: MaintenanceWorkerService,
   ) {
     logger.setAppName(this.worker);
   }
+
+  async onModuleInit() {
+    await this.maintenanceWorkerService.init();
+  }
 }

 @Module({
@@ -236,6 +236,7 @@ export const defaults = Object.freeze<SystemConfig>({
     [QueueName.Notification]: { concurrency: 5 },
     [QueueName.Ocr]: { concurrency: 1 },
     [QueueName.Workflow]: { concurrency: 5 },
+    [QueueName.Editor]: { concurrency: 2 },
   },
   logging: {
     enabled: true,
@@ -5,7 +5,7 @@ import { SemVer } from 'semver';
 import { ApiTag, DatabaseExtension, ExifOrientation, VectorIndex } from 'src/enum';

 export const POSTGRES_VERSION_RANGE = '>=14.0.0';
-export const VECTORCHORD_VERSION_RANGE = '>=0.3 <0.6';
+export const VECTORCHORD_VERSION_RANGE = '>=0.3 <2';
 export const VECTORS_VERSION_RANGE = '>=0.2 <0.4';
 export const VECTOR_VERSION_RANGE = '>=0.5 <1';

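The widened VectorChord range is the only functional change in this hunk: it lets Immich accept VectorChord 1.x releases. A minimal sketch of how such a range constant is typically enforced with the semver package this file already imports from (the helper name and error message are illustrative, not code from the repository):

import { satisfies } from 'semver';

const VECTORCHORD_VERSION_RANGE = '>=0.3 <2';

// Hypothetical startup guard: '0.5.3' satisfied the old '>=0.3 <0.6' range too,
// while '1.2.0' is only accepted by the widened range.
export function assertVectorChordVersion(installed: string): void {
  if (!satisfies(installed, VECTORCHORD_VERSION_RANGE)) {
    throw new Error(`VectorChord ${installed} is outside the supported range ${VECTORCHORD_VERSION_RANGE}`);
  }
}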
@@ -141,6 +141,7 @@ export const endpointTags: Record<ApiTag, string> = {
   [ApiTag.Assets]: 'An asset is an image or video that has been uploaded to Immich.',
   [ApiTag.Authentication]: 'Endpoints related to user authentication, including OAuth.',
   [ApiTag.AuthenticationAdmin]: 'Administrative endpoints related to authentication.',
+  [ApiTag.DatabaseBackups]: 'Manage backups of the Immich database.',
   [ApiTag.Deprecated]: 'Deprecated endpoints that are planned for removal in the next major release.',
   [ApiTag.Download]: 'Endpoints for downloading assets or collections of assets.',
   [ApiTag.Duplicates]: 'Endpoints for managing and identifying duplicate assets.',
@@ -15,7 +15,7 @@ import {
   UploadedFiles,
   UseInterceptors,
 } from '@nestjs/common';
-import { ApiBody, ApiConsumes, ApiHeader, ApiTags } from '@nestjs/swagger';
+import { ApiBody, ApiConsumes, ApiHeader, ApiResponse, ApiTags } from '@nestjs/swagger';
 import { NextFunction, Request, Response } from 'express';
 import { Endpoint, HistoryBuilder } from 'src/decorators';
 import {
@@ -33,6 +33,7 @@ import {
   CheckExistingAssetsDto,
   UploadFieldName,
 } from 'src/dtos/asset-media.dto';
+import { AssetDownloadOriginalDto } from 'src/dtos/asset.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { ApiTag, ImmichHeader, Permission, RouteKey } from 'src/enum';
 import { AssetUploadInterceptor } from 'src/middleware/asset-upload.interceptor';
@@ -62,6 +63,16 @@ export class AssetMediaController {
     required: false,
   })
   @ApiBody({ description: 'Asset Upload Information', type: AssetMediaCreateDto })
+  @ApiResponse({
+    status: 200,
+    description: 'Asset is a duplicate',
+    type: AssetMediaResponseDto,
+  })
+  @ApiResponse({
+    status: 201,
+    description: 'Asset uploaded successfully',
+    type: AssetMediaResponseDto,
+  })
   @Endpoint({
     summary: 'Upload asset',
     description: 'Uploads a new asset to the server.',
@@ -94,15 +105,21 @@ export class AssetMediaController {
   async downloadAsset(
     @Auth() auth: AuthDto,
     @Param() { id }: UUIDParamDto,
+    @Query() dto: AssetDownloadOriginalDto,
     @Res() res: Response,
     @Next() next: NextFunction,
   ) {
-    await sendFile(res, next, () => this.service.downloadOriginal(auth, id), this.logger);
+    await sendFile(res, next, () => this.service.downloadOriginal(auth, id, dto), this.logger);
   }

   @Put(':id/original')
   @UseInterceptors(FileUploadInterceptor)
   @ApiConsumes('multipart/form-data')
+  @ApiResponse({
+    status: 200,
+    description: 'Asset replaced successfully',
+    type: AssetMediaResponseDto,
+  })
   @Endpoint({
     summary: 'Replace asset',
     description: 'Replace the asset with new file, without changing its id.',
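The two new @ApiResponse decorators document both status codes the upload route can already return: 200 when the server recognizes the upload as a duplicate and answers with the existing asset, 201 when a new asset is created. A hedged client-side sketch of telling the two apart (the route and form-field name are assumptions, not confirmed by this diff):

// Illustrative upload client; the '/api/assets' route and the
// 'assetData' field name are assumptions for this sketch.
async function uploadAsset(baseUrl: string, apiKey: string, file: Blob): Promise<'created' | 'duplicate'> {
  const form = new FormData();
  form.append('assetData', file);
  const res = await fetch(`${baseUrl}/api/assets`, {
    method: 'POST',
    headers: { 'x-api-key': apiKey },
    body: form,
  });
  if (res.status === 200) return 'duplicate'; // server answered with the existing asset
  if (res.status === 201) return 'created';
  throw new Error(`upload failed with status ${res.status}`);
}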
@@ -79,6 +79,74 @@ describe(AssetController.name, () => {
     });
   });

+  describe('PUT /assets/metadata', () => {
+    it('should be an authenticated route', async () => {
+      await request(ctx.getHttpServer()).put(`/assets/metadata`);
+      expect(ctx.authenticate).toHaveBeenCalled();
+    });
+
+    it('should require a valid assetId', async () => {
+      const { status, body } = await request(ctx.getHttpServer())
+        .put('/assets/metadata')
+        .send({ items: [{ assetId: '123', key: 'test', value: {} }] });
+      expect(status).toBe(400);
+      expect(body).toEqual(factory.responses.badRequest(expect.arrayContaining(['items.0.assetId must be a UUID'])));
+    });
+
+    it('should require a key', async () => {
+      const { status, body } = await request(ctx.getHttpServer())
+        .put('/assets/metadata')
+        .send({ items: [{ assetId: factory.uuid(), value: {} }] });
+      expect(status).toBe(400);
+      expect(body).toEqual(
+        factory.responses.badRequest(
+          expect.arrayContaining(['items.0.key must be a string', 'items.0.key should not be empty']),
+        ),
+      );
+    });
+
+    it('should work', async () => {
+      const { status } = await request(ctx.getHttpServer())
+        .put('/assets/metadata')
+        .send({ items: [{ assetId: factory.uuid(), key: AssetMetadataKey.MobileApp, value: { iCloudId: '123' } }] });
+      expect(status).toBe(200);
+    });
+  });
+
+  describe('DELETE /assets/metadata', () => {
+    it('should be an authenticated route', async () => {
+      await request(ctx.getHttpServer()).delete(`/assets/metadata`);
+      expect(ctx.authenticate).toHaveBeenCalled();
+    });
+
+    it('should require a valid assetId', async () => {
+      const { status, body } = await request(ctx.getHttpServer())
+        .delete('/assets/metadata')
+        .send({ items: [{ assetId: '123', key: 'test' }] });
+      expect(status).toBe(400);
+      expect(body).toEqual(factory.responses.badRequest(expect.arrayContaining(['items.0.assetId must be a UUID'])));
+    });
+
+    it('should require a key', async () => {
+      const { status, body } = await request(ctx.getHttpServer())
+        .delete('/assets/metadata')
+        .send({ items: [{ assetId: factory.uuid() }] });
+      expect(status).toBe(400);
+      expect(body).toEqual(
+        factory.responses.badRequest(
+          expect.arrayContaining(['items.0.key must be a string', 'items.0.key should not be empty']),
+        ),
+      );
+    });
+
+    it('should work', async () => {
+      const { status } = await request(ctx.getHttpServer())
+        .delete('/assets/metadata')
+        .send({ items: [{ assetId: factory.uuid(), key: AssetMetadataKey.MobileApp }] });
+      expect(status).toBe(204);
+    });
+  });
+
   describe('PUT /assets/:id', () => {
     it('should be an authenticated route', async () => {
       await request(ctx.getHttpServer()).get(`/assets/123`);
@@ -169,12 +237,10 @@ describe(AssetController.name, () => {
     it('should require each item to have a valid key', async () => {
       const { status, body } = await request(ctx.getHttpServer())
         .put(`/assets/${factory.uuid()}/metadata`)
-        .send({ items: [{ key: 'someKey' }] });
+        .send({ items: [{ value: { some: 'value' } }] });
       expect(status).toBe(400);
       expect(body).toEqual(
-        factory.responses.badRequest(
-          expect.arrayContaining([expect.stringContaining('items.0.key must be one of the following values')]),
-        ),
+        factory.responses.badRequest(['items.0.key must be a string', 'items.0.key should not be empty']),
       );
     });

@@ -224,15 +290,63 @@ describe(AssetController.name, () => {
       expect(status).toBe(400);
       expect(body).toEqual(factory.responses.badRequest(expect.arrayContaining(['id must be a UUID'])));
     });
-
-    it('should require a valid key', async () => {
-      const { status, body } = await request(ctx.getHttpServer()).get(`/assets/${factory.uuid()}/metadata/invalid`);
-      expect(status).toBe(400);
-      expect(body).toEqual(
-        factory.responses.badRequest(
-          expect.arrayContaining([expect.stringContaining('key must be one of the following value')]),
-        ),
-      );
-    });
   });

+  describe('PUT /assets/:id/edits', () => {
+    it('should be an authenticated route', async () => {
+      await request(ctx.getHttpServer()).put(`/assets/${factory.uuid()}/edits`).send({ edits: [] });
+      expect(ctx.authenticate).toHaveBeenCalled();
+    });
+
+    it('should accept valid edits and pass to service correctly', async () => {
+      const edits = [
+        {
+          action: 'crop',
+          parameters: {
+            x: 0,
+            y: 0,
+            width: 100,
+            height: 100,
+          },
+        },
+      ];
+
+      const assetId = factory.uuid();
+      const { status } = await request(ctx.getHttpServer()).put(`/assets/${assetId}/edits`).send({
+        edits,
+      });
+
+      expect(service.editAsset).toHaveBeenCalledWith(undefined, assetId, { edits });
+      expect(status).toBe(200);
+    });
+
+    it('should require a valid id', async () => {
+      const { status, body } = await request(ctx.getHttpServer())
+        .put(`/assets/123/edits`)
+        .send({
+          edits: [
+            {
+              action: 'crop',
+              parameters: {
+                x: 0,
+                y: 0,
+                width: 100,
+                height: 100,
+              },
+            },
+          ],
+        });
+      expect(status).toBe(400);
+      expect(body).toEqual(factory.responses.badRequest(expect.arrayContaining(['id must be a UUID'])));
+    });
+
+    it('should require at least one edit', async () => {
+      const { status, body } = await request(ctx.getHttpServer())
+        .put(`/assets/${factory.uuid()}/edits`)
+        .send({ edits: [] });
+      expect(status).toBe(400);
+      expect(body).toEqual(factory.responses.badRequest(['edits must contain at least 1 elements']));
+    });
+  });

@@ -247,13 +361,5 @@ describe(AssetController.name, () => {
       expect(status).toBe(400);
       expect(body).toEqual(factory.responses.badRequest(['id must be a UUID']));
     });
-
-    it('should require a valid key', async () => {
-      const { status, body } = await request(ctx.getHttpServer()).delete(`/assets/${factory.uuid()}/metadata/invalid`);
-      expect(status).toBe(400);
-      expect(body).toEqual(
-        factory.responses.badRequest([expect.stringContaining('key must be one of the following values')]),
-      );
-    });
   });
 });
@@ -7,6 +7,9 @@ import {
   AssetBulkUpdateDto,
   AssetCopyDto,
   AssetJobsDto,
+  AssetMetadataBulkDeleteDto,
+  AssetMetadataBulkResponseDto,
+  AssetMetadataBulkUpsertDto,
   AssetMetadataResponseDto,
   AssetMetadataRouteParams,
   AssetMetadataUpsertDto,
@@ -17,6 +20,7 @@ import {
   UpdateAssetDto,
 } from 'src/dtos/asset.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
+import { AssetEditActionListDto, AssetEditsDto } from 'src/dtos/editing.dto';
 import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
 import { ApiTag, Permission, RouteKey } from 'src/enum';
 import { Auth, Authenticated } from 'src/middleware/auth.guard';
@@ -120,6 +124,32 @@ export class AssetController {
     return this.service.copy(auth, dto);
   }

+  @Put('metadata')
+  @Authenticated({ permission: Permission.AssetUpdate })
+  @Endpoint({
+    summary: 'Upsert asset metadata',
+    description: 'Upsert metadata key-value pairs for multiple assets.',
+    history: new HistoryBuilder().added('v1').beta('v2.5.0'),
+  })
+  updateBulkAssetMetadata(
+    @Auth() auth: AuthDto,
+    @Body() dto: AssetMetadataBulkUpsertDto,
+  ): Promise<AssetMetadataBulkResponseDto[]> {
+    return this.service.upsertBulkMetadata(auth, dto);
+  }
+
+  @Delete('metadata')
+  @Authenticated({ permission: Permission.AssetUpdate })
+  @HttpCode(HttpStatus.NO_CONTENT)
+  @Endpoint({
+    summary: 'Delete asset metadata',
+    description: 'Delete metadata key-value pairs for multiple assets.',
+    history: new HistoryBuilder().added('v1').beta('v2.5.0'),
+  })
+  deleteBulkAssetMetadata(@Auth() auth: AuthDto, @Body() dto: AssetMetadataBulkDeleteDto): Promise<void> {
+    return this.service.deleteBulkMetadata(auth, dto);
+  }
+
   @Put(':id')
   @Authenticated({ permission: Permission.AssetUpdate })
   @Endpoint({
@@ -197,4 +227,42 @@ export class AssetController {
   deleteAssetMetadata(@Auth() auth: AuthDto, @Param() { id, key }: AssetMetadataRouteParams): Promise<void> {
     return this.service.deleteMetadataByKey(auth, id, key);
   }
+
+  @Get(':id/edits')
+  @Authenticated({ permission: Permission.AssetEditGet })
+  @Endpoint({
+    summary: 'Retrieve edits for an existing asset',
+    description: 'Retrieve a series of edit actions (crop, rotate, mirror) associated with the specified asset.',
+    history: new HistoryBuilder().added('v2.5.0').beta('v2.5.0'),
+  })
+  getAssetEdits(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<AssetEditsDto> {
+    return this.service.getAssetEdits(auth, id);
+  }
+
+  @Put(':id/edits')
+  @Authenticated({ permission: Permission.AssetEditCreate })
+  @Endpoint({
+    summary: 'Apply edits to an existing asset',
+    description: 'Apply a series of edit actions (crop, rotate, mirror) to the specified asset.',
+    history: new HistoryBuilder().added('v2.5.0').beta('v2.5.0'),
+  })
+  editAsset(
+    @Auth() auth: AuthDto,
+    @Param() { id }: UUIDParamDto,
+    @Body() dto: AssetEditActionListDto,
+  ): Promise<AssetEditsDto> {
+    return this.service.editAsset(auth, id, dto);
+  }
+
+  @Delete(':id/edits')
+  @Authenticated({ permission: Permission.AssetEditDelete })
+  @HttpCode(HttpStatus.NO_CONTENT)
+  @Endpoint({
+    summary: 'Remove edits from an existing asset',
+    description: 'Removes all edit actions (crop, rotate, mirror) associated with the specified asset.',
+    history: new HistoryBuilder().added('v2.5.0').beta('v2.5.0'),
+  })
+  removeAssetEdits(@Auth() auth: AuthDto, @Param() { id }: UUIDParamDto): Promise<void> {
+    return this.service.removeAssetEdits(auth, id);
+  }
 }
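A hedged usage sketch of the new bulk metadata routes; the request shapes mirror the spec tests above, and keys are now free-form strings per the DTO changes further down. The base URL, API key header, and the example key string are placeholders:

const baseUrl = 'http://localhost:2283/api';
const headers = { 'x-api-key': '<api-key>', 'Content-Type': 'application/json' };

// Upsert metadata on several assets in one call; the response lists the
// upserted rows (AssetMetadataBulkResponseDto[]).
await fetch(`${baseUrl}/assets/metadata`, {
  method: 'PUT',
  headers,
  body: JSON.stringify({
    items: [{ assetId: '<asset-uuid>', key: 'mobile-app', value: { iCloudId: '123' } }],
  }),
});

// Delete the same key again; the route answers 204 No Content.
await fetch(`${baseUrl}/assets/metadata`, {
  method: 'DELETE',
  headers,
  body: JSON.stringify({ items: [{ assetId: '<asset-uuid>', key: 'mobile-app' }] }),
});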
server/src/controllers/database-backup.controller.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
+import { Body, Controller, Delete, Get, Next, Param, Post, Res, UploadedFile, UseInterceptors } from '@nestjs/common';
+import { FileInterceptor } from '@nestjs/platform-express';
+import { ApiBody, ApiConsumes, ApiTags } from '@nestjs/swagger';
+import { NextFunction, Response } from 'express';
+import { Endpoint, HistoryBuilder } from 'src/decorators';
+import {
+  DatabaseBackupDeleteDto,
+  DatabaseBackupListResponseDto,
+  DatabaseBackupUploadDto,
+} from 'src/dtos/database-backup.dto';
+import { ApiTag, ImmichCookie, Permission } from 'src/enum';
+import { Authenticated, FileResponse, GetLoginDetails } from 'src/middleware/auth.guard';
+import { LoggingRepository } from 'src/repositories/logging.repository';
+import { LoginDetails } from 'src/services/auth.service';
+import { DatabaseBackupService } from 'src/services/database-backup.service';
+import { MaintenanceService } from 'src/services/maintenance.service';
+import { sendFile } from 'src/utils/file';
+import { respondWithCookie } from 'src/utils/response';
+import { FilenameParamDto } from 'src/validation';
+
+@ApiTags(ApiTag.DatabaseBackups)
+@Controller('admin/database-backups')
+export class DatabaseBackupController {
+  constructor(
+    private logger: LoggingRepository,
+    private service: DatabaseBackupService,
+    private maintenanceService: MaintenanceService,
+  ) {}
+
+  @Get()
+  @Endpoint({
+    summary: 'List database backups',
+    description: 'Get the list of the successful and failed backups',
+    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
+  })
+  @Authenticated({ permission: Permission.Maintenance, admin: true })
+  listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
+    return this.service.listBackups();
+  }
+
+  @Get(':filename')
+  @FileResponse()
+  @Endpoint({
+    summary: 'Download database backup',
+    description: 'Downloads the database backup file',
+    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
+  })
+  @Authenticated({ permission: Permission.BackupDownload, admin: true })
+  async downloadDatabaseBackup(
+    @Param() { filename }: FilenameParamDto,
+    @Res() res: Response,
+    @Next() next: NextFunction,
+  ): Promise<void> {
+    await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
+  }
+
+  @Delete()
+  @Endpoint({
+    summary: 'Delete database backup',
+    description: 'Delete a backup by its filename',
+    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
+  })
+  @Authenticated({ permission: Permission.BackupDelete, admin: true })
+  async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
+    return this.service.deleteBackup(dto.backups);
+  }
+
+  @Post('start-restore')
+  @Endpoint({
+    summary: 'Start database backup restore flow',
+    description: 'Put Immich into maintenance mode to restore a backup (Immich must not be configured)',
+    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
+  })
+  async startDatabaseRestoreFlow(
+    @GetLoginDetails() loginDetails: LoginDetails,
+    @Res({ passthrough: true }) res: Response,
+  ): Promise<void> {
+    const { jwt } = await this.maintenanceService.startRestoreFlow();
+    return respondWithCookie(res, undefined, {
+      isSecure: loginDetails.isSecure,
+      values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
+    });
+  }
+
+  @Post('upload')
+  @Authenticated({ permission: Permission.BackupUpload, admin: true })
+  @ApiConsumes('multipart/form-data')
+  @ApiBody({ description: 'Backup Upload', type: DatabaseBackupUploadDto })
+  @Endpoint({
+    summary: 'Upload database backup',
+    description: 'Uploads .sql/.sql.gz file to restore backup from',
+    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
+  })
+  @UseInterceptors(FileInterceptor('file'))
+  uploadDatabaseBackup(
+    @UploadedFile()
+    file: Express.Multer.File,
+  ): Promise<void> {
+    return this.service.uploadBackup(file);
+  }
+}
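A short admin-side sketch against the new controller (alpha as of v2.5.0); the URL and credentials are placeholders:

const base = 'http://localhost:2283/api/admin/database-backups';
const headers = { 'x-api-key': '<admin-api-key>' };

// List successful and failed backups, then stream the first one.
const { backups } = await fetch(base, { headers }).then((r) => r.json());
if (backups.length > 0) {
  const res = await fetch(`${base}/${encodeURIComponent(backups[0].filename)}`, { headers });
  // res.body is the raw .sql / .sql.gz stream
}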
@@ -6,6 +6,7 @@ import { AssetMediaController } from 'src/controllers/asset-media.controller';
 import { AssetController } from 'src/controllers/asset.controller';
 import { AuthAdminController } from 'src/controllers/auth-admin.controller';
 import { AuthController } from 'src/controllers/auth.controller';
+import { DatabaseBackupController } from 'src/controllers/database-backup.controller';
 import { DownloadController } from 'src/controllers/download.controller';
 import { DuplicateController } from 'src/controllers/duplicate.controller';
 import { FaceController } from 'src/controllers/face.controller';
@@ -46,6 +47,7 @@ export const controllers = [
   AssetMediaController,
   AuthController,
   AuthAdminController,
+  DatabaseBackupController,
   DownloadController,
   DuplicateController,
   FaceController,
@@ -1,9 +1,15 @@
-import { BadRequestException, Body, Controller, Post, Res } from '@nestjs/common';
+import { BadRequestException, Body, Controller, Get, Post, Res } from '@nestjs/common';
 import { ApiTags } from '@nestjs/swagger';
 import { Response } from 'express';
 import { Endpoint, HistoryBuilder } from 'src/decorators';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
+import {
+  MaintenanceAuthDto,
+  MaintenanceDetectInstallResponseDto,
+  MaintenanceLoginDto,
+  MaintenanceStatusResponseDto,
+  SetMaintenanceModeDto,
+} from 'src/dtos/maintenance.dto';
 import { ApiTag, ImmichCookie, MaintenanceAction, Permission } from 'src/enum';
 import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
 import { LoginDetails } from 'src/services/auth.service';
@@ -15,6 +21,27 @@ import { respondWithCookie } from 'src/utils/response';
 export class MaintenanceController {
   constructor(private service: MaintenanceService) {}

+  @Get('status')
+  @Endpoint({
+    summary: 'Get maintenance mode status',
+    description: 'Fetch information about the currently running maintenance action.',
+    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
+  })
+  getMaintenanceStatus(): MaintenanceStatusResponseDto {
+    return this.service.getMaintenanceStatus();
+  }
+
+  @Get('detect-install')
+  @Endpoint({
+    summary: 'Detect existing install',
+    description: 'Collect integrity checks and other heuristics about local data.',
+    history: new HistoryBuilder().added('v2.5.0').alpha('v2.5.0'),
+  })
+  @Authenticated({ permission: Permission.Maintenance, admin: true })
+  detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
+    return this.service.detectPriorInstall();
+  }
+
   @Post('login')
   @Endpoint({
     summary: 'Log into maintenance mode',
@@ -38,8 +65,8 @@ export class MaintenanceController {
     @GetLoginDetails() loginDetails: LoginDetails,
     @Res({ passthrough: true }) res: Response,
   ): Promise<void> {
-    if (dto.action === MaintenanceAction.Start) {
-      const { jwt } = await this.service.startMaintenance(auth.user.name);
+    if (dto.action !== MaintenanceAction.End) {
+      const { jwt } = await this.service.startMaintenance(dto, auth.user.name);
       return respondWithCookie(res, undefined, {
         isSecure: loginDetails.isSecure,
         values: [{ key: ImmichCookie.MaintenanceToken, value: jwt }],
@@ -24,7 +24,13 @@ export interface MoveRequest {
   };
 }

-export type GeneratedImageType = AssetPathType.Preview | AssetPathType.Thumbnail | AssetPathType.FullSize;
+export type GeneratedImageType =
+  | AssetPathType.Preview
+  | AssetPathType.Thumbnail
+  | AssetPathType.FullSize
+  | AssetPathType.EditedPreview
+  | AssetPathType.EditedThumbnail
+  | AssetPathType.EditedFullSize;
 export type GeneratedAssetType = GeneratedImageType | AssetPathType.EncodedVideo;

 export type ThumbnailPathEntity = { id: string; ownerId: string };
@@ -272,6 +272,7 @@ export type AssetFace = {
   person?: Person | null;
   updatedAt: Date;
   updateId: string;
+  isVisible: boolean;
 };

 export type Plugin = Selectable<PluginTable>;
@@ -340,6 +341,8 @@ export const columns = {
     'asset.originalPath',
     'asset.ownerId',
     'asset.type',
+    'asset.width',
+    'asset.height',
   ],
   assetFiles: ['asset_file.id', 'asset_file.path', 'asset_file.type'],
   authUser: ['user.id', 'user.name', 'user.email', 'user.isAdmin', 'user.quotaUsageInBytes', 'user.quotaSizeInBytes'],
@@ -390,6 +393,9 @@ export const columns = {
     'asset.livePhotoVideoId',
     'asset.stackId',
     'asset.libraryId',
+    'asset.width',
+    'asset.height',
+    'asset.isEdited',
   ],
   syncAlbumUser: ['album_user.albumId as albumId', 'album_user.userId as userId', 'album_user.role'],
   syncStack: ['stack.id', 'stack.createdAt', 'stack.updatedAt', 'stack.primaryAssetId', 'stack.ownerId'],
@@ -19,6 +19,9 @@ export enum AssetMediaSize {
 export class AssetMediaOptionsDto {
   @ValidateEnum({ enum: AssetMediaSize, name: 'AssetMediaSize', optional: true })
   size?: AssetMediaSize;
+
+  @ValidateBoolean({ optional: true, default: false })
+  edited?: boolean;
 }

 export enum UploadFieldName {
@@ -78,7 +81,7 @@ export class AssetMediaCreateDto extends AssetMediaBase {
   @Optional()
   @ValidateNested({ each: true })
   @IsArray()
-  metadata!: AssetMetadataUpsertItemDto[];
+  metadata?: AssetMetadataUpsertItemDto[];

   @ApiProperty({ type: 'string', format: 'binary', required: false })
   [UploadFieldName.SIDECAR_DATA]?: any;
@@ -3,6 +3,7 @@ import { Selectable } from 'kysely';
 import { AssetFace, AssetFile, Exif, Stack, Tag, User } from 'src/database';
 import { HistoryBuilder, Property } from 'src/decorators';
 import { AuthDto } from 'src/dtos/auth.dto';
+import { AssetEditActionItem } from 'src/dtos/editing.dto';
 import { ExifResponseDto, mapExif } from 'src/dtos/exif.dto';
 import {
   AssetFaceWithoutPersonResponseDto,
@@ -13,6 +14,8 @@ import {
 import { TagResponseDto, mapTag } from 'src/dtos/tag.dto';
 import { UserResponseDto, mapUser } from 'src/dtos/user.dto';
 import { AssetStatus, AssetType, AssetVisibility } from 'src/enum';
+import { ImageDimensions } from 'src/types';
+import { getDimensions } from 'src/utils/asset.util';
 import { hexOrBufferToBase64 } from 'src/utils/bytes';
 import { mimeTypes } from 'src/utils/mime-types';
 import { ValidateEnum } from 'src/validation';
@@ -34,6 +37,8 @@ export class SanitizedAssetResponseDto {
   duration!: string;
   livePhotoVideoId?: string | null;
   hasMetadata!: boolean;
+  width!: number | null;
+  height!: number | null;
 }

 export class AssetResponseDto extends SanitizedAssetResponseDto {
@@ -93,6 +98,8 @@ export class AssetResponseDto extends SanitizedAssetResponseDto {

   @Property({ history: new HistoryBuilder().added('v1').deprecated('v1.113.0') })
   resized?: boolean;
+  @Property({ history: new HistoryBuilder().added('v2.5.0').beta('v2.5.0') })
+  isEdited!: boolean;
 }

 export type MapAsset = {
@@ -107,6 +114,7 @@ export type MapAsset = {
   deviceId: string;
   duplicateId: string | null;
   duration: string | null;
+  edits?: AssetEditActionItem[];
   encodedVideoPath: string | null;
   exifInfo?: Selectable<Exif> | null;
   faces?: AssetFace[];
@@ -129,6 +137,9 @@ export type MapAsset = {
   tags?: Tag[];
   thumbhash: Buffer<ArrayBufferLike> | null;
   type: AssetType;
+  width: number | null;
+  height: number | null;
+  isEdited: boolean;
 };

 export class AssetStackResponseDto {
@@ -147,7 +158,11 @@ export type AssetMapOptions = {
 };

 // TODO: this is inefficient
-const peopleWithFaces = (faces?: AssetFace[]): PersonWithFacesResponseDto[] => {
+const peopleWithFaces = (
+  faces?: AssetFace[],
+  edits?: AssetEditActionItem[],
+  assetDimensions?: ImageDimensions,
+): PersonWithFacesResponseDto[] => {
   const result: PersonWithFacesResponseDto[] = [];
   if (faces) {
     for (const face of faces) {
@@ -156,7 +171,7 @@ const peopleWithFaces = (faces?: AssetFace[]): PersonWithFacesResponseDto[] => {
       if (existingPersonEntry) {
         existingPersonEntry.faces.push(face);
       } else {
-        result.push({ ...mapPerson(face.person!), faces: [mapFacesWithoutPerson(face)] });
+        result.push({ ...mapPerson(face.person!), faces: [mapFacesWithoutPerson(face, edits, assetDimensions)] });
       }
     }
   }
@@ -190,10 +205,14 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
       duration: entity.duration ?? '0:00:00.00000',
       livePhotoVideoId: entity.livePhotoVideoId,
       hasMetadata: false,
+      width: entity.width,
+      height: entity.height,
     };
     return sanitizedAssetResponse as AssetResponseDto;
   }

+  const assetDimensions = entity.exifInfo ? getDimensions(entity.exifInfo) : undefined;
+
   return {
     id: entity.id,
     createdAt: entity.createdAt,
@@ -219,7 +238,7 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
     exifInfo: entity.exifInfo ? mapExif(entity.exifInfo) : undefined,
     livePhotoVideoId: entity.livePhotoVideoId,
     tags: entity.tags?.map((tag) => mapTag(tag)),
-    people: peopleWithFaces(entity.faces),
+    people: peopleWithFaces(entity.faces, entity.edits, assetDimensions),
     unassignedFaces: entity.faces?.filter((face) => !face.person).map((a) => mapFacesWithoutPerson(a)),
     checksum: hexOrBufferToBase64(entity.checksum)!,
     stack: withStack ? mapStack(entity) : undefined,
@@ -227,5 +246,8 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): AssetResponseDto {
     hasMetadata: true,
     duplicateId: entity.duplicateId,
     resized: true,
+    width: entity.width,
+    height: entity.height,
+    isEdited: entity.isEdited,
   };
 }
@@ -17,9 +17,9 @@ import {
   ValidateNested,
 } from 'class-validator';
 import { BulkIdsDto } from 'src/dtos/asset-ids.response.dto';
-import { AssetMetadataKey, AssetType, AssetVisibility } from 'src/enum';
+import { AssetType, AssetVisibility } from 'src/enum';
 import { AssetStats } from 'src/repositories/asset.repository';
-import { IsNotSiblingOf, Optional, ValidateBoolean, ValidateEnum, ValidateUUID } from 'src/validation';
+import { IsNotSiblingOf, Optional, ValidateBoolean, ValidateEnum, ValidateString, ValidateUUID } from 'src/validation';

 export class DeviceIdDto {
   @IsNotEmpty()
@@ -142,8 +142,8 @@ export class AssetMetadataRouteParams {
   @ValidateUUID()
   id!: string;

-  @ValidateEnum({ enum: AssetMetadataKey, name: 'AssetMetadataKey' })
-  key!: AssetMetadataKey;
+  @ValidateString()
+  key!: string;
 }

 export class AssetMetadataUpsertDto {
@@ -154,26 +154,57 @@ export class AssetMetadataUpsertDto {
 }

 export class AssetMetadataUpsertItemDto {
-  @ValidateEnum({ enum: AssetMetadataKey, name: 'AssetMetadataKey' })
-  key!: AssetMetadataKey;
+  @ValidateString()
+  key!: string;

   @IsObject()
   value!: object;
 }

-export class AssetMetadataMobileAppDto {
-  @IsString()
-  @Optional()
-  iCloudId?: string;
+export class AssetMetadataBulkUpsertDto {
+  @IsArray()
+  @ValidateNested({ each: true })
+  @Type(() => AssetMetadataBulkUpsertItemDto)
+  items!: AssetMetadataBulkUpsertItemDto[];
+}
+
+export class AssetMetadataBulkUpsertItemDto {
+  @ValidateUUID()
+  assetId!: string;
+
+  @ValidateString()
+  key!: string;
+
+  @IsObject()
+  value!: object;
+}
+
+export class AssetMetadataBulkDeleteDto {
+  @IsArray()
+  @ValidateNested({ each: true })
+  @Type(() => AssetMetadataBulkDeleteItemDto)
+  items!: AssetMetadataBulkDeleteItemDto[];
+}
+
+export class AssetMetadataBulkDeleteItemDto {
+  @ValidateUUID()
+  assetId!: string;
+
+  @ValidateString()
+  key!: string;
 }

 export class AssetMetadataResponseDto {
-  @ValidateEnum({ enum: AssetMetadataKey, name: 'AssetMetadataKey' })
-  key!: AssetMetadataKey;
+  @ValidateString()
+  key!: string;
   value!: object;
   updatedAt!: Date;
 }
+
+export class AssetMetadataBulkResponseDto extends AssetMetadataResponseDto {
+  assetId!: string;
+}

 export class AssetCopyDto {
   @ValidateUUID()
   sourceId!: string;
@@ -197,6 +228,11 @@ export class AssetCopyDto {
   favorite?: boolean;
 }

+export class AssetDownloadOriginalDto {
+  @ValidateBoolean({ optional: true, default: false })
+  edited?: boolean;
+}
+
 export const mapStats = (stats: AssetStats): AssetStatsResponseDto => {
   return {
     images: stats[AssetType.Image],
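AssetDownloadOriginalDto feeds the @Query() parameter added to downloadAsset in the asset-media controller above, so a client can ask for the edited rendition of an original. A hedged sketch (the route shape is assumed from the controller change, not spelled out in this diff):

// Download the edited version of an asset's original file; with
// edited=false or the flag omitted, the unedited original is served.
const baseUrl = 'http://localhost:2283/api'; // placeholder
const assetId = '<asset-uuid>'; // placeholder
const res = await fetch(`${baseUrl}/assets/${assetId}/original?edited=true`, {
  headers: { 'x-api-key': '<api-key>' },
});
const bytes = await res.arrayBuffer();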
server/src/dtos/database-backup.dto.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
+import { ApiProperty } from '@nestjs/swagger';
+import { IsString } from 'class-validator';
+
+export class DatabaseBackupDto {
+  filename!: string;
+  filesize!: number;
+}
+
+export class DatabaseBackupListResponseDto {
+  backups!: DatabaseBackupDto[];
+}
+
+export class DatabaseBackupUploadDto {
+  @ApiProperty({ type: 'string', format: 'binary', required: false })
+  file?: any;
+}
+
+export class DatabaseBackupDeleteDto {
+  @IsString({ each: true })
+  backups!: string[];
+}
server/src/dtos/editing.dto.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
+import { ApiExtraModels, ApiProperty, getSchemaPath } from '@nestjs/swagger';
+import { ClassConstructor, plainToInstance, Transform, Type } from 'class-transformer';
+import { ArrayMinSize, IsEnum, IsInt, Min, ValidateNested } from 'class-validator';
+import { IsAxisAlignedRotation, IsUniqueEditActions, ValidateUUID } from 'src/validation';
+
+export enum AssetEditAction {
+  Crop = 'crop',
+  Rotate = 'rotate',
+  Mirror = 'mirror',
+}
+
+export enum MirrorAxis {
+  Horizontal = 'horizontal',
+  Vertical = 'vertical',
+}
+
+export class CropParameters {
+  @IsInt()
+  @Min(0)
+  @ApiProperty({ description: 'Top-Left X coordinate of crop' })
+  x!: number;
+
+  @IsInt()
+  @Min(0)
+  @ApiProperty({ description: 'Top-Left Y coordinate of crop' })
+  y!: number;
+
+  @IsInt()
+  @Min(1)
+  @ApiProperty({ description: 'Width of the crop' })
+  width!: number;
+
+  @IsInt()
+  @Min(1)
+  @ApiProperty({ description: 'Height of the crop' })
+  height!: number;
+}
+
+export class RotateParameters {
+  @IsAxisAlignedRotation()
+  @ApiProperty({ description: 'Rotation angle in degrees' })
+  angle!: number;
+}
+
+export class MirrorParameters {
+  @IsEnum(MirrorAxis)
+  @ApiProperty({ enum: MirrorAxis, enumName: 'MirrorAxis', description: 'Axis to mirror along' })
+  axis!: MirrorAxis;
+}
+
+class AssetEditActionBase {
+  @IsEnum(AssetEditAction)
+  @ApiProperty({ enum: AssetEditAction, enumName: 'AssetEditAction' })
+  action!: AssetEditAction;
+}
+
+export class AssetEditActionCrop extends AssetEditActionBase {
+  @ValidateNested()
+  @Type(() => CropParameters)
+  @ApiProperty({ type: CropParameters })
+  parameters!: CropParameters;
+}
+
+export class AssetEditActionRotate extends AssetEditActionBase {
+  @ValidateNested()
+  @Type(() => RotateParameters)
+  @ApiProperty({ type: RotateParameters })
+  parameters!: RotateParameters;
+}
+
+export class AssetEditActionMirror extends AssetEditActionBase {
+  @ValidateNested()
+  @Type(() => MirrorParameters)
+  @ApiProperty({ type: MirrorParameters })
+  parameters!: MirrorParameters;
+}
+
+export type AssetEditActionItem =
+  | {
+      action: AssetEditAction.Crop;
+      parameters: CropParameters;
+    }
+  | {
+      action: AssetEditAction.Rotate;
+      parameters: RotateParameters;
+    }
+  | {
+      action: AssetEditAction.Mirror;
+      parameters: MirrorParameters;
+    };
+
+export type AssetEditActionParameter = {
+  [AssetEditAction.Crop]: CropParameters;
+  [AssetEditAction.Rotate]: RotateParameters;
+  [AssetEditAction.Mirror]: MirrorParameters;
+};
+
+type AssetEditActions = AssetEditActionCrop | AssetEditActionRotate | AssetEditActionMirror;
+const actionToClass: Record<AssetEditAction, ClassConstructor<AssetEditActions>> = {
+  [AssetEditAction.Crop]: AssetEditActionCrop,
+  [AssetEditAction.Rotate]: AssetEditActionRotate,
+  [AssetEditAction.Mirror]: AssetEditActionMirror,
+} as const;
+
+const getActionClass = (item: { action: AssetEditAction }): ClassConstructor<AssetEditActions> =>
+  actionToClass[item.action];
+
+@ApiExtraModels(AssetEditActionRotate, AssetEditActionMirror, AssetEditActionCrop)
+export class AssetEditActionListDto {
+  /** list of edits */
+  @ArrayMinSize(1)
+  @IsUniqueEditActions()
+  @ValidateNested({ each: true })
+  @Transform(({ value: edits }) =>
+    Array.isArray(edits) ? edits.map((item) => plainToInstance(getActionClass(item), item)) : edits,
+  )
+  @ApiProperty({ anyOf: Object.values(actionToClass).map((target) => ({ $ref: getSchemaPath(target) })) })
+  edits!: AssetEditActionItem[];
+}
+
+export class AssetEditsDto extends AssetEditActionListDto {
+  @ValidateUUID()
+  @ApiProperty()
+  assetId!: string;
+}
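The @Transform on edits is what makes the polymorphic array validate correctly: each raw item is first instantiated as its action-specific class, so @ValidateNested applies crop rules to crops, rotation rules to rotations, and so on. A hedged sketch of the same mechanism driven directly (assumes the DTO is importable as shown; IsAxisAlignedRotation, going by its name, should reject angles that are not 90-degree steps):

import { plainToInstance } from 'class-transformer';
import { validate } from 'class-validator';
import { AssetEditActionListDto } from 'src/dtos/editing.dto';

async function demo(): Promise<void> {
  const dto = plainToInstance(AssetEditActionListDto, {
    edits: [
      { action: 'crop', parameters: { x: 0, y: 0, width: 100, height: 100 } },
      { action: 'mirror', parameters: { axis: 'horizontal' } },
    ],
  });
  const errors = await validate(dto);
  console.log(errors.length); // 0 for the payload above
}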
@@ -1,6 +1,6 @@
 import { Transform, Type } from 'class-transformer';
 import { IsEnum, IsInt, IsString, Matches } from 'class-validator';
-import { DatabaseSslMode, ImmichEnvironment, LogLevel } from 'src/enum';
+import { DatabaseSslMode, ImmichEnvironment, LogFormat, LogLevel } from 'src/enum';
 import { IsIPRange, Optional, ValidateBoolean } from 'src/validation';

 export class EnvDto {
@@ -48,6 +48,10 @@ export class EnvDto {
   @Optional()
   IMMICH_LOG_LEVEL?: LogLevel;

+  @IsEnum(LogFormat)
+  @Optional()
+  IMMICH_LOG_FORMAT?: LogFormat;
+
   @Optional()
   @Matches(/^\//, { message: 'IMMICH_MEDIA_LOCATION must be an absolute path' })
   IMMICH_MEDIA_LOCATION?: string;
@@ -58,7 +62,7 @@ export class EnvDto {
   IMMICH_MICROSERVICES_METRICS_PORT?: number;

   @ValidateBoolean({ optional: true })
-  IMMICH_PLUGINS_ENABLED?: boolean;
+  IMMICH_ALLOW_EXTERNAL_PLUGINS?: boolean;

   @Optional()
   @Matches(/^\//, { message: 'IMMICH_PLUGINS_INSTALL_FOLDER must be an absolute path' })
@@ -113,6 +117,9 @@ export class EnvDto {
   @Optional()
   IMMICH_THIRD_PARTY_SUPPORT_URL?: string;

+  @ValidateBoolean({ optional: true })
+  IMMICH_ALLOW_SETUP?: boolean;
+
   @IsIPRange({ requireCIDR: false }, { each: true })
   @Transform(({ value }) =>
     value && typeof value === 'string'
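The renamed flag (IMMICH_PLUGINS_ENABLED becomes IMMICH_ALLOW_EXTERNAL_PLUGINS) and the new IMMICH_LOG_FORMAT and IMMICH_ALLOW_SETUP options all flow through this EnvDto validation. An illustrative reading of the new log-format flag (the console fallback is an assumption, not confirmed by this diff):

import { LogFormat } from 'src/enum';

// 'json' selects structured log output; an unset value falls back to console here.
const logFormat = (process.env.IMMICH_LOG_FORMAT as LogFormat | undefined) ?? LogFormat.Console;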
@@ -1,9 +1,14 @@
-import { MaintenanceAction } from 'src/enum';
+import { ValidateIf } from 'class-validator';
+import { MaintenanceAction, StorageFolder } from 'src/enum';
 import { ValidateEnum, ValidateString } from 'src/validation';

 export class SetMaintenanceModeDto {
   @ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
   action!: MaintenanceAction;
+
+  @ValidateIf((o) => o.action === MaintenanceAction.RestoreDatabase)
+  @ValidateString()
+  restoreBackupFilename?: string;
 }

 export class MaintenanceLoginDto {
@@ -14,3 +19,26 @@ export class MaintenanceLoginDto {
 export class MaintenanceAuthDto {
   username!: string;
 }
+
+export class MaintenanceStatusResponseDto {
+  active!: boolean;
+
+  @ValidateEnum({ enum: MaintenanceAction, name: 'MaintenanceAction' })
+  action!: MaintenanceAction;
+
+  progress?: number;
+  task?: string;
+  error?: string;
+}
+
+export class MaintenanceDetectInstallStorageFolderDto {
+  @ValidateEnum({ enum: StorageFolder, name: 'StorageFolder' })
+  folder!: StorageFolder;
+  readable!: boolean;
+  writable!: boolean;
+  files!: number;
+}
+
+export class MaintenanceDetectInstallResponseDto {
+  storage!: MaintenanceDetectInstallStorageFolderDto[];
+}
@@ -6,9 +6,12 @@ import { DateTime } from 'luxon';
 import { AssetFace, Person } from 'src/database';
 import { HistoryBuilder, Property } from 'src/decorators';
 import { AuthDto } from 'src/dtos/auth.dto';
+import { AssetEditActionItem } from 'src/dtos/editing.dto';
 import { SourceType } from 'src/enum';
 import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
+import { ImageDimensions } from 'src/types';
 import { asDateString } from 'src/utils/date';
+import { transformFaceBoundingBox } from 'src/utils/transform';
 import {
   IsDateStringFormat,
   MaxDateString,
@@ -233,29 +236,37 @@ export function mapPerson(person: Person): PersonResponseDto {
   };
 }

-export function mapFacesWithoutPerson(face: Selectable<AssetFaceTable>): AssetFaceWithoutPersonResponseDto {
+export function mapFacesWithoutPerson(
+  face: Selectable<AssetFaceTable>,
+  edits?: AssetEditActionItem[],
+  assetDimensions?: ImageDimensions,
+): AssetFaceWithoutPersonResponseDto {
   return {
     id: face.id,
     imageHeight: face.imageHeight,
     imageWidth: face.imageWidth,
-    boundingBoxX1: face.boundingBoxX1,
-    boundingBoxX2: face.boundingBoxX2,
-    boundingBoxY1: face.boundingBoxY1,
-    boundingBoxY2: face.boundingBoxY2,
+    ...transformFaceBoundingBox(
+      {
+        boundingBoxX1: face.boundingBoxX1,
+        boundingBoxY1: face.boundingBoxY1,
+        boundingBoxX2: face.boundingBoxX2,
+        boundingBoxY2: face.boundingBoxY2,
+        imageWidth: face.imageWidth,
+        imageHeight: face.imageHeight,
+      },
+      edits ?? [],
+      assetDimensions ?? { width: face.imageWidth, height: face.imageHeight },
+    ),
     sourceType: face.sourceType,
   };
 }

-export function mapFaces(face: AssetFace, auth: AuthDto): AssetFaceResponseDto {
+export function mapFaces(
+  face: AssetFace,
+  auth: AuthDto,
+  edits?: AssetEditActionItem[],
+  assetDimensions?: ImageDimensions,
+): AssetFaceResponseDto {
   return {
-    id: face.id,
-    imageHeight: face.imageHeight,
-    imageWidth: face.imageWidth,
-    boundingBoxX1: face.boundingBoxX1,
-    boundingBoxX2: face.boundingBoxX2,
-    boundingBoxY1: face.boundingBoxY1,
-    boundingBoxY2: face.boundingBoxY2,
-    sourceType: face.sourceType,
+    ...mapFacesWithoutPerson(face, edits, assetDimensions),
     person: face.person?.ownerId === auth.user.id ? mapPerson(face.person) : null,
   };
 }
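Face boxes are detected against the original pixels, so once an asset carries edits the stored coordinates must be pushed through the same crop/rotate/mirror chain before they are returned; that is what the transformFaceBoundingBox call above delegates to (its implementation lives in src/utils/transform and is not part of this diff). A hypothetical sketch of the crop case alone, to illustrate the idea:

interface Box { x1: number; y1: number; x2: number; y2: number }
interface Crop { x: number; y: number; width: number; height: number }

// Shift a face box into the crop's coordinate space and clamp it to the
// crop bounds; a face fully outside the crop collapses to a zero-area box.
function remapBoxThroughCrop(box: Box, crop: Crop): Box {
  const clamp = (v: number, lo: number, hi: number) => Math.min(Math.max(v, lo), hi);
  return {
    x1: clamp(box.x1 - crop.x, 0, crop.width),
    y1: clamp(box.y1 - crop.y, 0, crop.height),
    x2: clamp(box.x2 - crop.x, 0, crop.width),
    y2: clamp(box.y2 - crop.y, 0, crop.height),
  };
}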
@@ -66,6 +66,9 @@ export class QueuesResponseLegacyDto implements Record<QueueName, QueueResponseLegacyDto> {

   @ApiProperty({ type: QueueResponseLegacyDto })
   [QueueName.Workflow]!: QueueResponseLegacyDto;
+
+  @ApiProperty({ type: QueueResponseLegacyDto })
+  [QueueName.Editor]!: QueueResponseLegacyDto;
 }

 export const mapQueueLegacy = (response: QueueResponseDto): QueueResponseLegacyDto => {
@@ -1,7 +1,7 @@
 import { ApiProperty } from '@nestjs/swagger';
 import { IsString } from 'class-validator';
-import _ from 'lodash';
 import { SharedLink } from 'src/database';
+import { HistoryBuilder, Property } from 'src/decorators';
 import { AlbumResponseDto, mapAlbumWithoutAssets } from 'src/dtos/album.dto';
 import { AssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
 import { SharedLinkType } from 'src/enum';
@@ -10,6 +10,10 @@ import { Optional, ValidateBoolean, ValidateDate, ValidateEnum, ValidateUUID } from 'src/validation';
 export class SharedLinkSearchDto {
   @ValidateUUID({ optional: true })
   albumId?: string;
+
+  @ValidateUUID({ optional: true })
+  @Property({ history: new HistoryBuilder().added('v2.5.0') })
+  id?: string;
 }

 export class SharedLinkCreateDto {
@@ -113,10 +117,10 @@ export class SharedLinkResponseDto {
   slug!: string | null;
 }

-export function mapSharedLink(sharedLink: SharedLink): SharedLinkResponseDto {
-  const linkAssets = sharedLink.assets || [];
+export function mapSharedLink(sharedLink: SharedLink, options: { stripAssetMetadata: boolean }): SharedLinkResponseDto {
+  const assets = sharedLink.assets || [];

-  return {
+  const response = {
     id: sharedLink.id,
     description: sharedLink.description,
     password: sharedLink.password,
@@ -125,35 +129,19 @@ export function mapSharedLink(sharedLink: SharedLink, options: { stripAssetMetadata: boolean }): SharedLinkResponseDto {
     type: sharedLink.type,
     createdAt: sharedLink.createdAt,
     expiresAt: sharedLink.expiresAt,
-    assets: linkAssets.map((asset) => mapAsset(asset)),
-    album: sharedLink.album ? mapAlbumWithoutAssets(sharedLink.album) : undefined,
-    allowUpload: sharedLink.allowUpload,
-    allowDownload: sharedLink.allowDownload,
-    showMetadata: sharedLink.showExif,
-    slug: sharedLink.slug,
-  };
-}
-
-export function mapSharedLinkWithoutMetadata(sharedLink: SharedLink): SharedLinkResponseDto {
-  const linkAssets = sharedLink.assets || [];
-  const albumAssets = (sharedLink?.album?.assets || []).map((asset) => asset);
-
-  const assets = _.uniqBy([...linkAssets, ...albumAssets], (asset) => asset.id);
-
-  return {
-    id: sharedLink.id,
-    description: sharedLink.description,
-    password: sharedLink.password,
-    userId: sharedLink.userId,
-    key: sharedLink.key.toString('base64url'),
-    type: sharedLink.type,
-    createdAt: sharedLink.createdAt,
-    expiresAt: sharedLink.expiresAt,
-    assets: assets.map((asset) => mapAsset(asset, { stripMetadata: true })),
+    assets: assets.map((asset) => mapAsset(asset, { stripMetadata: options.stripAssetMetadata })),
     album: sharedLink.album ? mapAlbumWithoutAssets(sharedLink.album) : undefined,
     allowUpload: sharedLink.allowUpload,
     allowDownload: sharedLink.allowDownload,
     showMetadata: sharedLink.showExif,
     slug: sharedLink.slug,
   };

+  // unless we select sharedLink.album.sharedLinks this will be wrong
+  if (response.album) {
+    response.album.hasSharedLink = true;
+    response.album.shared = true;
+  }
+
+  return response;
 }
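The former mapSharedLinkWithoutMetadata is gone; its metadata stripping becomes an option on the single mapper, so call sites change roughly along these lines (hypothetical, the actual callers are not in this hunk):

const full = mapSharedLink(sharedLink, { stripAssetMetadata: false });
const sanitized = mapSharedLink(sharedLink, { stripAssetMetadata: true }); // old mapSharedLinkWithoutMetadata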
@@ -4,7 +4,6 @@ import { ArrayMaxSize, IsInt, IsPositive, IsString } from 'class-validator';
 import { AssetResponseDto } from 'src/dtos/asset-response.dto';
 import {
   AlbumUserRole,
-  AssetMetadataKey,
   AssetOrder,
   AssetType,
   AssetVisibility,
@@ -118,6 +117,12 @@ export class SyncAssetV1 {
   livePhotoVideoId!: string | null;
   stackId!: string | null;
   libraryId!: string | null;
+  @ApiProperty({ type: 'integer' })
+  width!: number | null;
+  @ApiProperty({ type: 'integer' })
+  height!: number | null;
+  @ApiProperty({ type: 'boolean' })
+  isEdited!: boolean;
 }

 @ExtraModel()
@@ -167,16 +172,14 @@ export class SyncAssetExifV1 {
 @ExtraModel()
 export class SyncAssetMetadataV1 {
   assetId!: string;
-  @ValidateEnum({ enum: AssetMetadataKey, name: 'AssetMetadataKey' })
-  key!: AssetMetadataKey;
+  key!: string;
   value!: object;
 }

 @ExtraModel()
 export class SyncAssetMetadataDeleteV1 {
   assetId!: string;
-  @ValidateEnum({ enum: AssetMetadataKey, name: 'AssetMetadataKey' })
-  key!: AssetMetadataKey;
+  key!: string;
 }

 @ExtraModel()
@@ -230,6 +230,12 @@ class SystemConfigJobDto implements Record<ConcurrentQueueName, JobSettingsDto> {
   @IsObject()
   @Type(() => JobSettingsDto)
   [QueueName.Workflow]!: JobSettingsDto;
+
+  @ApiProperty({ type: JobSettingsDto })
+  @ValidateNested()
+  @IsObject()
+  @Type(() => JobSettingsDto)
+  [QueueName.Editor]!: JobSettingsDto;
 }

 class SystemConfigLibraryScanDto {
@@ -45,6 +45,9 @@ export enum AssetFileType {
Preview = 'preview',
Thumbnail = 'thumbnail',
Sidecar = 'sidecar',
FullSizeEdited = 'fullsize_edited',
PreviewEdited = 'preview_edited',
ThumbnailEdited = 'thumbnail_edited',
}

export enum AlbumUserRole {
@@ -106,6 +109,11 @@ export enum Permission {
AssetUpload = 'asset.upload',
AssetReplace = 'asset.replace',
AssetCopy = 'asset.copy',
AssetDerive = 'asset.derive',

AssetEditGet = 'asset.edit.get',
AssetEditCreate = 'asset.edit.create',
AssetEditDelete = 'asset.edit.delete',

AlbumCreate = 'album.create',
AlbumRead = 'album.read',
@@ -128,6 +136,11 @@ export enum Permission {

ArchiveRead = 'archive.read',

BackupList = 'backup.list',
BackupDownload = 'backup.download',
BackupUpload = 'backup.upload',
BackupDelete = 'backup.delete',

DuplicateRead = 'duplicate.read',
DuplicateDelete = 'duplicate.delete',

@@ -358,6 +371,9 @@ export enum AssetPathType {
Original = 'original',
FullSize = 'fullsize',
Preview = 'preview',
EditedFullSize = 'edited_fullsize',
EditedPreview = 'edited_preview',
EditedThumbnail = 'edited_thumbnail',
Thumbnail = 'thumbnail',
EncodedVideo = 'encoded_video',
Sidecar = 'sidecar',
@@ -454,6 +470,11 @@ export enum LogLevel {
Fatal = 'fatal',
}

export enum LogFormat {
Console = 'console',
Json = 'json',
}

export enum ApiCustomExtension {
Permission = 'x-immich-permission',
AdminOnly = 'x-immich-admin-only',
@@ -550,6 +571,7 @@ export enum QueueName {
BackupDatabase = 'backupDatabase',
Ocr = 'ocr',
Workflow = 'workflow',
Editor = 'editor',
}

export enum QueueJobStatus {
@@ -568,6 +590,7 @@ export enum JobName {
AssetDetectFaces = 'AssetDetectFaces',
AssetDetectDuplicatesQueueAll = 'AssetDetectDuplicatesQueueAll',
AssetDetectDuplicates = 'AssetDetectDuplicates',
AssetEditThumbnailGeneration = 'AssetEditThumbnailGeneration',
AssetEncodeVideoQueueAll = 'AssetEncodeVideoQueueAll',
AssetEncodeVideo = 'AssetEncodeVideo',
AssetEmptyTrash = 'AssetEmptyTrash',
@@ -679,12 +702,15 @@ export enum DatabaseLock {
MediaLocation = 700,
GetSystemConfig = 69,
BackupDatabase = 42,
MaintenanceOperation = 621,
MemoryCreation = 777,
}

export enum MaintenanceAction {
Start = 'start',
End = 'end',
SelectDatabaseRestore = 'select_database_restore',
RestoreDatabase = 'restore_database',
}

export enum ExitCode {
@@ -831,6 +857,7 @@ export enum ApiTag {
Authentication = 'Authentication',
AuthenticationAdmin = 'Authentication (admin)',
Assets = 'Assets',
DatabaseBackups = 'Database Backups (admin)',
Deprecated = 'Deprecated',
Download = 'Download',
Duplicates = 'Duplicates',

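Reviewer note: several enums gain members in these hunks (edited asset file types, asset-edit permissions, LogFormat, the Editor queue, MaintenanceAction). Wherever a switch must cover every member, a never-typed default arm turns a forgotten case into a compile error; a sketch using the MaintenanceAction members shown above:

enum MaintenanceAction {
  Start = 'start',
  End = 'end',
  SelectDatabaseRestore = 'select_database_restore',
  RestoreDatabase = 'restore_database',
}

function describeAction(action: MaintenanceAction): string {
  switch (action) {
    case MaintenanceAction.Start:
      return 'entering maintenance mode';
    case MaintenanceAction.End:
      return 'leaving maintenance mode';
    case MaintenanceAction.SelectDatabaseRestore:
      return 'waiting for a backup to be selected';
    case MaintenanceAction.RestoreDatabase:
      return 'restoring a database backup';
    default: {
      // Adding a fifth member makes this assignment a type error.
      const unreachable: never = action;
      return unreachable;
    }
  }
}
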
@@ -1,11 +1,11 @@
import { Kysely } from 'kysely';
import { Kysely, sql } from 'kysely';
import { CommandFactory } from 'nest-commander';
import { ChildProcess, fork } from 'node:child_process';
import { dirname, join } from 'node:path';
import { Worker } from 'node:worker_threads';
import { PostgresError } from 'postgres';
import { ImmichAdminModule } from 'src/app.module';
import { ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
import { DatabaseLock, ExitCode, ImmichWorker, LogLevel, SystemMetadataKey } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { type DB } from 'src/schema';
@@ -35,19 +35,18 @@ class Workers {
if (isMaintenanceMode) {
this.startWorker(ImmichWorker.Maintenance);
} else {
await this.waitForFreeLock();

for (const worker of workers) {
this.startWorker(worker);
}
}
}

/**
* Initialise a short-lived Nest application to build configuration
* @returns System configuration
*/
private async isMaintenanceMode(): Promise<boolean> {
const { database } = new ConfigRepository().getEnv();
const kysely = new Kysely<DB>(getKyselyConfig(database.config));
const { log: _, ...kyselyConfig } = getKyselyConfig(database.config);
const kysely = new Kysely<DB>(kyselyConfig);
const systemMetadataRepository = new SystemMetadataRepository(kysely);

try {
@@ -65,6 +64,32 @@ class Workers {
}
}

private async waitForFreeLock() {
const { database } = new ConfigRepository().getEnv();
const kysely = new Kysely<DB>(getKyselyConfig(database.config));

let locked = false;
while (!locked) {
locked = await kysely.connection().execute(async (conn) => {
const { rows } = await sql<{
pg_try_advisory_lock: boolean;
}>`SELECT pg_try_advisory_lock(${DatabaseLock.MaintenanceOperation})`.execute(conn);

const isLocked = rows[0].pg_try_advisory_lock;

if (isLocked) {
await sql`SELECT pg_advisory_unlock(${DatabaseLock.MaintenanceOperation})`.execute(conn);
}

return isLocked;
});

await new Promise((resolve) => setTimeout(resolve, 1000));
}

await kysely.destroy();
}

/**
* Start an individual worker
* @param name Worker

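Reviewer note: waitForFreeLock probes pg_try_advisory_lock and releases it immediately, so the regular workers only boot once the maintenance worker has dropped DatabaseLock.MaintenanceOperation. One observation: as written, the loop also sleeps one second on the iteration that finds the lock free, which delays startup slightly but is otherwise harmless. The probe in isolation, assuming any Kysely handle (621 matches the enum value above):

import { Kysely, sql } from 'kysely';

// Session-level advisory locks persist until unlocked or the session ends,
// so the probe must release what it just acquired before reporting "free".
async function isMaintenanceLockFree(db: Kysely<any>): Promise<boolean> {
  return db.connection().execute(async (conn) => {
    const { rows } = await sql<{
      pg_try_advisory_lock: boolean;
    }>`SELECT pg_try_advisory_lock(621)`.execute(conn);

    const acquired = rows[0].pg_try_advisory_lock;
    if (acquired) {
      await sql`SELECT pg_advisory_unlock(621)`.execute(conn);
    }

    return acquired;
  });
}
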
server/src/maintenance/maintenance-health.repository.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
import { Injectable } from '@nestjs/common';
import { fork } from 'node:child_process';
import { dirname, join } from 'node:path';

@Injectable()
export class MaintenanceHealthRepository {
checkApiHealth(): Promise<void> {
return new Promise<void>((resolve, reject) => {
// eslint-disable-next-line unicorn/prefer-module
const basePath = dirname(__filename);
const workerFile = join(basePath, '..', 'workers', `api.js`);

const worker = fork(workerFile, [], {
execArgv: process.execArgv.filter((arg) => !arg.startsWith('--inspect')),
env: {
...process.env,
IMMICH_HOST: '127.0.0.1',
IMMICH_PORT: '33001',
},
stdio: ['ignore', 'pipe', 'ignore', 'ipc'],
});

async function checkHealth() {
try {
const response = await fetch('http://127.0.0.1:33001/api/server/config');
const { isOnboarded } = await response.json();
if (isOnboarded) {
resolve();
} else {
reject(new Error('Server health check failed, no admin exists.'));
}
} catch (error) {
reject(error);
} finally {
if (worker.exitCode === null) {
worker.kill('SIGTERM');
}
}
}

let output = '',
alive = false;

worker.stdout?.on('data', (data) => {
if (alive) {
return;
}

output += data;

if (output.includes('Immich Server is listening')) {
alive = true;
void checkHealth();
}
});

worker.on('exit', reject);
worker.on('error', reject);

setTimeout(() => {
if (worker.exitCode === null) {
worker.kill('SIGTERM');
}
}, 20_000);
});
}
}
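
Reviewer note: checkApiHealth boots a throwaway API worker on 127.0.0.1:33001, waits for the 'Immich Server is listening' line on stdout, probes /api/server/config, and always tears the child down. A reduced sketch of the stdout-gated readiness wait, with the worker path and marker string as placeholder assumptions:

import { fork } from 'node:child_process';

function waitForListening(workerFile: string, marker: string, timeoutMs: number): Promise<void> {
  return new Promise((resolve, reject) => {
    const worker = fork(workerFile, [], { stdio: ['ignore', 'pipe', 'ignore', 'ipc'] });
    let output = '';

    worker.stdout?.on('data', (data) => {
      output += data;
      if (output.includes(marker)) {
        worker.kill('SIGTERM');
        resolve(); // later exit/error events are no-ops on a settled promise
      }
    });

    worker.on('exit', () => reject(new Error('worker exited before becoming ready')));
    worker.on('error', reject);

    // Hard stop so a hung worker cannot block the maintenance flow forever.
    setTimeout(() => worker.kill('SIGTERM'), timeoutMs);
  });
}
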
@@ -7,17 +7,24 @@ import {
WebSocketServer,
} from '@nestjs/websockets';
import { Server, Socket } from 'socket.io';
import { MaintenanceAuthDto, MaintenanceStatusResponseDto } from 'src/dtos/maintenance.dto';
import { AppRepository } from 'src/repositories/app.repository';
import { AppRestartEvent, ArgsOf } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';

export const serverEvents = ['AppRestart'] as const;
export type ServerEvents = (typeof serverEvents)[number];

export interface ClientEventMap {
AppRestartV1: [AppRestartEvent];
interface ServerEventMap {
AppRestart: [AppRestartEvent];
MaintenanceStatus: [MaintenanceStatusResponseDto];
}

interface ClientEventMap {
AppRestartV1: [AppRestartEvent];
MaintenanceStatusV1: [MaintenanceStatusResponseDto];
}

type AuthFn = (client: Socket) => Promise<MaintenanceAuthDto>;
type StatusUpdateFn = (status: MaintenanceStatusResponseDto) => void;

@WebSocketGateway({
cors: true,
path: '/api/socket.io',
@@ -25,8 +32,11 @@ export interface ClientEventMap {
})
@Injectable()
export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGatewayDisconnect, OnGatewayInit {
private authFn?: AuthFn;
private statusUpdateFn?: StatusUpdateFn;

@WebSocketServer()
private websocketServer?: Server;
private server?: Server;

constructor(
private logger: LoggingRepository,
@@ -35,10 +45,10 @@ export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGa
this.logger.setContext(MaintenanceWebsocketRepository.name);
}

afterInit(websocketServer: Server) {
afterInit(server: Server) {
this.logger.log('Initialized websocket server');

websocketServer.on('AppRestart', (event: ArgsOf<'AppRestart'>, ack?: (ok: 'ok') => void) => {
server.on('MaintenanceStatus', (status) => this.statusUpdateFn?.(status));
server.on('AppRestart', (event: ArgsOf<'AppRestart'>, ack?: (ok: 'ok') => void) => {
this.logger.log(`Restarting due to event... ${JSON.stringify(event)}`);

ack?.('ok');
@@ -46,20 +56,40 @@ export class MaintenanceWebsocketRepository implements OnGatewayConnection, OnGa
});
}

clientSend<T extends keyof ClientEventMap>(event: T, room: string, ...data: ClientEventMap[T]) {
this.server?.to(room).emit(event, ...data);
}

clientBroadcast<T extends keyof ClientEventMap>(event: T, ...data: ClientEventMap[T]) {
this.websocketServer?.emit(event, ...data);
this.server?.emit(event, ...data);
}

serverSend<T extends ServerEvents>(event: T, ...args: ArgsOf<T>): void {
serverSend<T extends keyof ServerEventMap>(event: T, ...args: ServerEventMap[T]): void {
this.logger.debug(`Server event: ${event} (send)`);
this.websocketServer?.serverSideEmit(event, ...args);
this.server?.serverSideEmit(event, ...args);
}

handleConnection(client: Socket) {
this.logger.log(`Websocket Connect: ${client.id}`);
async handleConnection(client: Socket) {
try {
await this.authFn!(client);
await client.join('private');
this.logger.log(`Websocket Connect: ${client.id} (private)`);
} catch {
await client.join('public');
this.logger.log(`Websocket Connect: ${client.id} (public)`);
}
}

handleDisconnect(client: Socket) {
async handleDisconnect(client: Socket) {
this.logger.log(`Websocket Disconnect: ${client.id}`);
await Promise.allSettled([client.leave('private'), client.leave('public')]);
}

setAuthFn(fn: (client: Socket) => Promise<MaintenanceAuthDto>) {
this.authFn = fn;
}

setStatusUpdateFn(fn: (status: MaintenanceStatusResponseDto) => void) {
this.statusUpdateFn = fn;
}
}

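Reviewer note: the gateway now keeps two event maps, ServerEventMap for serverSideEmit between instances and ClientEventMap for versioned V1 events to browsers, and constrains each send helper with keyof. The typing trick in isolation, against a generic emitter interface:

interface ClientEventMap {
  AppRestartV1: [{ isMaintenanceMode: boolean }];
  MaintenanceStatusV1: [{ active: boolean; action: string }];
}

interface Emitter {
  emit(event: string, ...args: unknown[]): void;
}

// The tuple in the map becomes the rest-parameter type of send(), so
// send(e, 'AppRestartV1', { isMaintenanceMode: true }) type-checks while
// send(e, 'AppRestartV1', 'oops') does not.
function send<T extends keyof ClientEventMap>(emitter: Emitter, event: T, ...data: ClientEventMap[T]): void {
  emitter.emit(event, ...data);
}
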
@@ -1,23 +1,114 @@
import { Body, Controller, Get, Post, Req, Res } from '@nestjs/common';
import { Request, Response } from 'express';
import { MaintenanceAuthDto, MaintenanceLoginDto, SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
import { ServerConfigDto } from 'src/dtos/server.dto';
import { ImmichCookie, MaintenanceAction } from 'src/enum';
import {
Body,
Controller,
Delete,
Get,
Next,
Param,
Post,
Req,
Res,
UploadedFile,
UseInterceptors,
} from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import { NextFunction, Request, Response } from 'express';
import {
MaintenanceAuthDto,
MaintenanceDetectInstallResponseDto,
MaintenanceLoginDto,
MaintenanceStatusResponseDto,
SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { ServerConfigDto, ServerVersionResponseDto } from 'src/dtos/server.dto';
import { ImmichCookie } from 'src/enum';
import { MaintenanceRoute } from 'src/maintenance/maintenance-auth.guard';
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
import { GetLoginDetails } from 'src/middleware/auth.guard';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { LoginDetails } from 'src/services/auth.service';
import { sendFile } from 'src/utils/file';
import { respondWithCookie } from 'src/utils/response';
import { FilenameParamDto } from 'src/validation';

import type { DatabaseBackupController as _DatabaseBackupController } from 'src/controllers/database-backup.controller';
import type { ServerController as _ServerController } from 'src/controllers/server.controller';
import { DatabaseBackupDeleteDto, DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';

@Controller()
export class MaintenanceWorkerController {
constructor(private service: MaintenanceWorkerService) {}
constructor(
private logger: LoggingRepository,
private service: MaintenanceWorkerService,
) {}

/**
* {@link _ServerController.getServerConfig }
*/
@Get('server/config')
getServerConfig(): Promise<ServerConfigDto> {
getServerConfig(): ServerConfigDto {
return this.service.getSystemConfig();
}

@Get('server/version')
getServerVersion(): ServerVersionResponseDto {
return this.service.getVersion();
}

/**
* {@link _DatabaseBackupController.listDatabaseBackups}
*/
@Get('admin/database-backups')
@MaintenanceRoute()
listDatabaseBackups(): Promise<DatabaseBackupListResponseDto> {
return this.service.listBackups();
}

/**
* {@link _DatabaseBackupController.downloadDatabaseBackup}
*/
@Get('admin/database-backups/:filename')
@MaintenanceRoute()
async downloadDatabaseBackup(
@Param() { filename }: FilenameParamDto,
@Res() res: Response,
@Next() next: NextFunction,
) {
await sendFile(res, next, () => this.service.downloadBackup(filename), this.logger);
}

/**
* {@link _DatabaseBackupController.deleteDatabaseBackup}
*/
@Delete('admin/database-backups')
@MaintenanceRoute()
async deleteDatabaseBackup(@Body() dto: DatabaseBackupDeleteDto): Promise<void> {
return this.service.deleteBackup(dto.backups);
}

/**
* {@link _DatabaseBackupController.uploadDatabaseBackup}
*/
@Post('admin/database-backups/upload')
@MaintenanceRoute()
@UseInterceptors(FileInterceptor('file'))
uploadDatabaseBackup(
@UploadedFile()
file: Express.Multer.File,
): Promise<void> {
return this.service.uploadBackup(file);
}

@Get('admin/maintenance/status')
maintenanceStatus(@Req() request: Request): Promise<MaintenanceStatusResponseDto> {
return this.service.status(request.cookies[ImmichCookie.MaintenanceToken]);
}

@Get('admin/maintenance/detect-install')
detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
return this.service.detectPriorInstall();
}

@Post('admin/maintenance/login')
async maintenanceLogin(
@Req() request: Request,
@@ -35,9 +126,7 @@ export class MaintenanceWorkerController {

@Post('admin/maintenance')
@MaintenanceRoute()
async setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): Promise<void> {
if (dto.action === MaintenanceAction.End) {
await this.service.endMaintenance();
}
setMaintenanceMode(@Body() dto: SetMaintenanceModeDto): void {
void this.service.setAction(dto);
}
}

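Reviewer note: during maintenance the worker exposes only this narrow surface. A hedged usage example for the status route above; the host, port, API prefix, and cookie name are assumptions, and the cookie value would come from the maintenance login URL:

// Anonymous callers receive the redacted public status; callers presenting
// the maintenance cookie receive the full status including raw error text.
const response = await fetch('http://localhost:2283/api/admin/maintenance/status', {
  headers: { Cookie: 'immich_maintenance_token=<jwt-from-login-url>' },
});
console.log(await response.json()); // e.g. { active: true, action: 'restore_database', task: 'ready' }
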
@@ -1,25 +1,51 @@
import { UnauthorizedException } from '@nestjs/common';
import { BadRequestException, UnauthorizedException } from '@nestjs/common';
import { SignJWT } from 'jose';
import { SystemMetadataKey } from 'src/enum';
import { DateTime } from 'luxon';
import { PassThrough, Readable } from 'node:stream';
import { StorageCore } from 'src/cores/storage.core';
import { MaintenanceAction, StorageFolder, SystemMetadataKey } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
import { MaintenanceWorkerService } from 'src/maintenance/maintenance-worker.service';
import { automock, getMocks, ServiceMocks } from 'test/utils';
import { automock, AutoMocked, getMocks, mockDuplex, mockSpawn, ServiceMocks } from 'test/utils';

function* mockData() {
yield '';
}

describe(MaintenanceWorkerService.name, () => {
let sut: MaintenanceWorkerService;
let mocks: ServiceMocks;
let maintenanceWorkerRepositoryMock: MaintenanceWebsocketRepository;
let maintenanceWebsocketRepositoryMock: AutoMocked<MaintenanceWebsocketRepository>;
let maintenanceHealthRepositoryMock: AutoMocked<MaintenanceHealthRepository>;

beforeEach(() => {
mocks = getMocks();
maintenanceWorkerRepositoryMock = automock(MaintenanceWebsocketRepository, { args: [mocks.logger], strict: false });
maintenanceWebsocketRepositoryMock = automock(MaintenanceWebsocketRepository, {
args: [mocks.logger],
strict: false,
});
maintenanceHealthRepositoryMock = automock(MaintenanceHealthRepository, {
args: [mocks.logger],
strict: false,
});

sut = new MaintenanceWorkerService(
mocks.logger as never,
mocks.app,
mocks.config,
mocks.systemMetadata as never,
maintenanceWorkerRepositoryMock,
maintenanceWebsocketRepositoryMock,
maintenanceHealthRepositoryMock,
mocks.storage as never,
mocks.process,
mocks.database as never,
);

sut.mock({
active: true,
action: MaintenanceAction.Start,
});
});

it('should work', () => {
@@ -27,14 +53,43 @@ describe(MaintenanceWorkerService.name, () => {
});

describe('getSystemConfig', () => {
it('should respond the server is in maintenance mode', async () => {
await expect(sut.getSystemConfig()).resolves.toMatchObject(
it('should respond the server is in maintenance mode', () => {
expect(sut.getSystemConfig()).toMatchObject(
expect.objectContaining({
maintenanceMode: true,
}),
);

expect(mocks.systemMetadata.get).toHaveBeenCalled();
expect(mocks.systemMetadata.get).toHaveBeenCalledTimes(0);
});
});

describe.skip('ssr');
describe.skip('detectMediaLocation');

describe('setStatus', () => {
it('should broadcast status', () => {
sut.setStatus({
active: true,
action: MaintenanceAction.Start,
task: 'abc',
error: 'def',
});

expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalled();
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledTimes(2);
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: 'start',
task: 'abc',
error: 'def',
});
expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'public', {
active: true,
action: 'start',
task: 'abc',
error: 'Something went wrong, see logs!',
});
});
});

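Reviewer note: the spec switches to AutoMocked<T> mocks, which is what lets assertions like maintenanceWebsocketRepositoryMock.clientSend.toHaveBeenCalledWith type-check. A rough approximation of that helper type with vitest primitives; the repo's automock helper surely differs, and vitest's Mock generic has changed shape across major versions:

import { expect, vi, type Mock } from 'vitest';

// Every method becomes a vi.fn() that keeps its original signature.
type AutoMocked<T> = {
  [K in keyof T]: T[K] extends (...args: infer A) => infer R ? Mock<(...args: A) => R> : T[K];
};

interface WebsocketRepo {
  clientSend(event: string, room: string, payload: object): void;
}

const repoMock: AutoMocked<WebsocketRepo> = { clientSend: vi.fn() };
repoMock.clientSend('MaintenanceStatusV1', 'private', { active: true });
expect(repoMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', { active: true });
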
@@ -42,7 +97,14 @@ describe(MaintenanceWorkerService.name, () => {
const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;

it('should log a valid login URL', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

await expect(sut.logSecret()).resolves.toBeUndefined();
expect(mocks.logger.log).toHaveBeenCalledWith(expect.stringMatching(RE_LOGIN_URL));

@@ -63,7 +125,13 @@ describe(MaintenanceWorkerService.name, () => {
});

it('should parse cookie properly', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

await expect(
sut.authenticate({
@@ -73,13 +141,102 @@ describe(MaintenanceWorkerService.name, () => {
});
});

describe('status', () => {
beforeEach(() => {
sut.mock({
active: true,
action: MaintenanceAction.Start,
error: 'secret value!',
});
});

it('generates private status', async () => {
const jwt = await new SignJWT({ _mockValue: true })
.setProtectedHeader({ alg: 'HS256' })
.setIssuedAt()
.setExpirationTime('4h')
.sign(new TextEncoder().encode('secret'));

await expect(sut.status(jwt)).resolves.toEqual(
expect.objectContaining({
error: 'secret value!',
}),
);
});

it('generates public status', async () => {
await expect(sut.status()).resolves.toEqual(
expect.objectContaining({
error: 'Something went wrong, see logs!',
}),
);
});
});

describe('detectPriorInstall', () => {
it('generate report about prior installation', async () => {
mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
mocks.storage.readFile.mockResolvedValue(undefined as never);
mocks.storage.overwriteFile.mockRejectedValue(undefined as never);

await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
{
"storage": [
{
"files": 2,
"folder": "encoded-video",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "library",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "upload",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "profile",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "thumbs",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "backups",
"readable": true,
"writable": false,
},
],
}
`);
});
});

describe('login', () => {
it('should fail without token', async () => {
await expect(sut.login()).rejects.toThrowError(new UnauthorizedException('Missing JWT Token'));
});

it('should fail with expired JWT', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

const jwt = await new SignJWT({})
.setProtectedHeader({ alg: 'HS256' })
@@ -91,7 +248,13 @@ describe(MaintenanceWorkerService.name, () => {
});

it('should succeed with valid JWT', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

const jwt = await new SignJWT({ _mockValue: true })
.setProtectedHeader({ alg: 'HS256' })
@@ -107,22 +270,275 @@ describe(MaintenanceWorkerService.name, () => {
});
});

describe('endMaintenance', () => {
describe.skip('setAction'); // just calls setStatus+runAction

/**
* Actions
*/

describe('action: start', () => {
it('should not do anything', async () => {
await sut.runAction({
action: MaintenanceAction.Start,
});

expect(mocks.logger.log).toHaveBeenCalledTimes(0);
});
});

describe('action: end', () => {
it('should set maintenance mode', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });
await expect(sut.endMaintenance()).resolves.toBeUndefined();
await sut.runAction({
action: MaintenanceAction.End,
});

expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: false,
});

expect(maintenanceWorkerRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
expect(maintenanceWebsocketRepositoryMock.clientBroadcast).toHaveBeenCalledWith('AppRestartV1', {
isMaintenanceMode: false,
});

expect(maintenanceWorkerRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
expect(maintenanceWebsocketRepositoryMock.serverSend).toHaveBeenCalledWith('AppRestart', {
isMaintenanceMode: false,
});
});
});

describe('action: restore database', () => {
beforeEach(() => {
mocks.database.tryLock.mockResolvedValueOnce(true);

mocks.storage.readdir.mockResolvedValue([]);
mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
mocks.process.fork.mockImplementation(() => mockSpawn(0, 'Immich Server is listening', ''));
mocks.storage.rename.mockResolvedValue();
mocks.storage.unlink.mockResolvedValue();
mocks.storage.createPlainReadStream.mockReturnValue(Readable.from(mockData()));
mocks.storage.createWriteStream.mockReturnValue(new PassThrough());
mocks.storage.createGzip.mockReturnValue(new PassThrough());
mocks.storage.createGunzip.mockReturnValue(new PassThrough());
});

it('should update maintenance mode state', async () => {
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'filename',
});

expect(mocks.database.tryLock).toHaveBeenCalled();
expect(mocks.logger.log).toHaveBeenCalledWith('Running maintenance action restore_database');

expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret: 'secret',
action: {
action: 'start',
},
});
});

it('should fail to restore invalid backup', async () => {
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'filename',
});

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
error: 'Error: Invalid backup file format!',
task: 'error',
});
});

it('should successfully run a backup', async () => {
await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
});

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith(
'MaintenanceStatusV1',
expect.any(String),
{
active: true,
action: MaintenanceAction.RestoreDatabase,
task: 'ready',
progress: expect.any(Number),
},
);

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
'MaintenanceStatusV1',
expect.any(String),
{
active: true,
action: 'end',
},
);

expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(3);
});

it('should fail if backup creation fails', async () => {
mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));

await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
});

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
error: 'Error: pg_dump non-zero exit code (1)\nerror',
task: 'error',
});

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
'MaintenanceStatusV1',
expect.any(String),
expect.objectContaining({
task: 'error',
}),
);
});

it('should fail if restore itself fails', async () => {
mocks.process.spawnDuplexStream
.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''))
.mockReturnValueOnce(mockDuplex('gzip', 0, 'data', ''))
.mockReturnValueOnce(mockDuplex('psql', 1, '', 'error'));

await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
});

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
error: 'Error: psql non-zero exit code (1)\nerror',
task: 'error',
});

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenLastCalledWith(
'MaintenanceStatusV1',
expect.any(String),
expect.objectContaining({
task: 'error',
}),
);
});

it('should rollback if database migrations fail', async () => {
mocks.database.runMigrations.mockRejectedValue(new Error('Migrations Error'));

await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
});

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
error: 'Error: Migrations Error',
task: 'error',
});

expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalledTimes(0);
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
});

it('should rollback if API healthcheck fails', async () => {
maintenanceHealthRepositoryMock.checkApiHealth.mockRejectedValue(new Error('Health Error'));

await sut.runAction({
action: MaintenanceAction.RestoreDatabase,
restoreBackupFilename: 'development-filename.sql',
});

expect(maintenanceWebsocketRepositoryMock.clientSend).toHaveBeenCalledWith('MaintenanceStatusV1', 'private', {
active: true,
action: MaintenanceAction.RestoreDatabase,
error: 'Error: Health Error',
task: 'error',
});

expect(maintenanceHealthRepositoryMock.checkApiHealth).toHaveBeenCalled();
expect(mocks.process.spawnDuplexStream).toHaveBeenCalledTimes(4);
});
});

/**
* Backups
*/

describe('listBackups', () => {
it('should give us all backups', async () => {
mocks.storage.readdir.mockResolvedValue([
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
'immich-db-backup-1753789649000.sql.gz',
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
]);
mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);

await expect(sut.listBackups()).resolves.toMatchObject({
backups: [
{ filename: 'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
{ filename: 'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
{ filename: 'immich-db-backup-1753789649000.sql.gz', filesize: 1024 },
],
});
});
});

describe('deleteBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should unlink the target file', async () => {
await sut.deleteBackup(['filename.sql']);
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
);
});
});

describe('uploadBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should write file', async () => {
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
});
});

describe('downloadBackup', () => {
it('should reject invalid file names', () => {
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
});

it('should get backup path', () => {
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
expect.objectContaining({
path: '/data/backups/hello.sql.gz',
}),
);
});
});
});

@@ -4,19 +4,41 @@ import { NextFunction, Request, Response } from 'express';
import { jwtVerify } from 'jose';
import { readFileSync } from 'node:fs';
import { IncomingHttpHeaders } from 'node:http';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { ImmichCookie, SystemMetadataKey } from 'src/enum';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import {
MaintenanceAuthDto,
MaintenanceDetectInstallResponseDto,
MaintenanceStatusResponseDto,
SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { ServerConfigDto, ServerVersionResponseDto } from 'src/dtos/server.dto';
import { DatabaseLock, ImmichCookie, MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { MaintenanceHealthRepository } from 'src/maintenance/maintenance-health.repository';
import { MaintenanceWebsocketRepository } from 'src/maintenance/maintenance-websocket.repository';
import { AppRepository } from 'src/repositories/app.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { type ApiService as _ApiService } from 'src/services/api.service';
import { type BaseService as _BaseService } from 'src/services/base.service';
import { type DatabaseBackupService as _DatabaseBackupService } from 'src/services/database-backup.service';
import { type ServerService as _ServerService } from 'src/services/server.service';
import { type VersionService as _VersionService } from 'src/services/version.service';
import { MaintenanceModeState } from 'src/types';
import { getConfig } from 'src/utils/config';
import { createMaintenanceLoginUrl } from 'src/utils/maintenance';
import {
deleteDatabaseBackup,
downloadDatabaseBackup,
listDatabaseBackups,
restoreDatabaseBackup,
uploadDatabaseBackup,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';
import { createMaintenanceLoginUrl, detectPriorInstall } from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';

/**
@@ -24,16 +46,51 @@ import { getExternalDomain } from 'src/utils/misc';
*/
@Injectable()
export class MaintenanceWorkerService {
#secret: string | null = null;
#status: MaintenanceStatusResponseDto = {
active: true,
action: MaintenanceAction.Start,
};

constructor(
protected logger: LoggingRepository,
private appRepository: AppRepository,
private configRepository: ConfigRepository,
private systemMetadataRepository: SystemMetadataRepository,
private maintenanceWorkerRepository: MaintenanceWebsocketRepository,
private maintenanceWebsocketRepository: MaintenanceWebsocketRepository,
private maintenanceHealthRepository: MaintenanceHealthRepository,
private storageRepository: StorageRepository,
private processRepository: ProcessRepository,
private databaseRepository: DatabaseRepository,
) {
this.logger.setContext(this.constructor.name);
}

mock(status: MaintenanceStatusResponseDto) {
this.#secret = 'secret';
this.#status = status;
}

async init() {
const state = (await this.systemMetadataRepository.get(
SystemMetadataKey.MaintenanceMode,
)) as MaintenanceModeState & { isMaintenanceMode: true };

this.#secret = state.secret;
this.#status = {
active: true,
action: state.action.action,
};

StorageCore.setMediaLocation(this.detectMediaLocation());

this.maintenanceWebsocketRepository.setAuthFn(async (client) => this.authenticate(client.request.headers));
this.maintenanceWebsocketRepository.setStatusUpdateFn((status) => (this.#status = status));

await this.logSecret();
void this.runAction(state.action);
}

/**
* {@link _BaseService.configRepos}
*/
@@ -55,22 +112,17 @@ export class MaintenanceWorkerService {
/**
* {@link _ServerService.getSystemConfig}
*/
async getSystemConfig() {
const config = await this.getConfig({ withCache: false });

getSystemConfig() {
return {
loginPageMessage: config.server.loginPageMessage,
trashDays: config.trash.days,
userDeleteDelay: config.user.deleteDelay,
oauthButtonText: config.oauth.buttonText,
isInitialized: true,
isOnboarded: true,
externalDomain: config.server.externalDomain,
publicUsers: config.server.publicUsers,
mapDarkStyleUrl: config.map.darkStyle,
mapLightStyleUrl: config.map.lightStyle,
maintenanceMode: true,
};
} as ServerConfigDto;
}

/**
* {@link _VersionService.getVersion}
*/
getVersion() {
return ServerVersionResponseDto.fromSemVer(serverVersion);
}

/**
@@ -106,12 +158,99 @@ export class MaintenanceWorkerService {
};
}

private async secret(): Promise<string> {
const state = (await this.systemMetadataRepository.get(SystemMetadataKey.MaintenanceMode)) as {
secret: string;
};
/**
* {@link _StorageService.detectMediaLocation}
*/
detectMediaLocation(): string {
const envData = this.configRepository.getEnv();
if (envData.storage.mediaLocation) {
return envData.storage.mediaLocation;
}

return state.secret;
const targets: string[] = [];
const candidates = ['/data', '/usr/src/app/upload'];

for (const candidate of candidates) {
const exists = this.storageRepository.existsSync(candidate);
if (exists) {
targets.push(candidate);
}
}

if (targets.length === 1) {
return targets[0];
}

return '/usr/src/app/upload';
}

/**
* {@link _DatabaseBackupService.listBackups}
*/
async listBackups(): Promise<{ backups: { filename: string; filesize: number }[] }> {
const backups = await listDatabaseBackups(this.backupRepos);
return { backups };
}

/**
* {@link _DatabaseBackupService.deleteBackup}
*/
async deleteBackup(files: string[]): Promise<void> {
return deleteDatabaseBackup(this.backupRepos, files);
}

/**
* {@link _DatabaseBackupService.uploadBackup}
*/
async uploadBackup(file: Express.Multer.File): Promise<void> {
return uploadDatabaseBackup(this.backupRepos, file);
}

/**
* {@link _DatabaseBackupService.downloadBackup}
*/
downloadBackup(fileName: string): ImmichFileResponse {
return downloadDatabaseBackup(fileName);
}

private get secret() {
if (!this.#secret) {
throw new Error('Secret is not initialised yet.');
}

return this.#secret;
}

private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
health: this.maintenanceHealthRepository,
};
}

private getStatus(): MaintenanceStatusResponseDto {
return this.#status;
}

private getPublicStatus(): MaintenanceStatusResponseDto {
const state = structuredClone(this.#status);

if (state.error) {
state.error = 'Something went wrong, see logs!';
}

return state;
}

setStatus(status: MaintenanceStatusResponseDto): void {
this.#status = status;
this.maintenanceWebsocketRepository.serverSend('MaintenanceStatus', status);
this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'private', status);
this.maintenanceWebsocketRepository.clientSend('MaintenanceStatusV1', 'public', this.getPublicStatus());
}

async logSecret(): Promise<void> {
@@ -123,7 +262,7 @@ export class MaintenanceWorkerService {
{
username: 'immich-admin',
},
await this.secret(),
this.secret,
);

this.logger.log(`\n\n🚧 Immich is in maintenance mode, you can log in using the following URL:\n${url}\n`);
@@ -134,28 +273,115 @@ export class MaintenanceWorkerService {
return this.login(jwtToken);
}

async status(potentiallyJwt?: string): Promise<MaintenanceStatusResponseDto> {
try {
await this.login(potentiallyJwt);
return this.getStatus();
} catch {
return this.getPublicStatus();
}
}

detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
return detectPriorInstall(this.storageRepository);
}

async login(jwt?: string): Promise<MaintenanceAuthDto> {
if (!jwt) {
throw new UnauthorizedException('Missing JWT Token');
}

const secret = await this.secret();

try {
const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(secret));
const result = await jwtVerify<MaintenanceAuthDto>(jwt, new TextEncoder().encode(this.secret));
return result.payload;
} catch {
throw new UnauthorizedException('Invalid JWT Token');
}
}

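Reviewer note: maintenance auth rests on a single HS256 secret held in system metadata: logSecret signs a short-lived token into the login URL and login() verifies whatever comes back via cookie. The sign/verify pair in isolation, using jose exactly as the file does (the payload fields are illustrative):

import { SignJWT, jwtVerify } from 'jose';

const key = new TextEncoder().encode('secret');

// Mint a token the way the login URL does.
const jwt = await new SignJWT({ username: 'immich-admin' })
  .setProtectedHeader({ alg: 'HS256' })
  .setIssuedAt()
  .setExpirationTime('4h')
  .sign(key);

// Verify it the way login() does; an expired or tampered token throws,
// which the service converts into an UnauthorizedException.
const { payload } = await jwtVerify(jwt, key);
console.log(payload.username); // 'immich-admin'
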
async endMaintenance(): Promise<void> {
async setAction(action: SetMaintenanceModeDto) {
this.setStatus({
active: true,
action: action.action,
});

await this.runAction(action);
}

async runAction(action: SetMaintenanceModeDto) {
switch (action.action) {
case MaintenanceAction.Start: {
return;
}
case MaintenanceAction.End: {
return this.endMaintenance();
}
case MaintenanceAction.SelectDatabaseRestore: {
return;
}
}

const lock = await this.databaseRepository.tryLock(DatabaseLock.MaintenanceOperation);
if (!lock) {
return;
}

this.logger.log(`Running maintenance action ${action.action}`);

await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret: this.secret,
action: {
action: MaintenanceAction.Start,
},
});

try {
if (!action.restoreBackupFilename) {
throw new Error("Expected restoreBackupFilename but it's missing!");
}

await this.restoreBackup(action.restoreBackupFilename);
} catch (error) {
this.logger.error(`Encountered error running action: ${error}`);
this.setStatus({
active: true,
action: action.action,
task: 'error',
error: '' + error,
});
}
}

private async restoreBackup(filename: string): Promise<void> {
this.setStatus({
active: true,
action: MaintenanceAction.RestoreDatabase,
task: 'ready',
progress: 0,
});

await restoreDatabaseBackup(this.backupRepos, filename, (task, progress) =>
this.setStatus({
active: true,
action: MaintenanceAction.RestoreDatabase,
progress,
task,
}),
);

await this.setAction({
action: MaintenanceAction.End,
});
}

private async endMaintenance(): Promise<void> {
const state: MaintenanceModeState = { isMaintenanceMode: false as const };
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, state);

// => corresponds to notification.service.ts#onAppRestart
this.maintenanceWorkerRepository.clientBroadcast('AppRestartV1', state);
this.maintenanceWorkerRepository.serverSend('AppRestart', state);
this.maintenanceWebsocketRepository.clientBroadcast('AppRestartV1', state);
this.maintenanceWebsocketRepository.serverSend('AppRestart', state);
this.appRepository.exitApp();
}
}

server/src/queries/asset.edit.repository.sql (new file, 17 lines)
@@ -0,0 +1,17 @@
-- NOTE: This file is auto generated by ./sql-generator

-- AssetEditRepository.replaceAll
begin
delete from "asset_edit"
where
"assetId" = $1
rollback

-- AssetEditRepository.getAll
select
"action",
"parameters"
from
"asset_edit"
where
"assetId" = $1
@@ -105,7 +105,21 @@ select
where
"asset_file"."assetId" = "asset"."id"
) as agg
) as "files"
) as "files",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits"
from
"asset"
inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id"
@@ -167,6 +181,20 @@ select
"asset_file"."assetId" = "asset"."id"
) as agg
) as "files",
(
select
coalesce(json_agg(agg), '[]')
from
(
select
"asset_edit"."action",
"asset_edit"."parameters"
from
"asset_edit"
where
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits",
to_json("asset_exif") as "exifInfo"
from
"asset"
@@ -191,6 +219,8 @@ select
"asset"."originalPath",
"asset"."ownerId",
"asset"."type",
"asset"."width",
"asset"."height",
(
select
coalesce(json_agg(agg), '[]')
@@ -203,6 +233,7 @@ select
where
"asset_face"."assetId" = "asset"."id"
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" = $1
) as agg
) as "faces",
(
@@ -218,13 +249,13 @@ select
"asset_file"
where
"asset_file"."assetId" = "asset"."id"
and "asset_file"."type" = $1
and "asset_file"."type" = $2
) as agg
) as "files"
from
"asset"
where
"asset"."id" = $2
"asset"."id" = $3

-- AssetJobRepository.getLockedPropertiesForMetadataExtraction
select
@@ -402,6 +433,7 @@ select
where
"asset_face"."assetId" = "asset"."id"
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
) as agg
) as "faces",
(
@@ -493,6 +525,9 @@ select
"asset"."fileCreatedAt",
"asset_exif"."timeZone",
"asset_exif"."fileSizeInByte",
"asset_exif"."make",
"asset_exif"."model",
"asset_exif"."lensModel",
(
select
coalesce(json_agg(agg), '[]')
@@ -529,6 +564,9 @@ select
"asset"."fileCreatedAt",
"asset_exif"."timeZone",
"asset_exif"."fileSizeInByte",
"asset_exif"."make",
"asset_exif"."model",
"asset_exif"."lensModel",
(
select
coalesce(json_agg(agg), '[]')

@@ -49,6 +49,23 @@ returning
"dateTimeOriginal",
"timeZone"

-- AssetRepository.unlockProperties
update "asset_exif"
set
"lockedProperties" = nullif(
array(
select distinct
property
from
unnest("asset_exif"."lockedProperties") property
where
not property = any ($1)
),
'{}'
)
where
"assetId" = $2

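Reviewer note: unlockProperties filters values out of the text[] column with unnest + array(), then nullif collapses an empty result to NULL. A hedged sketch of issuing the same statement through Kysely's sql template; the repository's actual builder code may differ, but the table and column names match the hunk:

import { Kysely, sql } from 'kysely';

// Remove the given properties from asset_exif.lockedProperties, storing
// NULL rather than an empty array when nothing remains locked.
async function unlockProperties(db: Kysely<any>, assetId: string, properties: string[]): Promise<void> {
  await sql`
    update "asset_exif"
    set "lockedProperties" = nullif(
      array(
        select distinct property
        from unnest("asset_exif"."lockedProperties") property
        where not property = any (${properties})
      ),
      '{}'
    )
    where "assetId" = ${assetId}
  `.execute(db);
}
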
-- AssetRepository.getMetadata
select
"key",
@@ -76,6 +93,14 @@ where
"assetId" = $1
and "key" = $2

-- AssetRepository.deleteBulkMetadata
begin
delete from "asset_metadata"
where
"assetId" = $1
and "key" = $2
commit

-- AssetRepository.getByDayOfYear
with
"res" as (
@@ -174,6 +199,7 @@ select
where
"asset_face"."assetId" = "asset"."id"
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
) as agg
) as "faces",
(
@@ -375,14 +401,10 @@ with
"asset_exif"."projectionType",
coalesce(
case
when asset_exif."exifImageHeight" = 0
or asset_exif."exifImageWidth" = 0 then 1
when "asset_exif"."orientation" in ('5', '6', '7', '8', '-90', '90') then round(
asset_exif."exifImageHeight"::numeric / asset_exif."exifImageWidth"::numeric,
3
)
when asset."height" = 0
or asset."width" = 0 then 1
else round(
asset_exif."exifImageWidth"::numeric / asset_exif."exifImageHeight"::numeric,
asset."width"::numeric / asset."height"::numeric,
3
)
end,

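Reviewer note: the timeline aspect-ratio hunk now falls back to asset.width/height and keeps the orientation swap only for the EXIF branch. The guarded division in TypeScript form, matching the case expression above (a sketch, not the repository's code):

// A zero dimension degrades to a square 1:1 ratio instead of dividing by
// zero, and the result is rounded to three decimals like round(x::numeric, 3).
function aspectRatio(width: number, height: number): number {
  if (width === 0 || height === 0) {
    return 1;
  }
  return Math.round((width / height) * 1000) / 1000;
}
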
@@ -15,6 +15,7 @@ from
"asset_ocr"
where
"asset_ocr"."assetId" = $1
and "asset_ocr"."isVisible" = $2

-- OcrRepository.upsert
with
@@ -66,3 +67,12 @@ with
)
select
1 as "dummy"

-- OcrRepository.updateOcrVisibilities
begin
update "ocr_search"
set
"text" = $1
where
"assetId" = $2
commit

@@ -35,6 +35,7 @@ from
where
"person"."ownerId" = $1
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
and "person"."isHidden" = $2
group by
"person"."id"
@@ -63,6 +64,7 @@ from
left join "asset_face" on "asset_face"."personId" = "person"."id"
where
"asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true
group by
"person"."id"
having
@@ -89,6 +91,7 @@ from
where
"asset_face"."assetId" = $1
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" = $2
order by
"asset_face"."boundingBoxX1" asc

@@ -230,6 +233,7 @@ from
and "asset"."deletedAt" is null
where
"asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true

-- PersonRepository.getNumberOfPeople
select
@@ -251,6 +255,7 @@ where
where
"asset_face"."personId" = "person"."id"
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" = $2
and exists (
select
from
@@ -261,7 +266,7 @@ where
and "asset"."deletedAt" is null
)
)
and "person"."ownerId" = $2
and "person"."ownerId" = $3

-- PersonRepository.refreshFaces
with
@@ -322,6 +327,7 @@ from
where
"asset_face"."personId" = $1
and "asset_face"."deletedAt" is null
and "asset_face"."isVisible" is true

-- PersonRepository.getLatestFaceDate
select

@@ -69,6 +69,9 @@ select
"asset"."livePhotoVideoId",
"asset"."stackId",
"asset"."libraryId",
"asset"."width",
"asset"."height",
"asset"."isEdited",
"album_asset"."updateId"
from
"album_asset" as "album_asset"
@@ -99,6 +102,9 @@ select
"asset"."livePhotoVideoId",
"asset"."stackId",
"asset"."libraryId",
"asset"."width",
"asset"."height",
"asset"."isEdited",
"asset"."updateId"
from
"asset" as "asset"
@@ -134,7 +140,10 @@ select
"asset"."duration",
"asset"."livePhotoVideoId",
"asset"."stackId",
"asset"."libraryId"
"asset"."libraryId",
"asset"."width",
"asset"."height",
"asset"."isEdited"
from
"album_asset" as "album_asset"
inner join "asset" on "asset"."id" = "album_asset"."assetId"
@@ -448,6 +457,9 @@ select
"asset"."livePhotoVideoId",
"asset"."stackId",
"asset"."libraryId",
"asset"."width",
"asset"."height",
"asset"."isEdited",
"asset"."updateId"
from
"asset" as "asset"
@@ -536,6 +548,7 @@ where
"asset_face"."updateId" < $1
and "asset_face"."updateId" > $2
and "asset"."ownerId" = $3
and "asset_face"."isVisible" = $4
order by
"asset_face"."updateId" asc

@@ -740,6 +753,9 @@ select
"asset"."livePhotoVideoId",
"asset"."stackId",
"asset"."libraryId",
"asset"."width",
"asset"."height",
"asset"."isEdited",
"asset"."updateId"
from
"asset" as "asset"
@@ -789,6 +805,9 @@ select
"asset"."livePhotoVideoId",
"asset"."stackId",
"asset"."libraryId",
"asset"."width",
"asset"."height",
"asset"."isEdited",
"asset"."updateId"
from
"asset" as "asset"

server/src/repositories/asset-edit.repository.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import { Injectable } from '@nestjs/common';
import { Kysely } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { DB } from 'src/schema';

@Injectable()
export class AssetEditRepository {
  constructor(@InjectKysely() private db: Kysely<DB>) {}

  @GenerateSql({
    params: [DummyValue.UUID],
  })
  async replaceAll(assetId: string, edits: AssetEditActionItem[]): Promise<AssetEditActionItem[]> {
    return await this.db.transaction().execute(async (trx) => {
      await trx.deleteFrom('asset_edit').where('assetId', '=', assetId).execute();

      if (edits.length > 0) {
        return trx
          .insertInto('asset_edit')
          .values(edits.map((edit) => ({ assetId, ...edit })))
          .returning(['action', 'parameters'])
          .execute() as Promise<AssetEditActionItem[]>;
      }

      return [];
    });
  }

  @GenerateSql({
    params: [DummyValue.UUID],
  })
  async getAll(assetId: string): Promise<AssetEditActionItem[]> {
    return this.db
      .selectFrom('asset_edit')
      .select(['action', 'parameters'])
      .where('assetId', '=', assetId)
      .execute() as Promise<AssetEditActionItem[]>;
  }
}
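replaceAll deletes and re-inserts inside one transaction, so concurrent readers never observe a partially replaced edit list. A hedged usage sketch (the repository instance and payload are illustrative):

// Illustrative only: assumes an injected AssetEditRepository and a valid asset id.
const saved = await assetEditRepository.replaceAll(assetId, [
  { action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 800, height: 600 } },
]);
// saved contains the rows as persisted: [{ action, parameters }, ...]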
@@ -11,6 +11,7 @@ import {
  asUuid,
  toJson,
  withDefaultVisibility,
  withEdits,
  withExif,
  withExifInner,
  withFaces,
@@ -72,6 +73,7 @@ export class AssetJobRepository {
      .selectFrom('asset')
      .select(['asset.id', 'asset.thumbhash'])
      .select(withFiles)
      .select(withEdits)
      .where('asset.deletedAt', 'is', null)
      .where('asset.visibility', '!=', AssetVisibility.Hidden)
      .$if(!force, (qb) =>
@@ -113,6 +115,7 @@ export class AssetJobRepository {
        'asset.type',
      ])
      .select(withFiles)
      .select(withEdits)
      .$call(withExifInner)
      .where('asset.id', '=', id)
      .executeTakeFirst();
@@ -200,7 +203,7 @@ export class AssetJobRepository {
      .selectFrom('asset')
      .select(['asset.id', 'asset.visibility'])
      .$call(withExifInner)
      .select((eb) => withFaces(eb, true))
      .select((eb) => withFaces(eb, true, true))
      .select((eb) => withFiles(eb, AssetFileType.Preview))
      .where('asset.id', '=', id)
      .executeTakeFirst();
@@ -324,6 +327,9 @@ export class AssetJobRepository {
        'asset.fileCreatedAt',
        'asset_exif.timeZone',
        'asset_exif.fileSizeInByte',
        'asset_exif.make',
        'asset_exif.model',
        'asset_exif.lensModel',
      ])
      .select((eb) => withFiles(eb, AssetFileType.Sidecar))
      .where('asset.deletedAt', 'is', null);

@@ -5,11 +5,12 @@ import { InjectKysely } from 'nestjs-kysely';
import { LockableProperty, Stack } from 'src/database';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetFileType, AssetMetadataKey, AssetOrder, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { AssetFileType, AssetOrder, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
import { AssetMetadataTable } from 'src/schema/tables/asset-metadata.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import {
  anyUuid,
@@ -19,6 +20,7 @@ import {
  truncatedDate,
  unnest,
  withDefaultVisibility,
  withEdits,
  withExif,
  withFaces,
  withFacesAndPeople,
@@ -111,6 +113,7 @@ interface GetByIdsRelations {
  smartSearch?: boolean;
  stack?: { assets?: boolean };
  tags?: boolean;
  edits?: boolean;
}

const distinctLocked = <T extends LockableProperty[] | null>(eb: ExpressionBuilder<DB, 'asset_exif'>, columns: T) =>
@@ -220,6 +223,17 @@ export class AssetRepository {
      .execute();
  }

  @GenerateSql({ params: [DummyValue.UUID, ['description']] })
  unlockProperties(assetId: string, properties: LockableProperty[]) {
    return this.db
      .updateTable('asset_exif')
      .where('assetId', '=', assetId)
      .set((eb) => ({
        lockedProperties: sql`nullif(array(select distinct property from unnest(${eb.ref('asset_exif.lockedProperties')}) property where not property = any(${properties})), '{}')`,
      }))
      .execute();
  }
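The lockedProperties expression filters the given properties out of the stored array and uses nullif(..., '{}') to collapse an empty result to NULL. A minimal TypeScript reduction of the same set operation, for orientation only (the names here are illustrative):

// Sketch of the set semantics the SQL above implements in one update statement.
const unlock = (locked: string[] | null, toUnlock: string[]): string[] | null => {
  const remaining = [...new Set(locked ?? [])].filter((p) => !toUnlock.includes(p));
  return remaining.length > 0 ? remaining : null; // nullif(..., '{}')
};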

  async upsertJobStatus(...jobStatus: Insertable<AssetJobStatusTable>[]): Promise<void> {
    if (jobStatus.length === 0) {
      return;
@@ -256,7 +270,7 @@ export class AssetRepository {
      .execute();
  }

  upsertMetadata(id: string, items: Array<{ key: AssetMetadataKey; value: object }>) {
  upsertMetadata(id: string, items: Array<{ key: string; value: object }>) {
    if (items.length === 0) {
      return [];
    }

    return this.db
      .insertInto('asset_metadata')
      .values(items.map((item) => ({ assetId: id, ...item })))
@@ -269,8 +287,21 @@ export class AssetRepository {
      .execute();
  }

  upsertBulkMetadata(items: Insertable<AssetMetadataTable>[]) {
    return this.db
      .insertInto('asset_metadata')
      .values(items)
      .onConflict((oc) =>
        oc
          .columns(['assetId', 'key'])
          .doUpdateSet((eb) => ({ key: eb.ref('excluded.key'), value: eb.ref('excluded.value') })),
      )
      .returning(['assetId', 'key', 'value', 'updatedAt'])
      .execute();
  }

  @GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
  getMetadataByKey(assetId: string, key: AssetMetadataKey) {
  getMetadataByKey(assetId: string, key: string) {
    return this.db
      .selectFrom('asset_metadata')
      .select(['key', 'value', 'updatedAt'])
@@ -280,10 +311,23 @@ export class AssetRepository {
  }

  @GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
  async deleteMetadataByKey(id: string, key: AssetMetadataKey) {
  async deleteMetadataByKey(id: string, key: string) {
    await this.db.deleteFrom('asset_metadata').where('assetId', '=', id).where('key', '=', key).execute();
  }

  @GenerateSql({ params: [[{ assetId: DummyValue.UUID, key: DummyValue.STRING }]] })
  async deleteBulkMetadata(items: Array<{ assetId: string; key: string }>) {
    if (items.length === 0) {
      return;
    }

    await this.db.transaction().execute(async (tx) => {
      for (const { assetId, key } of items) {
        await tx.deleteFrom('asset_metadata').where('assetId', '=', assetId).where('key', '=', key).execute();
      }
    });
  }

  create(asset: Insertable<AssetTable>) {
    return this.db.insertInto('asset').values(asset).returningAll().executeTakeFirstOrThrow();
  }
@@ -441,7 +485,10 @@ export class AssetRepository {
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
  getById(
    id: string,
    { exifInfo, faces, files, library, owner, smartSearch, stack, tags, edits }: GetByIdsRelations = {},
  ) {
    return this.db
      .selectFrom('asset')
      .selectAll('asset')
@@ -478,6 +525,7 @@ export class AssetRepository {
      )
      .$if(!!files, (qb) => qb.select(withFiles))
      .$if(!!tags, (qb) => qb.select(withTags))
      .$if(!!edits, (qb) => qb.select(withEdits))
      .limit(1)
      .executeTakeFirst();
  }
@@ -505,10 +553,11 @@ export class AssetRepository {
        .selectAll('asset')
        .$call(withExif)
        .$call((qb) => qb.select(withFacesAndPeople))
        .$call((qb) => qb.select(withEdits))
        .executeTakeFirst();
    }

    return this.getById(asset.id, { exifInfo: true, faces: { person: true } });
    return this.getById(asset.id, { exifInfo: true, faces: { person: true }, edits: true });
  }

  async remove(asset: { id: string }): Promise<void> {
@@ -665,11 +714,9 @@ export class AssetRepository {
        .coalesce(
          eb
            .case()
            .when(sql`asset_exif."exifImageHeight" = 0 or asset_exif."exifImageWidth" = 0`)
            .when(sql`asset."height" = 0 or asset."width" = 0`)
            .then(eb.lit(1))
            .when('asset_exif.orientation', 'in', sql<string>`('5', '6', '7', '8', '-90', '90')`)
            .then(sql`round(asset_exif."exifImageHeight"::numeric / asset_exif."exifImageWidth"::numeric, 3)`)
            .else(sql`round(asset_exif."exifImageWidth"::numeric / asset_exif."exifImageHeight"::numeric, 3)`)
            .else(sql`round(asset."width"::numeric / asset."height"::numeric, 3)`)
            .end(),
          eb.lit(1),
        )

@@ -8,6 +8,8 @@ const getEnv = () => {

const resetEnv = () => {
  for (const env of [
    'IMMICH_ALLOW_EXTERNAL_PLUGINS',
    'IMMICH_ALLOW_SETUP',
    'IMMICH_ENV',
    'IMMICH_WORKERS_INCLUDE',
    'IMMICH_WORKERS_EXCLUDE',
@@ -75,6 +77,9 @@ describe('getEnv', () => {
      configFile: undefined,
      logLevel: undefined,
    });

    expect(config.plugins.external).toEqual({ allow: false });
    expect(config.setup).toEqual({ allow: true });
  });

  describe('IMMICH_MEDIA_LOCATION', () => {
@@ -84,6 +89,32 @@ describe('getEnv', () => {
    });
  });

  describe('IMMICH_ALLOW_EXTERNAL_PLUGINS', () => {
    it('should disable plugins', () => {
      process.env.IMMICH_ALLOW_EXTERNAL_PLUGINS = 'false';
      const config = getEnv();
      expect(config.plugins.external).toEqual({ allow: false });
    });

    it('should throw an error for invalid value', () => {
      process.env.IMMICH_ALLOW_EXTERNAL_PLUGINS = 'invalid';
      expect(() => getEnv()).toThrowError('IMMICH_ALLOW_EXTERNAL_PLUGINS must be a boolean value');
    });
  });

  describe('IMMICH_ALLOW_SETUP', () => {
    it('should disable setup', () => {
      process.env.IMMICH_ALLOW_SETUP = 'false';
      const { setup } = getEnv();
      expect(setup).toEqual({ allow: false });
    });

    it('should throw an error for invalid value', () => {
      process.env.IMMICH_ALLOW_SETUP = 'invalid';
      expect(() => getEnv()).toThrowError('IMMICH_ALLOW_SETUP must be a boolean value');
    });
  });

  describe('database', () => {
    it('should use defaults', () => {
      const { database } = getEnv();

@@ -17,6 +17,7 @@ import {
  ImmichHeader,
  ImmichTelemetry,
  ImmichWorker,
  LogFormat,
  LogLevel,
  QueueName,
} from 'src/enum';
@@ -29,6 +30,7 @@ export interface EnvData {
  environment: ImmichEnvironment;
  configFile?: string;
  logLevel?: LogLevel;
  logFormat?: LogFormat;

  buildMetadata: {
    build?: string;
@@ -90,6 +92,10 @@ export interface EnvData {

  redis: RedisOptions;

  setup: {
    allow: boolean;
  };

  telemetry: {
    apiPort: number;
    microservicesPort: number;
@@ -104,8 +110,10 @@ export interface EnvData {
  workers: ImmichWorker[];

  plugins: {
    enabled: boolean;
    installFolder?: string;
    external: {
      allow: boolean;
      installFolder?: string;
    };
  };

  noColor: boolean;
@@ -227,6 +235,7 @@ const getEnv = (): EnvData => {
    environment,
    configFile: dto.IMMICH_CONFIG_FILE,
    logLevel: dto.IMMICH_LOG_LEVEL,
    logFormat: dto.IMMICH_LOG_FORMAT || LogFormat.Console,

    buildMetadata: {
      build: dto.IMMICH_BUILD,
@@ -313,6 +322,10 @@ const getEnv = (): EnvData => {
      corePlugin: join(buildFolder, 'corePlugin'),
    },

    setup: {
      allow: dto.IMMICH_ALLOW_SETUP ?? true,
    },

    storage: {
      ignoreMountCheckErrors: !!dto.IMMICH_IGNORE_MOUNT_CHECK_ERRORS,
      mediaLocation: dto.IMMICH_MEDIA_LOCATION,
@@ -327,8 +340,10 @@ const getEnv = (): EnvData => {
    workers,

    plugins: {
      enabled: !!dto.IMMICH_PLUGINS_ENABLED,
      installFolder: dto.IMMICH_PLUGINS_INSTALL_FOLDER,
      external: {
        allow: dto.IMMICH_ALLOW_EXTERNAL_PLUGINS ?? false,
        installFolder: dto.IMMICH_PLUGINS_INSTALL_FOLDER,
      },
    },

    noColor: !!dto.NO_COLOR,

@@ -4,6 +4,7 @@ import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
import { AppRepository } from 'src/repositories/app.repository';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { AuditRepository } from 'src/repositories/audit.repository';
@@ -59,6 +60,7 @@ export const repositories = [
  ApiKeyRepository,
  AppRepository,
  AssetRepository,
  AssetEditRepository,
  AssetJobRepository,
  ConfigRepository,
  CronRepository,

@@ -2,7 +2,7 @@ import { ConsoleLogger, Inject, Injectable, Scope } from '@nestjs/common';
import { isLogLevelEnabled } from '@nestjs/common/services/utils/is-log-level-enabled.util';
import { ClsService } from 'nestjs-cls';
import { Telemetry } from 'src/decorators';
import { LogLevel } from 'src/enum';
import { LogFormat, LogLevel } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';

type LogDetails = any;
@@ -27,10 +27,12 @@ export class MyConsoleLogger extends ConsoleLogger {

  constructor(
    private cls: ClsService | undefined,
    options?: { color?: boolean; context?: string },
    options?: { json?: boolean; color?: boolean; context?: string },
  ) {
    super(options?.context || MyConsoleLogger.name);
    this.isColorEnabled = options?.color || false;
    super(options?.context || MyConsoleLogger.name, {
      json: options?.json ?? false,
    });
    this.isColorEnabled = !options?.json && (options?.color || false);
  }

  isLevelEnabled(level: LogLevel) {
@@ -79,10 +81,17 @@ export class LoggingRepository {
    @Inject(ConfigRepository) configRepository: ConfigRepository | undefined,
  ) {
    let noColor = false;
    let logFormat = LogFormat.Console;
    if (configRepository) {
      noColor = configRepository.getEnv().noColor;
      const env = configRepository.getEnv();
      noColor = env.noColor;
      logFormat = env.logFormat ?? logFormat;
    }
    this.logger = new MyConsoleLogger(cls, { context: LoggingRepository.name, color: !noColor });
    this.logger = new MyConsoleLogger(cls, {
      context: LoggingRepository.name,
      json: logFormat === LogFormat.Json,
      color: !noColor,
    });
  }

  static create(context?: string) {
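The logging change threads an optional JSON format from the environment into the logger and disables color whenever JSON output is active. A simplified sketch of the option resolution, under the assumption that LogFormat carries Console and Json members as in the diff:

// Simplified reduction of the constructor logic above; not the actual API surface.
const resolveLoggerOptions = (env: { logFormat?: string; noColor: boolean }) => {
  const json = (env.logFormat ?? 'console') === 'json';
  return { json, color: !json && !env.noColor }; // JSON output forces color off
};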
server/src/repositories/media.repository.spec.ts (new file, 667 lines)
@@ -0,0 +1,667 @@
import sharp from 'sharp';
import { AssetFace } from 'src/database';
import { AssetEditAction, MirrorAxis } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { SourceType } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { BoundingBox } from 'src/repositories/machine-learning.repository';
import { MediaRepository } from 'src/repositories/media.repository';
import { checkFaceVisibility, checkOcrVisibility } from 'src/utils/editor';
import { automock } from 'test/utils';

const getPixelColor = async (buffer: Buffer, x: number, y: number) => {
  const metadata = await sharp(buffer).metadata();
  const width = metadata.width!;
  const { data } = await sharp(buffer).raw().toBuffer({ resolveWithObject: true });
  const idx = (y * width + x) * 4;
  return {
    r: data[idx],
    g: data[idx + 1],
    b: data[idx + 2],
  };
};

const buildTestQuadImage = async () => {
  // build a 4 quadrant image for testing mirroring
  const base = sharp({
    create: { width: 1000, height: 1000, channels: 3, background: { r: 0, g: 0, b: 0 } },
  }).png();

  const tl = await sharp({
    create: { width: 500, height: 500, channels: 3, background: { r: 255, g: 0, b: 0 } },
  })
    .png()
    .toBuffer();

  const tr = await sharp({
    create: { width: 500, height: 500, channels: 3, background: { r: 0, g: 255, b: 0 } },
  })
    .png()
    .toBuffer();

  const bl = await sharp({
    create: { width: 500, height: 500, channels: 3, background: { r: 0, g: 0, b: 255 } },
  })
    .png()
    .toBuffer();

  const br = await sharp({
    create: { width: 500, height: 500, channels: 3, background: { r: 255, g: 255, b: 0 } },
  })
    .png()
    .toBuffer();

  const image = base.composite([
    { input: tl, left: 0, top: 0 }, // top-left
    { input: tr, left: 500, top: 0 }, // top-right
    { input: bl, left: 0, top: 500 }, // bottom-left
    { input: br, left: 500, top: 500 }, // bottom-right
  ]);

  return image.png().toBuffer();
};

describe(MediaRepository.name, () => {
  let sut: MediaRepository;

  beforeEach(() => {
    // eslint-disable-next-line no-sparse-arrays
    sut = new MediaRepository(automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }));
  });

  describe('applyEdits (single actions)', () => {
    it('should apply crop edit correctly', async () => {
      const result = await sut['applyEdits'](
        sharp({
          create: {
            width: 1000,
            height: 1000,
            channels: 4,
            background: { r: 255, g: 0, b: 0, alpha: 0.5 },
          },
        }).png(),
        [
          {
            action: AssetEditAction.Crop,
            parameters: {
              x: 100,
              y: 200,
              width: 700,
              height: 300,
            },
          },
        ],
      );

      const metadata = await result.toBuffer().then((buf) => sharp(buf).metadata());
      expect(metadata.width).toBe(700);
      expect(metadata.height).toBe(300);
    });
    it('should apply rotate edit correctly', async () => {
      const result = await sut['applyEdits'](
        sharp({
          create: {
            width: 500,
            height: 1000,
            channels: 4,
            background: { r: 255, g: 0, b: 0, alpha: 0.5 },
          },
        }).png(),
        [
          {
            action: AssetEditAction.Rotate,
            parameters: {
              angle: 90,
            },
          },
        ],
      );

      const metadata = await result.toBuffer().then((buf) => sharp(buf).metadata());
      expect(metadata.width).toBe(1000);
      expect(metadata.height).toBe(500);
    });

    it('should apply mirror edit correctly', async () => {
      const resultHorizontal = await sut['applyEdits'](sharp(await buildTestQuadImage()), [
        {
          action: AssetEditAction.Mirror,
          parameters: {
            axis: MirrorAxis.Horizontal,
          },
        },
      ]);

      const bufferHorizontal = await resultHorizontal.toBuffer();
      const metadataHorizontal = await resultHorizontal.metadata();
      expect(metadataHorizontal.width).toBe(1000);
      expect(metadataHorizontal.height).toBe(1000);

      expect(await getPixelColor(bufferHorizontal, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
      expect(await getPixelColor(bufferHorizontal, 990, 10)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(bufferHorizontal, 10, 990)).toEqual({ r: 255, g: 255, b: 0 });
      expect(await getPixelColor(bufferHorizontal, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });

      const resultVertical = await sut['applyEdits'](sharp(await buildTestQuadImage()), [
        {
          action: AssetEditAction.Mirror,
          parameters: {
            axis: MirrorAxis.Vertical,
          },
        },
      ]);

      const bufferVertical = await resultVertical.toBuffer();
      const metadataVertical = await resultVertical.metadata();
      expect(metadataVertical.width).toBe(1000);
      expect(metadataVertical.height).toBe(1000);

      // top-left should now be bottom-left (blue)
      expect(await getPixelColor(bufferVertical, 10, 10)).toEqual({ r: 0, g: 0, b: 255 });
      // top-right should now be bottom-right (yellow)
      expect(await getPixelColor(bufferVertical, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
      // bottom-left should now be top-left (red)
      expect(await getPixelColor(bufferVertical, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
      // bottom-right should now be top-right (green)
      expect(await getPixelColor(bufferVertical, 990, 990)).toEqual({ r: 0, g: 255, b: 0 });
    });
  });

  describe('applyEdits (multiple sequential edits)', () => {
    it('should apply horizontal mirror then vertical mirror (equivalent to 180° rotation)', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(1000);
      expect(metadata.height).toBe(1000);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
      expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 0, b: 0 });
    });

    it('should apply rotate 90° then horizontal mirror', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(1000);
      expect(metadata.height).toBe(1000);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
      expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 255, b: 0 });
    });

    it('should apply 180° rotation', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Rotate, parameters: { angle: 180 } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(1000);
      expect(metadata.height).toBe(1000);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
      expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 0, b: 0 });
    });

    it('should apply 270° rotations', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Rotate, parameters: { angle: 270 } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(1000);
      expect(metadata.height).toBe(1000);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });
    });

    it('should apply crop then rotate 90°', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 1000, height: 500 } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(500);
      expect(metadata.height).toBe(1000);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
    });

    it('should apply rotate 90° then crop', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 1000 } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(1000);
      expect(metadata.height).toBe(500);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 0, b: 255 });
      expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 0, b: 0 });
    });

    it('should apply vertical mirror then horizontal mirror then rotate 90°', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(1000);
      expect(metadata.height).toBe(1000);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
      expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });
    });

    it('should apply crop to single quadrant then mirror', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 500 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(500);
      expect(metadata.height).toBe(500);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(buffer, 490, 10)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(buffer, 10, 490)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(buffer, 490, 490)).toEqual({ r: 255, g: 0, b: 0 });
    });

    it('should apply all operations: crop, rotate, mirror', async () => {
      const imageBuffer = await buildTestQuadImage();
      const result = await sut['applyEdits'](sharp(imageBuffer), [
        { action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 1000 } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
      ]);

      const buffer = await result.png().toBuffer();
      const metadata = await sharp(buffer).metadata();
      expect(metadata.width).toBe(1000);
      expect(metadata.height).toBe(500);

      expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
      expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
    });
  });

  describe('checkFaceVisibility', () => {
    const baseFace: AssetFace = {
      id: 'face-1',
      assetId: 'asset-1',
      personId: 'person-1',
      boundingBoxX1: 100,
      boundingBoxY1: 100,
      boundingBoxX2: 200,
      boundingBoxY2: 200,
      imageWidth: 1000,
      imageHeight: 800,
      sourceType: SourceType.MachineLearning,
      isVisible: true,
      updatedAt: new Date(),
      deletedAt: null,
      updateId: '',
    };

    const assetDimensions = { width: 1000, height: 800 };

    describe('with no crop edit', () => {
      it('should return only currently invisible faces when no crop is provided', () => {
        const visibleFace = { ...baseFace, id: 'face-visible', isVisible: true };
        const invisibleFace = { ...baseFace, id: 'face-invisible', isVisible: false };
        const faces = [visibleFace, invisibleFace];
        const result = checkFaceVisibility(faces, assetDimensions);

        expect(result.visible).toEqual([invisibleFace]);
        expect(result.hidden).toEqual([]);
      });

      it('should return empty arrays when all faces are already visible and no crop is provided', () => {
        const faces = [baseFace];
        const result = checkFaceVisibility(faces, assetDimensions);

        expect(result.visible).toEqual([]);
        expect(result.hidden).toEqual([]);
      });

      it('should return all faces when all are invisible and no crop is provided', () => {
        const face1 = { ...baseFace, id: 'face-1', isVisible: false };
        const face2 = { ...baseFace, id: 'face-2', isVisible: false };
        const faces = [face1, face2];
        const result = checkFaceVisibility(faces, assetDimensions);

        expect(result.visible).toEqual([face1, face2]);
        expect(result.hidden).toEqual([]);
      });
    });

    describe('with crop edit', () => {
      it('should mark face as visible when fully inside crop area', () => {
        const crop: BoundingBox = { x1: 0, y1: 0, x2: 500, y2: 400 };
        const faces = [baseFace];
        const result = checkFaceVisibility(faces, assetDimensions, crop);

        expect(result.visible).toEqual(faces);
        expect(result.hidden).toEqual([]);
      });

      it('should mark face as hidden when only partially inside crop area', () => {
        const crop: BoundingBox = { x1: 150, y1: 150, x2: 650, y2: 550 };
        // Face at (100,100)-(200,200), crop starts at (150,150)
        // Overlap: (150,150)-(200,200) = 50x50 = 2500
        // Face area: 100x100 = 10000
        // Overlap percentage: 25% - should be hidden
        const faces = [baseFace];
        const result = checkFaceVisibility(faces, assetDimensions, crop);

        expect(result.visible).toEqual([]);
        expect(result.hidden).toEqual(faces);
      });

      it('should mark face as hidden when less than 50% inside crop area', () => {
        const crop: BoundingBox = { x1: 250, y1: 250, x2: 750, y2: 650 };
        // Face completely outside crop area
        const faces = [baseFace];
        const result = checkFaceVisibility(faces, assetDimensions, crop);

        expect(result.visible).toEqual([]);
        expect(result.hidden).toEqual(faces);
      });

      it('should mark face as hidden when completely outside crop area', () => {
        const crop: BoundingBox = { x1: 500, y1: 500, x2: 700, y2: 700 };
        const faces = [baseFace];
        const result = checkFaceVisibility(faces, assetDimensions, crop);

        expect(result.visible).toEqual([]);
        expect(result.hidden).toEqual(faces);
      });

      it('should handle multiple faces with mixed visibility', () => {
        const crop: BoundingBox = { x1: 0, y1: 0, x2: 300, y2: 300 };
        const faceInside: AssetFace = {
          ...baseFace,
          id: 'face-inside',
          boundingBoxX1: 50,
          boundingBoxY1: 50,
          boundingBoxX2: 150,
          boundingBoxY2: 150,
        };
        const faceOutside: AssetFace = {
          ...baseFace,
          id: 'face-outside',
          boundingBoxX1: 400,
          boundingBoxY1: 400,
          boundingBoxX2: 500,
          boundingBoxY2: 500,
        };
        const faces = [faceInside, faceOutside];
        const result = checkFaceVisibility(faces, assetDimensions, crop);

        expect(result.visible).toEqual([faceInside]);
        expect(result.hidden).toEqual([faceOutside]);
      });

      it('should handle face at exactly 50% overlap threshold', () => {
        // Face at (0,0)-(100,100), crop at (50,0)-(150,100)
        // Overlap: (50,0)-(100,100) = 50x100 = 5000
        // Face area: 100x100 = 10000
        // Overlap percentage: 50% - exactly at threshold, should be visible
        const faceAtEdge: AssetFace = {
          ...baseFace,
          id: 'face-edge',
          boundingBoxX1: 0,
          boundingBoxY1: 0,
          boundingBoxX2: 100,
          boundingBoxY2: 100,
        };
        const crop: BoundingBox = { x1: 50, y1: 0, x2: 150, y2: 100 };
        const faces = [faceAtEdge];
        const result = checkFaceVisibility(faces, assetDimensions, crop);

        expect(result.visible).toEqual([faceAtEdge]);
        expect(result.hidden).toEqual([]);
      });
    });
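The comments in these tests spell out the visibility rule: a face stays visible when at least half of its box area overlaps the crop. A minimal reduction of that threshold check (the real checkFaceVisibility lives in src/utils/editor; this sketch only illustrates the 50% rule):

type Box = { x1: number; y1: number; x2: number; y2: number };

const isAtLeastHalfInside = (face: Box, crop: Box): boolean => {
  const overlapW = Math.max(0, Math.min(face.x2, crop.x2) - Math.max(face.x1, crop.x1));
  const overlapH = Math.max(0, Math.min(face.y2, crop.y2) - Math.max(face.y1, crop.y1));
  const faceArea = (face.x2 - face.x1) * (face.y2 - face.y1);
  // >= keeps the exact-50% case visible, matching the threshold test above
  return faceArea > 0 && (overlapW * overlapH) / faceArea >= 0.5;
};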

    describe('with scaled dimensions', () => {
      it('should handle faces when asset dimensions differ from face image dimensions', () => {
        // Face stored at 1000x800 resolution, but displaying at 500x400
        const scaledDimensions = { width: 500, height: 400 };
        const crop: BoundingBox = { x1: 0, y1: 0, x2: 250, y2: 200 };
        // Face at (100,100)-(200,200) on 1000x800
        // Scaled to 500x400: (50,50)-(100,100)
        // Crop at (0,0)-(250,200) - face is fully inside
        const faces = [baseFace];
        const result = checkFaceVisibility(faces, scaledDimensions, crop);

        expect(result.visible).toEqual(faces);
        expect(result.hidden).toEqual([]);
      });
    });
  });

  describe('checkOcrVisibility', () => {
    const baseOcr: AssetOcrResponseDto & { isVisible: boolean } = {
      id: 'ocr-1',
      assetId: 'asset-1',
      x1: 0.1,
      y1: 0.1,
      x2: 0.2,
      y2: 0.1,
      x3: 0.2,
      y3: 0.2,
      x4: 0.1,
      y4: 0.2,
      boxScore: 0.9,
      textScore: 0.85,
      text: 'Test OCR',
      isVisible: false,
    };

    const assetDimensions = { width: 1000, height: 800 };

    describe('with no crop edit', () => {
      it('should return only currently invisible OCR items when no crop is provided', () => {
        const visibleOcr = { ...baseOcr, id: 'ocr-visible', isVisible: true };
        const invisibleOcr = { ...baseOcr, id: 'ocr-invisible', isVisible: false };
        const ocrs = [visibleOcr, invisibleOcr];
        const result = checkOcrVisibility(ocrs, assetDimensions);

        expect(result.visible).toEqual([invisibleOcr]);
        expect(result.hidden).toEqual([]);
      });

      it('should return empty arrays when all OCR items are already visible and no crop is provided', () => {
        const visibleOcr = { ...baseOcr, isVisible: true };
        const ocrs = [visibleOcr];
        const result = checkOcrVisibility(ocrs, assetDimensions);

        expect(result.visible).toEqual([]);
        expect(result.hidden).toEqual([]);
      });

      it('should return all OCR items when all are invisible and no crop is provided', () => {
        const ocr1 = { ...baseOcr, id: 'ocr-1', isVisible: false };
        const ocr2 = { ...baseOcr, id: 'ocr-2', isVisible: false };
        const ocrs = [ocr1, ocr2];
        const result = checkOcrVisibility(ocrs, assetDimensions);

        expect(result.visible).toEqual([ocr1, ocr2]);
        expect(result.hidden).toEqual([]);
      });
    });

    describe('with crop edit', () => {
      it('should mark OCR as visible when fully inside crop area', () => {
        const crop: BoundingBox = { x1: 0, y1: 0, x2: 500, y2: 400 };
        // OCR box: (0.1,0.1)-(0.2,0.2) on 1000x800 = (100,80)-(200,160)
        // Crop: (0,0)-(500,400) - OCR fully inside
        const ocrs = [baseOcr];
        const result = checkOcrVisibility(ocrs, assetDimensions, crop);

        expect(result.visible).toEqual(ocrs);
        expect(result.hidden).toEqual([]);
      });

      it('should mark OCR as hidden when completely outside crop area', () => {
        const crop: BoundingBox = { x1: 500, y1: 500, x2: 700, y2: 700 };
        // OCR box: (100,80)-(200,160) - completely outside crop
        const ocrs = [baseOcr];
        const result = checkOcrVisibility(ocrs, assetDimensions, crop);

        expect(result.visible).toEqual([]);
        expect(result.hidden).toEqual(ocrs);
      });

      it('should mark OCR as hidden when less than 50% inside crop area', () => {
        const crop: BoundingBox = { x1: 150, y1: 120, x2: 650, y2: 520 };
        // OCR box: (100,80)-(200,160)
        // Crop: (150,120)-(650,520)
        // Overlap: (150,120)-(200,160) = 50x40 = 2000
        // OCR area: 100x80 = 8000
        // Overlap percentage: 25% - should be hidden
        const ocrs = [baseOcr];
        const result = checkOcrVisibility(ocrs, assetDimensions, crop);

        expect(result.visible).toEqual([]);
        expect(result.hidden).toEqual(ocrs);
      });

      it('should handle multiple OCR items with mixed visibility', () => {
        const crop: BoundingBox = { x1: 0, y1: 0, x2: 300, y2: 300 };
        const ocrInside = {
          ...baseOcr,
          id: 'ocr-inside',
        };
        const ocrOutside = {
          ...baseOcr,
          id: 'ocr-outside',
          x1: 0.5,
          y1: 0.5,
          x2: 0.6,
          y2: 0.5,
          x3: 0.6,
          y3: 0.6,
          x4: 0.5,
          y4: 0.6,
        };
        const ocrs = [ocrInside, ocrOutside];
        const result = checkOcrVisibility(ocrs, assetDimensions, crop);

        expect(result.visible).toEqual([ocrInside]);
        expect(result.hidden).toEqual([ocrOutside]);
      });

      it('should handle OCR boxes with rotated/skewed polygons', () => {
        // OCR with a rotated bounding box (not axis-aligned)
        const rotatedOcr = {
          ...baseOcr,
          id: 'ocr-rotated',
          x1: 0.15,
          y1: 0.1,
          x2: 0.25,
          y2: 0.15,
          x3: 0.2,
          y3: 0.25,
          x4: 0.1,
          y4: 0.2,
        };
        const crop: BoundingBox = { x1: 0, y1: 0, x2: 300, y2: 300 };
        const ocrs = [rotatedOcr];
        const result = checkOcrVisibility(ocrs, assetDimensions, crop);

        expect(result.visible).toEqual([rotatedOcr]);
        expect(result.hidden).toEqual([]);
      });
    });

    describe('visibility is only affected by crop (not rotate or mirror)', () => {
      it('should keep all OCR items visible when there is no crop regardless of other transforms', () => {
        // Rotate and mirror edits don't affect visibility - only crop does
        // The visibility functions only take an optional crop parameter
        const ocrs = [baseOcr];

        // Without any crop, all OCR items remain visible
        const result = checkOcrVisibility(ocrs, assetDimensions);

        expect(result.visible).toEqual(ocrs);
        expect(result.hidden).toEqual([]);
      });

      it('should only consider crop for visibility calculation', () => {
        // Even if the image will be rotated/mirrored, visibility is determined
        // solely by whether the OCR box overlaps with the crop area
        const crop: BoundingBox = { x1: 0, y1: 0, x2: 300, y2: 300 };

        const ocrInsideCrop = {
          ...baseOcr,
          id: 'ocr-inside',
          // OCR at (0.1,0.1)-(0.2,0.2) = (100,80)-(200,160) on 1000x800, inside crop
        };

        const ocrOutsideCrop = {
          ...baseOcr,
          id: 'ocr-outside',
          x1: 0.5,
          y1: 0.5,
          x2: 0.6,
          y2: 0.5,
          x3: 0.6,
          y3: 0.6,
          x4: 0.5,
          y4: 0.6,
          // OCR at (500,400)-(600,480) on 1000x800, outside crop
        };

        const ocrs = [ocrInsideCrop, ocrOutsideCrop];
        const result = checkOcrVisibility(ocrs, assetDimensions, crop);

        // OCR inside crop area is visible, OCR outside is hidden
        // This is true regardless of any subsequent rotate/mirror operations
        expect(result.visible).toEqual([ocrInsideCrop]);
        expect(result.hidden).toEqual([ocrOutsideCrop]);
      });
    });
  });
});

@@ -7,6 +7,7 @@ import { Writable } from 'node:stream';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Exif } from 'src/database';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { Colorspace, LogLevel, RawExtractedFormat } from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import {
@@ -19,6 +20,7 @@ import {
  VideoInfo,
} from 'src/types';
import { handlePromiseError } from 'src/utils/misc';
import { createAffineMatrix } from 'src/utils/transform';

const probe = (input: string, options: string[]): Promise<FfprobeData> =>
  new Promise((resolve, reject) =>
@@ -138,21 +140,48 @@ export class MediaRepository {
    }
  }

  decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
    return this.getImageDecodingPipeline(input, options).raw().toBuffer({ resolveWithObject: true });
  async decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
    const pipeline = await this.getImageDecodingPipeline(input, options);
    return pipeline.raw().toBuffer({ resolveWithObject: true });
  }

  private async applyEdits(pipeline: sharp.Sharp, edits: AssetEditActionItem[]): Promise<sharp.Sharp> {
    const affineEditOperations = edits.filter((edit) => edit.action !== 'crop');
    const matrix = createAffineMatrix(affineEditOperations);

    const crop = edits.find((edit) => edit.action === 'crop');
    const dimensions = await pipeline.metadata();

    if (crop) {
      pipeline = pipeline.extract({
        left: crop ? Math.round(crop.parameters.x) : 0,
        top: crop ? Math.round(crop.parameters.y) : 0,
        width: crop ? Math.round(crop.parameters.width) : dimensions.width || 0,
        height: crop ? Math.round(crop.parameters.height) : dimensions.height || 0,
      });
    }

    const { a, b, c, d } = matrix;
    pipeline = pipeline.affine([
      [a, b],
      [c, d],
    ]);

    return pipeline;
  }
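applyEdits folds every non-crop edit into a single 2x2 matrix via createAffineMatrix, so a chain of rotates and mirrors costs one resampling pass instead of several. One plausible composition, sketched under the assumption that the matrix is built by multiplying per-edit matrices (the real helper lives in src/utils/transform; these shapes are assumptions):

type Matrix = { a: number; b: number; c: number; d: number };

// Compose two 2x2 matrices; repeated over the edit list this yields one transform.
const multiply = (m: Matrix, n: Matrix): Matrix => ({
  a: m.a * n.a + m.b * n.c,
  b: m.a * n.b + m.b * n.d,
  c: m.c * n.a + m.d * n.c,
  d: m.c * n.b + m.d * n.d,
});

const rotation = (deg: number): Matrix => {
  const rad = (deg * Math.PI) / 180;
  return { a: Math.cos(rad), b: -Math.sin(rad), c: Math.sin(rad), d: Math.cos(rad) };
};

const mirrorHorizontal: Matrix = { a: -1, b: 0, c: 0, d: 1 }; // flip left/right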

  async generateThumbnail(input: string | Buffer, options: GenerateThumbnailOptions, output: string): Promise<void> {
    await this.getImageDecodingPipeline(input, options)
      .toFormat(options.format, {
        quality: options.quality,
        // this is default in libvips (except the threshold is 90), but we need to set it manually in sharp
        chromaSubsampling: options.quality >= 80 ? '4:4:4' : '4:2:0',
      })
      .toFile(output);
    const pipeline = await this.getImageDecodingPipeline(input, options);
    const decoded = pipeline.toFormat(options.format, {
      quality: options.quality,
      // this is default in libvips (except the threshold is 90), but we need to set it manually in sharp
      chromaSubsampling: options.quality >= 80 ? '4:4:4' : '4:2:0',
    });

    await decoded.toFile(output);
  }

  private getImageDecodingPipeline(input: string | Buffer, options: DecodeToBufferOptions) {
  private async getImageDecodingPipeline(input: string | Buffer, options: DecodeToBufferOptions) {
    let pipeline = sharp(input, {
      // some invalid images can still be processed by sharp, but we want to fail on them by default to avoid crashes
      failOn: options.processInvalidImages ? 'none' : 'error',
@@ -175,8 +204,8 @@ export class MediaRepository {
      }
    }

    if (options.crop) {
      pipeline = pipeline.extract(options.crop);
    if (options.edits && options.edits.length > 0) {
      pipeline = await this.applyEdits(pipeline, options.edits);
    }

    if (options.size !== undefined) {
@@ -186,14 +215,20 @@ export class MediaRepository {
  }

  async generateThumbhash(input: string | Buffer, options: GenerateThumbhashOptions): Promise<Buffer> {
    const [{ rgbaToThumbHash }, { data, info }] = await Promise.all([
    const [{ rgbaToThumbHash }, decodingPipeline] = await Promise.all([
      import('thumbhash'),
      sharp(input, options)
        .resize(100, 100, { fit: 'inside', withoutEnlargement: true })
        .raw()
        .ensureAlpha()
        .toBuffer({ resolveWithObject: true }),
      this.getImageDecodingPipeline(input, {
        colorspace: options.colorspace,
        processInvalidImages: options.processInvalidImages,
        raw: options.raw,
        edits: options.edits,
      }),
    ]);

    const pipeline = decodingPipeline.resize(100, 100, { fit: 'inside', withoutEnlargement: true }).raw().ensureAlpha();

    const { data, info } = await pipeline.toBuffer({ resolveWithObject: true });

    return Buffer.from(rgbaToThumbHash(info.width, info.height, data));
  }

@@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
import { Insertable, Kysely, sql } from 'kysely';
import { InjectKysely } from 'nestjs-kysely';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { DB } from 'src/schema';
import { AssetOcrTable } from 'src/schema/tables/asset-ocr.table';

@@ -15,8 +16,15 @@ export class OcrRepository {
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getByAssetId(id: string) {
    return this.db.selectFrom('asset_ocr').selectAll('asset_ocr').where('asset_ocr.assetId', '=', id).execute();
  getByAssetId(id: string, options?: { isVisible?: boolean }) {
    const isVisible = options === undefined ? true : options.isVisible;

    return this.db
      .selectFrom('asset_ocr')
      .selectAll('asset_ocr')
      .where('asset_ocr.assetId', '=', id)
      .$if(isVisible !== undefined, (qb) => qb.where('asset_ocr.isVisible', '=', isVisible!))
      .execute();
  }

  deleteAll() {
@@ -65,4 +73,40 @@ export class OcrRepository {

    return query.selectNoFrom(sql`1`.as('dummy')).execute();
  }

  @GenerateSql({ params: [DummyValue.UUID, [], []] })
  async updateOcrVisibilities(
    assetId: string,
    visible: AssetOcrResponseDto[],
    hidden: AssetOcrResponseDto[],
  ): Promise<void> {
    await this.db.transaction().execute(async (trx) => {
      if (visible.length > 0) {
        await trx
          .updateTable('asset_ocr')
          .set({ isVisible: true })
          .where(
            'asset_ocr.id',
            'in',
            visible.map((i) => i.id),
          )
          .execute();
      }

      if (hidden.length > 0) {
        await trx
          .updateTable('asset_ocr')
          .set({ isVisible: false })
          .where(
            'asset_ocr.id',
            'in',
            hidden.map((i) => i.id),
          )
          .execute();
      }

      const searchText = visible.map((item) => item.text.trim()).join(' ');
      await trx.updateTable('ocr_search').set({ text: searchText }).where('assetId', '=', assetId).execute();
    });
  }
}
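updateOcrVisibilities flips the per-item flags and rebuilds the ocr_search text from only the still-visible items, all in one transaction, so full-text search never lags the visibility state. An illustrative call (the inputs would come from checkOcrVisibility in practice):

// Hypothetical wiring: visibleItems/hiddenItems as computed by checkOcrVisibility.
await ocrRepository.updateOcrVisibilities(assetId, visibleItems, hiddenItems);
// Afterwards, ocr_search.text holds the concatenated text of visibleItems only.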

@@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
import { ExpressionBuilder, Insertable, Kysely, NotNull, Selectable, sql, Updateable } from 'kysely';
import { jsonObjectFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { AssetFace } from 'src/database';
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetVisibility, SourceType } from 'src/enum';
import { DB } from 'src/schema';
@@ -121,6 +122,7 @@ export class PersonRepository {
      .$if(!!options.sourceType, (qb) => qb.where('asset_face.sourceType', '=', options.sourceType!))
      .$if(!!options.assetId, (qb) => qb.where('asset_face.assetId', '=', options.assetId!))
      .where('asset_face.deletedAt', 'is', null)
      .where('asset_face.isVisible', 'is', true)
      .stream();
  }

@@ -160,6 +162,7 @@ export class PersonRepository {
      )
      .where('person.ownerId', '=', userId)
      .where('asset_face.deletedAt', 'is', null)
      .where('asset_face.isVisible', 'is', true)
      .orderBy('person.isHidden', 'asc')
      .orderBy('person.isFavorite', 'desc')
      .having((eb) =>
@@ -208,19 +211,23 @@ export class PersonRepository {
      .selectAll('person')
      .leftJoin('asset_face', 'asset_face.personId', 'person.id')
      .where('asset_face.deletedAt', 'is', null)
      .where('asset_face.isVisible', 'is', true)
      .having((eb) => eb.fn.count('asset_face.assetId'), '=', 0)
      .groupBy('person.id')
      .execute();
  }

  @GenerateSql({ params: [DummyValue.UUID] })
  getFaces(assetId: string) {
  getFaces(assetId: string, options?: { isVisible?: boolean }) {
    const isVisible = options === undefined ? true : options.isVisible;

    return this.db
      .selectFrom('asset_face')
      .selectAll('asset_face')
      .select(withPerson)
      .where('asset_face.assetId', '=', assetId)
      .where('asset_face.deletedAt', 'is', null)
      .$if(isVisible !== undefined, (qb) => qb.where('asset_face.isVisible', '=', isVisible!))
      .orderBy('asset_face.boundingBoxX1', 'asc')
      .execute();
  }
@@ -350,6 +357,7 @@ export class PersonRepository {
      )
      .select((eb) => eb.fn.count(eb.fn('distinct', ['asset.id'])).as('count'))
      .where('asset_face.deletedAt', 'is', null)
      .where('asset_face.isVisible', 'is', true)
      .executeTakeFirst();

    return {
@@ -368,6 +376,7 @@ export class PersonRepository {
        .selectFrom('asset_face')
        .whereRef('asset_face.personId', '=', 'person.id')
        .where('asset_face.deletedAt', 'is', null)
        .where('asset_face.isVisible', '=', true)
        .where((eb) =>
          eb.exists((eb) =>
            eb
@@ -495,6 +504,7 @@ export class PersonRepository {
      .selectAll('asset_face')
      .where('asset_face.personId', '=', personId)
      .where('asset_face.deletedAt', 'is', null)
      .where('asset_face.isVisible', 'is', true)
      .executeTakeFirst();
  }

@@ -539,4 +549,37 @@ export class PersonRepository {
    }
    return this.db.selectFrom('person').select(['id', 'thumbnailPath']).where('id', 'in', ids).execute();
  }

  @GenerateSql({ params: [[], []] })
  async updateVisibility(visible: AssetFace[], hidden: AssetFace[]): Promise<void> {
    if (visible.length === 0 && hidden.length === 0) {
      return;
    }

    await this.db.transaction().execute(async (trx) => {
      if (visible.length > 0) {
        await trx
          .updateTable('asset_face')
          .set({ isVisible: true })
          .where(
            'asset_face.id',
            'in',
            visible.map(({ id }) => id),
          )
          .execute();
      }

      if (hidden.length > 0) {
        await trx
          .updateTable('asset_face')
          .set({ isVisible: false })
          .where(
            'asset_face.id',
            'in',
            hidden.map(({ id }) => id),
          )
          .execute();
      }
    });
  }
}
server/src/repositories/process.repository.spec.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
import { ChildProcessWithoutNullStreams } from 'node:child_process';
|
||||
import { Readable, Writable } from 'node:stream';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import { ProcessRepository } from 'src/repositories/process.repository';
|
||||
|
||||
function* data() {
|
||||
yield 'Hello, world!';
|
||||
}
|
||||
|
||||
describe(ProcessRepository.name, () => {
|
||||
let sut: ProcessRepository;
|
||||
let sink: Writable;
|
||||
|
||||
beforeAll(() => {
|
||||
sut = new ProcessRepository();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
sink = new Writable({
|
||||
write(_chunk, _encoding, callback) {
|
||||
callback();
|
||||
},
|
||||
|
||||
final(callback) {
|
||||
callback();
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
describe('createSpawnDuplexStream', () => {
|
||||
it('should work (drain to stdout)', async () => {
|
||||
const process = sut.spawnDuplexStream('bash', ['-c', 'exit 0']);
|
||||
await pipeline(process, sink);
|
||||
});
|
||||
|
||||
it('should throw on non-zero exit code', async () => {
|
||||
const process = sut.spawnDuplexStream('bash', ['-c', 'echo "error message" >&2; exit 1']);
|
||||
await expect(pipeline(process, sink)).rejects.toThrowErrorMatchingInlineSnapshot(`
|
||||
[Error: bash non-zero exit code (1)
|
||||
error message
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('should accept stdin / output stdout', async () => {
|
||||
let output = '';
|
||||
const sink = new Writable({
|
||||
write(chunk, _encoding, callback) {
|
||||
output += chunk;
|
||||
callback();
|
||||
},
|
||||
|
||||
final(callback) {
|
||||
callback();
|
||||
},
|
||||
});
|
||||
|
||||
const echoProcess = sut.spawnDuplexStream('cat');
|
||||
await pipeline(Readable.from(data()), echoProcess, sink);
|
||||
expect(output).toBe('Hello, world!');
|
||||
});
|
||||
|
||||
it('should drain stdin on process exit', async () => {
|
||||
let resolve1: () => void;
|
||||
let resolve2: () => void;
|
||||
const promise1 = new Promise<void>((r) => (resolve1 = r));
|
||||
const promise2 = new Promise<void>((r) => (resolve2 = r));
|
||||
|
||||
async function* data() {
|
||||
yield 'Hello, world!';
|
||||
await promise1;
|
||||
await promise2;
|
||||
yield 'Write after stdin close / process exit!';
|
||||
}
|
||||
|
||||
const process = sut.spawnDuplexStream('bash', ['-c', 'exit 0']);
|
||||
|
||||
const realProcess = (process as never as { _process: ChildProcessWithoutNullStreams })._process;
|
||||
realProcess.on('close', () => setImmediate(() => resolve1()));
|
||||
realProcess.stdin.on('close', () => setImmediate(() => resolve2()));
|
||||
|
||||
await pipeline(Readable.from(data()), process);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,9 +1,110 @@
import { Injectable } from '@nestjs/common';
import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
import { ChildProcessWithoutNullStreams, fork, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
import { Duplex } from 'node:stream';

@Injectable()
export class ProcessRepository {
  spawn(command: string, args: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
  spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
    return spawn(command, args, options);
  }

  spawnDuplexStream(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): Duplex {
    let stdinClosed = false;
    let drainCallback: undefined | (() => void);

    const process = this.spawn(command, args, options);
    const duplex = new Duplex({
      // duplex -> stdin
      write(chunk, encoding, callback) {
        // drain the input if process dies
        if (stdinClosed) {
          return callback();
        }

        // handle stream backpressure
        if (process.stdin.write(chunk, encoding)) {
          callback();
        } else {
          drainCallback = callback;
          process.stdin.once('drain', () => {
            drainCallback = undefined;
            callback();
          });
        }
      },

      read() {
        // no-op
      },

      final(callback) {
        if (stdinClosed) {
          callback();
        } else {
          process.stdin.end(callback);
        }
      },
    });

    // stdout -> duplex
    process.stdout.on('data', (chunk) => {
      // handle stream backpressure
      if (!duplex.push(chunk)) {
        process.stdout.pause();
      }
    });

    duplex.on('resume', () => process.stdout.resume());

    // end handling
    let stdoutClosed = false;
    function close(error?: Error) {
      stdinClosed = true;

      if (error) {
        duplex.destroy(error);
      } else if (stdoutClosed && typeof process.exitCode === 'number') {
        duplex.push(null);
      }
    }

    process.stdout.on('close', () => {
      stdoutClosed = true;
      close();
    });

    // error handling
    process.on('error', close);
    process.stdout.on('error', close);
    process.stdin.on('error', (error) => {
      if ((error as { code?: 'EPIPE' })?.code === 'EPIPE') {
        try {
          drainCallback!();
        } catch (error) {
          close(error as Error);
        }
      } else {
        close(error);
      }
    });

    let stderr = '';
    process.stderr.on('data', (chunk) => (stderr += chunk));

    process.on('exit', (code) => {
      console.info(`${command} exited (${code})`);

      if (code === 0) {
        close();
      } else {
        close(new Error(`${command} non-zero exit code (${code})\n${stderr}`));
      }
    });

    return Object.assign(duplex, { _process: process });
  }

  fork(...args: Parameters<typeof fork>): ReturnType<typeof fork> {
    return fork(...args);
  }
}
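The wrapper above turns a child process into an ordinary Duplex, so it composes with stream.pipeline, which handles backpressure and surfaces a non-zero exit as a rejection. A minimal usage sketch (sha256sum is a hypothetical example command assumed to be on PATH, not something this diff uses):

import { Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { ProcessRepository } from 'src/repositories/process.repository';

async function hashExample() {
  const repo = new ProcessRepository();
  let digest = '';
  const sink = new Writable({
    write(chunk, _encoding, callback) {
      digest += chunk;
      callback();
    },
  });

  try {
    // sha256sum reads stdin until EOF, then writes the digest to stdout
    await pipeline(Readable.from(['hello world']), repo.spawnDuplexStream('sha256sum'), sink);
    console.info(digest.trim());
  } catch (error) {
    // a failing command rejects here as "<command> non-zero exit code (N)\n<stderr>"
    console.error(error);
  }
}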
@@ -12,6 +12,7 @@ import { SharedLinkTable } from 'src/schema/tables/shared-link.table';

export type SharedLinkSearchOptions = {
  userId: string;
  id?: string;
  albumId?: string;
};

@@ -118,7 +119,7 @@ export class SharedLinkRepository {
  }

  @GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
  getAll({ userId, albumId }: SharedLinkSearchOptions) {
  getAll({ userId, id, albumId }: SharedLinkSearchOptions) {
    return this.db
      .selectFrom('shared_link')
      .selectAll('shared_link')
@@ -176,6 +177,7 @@ export class SharedLinkRepository {
      .select((eb) => eb.fn.toJson('album').$castTo<Album | null>().as('album'))
      .where((eb) => eb.or([eb('shared_link.type', '=', SharedLinkType.Individual), eb('album.id', 'is not', null)]))
      .$if(!!albumId, (eb) => eb.where('shared_link.albumId', '=', albumId!))
      .$if(!!id, (eb) => eb.where('shared_link.id', '=', id!))
      .orderBy('shared_link.createdAt', 'desc')
      .distinctOn(['shared_link.createdAt'])
      .execute();
@@ -5,7 +5,8 @@ import { escapePath, glob, globStream } from 'fast-glob';
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync, ReadOptionsWithBuffer } from 'node:fs';
import fs from 'node:fs/promises';
import path from 'node:path';
import { Readable, Writable } from 'node:stream';
import { PassThrough, Readable, Writable } from 'node:stream';
import { createGunzip, createGzip } from 'node:zlib';
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { mimeTypes } from 'src/utils/mime-types';
@@ -93,6 +94,18 @@ export class StorageRepository {
    return { stream: archive, addFile, finalize };
  }

  createGzip(): PassThrough {
    return createGzip();
  }

  createGunzip(): PassThrough {
    return createGunzip();
  }

  createPlainReadStream(filepath: string): Readable {
    return createReadStream(filepath);
  }

  async createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream> {
    const { size } = await fs.stat(filepath);
    await fs.access(filepath, constants.R_OK);
@@ -483,6 +483,7 @@ class AssetFaceSync extends BaseSync {
      ])
      .leftJoin('asset', 'asset.id', 'asset_face.assetId')
      .where('asset.ownerId', '=', options.userId)
      .where('asset_face.isVisible', '=', true)
      .stream();
  }
}

@@ -37,6 +37,7 @@ export interface ClientEventMap {

  AssetUploadReadyV1: [{ asset: SyncAssetV1; exif: SyncAssetExifV1 }];
  AppRestartV1: [AppRestartEvent];
  AssetEditReadyV1: [{ asset: SyncAssetV1 }];
}

export type AuthFn = (client: Socket) => Promise<AuthDto>;
@@ -255,3 +255,34 @@ export const asset_face_audit = registerFunction({
    RETURN NULL;
  END`,
});

export const asset_edit_insert = registerFunction({
  name: 'asset_edit_insert',
  returnType: 'TRIGGER',
  language: 'PLPGSQL',
  body: `
  BEGIN
    UPDATE asset
    SET "isEdited" = true
    FROM inserted_edit
    WHERE asset.id = inserted_edit."assetId" AND NOT asset."isEdited";
    RETURN NULL;
  END
  `,
});

export const asset_edit_delete = registerFunction({
  name: 'asset_edit_delete',
  returnType: 'TRIGGER',
  language: 'PLPGSQL',
  body: `
  BEGIN
    UPDATE asset
    SET "isEdited" = false
    FROM deleted_edit
    WHERE asset.id = deleted_edit."assetId" AND asset."isEdited"
      AND NOT EXISTS (SELECT FROM asset_edit edit WHERE edit."assetId" = asset.id);
    RETURN NULL;
  END
  `,
});
@@ -28,6 +28,7 @@ import { AlbumUserTable } from 'src/schema/tables/album-user.table';
import { AlbumTable } from 'src/schema/tables/album.table';
import { ApiKeyTable } from 'src/schema/tables/api-key.table';
import { AssetAuditTable } from 'src/schema/tables/asset-audit.table';
import { AssetEditTable } from 'src/schema/tables/asset-edit.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFaceAuditTable } from 'src/schema/tables/asset-face-audit.table';
import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
@@ -86,6 +87,7 @@ export class ImmichDatabase {
    AlbumTable,
    ApiKeyTable,
    AssetAuditTable,
    AssetEditTable,
    AssetFaceTable,
    AssetFaceAuditTable,
    AssetMetadataTable,
@@ -179,6 +181,7 @@ export interface DB {

  asset: AssetTable;
  asset_audit: AssetAuditTable;
  asset_edit: AssetEditTable;
  asset_exif: AssetExifTable;
  asset_face: AssetFaceTable;
  asset_face_audit: AssetFaceAuditTable;
@@ -0,0 +1,28 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset" ADD COLUMN "width" integer;`.execute(db);
  await sql`ALTER TABLE "asset" ADD COLUMN "height" integer;`.execute(db);

  // Populate width and height from exif data with orientation-aware swapping
  await sql`
    UPDATE "asset"
    SET
      "width" = CASE
        WHEN "asset_exif"."orientation" IN ('5', '6', '7', '8', '-90', '90') THEN "asset_exif"."exifImageHeight"
        ELSE "asset_exif"."exifImageWidth"
      END,
      "height" = CASE
        WHEN "asset_exif"."orientation" IN ('5', '6', '7', '8', '-90', '90') THEN "asset_exif"."exifImageWidth"
        ELSE "asset_exif"."exifImageHeight"
      END
    FROM "asset_exif"
    WHERE "asset"."id" = "asset_exif"."assetId"
      AND ("asset_exif"."exifImageWidth" IS NOT NULL OR "asset_exif"."exifImageHeight" IS NOT NULL)
  `.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset" DROP COLUMN "width";`.execute(db);
  await sql`ALTER TABLE "asset" DROP COLUMN "height";`.execute(db);
}
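EXIF orientations 5-8 (and the +/-90 degree variants stored here) encode a 90-degree rotation, so the stored pixel dimensions are transposed relative to the displayed image; the CASE expressions above swap them. A small TypeScript sketch of the same rule (a hypothetical mirror of the SQL, not the actual getDimensions helper referenced later):

// Hypothetical mirror of the migration's CASE logic; treat as an illustration.
const ROTATED_ORIENTATIONS = new Set(['5', '6', '7', '8', '-90', '90']);

function getDisplayDimensions(exif: {
  orientation: string | null;
  exifImageWidth: number | null;
  exifImageHeight: number | null;
}): { width: number | null; height: number | null } {
  // rotated orientations swap width and height
  if (exif.orientation && ROTATED_ORIENTATIONS.has(exif.orientation)) {
    return { width: exif.exifImageHeight, height: exif.exifImageWidth };
  }
  return { width: exif.exifImageWidth, height: exif.exifImageHeight };
}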
@@ -0,0 +1,22 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`
    CREATE TABLE "asset_edit" (
      "id" uuid NOT NULL DEFAULT uuid_generate_v4(),
      "assetId" uuid NOT NULL,
      "action" varchar NOT NULL,
      "parameters" jsonb NOT NULL
    );
  `.execute(db);

  await sql`ALTER TABLE "asset_edit" ADD CONSTRAINT "asset_edit_pkey" PRIMARY KEY ("id");`.execute(db);
  await sql`ALTER TABLE "asset_edit" ADD CONSTRAINT "asset_edit_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE CASCADE ON DELETE CASCADE;`.execute(
    db,
  );
  await sql`CREATE INDEX "asset_edit_assetId_idx" ON "asset_edit" ("assetId")`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP TABLE IF EXISTS "asset_edit";`.execute(db);
}
@@ -0,0 +1,11 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset_ocr" ADD COLUMN "isVisible" boolean NOT NULL DEFAULT TRUE`.execute(db);
  await sql`ALTER TABLE "asset_face" ADD COLUMN "isVisible" boolean NOT NULL DEFAULT TRUE`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "asset_ocr" DROP COLUMN "isVisible";`.execute(db);
  await sql`ALTER TABLE "asset_face" DROP COLUMN "isVisible";`.execute(db);
}
@@ -0,0 +1,53 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`CREATE OR REPLACE FUNCTION asset_edit_insert()
    RETURNS TRIGGER
    LANGUAGE PLPGSQL
    AS $$
    BEGIN
      UPDATE asset
      SET "editCount" = "editCount" + 1
      WHERE "id" = NEW."assetId";
      RETURN NULL;
    END
    $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION asset_edit_delete()
    RETURNS TRIGGER
    LANGUAGE PLPGSQL
    AS $$
    BEGIN
      UPDATE asset
      SET "editCount" = "editCount" - 1
      WHERE "id" = OLD."assetId";
      RETURN NULL;
    END
    $$;`.execute(db);
  await sql`ALTER TABLE "asset" ADD "editCount" integer NOT NULL DEFAULT 0;`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_delete"
    AFTER DELETE ON "asset_edit"
    REFERENCING OLD TABLE AS "old"
    FOR EACH ROW
    WHEN (pg_trigger_depth() = 0)
    EXECUTE FUNCTION asset_edit_delete();`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_insert"
    AFTER INSERT ON "asset_edit"
    FOR EACH ROW
    EXECUTE FUNCTION asset_edit_insert();`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_asset_edit_insert', '{"type":"function","name":"asset_edit_insert","sql":"CREATE OR REPLACE FUNCTION asset_edit_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"editCount\\" = \\"editCount\\" + 1\\n WHERE \\"id\\" = NEW.\\"assetId\\";\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('function_asset_edit_delete', '{"type":"function","name":"asset_edit_delete","sql":"CREATE OR REPLACE FUNCTION asset_edit_delete()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"editCount\\" = \\"editCount\\" - 1\\n WHERE \\"id\\" = OLD.\\"assetId\\";\\n RETURN NULL;\\n END\\n $$;"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_edit_delete', '{"type":"trigger","name":"asset_edit_delete","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_delete\\"\\n AFTER DELETE ON \\"asset_edit\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH ROW\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION asset_edit_delete();"}'::jsonb);`.execute(db);
  await sql`INSERT INTO "migration_overrides" ("name", "value") VALUES ('trigger_asset_edit_insert', '{"type":"trigger","name":"asset_edit_insert","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_insert\\"\\n AFTER INSERT ON \\"asset_edit\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION asset_edit_insert();"}'::jsonb);`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`DROP TRIGGER "asset_edit_delete" ON "asset_edit";`.execute(db);
  await sql`DROP TRIGGER "asset_edit_insert" ON "asset_edit";`.execute(db);
  await sql`ALTER TABLE "asset" DROP COLUMN "editCount";`.execute(db);
  await sql`DROP FUNCTION asset_edit_insert;`.execute(db);
  await sql`DROP FUNCTION asset_edit_delete;`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'function_asset_edit_insert';`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'function_asset_edit_delete';`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'trigger_asset_edit_delete';`.execute(db);
  await sql`DELETE FROM "migration_overrides" WHERE "name" = 'trigger_asset_edit_insert';`.execute(db);
}
@@ -0,0 +1,89 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
  await sql`CREATE OR REPLACE FUNCTION asset_edit_insert()
    RETURNS TRIGGER
    LANGUAGE PLPGSQL
    AS $$
    BEGIN
      UPDATE asset
      SET "isEdited" = true
      FROM inserted_edit
      WHERE asset.id = inserted_edit."assetId" AND NOT asset."isEdited";
      RETURN NULL;
    END
    $$;`.execute(db);
  await sql`CREATE OR REPLACE FUNCTION asset_edit_delete()
    RETURNS TRIGGER
    LANGUAGE PLPGSQL
    AS $$
    BEGIN
      UPDATE asset
      SET "isEdited" = false
      FROM deleted_edit
      WHERE asset.id = deleted_edit."assetId" AND asset."isEdited"
        AND NOT EXISTS (SELECT FROM asset_edit edit WHERE edit."assetId" = asset.id);
      RETURN NULL;
    END
    $$;`.execute(db);
  await sql`ALTER TABLE "asset" ADD "isEdited" boolean NOT NULL DEFAULT false;`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_delete"
    AFTER DELETE ON "asset_edit"
    REFERENCING OLD TABLE AS "deleted_edit"
    FOR EACH STATEMENT
    WHEN (pg_trigger_depth() = 0)
    EXECUTE FUNCTION asset_edit_delete();`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_insert"
    AFTER INSERT ON "asset_edit"
    REFERENCING NEW TABLE AS "inserted_edit"
    FOR EACH STATEMENT
    EXECUTE FUNCTION asset_edit_insert();`.execute(db);
  await sql`ALTER TABLE "asset" DROP COLUMN "editCount";`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"asset_edit_insert","sql":"CREATE OR REPLACE FUNCTION asset_edit_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"isEdited\\" = true\\n FROM inserted_edit\\n WHERE asset.id = inserted_edit.\\"assetId\\" AND NOT asset.\\"isEdited\\";\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_asset_edit_insert';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"asset_edit_delete","sql":"CREATE OR REPLACE FUNCTION asset_edit_delete()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"isEdited\\" = false\\n FROM deleted_edit\\n WHERE asset.id = deleted_edit.\\"assetId\\" AND asset.\\"isEdited\\" \\n AND NOT EXISTS (SELECT FROM asset_edit edit WHERE edit.\\"assetId\\" = asset.id);\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_asset_edit_delete';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"type":"trigger","name":"asset_edit_delete","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_delete\\"\\n AFTER DELETE ON \\"asset_edit\\"\\n REFERENCING OLD TABLE AS \\"deleted_edit\\"\\n FOR EACH STATEMENT\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION asset_edit_delete();"}'::jsonb WHERE "name" = 'trigger_asset_edit_delete';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"type":"trigger","name":"asset_edit_insert","sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_insert\\"\\n AFTER INSERT ON \\"asset_edit\\"\\n REFERENCING NEW TABLE AS \\"inserted_edit\\"\\n FOR EACH STATEMENT\\n EXECUTE FUNCTION asset_edit_insert();"}'::jsonb WHERE "name" = 'trigger_asset_edit_insert';`.execute(db);
}

export async function down(db: Kysely<any>): Promise<void> {
  await sql`CREATE OR REPLACE FUNCTION public.asset_edit_insert()
    RETURNS trigger
    LANGUAGE plpgsql
    AS $function$
    BEGIN
      UPDATE asset
      SET "editCount" = "editCount" + 1
      WHERE "id" = NEW."assetId";
      RETURN NULL;
    END
    $function$
  `.execute(db);
  await sql`CREATE OR REPLACE FUNCTION public.asset_edit_delete()
    RETURNS trigger
    LANGUAGE plpgsql
    AS $function$
    BEGIN
      UPDATE asset
      SET "editCount" = "editCount" - 1
      WHERE "id" = OLD."assetId";
      RETURN NULL;
    END
    $function$
  `.execute(db);
  await sql`ALTER TABLE "asset" ADD "editCount" integer NOT NULL DEFAULT 0;`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_delete"
    AFTER DELETE ON "asset_edit"
    REFERENCING OLD TABLE AS "old"
    FOR EACH ROW
    WHEN ((pg_trigger_depth() = 0))
    EXECUTE FUNCTION asset_edit_delete();`.execute(db);
  await sql`CREATE OR REPLACE TRIGGER "asset_edit_insert"
    AFTER INSERT ON "asset_edit"
    FOR EACH ROW
    EXECUTE FUNCTION asset_edit_insert();`.execute(db);
  await sql`ALTER TABLE "asset" DROP COLUMN "isEdited";`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"sql":"CREATE OR REPLACE FUNCTION asset_edit_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"editCount\\" = \\"editCount\\" + 1\\n WHERE \\"id\\" = NEW.\\"assetId\\";\\n RETURN NULL;\\n END\\n $$;","name":"asset_edit_insert","type":"function"}'::jsonb WHERE "name" = 'function_asset_edit_insert';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"sql":"CREATE OR REPLACE FUNCTION asset_edit_delete()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE asset\\n SET \\"editCount\\" = \\"editCount\\" - 1\\n WHERE \\"id\\" = OLD.\\"assetId\\";\\n RETURN NULL;\\n END\\n $$;","name":"asset_edit_delete","type":"function"}'::jsonb WHERE "name" = 'function_asset_edit_delete';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_delete\\"\\n AFTER DELETE ON \\"asset_edit\\"\\n REFERENCING OLD TABLE AS \\"old\\"\\n FOR EACH ROW\\n WHEN (pg_trigger_depth() = 0)\\n EXECUTE FUNCTION asset_edit_delete();","name":"asset_edit_delete","type":"trigger"}'::jsonb WHERE "name" = 'trigger_asset_edit_delete';`.execute(db);
  await sql`UPDATE "migration_overrides" SET "value" = '{"sql":"CREATE OR REPLACE TRIGGER \\"asset_edit_insert\\"\\n AFTER INSERT ON \\"asset_edit\\"\\n FOR EACH ROW\\n EXECUTE FUNCTION asset_edit_insert();","name":"asset_edit_insert","type":"trigger"}'::jsonb WHERE "name" = 'trigger_asset_edit_insert';`.execute(db);
}
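This migration switches the triggers from row-level to statement-level with transition tables (inserted_edit / deleted_edit), so a bulk replace of an asset's edits updates "isEdited" once per statement instead of once per row. A hedged sanity-check sketch of the behavior (the ids and edit payloads are hypothetical; not part of the migration):

import { Kysely, sql } from 'kysely';

// A single multi-row INSERT fires asset_edit_insert() once; the function sees
// both rows via the inserted_edit transition table and flips "isEdited" for
// both assets in one UPDATE.
async function demoTransitionTriggers(db: Kysely<any>, assetId1: string, assetId2: string) {
  await sql`
    INSERT INTO "asset_edit" ("assetId", "action", "parameters")
    VALUES
      (${assetId1}, 'crop', '{"x":0,"y":0,"width":100,"height":100}'::jsonb),
      (${assetId2}, 'crop', '{"x":0,"y":0,"width":100,"height":100}'::jsonb)
  `.execute(db);

  // Deleting the last edit rows clears the flag again; the NOT EXISTS guard in
  // asset_edit_delete() keeps "isEdited" set while other edits remain.
  await sql`DELETE FROM "asset_edit" WHERE "assetId" IN (${assetId1}, ${assetId2})`.execute(db);
}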
34 server/src/schema/tables/asset-edit.table.ts Normal file
@@ -0,0 +1,34 @@
import { AssetEditAction, AssetEditActionParameter } from 'src/dtos/editing.dto';
import { asset_edit_delete, asset_edit_insert } from 'src/schema/functions';
import { AssetTable } from 'src/schema/tables/asset.table';
import {
  AfterDeleteTrigger,
  AfterInsertTrigger,
  Column,
  ForeignKeyColumn,
  Generated,
  PrimaryGeneratedColumn,
  Table,
} from 'src/sql-tools';

@Table('asset_edit')
@AfterInsertTrigger({ scope: 'statement', function: asset_edit_insert, referencingNewTableAs: 'inserted_edit' })
@AfterDeleteTrigger({
  scope: 'statement',
  function: asset_edit_delete,
  referencingOldTableAs: 'deleted_edit',
  when: 'pg_trigger_depth() = 0',
})
export class AssetEditTable<T extends AssetEditAction = AssetEditAction> {
  @PrimaryGeneratedColumn()
  id!: Generated<string>;

  @ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false })
  assetId!: string;

  @Column()
  action!: T;

  @Column({ type: 'jsonb' })
  parameters!: AssetEditActionParameter[T];
}

@@ -78,4 +78,7 @@ export class AssetFaceTable {

  @UpdateIdColumn()
  updateId!: Generated<string>;

  @Column({ type: 'boolean', default: true })
  isVisible!: Generated<boolean>;
}
@@ -1,5 +1,4 @@
import { PrimaryGeneratedUuidV7Column } from 'src/decorators';
import { AssetMetadataKey } from 'src/enum';
import { Column, CreateDateColumn, Generated, Table, Timestamp } from 'src/sql-tools';

@Table('asset_metadata_audit')
@@ -11,7 +10,7 @@ export class AssetMetadataAuditTable {
  assetId!: string;

  @Column({ index: true })
  key!: AssetMetadataKey;
  key!: string;

  @CreateDateColumn({ default: () => 'clock_timestamp()', index: true })
  deletedAt!: Generated<Timestamp>;

@@ -32,7 +32,7 @@ export class AssetMetadataTable {
  assetId!: string;

  @PrimaryColumn({ type: 'character varying' })
  key!: AssetMetadataKey;
  key!: AssetMetadataKey | string;

  @Column({ type: 'jsonb' })
  value!: object;

@@ -42,4 +42,7 @@ export class AssetOcrTable {

  @Column({ type: 'text' })
  text!: string;

  @Column({ type: 'boolean', default: true })
  isVisible!: Generated<boolean>;
}
@@ -137,4 +137,13 @@ export class AssetTable {

  @Column({ enum: asset_visibility_enum, default: AssetVisibility.Timeline })
  visibility!: Generated<AssetVisibility>;

  @Column({ type: 'integer', nullable: true })
  width!: number | null;

  @Column({ type: 'integer', nullable: true })
  height!: number | null;

  @Column({ type: 'boolean', default: false })
  isEdited!: Generated<boolean>;
}
@@ -107,6 +107,78 @@ describe(ApiKeyService.name, () => {
        permissions: newPermissions,
      });
    });

    describe('api key auth', () => {
      it('should prevent adding Permission.all', async () => {
        const permissions = [Permission.ApiKeyCreate, Permission.ApiKeyUpdate, Permission.AssetRead];
        const auth = factory.auth({ apiKey: { permissions } });
        const apiKey = factory.apiKey({ userId: auth.user.id, permissions });

        mocks.apiKey.getById.mockResolvedValue(apiKey);

        await expect(sut.update(auth, apiKey.id, { permissions: [Permission.All] })).rejects.toThrow(
          'Cannot grant permissions you do not have',
        );

        expect(mocks.apiKey.update).not.toHaveBeenCalled();
      });

      it('should prevent adding a new permission', async () => {
        const permissions = [Permission.ApiKeyCreate, Permission.ApiKeyUpdate, Permission.AssetRead];
        const auth = factory.auth({ apiKey: { permissions } });
        const apiKey = factory.apiKey({ userId: auth.user.id, permissions });

        mocks.apiKey.getById.mockResolvedValue(apiKey);

        await expect(sut.update(auth, apiKey.id, { permissions: [Permission.AssetCopy] })).rejects.toThrow(
          'Cannot grant permissions you do not have',
        );

        expect(mocks.apiKey.update).not.toHaveBeenCalled();
      });

      it('should allow removing permissions', async () => {
        const auth = factory.auth({ apiKey: { permissions: [Permission.ApiKeyUpdate, Permission.AssetRead] } });
        const apiKey = factory.apiKey({
          userId: auth.user.id,
          permissions: [Permission.AssetRead, Permission.AssetDelete],
        });

        mocks.apiKey.getById.mockResolvedValue(apiKey);
        mocks.apiKey.update.mockResolvedValue(apiKey);

        // remove Permission.AssetDelete
        await sut.update(auth, apiKey.id, { permissions: [Permission.AssetRead] });

        expect(mocks.apiKey.update).toHaveBeenCalledWith(
          auth.user.id,
          apiKey.id,
          expect.objectContaining({ permissions: [Permission.AssetRead] }),
        );
      });

      it('should allow adding new permissions', async () => {
        const auth = factory.auth({
          apiKey: { permissions: [Permission.ApiKeyUpdate, Permission.AssetRead, Permission.AssetUpdate] },
        });
        const apiKey = factory.apiKey({ userId: auth.user.id, permissions: [Permission.AssetRead] });

        mocks.apiKey.getById.mockResolvedValue(apiKey);
        mocks.apiKey.update.mockResolvedValue(apiKey);

        // add Permission.AssetUpdate
        await sut.update(auth, apiKey.id, {
          name: apiKey.name,
          permissions: [Permission.AssetRead, Permission.AssetUpdate],
        });

        expect(mocks.apiKey.update).toHaveBeenCalledWith(
          auth.user.id,
          apiKey.id,
          expect.objectContaining({ permissions: [Permission.AssetRead, Permission.AssetUpdate] }),
        );
      });
    });
  });

  describe('delete', () => {
@@ -32,6 +32,14 @@ export class ApiKeyService extends BaseService {
      throw new BadRequestException('API Key not found');
    }

    if (
      auth.apiKey &&
      dto.permissions &&
      !isGranted({ requested: dto.permissions, current: auth.apiKey.permissions })
    ) {
      throw new BadRequestException('Cannot grant permissions you do not have');
    }

    const key = await this.apiKeyRepository.update(auth.user.id, id, { name: dto.name, permissions: dto.permissions });

    return this.map(key);
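The guard relies on isGranted from src/utils/access, whose body is not part of this diff. A minimal sketch of the subset check it has to perform, inferred from the tests above (the Permission.All short-circuit is an assumption, not the confirmed implementation):

import { Permission } from 'src/enum';

// Hypothetical sketch, not the actual src/utils/access implementation.
function isGrantedSketch({ requested, current }: { requested: Permission[]; current: Permission[] }): boolean {
  // a key already holding Permission.All can grant anything
  if (current.includes(Permission.All)) {
    return true;
  }

  // otherwise, every requested permission must already be held by the caller
  return requested.every((permission) => current.includes(permission));
}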
@@ -489,7 +489,7 @@ describe(AssetMediaService.name, () => {

  describe('downloadOriginal', () => {
    it('should require the asset.download permission', async () => {
      await expect(sut.downloadOriginal(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(BadRequestException);
      await expect(sut.downloadOriginal(authStub.admin, 'asset-1', {})).rejects.toBeInstanceOf(BadRequestException);

      expect(mocks.access.asset.checkOwnerAccess).toHaveBeenCalledWith(
        authStub.admin.user.id,
@@ -503,16 +503,16 @@ describe(AssetMediaService.name, () => {
    it('should throw an error if the asset is not found', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));

      await expect(sut.downloadOriginal(authStub.admin, 'asset-1')).rejects.toBeInstanceOf(NotFoundException);
      await expect(sut.downloadOriginal(authStub.admin, 'asset-1', {})).rejects.toBeInstanceOf(NotFoundException);

      expect(mocks.asset.getById).toHaveBeenCalledWith('asset-1', { files: true });
      expect(mocks.asset.getById).toHaveBeenCalledWith('asset-1', { files: true, edits: true });
    });

    it('should download a file', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      mocks.asset.getById.mockResolvedValue(assetStub.image);

      await expect(sut.downloadOriginal(authStub.admin, 'asset-1')).resolves.toEqual(
      await expect(sut.downloadOriginal(authStub.admin, 'asset-1', {})).resolves.toEqual(
        new ImmichFileResponse({
          path: '/original/path.jpg',
          fileName: 'asset-id.jpg',
@@ -521,6 +521,104 @@ describe(AssetMediaService.name, () => {
        }),
      );
    });

    it('should download edited file by default when edits exist', async () => {
      const editedAsset = {
        ...assetStub.withCropEdit,
        files: [
          ...assetStub.withCropEdit.files,
          {
            id: 'edited-file',
            type: AssetFileType.FullSizeEdited,
            path: '/uploads/user-id/fullsize/edited.jpg',
          } as AssetFile,
        ],
      };
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      mocks.asset.getById.mockResolvedValue(editedAsset);

      await expect(sut.downloadOriginal(authStub.admin, 'asset-1', { edited: true })).resolves.toEqual(
        new ImmichFileResponse({
          path: '/uploads/user-id/fullsize/edited.jpg',
          fileName: 'asset-id.jpg',
          contentType: 'image/jpeg',
          cacheControl: CacheControl.PrivateWithCache,
        }),
      );
    });

    it('should download edited file when edited=true', async () => {
      const editedAsset = {
        ...assetStub.withCropEdit,
        files: [
          ...assetStub.withCropEdit.files,
          {
            id: 'edited-file',
            type: AssetFileType.FullSizeEdited,
            path: '/uploads/user-id/fullsize/edited.jpg',
          } as AssetFile,
        ],
      };
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      mocks.asset.getById.mockResolvedValue(editedAsset);

      await expect(sut.downloadOriginal(authStub.admin, 'asset-1', { edited: true })).resolves.toEqual(
        new ImmichFileResponse({
          path: '/uploads/user-id/fullsize/edited.jpg',
          fileName: 'asset-id.jpg',
          contentType: 'image/jpeg',
          cacheControl: CacheControl.PrivateWithCache,
        }),
      );
    });

    it('should download original file when edited=false', async () => {
      const editedAsset = {
        ...assetStub.withCropEdit,
        files: [
          ...assetStub.withCropEdit.files,
          {
            id: 'edited-file',
            type: AssetFileType.FullSizeEdited,
            path: '/uploads/user-id/fullsize/edited.jpg',
          } as AssetFile,
        ],
      };
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      mocks.asset.getById.mockResolvedValue(editedAsset);

      await expect(sut.downloadOriginal(authStub.admin, 'asset-1', { edited: false })).resolves.toEqual(
        new ImmichFileResponse({
          path: '/original/path.jpg',
          fileName: 'asset-id.jpg',
          contentType: 'image/jpeg',
          cacheControl: CacheControl.PrivateWithCache,
        }),
      );
    });

    it('should download original file when no edits exist', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      mocks.asset.getById.mockResolvedValue(assetStub.image);

      await expect(sut.downloadOriginal(authStub.admin, 'asset-1', { edited: true })).resolves.toEqual(
        new ImmichFileResponse({
          path: '/original/path.jpg',
          fileName: 'asset-id.jpg',
          contentType: 'image/jpeg',
          cacheControl: CacheControl.PrivateWithCache,
        }),
      );
    });

    it('should throw a not found when edits exist but no edited file available', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
      mocks.asset.getById.mockResolvedValue(assetStub.withCropEdit);

      await expect(sut.downloadOriginal(authStub.admin, 'asset-1', { edited: true })).rejects.toBeInstanceOf(
        NotFoundException,
      );
    });
  });

  describe('viewThumbnail', () => {
@@ -620,6 +718,8 @@ describe(AssetMediaService.name, () => {
        }),
      );
    });

    // TODO: Edited asset tests
  });

  describe('playbackVideo', () => {
@@ -20,6 +20,7 @@ import {
  CheckExistingAssetsDto,
  UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AssetDownloadOriginalDto } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
  AssetFileType,
@@ -193,11 +194,26 @@ export class AssetMediaService extends BaseService {
    }
  }

  async downloadOriginal(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
  async downloadOriginal(auth: AuthDto, id: string, dto: AssetDownloadOriginalDto): Promise<ImmichFileResponse> {
    await this.requireAccess({ auth, permission: Permission.AssetDownload, ids: [id] });

    const asset = await this.findOrFail(id);

    if (asset.edits!.length > 0 && (dto.edited ?? false)) {
      const { editedFullsizeFile } = getAssetFiles(asset.files ?? []);

      if (!editedFullsizeFile) {
        throw new NotFoundException('Edited asset media not found');
      }

      return new ImmichFileResponse({
        path: editedFullsizeFile.path,
        fileName: getFileNameWithoutExtension(asset.originalFileName) + getFilenameExtension(editedFullsizeFile.path),
        contentType: mimeTypes.lookup(editedFullsizeFile.path),
        cacheControl: CacheControl.PrivateWithCache,
      });
    }

    return new ImmichFileResponse({
      path: asset.originalPath,
      fileName: asset.originalFileName,
@@ -216,12 +232,20 @@ export class AssetMediaService extends BaseService {
    const asset = await this.findOrFail(id);
    const size = dto.size ?? AssetMediaSize.THUMBNAIL;

    const { thumbnailFile, previewFile, fullsizeFile } = getAssetFiles(asset.files ?? []);
    const files = getAssetFiles(asset.files ?? []);

    const requestingEdited = (dto.edited ?? false) && asset.edits!.length > 0;
    const { fullsizeFile, previewFile, thumbnailFile } = {
      fullsizeFile: requestingEdited ? files.editedFullsizeFile : files.fullsizeFile,
      previewFile: requestingEdited ? files.editedPreviewFile : files.previewFile,
      thumbnailFile: requestingEdited ? files.editedThumbnailFile : files.thumbnailFile,
    };

    let filepath = previewFile?.path;
    if (size === AssetMediaSize.THUMBNAIL && thumbnailFile) {
      filepath = thumbnailFile.path;
    } else if (size === AssetMediaSize.FULLSIZE) {
      if (mimeTypes.isWebSupportedImage(asset.originalPath)) {
      if (mimeTypes.isWebSupportedImage(asset.originalPath) && !dto.edited) {
        // use original file for web supported images
        return { targetSize: 'original' };
      }
@@ -433,7 +457,7 @@ export class AssetMediaService extends BaseService {
      originalFileName: dto.filename || file.originalName,
    });

    if (dto.metadata) {
    if (dto.metadata?.length) {
      await this.assetRepository.upsertMetadata(asset.id, dto.metadata);
    }

@@ -465,7 +489,7 @@ export class AssetMediaService extends BaseService {
  }

  private async findOrFail(id: string) {
    const asset = await this.assetRepository.getById(id, { files: true });
    const asset = await this.assetRepository.getById(id, { files: true, edits: true });
    if (!asset) {
      throw new NotFoundException('Asset not found');
    }
@@ -2,7 +2,7 @@ import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetJobName, AssetStatsResponseDto } from 'src/dtos/asset.dto';
import { AssetStatus, AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum';
import { AssetMetadataKey, AssetStatus, AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum';
import { AssetStats } from 'src/repositories/asset.repository';
import { AssetService } from 'src/services/asset.service';
import { assetStub } from 'test/fixtures/asset.stub';
@@ -704,6 +704,7 @@ describe(AssetService.name, () => {

    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
    mocks.ocr.getByAssetId.mockResolvedValue([ocr1, ocr2]);
    mocks.asset.getById.mockResolvedValue(assetStub.image);

    await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([ocr1, ocr2]);

@@ -718,7 +719,7 @@ describe(AssetService.name, () => {
  it('should return empty array when no OCR data exists', async () => {
    mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1']));
    mocks.ocr.getByAssetId.mockResolvedValue([]);

    mocks.asset.getById.mockResolvedValue(assetStub.image);
    await expect(sut.getOcr(authStub.admin, 'asset-1')).resolves.toEqual([]);

    expect(mocks.ocr.getByAssetId).toHaveBeenCalledWith('asset-1');
@@ -776,4 +777,40 @@ describe(AssetService.name, () => {
      expect(result).toEqual(assets.map((asset) => asset.deviceAssetId));
    });
  });

  describe('upsertMetadata', () => {
    it('should throw a bad request exception if duplicate keys are sent', async () => {
      const asset = factory.asset();
      const items = [
        { key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
        { key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
      ];

      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([asset.id]));

      await expect(sut.upsertMetadata(authStub.admin, asset.id, { items })).rejects.toThrowError(
        'Duplicate items are not allowed:',
      );

      expect(mocks.asset.upsertBulkMetadata).not.toHaveBeenCalled();
    });
  });

  describe('upsertBulkMetadata', () => {
    it('should throw a bad request exception if duplicate keys are sent', async () => {
      const asset = factory.asset();
      const items = [
        { assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
        { assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
      ];

      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([asset.id]));

      await expect(sut.upsertBulkMetadata(authStub.admin, { items })).rejects.toThrowError(
        'Duplicate items are not allowed:',
      );

      expect(mocks.asset.upsertBulkMetadata).not.toHaveBeenCalled();
    });
  });
});
@@ -11,6 +11,9 @@ import {
  AssetCopyDto,
  AssetJobName,
  AssetJobsDto,
  AssetMetadataBulkDeleteDto,
  AssetMetadataBulkResponseDto,
  AssetMetadataBulkUpsertDto,
  AssetMetadataResponseDto,
  AssetMetadataUpsertDto,
  AssetStatsDto,
@@ -18,11 +21,12 @@ import {
  mapStats,
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEditAction, AssetEditActionListDto, AssetEditsDto } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import {
  AssetFileType,
  AssetMetadataKey,
  AssetStatus,
  AssetType,
  AssetVisibility,
  JobName,
  JobStatus,
@@ -32,8 +36,17 @@ import {
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { requireElevatedPermission } from 'src/utils/access';
import { getAssetFiles, getMyPartnerIds, onAfterUnlink, onBeforeLink, onBeforeUnlink } from 'src/utils/asset.util';
import {
  getAssetFiles,
  getDimensions,
  getMyPartnerIds,
  isPanorama,
  onAfterUnlink,
  onBeforeLink,
  onBeforeUnlink,
} from 'src/utils/asset.util';
import { updateLockedColumns } from 'src/utils/database';
import { transformOcrBoundingBox } from 'src/utils/transform';

@Injectable()
export class AssetService extends BaseService {
@@ -68,6 +81,7 @@ export class AssetService extends BaseService {
      owner: true,
      faces: { person: true },
      stack: { assets: true },
      edits: true,
      tags: true,
    });

@@ -345,11 +359,19 @@ export class AssetService extends BaseService {
      }
    }

    const { fullsizeFile, previewFile, thumbnailFile, sidecarFile } = getAssetFiles(asset.files ?? []);
    const files = [thumbnailFile?.path, previewFile?.path, fullsizeFile?.path, asset.encodedVideoPath];
    const assetFiles = getAssetFiles(asset.files ?? []);
    const files = [
      assetFiles.thumbnailFile?.path,
      assetFiles.previewFile?.path,
      assetFiles.fullsizeFile?.path,
      assetFiles.editedFullsizeFile?.path,
      assetFiles.editedPreviewFile?.path,
      assetFiles.editedThumbnailFile?.path,
      asset.encodedVideoPath,
    ];

    if (deleteOnDisk && !asset.isOffline) {
      files.push(sidecarFile?.path, asset.originalPath);
      files.push(assetFiles.sidecarFile?.path, asset.originalPath);
    }

    await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: files.filter(Boolean) } });
@@ -378,15 +400,50 @@ export class AssetService extends BaseService {

  async getOcr(auth: AuthDto, id: string): Promise<AssetOcrResponseDto[]> {
    await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });
    return this.ocrRepository.getByAssetId(id);
    const ocr = await this.ocrRepository.getByAssetId(id);
    const asset = await this.assetRepository.getById(id, { exifInfo: true, edits: true });

    if (!asset || !asset.exifInfo || !asset.edits) {
      throw new BadRequestException('Asset not found');
    }

    const dimensions = getDimensions(asset.exifInfo);

    return ocr.map((item) => transformOcrBoundingBox(item, asset.edits!, dimensions));
  }

  async upsertBulkMetadata(auth: AuthDto, dto: AssetMetadataBulkUpsertDto): Promise<AssetMetadataBulkResponseDto[]> {
    await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: dto.items.map((item) => item.assetId) });

    const uniqueKeys = new Set<string>();
    for (const item of dto.items) {
      const key = `(${item.assetId}, ${item.key})`;
      if (uniqueKeys.has(key)) {
        throw new BadRequestException(`Duplicate items are not allowed: "${key}"`);
      }

      uniqueKeys.add(key);
    }

    return this.assetRepository.upsertBulkMetadata(dto.items);
  }

  async upsertMetadata(auth: AuthDto, id: string, dto: AssetMetadataUpsertDto): Promise<AssetMetadataResponseDto[]> {
    await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: [id] });

    const uniqueKeys = new Set<string>();
    for (const { key } of dto.items) {
      if (uniqueKeys.has(key)) {
        throw new BadRequestException(`Duplicate items are not allowed: "${key}"`);
      }

      uniqueKeys.add(key);
    }

    return this.assetRepository.upsertMetadata(id, dto.items);
  }

  async getMetadataByKey(auth: AuthDto, id: string, key: AssetMetadataKey): Promise<AssetMetadataResponseDto> {
  async getMetadataByKey(auth: AuthDto, id: string, key: string): Promise<AssetMetadataResponseDto> {
    await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });

    const item = await this.assetRepository.getMetadataByKey(id, key);
@@ -396,11 +453,16 @@ export class AssetService extends BaseService {
    return item;
  }

  async deleteMetadataByKey(auth: AuthDto, id: string, key: AssetMetadataKey): Promise<void> {
  async deleteMetadataByKey(auth: AuthDto, id: string, key: string): Promise<void> {
    await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: [id] });
    return this.assetRepository.deleteMetadataByKey(id, key);
  }

  async deleteBulkMetadata(auth: AuthDto, dto: AssetMetadataBulkDeleteDto) {
    await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: dto.items.map((item) => item.assetId) });
    await this.assetRepository.deleteBulkMetadata(dto.items);
  }

  async run(auth: AuthDto, dto: AssetJobsDto) {
    await this.requireAccess({ auth, permission: Permission.AssetUpdate, ids: dto.assetIds });

@@ -474,4 +536,78 @@ export class AssetService extends BaseService {
      await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id } });
    }
  }

  async getAssetEdits(auth: AuthDto, id: string): Promise<AssetEditsDto> {
    await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [id] });
    const edits = await this.assetEditRepository.getAll(id);
    return {
      assetId: id,
      edits,
    };
  }

  async editAsset(auth: AuthDto, id: string, dto: AssetEditActionListDto): Promise<AssetEditsDto> {
    await this.requireAccess({ auth, permission: Permission.AssetEditCreate, ids: [id] });

    const asset = await this.assetRepository.getById(id, { exifInfo: true });
    if (!asset) {
      throw new BadRequestException('Asset not found');
    }

    if (asset.type !== AssetType.Image) {
      throw new BadRequestException('Only images can be edited');
    }

    if (asset.livePhotoVideoId) {
      throw new BadRequestException('Editing live photos is not supported');
    }

    if (isPanorama(asset)) {
      throw new BadRequestException('Editing panorama images is not supported');
    }

    if (asset.originalPath?.toLowerCase().endsWith('.gif')) {
      throw new BadRequestException('Editing GIF images is not supported');
    }

    if (asset.originalPath?.toLowerCase().endsWith('.svg')) {
      throw new BadRequestException('Editing SVG images is not supported');
    }

    // check that crop parameters will not go out of bounds
    const { width: assetWidth, height: assetHeight } = getDimensions(asset.exifInfo!);

    if (!assetWidth || !assetHeight) {
      throw new BadRequestException('Asset dimensions are not available for editing');
    }

    const crop = dto.edits.find((e) => e.action === AssetEditAction.Crop)?.parameters;
    if (crop) {
      const { x, y, width, height } = crop;
      if (x + width > assetWidth || y + height > assetHeight) {
        throw new BadRequestException('Crop parameters are out of bounds');
      }
    }

    const newEdits = await this.assetEditRepository.replaceAll(id, dto.edits);
    await this.jobRepository.queue({ name: JobName.AssetEditThumbnailGeneration, data: { id } });

    // Return the asset and its applied edits
    return {
      assetId: id,
      edits: newEdits,
    };
  }

  async removeAssetEdits(auth: AuthDto, id: string): Promise<void> {
    await this.requireAccess({ auth, permission: Permission.AssetEditDelete, ids: [id] });

    const asset = await this.assetRepository.getById(id);
    if (!asset) {
      throw new BadRequestException('Asset not found');
    }

    await this.assetEditRepository.replaceAll(id, []);
    await this.jobRepository.queue({ name: JobName.AssetEditThumbnailGeneration, data: { id } });
  }
}
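As a concrete instance of the bounds check in editAsset: on a 4000x3000 image, a crop at x=3200 with width=1000 fails because 3200 + 1000 > 4000. A hypothetical payload, shaped after the destructuring above ({ x, y, width, height }); everything else is illustrative:

import { AssetEditAction } from 'src/dtos/editing.dto';

// Hypothetical request body; rejected with "Crop parameters are out of bounds"
// on a 4000x3000 asset, since x + width exceeds the asset width.
const dto = {
  edits: [
    {
      action: AssetEditAction.Crop,
      parameters: { x: 3200, y: 0, width: 1000, height: 1000 },
    },
  ],
};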
@@ -165,6 +165,11 @@ export class AuthService extends BaseService {
  }

  async adminSignUp(dto: SignUpDto): Promise<UserAdminResponseDto> {
    const { setup } = this.configRepository.getEnv();
    if (!setup.allow) {
      throw new BadRequestException('Admin setup is disabled');
    }

    const adminUser = await this.userRepository.getAdmin();
    if (adminUser) {
      throw new BadRequestException('The server already has an admin');
@@ -5,7 +5,7 @@ import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, JobStatus, StorageFolder } from 'src/enum';
import { BackupService } from 'src/services/backup.service';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { mockDuplex, mockSpawn, newTestService, ServiceMocks } from 'test/utils';
import { describe } from 'vitest';

describe(BackupService.name, () => {
@@ -147,6 +147,7 @@ describe(BackupService.name, () => {
  beforeEach(() => {
    mocks.storage.readdir.mockResolvedValue([]);
    mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
    mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
    mocks.storage.rename.mockResolvedValue();
    mocks.storage.unlink.mockResolvedValue();
    mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
@@ -165,7 +166,7 @@ describe(BackupService.name, () => {
    ({ sut, mocks } = newTestService(BackupService, { config: configMock }));

    mocks.storage.readdir.mockResolvedValue([]);
    mocks.process.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
    mocks.process.spawnDuplexStream.mockImplementation(() => mockDuplex('command', 0, 'data', ''));
    mocks.storage.rename.mockResolvedValue();
    mocks.storage.unlink.mockResolvedValue();
    mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.backupEnabled);
@@ -174,14 +175,16 @@ describe(BackupService.name, () => {

    await sut.handleBackupDatabase();

    expect(mocks.process.spawn).toHaveBeenCalled();
    const call = mocks.process.spawn.mock.calls[0];
    expect(mocks.process.spawnDuplexStream).toHaveBeenCalled();
    const call = mocks.process.spawnDuplexStream.mock.calls[0];
    const args = call[1] as string[];
    // ['--dbname', '<url>', '--clean', '--if-exists']
    expect(args[0]).toBe('--dbname');
    const passedUrl = args[1];
    expect(passedUrl).not.toContain('uselibpqcompat');
    expect(passedUrl).toContain('sslmode=require');
    expect(args).toMatchInlineSnapshot(`
      [
        "postgresql://postgres:pwd@host:5432/immich?sslmode=require",
        "--clean",
        "--if-exists",
      ]
    `);
  });

  it('should run a database backup successfully', async () => {
@@ -196,21 +199,21 @@ describe(BackupService.name, () => {
    expect(mocks.storage.rename).toHaveBeenCalled();
  });

  it('should fail if pg_dumpall fails', async () => {
    mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
    await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
  it('should fail if pg_dump fails', async () => {
    mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
    await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
  });

  it('should not rename file if pgdump fails and gzip succeeds', async () => {
    mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
    await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
    mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
    await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
    expect(mocks.storage.rename).not.toHaveBeenCalled();
  });

  it('should fail if gzip fails', async () => {
    mocks.process.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
    mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
    await expect(sut.handleBackupDatabase()).rejects.toThrow('Gzip failed with code 1');
    mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 0, 'data', ''));
    mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('gzip', 1, '', 'error'));
    await expect(sut.handleBackupDatabase()).rejects.toThrow('gzip non-zero exit code (1)');
  });

  it('should fail if write stream fails', async () => {
@@ -226,9 +229,9 @@ describe(BackupService.name, () => {
  });

  it('should ignore unlink failing and still return failed job status', async () => {
    mocks.process.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
    mocks.process.spawnDuplexStream.mockReturnValueOnce(mockDuplex('pg_dump', 1, '', 'error'));
    mocks.storage.unlink.mockRejectedValue(new Error('error'));
    await expect(sut.handleBackupDatabase()).rejects.toThrow('Backup failed with code 1');
    await expect(sut.handleBackupDatabase()).rejects.toThrow('pg_dump non-zero exit code (1)');
    expect(mocks.storage.unlink).toHaveBeenCalled();
  });

@@ -242,12 +245,12 @@ describe(BackupService.name, () => {
    ${'17.15.1'} | ${17}
    ${'18.0.0'} | ${18}
  `(
    `should use pg_dumpall $expectedVersion with postgres version $postgresVersion`,
    `should use pg_dump $expectedVersion with postgres version $postgresVersion`,
    async ({ postgresVersion, expectedVersion }) => {
      mocks.database.getPostgresVersion.mockResolvedValue(postgresVersion);
      await sut.handleBackupDatabase();
      expect(mocks.process.spawn).toHaveBeenCalledWith(
        `/usr/lib/postgresql/${expectedVersion}/bin/pg_dumpall`,
      expect(mocks.process.spawnDuplexStream).toHaveBeenCalledWith(
        `/usr/lib/postgresql/${expectedVersion}/bin/pg_dump`,
        expect.any(Array),
        expect.any(Object),
      );
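The spec mocks spawnDuplexStream twice per backup (pg_dump, then gzip), which implies the service composes them with stream.pipeline. A hedged sketch of that composition (the function and its parameters are assumptions; the actual handleBackupDatabase body is only partially visible below):

import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { ProcessRepository } from 'src/repositories/process.repository';

// Hypothetical composition, not the verbatim service code: pg_dump's stdout
// streams through a spawned gzip into the temporary backup file, and any
// non-zero exit rejects the pipeline as "<command> non-zero exit code (N)".
async function backupSketch(
  processRepository: ProcessRepository,
  pgDumpPath: string,
  databaseParams: string[],
  backupFilePath: string,
) {
  await pipeline(
    processRepository.spawnDuplexStream(pgDumpPath, databaseParams),
    processRepository.spawnDuplexStream('gzip'),
    createWriteStream(backupFilePath),
  );
}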
@@ -1,13 +1,16 @@
import { Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import path from 'node:path';
import semver from 'semver';
import { serverVersion } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName, StorageFolder } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import {
createDatabaseBackup,
isFailedDatabaseBackupName,
isValidDatabaseRoutineBackupName,
UnsupportedPostgresError,
} from 'src/utils/database-backups';
import { handlePromiseError } from 'src/utils/misc';

@Injectable()
@@ -53,16 +56,11 @@ export class BackupService extends BaseService {

const backupsFolder = StorageCore.getBaseFolder(StorageFolder.Backups);
const files = await this.storageRepository.readdir(backupsFolder);
const failedBackups = files.filter((file) => file.match(/immich-db-backup-.*\.sql\.gz\.tmp$/));
const backups = files
.filter((file) => {
const oldBackupStyle = file.match(/immich-db-backup-\d+\.sql\.gz$/);
//immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
const newBackupStyle = file.match(/immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/);
return oldBackupStyle || newBackupStyle;
})
.filter((filename) => isValidDatabaseRoutineBackupName(filename))
.toSorted()
.toReversed();
const failedBackups = files.filter((filename) => isFailedDatabaseBackupName(filename));

const toDelete = backups.slice(config.keepLastAmount);
toDelete.push(...failedBackups);
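Note: the new name helpers replace the inline regex filter removed above. A minimal sketch of what they might look like, reusing the regexes from the removed code (the helper names come from the diff; the bodies here are assumptions, not the actual src/utils/database-backups implementation):

// Hypothetical sketch; regexes taken verbatim from the removed filter above.
const OLD_BACKUP_NAME = /immich-db-backup-\d+\.sql\.gz$/;
// e.g. immich-db-backup-20250729T114018-v1.136.0-pg14.17.sql.gz
const NEW_BACKUP_NAME = /immich-db-backup-\d{8}T\d{6}-v.*-pg.*\.sql\.gz$/;

export const isValidDatabaseRoutineBackupName = (filename: string): boolean =>
  OLD_BACKUP_NAME.test(filename) || NEW_BACKUP_NAME.test(filename);

export const isFailedDatabaseBackupName = (filename: string): boolean =>
  /immich-db-backup-.*\.sql\.gz\.tmp$/.test(filename);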
@@ -75,123 +73,27 @@ export class BackupService extends BaseService {

@OnJob({ name: JobName.DatabaseBackup, queue: QueueName.BackupDatabase })
async handleBackupDatabase(): Promise<JobStatus> {
this.logger.debug(`Database Backup Started`);
const { database } = this.configRepository.getEnv();
const config = database.config;

const isUrlConnection = config.connectionType === 'url';

let connectionUrl: string = isUrlConnection ? config.url : '';
if (URL.canParse(connectionUrl)) {
// remove known bad url parameters for pg_dumpall
const url = new URL(connectionUrl);
url.searchParams.delete('uselibpqcompat');
connectionUrl = url.toString();
}

const databaseParams = isUrlConnection
? ['--dbname', connectionUrl]
: [
'--username',
config.username,
'--host',
config.host,
'--port',
`${config.port}`,
'--database',
config.database,
];

databaseParams.push('--clean', '--if-exists');
const databaseVersion = await this.databaseRepository.getPostgresVersion();
const backupFilePath = path.join(
StorageCore.getBaseFolder(StorageFolder.Backups),
`immich-db-backup-${DateTime.now().toFormat("yyyyLLdd'T'HHmmss")}-v${serverVersion.toString()}-pg${databaseVersion.split(' ')[0]}.sql.gz.tmp`,
);
const databaseSemver = semver.coerce(databaseVersion);
const databaseMajorVersion = databaseSemver?.major;

if (!databaseMajorVersion || !databaseSemver || !semver.satisfies(databaseSemver, '>=14.0.0 <19.0.0')) {
this.logger.error(`Database Backup Failure: Unsupported PostgreSQL version: ${databaseVersion}`);
return JobStatus.Failed;
}

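// Worked example of the gate above (assumed sample value): getPostgresVersion()
// may return a string like "14.17 (Debian 14.17-1.pgdg120+1)"; semver.coerce()
// extracts 14.17.0, so databaseMajorVersion === 14 and '>=14.0.0 <19.0.0' passes.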
this.logger.log(`Database Backup Starting. Database Version: ${databaseMajorVersion}`);

try {
await new Promise<void>((resolve, reject) => {
const pgdump = this.processRepository.spawn(
`/usr/lib/postgresql/${databaseMajorVersion}/bin/pg_dumpall`,
databaseParams,
{
env: {
PATH: process.env.PATH,
PGPASSWORD: isUrlConnection ? new URL(connectionUrl).password : config.password,
},
},
);

// NOTE: `--rsyncable` is only supported in GNU gzip
const gzip = this.processRepository.spawn(`gzip`, ['--rsyncable']);
pgdump.stdout.pipe(gzip.stdin);

const fileStream = this.storageRepository.createWriteStream(backupFilePath);

gzip.stdout.pipe(fileStream);

pgdump.on('error', (err) => {
this.logger.error(`Backup failed with error: ${err}`);
reject(err);
});

gzip.on('error', (err) => {
this.logger.error(`Gzip failed with error: ${err}`);
reject(err);
});

let pgdumpLogs = '';
let gzipLogs = '';

pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
gzip.stderr.on('data', (data) => (gzipLogs += data));

pgdump.on('exit', (code) => {
if (code !== 0) {
this.logger.error(`Backup failed with code ${code}`);
reject(`Backup failed with code ${code}`);
this.logger.error(pgdumpLogs);
return;
}
if (pgdumpLogs) {
this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
}
});

gzip.on('exit', (code) => {
if (code !== 0) {
this.logger.error(`Gzip failed with code ${code}`);
reject(`Gzip failed with code ${code}`);
this.logger.error(gzipLogs);
return;
}
if (pgdump.exitCode !== 0) {
this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
return;
}
resolve();
});
});
await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
await createDatabaseBackup(this.backupRepos);
} catch (error) {
this.logger.error(`Database Backup Failure: ${error}`);
await this.storageRepository
.unlink(backupFilePath)
.catch((error) => this.logger.error(`Failed to delete failed backup file: ${error}`));
if (error instanceof UnsupportedPostgresError) {
return JobStatus.Failed;
}

throw error;
}

this.logger.log(`Database Backup Success`);
await this.cleanupDatabaseBackups();
return JobStatus.Success;
}

private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
};
}
}

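The manual spawn/pipe/exit-code wiring above moves into createDatabaseBackup, and the tests now stub spawnDuplexStream instead of spawn. A rough sketch of how the new pipeline might be composed, assuming spawnDuplexStream wraps a child process as a Node Duplex stream that fails with "<name> non-zero exit code (<code>)" (the error strings come from the updated tests; the real utility may differ):

import { pipeline } from 'node:stream/promises';
import type { Duplex, Writable } from 'node:stream';

// Assumed repository shapes; the real interfaces live in the repository layer.
interface ProcessRepo {
  spawnDuplexStream(command: string, args: string[]): Duplex;
}
interface StorageRepo {
  createWriteStream(path: string): Writable;
  rename(from: string, to: string): Promise<void>;
}

async function runBackupPipeline(
  processRepo: ProcessRepo,
  storage: StorageRepo,
  pgDumpPath: string,
  databaseParams: string[],
  backupFilePath: string,
): Promise<void> {
  // pipeline() wires the stages together and rejects on the first failure,
  // replacing the manual .pipe() calls and per-process 'exit' handlers above.
  await pipeline(
    processRepo.spawnDuplexStream(pgDumpPath, databaseParams),
    processRepo.spawnDuplexStream('gzip', ['--rsyncable']),
    storage.createWriteStream(backupFilePath),
  );
  await storage.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
}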
@@ -11,6 +11,7 @@ import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
import { AppRepository } from 'src/repositories/app.repository';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { AuditRepository } from 'src/repositories/audit.repository';
@@ -69,6 +70,7 @@ export const BASE_SERVICE_DEPENDENCIES = [
ApiKeyRepository,
AppRepository,
AssetRepository,
AssetEditRepository,
AssetJobRepository,
AuditRepository,
ConfigRepository,
@@ -127,6 +129,7 @@ export class BaseService {
protected apiKeyRepository: ApiKeyRepository,
protected appRepository: AppRepository,
protected assetRepository: AssetRepository,
protected assetEditRepository: AssetEditRepository,
protected assetJobRepository: AssetJobRepository,
protected auditRepository: AuditRepository,
protected configRepository: ConfigRepository,

@@ -1,5 +1,5 @@
import { jwtVerify } from 'jose';
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { CliService } from 'src/services/cli.service';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
@@ -95,7 +95,14 @@ describe(CliService.name, () => {
});

it('should disable maintenance mode', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

await expect(sut.disableMaintenanceMode()).resolves.toEqual({
alreadyDisabled: false,
});
@@ -109,7 +116,14 @@ describe(CliService.name, () => {

describe('enableMaintenanceMode', () => {
it('should not do anything if in maintenance mode', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

await expect(sut.enableMaintenanceMode()).resolves.toEqual(
expect.objectContaining({
alreadyEnabled: true,
@@ -133,13 +147,22 @@ describe(CliService.name, () => {
expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret: expect.stringMatching(/^\w{128}$/),
action: {
action: 'start',
},
});
});

const RE_LOGIN_URL = /https:\/\/my.immich.app\/maintenance\?token=([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)/;

it('should return a valid login URL', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

const result = await sut.enableMaintenanceMode();


@@ -3,7 +3,7 @@ import { isAbsolute } from 'node:path';
import { SALT_ROUNDS } from 'src/constants';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { UserAdminResponseDto, mapUserAdmin } from 'src/dtos/user.dto';
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { BaseService } from 'src/services/base.service';
import { createMaintenanceLoginUrl, generateMaintenanceSecret } from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';
@@ -86,6 +86,9 @@ export class CliService extends BaseService {
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret,
action: {
action: MaintenanceAction.Start,
},
});

await this.appRepository.sendOneShotAppRestart({

server/src/services/database-backup.service.spec.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
import { BadRequestException } from '@nestjs/common';
import { DateTime } from 'luxon';
import { StorageCore } from 'src/cores/storage.core';
import { StorageFolder } from 'src/enum';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { newTestService, ServiceMocks } from 'test/utils';

describe(DatabaseBackupService.name, () => {
let sut: DatabaseBackupService;
let mocks: ServiceMocks;

beforeEach(() => {
({ sut, mocks } = newTestService(DatabaseBackupService));
});

it('should work', () => {
expect(sut).toBeDefined();
});

describe('listBackups', () => {
it('should give us all backups', async () => {
mocks.storage.readdir.mockResolvedValue([
`immich-db-backup-${DateTime.fromISO('2025-07-25T11:02:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz.tmp`,
`immich-db-backup-${DateTime.fromISO('2025-07-27T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
'immich-db-backup-1753789649000.sql.gz',
`immich-db-backup-${DateTime.fromISO('2025-07-29T11:01:16Z').toFormat("yyyyLLdd'T'HHmmss")}-v1.234.5-pg14.5.sql.gz`,
]);
mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);

await expect(sut.listBackups()).resolves.toMatchObject({
backups: [
{ filename: 'immich-db-backup-20250729T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
{ filename: 'immich-db-backup-20250727T110116-v1.234.5-pg14.5.sql.gz', filesize: 1024 },
{ filename: 'immich-db-backup-1753789649000.sql.gz', filesize: 1024 },
],
});
});
});

describe('deleteBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.deleteBackup(['filename'])).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should unlink the target file', async () => {
await sut.deleteBackup(['filename.sql']);
expect(mocks.storage.unlink).toHaveBeenCalledTimes(1);
expect(mocks.storage.unlink).toHaveBeenCalledWith(
`${StorageCore.getBaseFolder(StorageFolder.Backups)}/filename.sql`,
);
});
});

describe('uploadBackup', () => {
it('should reject invalid file names', async () => {
await expect(sut.uploadBackup({ originalname: 'invalid backup' } as never)).rejects.toThrowError(
new BadRequestException('Invalid backup name!'),
);
});

it('should write file', async () => {
await sut.uploadBackup({ originalname: 'path.sql.gz', buffer: 'buffer' } as never);
expect(mocks.storage.createOrOverwriteFile).toBeCalledWith('/data/backups/uploaded-path.sql.gz', 'buffer');
});
});

describe('downloadBackup', () => {
it('should reject invalid file names', () => {
expect(() => sut.downloadBackup('invalid backup')).toThrowError(new BadRequestException('Invalid backup name!'));
});

it('should get backup path', () => {
expect(sut.downloadBackup('hello.sql.gz')).toEqual(
expect.objectContaining({
path: '/data/backups/hello.sql.gz',
}),
);
});
});
});

server/src/services/database-backup.service.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
import { Injectable } from '@nestjs/common';
import { DatabaseBackupListResponseDto } from 'src/dtos/database-backup.dto';
import { BaseService } from 'src/services/base.service';
import {
deleteDatabaseBackup,
downloadDatabaseBackup,
listDatabaseBackups,
uploadDatabaseBackup,
} from 'src/utils/database-backups';
import { ImmichFileResponse } from 'src/utils/file';

/**
 * This service is available outside of maintenance mode to manage database backups
 */
@Injectable()
export class DatabaseBackupService extends BaseService {
async listBackups(): Promise<DatabaseBackupListResponseDto> {
const backups = await listDatabaseBackups(this.backupRepos);
return { backups };
}

deleteBackup(files: string[]): Promise<void> {
return deleteDatabaseBackup(this.backupRepos, files);
}

async uploadBackup(file: Express.Multer.File): Promise<void> {
return uploadDatabaseBackup(this.backupRepos, file);
}

downloadBackup(fileName: string): ImmichFileResponse {
return downloadDatabaseBackup(fileName);
}

private get backupRepos() {
return {
logger: this.logger,
storage: this.storageRepository,
config: this.configRepository,
process: this.processRepository,
database: this.databaseRepository,
};
}
}
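Both BackupService and DatabaseBackupService expose the same private backupRepos getter, so the stateless helpers in src/utils/database-backups receive one bundle of dependencies instead of extending the service. A sketch of the structural type this getter implies (the property types are assumptions based on the repository names):

// Assumed shape; the utils presumably accept a bundle like this.
type BackupRepos = {
  logger: LoggingRepository;
  storage: StorageRepository;
  config: ConfigRepository;
  process: ProcessRepository;
  database: DatabaseRepository;
};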
@@ -9,6 +9,7 @@ import { AuthAdminService } from 'src/services/auth-admin.service';
import { AuthService } from 'src/services/auth.service';
import { BackupService } from 'src/services/backup.service';
import { CliService } from 'src/services/cli.service';
import { DatabaseBackupService } from 'src/services/database-backup.service';
import { DatabaseService } from 'src/services/database.service';
import { DownloadService } from 'src/services/download.service';
import { DuplicateService } from 'src/services/duplicate.service';
@@ -59,6 +60,7 @@ export const services = [
AuthAdminService,
BackupService,
CliService,
DatabaseBackupService,
DatabaseService,
DownloadService,
DuplicateService,

@@ -96,6 +96,38 @@ export class JobService extends BaseService {
break;
}

case JobName.AssetEditThumbnailGeneration: {
const asset = await this.assetRepository.getById(item.data.id);

if (asset) {
this.websocketRepository.clientSend('AssetEditReadyV1', asset.ownerId, {
asset: {
id: asset.id,
ownerId: asset.ownerId,
originalFileName: asset.originalFileName,
thumbhash: asset.thumbhash ? hexOrBufferToBase64(asset.thumbhash) : null,
checksum: hexOrBufferToBase64(asset.checksum),
fileCreatedAt: asset.fileCreatedAt,
fileModifiedAt: asset.fileModifiedAt,
localDateTime: asset.localDateTime,
duration: asset.duration,
type: asset.type,
deletedAt: asset.deletedAt,
isFavorite: asset.isFavorite,
visibility: asset.visibility,
livePhotoVideoId: asset.livePhotoVideoId,
stackId: asset.stackId,
libraryId: asset.libraryId,
width: asset.width,
height: asset.height,
isEdited: asset.isEdited,
},
});
}

break;
}

case JobName.AssetGenerateThumbnails: {
if (!item.data.notify && item.data.source !== 'upload') {
break;
@@ -141,6 +173,9 @@ export class JobService extends BaseService {
livePhotoVideoId: asset.livePhotoVideoId,
stackId: asset.stackId,
libraryId: asset.libraryId,
width: asset.width,
height: asset.height,
isEdited: asset.isEdited,
},
exif: {
assetId: exif.assetId,

@@ -1,4 +1,4 @@
import { SystemMetadataKey } from 'src/enum';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { MaintenanceService } from 'src/services/maintenance.service';
import { newTestService, ServiceMocks } from 'test/utils';

@@ -36,28 +36,96 @@ describe(MaintenanceService.name, () => {
});

it('should return true if enabled', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: '' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: '',
action: { action: MaintenanceAction.Start },
});

await expect(sut.getMaintenanceMode()).resolves.toEqual({
isMaintenanceMode: true,
secret: '',
action: {
action: 'start',
},
});

expect(mocks.systemMetadata.get).toHaveBeenCalled();
});
});

describe('integrityCheck', () => {
it('generate integrity report', async () => {
mocks.storage.readdir.mockResolvedValue(['.immich', 'file1', 'file2']);
mocks.storage.readFile.mockResolvedValue(undefined as never);
mocks.storage.overwriteFile.mockRejectedValue(undefined as never);
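// readFile resolving marks each folder readable, while overwriteFile rejecting
// marks it non-writable; that is what every entry in the snapshot below expects.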

await expect(sut.detectPriorInstall()).resolves.toMatchInlineSnapshot(`
{
"storage": [
{
"files": 2,
"folder": "encoded-video",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "library",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "upload",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "profile",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "thumbs",
"readable": true,
"writable": false,
},
{
"files": 2,
"folder": "backups",
"readable": true,
"writable": false,
},
],
}
`);
});
});

describe('startMaintenance', () => {
it('should set maintenance mode and return a secret', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: false });

await expect(sut.startMaintenance('admin')).resolves.toMatchObject({
await expect(
sut.startMaintenance(
{
action: MaintenanceAction.Start,
},
'admin',
),
).resolves.toMatchObject({
jwt: expect.any(String),
});

expect(mocks.systemMetadata.set).toHaveBeenCalledWith(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret: expect.stringMatching(/^\w{128}$/),
action: {
action: 'start',
},
});

expect(mocks.event.emit).toHaveBeenCalledWith('AppRestart', {
@@ -78,7 +146,13 @@ describe(MaintenanceService.name, () => {
});

it('should generate a login url with JWT', async () => {
mocks.systemMetadata.get.mockResolvedValue({ isMaintenanceMode: true, secret: 'secret' });
mocks.systemMetadata.get.mockResolvedValue({
isMaintenanceMode: true,
secret: 'secret',
action: {
action: MaintenanceAction.Start,
},
});

await expect(
sut.createLoginUrl({

@@ -1,11 +1,21 @@
import { Injectable } from '@nestjs/common';
import { BadRequestException, Injectable } from '@nestjs/common';
import { OnEvent } from 'src/decorators';
import { MaintenanceAuthDto } from 'src/dtos/maintenance.dto';
import { SystemMetadataKey } from 'src/enum';
import {
MaintenanceAuthDto,
MaintenanceDetectInstallResponseDto,
MaintenanceStatusResponseDto,
SetMaintenanceModeDto,
} from 'src/dtos/maintenance.dto';
import { MaintenanceAction, SystemMetadataKey } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { MaintenanceModeState } from 'src/types';
import { createMaintenanceLoginUrl, generateMaintenanceSecret, signMaintenanceJwt } from 'src/utils/maintenance';
import {
createMaintenanceLoginUrl,
detectPriorInstall,
generateMaintenanceSecret,
signMaintenanceJwt,
} from 'src/utils/maintenance';
import { getExternalDomain } from 'src/utils/misc';

/**
@@ -19,9 +29,25 @@ export class MaintenanceService extends BaseService {
.then((state) => state ?? { isMaintenanceMode: false });
}

async startMaintenance(username: string): Promise<{ jwt: string }> {
getMaintenanceStatus(): MaintenanceStatusResponseDto {
return {
active: false,
action: MaintenanceAction.End,
};
}

detectPriorInstall(): Promise<MaintenanceDetectInstallResponseDto> {
return detectPriorInstall(this.storageRepository);
}

async startMaintenance(action: SetMaintenanceModeDto, username: string): Promise<{ jwt: string }> {
const secret = generateMaintenanceSecret();
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, { isMaintenanceMode: true, secret });
await this.systemMetadataRepository.set(SystemMetadataKey.MaintenanceMode, {
isMaintenanceMode: true,
secret,
action,
});

await this.eventRepository.emit('AppRestart', { isMaintenanceMode: true });

return {
@@ -31,6 +57,20 @@ export class MaintenanceService extends BaseService {
};
}

async startRestoreFlow(): Promise<{ jwt: string }> {
const adminUser = await this.userRepository.getAdmin();
if (adminUser) {
throw new BadRequestException('The server already has an admin');
}

return this.startMaintenance(
{
action: MaintenanceAction.SelectDatabaseRestore,
},
'admin',
);
}

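The hunks above extend the persisted maintenance metadata with an action. A sketch of the state shape implied by the set() calls and the spec assertions (the canonical MaintenanceModeState lives in src/types and may differ):

import { MaintenanceAction } from 'src/enum';

// Sketch of the persisted SystemMetadataKey.MaintenanceMode value.
type MaintenanceModeState =
  | { isMaintenanceMode: false }
  | {
      isMaintenanceMode: true;
      secret: string; // 128-char secret, per the /^\w{128}$/ assertion in the specs
      action: { action: MaintenanceAction }; // e.g. Start or SelectDatabaseRestore
    };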
@OnEvent({ name: 'AppRestart', server: true })
onRestart(event: ArgOf<'AppRestart'>, ack?: (ok: 'ok') => void): void {
this.logger.log(`Restarting due to event... ${JSON.stringify(event)}`);

@@ -18,13 +18,17 @@ import {
} from 'src/enum';
import { MediaService } from 'src/services/media.service';
import { JobCounts, RawImageInfo } from 'src/types';
import { assetStub } from 'test/fixtures/asset.stub';
import { assetStub, previewFile } from 'test/fixtures/asset.stub';
import { faceStub } from 'test/fixtures/face.stub';
import { probeStub } from 'test/fixtures/media.stub';
import { personStub, personThumbnailStub } from 'test/fixtures/person.stub';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { makeStream, newTestService, ServiceMocks } from 'test/utils';

const fullsizeBuffer = Buffer.from('embedded image data');
const rawBuffer = Buffer.from('raw image data');
const extractedBuffer = Buffer.from('embedded image file');

describe(MediaService.name, () => {
let sut: MediaService;
let mocks: ServiceMocks;
@@ -160,6 +164,42 @@ describe(MediaService.name, () => {

expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
});

it('should queue assets with edits but missing edited thumbnails', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.withCropEdit]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: false });

expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(false);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.AssetEditThumbnailGeneration,
data: { id: assetStub.withCropEdit.id },
},
]);

expect(mocks.person.getAll).toHaveBeenCalledWith({ thumbnailPath: '' });
});

it('should queue both regular and edited thumbnails for assets with edits when force is true', async () => {
mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.withCropEdit]));
mocks.person.getAll.mockReturnValue(makeStream());
await sut.handleQueueGenerateThumbnails({ force: true });

expect(mocks.assetJob.streamForThumbnailJob).toHaveBeenCalledWith(true);
expect(mocks.job.queueAll).toHaveBeenCalledWith([
{
name: JobName.AssetGenerateThumbnails,
data: { id: assetStub.withCropEdit.id },
},
{
name: JobName.AssetEditThumbnailGeneration,
data: { id: assetStub.withCropEdit.id },
},
]);

expect(mocks.person.getAll).toHaveBeenCalledWith(undefined);
});
});

describe('handleQueueMigration', () => {
@@ -222,16 +262,12 @@ describe(MediaService.name, () => {
});

describe('handleGenerateThumbnails', () => {
let rawBuffer: Buffer;
let fullsizeBuffer: Buffer;
let extractedBuffer: Buffer;
let rawInfo: RawImageInfo;

beforeEach(() => {
fullsizeBuffer = Buffer.from('embedded image data');
rawBuffer = Buffer.from('raw image data');
extractedBuffer = Buffer.from('embedded image file');
rawInfo = { width: 100, height: 100, channels: 3 };
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
mocks.media.decodeImage.mockImplementation((input) =>
Promise.resolve(
typeof input === 'string'
@@ -281,7 +317,12 @@ describe(MediaService.name, () => {

await sut.handleGenerateThumbnails({ id: assetStub.image.id });

expect(mocks.storage.unlink).toHaveBeenCalledWith('/uploads/user-id/thumbs/path.jpg');
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: {
files: expect.arrayContaining([previewFile.path]),
},
});
});

it('should generate P3 thumbnails for a wide gamut image', async () => {
@@ -313,6 +354,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -325,6 +367,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -334,6 +377,7 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
processInvalidImages: false,
raw: rawInfo,
edits: [],
});

expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([
@@ -527,6 +571,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
previewPath,
);
@@ -539,6 +584,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
thumbnailPath,
);
@@ -572,6 +618,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
previewPath,
);
@@ -584,6 +631,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
thumbnailPath,
);
@@ -595,7 +643,12 @@ describe(MediaService.name, () => {

await sut.handleGenerateThumbnails({ id: assetStub.image.id });

expect(mocks.storage.unlink).toHaveBeenCalledWith('/uploads/user-id/webp/path.ext');
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: {
files: expect.arrayContaining([previewFile.path]),
},
});
});

it('should extract embedded image if enabled and available', async () => {
@@ -641,7 +694,6 @@ describe(MediaService.name, () => {
processInvalidImages: false,
size: 1440,
});
expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
});

it('should resize original image if embedded image extraction is not enabled', async () => {
@@ -657,7 +709,6 @@ describe(MediaService.name, () => {
processInvalidImages: false,
size: 1440,
});
expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
});

it('should process invalid images if enabled', async () => {
@@ -691,7 +742,6 @@ describe(MediaService.name, () => {
expect.objectContaining({ processInvalidImages: false }),
);

expect(mocks.media.getImageDimensions).not.toHaveBeenCalled();
vi.unstubAllEnvs();
});

@@ -722,6 +772,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -752,6 +803,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -764,6 +816,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -792,6 +845,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -804,6 +858,7 @@ describe(MediaService.name, () => {
size: 1440,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -833,6 +888,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -888,6 +944,7 @@ describe(MediaService.name, () => {
quality: 80,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
@@ -926,12 +983,166 @@ describe(MediaService.name, () => {
quality: 90,
processInvalidImages: false,
raw: rawInfo,
edits: [],
},
expect.any(String),
);
});
});

describe('handleAssetEditThumbnailGeneration', () => {
let rawInfo: RawImageInfo;

beforeEach(() => {
rawInfo = { width: 100, height: 100, channels: 3 };
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
mocks.media.decodeImage.mockImplementation((input) =>
Promise.resolve(
typeof input === 'string'
? { data: rawBuffer, info: rawInfo as OutputInfo } // string implies original file
: { data: fullsizeBuffer, info: rawInfo as OutputInfo }, // buffer implies embedded image extracted
),
);
});

it('should skip videos', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(assetStub.video);

await expect(sut.handleAssetEditThumbnailGeneration({ id: assetStub.video.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
});

it('should upsert 3 edited files for edit jobs', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);

await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });

expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ type: AssetFileType.FullSizeEdited }),
expect.objectContaining({ type: AssetFileType.PreviewEdited }),
expect.objectContaining({ type: AssetFileType.ThumbnailEdited }),
]),
);
});

it('should apply edits when generating thumbnails', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);

await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({
edits: [
{
action: 'crop',
parameters: { height: 1152, width: 1512, x: 216, y: 1512 },
},
],
}),
expect.any(String),
);
});

it('should clean up edited files if an asset has no edits', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withoutEdits,
});

const status = await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: {
files: expect.arrayContaining([
'/uploads/user-id/fullsize/path_edited.jpg',
'/uploads/user-id/preview/path_edited.jpg',
'/uploads/user-id/thumbnail/path_edited.jpg',
]),
},
});

expect(mocks.asset.deleteFiles).toHaveBeenCalledWith(
expect.arrayContaining([
expect.objectContaining({ path: '/uploads/user-id/preview/path_edited.jpg' }),
expect.objectContaining({ path: '/uploads/user-id/thumbnail/path_edited.jpg' }),
expect.objectContaining({ path: '/uploads/user-id/fullsize/path_edited.jpg' }),
]),
);

expect(status).toBe(JobStatus.Success);
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
});

it('should generate all 3 edited files if an asset has edits', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);

await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });

expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.anything(),
expect.stringContaining('edited_preview.jpeg'),
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.anything(),
expect.stringContaining('edited_thumbnail.webp'),
);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.anything(),
expect.stringContaining('edited_fullsize.jpeg'),
);
});

it('should generate the original thumbhash if no edits exist', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withoutEdits,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);

await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id, source: 'upload' });

expect(mocks.media.generateThumbhash).toHaveBeenCalled();
});

it('should apply thumbhash if job source is edit and edits exist', async () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue({
...assetStub.withCropEdit,
});
const thumbhashBuffer = Buffer.from('a thumbhash', 'utf8');
mocks.media.generateThumbhash.mockResolvedValue(thumbhashBuffer);
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);

await sut.handleAssetEditThumbnailGeneration({ id: assetStub.image.id });

expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
thumbhash: thumbhashBuffer,
}),
);
});
});

describe('handleGeneratePersonThumbnail', () => {
it('should skip if machine learning is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
@@ -981,12 +1192,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 238,
top: 163,
width: 274,
height: 274,
},
edits: [
{
action: 'crop',
parameters: {
height: 274,
width: 274,
x: 238,
y: 163,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@@ -1020,12 +1236,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 238,
top: 163,
width: 274,
height: 274,
},
edits: [
{
action: 'crop',
parameters: {
height: 274,
width: 274,
x: 238,
y: 163,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@@ -1057,12 +1278,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 0,
top: 85,
width: 510,
height: 510,
},
edits: [
{
action: 'crop',
parameters: {
height: 510,
width: 510,
x: 0,
y: 85,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@@ -1094,12 +1320,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 591,
top: 591,
width: 408,
height: 408,
},
edits: [
{
action: 'crop',
parameters: {
height: 408,
width: 408,
x: 591,
y: 591,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@@ -1131,12 +1362,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 0,
top: 62,
width: 412,
height: 412,
},
edits: [
{
action: 'crop',
parameters: {
height: 412,
width: 412,
x: 0,
y: 62,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@@ -1168,12 +1404,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
left: 4485,
top: 94,
width: 138,
height: 138,
},
edits: [
{
action: 'crop',
parameters: {
height: 138,
width: 138,
x: 4485,
y: 94,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@@ -1210,12 +1451,17 @@ describe(MediaService.name, () => {
colorspace: Colorspace.P3,
format: ImageFormat.Jpeg,
quality: 80,
crop: {
height: 844,
left: 388,
top: 730,
width: 844,
},
edits: [
{
action: 'crop',
parameters: {
height: 844,
width: 844,
x: 388,
y: 730,
},
},
],
raw: info,
processInvalidImages: false,
size: 250,
@@ -2999,4 +3245,147 @@ describe(MediaService.name, () => {
expect(sut.isSRGB({ profileDescription: 'sRGB', bitsPerSample: 16 } as Exif)).toEqual(true);
});
});

describe('syncFiles', () => {
it('should upsert new files when they do not exist', async () => {
const asset = {
id: 'asset-id',
files: [],
};

await sut['syncFiles'](asset, [
{ type: AssetFileType.Preview, newPath: '/new/preview.jpg' },
{ type: AssetFileType.Thumbnail, newPath: '/new/thumbnail.jpg' },
]);

expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([
{ assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview },
{ assetId: 'asset-id', path: '/new/thumbnail.jpg', type: AssetFileType.Thumbnail },
]);
expect(mocks.asset.deleteFiles).not.toHaveBeenCalled();
expect(mocks.job.queue).not.toHaveBeenCalled();
});

it('should replace existing files with new paths', async () => {
const asset = {
id: 'asset-id',
files: [
{ id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' },
{ id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
],
};

await sut['syncFiles'](asset, [
{ type: AssetFileType.Preview, newPath: '/new/preview.jpg' },
{ type: AssetFileType.Thumbnail, newPath: '/new/thumbnail.jpg' },
]);

expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([
{ assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview },
{ assetId: 'asset-id', path: '/new/thumbnail.jpg', type: AssetFileType.Thumbnail },
]);
expect(mocks.asset.deleteFiles).not.toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: { files: ['/old/preview.jpg', '/old/thumbnail.jpg'] },
});
});

it('should delete files when newPath is not provided', async () => {
const asset = {
id: 'asset-id',
files: [
{ id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' },
{ id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
],
};

await sut['syncFiles'](asset, [{ type: AssetFileType.Preview }, { type: AssetFileType.Thumbnail }]);

expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
expect(mocks.asset.deleteFiles).toHaveBeenCalledWith([
{ id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' },
{ id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
]);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: { files: ['/old/preview.jpg', '/old/thumbnail.jpg'] },
});
});

it('should not make changes when file paths already match', async () => {
const asset = {
id: 'asset-id',
files: [
{ id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/same/preview.jpg' },
{ id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/same/thumbnail.jpg' },
],
};

await sut['syncFiles'](asset, [
{ type: AssetFileType.Preview, newPath: '/same/preview.jpg' },
{ type: AssetFileType.Thumbnail, newPath: '/same/thumbnail.jpg' },
]);

expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
expect(mocks.asset.deleteFiles).not.toHaveBeenCalled();
expect(mocks.job.queue).not.toHaveBeenCalled();
});

it('should handle mixed operations (upsert, replace, delete)', async () => {
const asset = {
id: 'asset-id',
files: [
{ id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' },
{ id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
],
};

await sut['syncFiles'](asset, [
{ type: AssetFileType.Preview, newPath: '/new/preview.jpg' }, // replace
{ type: AssetFileType.Thumbnail }, // delete
{ type: AssetFileType.FullSize, newPath: '/new/fullsize.jpg' }, // new
]);

expect(mocks.asset.upsertFiles).toHaveBeenCalledWith([
{ assetId: 'asset-id', path: '/new/preview.jpg', type: AssetFileType.Preview },
{ assetId: 'asset-id', path: '/new/fullsize.jpg', type: AssetFileType.FullSize },
]);
expect(mocks.asset.deleteFiles).toHaveBeenCalledWith([
{ id: 'file-2', assetId: 'asset-id', type: AssetFileType.Thumbnail, path: '/old/thumbnail.jpg' },
]);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: { files: ['/old/preview.jpg', '/old/thumbnail.jpg'] },
});
});

it('should handle empty file list', async () => {
const asset = {
id: 'asset-id',
files: [],
};

await sut['syncFiles'](asset, []);

expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
expect(mocks.asset.deleteFiles).not.toHaveBeenCalled();
expect(mocks.job.queue).not.toHaveBeenCalled();
});

it('should delete non-existent file types when newPath is not provided', async () => {
const asset = {
id: 'asset-id',
files: [{ id: 'file-1', assetId: 'asset-id', type: AssetFileType.Preview, path: '/old/preview.jpg' }],
};

await sut['syncFiles'](asset, [
{ type: AssetFileType.Thumbnail }, // file doesn't exist, newPath not provided
]);

expect(mocks.asset.upsertFiles).not.toHaveBeenCalled();
expect(mocks.asset.deleteFiles).not.toHaveBeenCalled();
expect(mocks.job.queue).not.toHaveBeenCalled();
});
});
});

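The tests above fully pin down the new private syncFiles helper: upsert changed paths, delete records whose desired newPath is absent, and queue one FileDelete job for every replaced or removed path. A sketch of a method body consistent with those tests (assuming the service's assetRepository and jobRepository; the actual implementation in media.service.ts may differ):

private async syncFiles(
  asset: { id: string; files: AssetFile[] },
  desired: { type: AssetFileType; newPath?: string }[],
): Promise<void> {
  const toUpsert: { assetId: string; path: string; type: AssetFileType }[] = [];
  const toDelete: AssetFile[] = [];
  const pathsToDelete: string[] = [];

  for (const { type, newPath } of desired) {
    const existing = asset.files.find((file) => file.type === type);
    if (newPath) {
      if (existing?.path === newPath) {
        continue; // already in sync, nothing to do
      }
      toUpsert.push({ assetId: asset.id, path: newPath, type });
      if (existing) {
        pathsToDelete.push(existing.path); // replaced on disk
      }
    } else if (existing) {
      toDelete.push(existing); // no desired path: drop the record
      pathsToDelete.push(existing.path);
    }
  }

  if (toUpsert.length > 0) {
    await this.assetRepository.upsertFiles(toUpsert);
  }
  if (toDelete.length > 0) {
    await this.assetRepository.deleteFiles(toDelete);
  }
  if (pathsToDelete.length > 0) {
    await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: pathsToDelete } });
  }
}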
@@ -1,8 +1,10 @@
import { Injectable } from '@nestjs/common';
import { SystemConfig } from 'src/config';
import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore, ThumbnailPathEntity } from 'src/cores/storage.core';
import { Exif } from 'src/database';
import { AssetFile, Exif } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import { AssetEditAction, CropParameters } from 'src/dtos/editing.dto';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import {
AssetFileType,
@@ -24,12 +26,13 @@ import {
VideoCodec,
VideoContainer,
} from 'src/enum';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { BoundingBox } from 'src/repositories/machine-learning.repository';
import { BaseService } from 'src/services/base.service';
import {
AudioStreamInfo,
CropOptions,
DecodeToBufferOptions,
GenerateThumbnailOptions,
ImageDimensions,
JobItem,
JobOf,
@@ -37,16 +40,20 @@ import {
VideoInterfaces,
VideoStreamInfo,
} from 'src/types';
import { getAssetFiles } from 'src/utils/asset.util';
import { getAssetFiles, getDimensions } from 'src/utils/asset.util';
import { checkFaceVisibility, checkOcrVisibility } from 'src/utils/editor';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
import { clamp, isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
import { getOutputDimensions } from 'src/utils/transform';
interface UpsertFileOptions {
assetId: string;
type: AssetFileType;
path: string;
}

type ThumbnailAsset = NonNullable<Awaited<ReturnType<AssetJobRepository['getForGenerateThumbnailJob']>>>;

@Injectable()
export class MediaService extends BaseService {
videoInterfaces: VideoInterfaces = { dri: [], mali: false };
@@ -67,12 +74,19 @@ export class MediaService extends BaseService {
};

for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
const assetFiles = getAssetFiles(asset.files);

if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
if (!assetFiles.previewFile || !assetFiles.thumbnailFile || !asset.thumbhash || force) {
jobs.push({ name: JobName.AssetGenerateThumbnails, data: { id: asset.id } });
}

if (
asset.edits.length > 0 &&
(!assetFiles.editedPreviewFile || !assetFiles.editedThumbnailFile || !assetFiles.editedFullsizeFile || force)
) {
jobs.push({ name: JobName.AssetEditThumbnailGeneration, data: { id: asset.id } });
}

if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await queueAll();
}
@@ -154,9 +168,45 @@ export class MediaService extends BaseService {
return JobStatus.Success;
}

@OnJob({ name: JobName.AssetEditThumbnailGeneration, queue: QueueName.Editor })
async handleAssetEditThumbnailGeneration({ id }: JobOf<JobName.AssetEditThumbnailGeneration>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id);

if (!asset) {
this.logger.warn(`Thumbnail generation failed for asset ${id}: not found in database or missing metadata`);
return JobStatus.Failed;
}

const generated = await this.generateEditedThumbnails(asset);

let thumbhash: Buffer | undefined = generated?.thumbhash;
if (!thumbhash) {
const { image } = await this.getConfig({ withCache: true });
const extractedImage = await this.extractOriginalImage(asset, image);
const { info, data, colorspace } = extractedImage;

thumbhash = await this.mediaRepository.generateThumbhash(data, {
colorspace,
processInvalidImages: false,
raw: info,
edits: [],
});
}

if (!asset.thumbhash || Buffer.compare(asset.thumbhash, thumbhash) !== 0) {
await this.assetRepository.update({ id: asset.id, thumbhash });
}

const fullsizeDimensions = generated?.fullsizeDimensions ?? getDimensions(asset.exifInfo!);
await this.assetRepository.update({ id: asset.id, ...fullsizeDimensions });

return JobStatus.Success;
}

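// Note: the call sites above imply generateEditedThumbnails returns
// { thumbhash: Buffer; fullsizeDimensions?: ImageDimensions } when the asset
// has edits and undefined otherwise, which is why the handler above falls back
// to hashing the original image and to the EXIF dimensions from getDimensions().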
@OnJob({ name: JobName.AssetGenerateThumbnails, queue: QueueName.ThumbnailGeneration })
async handleGenerateThumbnails({ id }: JobOf<JobName.AssetGenerateThumbnails>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id);

if (!asset) {
this.logger.warn(`Thumbnail generation failed for asset ${id}: not found in database or missing metadata`);
return JobStatus.Failed;
@@ -172,6 +222,7 @@ export class MediaService extends BaseService {
thumbnailPath: string;
fullsizePath?: string;
thumbhash: Buffer;
fullsizeDimensions?: ImageDimensions;
};
if (asset.type === AssetType.Video || asset.originalFileName.toLowerCase().endsWith('.gif')) {
this.logger.verbose(`Thumbnail generation for video ${id} ${asset.originalPath}`);
@@ -184,54 +235,19 @@ export class MediaService extends BaseService {
return JobStatus.Skipped;
}

const { previewFile, thumbnailFile, fullsizeFile } = getAssetFiles(asset.files);
const toUpsert: UpsertFileOptions[] = [];
if (previewFile?.path !== generated.previewPath) {
toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.Preview });
}
await this.syncFiles(asset, [
{ type: AssetFileType.Preview, newPath: generated.previewPath },
{ type: AssetFileType.Thumbnail, newPath: generated.thumbnailPath },
{ type: AssetFileType.FullSize, newPath: generated.fullsizePath },
]);

if (thumbnailFile?.path !== generated.thumbnailPath) {
toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.Thumbnail });
}
const editedGenerated = await this.generateEditedThumbnails(asset);
const thumbhash = editedGenerated?.thumbhash || generated.thumbhash;

    if (generated.fullsizePath && fullsizeFile?.path !== generated.fullsizePath) {
      toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FullSize });
    if (!asset.thumbhash || Buffer.compare(asset.thumbhash, thumbhash) !== 0) {
      await this.assetRepository.update({ id: asset.id, thumbhash });
    }

    if (toUpsert.length > 0) {
      await this.assetRepository.upsertFiles(toUpsert);
    }

    const pathsToDelete: string[] = [];
    if (previewFile && previewFile.path !== generated.previewPath) {
      this.logger.debug(`Deleting old preview for asset ${asset.id}`);
      pathsToDelete.push(previewFile.path);
    }

    if (thumbnailFile && thumbnailFile.path !== generated.thumbnailPath) {
      this.logger.debug(`Deleting old thumbnail for asset ${asset.id}`);
      pathsToDelete.push(thumbnailFile.path);
    }

    if (fullsizeFile && fullsizeFile.path !== generated.fullsizePath) {
      this.logger.debug(`Deleting old fullsize preview image for asset ${asset.id}`);
      pathsToDelete.push(fullsizeFile.path);
      if (!generated.fullsizePath) {
        // did not generate a new fullsize image, delete the existing record
        await this.assetRepository.deleteFiles([fullsizeFile]);
      }
    }

    if (pathsToDelete.length > 0) {
      await Promise.all(pathsToDelete.map((path) => this.storageRepository.unlink(path)));
    }

    if (!asset.thumbhash || Buffer.compare(asset.thumbhash, generated.thumbhash) !== 0) {
      await this.assetRepository.update({ id: asset.id, thumbhash: generated.thumbhash });
    }

    await this.assetRepository.upsertJobStatus({ assetId: asset.id, previewAt: new Date(), thumbnailAt: new Date() });

    return JobStatus.Success;
  }

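Side note on the fallback `const thumbhash = editedGenerated?.thumbhash || generated.thumbhash`: a Buffer is an object and therefore always truthy (even when empty), so `||` only falls through when no edited thumbnails were generated; `??` would behave identically here.

console.log(Boolean(Buffer.alloc(0))); // true — even an empty Buffer is truthy
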
@@ -258,27 +274,20 @@ export class MediaService extends BaseService {
    return { info, data, colorspace };
  }

  private async generateImageThumbnails(asset: {
    id: string;
    ownerId: string;
    originalFileName: string;
    originalPath: string;
    exifInfo: Exif;
  }) {
    const { image } = await this.getConfig({ withCache: true });
    const previewPath = StorageCore.getImagePath(asset, AssetPathType.Preview, image.preview.format);
    const thumbnailPath = StorageCore.getImagePath(asset, AssetPathType.Thumbnail, image.thumbnail.format);
    this.storageCore.ensureFolders(previewPath);

  // Handle embedded preview extraction for RAW files
  private async extractOriginalImage(
    asset: NonNullable<ThumbnailAsset>,
    image: SystemConfig['image'],
    useEdits = false,
  ) {
    const extractEmbedded = image.extractEmbedded && mimeTypes.isRaw(asset.originalFileName);
    const extracted = extractEmbedded ? await this.extractImage(asset.originalPath, image.preview.size) : null;
    const generateFullsize =
      (image.fullsize.enabled || asset.exifInfo.projectionType == 'EQUIRECTANGULAR') &&
      !mimeTypes.isWebSupportedImage(asset.originalPath);
      ((image.fullsize.enabled || asset.exifInfo.projectionType === 'EQUIRECTANGULAR') &&
        !mimeTypes.isWebSupportedImage(asset.originalPath)) ||
      useEdits;
    const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(`.${extracted.format}`));

    const { info, data, colorspace } = await this.decodeImage(
    const { data, info, colorspace } = await this.decodeImage(
      extracted ? extracted.buffer : asset.originalPath,
      // only specify orientation to extracted images which don't have EXIF orientation data
      // or it can double rotate the image
@@ -286,20 +295,64 @@ export class MediaService extends BaseService {
      convertFullsize ? undefined : image.preview.size,
    );

    return {
      extracted,
      data,
      info,
      colorspace,
      convertFullsize,
      generateFullsize,
    };
  }

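The comment inside the decodeImage argument list captures a subtle pitfall: an embedded preview that already carries its own EXIF orientation must not also be given an explicit orientation, or it gets rotated twice. A hypothetical helper expressing that rule (names are illustrative, not part of the diff):

// pass an explicit orientation only for extracted previews without EXIF orientation of their own
const orientationFor = (
  extracted: boolean,
  hasExifOrientation: boolean,
  exifOrientation?: number,
): number | undefined => (extracted && !hasExifOrientation ? exifOrientation : undefined);
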
  private async generateImageThumbnails(asset: ThumbnailAsset, useEdits: boolean = false) {
    const { image } = await this.getConfig({ withCache: true });
    const previewPath = StorageCore.getImagePath(
      asset,
      useEdits ? AssetPathType.EditedPreview : AssetPathType.Preview,
      image.preview.format,
    );
    const thumbnailPath = StorageCore.getImagePath(
      asset,
      useEdits ? AssetPathType.EditedThumbnail : AssetPathType.Thumbnail,
      image.thumbnail.format,
    );
    this.storageCore.ensureFolders(previewPath);

    // Handle embedded preview extraction for RAW files
    const extractedImage = await this.extractOriginalImage(asset, image, useEdits);
    const { info, data, colorspace, generateFullsize, convertFullsize, extracted } = extractedImage;

    // generate final images
    const thumbnailOptions = { colorspace, processInvalidImages: false, raw: info };
    const thumbnailOptions = { colorspace, processInvalidImages: false, raw: info, edits: useEdits ? asset.edits : [] };
    const promises = [
      this.mediaRepository.generateThumbhash(data, thumbnailOptions),
      this.mediaRepository.generateThumbnail(data, { ...image.thumbnail, ...thumbnailOptions }, thumbnailPath),
      this.mediaRepository.generateThumbnail(data, { ...image.preview, ...thumbnailOptions }, previewPath),
      this.mediaRepository.generateThumbnail(
        data,
        { ...image.thumbnail, ...thumbnailOptions, edits: useEdits ? asset.edits : [] },
        thumbnailPath,
      ),
      this.mediaRepository.generateThumbnail(
        data,
        { ...image.preview, ...thumbnailOptions, edits: useEdits ? asset.edits : [] },
        previewPath,
      ),
    ];

    let fullsizePath: string | undefined;

    if (convertFullsize) {
      // convert a new fullsize image from the same source as the thumbnail
      fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, image.fullsize.format);
      const fullsizeOptions = { format: image.fullsize.format, quality: image.fullsize.quality, ...thumbnailOptions };
      fullsizePath = StorageCore.getImagePath(
        asset,
        useEdits ? AssetPathType.EditedFullSize : AssetPathType.FullSize,
        image.fullsize.format,
      );
      const fullsizeOptions = {
        format: image.fullsize.format,
        quality: image.fullsize.quality,
        ...thumbnailOptions,
      };
      promises.push(this.mediaRepository.generateThumbnail(data, fullsizeOptions, fullsizePath));
    } else if (generateFullsize && extracted && extracted.format === RawExtractedFormat.Jpeg) {
      fullsizePath = StorageCore.getImagePath(asset, AssetPathType.FullSize, extracted.format);
@@ -328,7 +381,10 @@ export class MediaService extends BaseService {
      await Promise.all(promises);
    }

    return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer };
    const decodedDimensions = { width: info.width, height: info.height };
    const fullsizeDimensions = useEdits ? getOutputDimensions(asset.edits, decodedDimensions) : decodedDimensions;

    return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer, fullsizeDimensions };
  }

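getOutputDimensions (imported from src/utils) computes the dimensions of the final image after the edit pipeline runs over the decoded size. Its implementation is not part of this diff; a rough sketch of the expected behavior for a crop edit, stated purely as an assumption:

type Dimensions = { width: number; height: number };
type Edit = { action: string; parameters: { width: number; height: number } };

// illustrative only: a crop edit replaces the decoded dimensions with the crop rectangle
const getOutputDimensionsSketch = (edits: Edit[], decoded: Dimensions): Dimensions => {
  const crop = edits.find((edit) => edit.action === 'crop');
  return crop ? { width: crop.parameters.width, height: crop.parameters.height } : decoded;
};
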
  @OnJob({ name: JobName.PersonGenerateThumbnail, queue: QueueName.ThumbnailGeneration })
@@ -369,17 +425,22 @@ export class MediaService extends BaseService {
    const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
    this.storageCore.ensureFolders(thumbnailPath);

    const thumbnailOptions = {
    const thumbnailOptions: GenerateThumbnailOptions = {
      colorspace: image.colorspace,
      format: ImageFormat.Jpeg,
      raw: info,
      quality: image.thumbnail.quality,
      crop: this.getCrop(
        { old: { width: oldWidth, height: oldHeight }, new: { width: info.width, height: info.height } },
        { x1, y1, x2, y2 },
      ),
      processInvalidImages: false,
      size: FACE_THUMBNAIL_SIZE,
      edits: [
        {
          action: AssetEditAction.Crop,
          parameters: this.getCrop(
            { old: { width: oldWidth, height: oldHeight }, new: { width: info.width, height: info.height } },
            { x1, y1, x2, y2 },
          ),
        },
      ],
    };

    await this.mediaRepository.generateThumbnail(decodedImage, thumbnailOptions, thumbnailPath);
@@ -388,7 +449,10 @@ export class MediaService extends BaseService {
    return JobStatus.Success;
  }

  private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
  private getCrop(
    dims: { old: ImageDimensions; new: ImageDimensions },
    { x1, y1, x2, y2 }: BoundingBox,
  ): CropParameters {
    // face bounding boxes can spill outside the image dimensions
    const clampedX1 = clamp(x1, 0, dims.old.width);
    const clampedY1 = clamp(y1, 0, dims.old.height);
@@ -416,8 +480,8 @@ export class MediaService extends BaseService {
    );

    return {
      left: middleX - newHalfSize,
      top: middleY - newHalfSize,
      x: middleX - newHalfSize,
      y: middleY - newHalfSize,
      width: newHalfSize * 2,
      height: newHalfSize * 2,
    };
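The clamp calls pin face coordinates into the valid pixel range before the square crop is centered, since detected boxes can extend past the image border. Assuming a lodash-style clamp:

const clamp = (value: number, lower: number, upper: number) => Math.min(Math.max(value, lower), upper);
clamp(-40, 0, 1000); // 0 — a box spilling past the left edge snaps to the border
clamp(1100, 0, 1000); // 1000 — likewise on the right
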
@@ -454,7 +518,12 @@ export class MediaService extends BaseService {
      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
    });

    return { previewPath, thumbnailPath, thumbhash };
    return {
      previewPath,
      thumbnailPath,
      thumbhash,
      fullsizeDimensions: { width: mainVideoStream.width, height: mainVideoStream.height },
    };
  }

  @OnJob({ name: JobName.AssetEncodeVideoQueueAll, queue: QueueName.VideoConversion })
@@ -707,4 +776,84 @@ export class MediaService extends BaseService {
    return false;
  }
}

  private async syncFiles(
    asset: { id: string; files: AssetFile[] },
    files: { type: AssetFileType; newPath?: string }[],
  ) {
    const toUpsert: UpsertFileOptions[] = [];
    const pathsToDelete: string[] = [];
    const toDelete: AssetFile[] = [];

    for (const { type, newPath } of files) {
      const existingFile = asset.files.find((file) => file.type === type);

      // upsert new file path
      if (newPath && existingFile?.path !== newPath) {
        toUpsert.push({ assetId: asset.id, path: newPath, type });

        // delete old file from disk
        if (existingFile) {
          this.logger.debug(`Deleting old ${type} image for asset ${asset.id} in favor of a replacement`);
          pathsToDelete.push(existingFile.path);
        }
      }

      // delete old file from disk and database
      if (!newPath && existingFile) {
        this.logger.debug(`Deleting old ${type} image for asset ${asset.id}`);

        pathsToDelete.push(existingFile.path);
        toDelete.push(existingFile);
      }
    }

    if (toUpsert.length > 0) {
      await this.assetRepository.upsertFiles(toUpsert);
    }

    if (toDelete.length > 0) {
      await this.assetRepository.deleteFiles(toDelete);
    }

    if (pathsToDelete.length > 0) {
      await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: pathsToDelete } });
    }
  }
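Usage note: syncFiles treats the list as desired state, so an entry whose newPath is undefined removes both the database record and the file on disk. generateEditedThumbnails below relies on this to clear stale edited variants when an asset no longer has edits, e.g.:

// prune an edited fullsize variant that should no longer exist
await this.syncFiles(asset, [{ type: AssetFileType.FullSizeEdited, newPath: undefined }]);
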

  private async generateEditedThumbnails(asset: ThumbnailAsset) {
    if (asset.type !== AssetType.Image) {
      return;
    }

    const generated = asset.edits.length > 0 ? await this.generateImageThumbnails(asset, true) : undefined;

    await this.syncFiles(asset, [
      { type: AssetFileType.PreviewEdited, newPath: generated?.previewPath },
      { type: AssetFileType.ThumbnailEdited, newPath: generated?.thumbnailPath },
      { type: AssetFileType.FullSizeEdited, newPath: generated?.fullsizePath },
    ]);

    const crop = asset.edits.find((e) => e.action === AssetEditAction.Crop);
    const cropBox = crop
      ? {
          x1: crop.parameters.x,
          y1: crop.parameters.y,
          x2: crop.parameters.x + crop.parameters.width,
          y2: crop.parameters.y + crop.parameters.height,
        }
      : undefined;

    const originalDimensions = getDimensions(asset.exifInfo!);
    const assetFaces = await this.personRepository.getFaces(asset.id, {});
    const ocrData = await this.ocrRepository.getByAssetId(asset.id, {});

    const faceStatuses = checkFaceVisibility(assetFaces, originalDimensions, cropBox);
    await this.personRepository.updateVisibility(faceStatuses.visible, faceStatuses.hidden);

    const ocrStatuses = checkOcrVisibility(ocrData, originalDimensions, cropBox);
    await this.ocrRepository.updateOcrVisibilities(asset.id, ocrStatuses.visible, ocrStatuses.hidden);

    return generated;
  }
}

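checkFaceVisibility and checkOcrVisibility (used in generateEditedThumbnails above) partition existing detections by whether they remain visible after the crop; their implementations live outside this diff. A hypothetical sketch of the core geometric test, for intuition only:

type Box = { x1: number; y1: number; x2: number; y2: number };

// illustrative only — the real utils may require full containment rather than overlap
const overlaps = (a: Box, b: Box) => a.x1 < b.x2 && a.x2 > b.x1 && a.y1 < b.y2 && a.y2 > b.y1;
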
@@ -224,6 +224,8 @@ describe(MetadataService.name, () => {
        fileCreatedAt: fileModifiedAt,
        fileModifiedAt,
        localDateTime: fileModifiedAt,
        width: null,
        height: null,
      });
    });

@@ -251,6 +253,8 @@ describe(MetadataService.name, () => {
        fileCreatedAt,
        fileModifiedAt,
        localDateTime: fileCreatedAt,
        width: null,
        height: null,
      });
    });

@@ -297,6 +301,8 @@ describe(MetadataService.name, () => {
        fileCreatedAt: assetStub.image.fileCreatedAt,
        fileModifiedAt: assetStub.image.fileCreatedAt,
        localDateTime: assetStub.image.fileCreatedAt,
        width: null,
        height: null,
      });
    });

@@ -327,6 +333,8 @@ describe(MetadataService.name, () => {
        fileCreatedAt: assetStub.withLocation.fileCreatedAt,
        fileModifiedAt: assetStub.withLocation.fileModifiedAt,
        localDateTime: new Date('2023-02-22T05:06:29.716Z'),
        width: null,
        height: null,
      });
    });

@@ -357,6 +365,8 @@ describe(MetadataService.name, () => {
        fileCreatedAt: assetStub.withLocation.fileCreatedAt,
        fileModifiedAt: assetStub.withLocation.fileModifiedAt,
        localDateTime: new Date('2023-02-22T05:06:29.716Z'),
        width: null,
        height: null,
      });
    });

@@ -1560,6 +1570,49 @@ describe(MetadataService.name, () => {
        { lockedPropertiesBehavior: 'skip' },
      );
    });

    it('should properly set width/height for normal images', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
      mockReadTags({ ImageWidth: 1000, ImageHeight: 2000 });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });
      expect(mocks.asset.update).toHaveBeenCalledWith(
        expect.objectContaining({
          width: 1000,
          height: 2000,
        }),
      );
    });

    it('should properly swap asset width/height for rotated images', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
      mockReadTags({ ImageWidth: 1000, ImageHeight: 2000, Orientation: 6 });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });
      expect(mocks.asset.update).toHaveBeenCalledWith(
        expect.objectContaining({
          width: 2000,
          height: 1000,
        }),
      );
    });

    it('should not overwrite existing width/height if they already exist', async () => {
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
        ...assetStub.image,
        width: 1920,
        height: 1080,
      });
      mockReadTags({ ImageWidth: 1280, ImageHeight: 720 });

      await sut.handleMetadataExtraction({ id: assetStub.image.id });
      expect(mocks.asset.update).not.toHaveBeenCalledWith(
        expect.objectContaining({
          width: 1280,
          height: 720,
        }),
      );
    });
  });

  describe('handleQueueSidecar', () => {
@@ -1705,6 +1758,12 @@ describe(MetadataService.name, () => {
        GPSLatitude: gps,
        GPSLongitude: gps,
      });
      expect(mocks.asset.unlockProperties).toHaveBeenCalledWith(asset.id, [
        'description',
        'latitude',
        'longitude',
        'dateTimeOriginal',
      ]);
    });
  });

@@ -196,6 +196,15 @@ export class MetadataService extends BaseService {
    await this.eventRepository.emit('AssetHide', { assetId: motionAsset.id, userId: motionAsset.ownerId });
  }

  private isOrientationSidewards(orientation: ExifOrientation | number): boolean {
    return [
      ExifOrientation.MirrorHorizontalRotate270CW,
      ExifOrientation.Rotate90CW,
      ExifOrientation.MirrorHorizontalRotate90CW,
      ExifOrientation.Rotate270CW,
    ].includes(orientation);
  }

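These four enum members are the transposed orientations (raw EXIF values 5 through 8), in which the stored raster is rotated 90° or 270° and the rendered width/height swap. An equivalent check against raw values:

// 5 = mirror + rotate 270 CW, 6 = rotate 90 CW, 7 = mirror + rotate 90 CW, 8 = rotate 270 CW
const isSidewardsRaw = (orientation: number) => orientation >= 5 && orientation <= 8;
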
  @OnJob({ name: JobName.AssetExtractMetadataQueueAll, queue: QueueName.MetadataExtraction })
  async handleQueueMetadataExtraction(job: JobOf<JobName.AssetExtractMetadataQueueAll>): Promise<JobStatus> {
    const { force } = job;
@@ -289,6 +298,10 @@ export class MetadataService extends BaseService {
      autoStackId: this.getAutoStackId(exifTags),
    };

    const isSidewards = exifTags.Orientation && this.isOrientationSidewards(exifTags.Orientation);
    const assetWidth = isSidewards ? validate(height) : validate(width);
    const assetHeight = isSidewards ? validate(width) : validate(height);

    const promises: Promise<unknown>[] = [
      this.assetRepository.upsertExif(exifData, { lockedPropertiesBehavior: 'skip' }),
      this.assetRepository.update({
@@ -297,6 +310,11 @@ export class MetadataService extends BaseService {
        localDateTime: dates.localDateTime,
        fileCreatedAt: dates.dateTimeOriginal ?? undefined,
        fileModifiedAt: stats.mtime,

        // only update the dimensions if they don't already exist
        // we don't want to overwrite width/height that are modified by edits
        width: asset.width == null ? assetWidth : undefined,
        height: asset.height == null ? assetHeight : undefined,
      }),
      this.applyTagList(asset, exifTags),
    ];
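Worked example: a file reporting ImageWidth 1000 and ImageHeight 2000 with Orientation 6 (rotate 90 CW) renders sideways, so the stored values swap — exactly what the new spec above asserts:

// isSidewards === true, hence:
// assetWidth  = validate(height) = 2000
// assetHeight = validate(width)  = 1000
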
@@ -443,6 +461,8 @@ export class MetadataService extends BaseService {
      await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.Sidecar, path: sidecarPath });
    }

    await this.assetRepository.unlockProperties(asset.id, lockedProperties);

    return JobStatus.Success;
  }

@@ -716,12 +736,7 @@ export class MetadataService extends BaseService {
      return regionInfo;
    }

    const isSidewards = [
      ExifOrientation.MirrorHorizontalRotate270CW,
      ExifOrientation.Rotate90CW,
      ExifOrientation.MirrorHorizontalRotate90CW,
      ExifOrientation.Rotate270CW,
    ].includes(orientation);
    const isSidewards = this.isOrientationSidewards(orientation);

    // swap image dimensions in AppliedToDimensions if orientation is sidewards
    const adjustedAppliedToDimensions = isSidewards
@@ -971,9 +986,17 @@ export class MetadataService extends BaseService {
  private async getVideoTags(originalPath: string) {
    const { videoStreams, format } = await this.mediaRepository.probe(originalPath);

    const tags: Pick<ImmichTags, 'Duration' | 'Orientation'> = {};
    const tags: Pick<ImmichTags, 'Duration' | 'Orientation' | 'ImageWidth' | 'ImageHeight'> = {};

    if (videoStreams[0]) {
      // Set video dimensions
      if (videoStreams[0].width) {
        tags.ImageWidth = videoStreams[0].width;
      }
      if (videoStreams[0].height) {
        tags.ImageHeight = videoStreams[0].height;
      }

      switch (videoStreams[0].rotation) {
        case -90: {
          tags.Orientation = ExifOrientation.Rotate90CW;

@@ -354,6 +354,7 @@ describe(PersonService.name, () => {
    it('should get the bounding boxes for an asset', async () => {
      mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([faceStub.face1.assetId]));
      mocks.person.getFaces.mockResolvedValue([faceStub.primaryFace1]);
      mocks.asset.getById.mockResolvedValue(assetStub.image);
      await expect(sut.getFacesById(authStub.admin, { id: faceStub.face1.assetId })).resolves.toStrictEqual([
        mapFaces(faceStub.primaryFace1, authStub.admin),
      ]);

@@ -40,6 +40,7 @@ import { AssetFaceTable } from 'src/schema/tables/asset-face.table';
import { FaceSearchTable } from 'src/schema/tables/face-search.table';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getDimensions } from 'src/utils/asset.util';
import { ImmichFileResponse } from 'src/utils/file';
import { mimeTypes } from 'src/utils/mime-types';
import { isFacialRecognitionEnabled } from 'src/utils/misc';
@@ -126,7 +127,10 @@ export class PersonService extends BaseService {
  async getFacesById(auth: AuthDto, dto: FaceDto): Promise<AssetFaceResponseDto[]> {
    await this.requireAccess({ auth, permission: Permission.AssetRead, ids: [dto.id] });
    const faces = await this.personRepository.getFaces(dto.id);
    return faces.map((asset) => mapFaces(asset, auth));
    const asset = await this.assetRepository.getById(dto.id, { edits: true, exifInfo: true });
    const assetDimensions = getDimensions(asset!.exifInfo!);

    return faces.map((face) => mapFaces(face, auth, asset!.edits!, assetDimensions));
  }

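mapFaces now also receives the asset's edits and original dimensions so face bounding boxes can be projected onto the edited image the client displays. The mapper itself is outside this diff; a hypothetical sketch of the projection for a crop edit, purely for intuition:

// illustrative only — shift a face box into crop-relative coordinates
const toCropSpace = (face: { x1: number; y1: number }, crop: { x: number; y: number }) => ({
  x1: face.x1 - crop.x,
  y1: face.y1 - crop.y,
});
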
  async createNewFeaturePhoto(changeFeaturePhoto: string[]) {

@@ -87,8 +87,8 @@ export class PluginService extends BaseService {
    this.logger.log(`Successfully processed core plugin: ${coreManifest.name} (version ${coreManifest.version})`);

    // Load external plugins
    if (plugins.enabled && plugins.installFolder) {
      await this.loadExternalPlugins(plugins.installFolder);
    if (plugins.external.allow && plugins.external.installFolder) {
      await this.loadExternalPlugins(plugins.external.installFolder);
    }
  }

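The external-plugin settings moved under a nested key. Judging from the guard above (and not from the schema itself), the relevant slice of the config now has roughly this shape:

// inferred from usage; fields beyond these two are not shown in this diff
type PluginsConfig = { external: { allow: boolean; installFolder?: string } };
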
@@ -23,7 +23,7 @@ describe(QueueService.name, () => {
  it('should update concurrency', () => {
    sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });

    expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(17);
    expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(18);
    expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FacialRecognition, 1);
    expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DuplicateDetection, 1);
    expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BackgroundTask, 5);
@@ -77,6 +77,7 @@ describe(QueueService.name, () => {
      [QueueName.BackupDatabase]: expected,
      [QueueName.Ocr]: expected,
      [QueueName.Workflow]: expected,
      [QueueName.Editor]: expected,
    });
  });
});

@@ -115,8 +115,9 @@ export class ServerService extends BaseService {
  }

  async getSystemConfig(): Promise<ServerConfigDto> {
    const { setup } = this.configRepository.getEnv();
    const config = await this.getConfig({ withCache: false });
    const isInitialized = await this.userRepository.hasAdmin();
    const isInitialized = !setup.allow || (await this.userRepository.hasAdmin());
    const onboarding = await this.systemMetadataRepository.get(SystemMetadataKey.AdminOnboarding);

    return {

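The net effect: when the environment forbids initial setup, the server reports itself as initialized regardless of whether an admin account exists yet.

const isInitialized = (setupAllowed: boolean, hasAdmin: boolean) => !setupAllowed || hasAdmin;
// isInitialized(false, false) === true  — setup disabled, always report initialized
// isInitialized(true, false)  === false — setup enabled, waiting for the first admin
// isInitialized(true, true)   === true
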
@@ -55,7 +55,8 @@ describe(SharedLinkService.name, () => {
      },
    });
    mocks.sharedLink.get.mockResolvedValue(sharedLinkStub.readonlyNoExif);
    await expect(sut.getMine(authDto, {})).resolves.toEqual(sharedLinkResponseStub.readonlyNoMetadata);
    const response = await sut.getMine(authDto, {});
    expect(response.assets[0]).toMatchObject({ hasMetadata: false });
    expect(mocks.sharedLink.get).toHaveBeenCalledWith(authDto.user.id, authDto.sharedLink?.id);
  });

@@ -6,7 +6,6 @@ import { AssetIdsDto } from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
  mapSharedLink,
  mapSharedLinkWithoutMetadata,
  SharedLinkCreateDto,
  SharedLinkEditDto,
  SharedLinkPasswordDto,
@@ -19,10 +18,10 @@ import { getExternalDomain, OpenGraphTags } from 'src/utils/misc';

@Injectable()
export class SharedLinkService extends BaseService {
  async getAll(auth: AuthDto, { albumId }: SharedLinkSearchDto): Promise<SharedLinkResponseDto[]> {
  async getAll(auth: AuthDto, { id, albumId }: SharedLinkSearchDto): Promise<SharedLinkResponseDto[]> {
    return this.sharedLinkRepository
      .getAll({ userId: auth.user.id, albumId })
      .then((links) => links.map((link) => mapSharedLink(link)));
      .getAll({ userId: auth.user.id, id, albumId })
      .then((links) => links.map((link) => mapSharedLink(link, { stripAssetMetadata: false })));
  }

  async getMine(auth: AuthDto, dto: SharedLinkPasswordDto): Promise<SharedLinkResponseDto> {
@@ -31,7 +30,7 @@ export class SharedLinkService extends BaseService {
    }

    const sharedLink = await this.findOrFail(auth.user.id, auth.sharedLink.id);
    const response = this.mapToSharedLink(sharedLink, { withExif: sharedLink.showExif });
    const response = mapSharedLink(sharedLink, { stripAssetMetadata: !sharedLink.showExif });
    if (sharedLink.password) {
      response.token = this.validateAndRefreshToken(sharedLink, dto);
    }
@@ -41,7 +40,7 @@ export class SharedLinkService extends BaseService {

  async get(auth: AuthDto, id: string): Promise<SharedLinkResponseDto> {
    const sharedLink = await this.findOrFail(auth.user.id, id);
    return this.mapToSharedLink(sharedLink, { withExif: true });
    return mapSharedLink(sharedLink, { stripAssetMetadata: false });
  }

  async create(auth: AuthDto, dto: SharedLinkCreateDto): Promise<SharedLinkResponseDto> {
@@ -81,7 +80,7 @@ export class SharedLinkService extends BaseService {
      slug: dto.slug || null,
    });

    return this.mapToSharedLink(sharedLink, { withExif: true });
    return mapSharedLink(sharedLink, { stripAssetMetadata: false });
    } catch (error) {
      this.handleError(error);
    }
@@ -108,7 +107,7 @@ export class SharedLinkService extends BaseService {
      showExif: dto.showMetadata,
      slug: dto.slug || null,
    });
    return this.mapToSharedLink(sharedLink, { withExif: true });
    return mapSharedLink(sharedLink, { stripAssetMetadata: false });
    } catch (error) {
      this.handleError(error);
    }
@@ -214,10 +213,6 @@ export class SharedLinkService extends BaseService {
    };
  }

  private mapToSharedLink(sharedLink: SharedLink, { withExif }: { withExif: boolean }) {
    return withExif ? mapSharedLink(sharedLink) : mapSharedLinkWithoutMetadata(sharedLink);
  }
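Design note: the private mapToSharedLink wrapper and mapSharedLinkWithoutMetadata are both gone; every call site now states its intent through a single option on mapSharedLink:

// before: withExif ? mapSharedLink(link) : mapSharedLinkWithoutMetadata(link)
// after:
mapSharedLink(sharedLink, { stripAssetMetadata: !sharedLink.showExif });
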

  private validateAndRefreshToken(sharedLink: SharedLink, dto: SharedLinkPasswordDto): string {
    const token = this.cryptoRepository.hashSha256(`${sharedLink.id}-${sharedLink.password}`);
    const sharedLinkTokens = dto.token?.split(',') || [];

@@ -117,7 +117,7 @@ export class SmartInfoService extends BaseService {

    const newConfig = await this.getConfig({ withCache: true });
    if (machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName) {
      // Skip the job if the the model has changed since the embedding was generated.
      // Skip the job if the model has changed since the embedding was generated.
      return JobStatus.Skipped;
    }