mirror of
https://github.com/immich-app/immich.git
synced 2026-03-01 10:08:42 +03:00
feat: image editing (#24155)
This commit is contained in:
41
server/src/repositories/asset-edit.repository.ts
Normal file
41
server/src/repositories/asset-edit.repository.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Kysely } from 'kysely';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { DummyValue, GenerateSql } from 'src/decorators';
|
||||
import { AssetEditActionItem } from 'src/dtos/editing.dto';
|
||||
import { DB } from 'src/schema';
|
||||
|
||||
@Injectable()
|
||||
export class AssetEditRepository {
|
||||
constructor(@InjectKysely() private db: Kysely<DB>) {}
|
||||
|
||||
@GenerateSql({
|
||||
params: [DummyValue.UUID],
|
||||
})
|
||||
async replaceAll(assetId: string, edits: AssetEditActionItem[]): Promise<AssetEditActionItem[]> {
|
||||
return await this.db.transaction().execute(async (trx) => {
|
||||
await trx.deleteFrom('asset_edit').where('assetId', '=', assetId).execute();
|
||||
|
||||
if (edits.length > 0) {
|
||||
return trx
|
||||
.insertInto('asset_edit')
|
||||
.values(edits.map((edit) => ({ assetId, ...edit })))
|
||||
.returning(['action', 'parameters'])
|
||||
.execute() as Promise<AssetEditActionItem[]>;
|
||||
}
|
||||
|
||||
return [];
|
||||
});
|
||||
}
|
||||
|
||||
@GenerateSql({
|
||||
params: [DummyValue.UUID],
|
||||
})
|
||||
async getAll(assetId: string): Promise<AssetEditActionItem[]> {
|
||||
return this.db
|
||||
.selectFrom('asset_edit')
|
||||
.select(['action', 'parameters'])
|
||||
.where('assetId', '=', assetId)
|
||||
.execute() as Promise<AssetEditActionItem[]>;
|
||||
}
|
||||
}
|
||||
@@ -11,6 +11,7 @@ import {
|
||||
asUuid,
|
||||
toJson,
|
||||
withDefaultVisibility,
|
||||
withEdits,
|
||||
withExif,
|
||||
withExifInner,
|
||||
withFaces,
|
||||
@@ -72,6 +73,7 @@ export class AssetJobRepository {
|
||||
.selectFrom('asset')
|
||||
.select(['asset.id', 'asset.thumbhash'])
|
||||
.select(withFiles)
|
||||
.select(withEdits)
|
||||
.where('asset.deletedAt', 'is', null)
|
||||
.where('asset.visibility', '!=', AssetVisibility.Hidden)
|
||||
.$if(!force, (qb) =>
|
||||
@@ -113,6 +115,7 @@ export class AssetJobRepository {
|
||||
'asset.type',
|
||||
])
|
||||
.select(withFiles)
|
||||
.select(withEdits)
|
||||
.$call(withExifInner)
|
||||
.where('asset.id', '=', id)
|
||||
.executeTakeFirst();
|
||||
@@ -200,7 +203,7 @@ export class AssetJobRepository {
|
||||
.selectFrom('asset')
|
||||
.select(['asset.id', 'asset.visibility'])
|
||||
.$call(withExifInner)
|
||||
.select((eb) => withFaces(eb, true))
|
||||
.select((eb) => withFaces(eb, true, true))
|
||||
.select((eb) => withFiles(eb, AssetFileType.Preview))
|
||||
.where('asset.id', '=', id)
|
||||
.executeTakeFirst();
|
||||
|
||||
@@ -20,6 +20,7 @@ import {
|
||||
truncatedDate,
|
||||
unnest,
|
||||
withDefaultVisibility,
|
||||
withEdits,
|
||||
withExif,
|
||||
withFaces,
|
||||
withFacesAndPeople,
|
||||
@@ -112,6 +113,7 @@ interface GetByIdsRelations {
|
||||
smartSearch?: boolean;
|
||||
stack?: { assets?: boolean };
|
||||
tags?: boolean;
|
||||
edits?: boolean;
|
||||
}
|
||||
|
||||
const distinctLocked = <T extends LockableProperty[] | null>(eb: ExpressionBuilder<DB, 'asset_exif'>, columns: T) =>
|
||||
@@ -472,7 +474,10 @@ export class AssetRepository {
|
||||
}
|
||||
|
||||
@GenerateSql({ params: [DummyValue.UUID] })
|
||||
getById(id: string, { exifInfo, faces, files, library, owner, smartSearch, stack, tags }: GetByIdsRelations = {}) {
|
||||
getById(
|
||||
id: string,
|
||||
{ exifInfo, faces, files, library, owner, smartSearch, stack, tags, edits }: GetByIdsRelations = {},
|
||||
) {
|
||||
return this.db
|
||||
.selectFrom('asset')
|
||||
.selectAll('asset')
|
||||
@@ -509,6 +514,7 @@ export class AssetRepository {
|
||||
)
|
||||
.$if(!!files, (qb) => qb.select(withFiles))
|
||||
.$if(!!tags, (qb) => qb.select(withTags))
|
||||
.$if(!!edits, (qb) => qb.select(withEdits))
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
}
|
||||
@@ -536,10 +542,11 @@ export class AssetRepository {
|
||||
.selectAll('asset')
|
||||
.$call(withExif)
|
||||
.$call((qb) => qb.select(withFacesAndPeople))
|
||||
.$call((qb) => qb.select(withEdits))
|
||||
.executeTakeFirst();
|
||||
}
|
||||
|
||||
return this.getById(asset.id, { exifInfo: true, faces: { person: true } });
|
||||
return this.getById(asset.id, { exifInfo: true, faces: { person: true }, edits: true });
|
||||
}
|
||||
|
||||
async remove(asset: { id: string }): Promise<void> {
|
||||
@@ -696,11 +703,9 @@ export class AssetRepository {
|
||||
.coalesce(
|
||||
eb
|
||||
.case()
|
||||
.when(sql`asset_exif."exifImageHeight" = 0 or asset_exif."exifImageWidth" = 0`)
|
||||
.when(sql`asset."height" = 0 or asset."width" = 0`)
|
||||
.then(eb.lit(1))
|
||||
.when('asset_exif.orientation', 'in', sql<string>`('5', '6', '7', '8', '-90', '90')`)
|
||||
.then(sql`round(asset_exif."exifImageHeight"::numeric / asset_exif."exifImageWidth"::numeric, 3)`)
|
||||
.else(sql`round(asset_exif."exifImageWidth"::numeric / asset_exif."exifImageHeight"::numeric, 3)`)
|
||||
.else(sql`round(asset."width"::numeric / asset."height"::numeric, 3)`)
|
||||
.end(),
|
||||
eb.lit(1),
|
||||
)
|
||||
|
||||
@@ -4,6 +4,7 @@ import { AlbumUserRepository } from 'src/repositories/album-user.repository';
|
||||
import { AlbumRepository } from 'src/repositories/album.repository';
|
||||
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
|
||||
import { AppRepository } from 'src/repositories/app.repository';
|
||||
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
|
||||
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { AuditRepository } from 'src/repositories/audit.repository';
|
||||
@@ -59,6 +60,7 @@ export const repositories = [
|
||||
ApiKeyRepository,
|
||||
AppRepository,
|
||||
AssetRepository,
|
||||
AssetEditRepository,
|
||||
AssetJobRepository,
|
||||
ConfigRepository,
|
||||
CronRepository,
|
||||
|
||||
667
server/src/repositories/media.repository.spec.ts
Normal file
667
server/src/repositories/media.repository.spec.ts
Normal file
@@ -0,0 +1,667 @@
|
||||
import sharp from 'sharp';
|
||||
import { AssetFace } from 'src/database';
|
||||
import { AssetEditAction, MirrorAxis } from 'src/dtos/editing.dto';
|
||||
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
|
||||
import { SourceType } from 'src/enum';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { BoundingBox } from 'src/repositories/machine-learning.repository';
|
||||
import { MediaRepository } from 'src/repositories/media.repository';
|
||||
import { checkFaceVisibility, checkOcrVisibility } from 'src/utils/editor';
|
||||
import { automock } from 'test/utils';
|
||||
|
||||
const getPixelColor = async (buffer: Buffer, x: number, y: number) => {
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
const width = metadata.width!;
|
||||
const { data } = await sharp(buffer).raw().toBuffer({ resolveWithObject: true });
|
||||
const idx = (y * width + x) * 4;
|
||||
return {
|
||||
r: data[idx],
|
||||
g: data[idx + 1],
|
||||
b: data[idx + 2],
|
||||
};
|
||||
};
|
||||
|
||||
const buildTestQuadImage = async () => {
|
||||
// build a 4 quadrant image for testing mirroring
|
||||
const base = sharp({
|
||||
create: { width: 1000, height: 1000, channels: 3, background: { r: 0, g: 0, b: 0 } },
|
||||
}).png();
|
||||
|
||||
const tl = await sharp({
|
||||
create: { width: 500, height: 500, channels: 3, background: { r: 255, g: 0, b: 0 } },
|
||||
})
|
||||
.png()
|
||||
.toBuffer();
|
||||
|
||||
const tr = await sharp({
|
||||
create: { width: 500, height: 500, channels: 3, background: { r: 0, g: 255, b: 0 } },
|
||||
})
|
||||
.png()
|
||||
.toBuffer();
|
||||
|
||||
const bl = await sharp({
|
||||
create: { width: 500, height: 500, channels: 3, background: { r: 0, g: 0, b: 255 } },
|
||||
})
|
||||
.png()
|
||||
.toBuffer();
|
||||
|
||||
const br = await sharp({
|
||||
create: { width: 500, height: 500, channels: 3, background: { r: 255, g: 255, b: 0 } },
|
||||
})
|
||||
.png()
|
||||
.toBuffer();
|
||||
|
||||
const image = base.composite([
|
||||
{ input: tl, left: 0, top: 0 }, // top-left
|
||||
{ input: tr, left: 500, top: 0 }, // top-right
|
||||
{ input: bl, left: 0, top: 500 }, // bottom-left
|
||||
{ input: br, left: 500, top: 500 }, // bottom-right
|
||||
]);
|
||||
|
||||
return image.png().toBuffer();
|
||||
};
|
||||
|
||||
describe(MediaRepository.name, () => {
|
||||
let sut: MediaRepository;
|
||||
|
||||
beforeEach(() => {
|
||||
// eslint-disable-next-line no-sparse-arrays
|
||||
sut = new MediaRepository(automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }));
|
||||
});
|
||||
|
||||
describe('applyEdits (single actions)', () => {
|
||||
it('should apply crop edit correctly', async () => {
|
||||
const result = await sut['applyEdits'](
|
||||
sharp({
|
||||
create: {
|
||||
width: 1000,
|
||||
height: 1000,
|
||||
channels: 4,
|
||||
background: { r: 255, g: 0, b: 0, alpha: 0.5 },
|
||||
},
|
||||
}).png(),
|
||||
[
|
||||
{
|
||||
action: AssetEditAction.Crop,
|
||||
parameters: {
|
||||
x: 100,
|
||||
y: 200,
|
||||
width: 700,
|
||||
height: 300,
|
||||
},
|
||||
},
|
||||
],
|
||||
);
|
||||
|
||||
const metadata = await result.toBuffer().then((buf) => sharp(buf).metadata());
|
||||
expect(metadata.width).toBe(700);
|
||||
expect(metadata.height).toBe(300);
|
||||
});
|
||||
it('should apply rotate edit correctly', async () => {
|
||||
const result = await sut['applyEdits'](
|
||||
sharp({
|
||||
create: {
|
||||
width: 500,
|
||||
height: 1000,
|
||||
channels: 4,
|
||||
background: { r: 255, g: 0, b: 0, alpha: 0.5 },
|
||||
},
|
||||
}).png(),
|
||||
[
|
||||
{
|
||||
action: AssetEditAction.Rotate,
|
||||
parameters: {
|
||||
angle: 90,
|
||||
},
|
||||
},
|
||||
],
|
||||
);
|
||||
|
||||
const metadata = await result.toBuffer().then((buf) => sharp(buf).metadata());
|
||||
expect(metadata.width).toBe(1000);
|
||||
expect(metadata.height).toBe(500);
|
||||
});
|
||||
|
||||
it('should apply mirror edit correctly', async () => {
|
||||
const resultHorizontal = await sut['applyEdits'](sharp(await buildTestQuadImage()), [
|
||||
{
|
||||
action: AssetEditAction.Mirror,
|
||||
parameters: {
|
||||
axis: MirrorAxis.Horizontal,
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const bufferHorizontal = await resultHorizontal.toBuffer();
|
||||
const metadataHorizontal = await resultHorizontal.metadata();
|
||||
expect(metadataHorizontal.width).toBe(1000);
|
||||
expect(metadataHorizontal.height).toBe(1000);
|
||||
|
||||
expect(await getPixelColor(bufferHorizontal, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
|
||||
expect(await getPixelColor(bufferHorizontal, 990, 10)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(bufferHorizontal, 10, 990)).toEqual({ r: 255, g: 255, b: 0 });
|
||||
expect(await getPixelColor(bufferHorizontal, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
|
||||
const resultVertical = await sut['applyEdits'](sharp(await buildTestQuadImage()), [
|
||||
{
|
||||
action: AssetEditAction.Mirror,
|
||||
parameters: {
|
||||
axis: MirrorAxis.Vertical,
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const bufferVertical = await resultVertical.toBuffer();
|
||||
const metadataVertical = await resultVertical.metadata();
|
||||
expect(metadataVertical.width).toBe(1000);
|
||||
expect(metadataVertical.height).toBe(1000);
|
||||
|
||||
// top-left should now be bottom-left (blue)
|
||||
expect(await getPixelColor(bufferVertical, 10, 10)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
// top-right should now be bottom-right (yellow)
|
||||
expect(await getPixelColor(bufferVertical, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
|
||||
// bottom-left should now be top-left (red)
|
||||
expect(await getPixelColor(bufferVertical, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
// bottom-right should now be top-right (blue)
|
||||
expect(await getPixelColor(bufferVertical, 990, 990)).toEqual({ r: 0, g: 255, b: 0 });
|
||||
});
|
||||
});
|
||||
|
||||
describe('applyEdits (multiple sequential edits)', () => {
|
||||
it('should apply horizontal mirror then vertical mirror (equivalent to 180° rotation)', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(1000);
|
||||
expect(metadata.height).toBe(1000);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
});
|
||||
|
||||
it('should apply rotate 90° then horizontal mirror', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 90 } },
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(1000);
|
||||
expect(metadata.height).toBe(1000);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 255, b: 0 });
|
||||
});
|
||||
|
||||
it('should apply 180° rotation', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 180 } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(1000);
|
||||
expect(metadata.height).toBe(1000);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
});
|
||||
|
||||
it('should apply 270° rotations', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 270 } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(1000);
|
||||
expect(metadata.height).toBe(1000);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
});
|
||||
|
||||
it('should apply crop then rotate 90°', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 1000, height: 500 } },
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 90 } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(500);
|
||||
expect(metadata.height).toBe(1000);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 0, g: 255, b: 0 });
|
||||
});
|
||||
|
||||
it('should apply rotate 90° then crop', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 1000 } },
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 90 } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(1000);
|
||||
expect(metadata.height).toBe(500);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
});
|
||||
|
||||
it('should apply vertical mirror then horizontal mirror then rotate 90°', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } },
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 90 } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(1000);
|
||||
expect(metadata.height).toBe(1000);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 0, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 255, g: 255, b: 0 });
|
||||
expect(await getPixelColor(buffer, 10, 990)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 990)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
});
|
||||
|
||||
it('should apply crop to single quadrant then mirror', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 500 } },
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(500);
|
||||
expect(metadata.height).toBe(500);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(buffer, 490, 10)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(buffer, 10, 490)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(buffer, 490, 490)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
});
|
||||
|
||||
it('should apply all operations: crop, rotate, mirror', async () => {
|
||||
const imageBuffer = await buildTestQuadImage();
|
||||
const result = await sut['applyEdits'](sharp(imageBuffer), [
|
||||
{ action: AssetEditAction.Crop, parameters: { x: 0, y: 0, width: 500, height: 1000 } },
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 90 } },
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
|
||||
]);
|
||||
|
||||
const buffer = await result.png().toBuffer();
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
expect(metadata.width).toBe(1000);
|
||||
expect(metadata.height).toBe(500);
|
||||
|
||||
expect(await getPixelColor(buffer, 10, 10)).toEqual({ r: 255, g: 0, b: 0 });
|
||||
expect(await getPixelColor(buffer, 990, 10)).toEqual({ r: 0, g: 0, b: 255 });
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkFaceVisibility', () => {
|
||||
const baseFace: AssetFace = {
|
||||
id: 'face-1',
|
||||
assetId: 'asset-1',
|
||||
personId: 'person-1',
|
||||
boundingBoxX1: 100,
|
||||
boundingBoxY1: 100,
|
||||
boundingBoxX2: 200,
|
||||
boundingBoxY2: 200,
|
||||
imageWidth: 1000,
|
||||
imageHeight: 800,
|
||||
sourceType: SourceType.MachineLearning,
|
||||
isVisible: true,
|
||||
updatedAt: new Date(),
|
||||
deletedAt: null,
|
||||
updateId: '',
|
||||
};
|
||||
|
||||
const assetDimensions = { width: 1000, height: 800 };
|
||||
|
||||
describe('with no crop edit', () => {
|
||||
it('should return only currently invisible faces when no crop is provided', () => {
|
||||
const visibleFace = { ...baseFace, id: 'face-visible', isVisible: true };
|
||||
const invisibleFace = { ...baseFace, id: 'face-invisible', isVisible: false };
|
||||
const faces = [visibleFace, invisibleFace];
|
||||
const result = checkFaceVisibility(faces, assetDimensions);
|
||||
|
||||
expect(result.visible).toEqual([invisibleFace]);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty arrays when all faces are already visible and no crop is provided', () => {
|
||||
const faces = [baseFace];
|
||||
const result = checkFaceVisibility(faces, assetDimensions);
|
||||
|
||||
expect(result.visible).toEqual([]);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return all faces when all are invisible and no crop is provided', () => {
|
||||
const face1 = { ...baseFace, id: 'face-1', isVisible: false };
|
||||
const face2 = { ...baseFace, id: 'face-2', isVisible: false };
|
||||
const faces = [face1, face2];
|
||||
const result = checkFaceVisibility(faces, assetDimensions);
|
||||
|
||||
expect(result.visible).toEqual([face1, face2]);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('with crop edit', () => {
|
||||
it('should mark face as visible when fully inside crop area', () => {
|
||||
const crop: BoundingBox = { x1: 0, y1: 0, x2: 500, y2: 400 };
|
||||
const faces = [baseFace];
|
||||
const result = checkFaceVisibility(faces, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual(faces);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
|
||||
it('should mark face as visible when more than 50% inside crop area', () => {
|
||||
const crop: BoundingBox = { x1: 150, y1: 150, x2: 650, y2: 550 };
|
||||
// Face at (100,100)-(200,200), crop starts at (150,150)
|
||||
// Overlap: (150,150)-(200,200) = 50x50 = 2500
|
||||
// Face area: 100x100 = 10000
|
||||
// Overlap percentage: 25% - should be hidden
|
||||
const faces = [baseFace];
|
||||
const result = checkFaceVisibility(faces, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([]);
|
||||
expect(result.hidden).toEqual(faces);
|
||||
});
|
||||
|
||||
it('should mark face as hidden when less than 50% inside crop area', () => {
|
||||
const crop: BoundingBox = { x1: 250, y1: 250, x2: 750, y2: 650 };
|
||||
// Face completely outside crop area
|
||||
const faces = [baseFace];
|
||||
const result = checkFaceVisibility(faces, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([]);
|
||||
expect(result.hidden).toEqual(faces);
|
||||
});
|
||||
|
||||
it('should mark face as hidden when completely outside crop area', () => {
|
||||
const crop: BoundingBox = { x1: 500, y1: 500, x2: 700, y2: 700 };
|
||||
const faces = [baseFace];
|
||||
const result = checkFaceVisibility(faces, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([]);
|
||||
expect(result.hidden).toEqual(faces);
|
||||
});
|
||||
|
||||
it('should handle multiple faces with mixed visibility', () => {
|
||||
const crop: BoundingBox = { x1: 0, y1: 0, x2: 300, y2: 300 };
|
||||
const faceInside: AssetFace = {
|
||||
...baseFace,
|
||||
id: 'face-inside',
|
||||
boundingBoxX1: 50,
|
||||
boundingBoxY1: 50,
|
||||
boundingBoxX2: 150,
|
||||
boundingBoxY2: 150,
|
||||
};
|
||||
const faceOutside: AssetFace = {
|
||||
...baseFace,
|
||||
id: 'face-outside',
|
||||
boundingBoxX1: 400,
|
||||
boundingBoxY1: 400,
|
||||
boundingBoxX2: 500,
|
||||
boundingBoxY2: 500,
|
||||
};
|
||||
const faces = [faceInside, faceOutside];
|
||||
const result = checkFaceVisibility(faces, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([faceInside]);
|
||||
expect(result.hidden).toEqual([faceOutside]);
|
||||
});
|
||||
|
||||
it('should handle face at exactly 50% overlap threshold', () => {
|
||||
// Face at (0,0)-(100,100), crop at (50,0)-(150,100)
|
||||
// Overlap: (50,0)-(100,100) = 50x100 = 5000
|
||||
// Face area: 100x100 = 10000
|
||||
// Overlap percentage: 50% - exactly at threshold, should be visible
|
||||
const faceAtEdge: AssetFace = {
|
||||
...baseFace,
|
||||
id: 'face-edge',
|
||||
boundingBoxX1: 0,
|
||||
boundingBoxY1: 0,
|
||||
boundingBoxX2: 100,
|
||||
boundingBoxY2: 100,
|
||||
};
|
||||
const crop: BoundingBox = { x1: 50, y1: 0, x2: 150, y2: 100 };
|
||||
const faces = [faceAtEdge];
|
||||
const result = checkFaceVisibility(faces, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([faceAtEdge]);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('with scaled dimensions', () => {
|
||||
it('should handle faces when asset dimensions differ from face image dimensions', () => {
|
||||
// Face stored at 1000x800 resolution, but displaying at 500x400
|
||||
const scaledDimensions = { width: 500, height: 400 };
|
||||
const crop: BoundingBox = { x1: 0, y1: 0, x2: 250, y2: 200 };
|
||||
// Face at (100,100)-(200,200) on 1000x800
|
||||
// Scaled to 500x400: (50,50)-(100,100)
|
||||
// Crop at (0,0)-(250,200) - face is fully inside
|
||||
const faces = [baseFace];
|
||||
const result = checkFaceVisibility(faces, scaledDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual(faces);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkOcrVisibility', () => {
|
||||
const baseOcr: AssetOcrResponseDto & { isVisible: boolean } = {
|
||||
id: 'ocr-1',
|
||||
assetId: 'asset-1',
|
||||
x1: 0.1,
|
||||
y1: 0.1,
|
||||
x2: 0.2,
|
||||
y2: 0.1,
|
||||
x3: 0.2,
|
||||
y3: 0.2,
|
||||
x4: 0.1,
|
||||
y4: 0.2,
|
||||
boxScore: 0.9,
|
||||
textScore: 0.85,
|
||||
text: 'Test OCR',
|
||||
isVisible: false,
|
||||
};
|
||||
|
||||
const assetDimensions = { width: 1000, height: 800 };
|
||||
|
||||
describe('with no crop edit', () => {
|
||||
it('should return only currently invisible OCR items when no crop is provided', () => {
|
||||
const visibleOcr = { ...baseOcr, id: 'ocr-visible', isVisible: true };
|
||||
const invisibleOcr = { ...baseOcr, id: 'ocr-invisible', isVisible: false };
|
||||
const ocrs = [visibleOcr, invisibleOcr];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions);
|
||||
|
||||
expect(result.visible).toEqual([invisibleOcr]);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty arrays when all OCR items are already visible and no crop is provided', () => {
|
||||
const visibleOcr = { ...baseOcr, isVisible: true };
|
||||
const ocrs = [visibleOcr];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions);
|
||||
|
||||
expect(result.visible).toEqual([]);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return all OCR items when all are invisible and no crop is provided', () => {
|
||||
const ocr1 = { ...baseOcr, id: 'ocr-1', isVisible: false };
|
||||
const ocr2 = { ...baseOcr, id: 'ocr-2', isVisible: false };
|
||||
const ocrs = [ocr1, ocr2];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions);
|
||||
|
||||
expect(result.visible).toEqual([ocr1, ocr2]);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('with crop edit', () => {
|
||||
it('should mark OCR as visible when fully inside crop area', () => {
|
||||
const crop: BoundingBox = { x1: 0, y1: 0, x2: 500, y2: 400 };
|
||||
// OCR box: (0.1,0.1)-(0.2,0.2) on 1000x800 = (100,80)-(200,160)
|
||||
// Crop: (0,0)-(500,400) - OCR fully inside
|
||||
const ocrs = [baseOcr];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual(ocrs);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
|
||||
it('should mark OCR as hidden when completely outside crop area', () => {
|
||||
const crop: BoundingBox = { x1: 500, y1: 500, x2: 700, y2: 700 };
|
||||
// OCR box: (100,80)-(200,160) - completely outside crop
|
||||
const ocrs = [baseOcr];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([]);
|
||||
expect(result.hidden).toEqual(ocrs);
|
||||
});
|
||||
|
||||
it('should mark OCR as hidden when less than 50% inside crop area', () => {
|
||||
const crop: BoundingBox = { x1: 150, y1: 120, x2: 650, y2: 520 };
|
||||
// OCR box: (100,80)-(200,160)
|
||||
// Crop: (150,120)-(650,520)
|
||||
// Overlap: (150,120)-(200,160) = 50x40 = 2000
|
||||
// OCR area: 100x80 = 8000
|
||||
// Overlap percentage: 25% - should be hidden
|
||||
const ocrs = [baseOcr];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([]);
|
||||
expect(result.hidden).toEqual(ocrs);
|
||||
});
|
||||
|
||||
it('should handle multiple OCR items with mixed visibility', () => {
|
||||
const crop: BoundingBox = { x1: 0, y1: 0, x2: 300, y2: 300 };
|
||||
const ocrInside = {
|
||||
...baseOcr,
|
||||
id: 'ocr-inside',
|
||||
};
|
||||
const ocrOutside = {
|
||||
...baseOcr,
|
||||
id: 'ocr-outside',
|
||||
x1: 0.5,
|
||||
y1: 0.5,
|
||||
x2: 0.6,
|
||||
y2: 0.5,
|
||||
x3: 0.6,
|
||||
y3: 0.6,
|
||||
x4: 0.5,
|
||||
y4: 0.6,
|
||||
};
|
||||
const ocrs = [ocrInside, ocrOutside];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([ocrInside]);
|
||||
expect(result.hidden).toEqual([ocrOutside]);
|
||||
});
|
||||
|
||||
it('should handle OCR boxes with rotated/skewed polygons', () => {
|
||||
// OCR with a rotated bounding box (not axis-aligned)
|
||||
const rotatedOcr = {
|
||||
...baseOcr,
|
||||
id: 'ocr-rotated',
|
||||
x1: 0.15,
|
||||
y1: 0.1,
|
||||
x2: 0.25,
|
||||
y2: 0.15,
|
||||
x3: 0.2,
|
||||
y3: 0.25,
|
||||
x4: 0.1,
|
||||
y4: 0.2,
|
||||
};
|
||||
const crop: BoundingBox = { x1: 0, y1: 0, x2: 300, y2: 300 };
|
||||
const ocrs = [rotatedOcr];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions, crop);
|
||||
|
||||
expect(result.visible).toEqual([rotatedOcr]);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('visibility is only affected by crop (not rotate or mirror)', () => {
|
||||
it('should keep all OCR items visible when there is no crop regardless of other transforms', () => {
|
||||
// Rotate and mirror edits don't affect visibility - only crop does
|
||||
// The visibility functions only take an optional crop parameter
|
||||
const ocrs = [baseOcr];
|
||||
|
||||
// Without any crop, all OCR items remain visible
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions);
|
||||
|
||||
expect(result.visible).toEqual(ocrs);
|
||||
expect(result.hidden).toEqual([]);
|
||||
});
|
||||
|
||||
it('should only consider crop for visibility calculation', () => {
|
||||
// Even if the image will be rotated/mirrored, visibility is determined
|
||||
// solely by whether the OCR box overlaps with the crop area
|
||||
const crop: BoundingBox = { x1: 0, y1: 0, x2: 300, y2: 300 };
|
||||
|
||||
const ocrInsideCrop = {
|
||||
...baseOcr,
|
||||
id: 'ocr-inside',
|
||||
// OCR at (0.1,0.1)-(0.2,0.2) = (100,80)-(200,160) on 1000x800, inside crop
|
||||
};
|
||||
|
||||
const ocrOutsideCrop = {
|
||||
...baseOcr,
|
||||
id: 'ocr-outside',
|
||||
x1: 0.5,
|
||||
y1: 0.5,
|
||||
x2: 0.6,
|
||||
y2: 0.5,
|
||||
x3: 0.6,
|
||||
y3: 0.6,
|
||||
x4: 0.5,
|
||||
y4: 0.6,
|
||||
// OCR at (500,400)-(600,480) on 1000x800, outside crop
|
||||
};
|
||||
|
||||
const ocrs = [ocrInsideCrop, ocrOutsideCrop];
|
||||
const result = checkOcrVisibility(ocrs, assetDimensions, crop);
|
||||
|
||||
// OCR inside crop area is visible, OCR outside is hidden
|
||||
// This is true regardless of any subsequent rotate/mirror operations
|
||||
expect(result.visible).toEqual([ocrInsideCrop]);
|
||||
expect(result.hidden).toEqual([ocrOutsideCrop]);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -7,6 +7,7 @@ import { Writable } from 'node:stream';
|
||||
import sharp from 'sharp';
|
||||
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
|
||||
import { Exif } from 'src/database';
|
||||
import { AssetEditActionItem } from 'src/dtos/editing.dto';
|
||||
import { Colorspace, LogLevel, RawExtractedFormat } from 'src/enum';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import {
|
||||
@@ -19,6 +20,7 @@ import {
|
||||
VideoInfo,
|
||||
} from 'src/types';
|
||||
import { handlePromiseError } from 'src/utils/misc';
|
||||
import { createAffineMatrix } from 'src/utils/transform';
|
||||
|
||||
const probe = (input: string, options: string[]): Promise<FfprobeData> =>
|
||||
new Promise((resolve, reject) =>
|
||||
@@ -138,21 +140,48 @@ export class MediaRepository {
|
||||
}
|
||||
}
|
||||
|
||||
decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
|
||||
return this.getImageDecodingPipeline(input, options).raw().toBuffer({ resolveWithObject: true });
|
||||
async decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
|
||||
const pipeline = await this.getImageDecodingPipeline(input, options);
|
||||
return pipeline.raw().toBuffer({ resolveWithObject: true });
|
||||
}
|
||||
|
||||
private async applyEdits(pipeline: sharp.Sharp, edits: AssetEditActionItem[]): Promise<sharp.Sharp> {
|
||||
const affineEditOperations = edits.filter((edit) => edit.action !== 'crop');
|
||||
const matrix = createAffineMatrix(affineEditOperations);
|
||||
|
||||
const crop = edits.find((edit) => edit.action === 'crop');
|
||||
const dimensions = await pipeline.metadata();
|
||||
|
||||
if (crop) {
|
||||
pipeline = pipeline.extract({
|
||||
left: crop ? Math.round(crop.parameters.x) : 0,
|
||||
top: crop ? Math.round(crop.parameters.y) : 0,
|
||||
width: crop ? Math.round(crop.parameters.width) : dimensions.width || 0,
|
||||
height: crop ? Math.round(crop.parameters.height) : dimensions.height || 0,
|
||||
});
|
||||
}
|
||||
|
||||
const { a, b, c, d } = matrix;
|
||||
pipeline = pipeline.affine([
|
||||
[a, b],
|
||||
[c, d],
|
||||
]);
|
||||
|
||||
return pipeline;
|
||||
}
|
||||
|
||||
async generateThumbnail(input: string | Buffer, options: GenerateThumbnailOptions, output: string): Promise<void> {
|
||||
await this.getImageDecodingPipeline(input, options)
|
||||
.toFormat(options.format, {
|
||||
quality: options.quality,
|
||||
// this is default in libvips (except the threshold is 90), but we need to set it manually in sharp
|
||||
chromaSubsampling: options.quality >= 80 ? '4:4:4' : '4:2:0',
|
||||
})
|
||||
.toFile(output);
|
||||
const pipeline = await this.getImageDecodingPipeline(input, options);
|
||||
const decoded = pipeline.toFormat(options.format, {
|
||||
quality: options.quality,
|
||||
// this is default in libvips (except the threshold is 90), but we need to set it manually in sharp
|
||||
chromaSubsampling: options.quality >= 80 ? '4:4:4' : '4:2:0',
|
||||
});
|
||||
|
||||
await decoded.toFile(output);
|
||||
}
|
||||
|
||||
private getImageDecodingPipeline(input: string | Buffer, options: DecodeToBufferOptions) {
|
||||
private async getImageDecodingPipeline(input: string | Buffer, options: DecodeToBufferOptions) {
|
||||
let pipeline = sharp(input, {
|
||||
// some invalid images can still be processed by sharp, but we want to fail on them by default to avoid crashes
|
||||
failOn: options.processInvalidImages ? 'none' : 'error',
|
||||
@@ -175,8 +204,8 @@ export class MediaRepository {
|
||||
}
|
||||
}
|
||||
|
||||
if (options.crop) {
|
||||
pipeline = pipeline.extract(options.crop);
|
||||
if (options.edits && options.edits.length > 0) {
|
||||
pipeline = await this.applyEdits(pipeline, options.edits);
|
||||
}
|
||||
|
||||
if (options.size !== undefined) {
|
||||
@@ -186,14 +215,20 @@ export class MediaRepository {
|
||||
}
|
||||
|
||||
async generateThumbhash(input: string | Buffer, options: GenerateThumbhashOptions): Promise<Buffer> {
|
||||
const [{ rgbaToThumbHash }, { data, info }] = await Promise.all([
|
||||
const [{ rgbaToThumbHash }, decodingPipeline] = await Promise.all([
|
||||
import('thumbhash'),
|
||||
sharp(input, options)
|
||||
.resize(100, 100, { fit: 'inside', withoutEnlargement: true })
|
||||
.raw()
|
||||
.ensureAlpha()
|
||||
.toBuffer({ resolveWithObject: true }),
|
||||
this.getImageDecodingPipeline(input, {
|
||||
colorspace: options.colorspace,
|
||||
processInvalidImages: options.processInvalidImages,
|
||||
raw: options.raw,
|
||||
edits: options.edits,
|
||||
}),
|
||||
]);
|
||||
|
||||
const pipeline = decodingPipeline.resize(100, 100, { fit: 'inside', withoutEnlargement: true }).raw().ensureAlpha();
|
||||
|
||||
const { data, info } = await pipeline.toBuffer({ resolveWithObject: true });
|
||||
|
||||
return Buffer.from(rgbaToThumbHash(info.width, info.height, data));
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
|
||||
import { Insertable, Kysely, sql } from 'kysely';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { DummyValue, GenerateSql } from 'src/decorators';
|
||||
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
|
||||
import { DB } from 'src/schema';
|
||||
import { AssetOcrTable } from 'src/schema/tables/asset-ocr.table';
|
||||
|
||||
@@ -15,8 +16,15 @@ export class OcrRepository {
|
||||
}
|
||||
|
||||
@GenerateSql({ params: [DummyValue.UUID] })
|
||||
getByAssetId(id: string) {
|
||||
return this.db.selectFrom('asset_ocr').selectAll('asset_ocr').where('asset_ocr.assetId', '=', id).execute();
|
||||
getByAssetId(id: string, options?: { isVisible?: boolean }) {
|
||||
const isVisible = options === undefined ? true : options.isVisible;
|
||||
|
||||
return this.db
|
||||
.selectFrom('asset_ocr')
|
||||
.selectAll('asset_ocr')
|
||||
.where('asset_ocr.assetId', '=', id)
|
||||
.$if(isVisible !== undefined, (qb) => qb.where('asset_ocr.isVisible', '=', isVisible!))
|
||||
.execute();
|
||||
}
|
||||
|
||||
deleteAll() {
|
||||
@@ -65,4 +73,40 @@ export class OcrRepository {
|
||||
|
||||
return query.selectNoFrom(sql`1`.as('dummy')).execute();
|
||||
}
|
||||
|
||||
@GenerateSql({ params: [DummyValue.UUID, [], []] })
|
||||
async updateOcrVisibilities(
|
||||
assetId: string,
|
||||
visible: AssetOcrResponseDto[],
|
||||
hidden: AssetOcrResponseDto[],
|
||||
): Promise<void> {
|
||||
await this.db.transaction().execute(async (trx) => {
|
||||
if (visible.length > 0) {
|
||||
await trx
|
||||
.updateTable('asset_ocr')
|
||||
.set({ isVisible: true })
|
||||
.where(
|
||||
'asset_ocr.id',
|
||||
'in',
|
||||
visible.map((i) => i.id),
|
||||
)
|
||||
.execute();
|
||||
}
|
||||
|
||||
if (hidden.length > 0) {
|
||||
await trx
|
||||
.updateTable('asset_ocr')
|
||||
.set({ isVisible: false })
|
||||
.where(
|
||||
'asset_ocr.id',
|
||||
'in',
|
||||
hidden.map((i) => i.id),
|
||||
)
|
||||
.execute();
|
||||
}
|
||||
|
||||
const searchText = visible.map((item) => item.text.trim()).join(' ');
|
||||
await trx.updateTable('ocr_search').set({ text: searchText }).where('assetId', '=', assetId).execute();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
|
||||
import { ExpressionBuilder, Insertable, Kysely, NotNull, Selectable, sql, Updateable } from 'kysely';
|
||||
import { jsonObjectFrom } from 'kysely/helpers/postgres';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { AssetFace } from 'src/database';
|
||||
import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
|
||||
import { AssetFileType, AssetVisibility, SourceType } from 'src/enum';
|
||||
import { DB } from 'src/schema';
|
||||
@@ -121,6 +122,7 @@ export class PersonRepository {
|
||||
.$if(!!options.sourceType, (qb) => qb.where('asset_face.sourceType', '=', options.sourceType!))
|
||||
.$if(!!options.assetId, (qb) => qb.where('asset_face.assetId', '=', options.assetId!))
|
||||
.where('asset_face.deletedAt', 'is', null)
|
||||
.where('asset_face.isVisible', 'is', true)
|
||||
.stream();
|
||||
}
|
||||
|
||||
@@ -160,6 +162,7 @@ export class PersonRepository {
|
||||
)
|
||||
.where('person.ownerId', '=', userId)
|
||||
.where('asset_face.deletedAt', 'is', null)
|
||||
.where('asset_face.isVisible', 'is', true)
|
||||
.orderBy('person.isHidden', 'asc')
|
||||
.orderBy('person.isFavorite', 'desc')
|
||||
.having((eb) =>
|
||||
@@ -208,19 +211,23 @@ export class PersonRepository {
|
||||
.selectAll('person')
|
||||
.leftJoin('asset_face', 'asset_face.personId', 'person.id')
|
||||
.where('asset_face.deletedAt', 'is', null)
|
||||
.where('asset_face.isVisible', 'is', true)
|
||||
.having((eb) => eb.fn.count('asset_face.assetId'), '=', 0)
|
||||
.groupBy('person.id')
|
||||
.execute();
|
||||
}
|
||||
|
||||
@GenerateSql({ params: [DummyValue.UUID] })
|
||||
getFaces(assetId: string) {
|
||||
  // Returns the non-deleted faces for an asset, joined with their person,
  // ordered left-to-right by bounding box. Defaults to visible faces only;
  // pass { isVisible: undefined } to include hidden faces as well.
  getFaces(assetId: string, options?: { isVisible?: boolean }) {
    const isVisible = options === undefined ? true : options.isVisible;

    return this.db
      .selectFrom('asset_face')
      .selectAll('asset_face')
      .select(withPerson)
      .where('asset_face.assetId', '=', assetId)
      .where('asset_face.deletedAt', 'is', null)
      .$if(isVisible !== undefined, (qb) => qb.where('asset_face.isVisible', '=', isVisible!))
      .orderBy('asset_face.boundingBoxX1', 'asc')
      .execute();
  }
|
||||
@@ -350,6 +357,7 @@ export class PersonRepository {
|
||||
)
|
||||
.select((eb) => eb.fn.count(eb.fn('distinct', ['asset.id'])).as('count'))
|
||||
.where('asset_face.deletedAt', 'is', null)
|
||||
.where('asset_face.isVisible', 'is', true)
|
||||
.executeTakeFirst();
|
||||
|
||||
return {
|
||||
@@ -368,6 +376,7 @@ export class PersonRepository {
|
||||
.selectFrom('asset_face')
|
||||
.whereRef('asset_face.personId', '=', 'person.id')
|
||||
.where('asset_face.deletedAt', 'is', null)
|
||||
.where('asset_face.isVisible', '=', true)
|
||||
.where((eb) =>
|
||||
eb.exists((eb) =>
|
||||
eb
|
||||
@@ -495,6 +504,7 @@ export class PersonRepository {
|
||||
.selectAll('asset_face')
|
||||
.where('asset_face.personId', '=', personId)
|
||||
.where('asset_face.deletedAt', 'is', null)
|
||||
.where('asset_face.isVisible', 'is', true)
|
||||
.executeTakeFirst();
|
||||
}
|
||||
|
||||
@@ -539,4 +549,37 @@ export class PersonRepository {
|
||||
}
|
||||
return this.db.selectFrom('person').select(['id', 'thumbnailPath']).where('id', 'in', ids).execute();
|
||||
}
|
||||
|
||||
@GenerateSql({ params: [[], []] })
|
||||
async updateVisibility(visible: AssetFace[], hidden: AssetFace[]): Promise<void> {
|
||||
if (visible.length === 0 && hidden.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.db.transaction().execute(async (trx) => {
|
||||
if (visible.length > 0) {
|
||||
await trx
|
||||
.updateTable('asset_face')
|
||||
.set({ isVisible: true })
|
||||
.where(
|
||||
'asset_face.id',
|
||||
'in',
|
||||
visible.map(({ id }) => id),
|
||||
)
|
||||
.execute();
|
||||
}
|
||||
|
||||
if (hidden.length > 0) {
|
||||
await trx
|
||||
.updateTable('asset_face')
|
||||
.set({ isVisible: false })
|
||||
.where(
|
||||
'asset_face.id',
|
||||
'in',
|
||||
hidden.map(({ id }) => id),
|
||||
)
|
||||
.execute();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -483,6 +483,7 @@ class AssetFaceSync extends BaseSync {
|
||||
])
|
||||
.leftJoin('asset', 'asset.id', 'asset_face.assetId')
|
||||
.where('asset.ownerId', '=', options.userId)
|
||||
.where('asset_face.isVisible', '=', true)
|
||||
.stream();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,6 +37,7 @@ export interface ClientEventMap {
|
||||
|
||||
AssetUploadReadyV1: [{ asset: SyncAssetV1; exif: SyncAssetExifV1 }];
|
||||
AppRestartV1: [AppRestartEvent];
|
||||
AssetEditReadyV1: [{ assetId: string }];
|
||||
}
|
||||
|
||||
export type AuthFn = (client: Socket) => Promise<AuthDto>;
|
||||
|
||||
Reference in New Issue
Block a user