chore: more wip

This commit is contained in:
bwees
2026-03-24 16:34:07 -05:00
parent d8d532e7ca
commit 80db413d69
19 changed files with 400 additions and 155 deletions

View File

@@ -30,6 +30,7 @@ import {
AssetMediaOptionsDto,
AssetMediaReplaceDto,
AssetMediaSize,
AssetThumbnailOptionsDto,
CheckExistingAssetsDto,
UploadFieldName,
} from 'src/dtos/asset-media.dto';
@@ -154,7 +155,7 @@ export class AssetMediaController {
async viewAsset(
@Auth() auth: AuthDto,
@Param() { id }: UUIDParamDto,
@Query() dto: AssetMediaOptionsDto,
@Query() dto: AssetThumbnailOptionsDto,
@Req() req: Request,
@Res() res: Response,
@Next() next: NextFunction,
@@ -197,9 +198,10 @@ export class AssetMediaController {
@Auth() auth: AuthDto,
@Param() { id }: UUIDParamDto,
@Res() res: Response,
@Query() dto: AssetMediaOptionsDto,
@Next() next: NextFunction,
) {
await sendFile(res, next, () => this.service.playbackVideo(auth, id), this.logger);
await sendFile(res, next, () => this.service.playbackVideo(auth, id, dto), this.logger);
}
@Post('exist')

View File

@@ -120,8 +120,8 @@ export class StorageCore {
);
}
static getEncodedVideoPath(asset: ThumbnailPathEntity) {
return StorageCore.getNestedPath(StorageFolder.EncodedVideo, asset.ownerId, `${asset.id}.mp4`);
static getEncodedVideoPath(asset: ThumbnailPathEntity, isEdited: boolean = false) {
return StorageCore.getNestedPath(StorageFolder.EncodedVideo, asset.ownerId, `${asset.id}${isEdited ? '_edited' : ''}.mp4`);
}
static getAndroidMotionPath(asset: ThumbnailPathEntity, uuid: string) {

View File

@@ -18,13 +18,15 @@ export enum AssetMediaSize {
}
export class AssetMediaOptionsDto {
@ValidateEnum({ enum: AssetMediaSize, name: 'AssetMediaSize', description: 'Asset media size', optional: true })
size?: AssetMediaSize;
@ValidateBoolean({ optional: true, description: 'Return edited asset if available', default: false })
edited?: boolean;
}
/**
 * Query options for thumbnail/view endpoints: inherits the fields of
 * AssetMediaOptionsDto and adds an optional pre-generated media size selector.
 */
export class AssetThumbnailOptionsDto extends AssetMediaOptionsDto {
@ValidateEnum({ enum: AssetMediaSize, name: 'AssetMediaSize', description: 'Asset media size', optional: true })
// Optional: which pre-generated asset media size to serve.
size?: AssetMediaSize;
}
export enum UploadFieldName {
ASSET_DATA = 'assetData',
SIDECAR_DATA = 'sidecarData',

View File

@@ -588,8 +588,6 @@ export enum JobName {
AssetDetectFaces = 'AssetDetectFaces',
AssetDetectDuplicatesQueueAll = 'AssetDetectDuplicatesQueueAll',
AssetDetectDuplicates = 'AssetDetectDuplicates',
AssetEditThumbnailGeneration = 'AssetEditThumbnailGeneration',
AssetEditTranscodeGeneration = 'AssetEditTranscodeGeneration',
AssetEncodeVideoQueueAll = 'AssetEncodeVideoQueueAll',
AssetEncodeVideo = 'AssetEncodeVideo',
AssetEmptyTrash = 'AssetEmptyTrash',
@@ -598,6 +596,7 @@ export enum JobName {
AssetFileMigration = 'AssetFileMigration',
AssetGenerateThumbnailsQueueAll = 'AssetGenerateThumbnailsQueueAll',
AssetGenerateThumbnails = 'AssetGenerateThumbnails',
AssetProcessEdit = 'AssetProcessEdit',
AuditLogCleanup = 'AuditLogCleanup',
AuditTableCleanup = 'AuditTableCleanup',

View File

@@ -138,6 +138,39 @@ export class AssetJobRepository {
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
// Loads everything the AssetProcessEdit job needs for a single asset:
// core asset columns, the thumbnail/preview/full-size/encoded-video file
// rows (as a JSON array under `files`), the asset's edit list, and exif
// info via an inner join. Returns undefined when the asset does not exist.
getForAssetEditProcessing(id: string) {
return this.db
.selectFrom('asset')
.select([
'asset.id',
'asset.visibility',
'asset.originalFileName',
'asset.originalPath',
'asset.ownerId',
'asset.thumbhash',
'asset.type',
])
// Aggregate the relevant derived files into a single JSON array column.
.select((eb) =>
jsonArrayFrom(
eb
.selectFrom('asset_file')
.select(columns.assetFilesForThumbnail)
.whereRef('asset_file.assetId', '=', 'asset.id')
.where('asset_file.type', 'in', [
AssetFileType.Thumbnail,
AssetFileType.Preview,
AssetFileType.FullSize,
AssetFileType.EncodedVideo,
]),
).as('files'),
)
// Attach the asset's edit actions.
.select(withEdits)
// NOTE(review): withExifInner is an inner join — assets without exif rows
// will not be returned; confirm that is intended for this job.
.$call(withExifInner)
.where('asset.id', '=', id)
.executeTakeFirst();
}
@GenerateSql({ params: [DummyValue.UUID] })
getForMetadataExtraction(id: string) {
return this.db
@@ -308,7 +341,7 @@ export class AssetJobRepository {
streamForVideoConversion(force?: boolean) {
return this.db
.selectFrom('asset')
.select(['asset.id'])
.select(['asset.id', 'asset.isEdited'])
.where('asset.type', '=', sql.lit(AssetType.Video))
.$if(!force, (qb) =>
qb
@@ -334,7 +367,15 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.select(['asset.id', 'asset.ownerId', 'asset.originalPath'])
.select(withFiles)
.select((eb) =>
jsonArrayFrom(
eb
.selectFrom('asset_file')
.select(columns.assetFilesForThumbnail)
.whereRef('asset_file.assetId', '=', 'asset.id')
.where('asset_file.type', '=', sql.lit(AssetFileType.EncodedVideo)),
).as('files'),
)
.select(withEdits)
.where('asset.id', '=', id)
.where('asset.type', '=', sql.lit(AssetType.Video))

View File

@@ -1149,12 +1149,12 @@ export class AssetRepository {
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [DummyValue.UUID] })
async getForVideo(id: string) {
@GenerateSql({ params: [DummyValue.UUID, true] })
async getForVideo(id: string, isEdited: boolean) {
return this.db
.selectFrom('asset')
.select(['asset.originalPath'])
.select((eb) => withFilePath(eb, AssetFileType.EncodedVideo).as('encodedVideoPath'))
.select((eb) => withFilePath(eb, AssetFileType.EncodedVideo, isEdited).as('encodedVideoPath'))
.where('asset.id', '=', id)
.where('asset.type', '=', AssetType.Video)
.executeTakeFirst();

View File

@@ -17,6 +17,7 @@ import {
AssetMediaOptionsDto,
AssetMediaReplaceDto,
AssetMediaSize,
AssetThumbnailOptionsDto,
CheckExistingAssetsDto,
UploadFieldName,
} from 'src/dtos/asset-media.dto';
@@ -222,7 +223,7 @@ export class AssetMediaService extends BaseService {
async viewThumbnail(
auth: AuthDto,
id: string,
dto: AssetMediaOptionsDto,
dto: AssetThumbnailOptionsDto,
): Promise<ImmichFileResponse | AssetMediaRedirectResponse> {
await this.requireAccess({ auth, permission: Permission.AssetView, ids: [id] });
@@ -266,10 +267,10 @@ export class AssetMediaService extends BaseService {
});
}
async playbackVideo(auth: AuthDto, id: string): Promise<ImmichFileResponse> {
async playbackVideo(auth: AuthDto, id: string, dto: AssetMediaOptionsDto): Promise<ImmichFileResponse> {
await this.requireAccess({ auth, permission: Permission.AssetView, ids: [id] });
const asset = await this.assetRepository.getForVideo(id);
const asset = await this.assetRepository.getForVideo(id, dto.edited ?? false);
if (!asset) {
throw new NotFoundException('Asset not found or asset is not a video');

View File

@@ -47,6 +47,7 @@ import {
} from 'src/utils/asset.util';
import { updateLockedColumns } from 'src/utils/database';
import { extractTimeZone } from 'src/utils/date';
import { scaleEdits } from 'src/utils/editor';
import { transformOcrBoundingBox } from 'src/utils/transform';
@Injectable()
@@ -605,10 +606,27 @@ export class AssetService extends BaseService {
}
const newEdits = await this.assetEditRepository.replaceAll(id, edits);
await this.jobRepository.queue({ name: JobName.AssetEditThumbnailGeneration, data: { id } });
await this.jobRepository.queue({ name: JobName.AssetProcessEdit, data: { id } });
if (asset.livePhotoVideoId) {
await this.jobRepository.queue({ name: JobName.AssetEditTranscodeGeneration, data: { id } });
const liveAsset = await this.assetRepository.getForEdit(asset.livePhotoVideoId);
if (!liveAsset) {
throw new BadRequestException('Live photo video not found');
}
const { width: liveWidth, height: liveHeight } = getDimensions(liveAsset);
console.log(liveWidth, liveHeight);
const scaledEdits = scaleEdits(
edits,
{ width: liveWidth, height: liveHeight },
{ width: assetWidth, height: assetHeight },
);
await this.assetEditRepository.replaceAll(asset.livePhotoVideoId, scaledEdits);
await this.jobRepository.queue({
name: JobName.AssetProcessEdit,
data: { id: asset.livePhotoVideoId },
});
}
// Return the asset and its applied edits
@@ -627,6 +645,14 @@ export class AssetService extends BaseService {
}
await this.assetEditRepository.replaceAll(id, []);
await this.jobRepository.queue({ name: JobName.AssetEditThumbnailGeneration, data: { id } });
await this.jobRepository.queue({ name: JobName.AssetProcessEdit, data: { id } });
if (asset.livePhotoVideoId) {
await this.assetEditRepository.replaceAll(asset.livePhotoVideoId, []);
await this.jobRepository.queue({
name: JobName.AssetProcessEdit,
data: { id: asset.livePhotoVideoId },
});
}
}
}

View File

@@ -95,8 +95,7 @@ export class JobService extends BaseService {
}
break;
}
case JobName.AssetEditThumbnailGeneration: {
case JobName.AssetProcessEdit: {
const asset = await this.assetRepository.getById(item.data.id);
const edits = await this.assetEditRepository.getWithSyncInfo(item.data.id);

View File

@@ -273,7 +273,7 @@ describe(MediaService.name, () => {
data: { id: asset.id },
},
{
name: JobName.AssetEditThumbnailGeneration,
name: JobName.AssetProcessEdit,
data: { id: asset.id },
},
]);
@@ -1323,7 +1323,7 @@ describe(MediaService.name, () => {
});
});
describe('handleAssetEditThumbnailGeneration', () => {
describe('handleAssetEditProcessing', () => {
let rawInfo: RawImageInfo;
beforeEach(() => {
@@ -1344,7 +1344,7 @@ describe(MediaService.name, () => {
const asset = AssetFactory.from({ type: AssetType.Video }).exif().build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
await expect(sut.handleAssetEditThumbnailGeneration({ id: asset.id })).resolves.toBe(JobStatus.Success);
await expect(sut.handleAssetEditProcessing({ id: asset.id })).resolves.toBe(JobStatus.Success);
expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
});
@@ -1365,7 +1365,7 @@ describe(MediaService.name, () => {
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.asset.upsertFiles).toHaveBeenCalledWith(
expect.arrayContaining([
@@ -1385,7 +1385,7 @@ describe(MediaService.name, () => {
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
rawBuffer,
expect.objectContaining({
@@ -1411,7 +1411,7 @@ describe(MediaService.name, () => {
.build();
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
const status = await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
const status = await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
@@ -1431,7 +1431,7 @@ describe(MediaService.name, () => {
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.media.generateThumbnail).toHaveBeenCalledTimes(3);
expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
@@ -1456,7 +1456,7 @@ describe(MediaService.name, () => {
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(getForGenerateThumbnail(asset));
mocks.media.generateThumbhash.mockResolvedValue(factory.buffer());
await sut.handleAssetEditThumbnailGeneration({ id: asset.id, source: 'upload' });
await sut.handleAssetEditProcessing({ id: asset.id, source: 'upload' });
expect(mocks.media.generateThumbhash).toHaveBeenCalled();
});
@@ -1469,7 +1469,7 @@ describe(MediaService.name, () => {
mocks.person.getFaces.mockResolvedValue([]);
mocks.ocr.getByAssetId.mockResolvedValue([]);
await sut.handleAssetEditThumbnailGeneration({ id: asset.id });
await sut.handleAssetEditProcessing({ id: asset.id });
expect(mocks.asset.update).toHaveBeenCalledWith(expect.objectContaining({ thumbhash: thumbhashBuffer }));
});

View File

@@ -4,7 +4,7 @@ import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { ImagePathOptions, StorageCore, ThumbnailPathEntity } from 'src/cores/storage.core';
import { AssetFile } from 'src/database';
import { OnEvent, OnJob } from 'src/decorators';
import { AssetEditAction, CropParameters } from 'src/dtos/editing.dto';
import { AssetEditAction, AssetEditActionItem, CropParameters } from 'src/dtos/editing.dto';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import {
AssetFileType,
@@ -39,7 +39,7 @@ import {
VideoInterfaces,
VideoStreamInfo,
} from 'src/types';
import { getAssetFile, getDimensions } from 'src/utils/asset.util';
import { getDimensions } from 'src/utils/asset.util';
import { checkFaceVisibility, checkOcrVisibility } from 'src/utils/editor';
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
import { mimeTypes } from 'src/utils/mime-types';
@@ -56,6 +56,13 @@ interface UpsertFileOptions {
}
type ThumbnailAsset = NonNullable<Awaited<ReturnType<AssetJobRepository['getForGenerateThumbnailJob']>>>;
type VideoConversionAsset = NonNullable<Awaited<ReturnType<AssetJobRepository['getForVideoConversion']>>>;
type ThumbnailGenerationResult = {
files: UpsertFileOptions[];
thumbhash: Buffer;
fullsizeDimensions: ImageDimensions;
};
@Injectable()
export class MediaService extends BaseService {
@@ -84,7 +91,7 @@ export class MediaService extends BaseService {
}
if (asset.isEdited) {
jobs.push({ name: JobName.AssetEditThumbnailGeneration, data: { id: asset.id } });
jobs.push({ name: JobName.AssetProcessEdit, data: { id: asset.id } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
@@ -168,9 +175,9 @@ export class MediaService extends BaseService {
return JobStatus.Success;
}
@OnJob({ name: JobName.AssetEditThumbnailGeneration, queue: QueueName.Editor })
async handleAssetEditThumbnailGeneration({ id }: JobOf<JobName.AssetEditThumbnailGeneration>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForGenerateThumbnailJob(id);
@OnJob({ name: JobName.AssetProcessEdit, queue: QueueName.Editor })
async handleAssetEditProcessing({ id }: JobOf<JobName.AssetProcessEdit>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForAssetEditProcessing(id);
const config = await this.getConfig({ withCache: true });
if (!asset) {
@@ -178,7 +185,25 @@ export class MediaService extends BaseService {
return JobStatus.Failed;
}
const generated = await this.generateEditedThumbnails(asset, config);
switch (asset.type) {
case AssetType.Image: {
await this.handleImageEdit(asset, config);
break;
}
case AssetType.Video: {
await this.handleVideoEdit(asset, config);
break;
}
default: {
this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
}
}
return JobStatus.Success;
}
private async handleImageEdit(asset: ThumbnailAsset, config: SystemConfig) {
const generated = await this.generateEditedImageThumbnails(asset, config);
await this.syncFiles(
asset.files.filter((file) => file.isEdited),
generated?.files ?? [],
@@ -203,54 +228,51 @@ export class MediaService extends BaseService {
const fullsizeDimensions = generated?.fullsizeDimensions ?? getDimensions(asset.exifInfo!);
await this.assetRepository.update({ id: asset.id, ...fullsizeDimensions });
return JobStatus.Success;
}
@OnJob({ name: JobName.AssetEditTranscodeGeneration, queue: QueueName.Editor })
async handleAssetEditTranscodeGeneration({ id }: JobOf<JobName.AssetEditTranscodeGeneration>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForVideoConversion(id);
if (!asset) {
return JobStatus.Failed;
}
private async handleVideoEdit(asset: ThumbnailAsset, config: SystemConfig) {
// transcode edited video
const generatedVideo = asset.edits.length > 0 ? await this.transcodeVideo(asset, config.ffmpeg, true) : undefined;
const input = asset.originalPath;
const output = StorageCore.getEncodedVideoPath(asset);
this.storageCore.ensureFolders(output);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
countFrames: this.logger.isLevelEnabled(LogLevel.Debug), // makes frame count more reliable for progress logs
});
const videoStream = this.getMainStream(videoStreams);
const audioStream = this.getMainStream(audioStreams);
if (!videoStream || !format.formatName) {
return JobStatus.Failed;
}
if (!videoStream.height || !videoStream.width) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
return JobStatus.Failed;
}
let { ffmpeg } = await this.getConfig({ withCache: true });
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(
TranscodeTarget.All,
videoStream,
audioStream,
undefined, // TODO: cleaner way to do this?
asset.edits,
await this.syncFiles(
asset.files.filter((file) => file.isEdited && file.type === AssetFileType.EncodedVideo),
generatedVideo ? [generatedVideo.file] : [],
);
await this.mediaRepository.transcode(input, output, command);
await this.assetRepository.upsertFile({
assetId: asset.id,
type: AssetFileType.EncodedVideo,
path: output,
isEdited: true,
});
// update asset dimensions
const newDimensions = generatedVideo?.dimensions ?? getDimensions(asset.exifInfo!);
await this.assetRepository.update({ id: asset.id, ...newDimensions });
return JobStatus.Success;
// if the asset is hidden, we dont need to update the thumbhash or thumbnails
if (asset.visibility === AssetVisibility.Hidden) {
return;
}
const editedThumbnails = await this.generateEditedVideoThumbnails(asset, config);
await this.syncFiles(
asset.files.filter((file) => file.isEdited && file.type !== AssetFileType.EncodedVideo),
editedThumbnails?.files ?? [],
);
let thumbhash: Buffer | undefined = editedThumbnails?.thumbhash;
if (!thumbhash) {
const previewFile = asset.files.find((file) => file.type === AssetFileType.Preview && !file.isEdited);
if (!previewFile) {
this.logger.warn(`Failed to generate thumbhash for asset ${asset.id}: missing preview file`);
return;
}
thumbhash = await this.mediaRepository.generateThumbhash(previewFile.path, {
colorspace: config.image.colorspace,
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
});
}
// update asset table info
if (!asset.thumbhash || Buffer.compare(asset.thumbhash, thumbhash) !== 0) {
await this.assetRepository.update({ id: asset.id, thumbhash });
}
}
@OnJob({ name: JobName.AssetGenerateThumbnails, queue: QueueName.ThumbnailGeneration })
@@ -263,31 +285,34 @@ export class MediaService extends BaseService {
return JobStatus.Failed;
}
let generated: ThumbnailGenerationResult;
let generatedEdited: ThumbnailGenerationResult | undefined;
if (asset.visibility === AssetVisibility.Hidden) {
this.logger.verbose(`Thumbnail generation skipped for asset ${id}: not visible`);
return JobStatus.Skipped;
}
let generated: Awaited<ReturnType<MediaService['generateImageThumbnails']>>;
if (asset.type === AssetType.Video || asset.originalFileName.toLowerCase().endsWith('.gif')) {
this.logger.verbose(`Thumbnail generation for video ${id} ${asset.originalPath}`);
generated = await this.generateVideoThumbnails(asset, config);
generatedEdited = await this.generateEditedVideoThumbnails(asset, config);
} else if (asset.type === AssetType.Image) {
this.logger.verbose(`Thumbnail generation for image ${id} ${asset.originalPath}`);
generated = await this.generateImageThumbnails(asset, config);
generatedEdited = await this.generateEditedImageThumbnails(asset, config);
} else {
this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
return JobStatus.Skipped;
}
const editedGenerated = await this.generateEditedThumbnails(asset, config);
if (editedGenerated) {
generated.files.push(...editedGenerated.files);
if (generatedEdited) {
generated.files.push(...generatedEdited.files);
}
await this.syncFiles(asset.files, generated.files);
const thumbhash = editedGenerated?.thumbhash || generated.thumbhash;
const thumbhash = generatedEdited?.thumbhash || generated.thumbhash;
if (!asset.thumbhash || Buffer.compare(asset.thumbhash, thumbhash) !== 0) {
await this.assetRepository.update({ id: asset.id, thumbhash });
}
@@ -553,20 +578,21 @@ export class MediaService extends BaseService {
}
private async generateVideoThumbnails(
asset: ThumbnailPathEntity & { originalPath: string },
asset: ThumbnailPathEntity & { originalPath: string; edits: AssetEditActionItem[] },
{ ffmpeg, image }: SystemConfig,
useEdits: boolean = false,
) {
const previewFile = this.getImageFile(asset, {
fileType: AssetFileType.Preview,
format: image.preview.format,
isEdited: false,
isEdited: useEdits,
isProgressive: false,
isTransparent: false,
});
const thumbnailFile = this.getImageFile(asset, {
fileType: AssetFileType.Thumbnail,
format: image.thumbnail.format,
isEdited: false,
isEdited: useEdits,
isProgressive: false,
isTransparent: false,
});
@@ -579,14 +605,27 @@ export class MediaService extends BaseService {
}
const mainAudioStream = this.getMainStream(audioStreams);
let edits: AssetEditActionItem[] | undefined;
if (useEdits) {
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
edits = asset.edits;
}
const previewConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.preview.size.toString() });
const thumbnailConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() });
const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, mainVideoStream, mainAudioStream, format);
const previewOptions = previewConfig.getCommand(
TranscodeTarget.Video,
mainVideoStream,
mainAudioStream,
format,
edits,
);
const thumbnailOptions = thumbnailConfig.getCommand(
TranscodeTarget.Video,
mainVideoStream,
mainAudioStream,
format,
edits,
);
await this.mediaRepository.transcode(asset.originalPath, previewFile.path, previewOptions);
@@ -597,73 +636,69 @@ export class MediaService extends BaseService {
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
});
let fullsizeDimensions = { width: mainVideoStream.width, height: mainVideoStream.height };
if (useEdits) {
fullsizeDimensions = getOutputDimensions(asset.edits, fullsizeDimensions);
}
return {
files: [previewFile, thumbnailFile],
thumbhash,
fullsizeDimensions: { width: mainVideoStream.width, height: mainVideoStream.height },
fullsizeDimensions,
};
}
@OnJob({ name: JobName.AssetEncodeVideoQueueAll, queue: QueueName.VideoConversion })
async handleQueueVideoConversion(job: JobOf<JobName.AssetEncodeVideoQueueAll>): Promise<JobStatus> {
const { force } = job;
let queue: { name: JobName.AssetEncodeVideo; data: { id: string } }[] = [];
for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) {
queue.push({ name: JobName.AssetEncodeVideo, data: { id: asset.id } });
if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(queue);
queue = [];
}
}
await this.jobRepository.queueAll(queue);
return JobStatus.Success;
}
@OnJob({ name: JobName.AssetEncodeVideo, queue: QueueName.VideoConversion })
async handleVideoConversion({ id }: JobOf<JobName.AssetEncodeVideo>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForVideoConversion(id);
if (!asset) {
return JobStatus.Failed;
}
private async transcodeVideo(
asset: VideoConversionAsset,
ffmpeg: SystemConfigFFmpegDto,
useEdits: boolean = false,
): Promise<{ file: UpsertFileOptions; dimensions: { width: number; height: number } } | undefined> {
const input = asset.originalPath;
const output = StorageCore.getEncodedVideoPath(asset);
const output = StorageCore.getEncodedVideoPath(asset, useEdits);
this.storageCore.ensureFolders(output);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
countFrames: this.logger.isLevelEnabled(LogLevel.Debug), // makes frame count more reliable for progress logs
countFrames: this.logger.isLevelEnabled(LogLevel.Debug),
});
const videoStream = this.getMainStream(videoStreams);
const audioStream = this.getMainStream(audioStreams);
if (!videoStream || !format.formatName) {
return JobStatus.Failed;
return undefined;
}
if (!videoStream.height || !videoStream.width) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
return JobStatus.Failed;
return undefined;
}
let { ffmpeg } = await this.getConfig({ withCache: true });
const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
if (target === TranscodeTarget.None && !this.isRemuxRequired(ffmpeg, format)) {
const encodedVideo = getAssetFile(asset.files, AssetFileType.EncodedVideo, { isEdited: false });
if (encodedVideo) {
this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
await this.jobRepository.queue({ name: JobName.FileDelete, data: { files: [encodedVideo.path] } });
await this.assetRepository.deleteFiles([encodedVideo]);
} else {
this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`);
let target: TranscodeTarget;
let edits: AssetEditActionItem[] | undefined;
if (useEdits) {
if (asset.edits.length === 0) {
this.logger.verbose(`Asset ${asset.id} has no edits, skipping edited version transcoding`);
return undefined;
}
return JobStatus.Skipped;
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
target = TranscodeTarget.All;
edits = asset.edits;
} else {
target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
if (target === TranscodeTarget.None && !this.isRemuxRequired(ffmpeg, format)) {
this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`);
return undefined;
}
}
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(
target,
videoStream,
audioStream,
useEdits ? undefined : format,
edits,
);
if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) {
this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`);
} else {
@@ -677,7 +712,7 @@ export class MediaService extends BaseService {
} catch (error: any) {
this.logger.error(`Error occurred during transcoding: ${error.message}`);
if (ffmpeg.accel === TranscodeHardwareAcceleration.Disabled) {
return JobStatus.Failed;
throw error;
}
let partialFallbackSuccess = false;
@@ -685,7 +720,13 @@ export class MediaService extends BaseService {
try {
this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and software decoding`);
ffmpeg = { ...ffmpeg, accelDecode: false };
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(
target,
videoStream,
audioStream,
format,
edits,
);
await this.mediaRepository.transcode(input, output, command);
partialFallbackSuccess = true;
} catch (error: any) {
@@ -695,20 +736,92 @@ export class MediaService extends BaseService {
if (!partialFallbackSuccess) {
this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`);
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled }; // TODO: USE THIS TO DISABLE CPU ENCODING
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
ffmpeg = { ...ffmpeg, accel: TranscodeHardwareAcceleration.Disabled };
const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(
target,
videoStream,
audioStream,
format,
edits,
);
await this.mediaRepository.transcode(input, output, command);
}
}
this.logger.log(`Successfully encoded ${asset.id}`);
await this.assetRepository.upsertFile({
assetId: asset.id,
type: AssetFileType.EncodedVideo,
path: output,
isEdited: false,
});
let finalDimensions = { width: videoStream.width, height: videoStream.height };
if (useEdits) {
finalDimensions = getOutputDimensions(asset.edits, finalDimensions);
}
return {
dimensions: finalDimensions,
file: {
assetId: asset.id,
type: AssetFileType.EncodedVideo,
path: output,
isEdited: useEdits,
isProgressive: false,
isTransparent: false,
},
};
}
@OnJob({ name: JobName.AssetEncodeVideoQueueAll, queue: QueueName.VideoConversion })
// Streams all videos eligible for conversion and fans out per-asset jobs,
// flushing in batches of JOBS_ASSET_PAGINATION_SIZE to bound memory use.
async handleQueueVideoConversion(job: JobOf<JobName.AssetEncodeVideoQueueAll>): Promise<JobStatus> {
const { force } = job;
let jobs: JobItem[] = [];
for await (const asset of this.assetJobRepository.streamForVideoConversion(force)) {
// Unedited assets (or any asset during a forced run) get a plain encode job.
if (force || !asset.isEdited) {
jobs.push({ name: JobName.AssetEncodeVideo, data: { id: asset.id } });
}
// Edited assets also get the edit-processing job so their edited outputs are regenerated.
if (asset.isEdited) {
jobs.push({ name: JobName.AssetProcessEdit, data: { id: asset.id } });
}
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
await this.jobRepository.queueAll(jobs);
jobs = [];
}
}
// Flush the final partial batch after the stream ends.
await this.jobRepository.queueAll(jobs);
return JobStatus.Success;
}
@OnJob({ name: JobName.AssetEncodeVideo, queue: QueueName.VideoConversion })
// Transcodes one video asset (and, when applicable, its edited variant) and
// syncs the resulting encoded-video file rows for the asset.
async handleVideoConversion({ id }: JobOf<JobName.AssetEncodeVideo>): Promise<JobStatus> {
const asset = await this.assetJobRepository.getForVideoConversion(id);
if (!asset) {
return JobStatus.Failed;
}
const { ffmpeg } = await this.getConfig({ withCache: true });
const files: UpsertFileOptions[] = [];
try {
// Base (unedited) transcode; undefined means policy decided no transcode is needed.
const generated = await this.transcodeVideo(asset, ffmpeg);
if (!generated) {
// NOTE(review): returning here also skips the edited-variant transcode
// below — confirm an asset with edits should be skipped entirely when the
// base transcode is not required.
return JobStatus.Skipped;
}
// NOTE(review): `generated` is known non-null here, so the `?.` is redundant.
if (generated?.file) {
files.push(generated.file);
}
// Edited-variant transcode; undefined when the asset has no edits (or probing failed).
const editedGenerated = await this.transcodeVideo(asset, ffmpeg, true);
if (editedGenerated) {
files.push(editedGenerated.file);
}
} catch {
// transcodeVideo throws after exhausting its fallback attempts.
return JobStatus.Failed;
}
// Reconcile the asset's stored file rows with what was just generated.
await this.syncFiles(asset.files, files);
return JobStatus.Success;
}
@@ -920,13 +1033,29 @@ export class MediaService extends BaseService {
}
}
private async generateEditedThumbnails(asset: ThumbnailAsset, config: SystemConfig) {
// Generates the edited-variant thumbnails for an image asset and refreshes
// ML (face/OCR) visibilities against the current edits. No-ops for non-image
// assets or when there are neither files nor edits to reconcile. Returns the
// thumbnail generation result, or undefined when no edits exist (visibilities
// are still updated in that case so stale crops are cleared).
private async generateEditedImageThumbnails(asset: ThumbnailAsset, config: SystemConfig) {
if (asset.type !== AssetType.Image || (asset.files.length === 0 && asset.edits.length === 0)) {
return;
}
const generated = asset.edits.length > 0 ? await this.generateImageThumbnails(asset, config, true) : undefined;
await this.updateMLVisibilities(asset);
return generated;
}
// Video counterpart of generateEditedImageThumbnails: generates edited-variant
// thumbnails for a video asset and refreshes ML visibilities. No-ops for
// non-video assets or when there are neither files nor edits; returns undefined
// when no edits exist (visibilities are still refreshed).
private async generateEditedVideoThumbnails(asset: ThumbnailAsset, config: SystemConfig) {
if (asset.type !== AssetType.Video || (asset.files.length === 0 && asset.edits.length === 0)) {
return;
}
const generated = asset.edits.length > 0 ? await this.generateVideoThumbnails(asset, config, true) : undefined;
await this.updateMLVisibilities(asset);
return generated;
}
private async updateMLVisibilities(asset: ThumbnailAsset) {
const crop = asset.edits.find((e) => e.action === AssetEditAction.Crop);
const cropBox = crop
? {
@@ -946,8 +1075,6 @@ export class MediaService extends BaseService {
const ocrStatuses = checkOcrVisibility(ocrData, originalDimensions, cropBox);
await this.ocrRepository.updateOcrVisibilities(asset.id, ocrStatuses.visible, ocrStatuses.hidden);
return generated;
}
private warnOnTransparencyLoss(isTransparent: boolean, format: ImageFormat, assetId: string) {

View File

@@ -391,8 +391,7 @@ export type JobItem =
| { name: JobName.WorkflowRun; data: IWorkflowJob }
// Editor
| { name: JobName.AssetEditThumbnailGeneration; data: IEntityJob }
| { name: JobName.AssetEditTranscodeGeneration; data: IEntityJob };
| { name: JobName.AssetProcessEdit; data: IEntityJob };
export type VectorExtension = (typeof VECTOR_EXTENSIONS)[number];

View File

@@ -126,12 +126,13 @@ export function withFiles(eb: ExpressionBuilder<DB, 'asset'>, type?: AssetFileTy
).as('files');
}
export function withFilePath(eb: ExpressionBuilder<DB, 'asset'>, type: AssetFileType) {
export function withFilePath(eb: ExpressionBuilder<DB, 'asset'>, type: AssetFileType, isEdited = false) {
return eb
.selectFrom('asset_file')
.select('asset_file.path')
.whereRef('asset_file.assetId', '=', 'asset.id')
.where('asset_file.type', '=', type);
.where('asset_file.type', '=', type)
.where('asset_file.isEdited', '=', isEdited);
}
export function withFacesAndPeople(

View File

@@ -1,4 +1,5 @@
import { AssetFace } from 'src/database';
import { AssetEditActionItem, CropParameters } from 'src/dtos/editing.dto';
import { AssetOcrResponseDto } from 'src/dtos/ocr.dto';
import { ImageDimensions } from 'src/types';
@@ -31,6 +32,15 @@ const scale = (box: BoundingBox, target: ImageDimensions, source?: ImageDimensio
};
};
// Projects a crop rectangle from the source image space into the target image
// space, rounding each projected value to the nearest integer pixel.
const scaleCrop = (crop: CropParameters, target: ImageDimensions, source: ImageDimensions) => {
  // Project a horizontal / vertical value between the two coordinate spaces.
  const alongWidth = (value: number) => Math.round((value / source.width) * target.width);
  const alongHeight = (value: number) => Math.round((value / source.height) * target.height);
  return {
    width: alongWidth(crop.width),
    height: alongHeight(crop.height),
    x: alongWidth(crop.x),
    y: alongHeight(crop.y),
  };
};
export const checkFaceVisibility = (
faces: AssetFace[],
originalAssetDimensions: ImageDimensions,
@@ -105,3 +115,20 @@ export const checkOcrVisibility = (
hidden: status.filter((s) => !s.isVisible).map((s) => s.ocr),
};
};
/**
 * Rescales a list of edit actions from the source dimensions to the target
 * dimensions. Only crop parameters are dimension-dependent; every other edit
 * action is passed through unchanged.
 */
export const scaleEdits = (
  edits: AssetEditActionItem[],
  target: ImageDimensions,
  source: ImageDimensions,
): AssetEditActionItem[] => {
  return edits.map((edit) =>
    edit.action === 'crop'
      ? ({ ...edit, parameters: scaleCrop(edit.parameters as CropParameters, target, source) } as AssetEditActionItem)
      : edit,
  );
};

View File

@@ -8,7 +8,7 @@ import {
RotateParameters,
} from 'src/dtos/editing.dto';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import { CQMode, LogLevel, ToneMapping, TranscodeHardwareAcceleration, TranscodeTarget, VideoCodec } from 'src/enum';
import { CQMode, ToneMapping, TranscodeHardwareAcceleration, TranscodeTarget, VideoCodec } from 'src/enum';
import {
AudioStreamInfo,
BitrateDistribution,
@@ -98,8 +98,17 @@ export class BaseConfig implements VideoCodecSWConfig {
format?: VideoFormat,
edits: AssetEditActionItem[] = [],
) {
const inputOptions = this.getBaseInputOptions(videoStream, format);
if (edits.length > 0) {
// turns out MOV files can have cropping metadata that ffmpeg automatically applies when decoding
// this means that the video streams dimensions can just be wrong once it hits the filter pipeline
// https://github.com/FFmpeg/FFmpeg/blob/f40fcf802472227851e0b8eeba40b9e6b3b8a3a1/libavutil/frame.h#L1021
inputOptions.push('-apply_cropping 0');
}
const options = {
inputOptions: this.getBaseInputOptions(videoStream, format),
inputOptions,
outputOptions: [...this.getBaseOutputOptions(target, videoStream, audioStream), '-v verbose'],
twoPass: this.eligibleForTwoPass(),
progress: { frameCount: videoStream.frameCount, percentInterval: 5 },