feat(server): xxhash

Jonathan Jogenfors
2024-10-11 01:05:19 +02:00
parent 465f4639da
commit 0dbb0aabc9
15 changed files with 523 additions and 14 deletions

View File

@@ -282,7 +282,10 @@ export class StorageCore {
private savePath(pathType: PathType, id: string, newPath: string) {
switch (pathType) {
case AssetPathType.ORIGINAL: {
return this.assetRepository.update({ id, originalPath: newPath });
return Promise.all([
this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.ORIGINAL, path: newPath }),
this.assetRepository.update({ id, originalPath: newPath }),
]);
}
case AssetPathType.PREVIEW: {
return this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.PREVIEW, path: newPath });

View File

@@ -35,4 +35,8 @@ export class AssetFileEntity {
@Column()
path!: string;
@Column({ type: 'bigint' })
@Index()
checksum!: BigInt | null;
}
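Side note on the new column: the entity declares BigInt | null, while a Postgres bigint column is returned by the pg driver as a string by default. A hypothetical ValueTransformer (illustration only, not part of this diff) is one way to keep the value a JS bigint on both read and write:

import { ValueTransformer } from 'typeorm';

// Hypothetical transformer: converts between the driver's string representation
// of a bigint column and a JS bigint.
const bigintTransformer: ValueTransformer = {
  to: (value: bigint | null) => (value == null ? null : value.toString()),
  from: (value: string | null) => (value == null ? null : BigInt(value)),
};

// Usage sketch: @Column({ type: 'bigint', transformer: bigintTransformer })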

View File

@@ -11,6 +11,7 @@ export enum AssetType {
}
export enum AssetFileType {
ORIGINAL = 'original',
PREVIEW = 'preview',
THUMBNAIL = 'thumbnail',
}

View File

@@ -145,6 +145,7 @@ export interface UpsertFileOptions {
assetId: string;
type: AssetFileType;
path: string;
checksum?: BigInt;
}
export type AssetPathEntity = Pick<AssetEntity, 'id' | 'originalPath' | 'isOffline'>;

View File

@@ -5,6 +5,7 @@ export interface ICryptoRepository {
randomUUID(): string;
hashFile(filePath: string | Buffer): Promise<Buffer>;
hashSha256(data: string): string;
xxHash(value: string): BigInt;
verifySha256(data: string, encrypted: string, publicKey: string): boolean;
hashSha1(data: string | Buffer): Buffer;
hashBcrypt(data: string | Buffer, saltOrRounds: string | number): Promise<string>;

View File

@@ -2,6 +2,7 @@ import { CallHandler, ExecutionContext, Inject, Injectable, NestInterceptor } fr
import { PATH_METADATA } from '@nestjs/common/constants';
import { Reflector } from '@nestjs/core';
import { transformException } from '@nestjs/platform-express/multer/multer/multer.utils';
import { xxh3 } from '@node-rs/xxhash';
import { NextFunction, RequestHandler } from 'express';
import multer, { StorageEngine, diskStorage } from 'multer';
import { createHash, randomUUID } from 'node:crypto';
@@ -33,12 +34,14 @@ export interface ImmichFile extends Express.Multer.File {
/** sha1 hash of file */
uuid: string;
checksum: Buffer;
xxhash: BigInt;
}
export function mapToUploadFile(file: ImmichFile): UploadFile {
return {
uuid: file.uuid,
checksum: file.checksum,
xxhash: file.xxhash,
originalPath: file.path,
originalName: Buffer.from(file.originalname, 'latin1').toString('utf8'),
size: file.size,
@@ -146,14 +149,22 @@ export class FileUploadInterceptor implements NestInterceptor {
return;
}
const hash = createHash('sha1');
file.stream.on('data', (chunk) => hash.update(chunk));
this.logger.debug(`Handling asset upload file: ${file.originalname}`);
const xxhash = new xxh3.Xxh3();
const sha1hash = createHash('sha1');
file.stream.on('data', (chunk) => {
xxhash.update(chunk);
sha1hash.update(chunk);
});
this.defaultStorage._handleFile(request, file, (error, info) => {
if (error) {
hash.destroy();
sha1hash.destroy();
xxhash.reset();
callback(error);
} else {
callback(null, { ...info, checksum: hash.digest() });
callback(null, { ...info, checksum: sha1hash.digest(), xxhash: xxhash.digest() });
}
});
}
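For reference, a standalone sketch of the dual-hash pattern the interceptor now uses: a single pass over the upload stream feeds both the existing SHA-1 digest and the new XXH3 digest. The file path is illustrative; only APIs already used in this diff plus node:fs are assumed:

import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';
import { xxh3 } from '@node-rs/xxhash';

const sha1hash = createHash('sha1');
const xxhash = xxh3.Xxh3.withSeed();

const stream = createReadStream('/tmp/upload.bin'); // illustrative path
stream.on('data', (chunk) => {
  sha1hash.update(chunk);
  xxhash.update(chunk);
});
stream.on('end', () => {
  const checksum = sha1hash.digest(); // Buffer, unchanged from before this commit
  const digest = xxhash.digest();     // bigint (64-bit XXH3)
  console.log(checksum.toString('hex'), digest);
});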

View File

@@ -0,0 +1,15 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
export class AssetFileChecksum1728632095015 implements MigrationInterface {
name = 'AssetFileChecksum1728632095015';
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`ALTER TABLE "asset_files" ADD "checksum" bigint`);
await queryRunner.query(`CREATE INDEX "IDX_c946066edd16cfa5c25a26aa8e" ON "asset_files" ("checksum") `);
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP INDEX "public"."IDX_c946066edd16cfa5c25a26aa8e"`);
await queryRunner.query(`ALTER TABLE "asset_files" DROP COLUMN "checksum"`);
}
}

View File

@@ -64,7 +64,8 @@ SELECT
"files"."createdAt" AS "files_createdAt",
"files"."updatedAt" AS "files_updatedAt",
"files"."type" AS "files_type",
"files"."path" AS "files_path"
"files"."path" AS "files_path",
"files"."checksum" AS "files_checksum"
FROM
"assets" "entity"
LEFT JOIN "exif" "exifInfo" ON "exifInfo"."assetId" = "entity"."id"
@@ -248,7 +249,8 @@ SELECT
"AssetEntity__AssetEntity_files"."createdAt" AS "AssetEntity__AssetEntity_files_createdAt",
"AssetEntity__AssetEntity_files"."updatedAt" AS "AssetEntity__AssetEntity_files_updatedAt",
"AssetEntity__AssetEntity_files"."type" AS "AssetEntity__AssetEntity_files_type",
"AssetEntity__AssetEntity_files"."path" AS "AssetEntity__AssetEntity_files_path"
"AssetEntity__AssetEntity_files"."path" AS "AssetEntity__AssetEntity_files_path",
"AssetEntity__AssetEntity_files"."checksum" AS "AssetEntity__AssetEntity_files_checksum"
FROM
"assets" "AssetEntity"
LEFT JOIN "exif" "AssetEntity__AssetEntity_exifInfo" ON "AssetEntity__AssetEntity_exifInfo"."assetId" = "AssetEntity"."id"
@@ -1117,10 +1119,11 @@ INSERT INTO
"createdAt",
"updatedAt",
"type",
"path"
"path",
"checksum"
)
VALUES
(DEFAULT, $1, DEFAULT, DEFAULT, $2, $3)
(DEFAULT, $1, DEFAULT, DEFAULT, $2, $3, DEFAULT)
ON CONFLICT ("assetId", "type") DO
UPDATE
SET
@@ -1141,10 +1144,11 @@ INSERT INTO
"createdAt",
"updatedAt",
"type",
"path"
"path",
"checksum"
)
VALUES
(DEFAULT, $1, DEFAULT, DEFAULT, $2, $3)
(DEFAULT, $1, DEFAULT, DEFAULT, $2, $3, DEFAULT)
ON CONFLICT ("assetId", "type") DO
UPDATE
SET

View File

@@ -801,7 +801,7 @@ export class AssetRepository implements IAssetRepository {
}
@GenerateSql({ params: [{ assetId: DummyValue.UUID, type: AssetFileType.PREVIEW, path: '/path/to/file' }] })
async upsertFile(file: { assetId: string; type: AssetFileType; path: string }): Promise<void> {
async upsertFile(file: { assetId: string; type: AssetFileType; path: string; checksum?: BigInt }): Promise<void> {
await this.fileRepository.upsert(file, { conflictPaths: ['assetId', 'type'] });
}
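The widened signature only adds the optional checksum; the upsert with conflictPaths: ['assetId', 'type'] is what produces the ON CONFLICT ("assetId", "type") DO UPDATE statements in the generated SQL above. A hedged usage sketch (UUID and digest values are placeholders):

await assetRepository.upsertFile({
  assetId: '00000000-0000-0000-0000-000000000000', // placeholder UUID
  type: AssetFileType.ORIGINAL,
  path: '/path/to/file',
  checksum: 123456789n, // illustrative XXH3 digest; when omitted, the generated INSERT passes DEFAULT
});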

View File

@@ -1,4 +1,5 @@
import { Injectable } from '@nestjs/common';
import { xxh3 } from '@node-rs/xxhash';
import { compareSync, hash } from 'bcrypt';
import { createHash, createPublicKey, createVerify, randomBytes, randomUUID } from 'node:crypto';
import { createReadStream } from 'node:fs';
@@ -28,6 +29,10 @@ export class CryptoRepository implements ICryptoRepository {
return createHash('sha256').update(value).digest('base64');
}
xxHash(value: string) {
return xxh3.Xxh3.withSeed().update(value).digest();
}
verifySha256(value: string, encryptedValue: string, publicKey: string) {
const publicKeyBuffer = Buffer.from(publicKey, 'base64');
const cryptoPublicKey = createPublicKey({
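For reference, a minimal sketch of what the new xxHash method returns, using only the @node-rs/xxhash calls already present in this diff (the input string is illustrative):

import { xxh3 } from '@node-rs/xxhash';

// XXH3 64-bit digest with the default seed, as in CryptoRepository.xxHash.
const digest = xxh3.Xxh3.withSeed().update('example-input').digest();
console.log(typeof digest); // 'bigint' -- the same value type stored in asset_files.checksum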

View File

@@ -21,7 +21,7 @@ import {
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ASSET_CHECKSUM_CONSTRAINT, AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus, AssetType, CacheControl, Permission, StorageFolder } from 'src/enum';
import { AssetFileType, AssetStatus, AssetType, CacheControl, Permission, StorageFolder } from 'src/enum';
import { JobName } from 'src/interfaces/job.interface';
import { BaseService } from 'src/services/base.service';
import { requireAccess, requireUploadAccess } from 'src/utils/access';
@@ -39,6 +39,7 @@ export interface UploadRequest {
export interface UploadFile {
uuid: string;
checksum: Buffer;
xxhash: BigInt;
originalPath: string;
originalName: string;
size: number;
@@ -334,6 +335,15 @@ export class AssetMediaService extends BaseService {
sidecarPath: sidecarPath || null,
});
await this.assetRepository.upsertFile({
assetId,
type: AssetFileType.ORIGINAL,
path: file.originalPath,
checksum: file.xxhash,
});
console.log('xxhash', file.xxhash);
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
await this.assetRepository.upsertExif({ assetId, fileSizeInByte: file.size });
await this.jobRepository.queue({
@@ -364,6 +374,8 @@ export class AssetMediaService extends BaseService {
sidecarPath: asset.sidecarPath,
});
// TODO: asset file original
const { size } = await this.storageRepository.stat(created.originalPath);
await this.assetRepository.upsertExif({ assetId: created.id, fileSizeInByte: size });
await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: created.id, source: 'copy' } });
@@ -400,6 +412,13 @@ export class AssetMediaService extends BaseService {
sidecarPath: sidecarFile?.originalPath,
});
await this.assetRepository.upsertFile({
assetId: asset.id,
type: AssetFileType.ORIGINAL,
path: asset.originalPath,
checksum: file.xxhash,
});
if (sidecarFile) {
await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
}
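Taken together, the new digest makes three hops in this diff: the interceptor computes file.xxhash while streaming the upload to disk, mapToUploadFile copies it onto UploadFile.xxhash, and the service persists it as the ORIGINAL asset file's checksum. In sketch form (names as in the diff; asset and file are assumed in scope):

// 1. FileUploadInterceptor: { ...info, checksum: sha1hash.digest(), xxhash: xxhash.digest() }
// 2. mapToUploadFile:       { uuid, checksum, xxhash, originalPath, originalName, size }
// 3. AssetMediaService:
await this.assetRepository.upsertFile({
  assetId: asset.id,
  type: AssetFileType.ORIGINAL,
  path: asset.originalPath,
  checksum: file.xxhash,
});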

View File

@@ -417,7 +417,6 @@ export class LibraryService extends BaseService {
localDateTime: mtime,
type: assetType,
originalFileName: parse(assetPath).base,
sidecarPath,
isExternal: true,
});