diff --git a/.gitignore b/.gitignore
index 537e048be2..e0544ad8d5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,3 +21,5 @@ mobile/openapi/.openapi-generator/FILES
 open-api/typescript-sdk/build
 mobile/android/fastlane/report.xml
 mobile/ios/fastlane/report.xml
+
+vite.config.js.timestamp-*
diff --git a/e2e/src/api/specs/library.e2e-spec.ts b/e2e/src/api/specs/library.e2e-spec.ts
index 9f5adc4e27..fe0b4f2bd4 100644
--- a/e2e/src/api/specs/library.e2e-spec.ts
+++ b/e2e/src/api/specs/library.e2e-spec.ts
@@ -500,13 +500,13 @@ describe('/libraries', () => {
     });

     it('should set an asset offline its file is not in any import path', async () => {
+      utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
+
       const library = await utils.createLibrary(admin.accessToken, {
         ownerId: admin.userId,
        importPaths: [`${testAssetDirInternal}/temp/offline`],
       });

-      utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
-
       await scan(admin.accessToken, library.id);
       await utils.waitForQueueFinish(admin.accessToken, 'library');

diff --git a/e2e/src/api/specs/repair.e2e-spec.ts b/e2e/src/api/specs/repair.e2e-spec.ts
new file mode 100644
index 0000000000..c68ccd4770
--- /dev/null
+++ b/e2e/src/api/specs/repair.e2e-spec.ts
@@ -0,0 +1,99 @@
+import { LibraryResponseDto, LoginResponseDto, getAllLibraries, scanLibrary } from '@immich/sdk';
+import { readFile } from 'fs/promises';
+import { Socket } from 'socket.io-client';
+import { userDto, uuidDto } from 'src/fixtures';
+import { errorDto } from 'src/responses';
+import { app, asBearerAuth, testAssetDir, testAssetDirInternal, utils } from 'src/utils';
+import request from 'supertest';
+import { utimes } from 'utimes';
+import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest';
+
+const scan = async (accessToken: string, id: string) => scanLibrary({ id }, { headers: asBearerAuth(accessToken) });
+
+describe('/repairs', () => {
+  let admin: LoginResponseDto;
+  let user: LoginResponseDto;
+  let library: LibraryResponseDto;
+  let websocket: Socket;
+
+  beforeAll(async () => {
+    await utils.resetDatabase();
+    admin = await utils.adminSetup();
+    await utils.resetAdminConfig(admin.accessToken);
+    user = await utils.userSetup(admin.accessToken, userDto.user1);
+    library = await utils.createLibrary(admin.accessToken, { ownerId: admin.userId });
+    websocket = await utils.connectWebsocket(admin.accessToken);
+  });
+
+  afterAll(() => {
+    utils.disconnectWebsocket(websocket);
+    utils.resetTempFolder();
+  });
+
+  beforeEach(() => {
+    utils.resetEvents();
+  });
+
+  describe('POST /check', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).post('/repairs/check').send({});
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
+    it('should require admin authentication', async () => {
+      const { status, body } = await request(app)
+        .post('/repairs/check')
+        .set('Authorization', `Bearer ${user.accessToken}`)
+        .send({ ownerId: admin.userId });
+
+      expect(status).toBe(403);
+      expect(body).toEqual(errorDto.forbidden);
+    });
+
+    it('should detect a changed original file', async () => {
+      const asset = await utils.createAsset(admin.accessToken, {
+        assetData: {
+          filename: 'polemonium_reptans.jpg',
+          bytes: await readFile(`${testAssetDir}/albums/nature/polemonium_reptans.jpg`),
+        },
+      });
+
+      await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
+
+      let assetPath = '';
+      {
+        const { status, body } = await request(app)
+          .get(`/assets/${asset.id}`)
+          .set('Authorization', `Bearer ${admin.accessToken}`);
+        expect(status).toBe(200);
+        assetPath = body.originalPath;
+      }
+
+      utils.flipBitInFile(assetPath, 2, 5);
+
+      const { status } = await request(app)
+        .post('/repairs/check')
+        .set('Authorization', `Bearer ${admin.accessToken}`)
+        .send({ ownerId: admin.userId });
+
+      expect(status).toBe(201);
+
+      await utils.waitForQueueFinish(admin.accessToken, 'repair');
+
+      const { status: repairStatus, body: repairs } = await request(app)
+        .get('/repairs')
+        .set('Authorization', `Bearer ${admin.accessToken}`);
+
+      expect(repairStatus).toBe(200);
+      expect(repairs).toEqual(
+        expect.arrayContaining([
+          expect.objectContaining({
+            type: 'checksum-mismatch',
+            path: assetPath,
+          }),
+        ]),
+      );
+    });
+  });
+});
diff --git a/e2e/src/utils.ts b/e2e/src/utils.ts
index 52acd35a5c..6bf8e6d051 100644
--- a/e2e/src/utils.ts
+++ b/e2e/src/utils.ts
@@ -37,7 +37,7 @@ import {
 import { BrowserContext } from '@playwright/test';
 import { exec, spawn } from 'node:child_process';
 import { createHash } from 'node:crypto';
-import { existsSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
+import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
 import { tmpdir } from 'node:os';
 import path, { dirname } from 'node:path';
 import { setTimeout as setAsyncTimeout } from 'node:timers/promises';
@@ -374,8 +374,8 @@ export const utils = {
   },

   createDirectory: (path: string) => {
-    if (!existsSync(dirname(path))) {
-      mkdirSync(dirname(path), { recursive: true });
+    if (!existsSync(path)) {
+      mkdirSync(path, { recursive: true });
     }
   },

@@ -387,12 +387,26 @@ export const utils = {
     rmSync(path);
   },

+  flipBitInFile: (filePath: string, byteIndex: number, bitPosition: number) => {
+    const data = readFileSync(filePath);
+
+    // Check if the byte index is within the file size
+    if (byteIndex >= data.length) {
+      throw new Error('Byte index is out of range.');
+    }
+
+    // Flip the specific bit using XOR
+    data[byteIndex] ^= 1 << bitPosition;
+
+    writeFileSync(filePath, data);
+  },
+
   removeDirectory: (path: string) => {
     if (!existsSync(path)) {
       return;
     }

-    rmSync(path);
+    rmSync(path, { recursive: true });
   },

   getAssetInfo: (accessToken: string, id: string) => getAssetInfo({ id }, { headers: asBearerAuth(accessToken) }),
diff --git a/server/src/config.ts b/server/src/config.ts
index 4fdf23ecc2..9fca9232e6 100644
--- a/server/src/config.ts
+++ b/server/src/config.ts
@@ -195,6 +195,7 @@ export const defaults = Object.freeze({
     [QueueName.THUMBNAIL_GENERATION]: { concurrency: 3 },
     [QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
     [QueueName.NOTIFICATION]: { concurrency: 5 },
+    [QueueName.REPAIR]: { concurrency: 2 },
   },
   logging: {
     enabled: true,
diff --git a/server/src/controllers/repair.controller.ts b/server/src/controllers/repair.controller.ts
new file mode 100644
index 0000000000..30e6d20a5a
--- /dev/null
+++ b/server/src/controllers/repair.controller.ts
@@ -0,0 +1,24 @@
+import { Controller, Get, Post } from '@nestjs/common';
+import { ApiTags } from '@nestjs/swagger';
+import { AuthDto } from 'src/dtos/auth.dto';
+import { RepairEntity } from 'src/entities/repair.entity';
+import { Auth, Authenticated } from 'src/middleware/auth.guard';
+import { RepairService } from 'src/services/repair.service';
+
+@ApiTags('Repairs')
+@Controller('repairs')
+export class RepairController {
+  constructor(private service: RepairService) {}
+
+  @Get()
+  @Authenticated({ admin: true })
+  getRepairs(@Auth() auth: AuthDto): Promise<RepairEntity[]> {
+    return this.service.getRepairs(auth);
+  }
+
+  @Post('/check')
+  @Authenticated({ admin: true })
+  validateChecksums(@Auth() auth: AuthDto): Promise<void> {
+    return this.service.queueVerifyChecksums(auth);
+  }
+}
diff --git a/server/src/dtos/job.dto.ts b/server/src/dtos/job.dto.ts
index 49e4cfb67b..b5b10ff642 100644
--- a/server/src/dtos/job.dto.ts
+++ b/server/src/dtos/job.dto.ts
@@ -97,4 +97,7 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>

   @ApiProperty({ type: JobStatusDto })
   [QueueName.NOTIFICATION]!: JobStatusDto;
+
+  @ApiProperty({ type: JobStatusDto })
+  [QueueName.REPAIR]!: JobStatusDto;
 }
diff --git a/server/src/dtos/system-config.dto.ts b/server/src/dtos/system-config.dto.ts
index 039dbd20ff..59e8d1ea0b 100644
--- a/server/src/dtos/system-config.dto.ts
+++ b/server/src/dtos/system-config.dto.ts
@@ -218,6 +218,12 @@ class SystemConfigJobDto implements Record<ConcurrentQueueName, JobSettingsDto>
   @IsObject()
   @Type(() => JobSettingsDto)
   [QueueName.NOTIFICATION]!: JobSettingsDto;
+
+  @ApiProperty({ type: JobSettingsDto })
+  @ValidateNested()
+  @IsObject()
+  @Type(() => JobSettingsDto)
+  [QueueName.REPAIR]!: JobSettingsDto;
 }

 class SystemConfigLibraryScanDto {
diff --git a/server/src/entities/asset-files.entity.ts b/server/src/entities/asset-files.entity.ts
index dbed61136c..0488abd4b6 100644
--- a/server/src/entities/asset-files.entity.ts
+++ b/server/src/entities/asset-files.entity.ts
@@ -36,7 +36,7 @@ export class AssetFileEntity {
   @Column()
   path!: string;

-  @Column({ type: 'bytea' })
+  @Column({ type: 'bytea', nullable: true, default: null })
   @Index()
   checksum!: Buffer | null;
 }
diff --git a/server/src/entities/repair.entity.ts b/server/src/entities/repair.entity.ts
new file mode 100644
index 0000000000..d3aff8fa45
--- /dev/null
+++ b/server/src/entities/repair.entity.ts
@@ -0,0 +1,21 @@
+import { AssetFileEntity } from 'src/entities/asset-files.entity';
+import { RepairType } from 'src/enum';
+import { Column, CreateDateColumn, Entity, ManyToOne, PrimaryGeneratedColumn, Unique } from 'typeorm';
+
+@Entity('repair')
+export class RepairEntity {
+  @PrimaryGeneratedColumn('uuid')
+  id!: string;
+
+  @ManyToOne(() => AssetFileEntity, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
+  assetFile!: AssetFileEntity;
+
+  @CreateDateColumn({ type: 'timestamptz' })
+  createdAt!: Date;
+
+  @Column()
+  type!: RepairType;
+
+  @Column()
+  path!: string;
+}
diff --git a/server/src/enum.ts b/server/src/enum.ts
index ca4571dc47..9b40f67baf 100644
--- a/server/src/enum.ts
+++ b/server/src/enum.ts
@@ -16,6 +16,10 @@ export enum AssetFileType {
   THUMBNAIL = 'thumbnail',
 }

+export enum RepairType {
+  CHECKSUM_MISMATCH = 'checksum-mismatch',
+}
+
 export enum AlbumUserRole {
   EDITOR = 'editor',
   VIEWER = 'viewer',
diff --git a/server/src/interfaces/asset.interface.ts b/server/src/interfaces/asset.interface.ts
index bafef02c8c..edc4a6e7c9 100644
--- a/server/src/interfaces/asset.interface.ts
+++ b/server/src/interfaces/asset.interface.ts
@@ -1,3 +1,4 @@
+import { AssetFileEntity } from 'src/entities/asset-files.entity';
 import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
 import { AssetEntity } from 'src/entities/asset.entity';
 import { ExifEntity } from 'src/entities/exif.entity';
@@ -195,6 +196,8 @@ export interface IAssetRepository {
   getDuplicates(options: AssetBuilderOptions): Promise<AssetEntity[]>;
   getAllForUserFullSync(options: AssetFullSyncOptions): Promise<AssetEntity[]>;
   getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise<AssetEntity[]>;
+  getFileById(assetFileId: string): Promise<AssetFileEntity | null>;
+  removeFile(assetId: string, type: AssetFileType): Promise<void>;
   upsertFile(file: UpsertFileOptions): Promise<void>;
   upsertFiles(files: UpsertFileOptions[]): Promise<void>;
 }
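Aside: the RepairEntity introduced above has no accompanying migration in this diff that creates the repair table. Purely as a hedged sketch of what such a TypeORM migration could look like, derived from the entity's columns — the class name, timestamp, and constraint names below are placeholder assumptions, not part of this change:

import { MigrationInterface, QueryRunner } from 'typeorm';

export class AddRepairTable1728700000000 implements MigrationInterface {
  name = 'AddRepairTable1728700000000';

  public async up(queryRunner: QueryRunner): Promise<void> {
    // Mirrors RepairEntity: uuid primary key, createdAt timestamptz, type, path, and the asset_files relation.
    await queryRunner.query(
      `CREATE TABLE "repair" (
        "id" uuid NOT NULL DEFAULT uuid_generate_v4(),
        "createdAt" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(),
        "type" character varying NOT NULL,
        "path" character varying NOT NULL,
        "assetFileId" uuid,
        CONSTRAINT "PK_repair_id" PRIMARY KEY ("id")
      )`,
    );
    await queryRunner.query(
      `ALTER TABLE "repair" ADD CONSTRAINT "FK_repair_asset_file" FOREIGN KEY ("assetFileId") REFERENCES "asset_files" ("id") ON DELETE CASCADE ON UPDATE CASCADE`,
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP TABLE "repair"`);
  }
}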
diff --git a/server/src/interfaces/crypto.interface.ts b/server/src/interfaces/crypto.interface.ts
index 3891d174fe..5912ffbd88 100644
--- a/server/src/interfaces/crypto.interface.ts
+++ b/server/src/interfaces/crypto.interface.ts
@@ -6,6 +6,7 @@ export interface ICryptoRepository {
   hashFile(filePath: string | Buffer): Promise<Buffer>;
   hashSha256(data: string): string;
   xxHash(value: string): Buffer;
+  xxHashFile(filePath: string | Buffer): Promise<Buffer>;
   verifySha256(data: string, encrypted: string, publicKey: string): boolean;
   hashSha1(data: string | Buffer): Buffer;
   hashBcrypt(data: string | Buffer, saltOrRounds: string | number): Promise<string>;
diff --git a/server/src/interfaces/job.interface.ts b/server/src/interfaces/job.interface.ts
index aa3090675e..25959da7ec 100644
--- a/server/src/interfaces/job.interface.ts
+++ b/server/src/interfaces/job.interface.ts
@@ -15,6 +15,7 @@ export enum QueueName {
   SIDECAR = 'sidecar',
   LIBRARY = 'library',
   NOTIFICATION = 'notifications',
+  REPAIR = 'repair',
 }

 export type ConcurrentQueueName = Exclude<
@@ -111,6 +112,9 @@ export enum JobName {

   // Version check
   VERSION_CHECK = 'version-check',
+
+  // Repair
+  REPAIR_VERIFY_CHECKSUM = 'repair-verify-checksum',
 }

 export const JOBS_ASSET_PAGINATION_SIZE = 1000;
@@ -283,7 +287,10 @@ export type JobItem =
   | { name: JobName.NOTIFY_SIGNUP; data: INotifySignupJob }

   // Version check
-  | { name: JobName.VERSION_CHECK; data: IBaseJob };
+  | { name: JobName.VERSION_CHECK; data: IBaseJob }
+
+  // Repairs
+  | { name: JobName.REPAIR_VERIFY_CHECKSUM; data: IEntityJob };

 export enum JobStatus {
   SUCCESS = 'success',
diff --git a/server/src/interfaces/repair.interface.ts b/server/src/interfaces/repair.interface.ts
new file mode 100644
index 0000000000..584f6fb087
--- /dev/null
+++ b/server/src/interfaces/repair.interface.ts
@@ -0,0 +1,9 @@
+import { RepairEntity } from 'src/entities/repair.entity';
+import { Paginated, PaginationOptions } from 'src/utils/pagination';
+
+export const IRepairRepository = 'IRepairRepository';
+
+export interface IRepairRepository {
+  create(repair: Partial<RepairEntity>): Promise<RepairEntity>;
+  getAll(pagination: PaginationOptions): Paginated<RepairEntity>;
+}
diff --git a/server/src/migrations/1728632095015-AddAssetFileChecksum.ts b/server/src/migrations/1728632095015-AddAssetFileChecksum.ts
index eeb3e4740f..aa405dd53b 100644
--- a/server/src/migrations/1728632095015-AddAssetFileChecksum.ts
+++ b/server/src/migrations/1728632095015-AddAssetFileChecksum.ts
@@ -4,7 +4,7 @@ export class AssetFileChecksum1728632095015 implements MigrationInterface {
   name = 'AssetFileChecksum1728632095015';

   public async up(queryRunner: QueryRunner): Promise<void> {
-    await queryRunner.query(`ALTER TABLE "asset_files" ADD "checksum" bytea NULL`);
+    await queryRunner.query(`ALTER TABLE "asset_files" ADD "checksum" bytea`);
     await queryRunner.query(`CREATE INDEX "IDX_c946066edd16cfa5c25a26aa8e" ON "asset_files" ("checksum")`);
   }

diff --git a/server/src/queries/asset.repository.sql b/server/src/queries/asset.repository.sql
index ca5acf5de4..767ff45067 100644
--- a/server/src/queries/asset.repository.sql
+++ b/server/src/queries/asset.repository.sql
@@ -1134,7 +1134,8 @@ SET
 RETURNING
   "id",
   "createdAt",
-  "updatedAt"
+  "updatedAt",
+  "checksum"

-- AssetRepository.upsertFiles
INSERT INTO
@@ -1159,4 +1160,5 @@ SET
 RETURNING
   "id",
   "createdAt",
-  "updatedAt"
+  "updatedAt",
+  "checksum"
diff --git a/server/src/repositories/asset.repository.ts b/server/src/repositories/asset.repository.ts
index f6a1be504a..0617661919 100644
--- a/server/src/repositories/asset.repository.ts
+++ b/server/src/repositories/asset.repository.ts
@@ -767,6 +767,17 @@ export class AssetRepository implements IAssetRepository {
     return builder.getMany();
   }

+  async removeFile(assetId: string, type: AssetFileType): Promise<void> {
+    await this.fileRepository.delete({ assetId, type });
+  }
+
+  async getFileById(assetFileId: string): Promise<AssetFileEntity | null> {
+    return this.fileRepository.findOne({
+      where: { id: assetFileId },
+      withDeleted: true,
+    });
+  }
+
   @GenerateSql({ params: [{ assetId: DummyValue.UUID, type: AssetFileType.PREVIEW, path: '/path/to/file' }] })
   async upsertFile(file: { assetId: string; type: AssetFileType; path: string; checksum?: Buffer }): Promise<void> {
     await this.fileRepository.upsert(file, { conflictPaths: ['assetId', 'type'] });
diff --git a/server/src/repositories/crypto.repository.ts b/server/src/repositories/crypto.repository.ts
index 96a4f93d0b..0ce9f9c56a 100644
--- a/server/src/repositories/crypto.repository.ts
+++ b/server/src/repositories/crypto.repository.ts
@@ -33,6 +33,16 @@ export class CryptoRepository implements ICryptoRepository {
     return Buffer.from(xxh3.Xxh3.withSeed().update(value).digest().toString(16), 'utf8');
   }

+  xxHashFile(filepath: string | Buffer): Promise<Buffer> {
+    return new Promise<Buffer>((resolve, reject) => {
+      const hash = xxh3.Xxh3.withSeed();
+      const stream = createReadStream(filepath);
+      stream.on('error', (error) => reject(error));
+      stream.on('data', (chunk) => hash.update(chunk));
+      stream.on('end', () => resolve(Buffer.from(hash.digest().toString(16), 'utf8')));
+    });
+  }
+
   verifySha256(value: string, encryptedValue: string, publicKey: string) {
     const publicKeyBuffer = Buffer.from(publicKey, 'base64');
     const cryptoPublicKey = createPublicKey({
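For context, a minimal usage sketch of the streaming xxHashFile helper added above: it hashes the file chunk by chunk, so large originals are never loaded into memory, and the result can be compared to a stored checksum with Buffer.equals. The function below and its parameters are illustrative assumptions, not part of this change:

import { CryptoRepository } from 'src/repositories/crypto.repository';

// Illustrative only: returns true when the file on disk still matches its stored checksum.
const fileMatchesChecksum = async (crypto: CryptoRepository, filePath: string, stored: Buffer): Promise<boolean> => {
  const current = await crypto.xxHashFile(filePath);
  return current.equals(stored);
};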
diff --git a/server/src/repositories/index.ts b/server/src/repositories/index.ts
index 5bf08d0d78..527855c191 100644
--- a/server/src/repositories/index.ts
+++ b/server/src/repositories/index.ts
@@ -23,9 +23,11 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
 import { IOAuthRepository } from 'src/interfaces/oauth.interface';
 import { IPartnerRepository } from 'src/interfaces/partner.interface';
 import { IPersonRepository } from 'src/interfaces/person.interface';
+import { IRepairRepository } from 'src/interfaces/repair.interface';
 import { ISearchRepository } from 'src/interfaces/search.interface';
 import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
 import { ISessionRepository } from 'src/interfaces/session.interface';
+
 import { ISharedLinkRepository } from 'src/interfaces/shared-link.interface';
 import { IStackRepository } from 'src/interfaces/stack.interface';
 import { IStorageRepository } from 'src/interfaces/storage.interface';
@@ -60,6 +62,7 @@ import { NotificationRepository } from 'src/repositories/notification.repository';
 import { OAuthRepository } from 'src/repositories/oauth.repository';
 import { PartnerRepository } from 'src/repositories/partner.repository';
 import { PersonRepository } from 'src/repositories/person.repository';
+import { RepairRepository } from 'src/repositories/repair.repository';
 import { SearchRepository } from 'src/repositories/search.repository';
 import { ServerInfoRepository } from 'src/repositories/server-info.repository';
 import { SessionRepository } from 'src/repositories/session.repository';
@@ -109,6 +112,7 @@ export const repositories = [
   { provide: ITagRepository, useClass: TagRepository },
   { provide: ITrashRepository, useClass: TrashRepository },
   { provide: IUserRepository, useClass: UserRepository },
+  { provide: IRepairRepository, useClass: RepairRepository },
   { provide: IVersionHistoryRepository, useClass: VersionHistoryRepository },
   { provide: IViewRepository, useClass: ViewRepository },
 ];
diff --git a/server/src/repositories/job.repository.ts b/server/src/repositories/job.repository.ts
index 3f154ee016..8f88741586 100644
--- a/server/src/repositories/job.repository.ts
+++ b/server/src/repositories/job.repository.ts
@@ -96,6 +96,9 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {

   // Trash
   [JobName.QUEUE_TRASH_EMPTY]: QueueName.BACKGROUND_TASK,
+
+  // Repair
+  [JobName.REPAIR_VERIFY_CHECKSUM]: QueueName.REPAIR,
 };

 @Instrumentation()
diff --git a/server/src/repositories/repair.repository.ts b/server/src/repositories/repair.repository.ts
new file mode 100644
index 0000000000..c123bf6a11
--- /dev/null
+++ b/server/src/repositories/repair.repository.ts
@@ -0,0 +1,27 @@
+import { Injectable } from '@nestjs/common';
+import { InjectRepository } from '@nestjs/typeorm';
+import { RepairEntity } from 'src/entities/repair.entity';
+import { PaginationMode } from 'src/enum';
+import { IRepairRepository } from 'src/interfaces/repair.interface';
+import { Instrumentation } from 'src/utils/instrumentation';
+import { Paginated, paginatedBuilder, PaginationOptions } from 'src/utils/pagination';
+import { Repository } from 'typeorm';
+
+@Instrumentation()
+@Injectable()
+export class RepairRepository implements IRepairRepository {
+  constructor(@InjectRepository(RepairEntity) private repository: Repository<RepairEntity>) {}
+
+  create(repair: Partial<RepairEntity>): Promise<RepairEntity> {
+    return this.repository.save(repair);
+  }
+
+  getAll(pagination: PaginationOptions): Paginated<RepairEntity> {
+    const builder = this.repository.createQueryBuilder('repair');
+    return paginatedBuilder(builder, {
+      mode: PaginationMode.SKIP_TAKE,
+      skip: pagination.skip,
+      take: pagination.take,
+    });
+  }
+}
diff --git a/server/src/services/base.service.ts b/server/src/services/base.service.ts
index 2bb717b45b..16f4ecc91c 100644
--- a/server/src/services/base.service.ts
+++ b/server/src/services/base.service.ts
@@ -26,6 +26,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
 import { IOAuthRepository } from 'src/interfaces/oauth.interface';
 import { IPartnerRepository } from 'src/interfaces/partner.interface';
 import { IPersonRepository } from 'src/interfaces/person.interface';
+import { IRepairRepository } from 'src/interfaces/repair.interface';
 import { ISearchRepository } from 'src/interfaces/search.interface';
 import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
 import { ISessionRepository } from 'src/interfaces/session.interface';
@@ -70,6 +71,7 @@ export class BaseService {
     @Inject(IOAuthRepository) protected oauthRepository: IOAuthRepository,
     @Inject(IPartnerRepository) protected partnerRepository: IPartnerRepository,
     @Inject(IPersonRepository) protected personRepository: IPersonRepository,
+    @Inject(IRepairRepository) protected repairRepository: IRepairRepository,
     @Inject(ISearchRepository) protected searchRepository: ISearchRepository,
     @Inject(IServerInfoRepository) protected serverInfoRepository: IServerInfoRepository,
     @Inject(ISessionRepository) protected sessionRepository: ISessionRepository,
diff --git a/server/src/services/index.ts b/server/src/services/index.ts
index 2cfbdb40c2..b79647009a 100644
--- a/server/src/services/index.ts
+++ b/server/src/services/index.ts
@@ -20,6 +20,7 @@ import { MicroservicesService } from 'src/services/microservices.service';
 import { NotificationService } from
'src/services/notification.service'; import { PartnerService } from 'src/services/partner.service'; import { PersonService } from 'src/services/person.service'; +import { RepairService } from 'src/services/repair.service'; import { SearchService } from 'src/services/search.service'; import { ServerService } from 'src/services/server.service'; import { SessionService } from 'src/services/session.service'; @@ -63,6 +64,7 @@ export const services = [ PartnerService, PersonService, SearchService, + RepairService, ServerService, SessionService, SharedLinkService, diff --git a/server/src/services/library.service.spec.ts b/server/src/services/library.service.spec.ts index 7993c7dacc..b021eedbe9 100644 --- a/server/src/services/library.service.spec.ts +++ b/server/src/services/library.service.spec.ts @@ -119,6 +119,64 @@ describe(LibraryService.name, () => { }); }); + describe('onConfigUpdateEvent', () => { + beforeEach(async () => { + systemMock.get.mockResolvedValue(defaults); + databaseMock.tryLock.mockResolvedValue(true); + await sut.onBootstrap(); + }); + + it('should do nothing if oldConfig is not provided', async () => { + await sut.onConfigUpdate({ newConfig: systemConfigStub.libraryScan as SystemConfig }); + expect(jobMock.updateCronJob).not.toHaveBeenCalled(); + }); + + it('should do nothing if instance does not have the watch lock', async () => { + databaseMock.tryLock.mockResolvedValue(false); + await sut.onBootstrap(); + await sut.onConfigUpdate({ newConfig: systemConfigStub.libraryScan as SystemConfig, oldConfig: defaults }); + expect(jobMock.updateCronJob).not.toHaveBeenCalled(); + }); + + it('should update cron job and enable watching', async () => { + libraryMock.getAll.mockResolvedValue([]); + await sut.onConfigUpdate({ + newConfig: { + library: { ...systemConfigStub.libraryScan.library, ...systemConfigStub.libraryWatchEnabled.library }, + } as SystemConfig, + oldConfig: defaults, + }); + + expect(jobMock.updateCronJob).toHaveBeenCalledWith( + 'libraryScan', + systemConfigStub.libraryScan.library.scan.cronExpression, + systemConfigStub.libraryScan.library.scan.enabled, + ); + }); + + it('should update cron job and disable watching', async () => { + libraryMock.getAll.mockResolvedValue([]); + await sut.onConfigUpdate({ + newConfig: { + library: { ...systemConfigStub.libraryScan.library, ...systemConfigStub.libraryWatchEnabled.library }, + } as SystemConfig, + oldConfig: defaults, + }); + await sut.onConfigUpdate({ + newConfig: { + library: { ...systemConfigStub.libraryScan.library, ...systemConfigStub.libraryWatchDisabled.library }, + } as SystemConfig, + oldConfig: defaults, + }); + + expect(jobMock.updateCronJob).toHaveBeenCalledWith( + 'libraryScan', + systemConfigStub.libraryScan.library.scan.cronExpression, + systemConfigStub.libraryScan.library.scan.enabled, + ); + }); + }); + describe('onConfigValidateEvent', () => { it('should allow a valid cron expression', () => { expect(() => @@ -139,7 +197,7 @@ describe(LibraryService.name, () => { }); }); - describe('handleQueueAssetRefresh', () => { + describe('handleQueueSyncFiles', () => { it('should queue refresh of a new asset', async () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); storageMock.walk.mockImplementation(mockWalk); @@ -559,8 +617,8 @@ describe(LibraryService.name, () => { expect(jobMock.queueAll).not.toHaveBeenCalled(); }); - it('should throw BadRequestException when asset does not exist', async () => { - storageMock.stat.mockRejectedValue(new Error("ENOENT, no such file or directory 
'/data/user1/photo.jpg'")); + it('should fail when the file could not be read', async () => { + storageMock.stat.mockRejectedValue(new Error('Could not read file')); const mockLibraryJob: ILibraryFileJob = { id: libraryStub.externalLibrary1.id, @@ -572,6 +630,27 @@ describe(LibraryService.name, () => { assetMock.create.mockResolvedValue(assetStub.image); await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED); + expect(libraryMock.get).not.toHaveBeenCalled(); + expect(assetMock.create).not.toHaveBeenCalled(); + }); + + it('should skip if the file could not be found', async () => { + const error = new Error('File not found') as any; + error.code = 'ENOENT'; + storageMock.stat.mockRejectedValue(error); + + const mockLibraryJob: ILibraryFileJob = { + id: libraryStub.externalLibrary1.id, + ownerId: userStub.admin.id, + assetPath: '/data/user1/photo.jpg', + }; + + assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null); + assetMock.create.mockResolvedValue(assetStub.image); + + await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED); + expect(libraryMock.get).not.toHaveBeenCalled(); + expect(assetMock.create).not.toHaveBeenCalled(); }); }); @@ -654,6 +733,10 @@ describe(LibraryService.name, () => { expect(libraryMock.getStatistics).toHaveBeenCalledWith(libraryStub.externalLibrary1.id); }); + + it('should throw an error if the library could not be found', async () => { + await expect(sut.getStatistics('foo')).rejects.toBeInstanceOf(BadRequestException); + }); }); describe('create', () => { @@ -783,6 +866,13 @@ describe(LibraryService.name, () => { }); }); + describe('getAll', () => { + it('should get all libraries', async () => { + libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]); + await expect(sut.getAll()).resolves.toEqual([expect.objectContaining({ id: libraryStub.externalLibrary1.id })]); + }); + }); + describe('handleQueueCleanup', () => { it('should queue cleanup jobs', async () => { libraryMock.getAllDeleted.mockResolvedValue([libraryStub.externalLibrary1, libraryStub.externalLibrary2]); @@ -803,15 +893,38 @@ describe(LibraryService.name, () => { await sut.onBootstrap(); }); + it('should throw an error if an import path is invalid', async () => { + libraryMock.update.mockResolvedValue(libraryStub.externalLibrary1); + libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); + + await expect(sut.update('library-id', { importPaths: ['foo/bar'] })).rejects.toBeInstanceOf(BadRequestException); + expect(libraryMock.update).not.toHaveBeenCalled(); + }); + it('should update library', async () => { libraryMock.update.mockResolvedValue(libraryStub.externalLibrary1); libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); - await expect(sut.update('library-id', {})).resolves.toEqual(mapLibrary(libraryStub.externalLibrary1)); + storageMock.stat.mockResolvedValue({ isDirectory: () => true } as Stats); + storageMock.checkFileExists.mockResolvedValue(true); + + await expect(sut.update('library-id', { importPaths: ['foo/bar'] })).resolves.toEqual( + mapLibrary(libraryStub.externalLibrary1), + ); expect(libraryMock.update).toHaveBeenCalledWith(expect.objectContaining({ id: 'library-id' })); }); }); + describe('onShutdown', () => { + it('should do nothing if instance does not have the watch lock', async () => { + await sut.onShutdown(); + }); + }); + describe('watchAll', () => { + it('should return false if instance does not have the watch lock', async () => { + await 
expect(sut.watchAll()).resolves.toBe(false); + }); + describe('watching disabled', () => { beforeEach(async () => { systemMock.get.mockResolvedValue(systemConfigStub.libraryWatchDisabled); @@ -872,6 +985,7 @@ describe(LibraryService.name, () => { it('should handle a new file event', async () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1); libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]); + assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image); storageMock.watch.mockImplementation(makeMockWatcher({ items: [{ event: 'add', value: '/foo/photo.jpg' }] })); await sut.watchAll(); @@ -886,11 +1000,15 @@ describe(LibraryService.name, () => { }, }, ]); + expect(jobMock.queueAll).toHaveBeenCalledWith([ + { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + ]); }); it('should handle a file change event', async () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1); libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]); + assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image); storageMock.watch.mockImplementation( makeMockWatcher({ items: [{ event: 'change', value: '/foo/photo.jpg' }] }), ); @@ -907,6 +1025,24 @@ describe(LibraryService.name, () => { }, }, ]); + expect(jobMock.queueAll).toHaveBeenCalledWith([ + { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + ]); + }); + + it('should handle a file unlink event', async () => { + libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1); + libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]); + assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image); + storageMock.watch.mockImplementation( + makeMockWatcher({ items: [{ event: 'unlink', value: '/foo/photo.jpg' }] }), + ); + + await sut.watchAll(); + + expect(jobMock.queueAll).toHaveBeenCalledWith([ + { name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) }, + ]); }); it('should handle an error event', async () => { @@ -986,15 +1122,14 @@ describe(LibraryService.name, () => { it('should delete an empty library', async () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); assetMock.getAll.mockResolvedValue({ items: [], hasNextPage: false }); - libraryMock.delete.mockImplementation(async () => {}); await expect(sut.handleDeleteLibrary({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SUCCESS); + expect(libraryMock.delete).toHaveBeenCalled(); }); - it('should delete a library with assets', async () => { + it('should delete all assets in a library', async () => { libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1); assetMock.getAll.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false }); - libraryMock.delete.mockImplementation(async () => {}); assetMock.getById.mockResolvedValue(assetStub.image1); @@ -1076,6 +1211,10 @@ describe(LibraryService.name, () => { }); describe('validate', () => { + it('should not require import paths', async () => { + await expect(sut.validate('library-id', {})).resolves.toEqual({ importPaths: [] }); + }); + it('should validate directory', async () => { storageMock.stat.mockResolvedValue({ isDirectory: () => true, diff --git a/server/src/services/library.service.ts b/server/src/services/library.service.ts index 0cd939dd36..0a85750768 100644 --- 
a/server/src/services/library.service.ts
+++ b/server/src/services/library.service.ts
@@ -16,7 +16,7 @@ import {
 } from 'src/dtos/library.dto';
 import { AssetEntity } from 'src/entities/asset.entity';
 import { LibraryEntity } from 'src/entities/library.entity';
-import { AssetType } from 'src/enum';
+import { AssetFileType, AssetType } from 'src/enum';
 import { DatabaseLock } from 'src/interfaces/database.interface';
 import { ArgOf } from 'src/interfaces/event.interface';
 import {
@@ -303,7 +303,6 @@ export class LibraryService extends BaseService {

   async update(id: string, dto: UpdateLibraryDto): Promise<LibraryResponseDto> {
     await this.findOrFail(id);
-    const library = await this.libraryRepository.update({ id, ...dto });

     if (dto.importPaths) {
       const validation = await this.validate(id, { importPaths: dto.importPaths });
@@ -316,6 +315,7 @@ export class LibraryService extends BaseService {
       }
     }

+    const library = await this.libraryRepository.update({ id, ...dto });
     return mapLibrary(library);
   }

@@ -424,6 +424,8 @@ export class LibraryService extends BaseService {
       isExternal: true,
     });

+    await this.assetRepository.upsertFile({ assetId: asset.id, type: AssetFileType.ORIGINAL, path: assetPath });
+
     await this.queuePostSyncJobs(asset);

     return JobStatus.SUCCESS;
@@ -482,6 +484,7 @@ export class LibraryService extends BaseService {
       if (!asset.isOffline) {
         this.logger.debug(`${explanation}, removing: ${asset.originalPath}`);
         await this.assetRepository.updateAll([asset.id], { isOffline: true, deletedAt: new Date() });
+        await this.assetRepository.removeFile(asset.id, AssetFileType.ORIGINAL);
       }
     };

@@ -518,6 +521,12 @@ export class LibraryService extends BaseService {
         fileModifiedAt: mtime,
         originalFileName: parse(asset.originalPath).base,
       });
+
+      await this.assetRepository.upsertFile({
+        assetId: asset.id,
+        type: AssetFileType.ORIGINAL,
+        path: asset.originalPath,
+      });
     }

     if (isAssetModified) {
diff --git a/server/src/services/microservices.service.ts b/server/src/services/microservices.service.ts
index d1d2bb8f20..32a9253aaf 100644
--- a/server/src/services/microservices.service.ts
+++ b/server/src/services/microservices.service.ts
@@ -12,6 +12,7 @@ import { MediaService } from 'src/services/media.service';
 import { MetadataService } from 'src/services/metadata.service';
 import { NotificationService } from 'src/services/notification.service';
 import { PersonService } from 'src/services/person.service';
+import { RepairService } from 'src/services/repair.service';
 import { SessionService } from 'src/services/session.service';
 import { SmartInfoService } from 'src/services/smart-info.service';
 import { StorageTemplateService } from 'src/services/storage-template.service';
@@ -42,6 +43,7 @@ export class MicroservicesService {
     private userService: UserService,
     private duplicateService: DuplicateService,
     private versionService: VersionService,
+    private repairService: RepairService,
   ) {}

   @OnEvent({ name: 'app.bootstrap' })
@@ -99,6 +101,7 @@ export class MicroservicesService {
       [JobName.TAG_CLEANUP]: () => this.tagService.handleTagCleanup(),
       [JobName.VERSION_CHECK]: () => this.versionService.handleVersionCheck(),
       [JobName.QUEUE_TRASH_EMPTY]: () => this.trashService.handleQueueEmptyTrash(),
+      [JobName.REPAIR_VERIFY_CHECKSUM]: (data) => this.repairService.handleVerifyChecksum(data),
     });
   }
 }
diff --git a/server/src/services/repair.service.ts b/server/src/services/repair.service.ts
new file mode 100644
index 0000000000..25a7293a05
--- /dev/null
+++ b/server/src/services/repair.service.ts
@@ -0,0 +1,64 @@
+import { Injectable } from '@nestjs/common';
+import { AuthDto } from 'src/dtos/auth.dto';
+import { RepairEntity } from 'src/entities/repair.entity';
+import { RepairType } from 'src/enum';
+import { IEntityJob, JobName, JOBS_ASSET_PAGINATION_SIZE, JobStatus } from 'src/interfaces/job.interface';
+import { BaseService } from 'src/services/base.service';
+import { usePagination } from 'src/utils/pagination';
+
+@Injectable()
+export class RepairService extends BaseService {
+  async handleVerifyChecksum(job: IEntityJob): Promise<JobStatus> {
+    const assetFile = await this.assetRepository.getFileById(job.id);
+    if (!assetFile) {
+      this.logger.error(`Asset file not found for id: ${job.id}`);
+      return JobStatus.FAILED;
+    }
+
+    if (!assetFile.checksum) {
+      this.logger.error(`Asset file has no checksum, cannot verify: ${job.id}`);
+      return JobStatus.FAILED;
+    }
+    const currentChecksum = await this.cryptoRepository.xxHashFile(assetFile.path);
+    if (currentChecksum.equals(assetFile.checksum)) {
+      this.logger.log(`Asset file checksum verified: ${job.id}`);
+    } else {
+      this.logger.error(`Asset file checksum mismatch: ${job.id}`);
+      await this.repairRepository.create({ assetFile, path: assetFile.path, type: RepairType.CHECKSUM_MISMATCH });
+    }
+    return JobStatus.SUCCESS;
+  }
+
+  async queueVerifyChecksums(auth: AuthDto): Promise<void> {
+    const onlineAssets = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
+      this.assetRepository.getAll(pagination),
+    );
+
+    for await (const assets of onlineAssets) {
+      const fileIds = assets
+        .map((asset) => asset.files)
+        .flat()
+        .filter((file) => file.checksum)
+        .map((file) => file.id);
+
+      await this.jobRepository.queueAll(
+        fileIds.map((id) => ({
+          name: JobName.REPAIR_VERIFY_CHECKSUM,
+          data: { id },
+        })),
+      );
+    }
+  }
+
+  async getRepairs(auth: AuthDto): Promise<RepairEntity[]> {
+    let repairs: RepairEntity[] = [];
+    const repairPages = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
+      this.repairRepository.getAll(pagination),
+    );
+
+    for await (const repairPage of repairPages) {
+      repairs = repairs.concat(repairPage);
+    }
+    return repairs;
+  }
+}
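A closing design note: RepairController returns RepairEntity objects directly, whereas other endpoints in this codebase map entities to response DTOs (for example, mapLibrary above). Purely as a sketch of that option — RepairResponseDto and mapRepair are invented names and not part of this change:

import { RepairEntity } from 'src/entities/repair.entity';
import { RepairType } from 'src/enum';

// Hypothetical response shape; fields mirror RepairEntity.
export class RepairResponseDto {
  id!: string;
  createdAt!: Date;
  type!: RepairType;
  path!: string;
  assetFileId?: string;
}

// Hypothetical mapper in the spirit of mapLibrary.
export const mapRepair = (repair: RepairEntity): RepairResponseDto => ({
  id: repair.id,
  createdAt: repair.createdAt,
  type: repair.type,
  path: repair.path,
  assetFileId: repair.assetFile?.id,
});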