0
Fork 0
mirror of https://github.com/immich-app/immich.git synced 2024-12-31 00:43:56 -05:00

Merge remote-tracking branch 'origin' into feat/xxhash

This commit is contained in:
Jonathan Jogenfors 2024-10-11 23:04:31 +02:00
commit d75ee28b1f
28 changed files with 488 additions and 20 deletions

2
.gitignore vendored
View file

@ -21,3 +21,5 @@ mobile/openapi/.openapi-generator/FILES
open-api/typescript-sdk/build
mobile/android/fastlane/report.xml
mobile/ios/fastlane/report.xml
vite.config.js.timestamp-*

View file

@ -500,13 +500,13 @@ describe('/libraries', () => {
});
it('should set an asset offline its file is not in any import path', async () => {
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');

View file

@ -0,0 +1,95 @@
import { LibraryResponseDto, LoginResponseDto, getAllLibraries, scanLibrary } from '@immich/sdk';
import { readFile } from 'fs/promises';
import { Socket } from 'socket.io-client';
import { userDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, testAssetDir, testAssetDirInternal, utils } from 'src/utils';
import request from 'supertest';
import { utimes } from 'utimes';
import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest';
import { b } from 'vitest/dist/chunks/suite.BMWOKiTe.js';
const scan = async (accessToken: string, id: string) => scanLibrary({ id }, { headers: asBearerAuth(accessToken) });
describe('/repair', () => {
let admin: LoginResponseDto;
let user: LoginResponseDto;
let library: LibraryResponseDto;
let websocket: Socket;
beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup();
await utils.resetAdminConfig(admin.accessToken);
user = await utils.userSetup(admin.accessToken, userDto.user1);
library = await utils.createLibrary(admin.accessToken, { ownerId: admin.userId });
websocket = await utils.connectWebsocket(admin.accessToken);
});
afterAll(() => {
utils.disconnectWebsocket(websocket);
utils.resetTempFolder();
});
beforeEach(() => {
utils.resetEvents();
});
describe('POST /check', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/libraries').send({});
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require admin authentication', async () => {
const { status, body } = await request(app)
.post('/repair/check')
.set('Authorization', `Bearer ${user.accessToken}`)
.send({ ownerId: admin.userId });
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});
it('should detect a changed original file', async () => {
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename: 'polemonium_reptans.jpg',
bytes: await readFile(`${testAssetDir}/albums/nature/polemonium_reptans.jpg`),
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
let assetPath = '';
{
const { status, body } = await request(app)
.get(`/assets/${asset.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
assetPath = body.originalPath;
}
utils.flipBitInFile(assetPath, 2, 5);
const { status, body } = await request(app)
.post('/repair/check')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ ownerId: admin.userId });
expect(status).toBe(201);
expect(body).toEqual(
expect.arrayContaining([
expect.objectContaining({
ownerId: admin.userId,
name: 'New External Library',
refreshedAt: null,
assetCount: 0,
importPaths: [],
exclusionPatterns: expect.any(Array),
}),
]);
});
});
});

View file

@ -37,7 +37,7 @@ import {
import { BrowserContext } from '@playwright/test';
import { exec, spawn } from 'node:child_process';
import { createHash } from 'node:crypto';
import { existsSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import path, { dirname } from 'node:path';
import { setTimeout as setAsyncTimeout } from 'node:timers/promises';
@ -374,8 +374,8 @@ export const utils = {
},
createDirectory: (path: string) => {
if (!existsSync(dirname(path))) {
mkdirSync(dirname(path), { recursive: true });
if (!existsSync(path)) {
mkdirSync(path, { recursive: true });
}
},
@ -387,12 +387,26 @@ export const utils = {
rmSync(path);
},
// Corrupt a single bit of a file on disk — used by tests to simulate checksum drift.
flipBitInFile: (filePath: string, byteIndex: number, bitPosition: number) => {
  const data = readFileSync(filePath);

  // Bug fix: also reject negative indices; Buffer silently ignores out-of-range writes,
  // which would make the test corruption a no-op.
  if (byteIndex < 0 || byteIndex >= data.length) {
    throw new Error('Byte index is out of range.');
  }

  if (bitPosition < 0 || bitPosition > 7) {
    throw new Error('Bit position must be between 0 and 7.');
  }

  // XOR with a single-bit mask toggles exactly that bit.
  data[byteIndex] ^= 1 << bitPosition;

  writeFileSync(filePath, data);
},
removeDirectory: (path: string) => {
if (!existsSync(path)) {
return;
}
rmSync(path);
rmSync(path, { recursive: true });
},
getAssetInfo: (accessToken: string, id: string) => getAssetInfo({ id }, { headers: asBearerAuth(accessToken) }),

View file

@ -195,6 +195,7 @@ export const defaults = Object.freeze<SystemConfig>({
[QueueName.THUMBNAIL_GENERATION]: { concurrency: 3 },
[QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
[QueueName.NOTIFICATION]: { concurrency: 5 },
[QueueName.REPAIR]: { concurrency: 2 },
},
logging: {
enabled: true,

View file

@ -0,0 +1,24 @@
import { Controller, Get, Post } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { AuthDto } from 'src/dtos/auth.dto';
import { RepairEntity } from 'src/entities/repair.entity';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { RepairService } from 'src/services/repair.service';
// NOTE(review): the e2e tests call POST /repair/check while this controller is mounted
// at 'repairs' — confirm which route is intended before release.
@ApiTags('Repairs')
@Controller('repairs')
export class RepairController {
  constructor(private service: RepairService) {}

  /** List all recorded repair entries. Admin only. */
  @Get()
  @Authenticated({ admin: true })
  getRepairs(@Auth() auth: AuthDto): Promise<RepairEntity[]> {
    return this.service.getRepairs(auth);
  }

  /**
   * Trigger checksum verification across asset files, then return the repair entries.
   * Bug fix: this endpoint previously only returned existing repairs and never queued
   * any verification work — the service's queueVerifyChecksums was never invoked.
   */
  @Post('/check')
  @Authenticated({ admin: true })
  async validateChecksums(@Auth() auth: AuthDto): Promise<RepairEntity[]> {
    await this.service.queueVerifyChecksums(auth);
    return this.service.getRepairs(auth);
  }
}

View file

@ -97,4 +97,7 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>
@ApiProperty({ type: JobStatusDto })
[QueueName.NOTIFICATION]!: JobStatusDto;
@ApiProperty({ type: JobStatusDto })
[QueueName.REPAIR]!: JobStatusDto;
}

View file

@ -218,6 +218,12 @@ class SystemConfigJobDto implements Record<ConcurrentQueueName, JobSettingsDto>
@IsObject()
@Type(() => JobSettingsDto)
[QueueName.NOTIFICATION]!: JobSettingsDto;
@ApiProperty({ type: JobSettingsDto })
@ValidateNested()
@IsObject()
@Type(() => JobSettingsDto)
[QueueName.REPAIR]!: JobSettingsDto;
}
class SystemConfigLibraryScanDto {

View file

@ -36,7 +36,7 @@ export class AssetFileEntity {
@Column()
path!: string;
@Column({ type: 'bytea' })
@Column({ type: 'bytea', nullable: true, default: null })
@Index()
checksum!: Buffer | null;
}

View file

@ -0,0 +1,21 @@
import { AssetFileEntity } from 'src/entities/asset-files.entity';
import { RepairType } from 'src/enum';
import { Column, CreateDateColumn, Entity, ManyToOne, PrimaryGeneratedColumn, Unique } from 'typeorm';
// A detected integrity problem for an asset file. One row is created per failed
// verification (see RepairService.handleVerifyChecksum).
@Entity('repair')
export class RepairEntity {
  @PrimaryGeneratedColumn('uuid')
  id!: string;

  // File whose on-disk content diverged from its stored checksum.
  // CASCADE: repair rows are removed/updated together with their asset-file row.
  @ManyToOne(() => AssetFileEntity, { onDelete: 'CASCADE', onUpdate: 'CASCADE' })
  assetFile!: AssetFileEntity;

  @CreateDateColumn({ type: 'timestamptz' })
  createdAt!: Date;

  // Kind of problem detected (currently only RepairType.CHECKSUM_MISMATCH).
  @Column()
  type!: RepairType;

  // Path of the affected file at the time the problem was detected.
  // Required column — inserts must always supply it.
  @Column()
  path!: string;
}

View file

@ -16,6 +16,10 @@ export enum AssetFileType {
THUMBNAIL = 'thumbnail',
}
export enum RepairType {
CHECKSUM_MISMATCH = 'checksum-mismatch',
}
export enum AlbumUserRole {
EDITOR = 'editor',
VIEWER = 'viewer',

View file

@ -1,3 +1,4 @@
import { AssetFileEntity } from 'src/entities/asset-files.entity';
import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
@ -195,6 +196,8 @@ export interface IAssetRepository {
getDuplicates(options: AssetBuilderOptions): Promise<AssetEntity[]>;
getAllForUserFullSync(options: AssetFullSyncOptions): Promise<AssetEntity[]>;
getChangedDeltaSync(options: AssetDeltaSyncOptions): Promise<AssetEntity[]>;
getFileById(assetFileId: string): Promise<AssetFileEntity | null>;
removeFile(assetId: string, type: AssetFileType): Promise<void>;
upsertFile(file: UpsertFileOptions): Promise<void>;
upsertFiles(files: UpsertFileOptions[]): Promise<void>;
}

View file

@ -6,6 +6,7 @@ export interface ICryptoRepository {
hashFile(filePath: string | Buffer): Promise<Buffer>;
hashSha256(data: string): string;
xxHash(value: string): Buffer;
xxHashFile(filePath: string | Buffer): Promise<Buffer>;
verifySha256(data: string, encrypted: string, publicKey: string): boolean;
hashSha1(data: string | Buffer): Buffer;
hashBcrypt(data: string | Buffer, saltOrRounds: string | number): Promise<string>;

View file

@ -15,6 +15,7 @@ export enum QueueName {
SIDECAR = 'sidecar',
LIBRARY = 'library',
NOTIFICATION = 'notifications',
REPAIR = 'repair',
}
export type ConcurrentQueueName = Exclude<
@ -111,6 +112,9 @@ export enum JobName {
// Version check
VERSION_CHECK = 'version-check',
// REPAIR
REPAIR_VERIFY_CHECKSUM = 'repair-verify-checksum',
}
export const JOBS_ASSET_PAGINATION_SIZE = 1000;
@ -283,7 +287,10 @@ export type JobItem =
| { name: JobName.NOTIFY_SIGNUP; data: INotifySignupJob }
// Version check
| { name: JobName.VERSION_CHECK; data: IBaseJob };
| { name: JobName.VERSION_CHECK; data: IBaseJob }
// Repairs
| { name: JobName.REPAIR_VERIFY_CHECKSUM; data: IEntityJob };
export enum JobStatus {
SUCCESS = 'success',

View file

@ -0,0 +1,9 @@
import { RepairEntity } from 'src/entities/repair.entity';
import { Paginated, PaginationOptions } from 'src/utils/pagination';
// Injection token — NestJS cannot inject by interface type alone.
export const IRepairRepository = 'IRepairRepository';

// Persistence operations for repair entries (see RepairEntity).
export interface IRepairRepository {
  // Persist a new repair entry; returns the saved entity.
  create(repair: Partial<RepairEntity>): Promise<RepairEntity>;
  // Page through all repair entries.
  getAll(pagination: PaginationOptions): Paginated<RepairEntity>;
}

View file

@ -4,7 +4,7 @@ export class AssetFileChecksum1728632095015 implements MigrationInterface {
name = 'AssetFileChecksum1728632095015';
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`ALTER TABLE "asset_files" ADD "checksum" bytea NULL`);
await queryRunner.query(`ALTER TABLE "asset_files" ADD "checksum" bytea`);
await queryRunner.query(`CREATE INDEX "IDX_c946066edd16cfa5c25a26aa8e" ON "asset_files" ("checksum")`);
}

View file

@ -1134,7 +1134,8 @@ SET
RETURNING
"id",
"createdAt",
"updatedAt"
"updatedAt",
"checksum"
-- AssetRepository.upsertFiles
INSERT INTO
@ -1159,4 +1160,5 @@ SET
RETURNING
"id",
"createdAt",
"updatedAt"
"updatedAt",
"checksum"

View file

@ -767,6 +767,17 @@ export class AssetRepository implements IAssetRepository {
return builder.getMany();
}
async removeFile(assetId: string, type: AssetFileType): Promise<void> {
await this.fileRepository.delete({ assetId, type });
}
async getFileById(assetFileId: string): Promise<AssetFileEntity | null> {
return this.fileRepository.findOne({
where: { id: assetFileId },
withDeleted: true,
});
}
@GenerateSql({ params: [{ assetId: DummyValue.UUID, type: AssetFileType.PREVIEW, path: '/path/to/file' }] })
async upsertFile(file: { assetId: string; type: AssetFileType; path: string; checksum?: Buffer }): Promise<void> {
await this.fileRepository.upsert(file, { conflictPaths: ['assetId', 'type'] });

View file

@ -33,6 +33,16 @@ export class CryptoRepository implements ICryptoRepository {
return Buffer.from(xxh3.Xxh3.withSeed().update(value).digest().toString(16), 'utf8');
}
// Hash a file's contents with xxh3 by streaming it from disk, so large files are
// never loaded into memory at once. Rejects if the file cannot be read.
// NOTE(review): like xxHash() above, the digest is returned as the UTF-8 bytes of its
// hex string, and toString(16) does not zero-pad — so the buffer length can vary with
// leading zeros. Confirm this matches how checksums are stored and compared elsewhere.
xxHashFile(filepath: string | Buffer): Promise<Buffer> {
  return new Promise<Buffer>((resolve, reject) => {
    const hash = xxh3.Xxh3.withSeed();
    const stream = createReadStream(filepath);
    // Propagate read failures (e.g. missing file) to the caller.
    stream.on('error', (error) => reject(error));
    stream.on('data', (chunk) => hash.update(chunk));
    stream.on('end', () => resolve(Buffer.from(hash.digest().toString(16), 'utf8')));
  });
}
verifySha256(value: string, encryptedValue: string, publicKey: string) {
const publicKeyBuffer = Buffer.from(publicKey, 'base64');
const cryptoPublicKey = createPublicKey({

View file

@ -23,9 +23,11 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
import { IPartnerRepository } from 'src/interfaces/partner.interface';
import { IPersonRepository } from 'src/interfaces/person.interface';
import { IRepairRepository } from 'src/interfaces/repair.interface';
import { ISearchRepository } from 'src/interfaces/search.interface';
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
import { ISessionRepository } from 'src/interfaces/session.interface';
import { ISharedLinkRepository } from 'src/interfaces/shared-link.interface';
import { IStackRepository } from 'src/interfaces/stack.interface';
import { IStorageRepository } from 'src/interfaces/storage.interface';
@ -60,6 +62,7 @@ import { NotificationRepository } from 'src/repositories/notification.repository
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { RepairRepository } from 'src/repositories/repair.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
import { SessionRepository } from 'src/repositories/session.repository';
@ -109,6 +112,7 @@ export const repositories = [
{ provide: ITagRepository, useClass: TagRepository },
{ provide: ITrashRepository, useClass: TrashRepository },
{ provide: IUserRepository, useClass: UserRepository },
{ provide: IRepairRepository, useClass: RepairRepository },
{ provide: IVersionHistoryRepository, useClass: VersionHistoryRepository },
{ provide: IViewRepository, useClass: ViewRepository },
];

View file

@ -96,6 +96,9 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
// Trash
[JobName.QUEUE_TRASH_EMPTY]: QueueName.BACKGROUND_TASK,
// Repair
[JobName.REPAIR_VERIFY_CHECKSUM]: QueueName.REPAIR,
};
@Instrumentation()

View file

@ -0,0 +1,27 @@
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { RepairEntity } from 'src/entities/repair.entity';
import { PaginationMode } from 'src/enum';
import { IRepairRepository } from 'src/interfaces/repair.interface';
import { Instrumentation } from 'src/utils/instrumentation';
import { Paginated, paginatedBuilder, PaginationOptions } from 'src/utils/pagination';
import { Repository } from 'typeorm';
// TypeORM-backed implementation of IRepairRepository.
@Instrumentation()
@Injectable()
export class RepairRepository implements IRepairRepository {
  constructor(@InjectRepository(RepairEntity) private repository: Repository<RepairEntity>) {}

  // Persist a new repair entry and return the saved entity.
  create(repair: Partial<RepairEntity>): Promise<RepairEntity> {
    return this.repository.save(repair);
  }

  // Page through all repair entries with skip/take pagination.
  // NOTE(review): no ORDER BY is applied, so page contents are not guaranteed to be
  // stable across requests — confirm whether ordering (e.g. by createdAt) is needed.
  getAll(pagination: PaginationOptions): Paginated<RepairEntity> {
    const builder = this.repository.createQueryBuilder('repair');
    return paginatedBuilder<RepairEntity>(builder, {
      mode: PaginationMode.SKIP_TAKE,
      skip: pagination.skip,
      take: pagination.take,
    });
  }
}

View file

@ -26,6 +26,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
import { IPartnerRepository } from 'src/interfaces/partner.interface';
import { IPersonRepository } from 'src/interfaces/person.interface';
import { IRepairRepository } from 'src/interfaces/repair.interface';
import { ISearchRepository } from 'src/interfaces/search.interface';
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
import { ISessionRepository } from 'src/interfaces/session.interface';
@ -70,6 +71,7 @@ export class BaseService {
@Inject(IOAuthRepository) protected oauthRepository: IOAuthRepository,
@Inject(IPartnerRepository) protected partnerRepository: IPartnerRepository,
@Inject(IPersonRepository) protected personRepository: IPersonRepository,
@Inject(IRepairRepository) protected repairRepository: IRepairRepository,
@Inject(ISearchRepository) protected searchRepository: ISearchRepository,
@Inject(IServerInfoRepository) protected serverInfoRepository: IServerInfoRepository,
@Inject(ISessionRepository) protected sessionRepository: ISessionRepository,

View file

@ -20,6 +20,7 @@ import { MicroservicesService } from 'src/services/microservices.service';
import { NotificationService } from 'src/services/notification.service';
import { PartnerService } from 'src/services/partner.service';
import { PersonService } from 'src/services/person.service';
import { RepairService } from 'src/services/repair.service';
import { SearchService } from 'src/services/search.service';
import { ServerService } from 'src/services/server.service';
import { SessionService } from 'src/services/session.service';
@ -63,6 +64,7 @@ export const services = [
PartnerService,
PersonService,
SearchService,
RepairService,
ServerService,
SessionService,
SharedLinkService,

View file

@ -119,6 +119,64 @@ describe(LibraryService.name, () => {
});
});
describe('onConfigUpdateEvent', () => {
beforeEach(async () => {
systemMock.get.mockResolvedValue(defaults);
databaseMock.tryLock.mockResolvedValue(true);
await sut.onBootstrap();
});
it('should do nothing if oldConfig is not provided', async () => {
await sut.onConfigUpdate({ newConfig: systemConfigStub.libraryScan as SystemConfig });
expect(jobMock.updateCronJob).not.toHaveBeenCalled();
});
it('should do nothing if instance does not have the watch lock', async () => {
databaseMock.tryLock.mockResolvedValue(false);
await sut.onBootstrap();
await sut.onConfigUpdate({ newConfig: systemConfigStub.libraryScan as SystemConfig, oldConfig: defaults });
expect(jobMock.updateCronJob).not.toHaveBeenCalled();
});
it('should update cron job and enable watching', async () => {
libraryMock.getAll.mockResolvedValue([]);
await sut.onConfigUpdate({
newConfig: {
library: { ...systemConfigStub.libraryScan.library, ...systemConfigStub.libraryWatchEnabled.library },
} as SystemConfig,
oldConfig: defaults,
});
expect(jobMock.updateCronJob).toHaveBeenCalledWith(
'libraryScan',
systemConfigStub.libraryScan.library.scan.cronExpression,
systemConfigStub.libraryScan.library.scan.enabled,
);
});
it('should update cron job and disable watching', async () => {
libraryMock.getAll.mockResolvedValue([]);
await sut.onConfigUpdate({
newConfig: {
library: { ...systemConfigStub.libraryScan.library, ...systemConfigStub.libraryWatchEnabled.library },
} as SystemConfig,
oldConfig: defaults,
});
await sut.onConfigUpdate({
newConfig: {
library: { ...systemConfigStub.libraryScan.library, ...systemConfigStub.libraryWatchDisabled.library },
} as SystemConfig,
oldConfig: defaults,
});
expect(jobMock.updateCronJob).toHaveBeenCalledWith(
'libraryScan',
systemConfigStub.libraryScan.library.scan.cronExpression,
systemConfigStub.libraryScan.library.scan.enabled,
);
});
});
describe('onConfigValidateEvent', () => {
it('should allow a valid cron expression', () => {
expect(() =>
@ -139,7 +197,7 @@ describe(LibraryService.name, () => {
});
});
describe('handleQueueAssetRefresh', () => {
describe('handleQueueSyncFiles', () => {
it('should queue refresh of a new asset', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
storageMock.walk.mockImplementation(mockWalk);
@ -559,8 +617,8 @@ describe(LibraryService.name, () => {
expect(jobMock.queueAll).not.toHaveBeenCalled();
});
it('should throw BadRequestException when asset does not exist', async () => {
storageMock.stat.mockRejectedValue(new Error("ENOENT, no such file or directory '/data/user1/photo.jpg'"));
it('should fail when the file could not be read', async () => {
storageMock.stat.mockRejectedValue(new Error('Could not read file'));
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
@ -572,6 +630,27 @@ describe(LibraryService.name, () => {
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
expect(libraryMock.get).not.toHaveBeenCalled();
expect(assetMock.create).not.toHaveBeenCalled();
});
it('should skip if the file could not be found', async () => {
const error = new Error('File not found') as any;
error.code = 'ENOENT';
storageMock.stat.mockRejectedValue(error);
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: userStub.admin.id,
assetPath: '/data/user1/photo.jpg',
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
expect(libraryMock.get).not.toHaveBeenCalled();
expect(assetMock.create).not.toHaveBeenCalled();
});
});
@ -654,6 +733,10 @@ describe(LibraryService.name, () => {
expect(libraryMock.getStatistics).toHaveBeenCalledWith(libraryStub.externalLibrary1.id);
});
it('should throw an error if the library could not be found', async () => {
await expect(sut.getStatistics('foo')).rejects.toBeInstanceOf(BadRequestException);
});
});
describe('create', () => {
@ -783,6 +866,13 @@ describe(LibraryService.name, () => {
});
});
describe('getAll', () => {
it('should get all libraries', async () => {
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]);
await expect(sut.getAll()).resolves.toEqual([expect.objectContaining({ id: libraryStub.externalLibrary1.id })]);
});
});
describe('handleQueueCleanup', () => {
it('should queue cleanup jobs', async () => {
libraryMock.getAllDeleted.mockResolvedValue([libraryStub.externalLibrary1, libraryStub.externalLibrary2]);
@ -803,15 +893,38 @@ describe(LibraryService.name, () => {
await sut.onBootstrap();
});
it('should throw an error if an import path is invalid', async () => {
libraryMock.update.mockResolvedValue(libraryStub.externalLibrary1);
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
await expect(sut.update('library-id', { importPaths: ['foo/bar'] })).rejects.toBeInstanceOf(BadRequestException);
expect(libraryMock.update).not.toHaveBeenCalled();
});
it('should update library', async () => {
libraryMock.update.mockResolvedValue(libraryStub.externalLibrary1);
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
await expect(sut.update('library-id', {})).resolves.toEqual(mapLibrary(libraryStub.externalLibrary1));
storageMock.stat.mockResolvedValue({ isDirectory: () => true } as Stats);
storageMock.checkFileExists.mockResolvedValue(true);
await expect(sut.update('library-id', { importPaths: ['foo/bar'] })).resolves.toEqual(
mapLibrary(libraryStub.externalLibrary1),
);
expect(libraryMock.update).toHaveBeenCalledWith(expect.objectContaining({ id: 'library-id' }));
});
});
describe('onShutdown', () => {
it('should do nothing if instance does not have the watch lock', async () => {
await sut.onShutdown();
});
});
describe('watchAll', () => {
it('should return false if instance does not have the watch lock', async () => {
await expect(sut.watchAll()).resolves.toBe(false);
});
describe('watching disabled', () => {
beforeEach(async () => {
systemMock.get.mockResolvedValue(systemConfigStub.libraryWatchDisabled);
@ -872,6 +985,7 @@ describe(LibraryService.name, () => {
it('should handle a new file event', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
storageMock.watch.mockImplementation(makeMockWatcher({ items: [{ event: 'add', value: '/foo/photo.jpg' }] }));
await sut.watchAll();
@ -886,11 +1000,15 @@ describe(LibraryService.name, () => {
},
},
]);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) },
]);
});
it('should handle a file change event', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
storageMock.watch.mockImplementation(
makeMockWatcher({ items: [{ event: 'change', value: '/foo/photo.jpg' }] }),
);
@ -907,6 +1025,24 @@ describe(LibraryService.name, () => {
},
},
]);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) },
]);
});
it('should handle a file unlink event', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
storageMock.watch.mockImplementation(
makeMockWatcher({ items: [{ event: 'unlink', value: '/foo/photo.jpg' }] }),
);
await sut.watchAll();
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.LIBRARY_SYNC_ASSET, data: expect.objectContaining({ id: assetStub.image.id }) },
]);
});
it('should handle an error event', async () => {
@ -986,15 +1122,14 @@ describe(LibraryService.name, () => {
it('should delete an empty library', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
assetMock.getAll.mockResolvedValue({ items: [], hasNextPage: false });
libraryMock.delete.mockImplementation(async () => {});
await expect(sut.handleDeleteLibrary({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SUCCESS);
expect(libraryMock.delete).toHaveBeenCalled();
});
it('should delete a library with assets', async () => {
it('should delete all assets in a library', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
assetMock.getAll.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false });
libraryMock.delete.mockImplementation(async () => {});
assetMock.getById.mockResolvedValue(assetStub.image1);
@ -1076,6 +1211,10 @@ describe(LibraryService.name, () => {
});
describe('validate', () => {
it('should not require import paths', async () => {
await expect(sut.validate('library-id', {})).resolves.toEqual({ importPaths: [] });
});
it('should validate directory', async () => {
storageMock.stat.mockResolvedValue({
isDirectory: () => true,

View file

@ -16,7 +16,7 @@ import {
} from 'src/dtos/library.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { LibraryEntity } from 'src/entities/library.entity';
import { AssetType } from 'src/enum';
import { AssetFileType, AssetType } from 'src/enum';
import { DatabaseLock } from 'src/interfaces/database.interface';
import { ArgOf } from 'src/interfaces/event.interface';
import {
@ -303,7 +303,6 @@ export class LibraryService extends BaseService {
async update(id: string, dto: UpdateLibraryDto): Promise<LibraryResponseDto> {
await this.findOrFail(id);
const library = await this.libraryRepository.update({ id, ...dto });
if (dto.importPaths) {
const validation = await this.validate(id, { importPaths: dto.importPaths });
@ -316,6 +315,7 @@ export class LibraryService extends BaseService {
}
}
const library = await this.libraryRepository.update({ id, ...dto });
return mapLibrary(library);
}
@ -424,6 +424,8 @@ export class LibraryService extends BaseService {
isExternal: true,
});
await this.assetRepository.upsertFile({ assetId: asset.id, type: AssetFileType.ORIGINAL, path: assetPath });
await this.queuePostSyncJobs(asset);
return JobStatus.SUCCESS;
@ -482,6 +484,7 @@ export class LibraryService extends BaseService {
if (!asset.isOffline) {
this.logger.debug(`${explanation}, removing: ${asset.originalPath}`);
await this.assetRepository.updateAll([asset.id], { isOffline: true, deletedAt: new Date() });
await this.assetRepository.removeFile(asset.id, AssetFileType.ORIGINAL);
}
};
@ -518,6 +521,12 @@ export class LibraryService extends BaseService {
fileModifiedAt: mtime,
originalFileName: parse(asset.originalPath).base,
});
await this.assetRepository.upsertFile({
assetId: asset.id,
type: AssetFileType.ORIGINAL,
path: asset.originalPath,
});
}
if (isAssetModified) {

View file

@ -12,6 +12,7 @@ import { MediaService } from 'src/services/media.service';
import { MetadataService } from 'src/services/metadata.service';
import { NotificationService } from 'src/services/notification.service';
import { PersonService } from 'src/services/person.service';
import { RepairService } from 'src/services/repair.service';
import { SessionService } from 'src/services/session.service';
import { SmartInfoService } from 'src/services/smart-info.service';
import { StorageTemplateService } from 'src/services/storage-template.service';
@ -42,6 +43,7 @@ export class MicroservicesService {
private userService: UserService,
private duplicateService: DuplicateService,
private versionService: VersionService,
private repairService: RepairService,
) {}
@OnEvent({ name: 'app.bootstrap' })
@ -99,6 +101,7 @@ export class MicroservicesService {
[JobName.TAG_CLEANUP]: () => this.tagService.handleTagCleanup(),
[JobName.VERSION_CHECK]: () => this.versionService.handleVersionCheck(),
[JobName.QUEUE_TRASH_EMPTY]: () => this.trashService.handleQueueEmptyTrash(),
[JobName.REPAIR_VERIFY_CHECKSUM]: (data) => this.repairService.handleVerifyChecksum(data), // verify one asset file's stored checksum against the file on disk
});
}

View file

@ -0,0 +1,66 @@
import { Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
import { AuthDto } from 'src/dtos/auth.dto';
import { RepairEntity } from 'src/entities/repair.entity';
import { Permission, RepairType } from 'src/enum';
import { IEntityJob, JobName, JOBS_ASSET_PAGINATION_SIZE, JobStatus } from 'src/interfaces/job.interface';
import { BaseService } from 'src/services/base.service';
import { usePagination } from 'src/utils/pagination';
@Injectable()
export class RepairService extends BaseService {
  /**
   * Job handler: re-hash one asset file on disk and compare it against the stored
   * checksum. A mismatch is recorded as a RepairType.CHECKSUM_MISMATCH entry.
   * Returns FAILED when the file row, its checksum, or the file itself is unavailable.
   */
  async handleVerifyChecksum(job: IEntityJob): Promise<JobStatus> {
    const assetFile = await this.assetRepository.getFileById(job.id);
    if (!assetFile) {
      this.logger.error(`Asset file not found for id: ${job.id}`);
      return JobStatus.FAILED;
    }

    if (!assetFile.checksum) {
      this.logger.error(`Asset file has no checksum, cannot verify: ${job.id}`);
      return JobStatus.FAILED;
    }

    let currentChecksum: Buffer;
    try {
      currentChecksum = await this.cryptoRepository.xxHashFile(assetFile.path);
    } catch {
      // Bug fix: a missing/unreadable file rejects xxHashFile's promise; previously that
      // rejection escaped the handler as an unhandled error. Fail the job explicitly.
      this.logger.error(`Could not read asset file for checksum verification: ${assetFile.path}`);
      return JobStatus.FAILED;
    }

    if (currentChecksum.equals(assetFile.checksum)) {
      this.logger.log(`Asset file checksum verified: ${job.id}`);
    } else {
      this.logger.error(`Asset file checksum mismatch: ${job.id}`);
      // Bug fix: RepairEntity.path is a required (non-nullable) column; the original
      // insert omitted it and would fail at the database level.
      await this.repairRepository.create({ assetFile, type: RepairType.CHECKSUM_MISMATCH, path: assetFile.path });
    }

    return JobStatus.SUCCESS;
  }

  /**
   * Queue a REPAIR_VERIFY_CHECKSUM job for every asset file that has a stored checksum.
   * @param auth currently unused — admin access is enforced at the controller.
   */
  async queueVerifyChecksums(auth: AuthDto): Promise<void> {
    const onlineAssets = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.assetRepository.getAll(pagination),
    );

    for await (const assets of onlineAssets) {
      // Only files with a recorded checksum can be verified.
      const fileIds = assets
        .flatMap((asset) => asset.files)
        .filter((file) => file.checksum)
        .map((file) => file.id);

      await this.jobRepository.queueAll(
        fileIds.map((id) => ({
          name: JobName.REPAIR_VERIFY_CHECKSUM,
          data: { id },
        })),
      );
    }
  }

  /**
   * Return all recorded repair entries, draining the paginated repository results.
   * @param auth currently unused — admin access is enforced at the controller.
   */
  async getRepairs(auth: AuthDto): Promise<RepairEntity[]> {
    let repairs: RepairEntity[] = [];
    const repairPages = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.repairRepository.getAll(pagination),
    );

    for await (const repairPage of repairPages) {
      repairs = repairs.concat(repairPage);
    }

    return repairs;
  }
}