Mirror of https://github.com/immich-app/immich.git, synced 2025-02-04 01:09:14 -05:00
refactor(server): remove checksum job (#1786)
This commit is contained in:
parent 57136e48fb
commit e309647f1b
7 changed files with 0 additions and 111 deletions

@@ -14,9 +14,7 @@ import { Module } from '@nestjs/common';
 import { ConfigModule } from '@nestjs/config';
 import { TypeOrmModule } from '@nestjs/typeorm';
 import { CommunicationModule } from '../../immich/src/api-v1/communication/communication.module';
-import { MicroservicesService } from './microservices.service';
 import { AssetUploadedProcessor } from './processors/asset-uploaded.processor';
-import { GenerateChecksumProcessor } from './processors/generate-checksum.processor';
 import { MachineLearningProcessor } from './processors/machine-learning.processor';
 import { MetadataExtractionProcessor } from './processors/metadata-extraction.processor';
 import { StorageMigrationProcessor } from './processors/storage-migration.processor';

@@ -46,12 +44,10 @@ import { DomainModule } from '@app/domain';
   ],
   controllers: [],
   providers: [
-    MicroservicesService,
     AssetUploadedProcessor,
     ThumbnailGeneratorProcessor,
     MetadataExtractionProcessor,
     VideoTranscodeProcessor,
-    GenerateChecksumProcessor,
     MachineLearningProcessor,
     UserDeletionProcessor,
     StorageMigrationProcessor,

@@ -1,16 +0,0 @@
-import { Inject, Injectable, OnModuleInit } from '@nestjs/common';
-import { IJobRepository, JobName } from '@app/domain';
-
-const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(() => resolve(), ms));
-
-@Injectable()
-export class MicroservicesService implements OnModuleInit {
-  constructor(@Inject(IJobRepository) private jobRepository: IJobRepository) {}
-
-  async onModuleInit() {
-    // wait for migration
-    await sleep(10_000);
-
-    await this.jobRepository.add({ name: JobName.CHECKSUM_GENERATION });
-  }
-}

@@ -1,82 +0,0 @@
-import { AssetEntity } from '@app/infra';
-import { JobName, QueueName } from '@app/domain';
-import { Process, Processor } from '@nestjs/bull';
-import { Logger } from '@nestjs/common';
-import { InjectRepository } from '@nestjs/typeorm';
-import { createHash } from 'node:crypto';
-import fs from 'node:fs';
-import { FindOptionsWhere, IsNull, MoreThan, QueryFailedError, Repository } from 'typeorm';
-
-// TODO: just temporary task to generate previous uploaded assets.
-@Processor(QueueName.CHECKSUM_GENERATION)
-export class GenerateChecksumProcessor {
-  constructor(
-    @InjectRepository(AssetEntity)
-    private assetRepository: Repository<AssetEntity>,
-  ) {}
-
-  @Process(JobName.CHECKSUM_GENERATION)
-  async generateChecksum() {
-    const pageSize = 200;
-    let hasNext = true;
-    let lastErrAssetId: string | undefined = undefined;
-
-    while (hasNext) {
-      const whereStat: FindOptionsWhere<AssetEntity> = {
-        checksum: IsNull(),
-      };
-
-      if (lastErrAssetId) {
-        whereStat.id = MoreThan(lastErrAssetId);
-      }
-
-      const assets = await this.assetRepository.find({
-        where: whereStat,
-        take: pageSize,
-        order: { id: 'ASC' },
-      });
-
-      if (!assets?.length) {
-        hasNext = false; // avoid using break
-      } else {
-        for (const asset of assets) {
-          try {
-            await this.generateAssetChecksum(asset);
-          } catch (err: any) {
-            lastErrAssetId = asset.id;
-
-            if (err instanceof QueryFailedError && (err as any).constraint === 'UQ_userid_checksum') {
-              Logger.error(`${asset.originalPath} duplicated`);
-            } else {
-              Logger.error(`checksum generation ${err}`);
-            }
-          }
-        }
-
-        // break when reach to the last page
-        if (assets.length < pageSize) {
-          hasNext = false;
-        }
-      }
-    }
-
-    Logger.log(`checksum generation done!`);
-  }
-
-  private async generateAssetChecksum(asset: AssetEntity) {
-    if (!asset.originalPath) return;
-    if (!fs.existsSync(asset.originalPath)) return;
-
-    const fileReadStream = fs.createReadStream(asset.originalPath);
-    const sha1Hash = createHash('sha1');
-    const deferred = new Promise<Buffer>((resolve, reject) => {
-      sha1Hash.once('error', (err) => reject(err));
-      sha1Hash.once('finish', () => resolve(sha1Hash.read()));
-    });
-
-    fileReadStream.pipe(sha1Hash);
-    const checksum = await deferred;
-
-    await this.assetRepository.update(asset.id, { checksum });
-  }
-}
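
For reference, the core of the deleted processor's per-asset work was streaming each file through a SHA-1 hash and storing the digest. Below is a minimal standalone sketch of that technique only; hashFile is a hypothetical helper name and this example is not part of the immich codebase.

// Sketch: stream a file through SHA-1 and resolve with the digest, mirroring
// what the removed generateAssetChecksum did per asset. createHash returns a
// Transform stream, so the file can be piped into it and the digest read once
// the hash stream finishes. hashFile is a hypothetical name for illustration.
import { createHash } from 'node:crypto';
import fs from 'node:fs';

function hashFile(path: string): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const hash = createHash('sha1');
    fs.createReadStream(path)
      .once('error', reject)
      .pipe(hash)
      .once('error', reject)
      .once('finish', () => resolve(hash.read() as Buffer));
  });
}

// Usage (hypothetical path):
// const checksum = await hashFile('/photos/IMG_0001.jpg');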

@@ -2,7 +2,6 @@ export enum QueueName {
   THUMBNAIL_GENERATION = 'thumbnail-generation-queue',
   METADATA_EXTRACTION = 'metadata-extraction-queue',
   VIDEO_CONVERSION = 'video-conversion-queue',
-  CHECKSUM_GENERATION = 'generate-checksum-queue',
   ASSET_UPLOADED = 'asset-uploaded-queue',
   MACHINE_LEARNING = 'machine-learning-queue',
   USER_DELETION = 'user-deletion-queue',

@@ -24,5 +23,4 @@ export enum JobName {
   OBJECT_DETECTION = 'detect-object',
   IMAGE_TAGGING = 'tag-image',
   DELETE_FILE_ON_DISK = 'delete-file-on-disk',
-  CHECKSUM_GENERATION = 'checksum-generation',
 }

@@ -34,7 +34,6 @@ export type JobItem =
   | { name: JobName.USER_DELETION; data: IUserDeletionJob }
   | { name: JobName.TEMPLATE_MIGRATION }
   | { name: JobName.CONFIG_CHANGE }
-  | { name: JobName.CHECKSUM_GENERATION }
   | { name: JobName.EXTRACT_VIDEO_METADATA; data: IVideoLengthExtractionProcessor }
   | { name: JobName.OBJECT_DETECTION; data: IMachineLearningJob }
   | { name: JobName.IMAGE_TAGGING; data: IMachineLearningJob }

@@ -79,7 +79,6 @@ const providers: Provider[] = [
   { name: QueueName.ASSET_UPLOADED },
   { name: QueueName.METADATA_EXTRACTION },
   { name: QueueName.VIDEO_CONVERSION },
-  { name: QueueName.CHECKSUM_GENERATION },
   { name: QueueName.MACHINE_LEARNING },
   { name: QueueName.CONFIG },
   { name: QueueName.BACKGROUND_TASK },

@@ -20,7 +20,6 @@ export class JobRepository implements IJobRepository {
   constructor(
     @InjectQueue(QueueName.ASSET_UPLOADED) private assetUploaded: Queue<IAssetUploadedJob>,
     @InjectQueue(QueueName.BACKGROUND_TASK) private backgroundTask: Queue,
-    @InjectQueue(QueueName.CHECKSUM_GENERATION) private generateChecksum: Queue,
     @InjectQueue(QueueName.MACHINE_LEARNING) private machineLearning: Queue<IMachineLearningJob>,
     @InjectQueue(QueueName.METADATA_EXTRACTION) private metadataExtraction: Queue<IMetadataExtractionJob>,
     @InjectQueue(QueueName.CONFIG) private storageMigration: Queue,

@@ -52,10 +51,6 @@ export class JobRepository implements IJobRepository {
         await this.backgroundTask.add(item.name, item.data);
         break;

-      case JobName.CHECKSUM_GENERATION:
-        await this.generateChecksum.add(item.name, {});
-        break;
-
       case JobName.OBJECT_DETECTION:
       case JobName.IMAGE_TAGGING:
         await this.machineLearning.add(item.name, item.data);
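
The removed CHECKSUM_GENERATION entries above all feed one dispatch pattern: job names declared in the JobItem discriminated union are routed to their Bull queues inside JobRepository.add(), so dropping a job means removing it from the enum, the union, the queue registration, and the switch together. A minimal sketch of that pattern, using simplified stand-in types rather than the real @app/domain and bull ones:

// Sketch of the name-to-queue routing pattern seen in JobRepository.add().
// JobName values match the diff; QueueLike and the job payloads are
// simplified stand-ins for illustration only.
enum JobName {
  OBJECT_DETECTION = 'detect-object',
  IMAGE_TAGGING = 'tag-image',
}

type JobItem =
  | { name: JobName.OBJECT_DETECTION; data: { assetId: string } }
  | { name: JobName.IMAGE_TAGGING; data: { assetId: string } };

interface QueueLike {
  add(name: string, data: unknown): Promise<void>;
}

class JobRepositorySketch {
  constructor(private machineLearning: QueueLike) {}

  // The discriminated union means a caller can only enqueue job names that
  // still exist; once CHECKSUM_GENERATION is gone from the union, an
  // add({ name: JobName.CHECKSUM_GENERATION }) call no longer type-checks.
  async add(item: JobItem): Promise<void> {
    switch (item.name) {
      case JobName.OBJECT_DETECTION:
      case JobName.IMAGE_TAGGING:
        await this.machineLearning.add(item.name, item.data);
        break;
    }
  }
}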