Mirror of https://github.com/immich-app/immich.git (synced 2025-04-08 03:01:32 -05:00)

feat: built-in automatic database backups (#13773)

parent 30d42e571c
commit 7d933ec97a

41 changed files with 994 additions and 17 deletions
@@ -21,6 +21,17 @@ The recommended way to backup and restore the Immich database is to use the `pg_

It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.

:::

### Automatic Database Backups

Immich automatically creates database backups by default. The backups are stored in `UPLOAD_LOCATION/backups`.
You can adjust the schedule and the number of backups to keep in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
By default, Immich keeps the last 14 backups and creates a new backup every day at 2:00 AM.

#### Restoring

We hope to make restoring simpler in future versions; for now, you can find the backups in the `UPLOAD_LOCATION/backups` folder on your host.
Then follow the steps in the next section to restore the database.

### Manual Backup and Restore

<Tabs>
11 i18n/en.json

@@ -34,6 +34,11 @@
  "authentication_settings_disable_all": "Are you sure you want to disable all login methods? Login will be completely disabled.",
  "authentication_settings_reenable": "To re-enable, use a <link>Server Command</link>.",
  "background_task_job": "Background Tasks",
  "backup_database": "Backup Database",
  "backup_database_enable_description": "Enable database backups",
  "backup_keep_last_amount": "Amount of previous backups to keep",
  "backup_settings": "Backup Settings",
  "backup_settings_description": "Manage database backup settings",
  "check_all": "Check All",
  "cleared_jobs": "Cleared jobs for: {job}",
  "config_set_by_file": "Config is currently set by a config file",
@@ -43,6 +48,9 @@
  "confirm_reprocess_all_faces": "Are you sure you want to reprocess all faces? This will also clear named people.",
  "confirm_user_password_reset": "Are you sure you want to reset {user}'s password?",
  "create_job": "Create job",
  "cron_expression": "Cron expression",
  "cron_expression_description": "Set the scanning interval using the cron format. For more information please refer to e.g. <link>Crontab Guru</link>",
  "cron_expression_presets": "Cron expression presets",
  "disable_login": "Disable login",
  "duplicate_detection_job_description": "Run machine learning on assets to detect similar images. Relies on Smart Search",
  "exclusion_pattern_description": "Exclusion patterns lets you ignore files and folders when scanning your library. This is useful if you have folders that contain files you don't want to import, such as RAW files.",
@@ -80,9 +88,6 @@
  "jobs_delayed": "{jobCount, plural, other {# delayed}}",
  "jobs_failed": "{jobCount, plural, other {# failed}}",
  "library_created": "Created library: {library}",
  "library_cron_expression": "Cron expression",
  "library_cron_expression_description": "Set the scanning interval using the cron format. For more information please refer to e.g. <link>Crontab Guru</link>",
  "library_cron_expression_presets": "Cron expression presets",
  "library_deleted": "Library deleted",
  "library_import_path_description": "Specify a folder to import. This folder, including subfolders, will be scanned for images and videos.",
  "library_scanning": "Periodic Scanning",
2 mobile/openapi/README.md (generated)

@@ -306,6 +306,7 @@ Class | Method | HTTP request | Description
 - [CreateAlbumDto](doc//CreateAlbumDto.md)
 - [CreateLibraryDto](doc//CreateLibraryDto.md)
 - [CreateProfileImageResponseDto](doc//CreateProfileImageResponseDto.md)
 - [DatabaseBackupConfig](doc//DatabaseBackupConfig.md)
 - [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
 - [DownloadInfoDto](doc//DownloadInfoDto.md)
 - [DownloadResponse](doc//DownloadResponse.md)
@@ -413,6 +414,7 @@ Class | Method | HTTP request | Description
 - [StackCreateDto](doc//StackCreateDto.md)
 - [StackResponseDto](doc//StackResponseDto.md)
 - [StackUpdateDto](doc//StackUpdateDto.md)
 - [SystemConfigBackupsDto](doc//SystemConfigBackupsDto.md)
 - [SystemConfigDto](doc//SystemConfigDto.md)
 - [SystemConfigFFmpegDto](doc//SystemConfigFFmpegDto.md)
 - [SystemConfigFacesDto](doc//SystemConfigFacesDto.md)
2 mobile/openapi/lib/api.dart (generated)

@@ -120,6 +120,7 @@ part 'model/colorspace.dart';
part 'model/create_album_dto.dart';
part 'model/create_library_dto.dart';
part 'model/create_profile_image_response_dto.dart';
part 'model/database_backup_config.dart';
part 'model/download_archive_info.dart';
part 'model/download_info_dto.dart';
part 'model/download_response.dart';
@@ -227,6 +228,7 @@ part 'model/source_type.dart';
part 'model/stack_create_dto.dart';
part 'model/stack_response_dto.dart';
part 'model/stack_update_dto.dart';
part 'model/system_config_backups_dto.dart';
part 'model/system_config_dto.dart';
part 'model/system_config_f_fmpeg_dto.dart';
part 'model/system_config_faces_dto.dart';
4 mobile/openapi/lib/api_client.dart (generated)

@@ -294,6 +294,8 @@ class ApiClient {
        return CreateLibraryDto.fromJson(value);
      case 'CreateProfileImageResponseDto':
        return CreateProfileImageResponseDto.fromJson(value);
      case 'DatabaseBackupConfig':
        return DatabaseBackupConfig.fromJson(value);
      case 'DownloadArchiveInfo':
        return DownloadArchiveInfo.fromJson(value);
      case 'DownloadInfoDto':
@@ -508,6 +510,8 @@ class ApiClient {
        return StackResponseDto.fromJson(value);
      case 'StackUpdateDto':
        return StackUpdateDto.fromJson(value);
      case 'SystemConfigBackupsDto':
        return SystemConfigBackupsDto.fromJson(value);
      case 'SystemConfigDto':
        return SystemConfigDto.fromJson(value);
      case 'SystemConfigFFmpegDto':
@@ -14,6 +14,7 @@ class AllJobStatusResponseDto {
  /// Returns a new [AllJobStatusResponseDto] instance.
  AllJobStatusResponseDto({
    required this.backgroundTask,
    required this.backupDatabase,
    required this.duplicateDetection,
    required this.faceDetection,
    required this.facialRecognition,
@@ -31,6 +32,8 @@ class AllJobStatusResponseDto {

  JobStatusDto backgroundTask;

  JobStatusDto backupDatabase;

  JobStatusDto duplicateDetection;

  JobStatusDto faceDetection;

@@ -60,6 +63,7 @@ class AllJobStatusResponseDto {
  @override
  bool operator ==(Object other) => identical(this, other) || other is AllJobStatusResponseDto &&
    other.backgroundTask == backgroundTask &&
    other.backupDatabase == backupDatabase &&
    other.duplicateDetection == duplicateDetection &&
    other.faceDetection == faceDetection &&
    other.facialRecognition == facialRecognition &&
@@ -78,6 +82,7 @@ class AllJobStatusResponseDto {
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (backgroundTask.hashCode) +
    (backupDatabase.hashCode) +
    (duplicateDetection.hashCode) +
    (faceDetection.hashCode) +
    (facialRecognition.hashCode) +
@@ -93,11 +98,12 @@ class AllJobStatusResponseDto {
    (videoConversion.hashCode);

  @override
  String toString() => 'AllJobStatusResponseDto[backgroundTask=$backgroundTask, duplicateDetection=$duplicateDetection, faceDetection=$faceDetection, facialRecognition=$facialRecognition, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';
  String toString() => 'AllJobStatusResponseDto[backgroundTask=$backgroundTask, backupDatabase=$backupDatabase, duplicateDetection=$duplicateDetection, faceDetection=$faceDetection, facialRecognition=$facialRecognition, library_=$library_, metadataExtraction=$metadataExtraction, migration=$migration, notifications=$notifications, search=$search, sidecar=$sidecar, smartSearch=$smartSearch, storageTemplateMigration=$storageTemplateMigration, thumbnailGeneration=$thumbnailGeneration, videoConversion=$videoConversion]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
      json[r'backgroundTask'] = this.backgroundTask;
      json[r'backupDatabase'] = this.backupDatabase;
      json[r'duplicateDetection'] = this.duplicateDetection;
      json[r'faceDetection'] = this.faceDetection;
      json[r'facialRecognition'] = this.facialRecognition;
@@ -124,6 +130,7 @@ class AllJobStatusResponseDto {

      return AllJobStatusResponseDto(
        backgroundTask: JobStatusDto.fromJson(json[r'backgroundTask'])!,
        backupDatabase: JobStatusDto.fromJson(json[r'backupDatabase'])!,
        duplicateDetection: JobStatusDto.fromJson(json[r'duplicateDetection'])!,
        faceDetection: JobStatusDto.fromJson(json[r'faceDetection'])!,
        facialRecognition: JobStatusDto.fromJson(json[r'facialRecognition'])!,
@@ -185,6 +192,7 @@ class AllJobStatusResponseDto {
  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'backgroundTask',
    'backupDatabase',
    'duplicateDetection',
    'faceDetection',
    'facialRecognition',
116 mobile/openapi/lib/model/database_backup_config.dart (generated, new file)

@@ -0,0 +1,116 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class DatabaseBackupConfig {
  /// Returns a new [DatabaseBackupConfig] instance.
  DatabaseBackupConfig({
    required this.cronExpression,
    required this.enabled,
    required this.keepLastAmount,
  });

  String cronExpression;

  bool enabled;

  /// Minimum value: 1
  num keepLastAmount;

  @override
  bool operator ==(Object other) => identical(this, other) || other is DatabaseBackupConfig &&
    other.cronExpression == cronExpression &&
    other.enabled == enabled &&
    other.keepLastAmount == keepLastAmount;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (cronExpression.hashCode) +
    (enabled.hashCode) +
    (keepLastAmount.hashCode);

  @override
  String toString() => 'DatabaseBackupConfig[cronExpression=$cronExpression, enabled=$enabled, keepLastAmount=$keepLastAmount]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
      json[r'cronExpression'] = this.cronExpression;
      json[r'enabled'] = this.enabled;
      json[r'keepLastAmount'] = this.keepLastAmount;
    return json;
  }

  /// Returns a new [DatabaseBackupConfig] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static DatabaseBackupConfig? fromJson(dynamic value) {
    upgradeDto(value, "DatabaseBackupConfig");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return DatabaseBackupConfig(
        cronExpression: mapValueOfType<String>(json, r'cronExpression')!,
        enabled: mapValueOfType<bool>(json, r'enabled')!,
        keepLastAmount: num.parse('${json[r'keepLastAmount']}'),
      );
    }
    return null;
  }

  static List<DatabaseBackupConfig> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <DatabaseBackupConfig>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = DatabaseBackupConfig.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, DatabaseBackupConfig> mapFromJson(dynamic json) {
    final map = <String, DatabaseBackupConfig>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = DatabaseBackupConfig.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of DatabaseBackupConfig-objects as value to a dart map
  static Map<String, List<DatabaseBackupConfig>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<DatabaseBackupConfig>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = DatabaseBackupConfig.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'cronExpression',
    'enabled',
    'keepLastAmount',
  };
}
3 mobile/openapi/lib/model/job_name.dart (generated)

@@ -37,6 +37,7 @@ class JobName {
  static const sidecar = JobName._(r'sidecar');
  static const library_ = JobName._(r'library');
  static const notifications = JobName._(r'notifications');
  static const backupDatabase = JobName._(r'backupDatabase');

  /// List of all possible values in this [enum][JobName].
  static const values = <JobName>[
@@ -54,6 +55,7 @@ class JobName {
    sidecar,
    library_,
    notifications,
    backupDatabase,
  ];

  static JobName? fromJson(dynamic value) => JobNameTypeTransformer().decode(value);
@@ -106,6 +108,7 @@ class JobNameTypeTransformer {
        case r'sidecar': return JobName.sidecar;
        case r'library': return JobName.library_;
        case r'notifications': return JobName.notifications;
        case r'backupDatabase': return JobName.backupDatabase;
        default:
          if (!allowNull) {
            throw ArgumentError('Unknown enum value to decode: $data');
99 mobile/openapi/lib/model/system_config_backups_dto.dart (generated, new file)

@@ -0,0 +1,99 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class SystemConfigBackupsDto {
  /// Returns a new [SystemConfigBackupsDto] instance.
  SystemConfigBackupsDto({
    required this.database,
  });

  DatabaseBackupConfig database;

  @override
  bool operator ==(Object other) => identical(this, other) || other is SystemConfigBackupsDto &&
    other.database == database;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (database.hashCode);

  @override
  String toString() => 'SystemConfigBackupsDto[database=$database]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
      json[r'database'] = this.database;
    return json;
  }

  /// Returns a new [SystemConfigBackupsDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static SystemConfigBackupsDto? fromJson(dynamic value) {
    upgradeDto(value, "SystemConfigBackupsDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return SystemConfigBackupsDto(
        database: DatabaseBackupConfig.fromJson(json[r'database'])!,
      );
    }
    return null;
  }

  static List<SystemConfigBackupsDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <SystemConfigBackupsDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = SystemConfigBackupsDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, SystemConfigBackupsDto> mapFromJson(dynamic json) {
    final map = <String, SystemConfigBackupsDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = SystemConfigBackupsDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of SystemConfigBackupsDto-objects as value to a dart map
  static Map<String, List<SystemConfigBackupsDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<SystemConfigBackupsDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = SystemConfigBackupsDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'database',
  };
}
10 mobile/openapi/lib/model/system_config_dto.dart (generated)

@@ -13,6 +13,7 @@ part of openapi.api;
class SystemConfigDto {
  /// Returns a new [SystemConfigDto] instance.
  SystemConfigDto({
    required this.backup,
    required this.ffmpeg,
    required this.image,
    required this.job,
@@ -33,6 +34,8 @@ class SystemConfigDto {
    required this.user,
  });

  SystemConfigBackupsDto backup;

  SystemConfigFFmpegDto ffmpeg;

  SystemConfigImageDto image;

@@ -71,6 +74,7 @@ class SystemConfigDto {

  @override
  bool operator ==(Object other) => identical(this, other) || other is SystemConfigDto &&
    other.backup == backup &&
    other.ffmpeg == ffmpeg &&
    other.image == image &&
    other.job == job &&
@@ -93,6 +97,7 @@ class SystemConfigDto {
  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (backup.hashCode) +
    (ffmpeg.hashCode) +
    (image.hashCode) +
    (job.hashCode) +
@@ -113,10 +118,11 @@ class SystemConfigDto {
    (user.hashCode);

  @override
  String toString() => 'SystemConfigDto[ffmpeg=$ffmpeg, image=$image, job=$job, library_=$library_, logging=$logging, machineLearning=$machineLearning, map=$map, metadata=$metadata, newVersionCheck=$newVersionCheck, notifications=$notifications, oauth=$oauth, passwordLogin=$passwordLogin, reverseGeocoding=$reverseGeocoding, server=$server, storageTemplate=$storageTemplate, theme=$theme, trash=$trash, user=$user]';
  String toString() => 'SystemConfigDto[backup=$backup, ffmpeg=$ffmpeg, image=$image, job=$job, library_=$library_, logging=$logging, machineLearning=$machineLearning, map=$map, metadata=$metadata, newVersionCheck=$newVersionCheck, notifications=$notifications, oauth=$oauth, passwordLogin=$passwordLogin, reverseGeocoding=$reverseGeocoding, server=$server, storageTemplate=$storageTemplate, theme=$theme, trash=$trash, user=$user]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
      json[r'backup'] = this.backup;
      json[r'ffmpeg'] = this.ffmpeg;
      json[r'image'] = this.image;
      json[r'job'] = this.job;
@@ -147,6 +153,7 @@ class SystemConfigDto {
      final json = value.cast<String, dynamic>();

      return SystemConfigDto(
        backup: SystemConfigBackupsDto.fromJson(json[r'backup'])!,
        ffmpeg: SystemConfigFFmpegDto.fromJson(json[r'ffmpeg'])!,
        image: SystemConfigImageDto.fromJson(json[r'image'])!,
        job: SystemConfigJobDto.fromJson(json[r'job'])!,
@@ -212,6 +219,7 @@ class SystemConfigDto {

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'backup',
    'ffmpeg',
    'image',
    'job',
@@ -7745,6 +7745,9 @@
        "backgroundTask": {
          "$ref": "#/components/schemas/JobStatusDto"
        },
        "backupDatabase": {
          "$ref": "#/components/schemas/JobStatusDto"
        },
        "duplicateDetection": {
          "$ref": "#/components/schemas/JobStatusDto"
        },
@@ -7787,6 +7790,7 @@
      },
      "required": [
        "backgroundTask",
        "backupDatabase",
        "duplicateDetection",
        "faceDetection",
        "facialRecognition",
@@ -8754,6 +8758,26 @@
        ],
        "type": "object"
      },
      "DatabaseBackupConfig": {
        "properties": {
          "cronExpression": {
            "type": "string"
          },
          "enabled": {
            "type": "boolean"
          },
          "keepLastAmount": {
            "minimum": 1,
            "type": "number"
          }
        },
        "required": [
          "cronExpression",
          "enabled",
          "keepLastAmount"
        ],
        "type": "object"
      },
      "DownloadArchiveInfo": {
        "properties": {
          "assetIds": {
@@ -9289,7 +9313,8 @@
          "search",
          "sidecar",
          "library",
          "notifications"
          "notifications",
          "backupDatabase"
        ],
        "type": "string"
      },
@@ -11456,8 +11481,22 @@
        },
        "type": "object"
      },
      "SystemConfigBackupsDto": {
        "properties": {
          "database": {
            "$ref": "#/components/schemas/DatabaseBackupConfig"
          }
        },
        "required": [
          "database"
        ],
        "type": "object"
      },
      "SystemConfigDto": {
        "properties": {
          "backup": {
            "$ref": "#/components/schemas/SystemConfigBackupsDto"
          },
          "ffmpeg": {
            "$ref": "#/components/schemas/SystemConfigFFmpegDto"
          },
@@ -11514,6 +11553,7 @@
        }
      },
      "required": [
        "backup",
        "ffmpeg",
        "image",
        "job",
@@ -535,6 +535,7 @@ export type JobStatusDto = {
};
export type AllJobStatusResponseDto = {
    backgroundTask: JobStatusDto;
    backupDatabase: JobStatusDto;
    duplicateDetection: JobStatusDto;
    faceDetection: JobStatusDto;
    facialRecognition: JobStatusDto;
@@ -1084,6 +1085,14 @@ export type AssetFullSyncDto = {
    updatedUntil: string;
    userId?: string;
};
export type DatabaseBackupConfig = {
    cronExpression: string;
    enabled: boolean;
    keepLastAmount: number;
};
export type SystemConfigBackupsDto = {
    database: DatabaseBackupConfig;
};
export type SystemConfigFFmpegDto = {
    accel: TranscodeHWAccel;
    accelDecode: boolean;
@@ -1232,6 +1241,7 @@ export type SystemConfigUserDto = {
    deleteDelay: number;
};
export type SystemConfigDto = {
    backup: SystemConfigBackupsDto;
    ffmpeg: SystemConfigFFmpegDto;
    image: SystemConfigImageDto;
    job: SystemConfigJobDto;
@@ -3445,7 +3455,8 @@ export enum JobName {
    Search = "search",
    Sidecar = "sidecar",
    Library = "library",
    Notifications = "notifications"
    Notifications = "notifications",
    BackupDatabase = "backupDatabase"
}
export enum JobCommand {
    Start = "start",
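For orientation, a value shaped by the generated SDK types above might look like the following sketch (the `@immich/sdk` import path and the concrete values are assumptions, not part of this diff):

```typescript
import type { DatabaseBackupConfig, SystemConfigBackupsDto } from '@immich/sdk';

// Hypothetical backup section matching the generated types above.
const database: DatabaseBackupConfig = {
  cronExpression: '0 2 * * *', // minute 0, hour 2 — every day at 02:00
  enabled: true,
  keepLastAmount: 14, // schema minimum is 1
};

const backup: SystemConfigBackupsDto = { database };
```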
@@ -15,6 +15,13 @@ import { ConcurrentQueueName, QueueName } from 'src/interfaces/job.interface';
import { ImageOptions } from 'src/interfaces/media.interface';

export interface SystemConfig {
  backup: {
    database: {
      enabled: boolean;
      cronExpression: string;
      keepLastAmount: number;
    };
  };
  ffmpeg: {
    crf: number;
    threads: number;
@@ -150,6 +157,13 @@ export interface SystemConfig {
}

export const defaults = Object.freeze<SystemConfig>({
  backup: {
    database: {
      enabled: true,
      cronExpression: CronExpression.EVERY_DAY_AT_2AM,
      keepLastAmount: 14,
    },
  },
  ffmpeg: {
    crf: 23,
    threads: 0,
@@ -97,4 +97,7 @@ export class AllJobStatusResponseDto implements Record<QueueName, JobStatusDto>

  @ApiProperty({ type: JobStatusDto })
  [QueueName.NOTIFICATION]!: JobStatusDto;

  @ApiProperty({ type: JobStatusDto })
  [QueueName.BACKUP_DATABASE]!: JobStatusDto;
}
@@ -46,6 +46,30 @@ const isLibraryScanEnabled = (config: SystemConfigLibraryScanDto) => config.enab
const isOAuthEnabled = (config: SystemConfigOAuthDto) => config.enabled;
const isOAuthOverrideEnabled = (config: SystemConfigOAuthDto) => config.mobileOverrideEnabled;
const isEmailNotificationEnabled = (config: SystemConfigSmtpDto) => config.enabled;
const isDatabaseBackupEnabled = (config: DatabaseBackupConfig) => config.enabled;

export class DatabaseBackupConfig {
  @ValidateBoolean()
  enabled!: boolean;

  @ValidateIf(isDatabaseBackupEnabled)
  @IsNotEmpty()
  @Validate(CronValidator, { message: 'Invalid cron expression' })
  @IsString()
  cronExpression!: string;

  @IsInt()
  @IsPositive()
  @IsNotEmpty()
  keepLastAmount!: number;
}

export class SystemConfigBackupsDto {
  @Type(() => DatabaseBackupConfig)
  @ValidateNested()
  @IsObject()
  database!: DatabaseBackupConfig;
}

export class SystemConfigFFmpegDto {
  @IsInt()
@@ -531,6 +555,11 @@ class SystemConfigUserDto {
}

export class SystemConfigDto implements SystemConfig {
  @Type(() => SystemConfigBackupsDto)
  @ValidateNested()
  @IsObject()
  backup!: SystemConfigBackupsDto;

  @Type(() => SystemConfigFFmpegDto)
  @ValidateNested()
  @IsObject()
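The interesting part of the DTO above is the conditional rule: `cronExpression` is only validated while backups are enabled. A minimal sketch of that behavior with class-validator/class-transformer (the DTO import path is assumed):

```typescript
import 'reflect-metadata';
import { plainToInstance } from 'class-transformer';
import { validateSync } from 'class-validator';
import { DatabaseBackupConfig } from 'src/dtos/system-config.dto'; // assumed path

// With backups disabled, an empty cronExpression passes because
// @ValidateIf(isDatabaseBackupEnabled) skips the cron validators.
const disabled = plainToInstance(DatabaseBackupConfig, { enabled: false, cronExpression: '', keepLastAmount: 3 });
console.log(validateSync(disabled).length); // 0

// With backups enabled, the same payload fails @IsNotEmpty and the CronValidator.
const enabled = plainToInstance(DatabaseBackupConfig, { enabled: true, cronExpression: '', keepLastAmount: 3 });
console.log(validateSync(enabled).length); // > 0
```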
@@ -181,6 +181,7 @@ export enum StorageFolder {
  UPLOAD = 'upload',
  PROFILE = 'profile',
  THUMBNAILS = 'thumbs',
  BACKUPS = 'backups',
}

export enum SystemMetadataKey {
@@ -37,6 +37,7 @@ export enum DatabaseLock {
  CLIPDimSize = 512,
  Library = 1337,
  GetSystemConfig = 69,
  BackupDatabase = 42,
}

export const EXTENSION_NAMES: Record<DatabaseExtension, string> = {
@@ -15,11 +15,15 @@ export enum QueueName {
  SIDECAR = 'sidecar',
  LIBRARY = 'library',
  NOTIFICATION = 'notifications',
  BACKUP_DATABASE = 'backupDatabase',
}

export type ConcurrentQueueName = Exclude<
  QueueName,
  QueueName.STORAGE_TEMPLATE_MIGRATION | QueueName.FACIAL_RECOGNITION | QueueName.DUPLICATE_DETECTION
  | QueueName.STORAGE_TEMPLATE_MIGRATION
  | QueueName.FACIAL_RECOGNITION
  | QueueName.DUPLICATE_DETECTION
  | QueueName.BACKUP_DATABASE
>;

export enum JobCommand {
@@ -31,6 +35,9 @@ export enum JobCommand {
}

export enum JobName {
  //backups
  BACKUP_DATABASE = 'database-backup',

  // conversion
  QUEUE_VIDEO_CONVERSION = 'queue-video-conversion',
  VIDEO_CONVERSION = 'video-conversion',
@@ -209,6 +216,9 @@ export enum QueueCleanType {
}

export type JobItem =
  // Backups
  | { name: JobName.BACKUP_DATABASE; data?: IBaseJob }

  // Transcoding
  | { name: JobName.QUEUE_VIDEO_CONVERSION; data: IBaseJob }
  | { name: JobName.VIDEO_CONVERSION; data: IEntityJob }
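A quick sketch of what the widened `Exclude<>` above enforces: the backup queue, like the other single-instance queues, is no longer a valid target for per-queue concurrency settings, so assigning it fails at compile time.

```typescript
import { ConcurrentQueueName, QueueName } from 'src/interfaces/job.interface';

const adjustable: ConcurrentQueueName = QueueName.SIDECAR; // compiles
// @ts-expect-error BACKUP_DATABASE is excluded from ConcurrentQueueName
const notAdjustable: ConcurrentQueueName = QueueName.BACKUP_DATABASE;
```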
25 server/src/interfaces/process.interface.ts (new file)

@@ -0,0 +1,25 @@
import { ChildProcessWithoutNullStreams, SpawnOptionsWithoutStdio } from 'node:child_process';
import { Readable } from 'node:stream';

export interface ImmichReadStream {
  stream: Readable;
  type?: string;
  length?: number;
}

export interface ImmichZipStream extends ImmichReadStream {
  addFile: (inputPath: string, filename: string) => void;
  finalize: () => Promise<void>;
}

export interface DiskUsage {
  available: number;
  free: number;
  total: number;
}

export const IProcessRepository = 'IProcessRepository';

export interface IProcessRepository {
  spawn(command: string, args?: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams;
}
@@ -1,7 +1,7 @@
import { WatchOptions } from 'chokidar';
import { Stats } from 'node:fs';
import { FileReadOptions } from 'node:fs/promises';
import { Readable } from 'node:stream';
import { Readable, Writable } from 'node:stream';
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';

export interface ImmichReadStream {
@@ -36,6 +36,7 @@ export interface IStorageRepository {
  createReadStream(filepath: string, mimeType?: string | null): Promise<ImmichReadStream>;
  readFile(filepath: string, options?: FileReadOptions<Buffer>): Promise<Buffer>;
  createFile(filepath: string, buffer: Buffer): Promise<void>;
  createWriteStream(filepath: string): Writable;
  createOrOverwriteFile(filepath: string, buffer: Buffer): Promise<void>;
  overwriteFile(filepath: string, buffer: Buffer): Promise<void>;
  realpath(filepath: string): Promise<string>;
@@ -22,6 +22,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
import { IPartnerRepository } from 'src/interfaces/partner.interface';
import { IPersonRepository } from 'src/interfaces/person.interface';
import { IProcessRepository } from 'src/interfaces/process.interface';
import { ISearchRepository } from 'src/interfaces/search.interface';
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
import { ISessionRepository } from 'src/interfaces/session.interface';
@@ -59,6 +60,7 @@ import { NotificationRepository } from 'src/repositories/notification.repository
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { ServerInfoRepository } from 'src/repositories/server-info.repository';
import { SessionRepository } from 'src/repositories/session.repository';
@@ -98,6 +100,7 @@ export const repositories = [
  { provide: IOAuthRepository, useClass: OAuthRepository },
  { provide: IPartnerRepository, useClass: PartnerRepository },
  { provide: IPersonRepository, useClass: PersonRepository },
  { provide: IProcessRepository, useClass: ProcessRepository },
  { provide: ISearchRepository, useClass: SearchRepository },
  { provide: IServerInfoRepository, useClass: ServerInfoRepository },
  { provide: ISessionRepository, useClass: SessionRepository },
@@ -30,6 +30,9 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
  [JobName.PERSON_CLEANUP]: QueueName.BACKGROUND_TASK,
  [JobName.USER_SYNC_USAGE]: QueueName.BACKGROUND_TASK,

  // backups
  [JobName.BACKUP_DATABASE]: QueueName.BACKUP_DATABASE,

  // conversion
  [JobName.QUEUE_VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
  [JobName.VIDEO_CONVERSION]: QueueName.VIDEO_CONVERSION,
16 server/src/repositories/process.repository.ts (new file)

@@ -0,0 +1,16 @@
import { Inject, Injectable } from '@nestjs/common';
import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { IProcessRepository } from 'src/interfaces/process.interface';
import { StorageRepository } from 'src/repositories/storage.repository';

@Injectable()
export class ProcessRepository implements IProcessRepository {
  constructor(@Inject(ILoggerRepository) private logger: ILoggerRepository) {
    this.logger.setContext(StorageRepository.name);
  }

  spawn(command: string, args: readonly string[], options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams {
    return spawn(command, args, options);
  }
}
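The repository is a deliberately thin seam over `node:child_process` so unit tests can stub process launches. A usage sketch (the `--version` invocation is illustrative, not from this diff):

```typescript
import { IProcessRepository } from 'src/interfaces/process.interface';

// Run a one-off command through the seam instead of calling spawn() directly,
// so tests can substitute newProcessRepositoryMock (added further below).
function printPgVersion(processRepository: IProcessRepository): void {
  const child = processRepository.spawn('pg_dumpall', ['--version']);
  child.stdout.on('data', (chunk) => console.log(String(chunk).trim()));
  child.on('exit', (code) => console.log(`pg_dumpall exited with code ${code}`));
}
```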
@@ -2,9 +2,10 @@ import { Inject, Injectable } from '@nestjs/common';
import archiver from 'archiver';
import chokidar, { WatchOptions } from 'chokidar';
import { escapePath, glob, globStream } from 'fast-glob';
import { constants, createReadStream, existsSync, mkdirSync } from 'node:fs';
import { constants, createReadStream, createWriteStream, existsSync, mkdirSync } from 'node:fs';
import fs from 'node:fs/promises';
import path from 'node:path';
import { Writable } from 'node:stream';
import { CrawlOptionsDto, WalkOptionsDto } from 'src/dtos/library.dto';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import {
@@ -42,6 +43,10 @@ export class StorageRepository implements IStorageRepository {
    return fs.writeFile(filepath, buffer, { flag: 'wx' });
  }

  createWriteStream(filepath: string): Writable {
    return createWriteStream(filepath, { flags: 'w' });
  }

  createOrOverwriteFile(filepath: string, buffer: Buffer) {
    return fs.writeFile(filepath, buffer, { flag: 'w' });
  }
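A small sketch of the new `createWriteStream` in use (hypothetical data and path), paired with `stream.pipeline` so errors propagate:

```typescript
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { IStorageRepository } from 'src/interfaces/storage.interface';

// Stream arbitrary data to disk through the repository abstraction.
async function writeExample(storageRepository: IStorageRepository): Promise<void> {
  const source = Readable.from(['hello ', 'backups\n']);
  await pipeline(source, storageRepository.createWriteStream('/tmp/example.txt'));
}
```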
217 server/src/services/backup.service.spec.ts (new file)

@@ -0,0 +1,217 @@
import { PassThrough } from 'node:stream';
import { defaults, SystemConfig } from 'src/config';
import { StorageCore } from 'src/cores/storage.core';
import { ImmichWorker, StorageFolder } from 'src/enum';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import { IJobRepository, JobStatus } from 'src/interfaces/job.interface';
import { IProcessRepository } from 'src/interfaces/process.interface';
import { IStorageRepository } from 'src/interfaces/storage.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
import { BackupService } from 'src/services/backup.service';
import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { mockSpawn, newTestService } from 'test/utils';
import { describe, Mocked } from 'vitest';

describe(BackupService.name, () => {
  let sut: BackupService;

  let databaseMock: Mocked<IDatabaseRepository>;
  let jobMock: Mocked<IJobRepository>;
  let processMock: Mocked<IProcessRepository>;
  let storageMock: Mocked<IStorageRepository>;
  let systemMock: Mocked<ISystemMetadataRepository>;

  beforeEach(() => {
    ({ sut, databaseMock, jobMock, processMock, storageMock, systemMock } = newTestService(BackupService));
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('onBootstrapEvent', () => {
    it('should init cron job and handle config changes', async () => {
      databaseMock.tryLock.mockResolvedValue(true);
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);

      await sut.onBootstrap(ImmichWorker.API);

      expect(jobMock.addCronJob).toHaveBeenCalled();
      expect(systemMock.get).toHaveBeenCalled();
    });

    it('should not initialize backup database cron job when lock is taken', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      databaseMock.tryLock.mockResolvedValue(false);

      await sut.onBootstrap(ImmichWorker.API);

      expect(jobMock.addCronJob).not.toHaveBeenCalled();
    });

    it('should not initialise backup database job when running on microservices', async () => {
      await sut.onBootstrap(ImmichWorker.MICROSERVICES);

      expect(jobMock.addCronJob).not.toHaveBeenCalled();
    });
  });

  describe('onConfigUpdateEvent', () => {
    beforeEach(async () => {
      systemMock.get.mockResolvedValue(defaults);
      databaseMock.tryLock.mockResolvedValue(true);
      await sut.onBootstrap(ImmichWorker.API);
    });

    it('should update cron job if backup is enabled', () => {
      sut.onConfigUpdate({
        oldConfig: defaults,
        newConfig: {
          backup: {
            database: {
              enabled: true,
              cronExpression: '0 1 * * *',
            },
          },
        } as SystemConfig,
      });

      expect(jobMock.updateCronJob).toHaveBeenCalledWith('backupDatabase', '0 1 * * *', true);
      expect(jobMock.updateCronJob).toHaveBeenCalled();
    });

    it('should do nothing if oldConfig is not provided', () => {
      sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig });
      expect(jobMock.updateCronJob).not.toHaveBeenCalled();
    });

    it('should do nothing if instance does not have the backup database lock', async () => {
      databaseMock.tryLock.mockResolvedValue(false);
      await sut.onBootstrap(ImmichWorker.API);
      sut.onConfigUpdate({ newConfig: systemConfigStub.backupEnabled as SystemConfig, oldConfig: defaults });
      expect(jobMock.updateCronJob).not.toHaveBeenCalled();
    });
  });

  describe('onConfigValidateEvent', () => {
    it('should allow a valid cron expression', () => {
      expect(() =>
        sut.onConfigValidate({
          newConfig: { backup: { database: { cronExpression: '0 0 * * *' } } } as SystemConfig,
          oldConfig: {} as SystemConfig,
        }),
      ).not.toThrow(expect.stringContaining('Invalid cron expression'));
    });

    it('should fail for an invalid cron expression', () => {
      expect(() =>
        sut.onConfigValidate({
          newConfig: { backup: { database: { cronExpression: 'foo' } } } as SystemConfig,
          oldConfig: {} as SystemConfig,
        }),
      ).toThrow(/Invalid cron expression.*/);
    });
  });

  describe('cleanupDatabaseBackups', () => {
    it('should do nothing if not reached keepLastAmount', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz']);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).not.toHaveBeenCalled();
    });

    it('should remove failed backup files', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue([
        'immich-db-backup-123.sql.gz.tmp',
        'immich-db-backup-234.sql.gz',
        'immich-db-backup-345.sql.gz.tmp',
      ]);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(2);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-123.sql.gz.tmp`,
      );
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-345.sql.gz.tmp`,
      );
    });

    it('should remove old backup files over keepLastAmount', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz']);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(1);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz`,
      );
    });

    it('should remove old backup files over keepLastAmount and failed backups', async () => {
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.readdir.mockResolvedValue([
        'immich-db-backup-1.sql.gz.tmp',
        'immich-db-backup-2.sql.gz',
        'immich-db-backup-3.sql.gz',
      ]);
      await sut.cleanupDatabaseBackups();
      expect(storageMock.unlink).toHaveBeenCalledTimes(2);
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-1.sql.gz.tmp`,
      );
      expect(storageMock.unlink).toHaveBeenCalledWith(
        `${StorageCore.getBaseFolder(StorageFolder.BACKUPS)}/immich-db-backup-2.sql.gz`,
      );
    });
  });

  describe('handleBackupDatabase', () => {
    beforeEach(() => {
      storageMock.readdir.mockResolvedValue([]);
      processMock.spawn.mockReturnValue(mockSpawn(0, 'data', ''));
      storageMock.rename.mockResolvedValue();
      systemMock.get.mockResolvedValue(systemConfigStub.backupEnabled);
      storageMock.createWriteStream.mockReturnValue(new PassThrough());
    });
    it('should run a database backup successfully', async () => {
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.SUCCESS);
      expect(storageMock.createWriteStream).toHaveBeenCalled();
    });
    it('should rename file on success', async () => {
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.SUCCESS);
      expect(storageMock.rename).toHaveBeenCalled();
    });
    it('should fail if pg_dumpall fails', async () => {
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
    it('should not rename file if pgdump fails and gzip succeeds', async () => {
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
      expect(storageMock.rename).not.toHaveBeenCalled();
    });
    it('should fail if gzip fails', async () => {
      processMock.spawn.mockReturnValueOnce(mockSpawn(0, 'data', ''));
      processMock.spawn.mockReturnValueOnce(mockSpawn(1, '', 'error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
    it('should fail if write stream fails', async () => {
      storageMock.createWriteStream.mockImplementation(() => {
        throw new Error('error');
      });
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
    it('should fail if rename fails', async () => {
      storageMock.rename.mockRejectedValue(new Error('error'));
      const result = await sut.handleBackupDatabase();
      expect(result).toBe(JobStatus.FAILED);
    });
  });
});
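The retention rule these cleanup tests pin down is small enough to sketch standalone: keep the newest `keepLastAmount` completed backups, delete everything older, and always delete `.tmp` leftovers from failed runs. The function name here is mine; the real logic lives in `cleanupDatabaseBackups` in the service below.

```typescript
// Newest-first sort works because the filename embeds a Date.now() timestamp.
function selectBackupsToDelete(files: string[], keepLastAmount: number): string[] {
  const failed = files.filter((f) => /immich-db-backup-\d+\.sql\.gz\.tmp$/.test(f));
  const completed = files
    .filter((f) => /immich-db-backup-\d+\.sql\.gz$/.test(f))
    .sort()
    .reverse();
  return [...completed.slice(keepLastAmount), ...failed];
}

selectBackupsToDelete(['immich-db-backup-1.sql.gz', 'immich-db-backup-2.sql.gz'], 1);
// => ['immich-db-backup-1.sql.gz'] — matches the test expectations above
```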
157 server/src/services/backup.service.ts (new file)

@@ -0,0 +1,157 @@
import { Injectable } from '@nestjs/common';
import { default as path } from 'node:path';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent } from 'src/decorators';
import { ImmichWorker, StorageFolder } from 'src/enum';
import { DatabaseLock } from 'src/interfaces/database.interface';
import { ArgOf } from 'src/interfaces/event.interface';
import { JobName, JobStatus } from 'src/interfaces/job.interface';
import { BaseService } from 'src/services/base.service';
import { handlePromiseError } from 'src/utils/misc';
import { validateCronExpression } from 'src/validation';

@Injectable()
export class BackupService extends BaseService {
  private backupLock = false;

  @OnEvent({ name: 'app.bootstrap' })
  async onBootstrap(workerType: ImmichWorker) {
    if (workerType !== ImmichWorker.API) {
      return;
    }
    const {
      backup: { database },
    } = await this.getConfig({ withCache: true });

    this.backupLock = await this.databaseRepository.tryLock(DatabaseLock.BackupDatabase);

    if (this.backupLock) {
      this.jobRepository.addCronJob(
        'backupDatabase',
        database.cronExpression,
        () => handlePromiseError(this.jobRepository.queue({ name: JobName.BACKUP_DATABASE }), this.logger),
        database.enabled,
      );
    }
  }

  @OnEvent({ name: 'config.update', server: true })
  onConfigUpdate({ newConfig: { backup }, oldConfig }: ArgOf<'config.update'>) {
    if (!oldConfig || !this.backupLock) {
      return;
    }

    this.jobRepository.updateCronJob('backupDatabase', backup.database.cronExpression, backup.database.enabled);
  }

  @OnEvent({ name: 'config.validate' })
  onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
    const { database } = newConfig.backup;
    if (!validateCronExpression(database.cronExpression)) {
      throw new Error(`Invalid cron expression ${database.cronExpression}`);
    }
  }

  async cleanupDatabaseBackups() {
    this.logger.debug(`Database Backup Cleanup Started`);
    const {
      backup: { database: config },
    } = await this.getConfig({ withCache: false });

    const backupsFolder = StorageCore.getBaseFolder(StorageFolder.BACKUPS);
    const files = await this.storageRepository.readdir(backupsFolder);
    const failedBackups = files.filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz\.tmp$/));
    const backups = files
      .filter((file) => file.match(/immich-db-backup-\d+\.sql\.gz$/))
      .sort()
      .reverse();

    const toDelete = backups.slice(config.keepLastAmount);
    toDelete.push(...failedBackups);

    for (const file of toDelete) {
      await this.storageRepository.unlink(path.join(backupsFolder, file));
    }
    this.logger.debug(`Database Backup Cleanup Finished, deleted ${toDelete.length} backups`);
  }

  async handleBackupDatabase(): Promise<JobStatus> {
    this.logger.debug(`Database Backup Started`);

    const {
      database: { config },
    } = this.configRepository.getEnv();

    const isUrlConnection = config.connectionType === 'url';
    const databaseParams = isUrlConnection ? [config.url] : ['-U', config.username, '-h', config.host];
    const backupFilePath = path.join(
      StorageCore.getBaseFolder(StorageFolder.BACKUPS),
      `immich-db-backup-${Date.now()}.sql.gz.tmp`,
    );

    try {
      await new Promise<void>((resolve, reject) => {
        const pgdump = this.processRepository.spawn(`pg_dumpall`, [...databaseParams, '--clean', '--if-exists'], {
          env: { PATH: process.env.PATH, PGPASSWORD: isUrlConnection ? undefined : config.password },
        });

        const gzip = this.processRepository.spawn(`gzip`, []);
        pgdump.stdout.pipe(gzip.stdin);

        const fileStream = this.storageRepository.createWriteStream(backupFilePath);

        gzip.stdout.pipe(fileStream);

        pgdump.on('error', (err) => {
          this.logger.error('Backup failed with error', err);
          reject(err);
        });

        gzip.on('error', (err) => {
          this.logger.error('Gzip failed with error', err);
          reject(err);
        });

        let pgdumpLogs = '';
        let gzipLogs = '';

        pgdump.stderr.on('data', (data) => (pgdumpLogs += data));
        gzip.stderr.on('data', (data) => (gzipLogs += data));

        pgdump.on('exit', (code) => {
          if (code !== 0) {
            this.logger.error(`Backup failed with code ${code}`);
            reject(`Backup failed with code ${code}`);
            this.logger.error(pgdumpLogs);
            return;
          }
          if (pgdumpLogs) {
            this.logger.debug(`pgdump_all logs\n${pgdumpLogs}`);
          }
        });

        gzip.on('exit', (code) => {
          if (code !== 0) {
            this.logger.error(`Gzip failed with code ${code}`);
            reject(`Gzip failed with code ${code}`);
            this.logger.error(gzipLogs);
            return;
          }
          if (pgdump.exitCode !== 0) {
            this.logger.error(`Gzip exited with code 0 but pgdump exited with ${pgdump.exitCode}`);
            return;
          }
          resolve();
        });
      });
      await this.storageRepository.rename(backupFilePath, backupFilePath.replace('.tmp', ''));
    } catch (error) {
      this.logger.error('Database Backup Failure', error);
      return JobStatus.FAILED;
    }

    this.logger.debug(`Database Backup Success`);
    await this.cleanupDatabaseBackups();
    return JobStatus.SUCCESS;
  }
}
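Stripped of logging and dependency injection, `handleBackupDatabase` is a three-stage pipeline: `pg_dumpall | gzip > file.tmp`, with the final rename acting as a commit marker so a half-written file never masquerades as a finished backup. A standalone sketch of the same shape (paths and env handling simplified; assumes `pg_dumpall` and `gzip` are on PATH):

```typescript
import { spawn } from 'node:child_process';
import { createWriteStream } from 'node:fs';
import { rename } from 'node:fs/promises';

async function backupOnce(outPath: string): Promise<void> {
  const tmpPath = `${outPath}.tmp`;
  await new Promise<void>((resolve, reject) => {
    const pgdump = spawn('pg_dumpall', ['--clean', '--if-exists'], {
      env: { PATH: process.env.PATH, PGPASSWORD: process.env.PGPASSWORD },
    });
    const gzip = spawn('gzip', []);
    pgdump.stdout.pipe(gzip.stdin);
    gzip.stdout.pipe(createWriteStream(tmpPath));
    pgdump.on('error', reject);
    gzip.on('error', reject);
    pgdump.on('exit', (code) => {
      if (code !== 0) reject(new Error(`pg_dumpall exited with ${code}`));
    });
    gzip.on('exit', (code) => {
      if (code !== 0) return reject(new Error(`gzip exited with ${code}`));
      // gzip only sees EOF after pg_dumpall's stdout closes, so re-check the
      // producer's exit code before declaring success (as the service does).
      if (pgdump.exitCode !== 0) return reject(new Error(`pg_dumpall exited with ${pgdump.exitCode}`));
      resolve();
    });
  });
  await rename(tmpPath, outPath); // commit: only reached when both stages succeeded
}
```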
@@ -28,6 +28,7 @@ import { INotificationRepository } from 'src/interfaces/notification.interface';
import { IOAuthRepository } from 'src/interfaces/oauth.interface';
import { IPartnerRepository } from 'src/interfaces/partner.interface';
import { IPersonRepository } from 'src/interfaces/person.interface';
import { IProcessRepository } from 'src/interfaces/process.interface';
import { ISearchRepository } from 'src/interfaces/search.interface';
import { IServerInfoRepository } from 'src/interfaces/server-info.interface';
import { ISessionRepository } from 'src/interfaces/session.interface';
@@ -72,6 +73,7 @@ export class BaseService {
    @Inject(IOAuthRepository) protected oauthRepository: IOAuthRepository,
    @Inject(IPartnerRepository) protected partnerRepository: IPartnerRepository,
    @Inject(IPersonRepository) protected personRepository: IPersonRepository,
    @Inject(IProcessRepository) protected processRepository: IProcessRepository,
    @Inject(ISearchRepository) protected searchRepository: ISearchRepository,
    @Inject(IServerInfoRepository) protected serverInfoRepository: IServerInfoRepository,
    @Inject(ISessionRepository) protected sessionRepository: ISessionRepository,
@@ -6,6 +6,7 @@ import { AssetMediaService } from 'src/services/asset-media.service';
import { AssetService } from 'src/services/asset.service';
import { AuditService } from 'src/services/audit.service';
import { AuthService } from 'src/services/auth.service';
import { BackupService } from 'src/services/backup.service';
import { CliService } from 'src/services/cli.service';
import { DatabaseService } from 'src/services/database.service';
import { DownloadService } from 'src/services/download.service';
@@ -48,6 +49,7 @@ export const services = [
  AssetService,
  AuditService,
  AuthService,
  BackupService,
  CliService,
  DatabaseService,
  DownloadService,
@@ -44,7 +44,7 @@ describe(JobService.name, () => {
    sut.onBootstrap(ImmichWorker.MICROSERVICES);
    sut.onConfigUpdate({ oldConfig: defaults, newConfig: defaults });

    expect(jobMock.setConcurrency).toHaveBeenCalledTimes(14);
    expect(jobMock.setConcurrency).toHaveBeenCalledTimes(15);
    expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
    expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
    expect(jobMock.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
@@ -114,6 +114,7 @@ describe(JobService.name, () => {
        [QueueName.SIDECAR]: expectedJobStatus,
        [QueueName.LIBRARY]: expectedJobStatus,
        [QueueName.NOTIFICATION]: expectedJobStatus,
        [QueueName.BACKUP_DATABASE]: expectedJobStatus,
      });
    });
  });
|
|||
QueueName.FACIAL_RECOGNITION,
|
||||
QueueName.STORAGE_TEMPLATE_MIGRATION,
|
||||
QueueName.DUPLICATE_DETECTION,
|
||||
QueueName.BACKUP_DATABASE,
|
||||
].includes(name);
|
||||
}
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@ import { ArgOf } from 'src/interfaces/event.interface';
|
|||
import { IDeleteFilesJob, JobName } from 'src/interfaces/job.interface';
|
||||
import { AssetService } from 'src/services/asset.service';
|
||||
import { AuditService } from 'src/services/audit.service';
|
||||
import { BackupService } from 'src/services/backup.service';
|
||||
import { DuplicateService } from 'src/services/duplicate.service';
|
||||
import { JobService } from 'src/services/job.service';
|
||||
import { LibraryService } from 'src/services/library.service';
|
||||
|
@ -26,6 +27,7 @@ export class MicroservicesService {
|
|||
constructor(
|
||||
private auditService: AuditService,
|
||||
private assetService: AssetService,
|
||||
private backupService: BackupService,
|
||||
private jobService: JobService,
|
||||
private libraryService: LibraryService,
|
||||
private mediaService: MediaService,
|
||||
|
@ -52,6 +54,7 @@ export class MicroservicesService {
|
|||
await this.jobService.init({
|
||||
[JobName.ASSET_DELETION]: (data) => this.assetService.handleAssetDeletion(data),
|
||||
[JobName.ASSET_DELETION_CHECK]: () => this.assetService.handleAssetDeletionCheck(),
|
||||
[JobName.BACKUP_DATABASE]: () => this.backupService.handleBackupDatabase(),
|
||||
[JobName.DELETE_FILES]: (data: IDeleteFilesJob) => this.storageService.handleDeleteFiles(data),
|
||||
[JobName.CLEAN_OLD_AUDIT_LOGS]: () => this.auditService.handleCleanup(),
|
||||
[JobName.CLEAN_OLD_SESSION_TOKENS]: () => this.sessionService.handleCleanup(),
|
||||
|
|
|
@ -32,6 +32,7 @@ describe(StorageService.name, () => {
|
|||
|
||||
expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
|
||||
mountChecks: {
|
||||
backups: true,
|
||||
'encoded-video': true,
|
||||
library: true,
|
||||
profile: true,
|
||||
|
@@ -44,16 +45,19 @@ describe(StorageService.name, () => {
     expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/profile');
     expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/thumbs');
     expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/upload');
+    expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/backups');
     expect(storageMock.createFile).toHaveBeenCalledWith('upload/encoded-video/.immich', expect.any(Buffer));
     expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
     expect(storageMock.createFile).toHaveBeenCalledWith('upload/profile/.immich', expect.any(Buffer));
     expect(storageMock.createFile).toHaveBeenCalledWith('upload/thumbs/.immich', expect.any(Buffer));
     expect(storageMock.createFile).toHaveBeenCalledWith('upload/upload/.immich', expect.any(Buffer));
+    expect(storageMock.createFile).toHaveBeenCalledWith('upload/backups/.immich', expect.any(Buffer));
   });

   it('should enable mount folder checking for a new folder type', async () => {
     systemMock.get.mockResolvedValue({
       mountChecks: {
+        backups: false,
         'encoded-video': true,
         library: false,
         profile: true,
@@ -66,6 +70,7 @@ describe(StorageService.name, () => {

     expect(systemMock.set).toHaveBeenCalledWith(SystemMetadataKey.SYSTEM_FLAGS, {
       mountChecks: {
+        backups: true,
         'encoded-video': true,
         library: true,
         profile: true,
@@ -73,10 +78,12 @@ describe(StorageService.name, () => {
         upload: true,
       },
     });
-    expect(storageMock.mkdirSync).toHaveBeenCalledTimes(1);
+    expect(storageMock.mkdirSync).toHaveBeenCalledTimes(2);
     expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/library');
-    expect(storageMock.createFile).toHaveBeenCalledTimes(1);
+    expect(storageMock.mkdirSync).toHaveBeenCalledWith('upload/backups');
+    expect(storageMock.createFile).toHaveBeenCalledTimes(2);
     expect(storageMock.createFile).toHaveBeenCalledWith('upload/library/.immich', expect.any(Buffer));
+    expect(storageMock.createFile).toHaveBeenCalledWith('upload/backups/.immich', expect.any(Buffer));
   });

   it('should throw an error if .immich is missing', async () => {

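These storage tests revolve around Immich's mount check: each media subfolder, now including backups, gets a `.immich` marker file on first boot, and a missing marker on a later boot signals that the volume failed to mount. A minimal sketch of the idea, assuming hypothetical helper and folder names (not the service's actual code):

import { existsSync, mkdirSync, writeFileSync } from 'node:fs';
import { join } from 'node:path';

// Hypothetical helper: create the folder and its .immich marker on first
// run; on later runs, a missing marker means the volume is not mounted.
function checkMount(base: string, folder: string, firstRun: boolean): void {
  const dir = join(base, folder);
  const marker = join(dir, '.immich');
  if (firstRun) {
    mkdirSync(dir, { recursive: true });
    writeFileSync(marker, '');
    return;
  }
  if (!existsSync(marker)) {
    throw new Error(`Failed to read ${marker}: is the volume mounted?`);
  }
}

checkMount('upload', 'backups', true); // first boot: creates upload/backups/.immich
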
@@ -44,6 +44,13 @@ const updatedConfig = Object.freeze<SystemConfig>({
     [QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
     [QueueName.NOTIFICATION]: { concurrency: 5 },
   },
+  backup: {
+    database: {
+      enabled: true,
+      cronExpression: '0 02 * * *',
+      keepLastAmount: 14,
+    },
+  },
   ffmpeg: {
     crf: 30,
     threads: 0,

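The default `0 02 * * *` is standard five-field cron (minute, hour, day of month, month, day of week): minute 0, hour 2, every day, which matches the documented 2:00 AM daily schedule, with the 14 newest dumps retained. A quick sketch of what such a retention rule could look like (the helper and filenames are illustrative, not the backup service's actual code):

// Hypothetical pruning helper: given backup filenames that sort
// chronologically, return the ones to delete so only the newest
// `keepLastAmount` remain.
function backupsToPrune(files: string[], keepLastAmount: number): string[] {
  const sorted = [...files].sort(); // oldest first for timestamped names
  return sorted.slice(0, Math.max(sorted.length - keepLastAmount, 0));
}

const prune = backupsToPrune(
  ['db-backup-2024-11-01.sql.gz', 'db-backup-2024-11-02.sql.gz', 'db-backup-2024-11-03.sql.gz'],
  2,
);
console.log(prune); // ['db-backup-2024-11-01.sql.gz']
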
server/test/fixtures/system-config.stub.ts (vendored, 9 changes)

@@ -74,6 +74,15 @@ export const systemConfigStub = {
       },
     },
   },
+  backupEnabled: {
+    backup: {
+      database: {
+        enabled: true,
+        cronExpression: '0 0 * * *',
+        keepLastAmount: 1,
+      },
+    },
+  },
   machineLearningDisabled: {
     machineLearning: {
       enabled: false,

server/test/repositories/process.repository.mock.ts (normal file, 8 changes)

@@ -0,0 +1,8 @@
+import { IProcessRepository } from 'src/interfaces/process.interface';
+import { Mocked, vitest } from 'vitest';
+
+export const newProcessRepositoryMock = (): Mocked<IProcessRepository> => {
+  return {
+    spawn: vitest.fn(),
+  };
+};

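In a service spec this mock stands in for the real process repository, so no actual dump process is spawned. A hypothetical usage, pairing it with the mockSpawn helper added later in this diff (the assertion shown is illustrative, not an actual test from the commit):

// Hypothetical spec usage: no real child process is spawned.
const processMock = newProcessRepositoryMock();
processMock.spawn.mockReturnValue(mockSpawn(0, 'dump output', ''));

// ...inject processMock into the service under test, run the job, then:
// expect(processMock.spawn).toHaveBeenCalled();
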
@@ -49,6 +49,7 @@ export const newStorageRepositoryMock = (reset = true): Mocked<IStorageRepository> => {
     createReadStream: vitest.fn(),
     readFile: vitest.fn(),
     createFile: vitest.fn(),
+    createWriteStream: vitest.fn(),
     createOrOverwriteFile: vitest.fn(),
     overwriteFile: vitest.fn(),
     unlink: vitest.fn(),

@@ -1,3 +1,5 @@
+import { ChildProcessWithoutNullStreams } from 'node:child_process';
+import { Writable } from 'node:stream';
 import { PNG } from 'pngjs';
 import { IMetadataRepository } from 'src/interfaces/metadata.interface';
 import { BaseService } from 'src/services/base.service';
@@ -25,6 +27,7 @@ import { newNotificationRepositoryMock } from 'test/repositories/notification.repository.mock';
 import { newOAuthRepositoryMock } from 'test/repositories/oauth.repository.mock';
 import { newPartnerRepositoryMock } from 'test/repositories/partner.repository.mock';
 import { newPersonRepositoryMock } from 'test/repositories/person.repository.mock';
+import { newProcessRepositoryMock } from 'test/repositories/process.repository.mock';
 import { newSearchRepositoryMock } from 'test/repositories/search.repository.mock';
 import { newServerInfoRepositoryMock } from 'test/repositories/server-info.repository.mock';
 import { newSessionRepositoryMock } from 'test/repositories/session.repository.mock';
@@ -38,7 +41,8 @@ import { newTrashRepositoryMock } from 'test/repositories/trash.repository.mock';
 import { newUserRepositoryMock } from 'test/repositories/user.repository.mock';
 import { newVersionHistoryRepositoryMock } from 'test/repositories/version-history.repository.mock';
 import { newViewRepositoryMock } from 'test/repositories/view.repository.mock';
-import { Mocked } from 'vitest';
+import { Readable } from 'typeorm/platform/PlatformTools';
+import { Mocked, vitest } from 'vitest';

 type RepositoryOverrides = {
   metadataRepository: IMetadataRepository;
@@ -78,6 +82,7 @@ export const newTestService = <T extends BaseService>(
   const oauthMock = newOAuthRepositoryMock();
   const partnerMock = newPartnerRepositoryMock();
   const personMock = newPersonRepositoryMock();
+  const processMock = newProcessRepositoryMock();
   const searchMock = newSearchRepositoryMock();
   const serverInfoMock = newServerInfoRepositoryMock();
   const sessionMock = newSessionRepositoryMock();
@@ -117,6 +122,7 @@ export const newTestService = <T extends BaseService>(
     oauthMock,
     partnerMock,
     personMock,
+    processMock,
     searchMock,
     serverInfoMock,
     sessionMock,
@@ -158,6 +164,7 @@ export const newTestService = <T extends BaseService>(
     oauthMock,
     partnerMock,
     personMock,
+    processMock,
     searchMock,
     serverInfoMock,
     sessionMock,
@@ -203,3 +210,37 @@ export const newRandomImage = () => {

   return value;
 };
+
+export const mockSpawn = vitest.fn((exitCode: number, stdout: string, stderr: string, error?: unknown) => {
+  return {
+    stdout: new Readable({
+      read() {
+        this.push(stdout); // write mock data to stdout
+        this.push(null); // end stream
+      },
+    }),
+    stderr: new Readable({
+      read() {
+        this.push(stderr); // write mock data to stderr
+        this.push(null); // end stream
+      },
+    }),
+    stdin: new Writable({
+      write(chunk, encoding, callback) {
+        callback();
+      },
+    }),
+    exitCode,
+    on: vitest.fn((event, callback: any) => {
+      if (event === 'close') {
+        callback(0);
+      }
+      if (event === 'error' && error) {
+        callback(error);
+      }
+      if (event === 'exit') {
+        callback(exitCode);
+      }
+    }),
+  } as unknown as ChildProcessWithoutNullStreams;
+});

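mockSpawn fakes a ChildProcessWithoutNullStreams end to end: stdout and stderr are real Readable streams that emit the supplied strings once and then end, stdin silently accepts writes, and `on` fires its close/exit callbacks immediately (and the error callback when an error is supplied). A hypothetical consumer, for illustration only:

import { text } from 'node:stream/consumers';

const child = mockSpawn(0, 'backup complete', '');

child.on('exit', (code) => console.log('exited with', code)); // fires synchronously: exited with 0
void text(child.stdout).then((out) => console.log(out)); // 'backup complete'
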
@@ -0,0 +1,91 @@
+<script lang="ts">
+  import type { SystemConfigDto } from '@immich/sdk';
+  import { isEqual } from 'lodash-es';
+  import { fade } from 'svelte/transition';
+  import type { SettingsResetEvent, SettingsSaveEvent } from '../admin-settings';
+  import SettingInputField, {
+    SettingInputFieldType,
+  } from '$lib/components/shared-components/settings/setting-input-field.svelte';
+  import SettingSwitch from '$lib/components/shared-components/settings/setting-switch.svelte';
+  import SettingButtonsRow from '$lib/components/shared-components/settings/setting-buttons-row.svelte';
+  import SettingSelect from '$lib/components/shared-components/settings/setting-select.svelte';
+  import { t } from 'svelte-i18n';
+  import FormatMessage from '$lib/components/i18n/format-message.svelte';
+
+  export let savedConfig: SystemConfigDto;
+  export let defaultConfig: SystemConfigDto;
+  export let config: SystemConfigDto; // this is the config that is being edited
+  export let disabled = false;
+  export let onReset: SettingsResetEvent;
+  export let onSave: SettingsSaveEvent;
+
+  $: cronExpressionOptions = [
+    { text: $t('interval.night_at_midnight'), value: '0 0 * * *' },
+    { text: $t('interval.night_at_twoam'), value: '0 02 * * *' },
+    { text: $t('interval.day_at_onepm'), value: '0 13 * * *' },
+    { text: $t('interval.hours', { values: { hours: 6 } }), value: '0 */6 * * *' },
+  ];
+</script>
+
+<div>
+  <div in:fade={{ duration: 500 }}>
+    <form autocomplete="off" on:submit|preventDefault>
+      <div class="ml-4 mt-4 flex flex-col gap-4">
+        <SettingSwitch
+          title={$t('admin.backup_database_enable_description')}
+          {disabled}
+          bind:checked={config.backup.database.enabled}
+        />
+
+        <SettingSelect
+          options={cronExpressionOptions}
+          disabled={disabled || !config.backup.database.enabled}
+          name="expression"
+          label={$t('admin.cron_expression_presets')}
+          bind:value={config.backup.database.cronExpression}
+        />
+
+        <SettingInputField
+          inputType={SettingInputFieldType.TEXT}
+          required={true}
+          disabled={disabled || !config.backup.database.enabled}
+          label={$t('admin.cron_expression')}
+          bind:value={config.backup.database.cronExpression}
+          isEdited={config.backup.database.cronExpression !== savedConfig.backup.database.cronExpression}
+        >
+          <svelte:fragment slot="desc">
+            <p class="text-sm dark:text-immich-dark-fg">
+              <FormatMessage key="admin.cron_expression_description" let:message>
+                <a
+                  href="https://crontab.guru/#{config.backup.database.cronExpression.replaceAll(' ', '_')}"
+                  class="underline"
+                  target="_blank"
+                  rel="noreferrer"
+                >
+                  {message}
+                  <br />
+                </a>
+              </FormatMessage>
+            </p>
+          </svelte:fragment>
+        </SettingInputField>
+
+        <SettingInputField
+          inputType={SettingInputFieldType.NUMBER}
+          required={true}
+          label={$t('admin.backup_keep_last_amount')}
+          disabled={disabled || !config.backup.database.enabled}
+          bind:value={config.backup.database.keepLastAmount}
+          isEdited={config.backup.database.keepLastAmount !== savedConfig.backup.database.keepLastAmount}
+        />
+
+        <SettingButtonsRow
+          onReset={(options) => onReset({ ...options, configKeys: ['backup'] })}
+          onSave={() => onSave({ backup: config.backup })}
+          showResetToDefault={!isEqual(savedConfig.backup, defaultConfig.backup)}
+          {disabled}
+        />
+      </div>
+    </form>
+  </div>
+</div>

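One small detail worth noting in the component above: the help link embeds the current expression directly in the crontab.guru URL, swapping spaces for underscores as that site's fragment syntax expects. In plain TypeScript the same construction looks like:

// Builds the same crontab.guru deep link as the component above.
const cronExpression = '0 02 * * *';
const url = `https://crontab.guru/#${cronExpression.replaceAll(' ', '_')}`;
console.log(url); // https://crontab.guru/#0_02_*_*_*
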
@@ -65,7 +65,7 @@
           class="font-medium text-immich-primary dark:text-immich-dark-primary text-sm"
           for="expression-select"
         >
-          {$t('admin.library_cron_expression_presets')}
+          {$t('admin.cron_expression_presets')}
         </label>
         <select
           class="p-2 mt-2 text-sm rounded-lg bg-slate-200 hover:cursor-pointer dark:bg-gray-600"
@@ -84,13 +84,13 @@
       inputType={SettingInputFieldType.TEXT}
       required={true}
       disabled={disabled || !config.library.scan.enabled}
-      label={$t('admin.library_cron_expression')}
+      label={$t('admin.cron_expression')}
       bind:value={config.library.scan.cronExpression}
       isEdited={config.library.scan.cronExpression !== savedConfig.library.scan.cronExpression}
     >
       <svelte:fragment slot="desc">
         <p class="text-sm dark:text-immich-dark-fg">
-          <FormatMessage key="admin.library_cron_expression_description" let:message>
+          <FormatMessage key="admin.cron_expression_description" let:message>
             <a
               href="https://crontab.guru/#{config.library.scan.cronExpression.replaceAll(' ', '_')}"
               class="underline"

@@ -148,6 +148,7 @@ export const getJobName = derived(t, ($t) => {
     [JobName.Search]: $t('search'),
     [JobName.Library]: $t('library'),
     [JobName.Notifications]: $t('notifications'),
+    [JobName.BackupDatabase]: $t('admin.backup_database'),
   };

   return names[jobName];

@@ -1,6 +1,7 @@
 <script lang="ts">
   import AdminSettings from '$lib/components/admin-page/settings/admin-settings.svelte';
   import AuthSettings from '$lib/components/admin-page/settings/auth/auth-settings.svelte';
+  import BackupSettings from '$lib/components/admin-page/settings/backup-settings/backup-settings.svelte';
   import FFmpegSettings from '$lib/components/admin-page/settings/ffmpeg/ffmpeg-settings.svelte';
   import ImageSettings from '$lib/components/admin-page/settings/image/image-settings.svelte';
   import JobSettings from '$lib/components/admin-page/settings/job-settings/job-settings.svelte';
@@ -30,6 +31,7 @@
   import {
     mdiAccountOutline,
     mdiAlert,
+    mdiBackupRestore,
     mdiBellOutline,
     mdiBookshelf,
     mdiContentCopy,
@@ -99,6 +101,13 @@
       key: 'authentication',
       icon: mdiLockOutline,
     },
+    {
+      component: BackupSettings,
+      title: $t('admin.backup_settings'),
+      subtitle: $t('admin.backup_settings_description'),
+      key: 'backup',
+      icon: mdiBackupRestore,
+    },
     {
       component: ImageSettings,
       title: $t('admin.image_settings'),