import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { Trie } from 'mnemonist';
import { R_OK } from 'node:constants';
import { Stats } from 'node:fs';
import path, { basename, parse } from 'node:path';
import picomatch from 'picomatch';
import { StorageCore } from 'src/cores/storage.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { OnServerEvent } from 'src/decorators';
import {
  CreateLibraryDto,
  LibraryResponseDto,
  LibraryStatsResponseDto,
  ScanLibraryDto,
  UpdateLibraryDto,
  ValidateLibraryDto,
  ValidateLibraryImportPathResponseDto,
  ValidateLibraryResponseDto,
  mapLibrary,
} from 'src/dtos/library.dto';
import { AssetType } from 'src/entities/asset.entity';
import { LibraryEntity } from 'src/entities/library.entity';
import { IAssetRepository, WithProperty } from 'src/interfaces/asset.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { DatabaseLock, IDatabaseRepository } from 'src/interfaces/database.interface';
import { ServerAsyncEvent, ServerAsyncEventMap } from 'src/interfaces/event.interface';
import {
  IBaseJob,
  IEntityJob,
  IJobRepository,
  ILibraryFileJob,
  ILibraryRefreshJob,
  JOBS_ASSET_PAGINATION_SIZE,
  JobName,
  JobStatus,
} from 'src/interfaces/job.interface';
import { ILibraryRepository } from 'src/interfaces/library.interface';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { IStorageRepository } from 'src/interfaces/storage.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
import { mimeTypes } from 'src/utils/mime-types';
import { handlePromiseError } from 'src/utils/misc';
import { usePagination } from 'src/utils/pagination';
import { validateCronExpression } from 'src/validation';

const LIBRARY_SCAN_BATCH_SIZE = 5000;

@Injectable()
export class LibraryService {
  private configCore: SystemConfigCore;
  private watchLibraries = false;
  private watchLock = false;
  private watchers: Record<string, () => Promise<void>> = {};

  constructor(
    @Inject(IAssetRepository) private assetRepository: IAssetRepository,
    @Inject(ISystemMetadataRepository) systemMetadataRepository: ISystemMetadataRepository,
    @Inject(ICryptoRepository) private cryptoRepository: ICryptoRepository,
    @Inject(IJobRepository) private jobRepository: IJobRepository,
    @Inject(ILibraryRepository) private repository: ILibraryRepository,
    @Inject(IStorageRepository) private storageRepository: IStorageRepository,
    @Inject(IDatabaseRepository) private databaseRepository: IDatabaseRepository,
    @Inject(ILoggerRepository) private logger: ILoggerRepository,
  ) {
    this.logger.setContext(LibraryService.name);
    this.configCore = SystemConfigCore.create(systemMetadataRepository, this.logger);
  }
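
  // Called at service startup: acquires the cross-instance watch lock, registers the
  // library scan cron job, starts filesystem watchers when enabled, and re-applies the
  // watch and cron settings whenever the system configuration changes.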
  async init() {
    const config = await this.configCore.getConfig();

    const { watch, scan } = config.library;

    // This ensures that library watching only occurs in one microservice
    // TODO: we could make the lock be per-library instead of global
    this.watchLock = await this.databaseRepository.tryLock(DatabaseLock.LibraryWatch);

    this.watchLibraries = this.watchLock && watch.enabled;

    this.jobRepository.addCronJob(
      'libraryScan',
      scan.cronExpression,
      () =>
        handlePromiseError(
          this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: { force: false } }),
          this.logger,
        ),
      scan.enabled,
    );

    if (this.watchLibraries) {
      await this.watchAll();
    }

    this.configCore.config$.subscribe(({ library }) => {
      this.jobRepository.updateCronJob('libraryScan', library.scan.cronExpression, library.scan.enabled);

      if (library.watch.enabled !== this.watchLibraries) {
        // Watch configuration changed, update accordingly
        this.watchLibraries = library.watch.enabled;
        handlePromiseError(this.watchLibraries ? this.watchAll() : this.unwatchAll(), this.logger);
      }
    });
  }

  @OnServerEvent(ServerAsyncEvent.CONFIG_VALIDATE)
  onValidateConfig({ newConfig }: ServerAsyncEventMap[ServerAsyncEvent.CONFIG_VALIDATE]) {
    const { scan } = newConfig.library;
    if (!validateCronExpression(scan.cronExpression)) {
      throw new Error(`Invalid cron expression ${scan.cronExpression}`);
    }
  }
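
  // Starts a filesystem watcher for one library. Returns false when watching is
  // disabled or the library has no import paths. Events are matched against the
  // supported media extensions (case-insensitive) and the library's exclusion patterns.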
  private async watch(id: string): Promise<boolean> {
    if (!this.watchLibraries) {
      return false;
    }

    const library = await this.findOrFail(id);
    if (library.importPaths.length === 0) {
      return false;
    }

    await this.unwatch(id);

    this.logger.log(`Starting to watch library ${library.id} with import path(s) ${library.importPaths}`);

    const matcher = picomatch(`**/*{${mimeTypes.getSupportedFileExtensions().join(',')}}`, {
      nocase: true,
      ignore: library.exclusionPatterns,
    });

    let _resolve: () => void;
    const ready$ = new Promise<void>((resolve) => (_resolve = resolve));

    this.watchers[id] = this.storageRepository.watch(
      library.importPaths,
      {
        usePolling: false,
        ignoreInitial: true,
      },
      {
        onReady: () => _resolve(),
        onAdd: (path) => {
          const handler = async () => {
            this.logger.debug(`File add event received for ${path} in library ${library.id}`);
            if (matcher(path)) {
              await this.scanAssets(library.id, [path], library.ownerId, false);
            }
          };
          return handlePromiseError(handler(), this.logger);
        },
        onChange: (path) => {
          const handler = async () => {
            this.logger.debug(`Detected file change for ${path} in library ${library.id}`);
            if (matcher(path)) {
              // Note: if the changed file was not previously imported, it will be imported now.
              await this.scanAssets(library.id, [path], library.ownerId, false);
            }
          };
          return handlePromiseError(handler(), this.logger);
        },
        onUnlink: (path) => {
          const handler = async () => {
            this.logger.debug(`Detected deleted file at ${path} in library ${library.id}`);
            const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
            if (asset && matcher(path)) {
              await this.assetRepository.update({ id: asset.id, isOffline: true });
            }
          };
          return handlePromiseError(handler(), this.logger);
        },
        onError: (error) => {
          this.logger.error(`Library watcher for library ${library.id} encountered error: ${error}`);
        },
      },
    );

    // Wait for the watcher to initialize before returning
    await ready$;

    return true;
  }
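
  // Stops the watcher for the given library, if one is active. Each entry in
  // `this.watchers` is the close function returned by the storage repository.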
  async unwatch(id: string) {
    if (this.watchers[id]) {
      await this.watchers[id]();
      delete this.watchers[id];
    }
  }

  async teardown() {
    await this.unwatchAll();
  }

  private async unwatchAll() {
    if (!this.watchLock) {
      return false;
    }

    for (const id in this.watchers) {
      await this.unwatch(id);
    }
  }
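
  // Starts watchers for every non-deleted library. Only runs on the instance
  // holding the watch lock.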
  async watchAll() {
    if (!this.watchLock) {
      return false;
    }

    const libraries = await this.repository.getAll(false);
    for (const library of libraries) {
      await this.watch(library.id);
    }
  }

  async getStatistics(id: string): Promise<LibraryStatsResponseDto> {
    await this.findOrFail(id);
    return this.repository.getStatistics(id);
  }

  async get(id: string): Promise<LibraryResponseDto> {
    const library = await this.findOrFail(id);
    return mapLibrary(library);
  }

  async getAll(): Promise<LibraryResponseDto[]> {
    const libraries = await this.repository.getAll(false);
    return libraries.map((library) => mapLibrary(library));
  }
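
  // Job handler: re-queues deletion jobs for any libraries that are soft-deleted
  // but still present in the database.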
  async handleQueueCleanup(): Promise<JobStatus> {
    this.logger.debug('Cleaning up any pending library deletions');
    const pendingDeletion = await this.repository.getAllDeleted();
    await this.jobRepository.queueAll(
      pendingDeletion.map((libraryToDelete) => ({ name: JobName.LIBRARY_DELETE, data: { id: libraryToDelete.id } })),
    );
    return JobStatus.SUCCESS;
  }

  async create(dto: CreateLibraryDto): Promise<LibraryResponseDto> {
    const library = await this.repository.create({
      ownerId: dto.ownerId,
      name: dto.name ?? 'New External Library',
      importPaths: dto.importPaths ?? [],
      exclusionPatterns: dto.exclusionPatterns ?? [],
    });
    return mapLibrary(library);
  }
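
  // Queues a LIBRARY_SCAN_ASSET job for each of the given paths, on behalf of the
  // library's owner. With force, assets are re-read even if unchanged.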
  private async scanAssets(libraryId: string, assetPaths: string[], ownerId: string, force = false) {
    this.logger.verbose(`Queuing refresh of ${assetPaths.length} asset(s)`);

    // We perform this in batches to save on memory when performing large refreshes (greater than 1M assets)
    const batchSize = 5000;
    for (let i = 0; i < assetPaths.length; i += batchSize) {
      const batch = assetPaths.slice(i, i + batchSize);
      await this.jobRepository.queueAll(
        batch.map((assetPath) => ({
          name: JobName.LIBRARY_SCAN_ASSET,
          data: {
            id: libraryId,
            assetPath: assetPath,
            ownerId,
            force,
          },
        })),
      );
    }

    this.logger.debug('Asset refresh queue completed');
  }
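
  // Checks that an import path is usable: not inside the Immich upload folder, an
  // existing directory, and readable by the server. On failure, the returned DTO
  // carries a human-readable message and isValid stays false.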
  private async validateImportPath(importPath: string): Promise<ValidateLibraryImportPathResponseDto> {
    const validation = new ValidateLibraryImportPathResponseDto();
    validation.importPath = importPath;

    if (StorageCore.isImmichPath(importPath)) {
      validation.message = 'Cannot use media upload folder for external libraries';
      return validation;
    }

    try {
      const stat = await this.storageRepository.stat(importPath);
      if (!stat.isDirectory()) {
        validation.message = 'Not a directory';
        return validation;
      }
    } catch (error: any) {
      if (error.code === 'ENOENT') {
        validation.message = 'Path does not exist (ENOENT)';
        return validation;
      }
      validation.message = String(error);
      return validation;
    }

    const access = await this.storageRepository.checkFileExists(importPath, R_OK);

    if (!access) {
      validation.message = 'Lacking read permission for folder';
      return validation;
    }

    validation.isValid = true;
    return validation;
  }

  async validate(id: string, dto: ValidateLibraryDto): Promise<ValidateLibraryResponseDto> {
    const importPaths = await Promise.all(
      (dto.importPaths || []).map((importPath) => this.validateImportPath(importPath)),
    );
    return { importPaths };
  }

  async update(id: string, dto: UpdateLibraryDto): Promise<LibraryResponseDto> {
    await this.findOrFail(id);
    const library = await this.repository.update({ id, ...dto });

    if (dto.importPaths) {
      const validation = await this.validate(id, { importPaths: dto.importPaths });
      if (validation.importPaths) {
        for (const path of validation.importPaths) {
          if (!path.isValid) {
            throw new BadRequestException(`Invalid import path: ${path.message}`);
          }
        }
      }
    }

    return mapLibrary(library);
  }

  async delete(id: string) {
    await this.findOrFail(id);

    if (this.watchLibraries) {
      await this.unwatch(id);
    }

    await this.repository.softDelete(id);
    await this.jobRepository.queue({ name: JobName.LIBRARY_DELETE, data: { id } });
  }
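
  // Job handler: queues deletion of every asset in the library, then deletes the
  // library record itself once no assets remain.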
  async handleDeleteLibrary(job: IEntityJob): Promise<JobStatus> {
    const library = await this.repository.get(job.id, true);
    if (!library) {
      return JobStatus.FAILED;
    }

    // TODO use pagination
    const assetIds = await this.repository.getAssetIds(job.id, true);
    this.logger.debug(`Will delete ${assetIds.length} asset(s) in library ${job.id}`);
    await this.jobRepository.queueAll(
      assetIds.map((assetId) => ({ name: JobName.ASSET_DELETION, data: { id: assetId } })),
    );

    if (assetIds.length === 0) {
      this.logger.log(`Deleting library ${job.id}`);
      await this.repository.delete(job.id);
    }
    return JobStatus.SUCCESS;
  }
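
  // Job handler for a single file: imports a new asset, refreshes an existing one
  // (changed mtime, renamed file, or forced refresh), or skips it, and toggles the
  // offline flag based on whether the file can still be accessed on disk.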
  async handleAssetRefresh(job: ILibraryFileJob): Promise<JobStatus> {
    const assetPath = path.normalize(job.assetPath);

    const existingAssetEntity = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath);

    let stats: Stats;
    try {
      stats = await this.storageRepository.stat(assetPath);
    } catch (error: any) {
      // Can't access file, probably offline
      if (existingAssetEntity) {
        // Mark asset as offline
        this.logger.debug(`Marking asset as offline: ${assetPath}`);

        await this.assetRepository.update({ id: existingAssetEntity.id, isOffline: true });
        return JobStatus.SUCCESS;
      } else {
        // File can't be accessed and does not already exist in db
        throw new BadRequestException('Cannot access file', { cause: error });
      }
    }

    let doImport = false;
    let doRefresh = false;

    if (job.force) {
      doRefresh = true;
    }

    const originalFileName = parse(assetPath).base;

    if (!existingAssetEntity) {
      // This asset is new to us, read it from disk
      this.logger.debug(`Importing new asset: ${assetPath}`);
      doImport = true;
    } else if (stats.mtime.toISOString() !== existingAssetEntity.fileModifiedAt.toISOString()) {
      // File modification time has changed since last time we checked, re-read from disk
      this.logger.debug(
        `File modification time has changed, re-importing asset: ${assetPath}. Old mtime: ${existingAssetEntity.fileModifiedAt}. New mtime: ${stats.mtime}`,
      );
      doRefresh = true;
    } else if (existingAssetEntity.originalFileName !== originalFileName) {
      // TODO: We can likely remove this check in the second half of 2024 when all assets have likely been re-imported by all users
      this.logger.debug(
        `Asset is missing file extension, re-importing: ${assetPath}. Current incorrect filename: ${existingAssetEntity.originalFileName}.`,
      );
      doRefresh = true;
    } else if (!job.force && stats && !existingAssetEntity.isOffline) {
      // Asset exists on disk and in db and mtime has not changed. Also, we are not forcing a refresh. Therefore, do nothing
      this.logger.debug(`Asset already exists in database and on disk, will not import: ${assetPath}`);
    }

    if (stats && existingAssetEntity?.isOffline) {
      // File was previously offline but is now online
      this.logger.debug(`Marking previously-offline asset as online: ${assetPath}`);
      await this.assetRepository.update({ id: existingAssetEntity.id, isOffline: false });
      doRefresh = true;
    }

    if (!doImport && !doRefresh) {
      // If we don't import, exit here
      return JobStatus.SKIPPED;
    }

    let assetType: AssetType;

    if (mimeTypes.isImage(assetPath)) {
      assetType = AssetType.IMAGE;
    } else if (mimeTypes.isVideo(assetPath)) {
      assetType = AssetType.VIDEO;
    } else {
      throw new BadRequestException(`Unsupported file type ${assetPath}`);
    }

    // TODO: doesn't xmp replace the file extension? Will need investigation
    let sidecarPath: string | null = null;
    if (await this.storageRepository.checkFileExists(`${assetPath}.xmp`, R_OK)) {
      sidecarPath = `${assetPath}.xmp`;
    }

    const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, '');

    let assetId;
    if (doImport) {
      const library = await this.repository.get(job.id, true);
      if (library?.deletedAt) {
        this.logger.error('Cannot import asset into deleted library');
        return JobStatus.FAILED;
      }

      const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);

      // TODO: While waiting on a refactor of the domain asset service, this function is just manually written like this
      const addedAsset = await this.assetRepository.create({
        ownerId: job.ownerId,
        libraryId: job.id,
        checksum: pathHash,
        originalPath: assetPath,
        deviceAssetId: deviceAssetId,
        deviceId: 'Library Import',
        fileCreatedAt: stats.mtime,
        fileModifiedAt: stats.mtime,
        localDateTime: stats.mtime,
        type: assetType,
        originalFileName,
        sidecarPath,
        isExternal: true,
      });
      assetId = addedAsset.id;
    } else if (doRefresh && existingAssetEntity) {
      assetId = existingAssetEntity.id;
      await this.assetRepository.updateAll([existingAssetEntity.id], {
        fileCreatedAt: stats.mtime,
        fileModifiedAt: stats.mtime,
        originalFileName,
      });
    } else {
      // Not importing and not refreshing, do nothing
      return JobStatus.SKIPPED;
    }

    this.logger.debug(`Queuing metadata extraction for: ${assetPath}`);

    await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: assetId, source: 'upload' } });

    if (assetType === AssetType.VIDEO) {
      await this.jobRepository.queue({ name: JobName.VIDEO_CONVERSION, data: { id: assetId } });
    }

    return JobStatus.SUCCESS;
  }

  async queueScan(id: string, dto: ScanLibraryDto) {
    await this.findOrFail(id);

    await this.jobRepository.queue({
      name: JobName.LIBRARY_SCAN,
      data: {
        id,
        refreshModifiedFiles: dto.refreshModifiedFiles ?? false,
        refreshAllFiles: dto.refreshAllFiles ?? false,
      },
    });
  }

  async queueRemoveOffline(id: string) {
    this.logger.verbose(`Removing offline files from library: ${id}`);
    await this.jobRepository.queue({ name: JobName.LIBRARY_REMOVE_OFFLINE, data: { id } });
  }
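
  // Job handler: queues a cleanup pass, then a LIBRARY_SCAN job for every library.
  // With force, all files are re-read; otherwise only modified files are refreshed.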
  async handleQueueAllScan(job: IBaseJob): Promise<JobStatus> {
    this.logger.debug(`Refreshing all external libraries: force=${job.force}`);

    // Queue cleanup
    await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} });

    // Queue all library refresh
    const libraries = await this.repository.getAll(true);
    await this.jobRepository.queueAll(
      libraries.map((library) => ({
        name: JobName.LIBRARY_SCAN,
        data: {
          id: library.id,
          refreshModifiedFiles: !job.force,
          refreshAllFiles: job.force ?? false,
        },
      })),
    );
    return JobStatus.SUCCESS;
  }
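
  // Job handler: pages through a library's offline assets and queues each for deletion.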
  async handleOfflineRemoval(job: IEntityJob): Promise<JobStatus> {
    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
      this.assetRepository.getWith(pagination, WithProperty.IS_OFFLINE, job.id),
    );

    for await (const assets of assetPagination) {
      this.logger.debug(`Removing ${assets.length} offline assets`);
      await this.jobRepository.queueAll(
        assets.map((asset) => ({ name: JobName.ASSET_DELETION, data: { id: asset.id } })),
      );
    }

    return JobStatus.SUCCESS;
  }
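
  // Job handler for a full library refresh: crawls the import paths into a trie,
  // diffs it against the assets already in the database to flip online/offline
  // flags, and queues scans for the remaining (new) paths in batches.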
  async handleQueueAssetRefresh(job: ILibraryRefreshJob): Promise<JobStatus> {
    const library = await this.repository.get(job.id);
    if (!library) {
      this.logger.warn('Library not found');
      return JobStatus.FAILED;
    }

    this.logger.log(`Refreshing library: ${job.id}`);

    const crawledAssetPaths = await this.getPathTrie(library);
    this.logger.debug(`Found ${crawledAssetPaths.size} asset(s) when crawling import paths ${library.importPaths}`);

    const assetIdsToMarkOffline = [];
    const assetIdsToMarkOnline = [];
    const pagination = usePagination(LIBRARY_SCAN_BATCH_SIZE, (pagination) =>
      this.assetRepository.getExternalLibraryAssetPaths(pagination, library.id),
    );

    this.logger.verbose(`Crawled asset paths paginated`);

    const shouldScanAll = job.refreshAllFiles || job.refreshModifiedFiles;
    for await (const page of pagination) {
      for (const asset of page) {
        const isOffline = !crawledAssetPaths.has(asset.originalPath);
        if (isOffline && !asset.isOffline) {
          assetIdsToMarkOffline.push(asset.id);
          this.logger.verbose(`Added to mark-offline list: ${asset.originalPath}`);
        }

        if (!isOffline && asset.isOffline) {
          assetIdsToMarkOnline.push(asset.id);
          this.logger.verbose(`Added to mark-online list: ${asset.originalPath}`);
        }

        if (!shouldScanAll) {
          crawledAssetPaths.delete(asset.originalPath);
        }
      }
    }

    this.logger.verbose(`Crawled assets have been checked for online/offline status`);

    if (assetIdsToMarkOffline.length > 0) {
      this.logger.debug(`Found ${assetIdsToMarkOffline.length} offline asset(s) previously marked as online`);
      await this.assetRepository.updateAll(assetIdsToMarkOffline, { isOffline: true });
    }

    if (assetIdsToMarkOnline.length > 0) {
      this.logger.debug(`Found ${assetIdsToMarkOnline.length} online asset(s) previously marked as offline`);
      await this.assetRepository.updateAll(assetIdsToMarkOnline, { isOffline: false });
    }

    if (crawledAssetPaths.size > 0) {
      if (!shouldScanAll) {
        this.logger.debug(`Will import ${crawledAssetPaths.size} new asset(s)`);
      }

      let batch = [];
      for (const assetPath of crawledAssetPaths) {
        batch.push(assetPath);

        if (batch.length >= LIBRARY_SCAN_BATCH_SIZE) {
          await this.scanAssets(job.id, batch, library.ownerId, job.refreshAllFiles ?? false);
          batch = [];
        }
      }

      if (batch.length > 0) {
        await this.scanAssets(job.id, batch, library.ownerId, job.refreshAllFiles ?? false);
      }
    }

    await this.repository.update({ id: job.id, refreshedAt: new Date() });

    return JobStatus.SUCCESS;
  }
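
  // Crawls all valid import paths (invalid ones are logged and skipped) and collects
  // the discovered file paths into a trie, keeping memory usage manageable for very
  // large libraries.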
  private async getPathTrie(library: LibraryEntity): Promise<Trie<string>> {
    const pathValidation = await Promise.all(
      library.importPaths.map(async (importPath) => await this.validateImportPath(importPath)),
    );

    const validImportPaths = pathValidation
      .map((validation) => {
        if (!validation.isValid) {
          this.logger.error(`Skipping invalid import path: ${validation.importPath}. Reason: ${validation.message}`);
        }
        return validation;
      })
      .filter((validation) => validation.isValid)
      .map((validation) => validation.importPath);

    const generator = this.storageRepository.walk({
      pathsToCrawl: validImportPaths,
      exclusionPatterns: library.exclusionPatterns,
    });

    const trie = new Trie<string>();
    for await (const filePath of generator) {
      trie.add(filePath);
    }

    return trie;
  }

  private async findOrFail(id: string) {
    const library = await this.repository.get(id);
    if (!library) {
      throw new BadRequestException('Library not found');
    }
    return library;
  }
}