
refactor: cli (#8199)

* refactor(cli): upload asset

* chore: e2e tests
Jason Rasmussen 2024-03-22 14:38:00 -04:00 committed by GitHub
parent db744f500b
commit 5b7417bf64
5 changed files with 341 additions and 410 deletions
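
At a glance, the first file replaces the class-based UploadCommand with standalone functions. A minimal sketch of the refactored control flow, restating the function names that appear in the diff below (types simplified, error handling omitted):

type Asset = { id: string; filepath: string };

export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
  await authenticate(baseOptions);
  // crawl the given paths for supported image/video extensions
  const files = await scan(paths, options);
  if (files.length === 0) {
    return;
  }
  // sha1 each file and ask the server which ones already exist
  const { newFiles, duplicates } = await checkForDuplicates(files, options);
  // multipart upload of the new files, then album bookkeeping and optional local deletion
  const newAssets = await uploadFiles(newFiles, options);
  await updateAlbums([...newAssets, ...duplicates], options);
  await deleteFiles(newFiles, options);
};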

Changed file 1 of 5

@@ -1,5 +1,7 @@
import {
Action,
AssetBulkUploadCheckResult,
AssetFileUploadResponseDto,
addAssetsToAlbum,
checkBulkUpload,
createAlbum,
@@ -8,130 +10,19 @@ import {
getSupportedMediaTypes,
} from '@immich/sdk';
import byteSize from 'byte-size';
import cliProgress from 'cli-progress';
import { chunk, zip } from 'lodash-es';
import { createHash } from 'node:crypto';
import fs, { createReadStream } from 'node:fs';
import { access, constants, lstat, stat, unlink } from 'node:fs/promises';
import { Presets, SingleBar } from 'cli-progress';
import { chunk } from 'lodash-es';
import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises';
import os from 'node:os';
import path, { basename } from 'node:path';
import { BaseOptions, authenticate, crawl } from 'src/utils';
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
const zipDefined = zip as <T, U>(a: T[], b: U[]) => [T, U][];
const s = (count: number) => (count === 1 ? '' : 's');
enum CheckResponseStatus {
ACCEPT = 'accept',
REJECT = 'reject',
DUPLICATE = 'duplicate',
}
class Asset {
readonly path: string;
id?: string;
deviceAssetId?: string;
fileCreatedAt?: Date;
fileModifiedAt?: Date;
sidecarPath?: string;
fileSize?: number;
albumName?: string;
constructor(path: string) {
this.path = path;
}
async prepare() {
const stats = await stat(this.path);
this.deviceAssetId = `${basename(this.path)}-${stats.size}`.replaceAll(/\s+/g, '');
this.fileCreatedAt = stats.mtime;
this.fileModifiedAt = stats.mtime;
this.fileSize = stats.size;
this.albumName = this.extractAlbumName();
}
async getUploadFormData(): Promise<FormData> {
if (!this.deviceAssetId) {
throw new Error('Device asset id not set');
}
if (!this.fileCreatedAt) {
throw new Error('File created at not set');
}
if (!this.fileModifiedAt) {
throw new Error('File modified at not set');
}
// XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
const assetPath = path.parse(this.path);
const assetPathWithoutExt = path.join(assetPath.dir, assetPath.name);
const sidecarPathWithoutExt = `${assetPathWithoutExt}.xmp`;
const sideCarPathWithExt = `${this.path}.xmp`;
const [sideCarWithExtExists, sideCarWithoutExtExists] = await Promise.all([
access(sideCarPathWithExt, constants.R_OK)
.then(() => true)
.catch(() => false),
access(sidecarPathWithoutExt, constants.R_OK)
.then(() => true)
.catch(() => false),
]);
let sidecarPath = undefined;
if (sideCarWithExtExists) {
sidecarPath = sideCarPathWithExt;
} else if (sideCarWithoutExtExists) {
sidecarPath = sidecarPathWithoutExt;
}
let sidecarData: Blob | undefined = undefined;
if (sidecarPath) {
try {
sidecarData = new File([await fs.openAsBlob(sidecarPath)], basename(sidecarPath));
} catch {}
}
const data: any = {
assetData: new File([await fs.openAsBlob(this.path)], basename(this.path)),
deviceAssetId: this.deviceAssetId,
deviceId: 'CLI',
fileCreatedAt: this.fileCreatedAt.toISOString(),
fileModifiedAt: this.fileModifiedAt.toISOString(),
isFavorite: String(false),
};
const formData = new FormData();
for (const property in data) {
formData.append(property, data[property]);
}
if (sidecarData) {
formData.append('sidecarData', sidecarData);
}
return formData;
}
async delete(): Promise<void> {
return unlink(this.path);
}
async hash(): Promise<string> {
const sha1 = (filePath: string) => {
const hash = createHash('sha1');
return new Promise<string>((resolve, reject) => {
const rs = createReadStream(filePath);
rs.on('error', reject);
rs.on('data', (chunk) => hash.update(chunk));
rs.on('end', () => resolve(hash.digest('hex')));
});
};
return await sha1(this.path);
}
private extractAlbumName(): string | undefined {
return os.platform() === 'win32' ? this.path.split('\\').at(-2) : this.path.split('/').at(-2);
}
}
// TODO figure out why `id` is missing
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
type Asset = { id: string; filepath: string };
interface UploadOptionsDto {
recursive?: boolean;
@@ -145,315 +36,294 @@ interface UploadOptionsDto {
concurrency: number;
}
export const upload = async (paths: string[], baseOptions: BaseOptions, uploadOptions: UploadOptionsDto) => {
await authenticate(baseOptions);
console.log('Crawling for assets...');
const inputFiles: string[] = [];
for (const pathArgument of paths) {
const fileStat = await lstat(pathArgument);
if (fileStat.isFile()) {
inputFiles.push(pathArgument);
}
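// A File-compatible wrapper that reports the real on-disk size and streams content from the
// filesystem, so FormData uploads do not have to buffer whole files in memory.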
class UploadFile extends File {
constructor(
private filepath: string,
private _size: number,
) {
super([], basename(filepath));
}
const { image, video } = await getSupportedMediaTypes();
const files = await crawl({
pathsToCrawl: paths,
recursive: uploadOptions.recursive,
exclusionPatterns: uploadOptions.exclusionPatterns,
includeHidden: uploadOptions.includeHidden,
extensions: [...image, ...video],
});
get size() {
return this._size;
}
files.push(...inputFiles);
stream() {
return createReadStream(this.filepath) as any;
}
}
export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
await authenticate(baseOptions);
const files = await scan(paths, options);
if (files.length === 0) {
console.log('No assets found, exiting');
console.log('No files found, exiting');
return;
}
return new UploadCommand().run(files, uploadOptions);
const { newFiles, duplicates } = await checkForDuplicates(files, options);
const newAssets = await uploadFiles(newFiles, options);
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(newFiles, options);
};
// TODO refactor this
class UploadCommand {
async run(files: string[], options: UploadOptionsDto): Promise<void> {
const { concurrency, dryRun } = options;
const assetsToCheck = files.map((path) => new Asset(path));
const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
const { image, video } = await getSupportedMediaTypes();
const { newAssets, duplicateAssets } = await this.checkAssets(assetsToCheck, concurrency);
console.log('Crawling for assets...');
const files = await crawl({
pathsToCrawl,
recursive: options.recursive,
exclusionPatterns: options.exclusionPatterns,
includeHidden: options.includeHidden,
extensions: [...image, ...video],
});
const totalSizeUploaded = await this.upload(newAssets, options);
const messageStart = dryRun ? 'Would have' : 'Successfully';
if (newAssets.length === 0) {
console.log('All assets were already uploaded, nothing to do.');
} else {
console.log(
`${messageStart} uploaded ${newAssets.length} asset${newAssets.length === 1 ? '' : 's'} (${byteSize(totalSizeUploaded)})`,
return files;
};
const checkForDuplicates = async (files: string[], { concurrency }: UploadOptionsDto) => {
const progressBar = new SingleBar(
{ format: 'Checking files | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
progressBar.start(files.length, 0);
const newFiles: string[] = [];
const duplicates: Asset[] = [];
try {
// TODO refactor into a queue
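// each batch item is { id: <local filepath>, checksum: <sha1> }; the server echoes the id back
// together with an accept/reject action, so results can be mapped to local paths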
for (const items of chunk(files, concurrency)) {
const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
if (action === Action.Accept) {
newFiles.push(filepath);
} else {
// rejects are always duplicates
duplicates.push({ id: assetId as string, filepath });
}
progressBar.increment();
}
}
} finally {
progressBar.stop();
}
console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
return { newFiles, duplicates };
};
const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
if (files.length === 0) {
console.log('All assets were already uploaded, nothing to do.');
return [];
}
// Compute total size first
let totalSize = 0;
const statsMap = new Map<string, Stats>();
for (const filepath of files) {
const stats = await stat(filepath);
statsMap.set(filepath, stats);
totalSize += stats.size;
}
if (dryRun) {
console.log(`Would have uploaded ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
return [];
}
const uploadProgress = new SingleBar(
{ format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
Presets.shades_classic,
);
uploadProgress.start(totalSize, 0);
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
let totalSizeUploaded = 0;
const newAssets: Asset[] = [];
try {
for (const items of chunk(files, concurrency)) {
await Promise.all(
items.map(async (filepath) => {
const stats = statsMap.get(filepath) as Stats;
const response = await uploadFile(filepath, stats);
totalSizeUploaded += stats.size ?? 0;
uploadProgress.update(totalSizeUploaded, { value_formatted: byteSize(totalSizeUploaded) });
newAssets.push({ id: response.id, filepath });
return response;
}),
);
}
} finally {
uploadProgress.stop();
}
if (options.album || options.albumName) {
const { createdAlbumCount, updatedAssetCount } = await this.updateAlbums(
[...newAssets, ...duplicateAssets],
options,
console.log(`Successfully uploaded ${newAssets.length} asset${s(newAssets.length)} (${byteSize(totalSizeUploaded)})`);
return newAssets;
};
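// Uploads one file (plus an XMP sidecar when present) as multipart form data to POST /asset/upload.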
const uploadFile = async (input: string, stats: Stats): Promise<AssetFileUploadResponseDto> => {
const { baseUrl, headers } = defaults;
const assetPath = path.parse(input);
const noExtension = path.join(assetPath.dir, assetPath.name);
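// e.g. for /photos/IMG_0001.jpg the sidecar candidates are /photos/IMG_0001.xmp and /photos/IMG_0001.jpg.xmp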
const sidecarsFiles = await Promise.all(
// XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
[`${noExtension}.xmp`, `${input}.xmp`].map(async (sidecarPath) => {
try {
const stats = await stat(sidecarPath);
return new UploadFile(sidecarPath, stats.size);
} catch {
return false;
}
}),
);
const sidecarData = sidecarsFiles.find((file): file is UploadFile => file !== false);
const formData = new FormData();
formData.append('deviceAssetId', `${basename(input)}-${stats.size}`.replaceAll(/\s+/g, ''));
formData.append('deviceId', 'CLI');
formData.append('fileCreatedAt', stats.mtime.toISOString());
formData.append('fileModifiedAt', stats.mtime.toISOString());
formData.append('fileSize', String(stats.size));
formData.append('isFavorite', 'false');
formData.append('assetData', new UploadFile(input, stats.size));
if (sidecarData) {
formData.append('sidecarData', sidecarData);
}
const response = await fetch(`${baseUrl}/asset/upload`, {
method: 'post',
redirect: 'error',
headers: headers as Record<string, string>,
body: formData,
});
if (response.status !== 200 && response.status !== 201) {
throw new Error(await response.text());
}
return response.json();
};
const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<void> => {
if (!options.delete) {
return;
}
if (options.dryRun) {
console.log(`Would now have deleted assets, but skipped due to dry run`);
return;
}
console.log('Deleting assets that have been uploaded...');
const deletionProgress = new SingleBar(
{ format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
deletionProgress.start(files.length, 0);
try {
for (const assetBatch of chunk(files, options.concurrency)) {
await Promise.all(assetBatch.map((input: string) => unlink(input)));
deletionProgress.update(assetBatch.length);
}
} finally {
deletionProgress.stop();
}
};
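// Maps each asset to an album (parent folder name, or --album-name when given), creating any
// missing albums first and then bulk-adding assets to them.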
const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
if (!options.album && !options.albumName) {
return;
}
const { dryRun, concurrency } = options;
const albums = await getAllAlbums({});
const existingAlbums = new Map(albums.map((album) => [album.albumName, album.id]));
const newAlbums: Set<string> = new Set();
for (const { filepath } of assets) {
const albumName = getAlbumName(filepath, options);
if (albumName && !existingAlbums.has(albumName)) {
newAlbums.add(albumName);
}
}
if (dryRun) {
// TODO print asset counts for new albums
console.log(`Would have created ${newAlbums.size} new album${s(newAlbums.size)}`);
console.log(`Would have updated ${assets.length} asset${s(assets.length)}`);
return;
}
const progressBar = new SingleBar(
{ format: 'Creating albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} albums' },
Presets.shades_classic,
);
progressBar.start(newAlbums.size, 0);
try {
for (const albumNames of chunk([...newAlbums], concurrency)) {
const items = await Promise.all(
albumNames.map((albumName: string) => createAlbum({ createAlbumDto: { albumName } })),
);
console.log(`${messageStart} created ${createdAlbumCount} new album${createdAlbumCount === 1 ? '' : 's'}`);
console.log(`${messageStart} updated ${updatedAssetCount} asset${updatedAssetCount === 1 ? '' : 's'}`);
for (const { id, albumName } of items) {
existingAlbums.set(albumName, id);
}
progressBar.increment(albumNames.length);
}
if (!options.delete) {
return;
}
if (dryRun) {
console.log(`Would now have deleted assets, but skipped due to dry run`);
return;
}
console.log('Deleting assets that have been uploaded...');
await this.deleteAssets(newAssets, options);
} finally {
progressBar.stop();
}
async checkAssets(
assetsToCheck: Asset[],
concurrency: number,
): Promise<{ newAssets: Asset[]; duplicateAssets: Asset[]; rejectedAssets: Asset[] }> {
for (const assets of chunk(assetsToCheck, concurrency)) {
await Promise.all(assets.map((asset: Asset) => asset.prepare()));
console.log(`Successfully created ${newAlbums.size} new album${s(newAlbums.size)}`);
console.log(`Successfully updated ${assets.length} asset${s(assets.length)}`);
const albumToAssets = new Map<string, string[]>();
for (const asset of assets) {
const albumName = getAlbumName(asset.filepath, options);
if (!albumName) {
continue;
}
const checkProgress = new cliProgress.SingleBar(
{ format: 'Checking assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
cliProgress.Presets.shades_classic,
);
checkProgress.start(assetsToCheck.length, 0);
const newAssets = [];
const duplicateAssets = [];
const rejectedAssets = [];
try {
for (const assets of chunk(assetsToCheck, concurrency)) {
const checkedAssets = await this.getStatus(assets);
for (const checked of checkedAssets) {
if (checked.status === CheckResponseStatus.ACCEPT) {
newAssets.push(checked.asset);
} else if (checked.status === CheckResponseStatus.DUPLICATE) {
duplicateAssets.push(checked.asset);
} else {
rejectedAssets.push(checked.asset);
}
checkProgress.increment();
}
const albumId = existingAlbums.get(albumName);
if (albumId) {
if (!albumToAssets.has(albumId)) {
albumToAssets.set(albumId, []);
}
} finally {
checkProgress.stop();
}
return { newAssets, duplicateAssets, rejectedAssets };
}
async upload(assetsToUpload: Asset[], { dryRun, concurrency }: UploadOptionsDto): Promise<number> {
let totalSize = 0;
// Compute total size first
for (const asset of assetsToUpload) {
totalSize += asset.fileSize ?? 0;
}
if (dryRun) {
return totalSize;
}
const uploadProgress = new cliProgress.SingleBar(
{
format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
},
cliProgress.Presets.shades_classic,
);
uploadProgress.start(totalSize, 0);
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
let totalSizeUploaded = 0;
try {
for (const assets of chunk(assetsToUpload, concurrency)) {
const ids = await this.uploadAssets(assets);
for (const [asset, id] of zipDefined(assets, ids)) {
asset.id = id;
if (asset.fileSize) {
totalSizeUploaded += asset.fileSize ?? 0;
} else {
console.log(`Could not determine file size for ${asset.path}`);
}
}
uploadProgress.update(totalSizeUploaded, { value_formatted: byteSize(totalSizeUploaded) });
}
} finally {
uploadProgress.stop();
}
return totalSizeUploaded;
}
async updateAlbums(
assets: Asset[],
options: UploadOptionsDto,
): Promise<{ createdAlbumCount: number; updatedAssetCount: number }> {
const { dryRun, concurrency } = options;
if (options.albumName) {
for (const asset of assets) {
asset.albumName = options.albumName;
}
}
const albums = await getAllAlbums({});
const existingAlbums = new Map(albums.map((album) => [album.albumName, album.id]));
const assetsToUpdate = assets.filter(
(asset): asset is Asset & { albumName: string; id: string } => !!(asset.albumName && asset.id),
);
const newAlbumsSet: Set<string> = new Set();
for (const asset of assetsToUpdate) {
if (!existingAlbums.has(asset.albumName)) {
newAlbumsSet.add(asset.albumName);
}
}
const newAlbums = [...newAlbumsSet];
if (dryRun) {
return { createdAlbumCount: newAlbums.length, updatedAssetCount: assetsToUpdate.length };
}
const albumCreationProgress = new cliProgress.SingleBar(
{
format: 'Creating albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} albums',
},
cliProgress.Presets.shades_classic,
);
albumCreationProgress.start(newAlbums.length, 0);
try {
for (const albumNames of chunk(newAlbums, concurrency)) {
const newAlbumIds = await Promise.all(
albumNames.map((albumName: string) => createAlbum({ createAlbumDto: { albumName } }).then((r) => r.id)),
);
for (const [albumName, albumId] of zipDefined(albumNames, newAlbumIds)) {
existingAlbums.set(albumName, albumId);
}
albumCreationProgress.increment(albumNames.length);
}
} finally {
albumCreationProgress.stop();
}
const albumToAssets = new Map<string, string[]>();
for (const asset of assetsToUpdate) {
const albumId = existingAlbums.get(asset.albumName);
if (albumId) {
if (!albumToAssets.has(albumId)) {
albumToAssets.set(albumId, []);
}
albumToAssets.get(albumId)?.push(asset.id);
}
}
const albumUpdateProgress = new cliProgress.SingleBar(
{
format: 'Adding assets to albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets',
},
cliProgress.Presets.shades_classic,
);
albumUpdateProgress.start(assetsToUpdate.length, 0);
try {
for (const [albumId, assets] of albumToAssets.entries()) {
for (const assetBatch of chunk(assets, Math.min(1000 * concurrency, 65_000))) {
await addAssetsToAlbum({ id: albumId, bulkIdsDto: { ids: assetBatch } });
albumUpdateProgress.increment(assetBatch.length);
}
}
} finally {
albumUpdateProgress.stop();
}
return { createdAlbumCount: newAlbums.length, updatedAssetCount: assetsToUpdate.length };
}
async deleteAssets(assets: Asset[], options: UploadOptionsDto): Promise<void> {
const deletionProgress = new cliProgress.SingleBar(
{
format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets',
},
cliProgress.Presets.shades_classic,
);
deletionProgress.start(assets.length, 0);
try {
for (const assetBatch of chunk(assets, options.concurrency)) {
await Promise.all(assetBatch.map((asset: Asset) => asset.delete()));
deletionProgress.update(assetBatch.length);
}
} finally {
deletionProgress.stop();
albumToAssets.get(albumId)?.push(asset.id);
}
}
private async getStatus(assets: Asset[]): Promise<{ asset: Asset; status: CheckResponseStatus }[]> {
const checkResponse = await this.checkHashes(assets);
const albumUpdateProgress = new SingleBar(
{ format: 'Adding assets to albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
albumUpdateProgress.start(assets.length, 0);
const responses = [];
for (const [check, asset] of zipDefined(checkResponse, assets)) {
if (check.assetId) {
asset.id = check.assetId;
}
if (check.action === 'accept') {
responses.push({ asset, status: CheckResponseStatus.ACCEPT });
} else if (check.reason === 'duplicate') {
responses.push({ asset, status: CheckResponseStatus.DUPLICATE });
} else {
responses.push({ asset, status: CheckResponseStatus.REJECT });
try {
for (const [albumId, assets] of albumToAssets.entries()) {
for (const assetBatch of chunk(assets, Math.min(1000 * concurrency, 65_000))) {
await addAssetsToAlbum({ id: albumId, bulkIdsDto: { ids: assetBatch } });
albumUpdateProgress.increment(assetBatch.length);
}
}
return responses;
} finally {
albumUpdateProgress.stop();
}
};
private async checkHashes(assetsToCheck: Asset[]): Promise<AssetBulkUploadCheckResult[]> {
const checksums = await Promise.all(assetsToCheck.map((asset) => asset.hash()));
const assetBulkUploadCheckDto = {
assets: zipDefined(assetsToCheck, checksums).map(([asset, checksum]) => ({ id: asset.path, checksum })),
};
const checkResponse = await checkBulkUpload({ assetBulkUploadCheckDto });
return checkResponse.results;
}
private async uploadAssets(assets: Asset[]): Promise<string[]> {
const fileRequests = await Promise.all(assets.map((asset) => asset.getUploadFormData()));
const results = await Promise.all(fileRequests.map((request) => this.uploadAsset(request)));
return results.map((response) => response.id);
}
private async uploadAsset(data: FormData): Promise<{ id: string }> {
const { baseUrl, headers } = defaults;
const response = await fetch(`${baseUrl}/asset/upload`, {
method: 'post',
redirect: 'error',
headers: headers as Record<string, string>,
body: data,
});
if (response.status !== 200 && response.status !== 201) {
throw new Error(await response.text());
}
return response.json();
}
}
const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
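// default album name is the asset's parent folder, e.g. photos/hawaii/IMG_0001.jpg -> 'hawaii';
// an explicit --album-name overrides it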
const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
return options.albumName ?? folderName;
};

Changed file 2 of 5

@@ -1,5 +1,7 @@
import { defaults, getMyUserInfo, isHttpError } from '@immich/sdk';
import { glob } from 'glob';
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';
import { readFile, stat, writeFile } from 'node:fs/promises';
import { join } from 'node:path';
import yaml from 'yaml';
@@ -100,7 +102,7 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPatterns, includeHidden } = options;
const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));
if (!pathsToCrawl) {
if (pathsToCrawl.length === 0) {
return [];
}
@@ -149,3 +151,13 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
return [...crawledFiles, ...globbedFiles].sort();
};
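// Streams a file through a SHA-1 digest; the upload command uses these checksums with
// checkBulkUpload to skip files the server already has.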
export const sha1 = (filepath: string) => {
const hash = createHash('sha1');
return new Promise<string>((resolve, reject) => {
const rs = createReadStream(filepath);
rs.on('error', reject);
rs.on('data', (chunk) => hash.update(chunk));
rs.on('end', () => resolve(hash.digest('hex')));
});
};

Changed file 3 of 5

@@ -4,7 +4,8 @@ import { beforeAll, describe, expect, it } from 'vitest';
describe(`immich server-info`, () => {
beforeAll(async () => {
await utils.resetDatabase();
await utils.cliLogin();
const admin = await utils.adminSetup();
await utils.cliLogin(admin.accessToken);
});
it('should return the server info', async () => {

Changed file 4 of 5

@@ -1,20 +1,69 @@
import { getAllAlbums, getAllAssets } from '@immich/sdk';
import { LoginResponseDto, getAllAlbums, getAllAssets } from '@immich/sdk';
import { mkdir, readdir, rm, symlink } from 'node:fs/promises';
import { asKeyAuth, immichCli, testAssetDir, utils } from 'src/utils';
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';
describe(`immich upload`, () => {
let admin: LoginResponseDto;
let key: string;
beforeAll(async () => {
await utils.resetDatabase();
key = await utils.cliLogin();
admin = await utils.adminSetup();
key = await utils.cliLogin(admin.accessToken);
});
beforeEach(async () => {
await utils.resetDatabase(['assets', 'albums']);
});
describe(`immich upload /path/to/file.jpg`, () => {
it('should upload a single file', async () => {
const { stderr, stdout, exitCode } = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
expect(stderr).toBe('');
expect(stdout.split('\n')).toEqual(
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 asset')]),
);
expect(exitCode).toBe(0);
const assets = await getAllAssets({}, { headers: asKeyAuth(key) });
expect(assets.length).toBe(1);
});
it('should skip a duplicate file', async () => {
const first = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
expect(first.stderr).toBe('');
expect(first.stdout.split('\n')).toEqual(
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 asset')]),
);
expect(first.exitCode).toBe(0);
const assets = await getAllAssets({}, { headers: asKeyAuth(key) });
expect(assets.length).toBe(1);
const second = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
expect(second.stderr).toBe('');
expect(second.stdout.split('\n')).toEqual(
expect.arrayContaining([
expect.stringContaining('Found 0 new files and 1 duplicate'),
expect.stringContaining('All assets were already uploaded, nothing to do'),
]),
);
expect(second.exitCode).toBe(0);
});
it('should skip files that do not exist', async () => {
const { stderr, stdout, exitCode } = await immichCli(['upload', `/path/to/file`]);
expect(stderr).toBe('');
expect(stdout.split('\n')).toEqual(expect.arrayContaining([expect.stringContaining('No files found, exiting')]));
expect(exitCode).toBe(0);
const assets = await getAllAssets({}, { headers: asKeyAuth(key) });
expect(assets.length).toBe(0);
});
});
describe('immich upload --recursive', () => {
it('should upload a folder recursively', async () => {
const { stderr, stdout, exitCode } = await immichCli(['upload', `${testAssetDir}/albums/nature/`, '--recursive']);

Changed file 5 of 5

@@ -404,9 +404,8 @@ export const utils = {
},
]),
cliLogin: async () => {
const admin = await utils.adminSetup();
const key = await utils.createApiKey(admin.accessToken);
cliLogin: async (accessToken: string) => {
const key = await utils.createApiKey(accessToken);
await immichCli(['login-key', app, `${key.secret}`]);
return key.secret;
},