Mirror of https://github.com/immich-app/immich.git (synced 2024-12-29 15:11:58 +00:00)
Add readme for top level folder, clean up dead code

Commit 613b4449a7 (parent 85b83f9666)
7 changed files with 90 additions and 60 deletions
README.md (new file, +33 lines)

````diff
@@ -0,0 +1,33 @@
+# IMMICH
+
+Self-hosted photo backup solution directly from your mobile phone.
+
+# Development
+
+You can use Docker Compose for development. Several services compose Immich:
+
+1. The server
+2. PostgreSQL
+3. Redis
+
+## Populate the .env file
+
+Navigate to the `server` directory and run
+
+```
+cp .env.example .env
+```
+
+Then populate the values in it.
+
+To start, run
+
+```bash
+docker-compose up ./server
+```
+
+To force a rebuild of the node modules after installing new packages, run
+
+```bash
+docker-compose up --build -V ./server
+```
````
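For orientation, a compose file covering the three services the README lists might look like the sketch below. This is an illustrative assumption, not the repository's actual docker-compose.yml: the service names, image tags, and credentials are placeholders.

```yaml
# Illustrative sketch only -- not the repo's actual compose file.
version: '3.8'
services:
  server:
    build: ./server          # the Immich server
    env_file: ./server/.env
    depends_on:
      - database
      - redis
  database:
    image: postgres:13       # PostgreSQL
    environment:
      POSTGRES_PASSWORD: placeholder
  redis:
    image: redis:6           # Redis
```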
Dockerfile (server)

````diff
@@ -29,6 +29,12 @@ COPY . .
 
 RUN yarn build
 
+# Clean up commands
+RUN apt-get autoremove -y && apt-get clean && \
+  rm -rf /usr/local/src/*
+
+RUN apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
 
 FROM ubuntu:20.04 as production
 ARG DEBIAN_FRONTEND=noninteractive
@@ -62,4 +68,11 @@ COPY . .
 
 COPY --from=development /usr/src/app/dist ./dist
 
+# Clean up commands
+RUN apt-get autoremove -y && apt-get clean && \
+  rm -rf /usr/local/src/*
+
+RUN apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
+
 CMD ["node", "dist/main"]
````
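One caveat about these cleanup commands: each `RUN` instruction creates its own image layer, so deleting files in a later `RUN` does not shrink the layers where those files were written. If image size is the goal, the usual pattern is to clean up in the same `RUN` that installs packages. An illustrative sketch, not part of this commit:

```dockerfile
# Illustrative only: cleanup reduces image size only when it shares
# a layer with the install step that created the files.
RUN apt-get update \
    && apt-get install -y --no-install-recommends make cmake gcc g++ \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
```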
Server README

````diff
@@ -10,16 +10,4 @@ There is a tensorflow module running in the server so some packages will be needed
 $ apt-get install make cmake gcc g++
 ```
-
-# Docker
-
-To run application using docker compose
-
-```bash
-docker-compose up
-```
-
-To force rebuild node module after installing new packages
-
-```bash
-docker-compose up --build -V
-```
````
App config (immichAppConfig)

````diff
@@ -6,14 +6,10 @@ export const immichAppConfig: ConfigModuleOptions = {
   isGlobal: true,
   validationSchema: Joi.object({
     NODE_ENV: Joi.string().required().valid('development', 'production', 'staging').default('development'),
-    // DB_HOST: Joi.string().required(),
     DB_USERNAME: Joi.string().required(),
     DB_PASSWORD: Joi.string().required(),
     DB_DATABASE_NAME: Joi.string().required(),
     UPLOAD_LOCATION: Joi.string().required(),
     JWT_SECRET: Joi.string().required(),
-    // REDIS_HOST: Joi.string().required(),
-    // REDIS_PORT: Joi.string().required(),
-    // REDIS_PASSWORD: Joi.string().required(),
   }),
 };
````
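Given this schema, a `.env` file that passes validation would define each required key. The values below are placeholders for illustration, not real defaults from the repo:

```bash
# Placeholder values -- supply your own.
NODE_ENV=development
DB_USERNAME=postgres
DB_PASSWORD=postgres
DB_DATABASE_NAME=immich
UPLOAD_LOCATION=./upload
JWT_SECRET=change-me
```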
ImageOptimizeModule

````diff
@@ -7,7 +7,7 @@ import { AssetService } from '../../api-v1/asset/asset.service';
 import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
 import { ImageOptimizeProcessor } from './image-optimize.processor';
 import { ImageOptimizeService } from './image-optimize.service';
-// import { MachineLearningProcessor } from './machine-learning.processor';
+import { MachineLearningProcessor } from './machine-learning.processor';
 
 @Module({
   imports: [
@@ -30,7 +30,7 @@ import { ImageOptimizeService } from './image-optimize.service';
 
     TypeOrmModule.forFeature([AssetEntity]),
   ],
-  providers: [ImageOptimizeService, ImageOptimizeProcessor],
+  providers: [ImageOptimizeService, ImageOptimizeProcessor, MachineLearningProcessor],
   exports: [ImageOptimizeService],
 })
 export class ImageOptimizeModule {}
````
ImageOptimizeProcessor

````diff
@@ -45,13 +45,14 @@ export class ImageOptimizeProcessor {
 
       await this.assetRepository.update(savedAsset, { resizePath: resizePath });
 
-      const jobb = await this.machineLearningQueue.add(
-        'object-detection',
-        {
-          resizePath,
-        },
-        { jobId: randomUUID() },
-      );
+      // Send file to object detection after resizing
+      // const detectionJob = await this.machineLearningQueue.add(
+      //   'object-detection',
+      //   {
+      //     resizePath,
+      //   },
+      //   { jobId: randomUUID() },
+      // );
 
     });
   });
````
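The `machineLearningQueue` used here is a Bull queue injected into the processor's constructor; the queue registration itself sits outside the hunks shown in this commit. A hypothetical, self-contained sketch of that wiring follows; `DetectionDispatcher` and `DetectionModule` are invented names for illustration, not files in the repo:

```typescript
import { Module, Injectable } from '@nestjs/common';
import { BullModule, InjectQueue } from '@nestjs/bull';
import { Queue } from 'bull';
import { randomUUID } from 'crypto';

@Injectable()
export class DetectionDispatcher {
  // The queue name must match @Processor('machine-learning') on the consumer side.
  constructor(@InjectQueue('machine-learning') private machineLearningQueue: Queue) {}

  async enqueue(resizePath: string) {
    // Mirrors the call commented out above: job name, payload, unique job id.
    await this.machineLearningQueue.add('object-detection', { resizePath }, { jobId: randomUUID() });
  }
}

@Module({
  imports: [BullModule.registerQueue({ name: 'machine-learning' })],
  providers: [DetectionDispatcher],
})
export class DetectionModule {}
```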
MachineLearningProcessor

````diff
@@ -1,39 +1,38 @@
-// import { Process, Processor } from '@nestjs/bull';
-// import { InjectRepository } from '@nestjs/typeorm';
-// import { Job } from 'bull';
-// import { Repository } from 'typeorm';
-// import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
-// import sharp from 'sharp';
-// import fs, { existsSync, mkdirSync } from 'fs';
-// import { ConfigService } from '@nestjs/config';
-// import * as tfnode from '@tensorflow/tfjs-node';
-// import * as cocoSsd from '@tensorflow-models/coco-ssd';
+import { Process, Processor } from '@nestjs/bull';
+import { InjectRepository } from '@nestjs/typeorm';
+import { Job } from 'bull';
+import { Repository } from 'typeorm';
+import { AssetEntity } from '../../api-v1/asset/entities/asset.entity';
+import fs from 'fs';
+import { ConfigService } from '@nestjs/config';
+import * as tfnode from '@tensorflow/tfjs-node';
+import * as cocoSsd from '@tensorflow-models/coco-ssd';
 
-// @Processor('machine-learning')
-// export class MachineLearningProcessor {
-//   constructor(
-//     @InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
-//     private configService: ConfigService,
-//   ) {}
+@Processor('machine-learning')
+export class MachineLearningProcessor {
+  constructor(
+    @InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>,
+    private configService: ConfigService,
+  ) {}
 
-//   @Process('object-detection')
-//   async handleOptimization(job: Job) {
-//     try {
-//       const { resizePath }: { resizePath: string } = job.data;
+  @Process('object-detection')
+  async handleOptimization(job: Job) {
+    try {
+      const { resizePath }: { resizePath: string } = job.data;
 
-//       const image = fs.readFileSync(resizePath);
-//       const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
-//       const model = await cocoSsd.load();
-//       const predictions = await model.detect(decodedImage);
-//       console.log('start predictions ------------------ ');
-//       for (var result of predictions) {
-//         console.log(`Found ${result.class} with score ${result.score}`);
-//       }
-//       console.log('end predictions ------------------ ');
+      const image = fs.readFileSync(resizePath);
+      const decodedImage = tfnode.node.decodeImage(image, 3) as tfnode.Tensor3D;
+      const model = await cocoSsd.load();
+      const predictions = await model.detect(decodedImage);
+      console.log('\n\nstart predictions ------------------ ');
+      for (var result of predictions) {
+        console.log(`Found ${result.class} with score ${result.score}`);
+      }
+      console.log('end predictions ------------------\n\n');
 
-//       return 'ok';
-//     } catch (e) {
-//       console.log('Error object detection ', e);
-//     }
-//   }
-// }
+      return 'ok';
+    } catch (e) {
+      console.log('Error object detection ', e);
+    }
+  }
+}
````
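Two practical notes on the re-enabled processor, offered as suggestions rather than as part of this commit: `cocoSsd.load()` fetches and initializes the model, so calling it on every job is expensive, and tfjs-node tensors hold native memory that is not reclaimed with the JS heap. A minimal sketch of both fixes, with `detectObjects` as an invented helper name:

```typescript
import * as tfnode from '@tensorflow/tfjs-node';
import * as cocoSsd from '@tensorflow-models/coco-ssd';
import fs from 'fs';

// Load the model once and reuse it across jobs instead of per-job cocoSsd.load().
let modelPromise: Promise<cocoSsd.ObjectDetection> | null = null;
const getModel = () => (modelPromise ??= cocoSsd.load());

export async function detectObjects(resizePath: string) {
  const decodedImage = tfnode.node.decodeImage(fs.readFileSync(resizePath), 3) as tfnode.Tensor3D;
  try {
    const model = await getModel();
    return await model.detect(decodedImage);
  } finally {
    // Tensors allocate native memory; dispose explicitly to avoid leaks in a long-lived worker.
    decodedImage.dispose();
  }
}
```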