from pathlib import Path
from typing import Any

from huggingface_hub import snapshot_download
from PIL.Image import Image
from transformers.pipelines import pipeline

from ..config import settings
from ..schemas import ModelType
from .base import InferenceModel


class ImageClassifier(InferenceModel):
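    """Assigns tags to images with a Hugging Face image-classification pipeline.

    Predicted labels scoring below ``min_score`` are dropped, and comma-separated
    labels are split into separate tags.
    """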
    _model_type = ModelType.IMAGE_CLASSIFICATION

    def __init__(
        self,
        model_name: str,
        min_score: float = settings.min_tag_score,
        cache_dir: Path | str | None = None,
        **model_kwargs: Any,
    ) -> None:
        # Minimum confidence a predicted label needs before it is kept as a tag.
        self.min_score = min_score
        super().__init__(model_name, cache_dir, **model_kwargs)

    def _download(self, **model_kwargs: Any) -> None:
        # Fetch only the files the pipeline needs (model weights plus JSON/text config) into the cache.
        snapshot_download(
            cache_dir=self.cache_dir, repo_id=self.model_name, allow_patterns=["*.bin", "*.json", "*.txt"]
        )

    def _load(self, **model_kwargs: Any) -> None:
        # Build the transformers pipeline for this model type's task, reusing the download cache directory.
        self.model = pipeline(
            self.model_type.value,
            self.model_name,
            model_kwargs={"cache_dir": self.cache_dir, **model_kwargs},
        )

    def _predict(self, image: Image) -> list[str]:
        predictions: list[dict[str, Any]] = self.model(image)  # type: ignore
        # Each prediction may carry a comma-separated label string; split it into individual
        # tags and keep only those whose score clears the configured threshold.
        tags = [tag for pred in predictions for tag in pred["label"].split(", ") if pred["score"] >= self.min_score]

        return tags
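

# Illustrative usage sketch (assumptions, not part of the module): the model name and
# threshold below are examples only, and it assumes the InferenceModel base class exposes
# a public predict() wrapper that downloads/loads the model before calling _predict.
#
#   classifier = ImageClassifier("microsoft/resnet-50", min_score=0.7, cache_dir="/cache")
#   tags = classifier.predict(PIL.Image.open("photo.jpg"))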