[Spec Kit] Implementation progress
Implements all 88 tasks for the Reaction Image Board (specs/001-reaction-image-board): - docker-compose.yml: postgres, minio, minio-init, api, ui services with healthchecks - api/: FastAPI app with SQLAlchemy 2.x async, Alembic migrations, S3/MinIO storage, full integration + unit test suite (pytest + pytest-asyncio) - ui/: Angular 19 standalone app (Library, Upload, Detail, NotFound components) - .env.example: all required environment variables - .gitignore: Python, Node, Docker, IDE, .env patterns Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
0
api/app/__init__.py
Normal file
0
api/app/__init__.py
Normal file
0
api/app/auth/__init__.py
Normal file
0
api/app/auth/__init__.py
Normal file
8
api/app/auth/noop.py
Normal file
8
api/app/auth/noop.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from app.auth.provider import AuthProvider, Identity


# Single shared identity handed out for every request while auth is disabled.
_ANONYMOUS = Identity(id="anonymous", anonymous=True)


class NoOpAuthProvider(AuthProvider):
    """Auth provider used when authentication is disabled: every caller is anonymous."""

    async def get_identity(self) -> Identity:
        """Return the module-level anonymous identity (no per-request state)."""
        return _ANONYMOUS
|
||||
14
api/app/auth/provider.py
Normal file
14
api/app/auth/provider.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from abc import ABC, abstractmethod
from dataclasses import dataclass


@dataclass
class Identity:
    """Resolved identity of the caller making a request."""

    id: str  # stable identifier for the principal ("anonymous" when unauthenticated)
    anonymous: bool = True  # False only when a real auth backend resolved a user


class AuthProvider(ABC):
    """Pluggable authentication strategy consumed by the API layer."""

    @abstractmethod
    async def get_identity(self) -> Identity:
        """Resolve the request identity."""
|
||||
20
api/app/config.py
Normal file
20
api/app/config.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    """Application configuration, loaded from environment variables and .env.

    Unknown environment variables are ignored (``extra="ignore"``) so the API
    can share an env file with other services (MinIO, the UI, etc.).
    """

    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", extra="ignore")

    # Required: async SQLAlchemy URL, e.g. postgresql+asyncpg://...
    database_url: str
    # Required: S3-compatible endpoint (MinIO in docker-compose) and credentials.
    s3_endpoint_url: str
    s3_bucket_name: str
    s3_access_key_id: str
    s3_secret_access_key: str
    s3_region: str = "us-east-1"
    api_base_url: str = "http://localhost:8000"
    max_upload_bytes: int = 52_428_800  # 50 MiB


@lru_cache
def get_settings() -> Settings:
    """Return the process-wide Settings instance (cached after first call)."""
    return Settings()
|
||||
26
api/app/database.py
Normal file
26
api/app/database.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.orm import DeclarativeBase

from app.config import get_settings

# Lazily-created process-wide singletons; built on first use so that imports
# of this module never require a configured environment.
_engine = None
_session_factory = None


class Base(DeclarativeBase):
    """Declarative base shared by all ORM models."""


def get_engine():
    """Return the shared async engine, creating it on first call."""
    global _engine
    if _engine is not None:
        return _engine
    _engine = create_async_engine(get_settings().database_url, echo=False)
    return _engine


def get_session_factory():
    """Return the shared async session factory, creating it on first call."""
    global _session_factory
    if _session_factory is not None:
        return _session_factory
    # expire_on_commit=False keeps ORM objects usable after the request commits.
    _session_factory = async_sessionmaker(get_engine(), expire_on_commit=False)
    return _session_factory
|
||||
34
api/app/dependencies.py
Normal file
34
api/app/dependencies.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from typing import AsyncGenerator

from fastapi import Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.auth.noop import NoOpAuthProvider
from app.auth.provider import AuthProvider
from app.database import get_session_factory
from app.storage.backend import StorageBackend
from app.storage.s3_backend import S3StorageBackend

# Lazily-built singletons shared across requests.
_storage: StorageBackend | None = None
_auth: AuthProvider | None = None


def get_storage() -> StorageBackend:
    """FastAPI dependency: shared storage backend (S3/MinIO), created on first use."""
    global _storage
    if _storage is not None:
        return _storage
    _storage = S3StorageBackend()
    return _storage


def get_auth() -> AuthProvider:
    """FastAPI dependency: shared auth provider (no-op: everyone is anonymous)."""
    global _auth
    if _auth is not None:
        return _auth
    _auth = NoOpAuthProvider()
    return _auth


async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency: one session per request, wrapped in a transaction.

    The transaction commits when the request handler returns normally and
    rolls back when it raises.
    """
    session_factory = get_session_factory()
    async with session_factory() as session, session.begin():
        yield session
|
||||
33
api/app/main.py
Normal file
33
api/app/main.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from contextlib import asynccontextmanager

from fastapi import FastAPI

from app.config import get_settings
from app.database import get_engine, get_session_factory, Base


@asynccontextmanager
async def lifespan(application: FastAPI):
    """App lifespan: create tables on startup, dispose the engine on shutdown.

    Fix: the original bound ``settings = get_settings()`` and never used it;
    the dead local is removed.
    """
    engine = get_engine()
    async with engine.begin() as conn:
        # In production, Alembic handles migrations; this is a dev convenience
        await conn.run_sync(Base.metadata.create_all)
    yield
    await engine.dispose()


app = FastAPI(title="Reactbin API", version="1.0.0", lifespan=lifespan)


@app.get("/api/v1/health")
async def health():
    """Liveness probe used by container healthchecks."""
    return {"status": "ok"}


# Routers registered after all modules are defined to avoid circular imports
from app.routers import images, tags  # noqa: E402

app.include_router(images.router, prefix="/api/v1")
app.include_router(tags.router, prefix="/api/v1")
|
||||
61
api/app/models.py
Normal file
61
api/app/models.py
Normal file
@@ -0,0 +1,61 @@
|
||||
import uuid
from datetime import datetime, timezone

from sqlalchemy import String, Integer, BigInteger, DateTime, ForeignKey, UniqueConstraint, Index
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base


def _utcnow() -> datetime:
    """Timezone-aware UTC now, used as the default for created_at columns."""
    return datetime.now(timezone.utc)


class Image(Base):
    """A stored image, deduplicated by the SHA-256 of its raw bytes."""

    __tablename__ = "images"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # SHA-256 hex digest of the file contents; unique so identical uploads dedupe.
    hash: Mapped[str] = mapped_column(String(64), unique=True, nullable=False, index=True)
    filename: Mapped[str] = mapped_column(String, nullable=False)
    mime_type: Mapped[str] = mapped_column(String(20), nullable=False)
    size_bytes: Mapped[int] = mapped_column(BigInteger, nullable=False)
    # Pixel dimensions; 0 when the header parser could not determine them.
    width: Mapped[int] = mapped_column(Integer, nullable=False)
    height: Mapped[int] = mapped_column(Integer, nullable=False)
    # Object key in the S3/MinIO bucket (the hash hex, per the upload router).
    storage_key: Mapped[str] = mapped_column(String(64), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, nullable=False)

    # Association rows; deleting an image deletes its tag links (not the tags).
    image_tags: Mapped[list["ImageTag"]] = relationship(back_populates="image", cascade="all, delete-orphan")

    @property
    def tags(self) -> list[str]:
        """Tag names attached to this image (requires image_tags to be loaded)."""
        return [it.tag.name for it in self.image_tags if it.tag]


class Tag(Base):
    """A label that can be attached to any number of images."""

    __tablename__ = "tags"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Normalised (lowercase) name, unique across the board.
    name: Mapped[str] = mapped_column(String(64), unique=True, nullable=False, index=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, nullable=False)

    image_tags: Mapped[list["ImageTag"]] = relationship(back_populates="tag")


class ImageTag(Base):
    """Association table linking images to tags (composite primary key)."""

    __tablename__ = "image_tags"
    __table_args__ = (
        UniqueConstraint("image_id", "tag_id", name="uq_image_tag"),
        Index("ix_image_tags_image_id", "image_id"),
        Index("ix_image_tags_tag_id", "tag_id"),
    )

    # CASCADE: removing an image removes its links automatically at the DB level.
    image_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), ForeignKey("images.id", ondelete="CASCADE"), primary_key=True
    )
    # RESTRICT: a tag cannot be deleted while still referenced by any image.
    tag_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), ForeignKey("tags.id", ondelete="RESTRICT"), primary_key=True
    )

    image: Mapped["Image"] = relationship(back_populates="image_tags")
    tag: Mapped["Tag"] = relationship(back_populates="image_tags")
|
||||
0
api/app/repositories/__init__.py
Normal file
0
api/app/repositories/__init__.py
Normal file
84
api/app/repositories/image_repo.py
Normal file
84
api/app/repositories/image_repo.py
Normal file
@@ -0,0 +1,84 @@
|
||||
import uuid
from typing import Optional

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload

from app.models import Image, ImageTag, Tag


class ImageRepository:
    """Data-access layer for Image rows: dedup lookup, CRUD, tag-filtered listing."""

    def __init__(self, session: AsyncSession) -> None:
        self._session = session

    async def get_by_hash(self, hash_hex: str) -> Optional[Image]:
        """Return the image with this SHA-256 hex digest, tags eagerly loaded."""
        result = await self._session.execute(
            select(Image).where(Image.hash == hash_hex).options(selectinload(Image.image_tags).selectinload(ImageTag.tag))
        )
        return result.scalar_one_or_none()

    async def get_by_id(self, image_id: uuid.UUID) -> Optional[Image]:
        """Return the image with this id, tags eagerly loaded, or None."""
        result = await self._session.execute(
            select(Image).where(Image.id == image_id).options(selectinload(Image.image_tags).selectinload(ImageTag.tag))
        )
        return result.scalar_one_or_none()

    async def create(
        self,
        *,
        hash_hex: str,
        filename: str,
        mime_type: str,
        size_bytes: int,
        width: int,
        height: int,
        storage_key: str,
    ) -> Image:
        """Insert a new image row and return it with image_tags loaded (empty)."""
        image = Image(
            hash=hash_hex,
            filename=filename,
            mime_type=mime_type,
            size_bytes=size_bytes,
            width=width,
            height=height,
            storage_key=storage_key,
        )
        self._session.add(image)
        await self._session.flush()
        # Load the (empty) tag collection so Image.tags is usable without lazy I/O.
        await self._session.refresh(image, ["image_tags"])
        return image

    async def list_images(
        self,
        tag_names: list[str] | None = None,
        limit: int = 50,
        offset: int = 0,
    ) -> tuple[list[Image], int]:
        """List images newest-first with total count.

        When tag_names is given, an image must carry ALL of the tags (AND
        semantics: one IN-subquery per tag name).

        Fix: the original locally imported the unused ``and_``; also the
        annotated ``list[Image]`` is now actually a list rather than a
        SQLAlchemy Sequence.
        """
        from sqlalchemy import func

        base_query = select(Image).options(
            selectinload(Image.image_tags).selectinload(ImageTag.tag)
        )

        if tag_names:
            for tag_name in tag_names:
                subq = (
                    select(ImageTag.image_id)
                    .join(Tag, ImageTag.tag_id == Tag.id)
                    .where(Tag.name == tag_name)
                    .scalar_subquery()
                )
                base_query = base_query.where(Image.id.in_(subq))

        # Count over the filtered (unpaginated) query.
        count_query = select(func.count()).select_from(base_query.subquery())
        total_result = await self._session.execute(count_query)
        total = total_result.scalar_one()

        paginated = base_query.order_by(Image.created_at.desc()).limit(limit).offset(offset)
        result = await self._session.execute(paginated)
        return list(result.scalars().all()), total

    async def delete(self, image: Image) -> None:
        """Delete the image row (association rows cascade at the ORM/DB level)."""
        await self._session.delete(image)
        await self._session.flush()
|
||||
102
api/app/repositories/tag_repo.py
Normal file
102
api/app/repositories/tag_repo.py
Normal file
@@ -0,0 +1,102 @@
|
||||
import re
import uuid

from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import Image, ImageTag, Tag

# Canonical tag form: lowercase alphanumerics plus '_' and '-', 1-64 chars.
_TAG_PATTERN = re.compile(r"^[a-z0-9_-]{1,64}$")


def _escape_like(term: str) -> str:
    """Escape LIKE wildcards so user text matches literally (escape char '\\')."""
    return term.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")


class TagRepository:
    """Data-access layer for tags and image<->tag associations."""

    def __init__(self, session: AsyncSession) -> None:
        self._session = session

    @staticmethod
    def normalise(name: str) -> str:
        """Lowercase and trim a tag name without validating it."""
        return name.strip().lower()

    @staticmethod
    def normalise_and_validate(name: str) -> str:
        """Normalise a tag name and raise ValueError if it is not canonical."""
        normalised = name.strip().lower()
        if not _TAG_PATTERN.match(normalised):
            raise ValueError(
                f"Invalid tag '{name}': must match ^[a-z0-9_-]{{1,64}}$ after normalisation"
            )
        return normalised

    async def upsert_by_name(self, name: str) -> Tag:
        """Return the tag with this (already-normalised) name, creating it if absent."""
        result = await self._session.execute(select(Tag).where(Tag.name == name))
        tag = result.scalar_one_or_none()
        if tag is None:
            tag = Tag(name=name)
            self._session.add(tag)
            await self._session.flush()
        return tag

    async def get_by_image_id(self, image_id: uuid.UUID) -> list[Tag]:
        """Return the tags attached to an image, ordered by name."""
        result = await self._session.execute(
            select(Tag)
            .join(ImageTag, ImageTag.tag_id == Tag.id)
            .where(ImageTag.image_id == image_id)
            .order_by(Tag.name)
        )
        return list(result.scalars().all())

    async def attach_tags(self, image: Image, tag_names: list[str]) -> None:
        """Attach the named tags to an image, skipping links that already exist."""
        for name in tag_names:
            tag = await self.upsert_by_name(name)
            existing = await self._session.execute(
                select(ImageTag).where(
                    ImageTag.image_id == image.id, ImageTag.tag_id == tag.id
                )
            )
            if existing.scalar_one_or_none() is None:
                self._session.add(ImageTag(image_id=image.id, tag_id=tag.id))
                # Flush per link so duplicate names within one call are visible
                # to the existence check above.
                await self._session.flush()

    async def replace_tags_on_image(self, image: Image, tag_names: list[str]) -> None:
        """Replace the image's tag set with exactly tag_names."""
        # Remove all existing associations
        existing_links = await self._session.execute(
            select(ImageTag).where(ImageTag.image_id == image.id)
        )
        for link in existing_links.scalars().all():
            await self._session.delete(link)
        await self._session.flush()

        # Add new associations
        for name in tag_names:
            tag = await self.upsert_by_name(name)
            self._session.add(ImageTag(image_id=image.id, tag_id=tag.id))
        await self._session.flush()

    async def list_tags(
        self,
        prefix: str | None = None,
        limit: int = 100,
        offset: int = 0,
    ) -> tuple[list[dict], int]:
        """List tags (with per-tag image counts) ordered by name.

        Fix: the prefix is now LIKE-escaped. '_' is both a legal tag character
        and a single-character LIKE wildcard, so an unescaped prefix such as
        'a_' previously matched 'axb' as well as 'a_b'.
        """
        count_subq = (
            select(func.count(ImageTag.image_id))
            .where(ImageTag.tag_id == Tag.id)
            .correlate(Tag)
            .scalar_subquery()
        )

        query = select(Tag, count_subq.label("image_count"))
        if prefix:
            query = query.where(Tag.name.like(f"{_escape_like(prefix)}%", escape="\\"))

        total_query = select(func.count()).select_from(query.subquery())
        total_result = await self._session.execute(total_query)
        total = total_result.scalar_one()

        paginated = query.order_by(Tag.name).limit(limit).offset(offset)
        rows = await self._session.execute(paginated)

        items = [
            {"id": str(tag.id), "name": tag.name, "image_count": count}
            for tag, count in rows.all()
        ]
        return items, total
|
||||
0
api/app/routers/__init__.py
Normal file
0
api/app/routers/__init__.py
Normal file
271
api/app/routers/images.py
Normal file
271
api/app/routers/images.py
Normal file
@@ -0,0 +1,271 @@
|
||||
import io
import struct
import uuid
import zlib
from typing import Annotated, Any

from fastapi import APIRouter, Depends, File, Form, HTTPException, Response, UploadFile
from fastapi.responses import RedirectResponse
from sqlalchemy.ext.asyncio import AsyncSession

from app.auth.provider import AuthProvider
from app.config import get_settings
from app.dependencies import get_auth, get_db, get_storage
from app.models import Image
from app.repositories.image_repo import ImageRepository
from app.repositories.tag_repo import TagRepository
from app.storage.backend import StorageBackend
from app.utils import compute_sha256
from app.validation import FileSizeError, MimeTypeError, validate_file_size, validate_mime_type

router = APIRouter(tags=["images"])


def _error(detail: str, code: str, status: int):
    """Raise an HTTPException carrying the API's structured error payload."""
    raise HTTPException(status_code=status, detail={"detail": detail, "code": code})


def _image_to_dict(image: Image, *, duplicate: bool | None = None) -> dict[str, Any]:
    """Serialise an Image row for API responses.

    The optional ``duplicate`` flag is included only when explicitly set,
    so plain reads never carry the key.
    """
    payload: dict[str, Any] = {
        "id": str(image.id),
        "hash": image.hash,
        "filename": image.filename,
        "mime_type": image.mime_type,
        "size_bytes": image.size_bytes,
        "width": image.width,
        "height": image.height,
        "storage_key": image.storage_key,
        "created_at": image.created_at.isoformat(),
        "tags": image.tags,
    }
    if duplicate is None:
        return payload
    payload["duplicate"] = duplicate
    return payload
|
||||
|
||||
|
||||
def _read_image_dimensions(data: bytes, mime_type: str) -> tuple[int, int]:
|
||||
"""Return (width, height) from raw image bytes. Falls back to (0, 0)."""
|
||||
try:
|
||||
if mime_type == "image/jpeg":
|
||||
return _jpeg_dimensions(data)
|
||||
elif mime_type == "image/png":
|
||||
return _png_dimensions(data)
|
||||
elif mime_type == "image/gif":
|
||||
return _gif_dimensions(data)
|
||||
elif mime_type == "image/webp":
|
||||
return _webp_dimensions(data)
|
||||
except Exception:
|
||||
pass
|
||||
return 0, 0
|
||||
|
||||
|
||||
def _jpeg_dimensions(data: bytes) -> tuple[int, int]:
|
||||
i = 0
|
||||
while i < len(data):
|
||||
if data[i] != 0xFF:
|
||||
break
|
||||
i += 1
|
||||
marker = data[i]
|
||||
i += 1
|
||||
if marker in (0xD8, 0xD9):
|
||||
continue
|
||||
length = struct.unpack(">H", data[i : i + 2])[0]
|
||||
if marker in (0xC0, 0xC1, 0xC2):
|
||||
h, w = struct.unpack(">HH", data[i + 3 : i + 7])
|
||||
return w, h
|
||||
i += length
|
||||
return 0, 0
|
||||
|
||||
|
||||
def _png_dimensions(data: bytes) -> tuple[int, int]:
|
||||
w, h = struct.unpack(">II", data[16:24])
|
||||
return w, h
|
||||
|
||||
|
||||
def _gif_dimensions(data: bytes) -> tuple[int, int]:
|
||||
w, h = struct.unpack("<HH", data[6:10])
|
||||
return w, h
|
||||
|
||||
|
||||
def _webp_dimensions(data: bytes) -> tuple[int, int]:
|
||||
if data[8:12] == b"VP8 ":
|
||||
w = struct.unpack("<H", data[26:28])[0] & 0x3FFF
|
||||
h = struct.unpack("<H", data[28:30])[0] & 0x3FFF
|
||||
return w, h
|
||||
elif data[8:12] == b"VP8L":
|
||||
bits = struct.unpack("<I", data[21:25])[0]
|
||||
w = (bits & 0x3FFF) + 1
|
||||
h = ((bits >> 14) & 0x3FFF) + 1
|
||||
return w, h
|
||||
return 0, 0
|
||||
|
||||
|
||||
@router.post("/images", status_code=201)
async def upload_image(
    file: UploadFile = File(...),
    tags: str | None = Form(None),
    db: AsyncSession = Depends(get_db),
    storage: StorageBackend = Depends(get_storage),
    auth: AuthProvider = Depends(get_auth),
    settings=Depends(get_settings),
):
    """Upload an image, deduplicated by the SHA-256 of its raw bytes.

    Returns 201 with the new record, or 200 with the existing record plus
    ``duplicate: true`` when identical bytes were already uploaded. Raises
    422 for unsupported MIME types, empty/oversized files, or invalid tags.

    Fix: the original used ``__import__("json")`` inline; replaced with a
    normal function-scope import.
    """
    import json  # only the duplicate fast-path serialises by hand

    data = await file.read()
    mime_type = file.content_type or "application/octet-stream"

    try:
        validate_mime_type(mime_type)
    except MimeTypeError:
        raise HTTPException(
            status_code=422,
            detail={"detail": f"Unsupported file type: {mime_type}", "code": "invalid_mime_type"},
        )

    try:
        validate_file_size(len(data), max_bytes=settings.max_upload_bytes)
    except FileSizeError as exc:
        raise HTTPException(
            status_code=422,
            detail={"detail": str(exc), "code": "file_too_large"},
        )

    hash_hex = compute_sha256(data)
    image_repo = ImageRepository(db)
    existing = await image_repo.get_by_hash(hash_hex)
    if existing:
        # Same bytes already stored: report the existing row with 200, not 201.
        return Response(
            content=json.dumps(_image_to_dict(existing, duplicate=True)),
            status_code=200,
            media_type="application/json",
        )

    # Parse tag names (comma- or whitespace-separated) and validate each.
    tag_names: list[str] = []
    if tags:
        tag_repo = TagRepository(db)
        raw = [t.strip() for t in tags.replace(",", " ").split() if t.strip()]
        try:
            tag_names = [tag_repo.normalise_and_validate(t) for t in raw]
        except ValueError as exc:
            raise HTTPException(
                status_code=422,
                detail={"detail": str(exc), "code": "invalid_tag"},
            )

    width, height = _read_image_dimensions(data, mime_type)
    # Store the blob before the row; the DB transaction rolls back on failure.
    await storage.put(hash_hex, data, mime_type)

    image = await image_repo.create(
        hash_hex=hash_hex,
        filename=file.filename or "upload",
        mime_type=mime_type,
        size_bytes=len(data),
        width=width,
        height=height,
        storage_key=hash_hex,
    )

    if tag_names:
        tag_repo = TagRepository(db)
        await tag_repo.attach_tags(image, tag_names)
        await db.refresh(image, ["image_tags"])

    return _image_to_dict(image, duplicate=False)
|
||||
|
||||
|
||||
@router.get("/images")
async def list_images(
    tags: str | None = None,
    limit: int = 50,
    offset: int = 0,
    db: AsyncSession = Depends(get_db),
):
    """List images newest-first, optionally filtered by comma-separated tags (AND)."""
    limit = min(limit, 100)  # hard cap keeps pagination bounded
    tag_names = None
    if tags:
        tag_names = [part.strip() for part in tags.split(",") if part.strip()]
    repo = ImageRepository(db)
    images, total = await repo.list_images(tag_names=tag_names, limit=limit, offset=offset)
    return {
        "items": [_image_to_dict(img) for img in images],
        "total": total,
        "limit": limit,
        "offset": offset,
    }
|
||||
|
||||
|
||||
@router.get("/images/{image_id}")
async def get_image(
    image_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
):
    """Return a single image record by id, or 404 when it does not exist."""
    record = await ImageRepository(db).get_by_id(image_id)
    if record is None:
        raise HTTPException(
            status_code=404,
            detail={"detail": "Image not found", "code": "image_not_found"},
        )
    return _image_to_dict(record)
|
||||
|
||||
|
||||
@router.get("/images/{image_id}/file")
async def serve_image_file(
    image_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    storage: StorageBackend = Depends(get_storage),
):
    """Redirect (302) to a one-hour pre-signed URL for the image's bytes."""
    record = await ImageRepository(db).get_by_id(image_id)
    if record is None:
        raise HTTPException(
            status_code=404,
            detail={"detail": "Image not found", "code": "image_not_found"},
        )
    signed_url = await storage.get_presigned_url(record.storage_key, expires_in_seconds=3600)
    return RedirectResponse(url=signed_url, status_code=302)
|
||||
|
||||
|
||||
@router.patch("/images/{image_id}/tags")
async def update_image_tags(
    image_id: uuid.UUID,
    body: dict,
    db: AsyncSession = Depends(get_db),
):
    """Replace an image's tag set with the list in ``body["tags"]``.

    Fix: the body is a raw dict, so ``tags`` was never shape-checked — a JSON
    string value would be iterated character by character. Non-list or
    non-string entries now return 422 ``invalid_tag``.
    """
    image_repo = ImageRepository(db)
    image = await image_repo.get_by_id(image_id)
    if not image:
        raise HTTPException(
            status_code=404,
            detail={"detail": "Image not found", "code": "image_not_found"},
        )

    raw_tags = body.get("tags", [])
    if not isinstance(raw_tags, list) or not all(isinstance(t, str) for t in raw_tags):
        raise HTTPException(
            status_code=422,
            detail={"detail": "'tags' must be a list of strings", "code": "invalid_tag"},
        )

    tag_repo = TagRepository(db)
    try:
        tag_names = [tag_repo.normalise_and_validate(t) for t in raw_tags]
    except ValueError as exc:
        raise HTTPException(
            status_code=422,
            detail={"detail": str(exc), "code": "invalid_tag"},
        )

    await tag_repo.replace_tags_on_image(image, tag_names)
    await db.refresh(image, ["image_tags"])
    return _image_to_dict(image)
|
||||
|
||||
|
||||
@router.delete("/images/{image_id}", status_code=204)
async def delete_image(
    image_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    storage: StorageBackend = Depends(get_storage),
):
    """Delete an image row and its stored bytes; 404 when the id is unknown."""
    record = await ImageRepository(db).get_by_id(image_id)
    if record is None:
        raise HTTPException(
            status_code=404,
            detail={"detail": "Image not found", "code": "image_not_found"},
        )
    # Capture the key before the ORM object is deleted, then remove the blob.
    blob_key = record.storage_key
    await ImageRepository(db).delete(record)
    await storage.delete(blob_key)
    return Response(status_code=204)
|
||||
20
api/app/routers/tags.py
Normal file
20
api/app/routers/tags.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.dependencies import get_db
from app.repositories.tag_repo import TagRepository

router = APIRouter(tags=["tags"])


@router.get("/tags")
async def list_tags(
    q: str | None = None,
    limit: int = 100,
    offset: int = 0,
    db: AsyncSession = Depends(get_db),
):
    """List tags with image counts; ``q`` filters by name prefix."""
    capped_limit = min(limit, 200)  # hard cap keeps pagination bounded
    repo = TagRepository(db)
    items, total = await repo.list_tags(prefix=q, limit=capped_limit, offset=offset)
    return {"items": items, "total": total, "limit": capped_limit, "offset": offset}
|
||||
0
api/app/storage/__init__.py
Normal file
0
api/app/storage/__init__.py
Normal file
15
api/app/storage/backend.py
Normal file
15
api/app/storage/backend.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from abc import ABC, abstractmethod


class StorageBackend(ABC):
    """Abstract blob store: content-addressed puts, signed reads, deletes."""

    @abstractmethod
    async def put(self, key: str, data: bytes, content_type: str) -> None:
        """Store object at key with given content type."""

    @abstractmethod
    async def get_presigned_url(self, key: str, expires_in_seconds: int = 3600) -> str:
        """Return a pre-signed URL valid for expires_in_seconds."""

    @abstractmethod
    async def delete(self, key: str) -> None:
        """Delete object at key."""
|
||||
46
api/app/storage/s3_backend.py
Normal file
46
api/app/storage/s3_backend.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from contextlib import asynccontextmanager

import aiobotocore.session

from app.config import get_settings
from app.storage.backend import StorageBackend


class S3StorageBackend(StorageBackend):
    """StorageBackend implementation backed by any S3-compatible store (MinIO/AWS).

    A short-lived client is created per operation via ``_client``; the
    aiobotocore session itself is reused for the backend's lifetime.
    """

    def __init__(self) -> None:
        self._settings = get_settings()
        self._session = aiobotocore.session.get_session()

    @asynccontextmanager
    async def _client(self):
        """Yield a configured async S3 client scoped to one operation."""
        s = self._settings
        async with self._session.create_client(
            "s3",
            region_name=s.s3_region,
            # Falsy endpoint (e.g. empty string) means "use the AWS default".
            endpoint_url=s.s3_endpoint_url or None,
            aws_access_key_id=s.s3_access_key_id,
            aws_secret_access_key=s.s3_secret_access_key,
        ) as client:
            yield client

    async def put(self, key: str, data: bytes, content_type: str) -> None:
        """Upload *data* under *key* with the given Content-Type."""
        async with self._client() as client:
            await client.put_object(
                Bucket=self._settings.s3_bucket_name,
                Key=key,
                Body=data,
                ContentType=content_type,
            )

    async def get_presigned_url(self, key: str, expires_in_seconds: int = 3600) -> str:
        """Return a time-limited GET URL for *key*.

        NOTE(review): awaiting generate_presigned_url assumes an aiobotocore
        version where it is a coroutine — confirm against the pinned version.
        """
        async with self._client() as client:
            url = await client.generate_presigned_url(
                "get_object",
                Params={"Bucket": self._settings.s3_bucket_name, "Key": key},
                ExpiresIn=expires_in_seconds,
            )
            return url

    async def delete(self, key: str) -> None:
        """Delete the object stored under *key*."""
        async with self._client() as client:
            await client.delete_object(Bucket=self._settings.s3_bucket_name, Key=key)
|
||||
5
api/app/utils.py
Normal file
5
api/app/utils.py
Normal file
@@ -0,0 +1,5 @@
|
||||
import hashlib


def compute_sha256(data: bytes) -> str:
    """Return the lowercase hex SHA-256 digest of *data*."""
    digest = hashlib.sha256()
    digest.update(data)
    return digest.hexdigest()
|
||||
21
api/app/validation.py
Normal file
21
api/app/validation.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# The only image formats the board accepts for upload.
ACCEPTED_MIME_TYPES = frozenset(["image/jpeg", "image/png", "image/gif", "image/webp"])


class MimeTypeError(ValueError):
    """Raised when an upload's MIME type is not an accepted image format."""


class FileSizeError(ValueError):
    """Raised when an upload is empty or exceeds the configured size limit."""


def validate_mime_type(mime_type: str) -> None:
    """Raise MimeTypeError unless *mime_type* is an accepted image format."""
    if mime_type in ACCEPTED_MIME_TYPES:
        return
    raise MimeTypeError(f"Unsupported MIME type: {mime_type}")


def validate_file_size(size_bytes: int, max_bytes: int) -> None:
    """Raise FileSizeError when *size_bytes* is non-positive or above *max_bytes*."""
    if size_bytes <= 0:
        raise FileSizeError("File must not be empty")
    if size_bytes > max_bytes:
        raise FileSizeError(f"File size {size_bytes} exceeds limit of {max_bytes} bytes")
|
||||
Reference in New Issue
Block a user