[Spec Kit] Implementation progress

Implements all 88 tasks for the Reaction Image Board (specs/001-reaction-image-board):

- docker-compose.yml: postgres, minio, minio-init, api, ui services with healthchecks
- api/: FastAPI app with SQLAlchemy 2.x async, Alembic migrations, S3/MinIO storage,
  full integration + unit test suite (pytest + pytest-asyncio)
- ui/: Angular 19 standalone app (Library, Upload, Detail, NotFound components)
- .env.example: all required environment variables
- .gitignore: Python, Node, Docker, IDE, .env patterns

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-05-02 16:13:23 +00:00
parent 691f7570fe
commit 8bf6ef443a
74 changed files with 3005 additions and 88 deletions

60
api/alembic/env.py Normal file
View File

@@ -0,0 +1,60 @@
import asyncio
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
from app.config import get_settings
from app.database import Base
import app.models # noqa: F401 — ensure all models are imported
# Alembic runtime configuration object (populated from alembic.ini).
config = context.config

# Inject the database URL from application settings so alembic.ini never
# has to duplicate (or leak) connection credentials.
settings = get_settings()
config.set_main_option("sqlalchemy.url", settings.database_url)

# Configure Python logging from the [loggers]/[handlers] sections of the
# ini file, when one is present.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Metadata that `alembic revision --autogenerate` diffs against the live DB.
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    No DBAPI connection is made; the migration SQL is rendered to stdout
    with literal parameter values bound into the statements.
    """
    offline_options = {
        "url": config.get_main_option("sqlalchemy.url"),
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_options)
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Configure the migration context on *connection* and run migrations.

    Synchronous callback handed to ``AsyncConnection.run_sync`` by the
    async entry point.
    """
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Build an async engine from the alembic config and run migrations on it.

    Fix: the engine was previously disposed only on the success path, so a
    failing migration leaked the engine's connections. ``dispose()`` now runs
    in a ``finally`` block regardless of outcome.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        # One-shot migration run: no need to keep pooled connections around.
        poolclass=pool.NullPool,
    )
    try:
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)
    finally:
        # Always release the engine, even if a migration raised.
        await connectable.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode over a real (async) DB connection."""
    asyncio.run(run_async_migrations())


# Entry point: Alembic imports this module and picks offline vs online mode
# based on how it was invoked (e.g. `alembic upgrade head --sql` is offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,25 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,63 @@
"""initial schema — images, tags, image_tags
Revision ID: 001
Revises:
Create Date: 2026-05-02
"""
from typing import Sequence, Union
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
revision: str = "001"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial schema: images, tags, and the image_tags join table.

    Fix: dropped two objects on ``image_tags`` that were pure redundancy in
    PostgreSQL — ``uq_image_tag`` duplicated the composite primary key
    (image_id, tag_id) exactly, and ``ix_image_tags_image_id`` duplicated the
    PK index's leading column. Both only cost write throughput and storage.
    """
    op.create_table(
        "images",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        # Content hash (64 hex chars — presumably SHA-256; confirm against the
        # upload service) used for deduplication via the unique constraint.
        sa.Column("hash", sa.String(64), nullable=False),
        sa.Column("filename", sa.String(), nullable=False),
        sa.Column("mime_type", sa.String(20), nullable=False),
        sa.Column("size_bytes", sa.BigInteger(), nullable=False),
        sa.Column("width", sa.Integer(), nullable=False),
        sa.Column("height", sa.Integer(), nullable=False),
        sa.Column("storage_key", sa.String(64), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("hash", name="uq_images_hash"),
    )
    # NOTE(review): uq_images_hash already creates a unique index on hash in
    # PostgreSQL, so this extra index overlaps it. Kept in case the ORM model
    # declares index=True (autogenerate would re-add it) — verify and prune.
    op.create_index("ix_images_hash", "images", ["hash"])
    op.create_table(
        "tags",
        sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("name", sa.String(64), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name", name="uq_tags_name"),
    )
    # NOTE(review): overlaps the uq_tags_name index as above — verify against
    # the ORM model before pruning.
    op.create_index("ix_tags_name", "tags", ["name"])
    # Separate index with varchar_pattern_ops so LIKE 'prefix%' searches can
    # use an index scan (the default opclass only serves equality/ordering
    # under non-C collations).
    op.create_index(
        "ix_tags_name_prefix",
        "tags",
        ["name"],
        postgresql_ops={"name": "varchar_pattern_ops"},
    )
    op.create_table(
        "image_tags",
        sa.Column("image_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("tag_id", postgresql.UUID(as_uuid=True), nullable=False),
        # Deleting an image removes its tag links; deleting a tag still in
        # use is refused.
        sa.ForeignKeyConstraint(["image_id"], ["images.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["tag_id"], ["tags.id"], ondelete="RESTRICT"),
        # The composite PK also serves image_id-prefixed lookups, so no
        # separate image_id index (and no duplicate unique constraint) needed.
        sa.PrimaryKeyConstraint("image_id", "tag_id"),
    )
    # tag_id is not a PK prefix, so tag->images lookups need their own index.
    op.create_index("ix_image_tags_tag_id", "image_tags", ["tag_id"])
def downgrade() -> None:
    """Drop the whole schema, join table first to satisfy FK dependencies."""
    for table_name in ("image_tags", "tags", "images"):
        op.drop_table(table_name)