refactor: restructure settings into composed subgroups

This commit is contained in:
agatha 2026-03-14 14:36:27 -04:00
parent 1a6544a7cb
commit 81acd986c2
5 changed files with 157 additions and 23 deletions

View File

@ -1,4 +1,29 @@
DATABASE_URL=postgresql+asyncpg://proxypool:proxypool@localhost:5432/proxypool
REDIS_URL=redis://localhost:6379/0
SECRET_KEY=change-me-to-something-random-in-production
# Top-level
SECRET_KEY=change-me-to-something-random
LOG_LEVEL=DEBUG
# Database
DB_URL=postgresql+asyncpg://proxypool:proxypool@localhost:5432/proxypool
DB_POOL_SIZE=10
DB_ECHO=false
# Redis
REDIS_URL=redis://localhost:6379/0
# Proxy pipeline
PROXY_JUDGE_URL=http://httpbin.org/ip
PROXY_REVALIDATE_ACTIVE_MINUTES=10
# Accounts
ACCOUNT_DEFAULT_CREDITS=100
# Notifications (optional — leave empty to disable)
NOTIFY_SMTP_HOST=
NOTIFY_SMTP_PORT=587
NOTIFY_SMTP_USER=
NOTIFY_SMTP_PASSWORD=
NOTIFY_ALERT_EMAIL=
NOTIFY_WEBHOOK_URL=
# Cleanup
CLEANUP_PRUNE_DEAD_AFTER_DAYS=30

View File

@ -71,7 +71,7 @@ async def run_async_migrations() -> None:
"""
settings = get_settings()
connectable = create_async_engine(settings.database_url)
connectable = create_async_engine(settings.db.url)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)

View File

@ -1,27 +1,136 @@
from functools import lru_cache

from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
class DatabaseSettings(BaseSettings):
    """PostgreSQL connection-pool settings (env prefix ``DB_``)."""

    # env_file must be declared here as well: subgroups built via
    # ``default_factory`` run their own settings load and do NOT inherit the
    # parent Settings' env_file, so without it DB_* keys in .env are ignored
    # and the required DB_URL fails validation.
    model_config = SettingsConfigDict(
        env_prefix="DB_",
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    url: str = Field(
        description="PostgreSQL connection string with asyncpg driver",
    )
    pool_size: int = Field(
        default=10,
        description="Number of persistent connections in the pool",
    )
    max_overflow: int = Field(
        default=10,
        description="Max temporary connections above pool_size",
    )
    echo: bool = Field(
        default=False,
        description="Log all SQL statements for debugging purposes",
    )
class RedisSettings(BaseSettings):
    """Redis connection settings (env prefix ``REDIS_``)."""

    # env_file declared here too: nested groups created via default_factory
    # load themselves and do not inherit the parent Settings' env_file.
    model_config = SettingsConfigDict(
        env_prefix="REDIS_",
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    url: str = Field(
        default="redis://localhost:6379",
        description="Redis connection string",
    )
    key_prefix: str = Field(
        default="pp:",
        description="Prefix for all Redis keys to avoid collisions",
    )
class ProxyPipelineSettings(BaseSettings):
    """Proxy scraping, checking and revalidation settings (env prefix ``PROXY_``)."""

    # env_file declared here too: nested groups created via default_factory
    # load themselves and do not inherit the parent Settings' env_file.
    model_config = SettingsConfigDict(
        env_prefix="PROXY_",
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    scrape_timeout_seconds: float = Field(
        default=30.0,
        description="HTTP timeout when fetching proxy sources",
    )
    scrape_user_agent: str = Field(
        default=(
            "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 "
            "(KHTML, like Gecko) Chrome/144.0.0.0 Safari/537.36"
        ),
        description="User-Agent string for scrape requests",
    )
    judge_url: str = Field(
        default="http://httpbin.org/ip",
        description="URL for determining proxy exit IP",
    )
    # Per-stage timeouts (seconds) for the proxy check pipeline.
    check_tcp_timeout: float = Field(default=5.0)
    check_http_timeout: float = Field(default=10.0)
    check_pipeline_timeout: float = Field(default=120.0)
    revalidate_active_minutes: int = Field(
        default=10,
        description="Re-check active proxies every N minutes",
    )
    revalidate_dead_hours: int = Field(
        default=6,
        description="Re-check dead proxies every N hours",
    )
    revalidate_batch_size: int = Field(default=200)
    pool_low_threshold: int = Field(
        default=100,
        description="Emit pool_low event when active count drops below this threshold",
    )
class AccountSettings(BaseSettings):
    """Account, credit and API-key settings (env prefix ``ACCOUNT_``)."""

    # env_file declared here too: nested groups created via default_factory
    # load themselves and do not inherit the parent Settings' env_file.
    model_config = SettingsConfigDict(
        env_prefix="ACCOUNT_",
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    default_credits: int = Field(
        default=100,
        description="Default credits per new account",
    )
    max_lease_duration_seconds: int = Field(default=3600)
    credit_low_threshold: int = Field(
        default=10,
        description="Emit low_balance event below this threshold",
    )
    api_key_prefix: str = Field(
        default="pp_",
        description="API key prefix",
    )
class NotificationSettings(BaseSettings):
    """Outgoing alert settings for SMTP and webhook (env prefix ``NOTIFY_``).

    Every destination is optional; ``None`` disables that channel.
    """

    # env_file declared here too: nested groups created via default_factory
    # load themselves and do not inherit the parent Settings' env_file.
    model_config = SettingsConfigDict(
        env_prefix="NOTIFY_",
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    smtp_host: str | None = Field(default=None)
    smtp_port: int = Field(default=587)
    smtp_user: str | None = Field(default=None)
    smtp_password: str | None = Field(default=None)
    alert_email: str | None = Field(default=None)
    webhook_url: str | None = Field(default=None)

    # .env ships these keys blank ("NOTIFY_SMTP_HOST="); without coercion they
    # load as "" rather than None and defeat the documented
    # "leave empty to disable" behavior of `is None` checks downstream.
    @field_validator(
        "smtp_host",
        "smtp_user",
        "smtp_password",
        "alert_email",
        "webhook_url",
        mode="before",
    )
    @classmethod
    def _blank_is_none(cls, value: object) -> object:
        return None if value == "" else value
class CleanupSettings(BaseSettings):
    """Retention/pruning settings for dead proxies and check history (env prefix ``CLEANUP_``)."""

    # env_file declared here too: nested groups created via default_factory
    # load themselves and do not inherit the parent Settings' env_file.
    model_config = SettingsConfigDict(
        env_prefix="CLEANUP_",
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    prune_dead_after_days: int = Field(default=30)
    prune_checks_after_days: int = Field(default=7)
    prune_checks_keep_last: int = Field(default=100)
class Settings(BaseSettings):
    """Top-level application settings composed from per-domain subgroups.

    Only values that belong to no domain group live here; everything else is
    namespaced, e.g. ``settings.db.url`` or ``settings.proxy.judge_url``.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="ignore",
    )

    # Top-level settings that don't belong to a group
    app_name: str = Field(default="proxy-pool")
    log_level: str = Field(default="INFO")
    secret_key: str = Field(description="Used for internal signing")
    cors_origins: list[str] = Field(default_factory=list)

    # Composed settings groups.
    # NOTE(review): default_factory means each group loads itself from the
    # environment when Settings() is constructed; a required group field
    # (e.g. DB_URL) that is unset raises ValidationError at that point.
    db: DatabaseSettings = Field(default_factory=DatabaseSettings)
    redis: RedisSettings = Field(default_factory=RedisSettings)
    proxy: ProxyPipelineSettings = Field(default_factory=ProxyPipelineSettings)
    account: AccountSettings = Field(default_factory=AccountSettings)
    notification: NotificationSettings = Field(default_factory=NotificationSettings)
    cleanup: CleanupSettings = Field(default_factory=CleanupSettings)
@lru_cache

View File

@ -5,10 +5,10 @@ from proxy_pool.config import Settings
def create_engine(settings: Settings):
    """Create the application's async SQLAlchemy engine.

    Pool sizing and SQL echo come from the composed ``settings.db`` group
    (echo is now its own flag rather than being derived from log_level).
    """
    return create_async_engine(
        settings.db.url,
        pool_size=settings.db.pool_size,
        max_overflow=settings.db.max_overflow,
        echo=settings.db.echo,
    )

View File

@ -4,7 +4,7 @@ from collections.abc import AsyncGenerator
import pytest
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from proxy_pool.config import Settings
from proxy_pool.config import Settings, DatabaseSettings, RedisSettings
from proxy_pool.db.base import Base
@ -18,16 +18,16 @@ def event_loop():
@pytest.fixture(scope="session")
def test_settings() -> Settings:
    """Session-scoped Settings built explicitly so tests never read .env.

    Uses Redis logical database 1 (``/1``) as before the refactor, keeping the
    test suite isolated from the development data in database 0 — the new
    ``redis://localhost:6379`` value silently dropped that isolation.
    """
    return Settings(
        secret_key="test-secret",
        log_level="DEBUG",
        db=DatabaseSettings(
            url="postgresql+asyncpg://proxypool:proxypool@localhost:5432/proxypool"
        ),
        redis=RedisSettings(url="redis://localhost:6379/1"),
    )
@pytest.fixture(scope="session")
async def engine(test_settings: Settings):
    """Session-scoped async engine against the test database URL.

    Disposed (connections closed) once the whole test session finishes.
    """
    engine = create_async_engine(test_settings.db.url)
    yield engine
    await engine.dispose()