office_translator/database/connection.py
Sepehr 550f3516db feat: Add PostgreSQL database infrastructure
- Add SQLAlchemy models for User, Translation, ApiKey, UsageLog, PaymentHistory
- Add database connection management with PostgreSQL/SQLite support
- Add repository layer for CRUD operations
- Add Alembic migration setup with initial migration
- Update auth_service to automatically use database when DATABASE_URL is set
- Update docker-compose.yml with PostgreSQL service and Redis (non-optional)
- Add database migration script (scripts/migrate_to_db.py)
- Update .env.example with database configuration
2025-12-31 10:56:19 +01:00


"""
Database connection and session management
Supports both PostgreSQL (production) and SQLite (development/testing)
"""
import os
import logging
from typing import Generator, Optional
from contextlib import contextmanager
from sqlalchemy import create_engine, event
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.pool import QueuePool, StaticPool
logger = logging.getLogger(__name__)
# Database URL from environment:
#   PostgreSQL: postgresql://user:password@host:port/database
#   SQLite:     sqlite:///./data/translate.db
DATABASE_URL = os.getenv("DATABASE_URL", "")

# Determine whether we're using SQLite or PostgreSQL (default to SQLite when unset)
_is_sqlite = DATABASE_URL.startswith("sqlite") if DATABASE_URL else True

# Create engine based on database type
if DATABASE_URL and not _is_sqlite:
    # PostgreSQL configuration
    engine = create_engine(
        DATABASE_URL,
        poolclass=QueuePool,
        pool_size=5,         # persistent connections kept in the pool
        max_overflow=10,     # extra connections allowed under load (15 total)
        pool_timeout=30,     # seconds to wait for a free connection
        pool_recycle=1800,   # recycle connections after 30 minutes
        pool_pre_ping=True,  # check connection health before use
        echo=os.getenv("DATABASE_ECHO", "false").lower() == "true",
    )
    logger.info("✅ Database configured with PostgreSQL")
else:
    # SQLite configuration (for development/testing or when no DATABASE_URL is set)
    sqlite_path = os.getenv("SQLITE_PATH", "data/translate.db")
    sqlite_dir = os.path.dirname(sqlite_path)
    if sqlite_dir:  # guard against makedirs("") when the path has no directory part
        os.makedirs(sqlite_dir, exist_ok=True)
    sqlite_url = f"sqlite:///./{sqlite_path}"
    engine = create_engine(
        sqlite_url,
        connect_args={"check_same_thread": False},  # allow use across FastAPI worker threads
        poolclass=StaticPool,
        echo=os.getenv("DATABASE_ECHO", "false").lower() == "true",
    )

    # Enable foreign key enforcement for SQLite (disabled per connection by default)
    @event.listens_for(engine, "connect")
    def set_sqlite_pragma(dbapi_connection, connection_record):
        cursor = dbapi_connection.cursor()
        cursor.execute("PRAGMA foreign_keys=ON")
        cursor.close()

    if not DATABASE_URL:
        logger.warning("⚠️ DATABASE_URL not set, using SQLite for development")
    else:
        logger.info(f"✅ Database configured with SQLite: {sqlite_path}")

# Session factory
SessionLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=engine,
    expire_on_commit=False,
)


def get_db() -> Generator[Session, None, None]:
    """
    Dependency for FastAPI to get a database session.
    Usage: db: Session = Depends(get_db)
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
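
# Example (illustrative): wiring get_db into a FastAPI route. The router, path and
# lookup below are hypothetical; only the Depends(get_db) pattern comes from this
# module, and User is one of the models this commit adds to database.models:
#
#   from fastapi import APIRouter, Depends
#   from sqlalchemy.orm import Session
#   from database.connection import get_db
#   from database.models import User
#
#   router = APIRouter()
#
#   @router.get("/users/{user_id}")
#   def read_user(user_id: int, db: Session = Depends(get_db)):
#       return db.get(User, user_id)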

@contextmanager
def get_db_session() -> Generator[Session, None, None]:
    """
    Context manager for database session.
    Usage: with get_db_session() as db: ...
    """
    db = SessionLocal()
    try:
        yield db
        db.commit()
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()

# Alias for backward compatibility
get_sync_session = get_db_session
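
# Example (illustrative): get_db_session in a script or background task where FastAPI
# dependency injection is unavailable. Commit and rollback are handled by the context
# manager itself; Translation is one of the models this commit adds:
#
#   from database.connection import get_db_session
#   from database.models import Translation
#
#   with get_db_session() as db:
#       total = db.query(Translation).count()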

def init_db():
    """
    Initialize database tables.
    Call this on application startup.
    """
    from database.models import Base  # local import: models are only needed here

    Base.metadata.create_all(bind=engine)
    logger.info("✅ Database tables initialized")

def check_db_connection() -> bool:
    """
    Check if the database connection is healthy.
    Returns True if the connection works, False otherwise.
    """
    try:
        with engine.connect() as conn:
            conn.execute(text("SELECT 1"))
        return True
    except Exception as e:
        logger.error(f"Database connection check failed: {e}")
        return False
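
# Example (illustrative): exposing check_db_connection() through a health endpoint.
# The path and response shape are assumptions, not part of this module:
#
#   from fastapi import APIRouter
#   from database.connection import check_db_connection
#
#   router = APIRouter()
#
#   @router.get("/health/db")
#   def db_health():
#       return {"database": "ok" if check_db_connection() else "unavailable"}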

# Connection pool stats (for monitoring)
def get_pool_stats() -> dict:
    """Get database connection pool statistics"""
    # Only QueuePool (the PostgreSQL configuration) exposes these counters;
    # SQLite's StaticPool does not implement size()/overflow().
    if isinstance(engine.pool, QueuePool):
        return {
            "pool_size": engine.pool.size(),
            "checked_in": engine.pool.checkedin(),
            "checked_out": engine.pool.checkedout(),
            "overflow": engine.pool.overflow(),
        }
    return {"status": "pool stats not available"}