Initial commit

Committed on 2026-02-01 09:31:38 +01:00 as commit e02db93960.
4396 changed files with 1511612 additions and 0 deletions.

1
backend/alembic/README Normal file
View File

@@ -0,0 +1 @@
Ce répertoire contient les scripts de migration Alembic.

82
backend/alembic/env.py Normal file
View File

@@ -0,0 +1,82 @@
"""Configuration de l'environnement Alembic."""
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
import sys
from pathlib import Path
# Ajouter le répertoire parent au path pour importer les modèles
sys.path.insert(0, str(Path(__file__).resolve().parents[0]))
# Importer la configuration et le base
from app.database import Base
from app.models.user import User
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a database URL — no Engine and hence
    no DBAPI is needed. Calls to context.execute() emit the generated SQL
    to the script output instead of executing against a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the ``sqlalchemy.*`` keys of the active config
    section and runs the migrations over a live connection. NullPool is
    used because a migration run needs exactly one short-lived connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Dispatch on the invocation mode selected by the alembic CLI
# (e.g. --sql produces offline mode).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1 @@
# Ce fichier permet de garder le répertoire vide dans git

View File

@@ -0,0 +1,45 @@
"""Initial migration
Revision ID: 0001
Revises:
Create Date: 2026-01-17 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create initial tables.

    Uses ``server_default`` instead of the original client-side
    ``default=`` arguments: in a migration there is no ORM layer, so
    ``Column(default=...)`` has no effect on the emitted DDL and rows
    inserted outside the app would get NULL/unset values.
    """
    # Create users table
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('password_hash', sa.String(), nullable=True),
        sa.Column('is_premium', sa.Boolean(), nullable=False, server_default=sa.false()),
        sa.Column('referral_code', sa.String(), nullable=True, unique=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('daily_predictions_count', sa.Integer(), server_default='0'),
        sa.Column('last_prediction_date', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Unique lookups by email and referral code; plain index on id.
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_referral_code'), 'users', ['referral_code'], unique=True)
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
def downgrade() -> None:
    """Drop initial tables.

    Drops every index created by ``upgrade`` — including
    ``ix_users_referral_code``, which the original downgrade omitted —
    before dropping the table itself.
    """
    op.drop_index(op.f('ix_users_id'), table_name='users')
    op.drop_index(op.f('ix_users_referral_code'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')

View File

@@ -0,0 +1,47 @@
"""Create tweets table
Revision ID: 0002
Revises: 0001
Create Date: 2026-01-17 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0002'
down_revision = '0001'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the tweets table and its indexes."""
    op.create_table(
        'tweets',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('tweet_id', sa.String(length=255), nullable=False),
        sa.Column('text', sa.String(length=1000), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('retweet_count', sa.Integer(), nullable=True),
        sa.Column('like_count', sa.Integer(), nullable=True),
        sa.Column('match_id', sa.Integer(), nullable=True),
        sa.Column('source', sa.String(length=50), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Single-column indexes (tweet_id unique), plus one named composite
    # index for per-match, per-source queries.
    index_specs = (
        (op.f('ix_tweets_id'), ['id'], False),
        (op.f('ix_tweets_tweet_id'), ['tweet_id'], True),
        (op.f('ix_tweets_created_at'), ['created_at'], False),
        (op.f('ix_tweets_match_id'), ['match_id'], False),
        ('idx_tweets_match_id_source', ['match_id', 'source'], False),
    )
    for name, columns, is_unique in index_specs:
        op.create_index(name, 'tweets', columns, unique=is_unique)
def downgrade() -> None:
    """Drop the tweets table, removing its indexes first."""
    for index_name in (
        'idx_tweets_match_id_source',
        op.f('ix_tweets_match_id'),
        op.f('ix_tweets_created_at'),
        op.f('ix_tweets_tweet_id'),
        op.f('ix_tweets_id'),
    ):
        op.drop_index(index_name, table_name='tweets')
    op.drop_table('tweets')

View File

@@ -0,0 +1,74 @@
"""Create Reddit posts and comments tables
Revision ID: 0003
Revises: 0002
Create Date: 2026-01-17 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0003'
down_revision = '0002'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create Reddit posts and comments tables."""
    # posts_reddit: one row per collected Reddit submission.
    op.create_table(
        'posts_reddit',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('post_id', sa.String(length=255), nullable=False),
        sa.Column('title', sa.String(length=500), nullable=False),
        sa.Column('text', sa.Text(), nullable=True),
        sa.Column('upvotes', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('match_id', sa.Integer(), nullable=True),
        sa.Column('subreddit', sa.String(length=100), nullable=False),
        sa.Column('source', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # post_id is the only unique index (external Reddit identifier).
    for column, is_unique in (
        ('created_at', False),
        ('id', False),
        ('match_id', False),
        ('post_id', True),
        ('subreddit', False),
    ):
        op.create_index(op.f(f'ix_posts_reddit_{column}'), 'posts_reddit', [column], unique=is_unique)
    # comments_reddit: one row per collected Reddit comment.
    op.create_table(
        'comments_reddit',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('comment_id', sa.String(length=255), nullable=False),
        sa.Column('post_id', sa.String(length=255), nullable=False),
        sa.Column('text', sa.Text(), nullable=False),
        sa.Column('upvotes', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('source', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # comment_id is the unique external identifier here.
    for column, is_unique in (
        ('created_at', False),
        ('id', False),
        ('comment_id', True),
        ('post_id', False),
    ):
        op.create_index(op.f(f'ix_comments_reddit_{column}'), 'comments_reddit', [column], unique=is_unique)
def downgrade() -> None:
    """Drop Reddit posts and comments tables in reverse creation order."""
    # comments_reddit first, then posts_reddit.
    for column in ('post_id', 'comment_id', 'id', 'created_at'):
        op.drop_index(op.f(f'ix_comments_reddit_{column}'), table_name='comments_reddit')
    op.drop_table('comments_reddit')
    for column in ('subreddit', 'post_id', 'match_id', 'id', 'created_at'):
        op.drop_index(op.f(f'ix_posts_reddit_{column}'), table_name='posts_reddit')
    op.drop_table('posts_reddit')

View File

@@ -0,0 +1,55 @@
"""Create sentiment scores table
Revision ID: 0004
Revises: 0003
Create Date: 2026-01-17 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0004'
down_revision = '0003'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the sentiment_scores table and its indexes."""
    op.create_table(
        'sentiment_scores',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('entity_id', sa.String(length=255), nullable=False),
        sa.Column('entity_type', sa.String(length=50), nullable=False),
        sa.Column('score', sa.Float(), nullable=False),
        sa.Column('sentiment_type', sa.String(length=20), nullable=False),
        sa.Column('positive', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('negative', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('neutral', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.text('(CURRENT_TIMESTAMP)')),
        sa.PrimaryKeyConstraint('id')
    )
    # Non-unique single-column indexes on all queried columns.
    for column in ('id', 'entity_id', 'entity_type', 'score', 'sentiment_type', 'created_at'):
        op.create_index(op.f(f'ix_sentiment_scores_{column}'), 'sentiment_scores', [column], unique=False)
    # Composite index for looking up all scores of one entity.
    op.create_index('idx_sentiment_scores_entity', 'sentiment_scores', ['entity_id', 'entity_type'], unique=False)
def downgrade() -> None:
    """Drop the sentiment_scores table after removing its indexes."""
    op.drop_index('idx_sentiment_scores_entity', table_name='sentiment_scores')
    for column in ('created_at', 'sentiment_type', 'score', 'entity_type', 'entity_id', 'id'):
        op.drop_index(op.f(f'ix_sentiment_scores_{column}'), table_name='sentiment_scores')
    op.drop_table('sentiment_scores')

View File

@@ -0,0 +1,62 @@
"""create energy scores table
Revision ID: 20260117_0004
Revises: 20260117_0003
Create Date: 2026-01-17 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20260117_0004'
down_revision = '20260117_0003'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the energy_scores table and its indexes."""
    op.create_table(
        'energy_scores',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('match_id', sa.Integer(), nullable=False),
        sa.Column('team_id', sa.Integer(), nullable=False),
        sa.Column('score', sa.Float(), nullable=False),
        sa.Column('confidence', sa.Float(), nullable=False, server_default='0.0'),
        sa.Column('sources_used', sa.JSON(), nullable=False, server_default='[]'),
        sa.Column('twitter_score', sa.Float(), nullable=True),
        sa.Column('reddit_score', sa.Float(), nullable=True),
        sa.Column('rss_score', sa.Float(), nullable=True),
        sa.Column('temporal_factor', sa.Float(), nullable=True),
        sa.Column('twitter_weight', sa.Float(), nullable=True),
        sa.Column('reddit_weight', sa.Float(), nullable=True),
        sa.Column('rss_weight', sa.Float(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes for performance: explicitly named ones plus the default
    # ix_* naming (via op.f) on the key columns.
    index_specs = (
        ('idx_energy_scores_match_team', ['match_id', 'team_id']),
        ('idx_energy_scores_score', ['score']),
        ('idx_energy_scores_confidence', ['confidence']),
        ('idx_energy_scores_created_at', ['created_at']),
        ('idx_energy_scores_updated_at', ['updated_at']),
        (op.f('ix_energy_scores_id'), ['id']),
        (op.f('ix_energy_scores_match_id'), ['match_id']),
        (op.f('ix_energy_scores_team_id'), ['team_id']),
    )
    for name, columns in index_specs:
        op.create_index(name, 'energy_scores', columns)
def downgrade() -> None:
    """Drop the energy_scores table after removing its indexes."""
    for index_name in (
        op.f('ix_energy_scores_team_id'),
        op.f('ix_energy_scores_match_id'),
        op.f('ix_energy_scores_id'),
        'idx_energy_scores_updated_at',
        'idx_energy_scores_created_at',
        'idx_energy_scores_confidence',
        'idx_energy_scores_score',
        'idx_energy_scores_match_team',
    ):
        op.drop_index(index_name, table_name='energy_scores')
    op.drop_table('energy_scores')

View File

@@ -0,0 +1,93 @@
"""Create matches and predictions tables
Revision ID: 0006
Revises: 0004
Create Date: 2026-01-17 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0006'
down_revision = '0004'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create matches and predictions tables.

    Also retrofits a foreign key from the pre-existing ``tweets.match_id``
    column to the new ``matches`` table.
    """
    # Create matches table
    op.create_table(
        'matches',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('home_team', sa.String(length=255), nullable=False),
        sa.Column('away_team', sa.String(length=255), nullable=False),
        sa.Column('date', sa.DateTime(), nullable=False),
        sa.Column('league', sa.String(length=255), nullable=False),
        sa.Column('status', sa.String(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_matches_id'), 'matches', ['id'], unique=False)
    op.create_index(op.f('ix_matches_home_team'), 'matches', ['home_team'], unique=False)
    op.create_index(op.f('ix_matches_away_team'), 'matches', ['away_team'], unique=False)
    op.create_index(op.f('ix_matches_date'), 'matches', ['date'], unique=False)
    op.create_index(op.f('ix_matches_league'), 'matches', ['league'], unique=False)
    op.create_index(op.f('ix_matches_status'), 'matches', ['status'], unique=False)
    # Composite indexes for schedule and head-to-head lookups.
    op.create_index(op.f('ix_matches_date_league'), 'matches', ['date', 'league'], unique=False)
    op.create_index(op.f('ix_matches_home_away'), 'matches', ['home_team', 'away_team'], unique=False)
    # Create predictions table
    op.create_table(
        'predictions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('match_id', sa.Integer(), nullable=False),
        # NOTE(review): energy_score and confidence are stored as strings
        # here, while other tables store scores as Float — presumably
        # intentional (formatted values?), but worth confirming.
        sa.Column('energy_score', sa.String(length=50), nullable=False),
        sa.Column('confidence', sa.String(length=50), nullable=False),
        sa.Column('predicted_winner', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['match_id'], ['matches.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_predictions_id'), 'predictions', ['id'], unique=False)
    op.create_index(op.f('ix_predictions_match_id'), 'predictions', ['match_id'], unique=False)
    op.create_index(op.f('ix_predictions_created_at'), 'predictions', ['created_at'], unique=False)
    op.create_index(op.f('ix_predictions_confidence'), 'predictions', ['confidence'], unique=False)
    op.create_index(op.f('ix_predictions_match_id_created'), 'predictions', ['match_id', 'created_at'], unique=False)
    # Add foreign key to tweets table to match matches
    # (batch mode so the ALTER works on SQLite, which rebuilds the table).
    with op.batch_alter_table('tweets', schema=None) as batch_op:
        batch_op.create_foreign_key(
            'fk_tweets_match_id_matches',
            'matches',
            ['match_id'],
            ['id'],
            ondelete='CASCADE'
        )
def downgrade() -> None:
    """Drop predictions and matches tables.

    Reverses ``upgrade`` in strict dependency order. The original version
    removed the tweets -> matches foreign key *after* dropping ``matches``,
    which fails on backends that enforce foreign keys — the constraint
    referencing ``matches`` must be removed first.
    """
    # 1. Remove the foreign key added to tweets (it references matches).
    with op.batch_alter_table('tweets', schema=None) as batch_op:
        batch_op.drop_constraint('fk_tweets_match_id_matches', type_='foreignkey')
    # 2. Drop predictions (its match_id FK references matches).
    op.drop_index(op.f('ix_predictions_match_id_created'), table_name='predictions')
    op.drop_index(op.f('ix_predictions_confidence'), table_name='predictions')
    op.drop_index(op.f('ix_predictions_created_at'), table_name='predictions')
    op.drop_index(op.f('ix_predictions_match_id'), table_name='predictions')
    op.drop_index(op.f('ix_predictions_id'), table_name='predictions')
    op.drop_table('predictions')
    # 3. Drop matches and its indexes.
    op.drop_index(op.f('ix_matches_home_away'), table_name='matches')
    op.drop_index(op.f('ix_matches_date_league'), table_name='matches')
    op.drop_index(op.f('ix_matches_status'), table_name='matches')
    op.drop_index(op.f('ix_matches_league'), table_name='matches')
    op.drop_index(op.f('ix_matches_date'), table_name='matches')
    op.drop_index(op.f('ix_matches_away_team'), table_name='matches')
    op.drop_index(op.f('ix_matches_home_team'), table_name='matches')
    op.drop_index(op.f('ix_matches_id'), table_name='matches')
    op.drop_table('matches')

View File

@@ -0,0 +1,41 @@
"""Add actual_winner column to matches table
Revision ID: 0006
Revises: 0005
Create Date: 2026-01-17 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0006'
down_revision = '0005'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable ``actual_winner`` column to ``matches``."""
    winner_column = sa.Column(
        'actual_winner',
        sa.String(255),
        nullable=True,
        comment='Actual winner of the match: home, away, or draw'
    )
    op.add_column('matches', winner_column)
    # Index the new column so result lookups stay fast.
    op.create_index('ix_matches_actual_winner', 'matches', ['actual_winner'])
def downgrade() -> None:
    """Remove actual_winner column from matches table."""
    # Drop the index before the column it covers.
    op.drop_index('ix_matches_actual_winner', table_name='matches')
    op.drop_column('matches', 'actual_winner')

View File

@@ -0,0 +1,53 @@
"""create rss_articles table
Revision ID: 20260117_0007
Revises: 20260117_0006
Create Date: 2026-01-17
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import create_engine
# revision identifiers, used by Alembic.
revision = '20260117_0007'
down_revision = '20260117_0006'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create rss_articles table."""
    op.create_table(
        'rss_articles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('article_id', sa.String(length=255), nullable=False),
        sa.Column('title', sa.String(length=500), nullable=False),
        sa.Column('content', sa.Text(), nullable=True),
        sa.Column('published_at', sa.DateTime(), nullable=False),
        sa.Column('source_url', sa.String(length=1000), nullable=False),
        sa.Column('match_id', sa.Integer(), nullable=True),
        sa.Column('source', sa.String(length=100), nullable=True, server_default='rss'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_rss_articles_id', 'rss_articles', ['id'])
    # NOTE(review): unlike the other migrations, article_id gets no
    # unique=True here even though it looks like an external identifier —
    # confirm whether duplicates are intended.
    op.create_index('ix_rss_articles_article_id', 'rss_articles', ['article_id'])
    op.create_index('ix_rss_articles_published_at', 'rss_articles', ['published_at'])
    op.create_index('ix_rss_articles_match_id', 'rss_articles', ['match_id'])
    op.create_index('idx_rss_articles_match_id_source', 'rss_articles', ['match_id', 'source'])
    # NOTE(review): this duplicates ix_rss_articles_published_at above —
    # two indexes on the same single column. Removing one requires a
    # matching change in downgrade(), so it is only flagged here.
    op.create_index('idx_rss_articles_published_at', 'rss_articles', ['published_at'])
    op.create_index('idx_rss_articles_source_url', 'rss_articles', ['source_url'])
def downgrade() -> None:
    """Drop the rss_articles table after removing every index upgrade created."""
    # Includes both published_at indexes created by upgrade().
    for index_name in (
        'idx_rss_articles_source_url',
        'idx_rss_articles_published_at',
        'idx_rss_articles_match_id_source',
        'ix_rss_articles_match_id',
        'ix_rss_articles_published_at',
        'ix_rss_articles_article_id',
        'ix_rss_articles_id',
    ):
        op.drop_index(index_name, table_name='rss_articles')
    op.drop_table('rss_articles')

View File

@@ -0,0 +1,49 @@
"""create api_keys table
Revision ID: 20260118_0008
Revises: add_user_predictions_tracking
Create Date: 2026-01-18 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '20260118_0008'
down_revision = 'add_user_predictions_tracking'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the api_keys table and its indexes."""
    op.create_table(
        'api_keys',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('key_hash', sa.String(255), nullable=False),
        sa.Column('key_prefix', sa.String(8), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False, server_default='1'),
        sa.Column('rate_limit', sa.Integer(), nullable=False, server_default='100'),
        sa.Column('last_used_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('key_hash')
    )
    # key_hash index is unique (full-key lookup); the rest are non-unique.
    for column, is_unique in (
        ('id', False),
        ('user_id', False),
        ('key_hash', True),
        ('key_prefix', False),
    ):
        op.create_index(op.f(f'ix_api_keys_{column}'), 'api_keys', [column], unique=is_unique)
def downgrade() -> None:
    """Drop the api_keys table after removing its indexes."""
    for column in ('key_prefix', 'key_hash', 'user_id', 'id'):
        op.drop_index(op.f(f'ix_api_keys_{column}'), table_name='api_keys')
    op.drop_table('api_keys')

View File

@@ -0,0 +1,50 @@
"""add user predictions tracking
Revision ID: add_user_predictions_tracking
Revises:
Create Date: 2026-01-18 10:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import sqlite
# revision identifiers, used by Alembic.
revision = 'add_user_predictions_tracking'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create user_predictions table to track predictions viewed by users.

    ``viewed_at`` gets a server-side default of the current timestamp.
    The original migration tried to populate it from a BEFORE INSERT
    trigger using ``SELECT datetime('now') INTO NEW.viewed_at`` — that is
    not valid SQLite (triggers cannot assign to NEW.* columns), and since
    the column is ``nullable=False`` the INSERT would have been rejected
    before the trigger fired anyway. A column default is the correct fix.
    """
    op.create_table(
        'user_predictions',
        sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('prediction_id', sa.Integer(), nullable=False),
        # Server default replaces the broken trigger (see docstring).
        sa.Column('viewed_at', sa.DateTime(), nullable=False,
                  server_default=sa.text("(datetime('now'))")),
        sa.Column('was_correct', sa.Boolean(), nullable=True, comment='True if prediction was correct, False if incorrect, NULL if match not completed'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['prediction_id'], ['predictions.id'], ondelete='CASCADE'),
        sa.Index('idx_user_predictions_user_id', 'user_id'),
        sa.Index('idx_user_predictions_prediction_id', 'prediction_id'),
        sa.Index('idx_user_predictions_viewed_at', 'viewed_at'),
        sa.UniqueConstraint('user_id', 'prediction_id', name='uq_user_predictions_user_prediction')
    )
def downgrade():
    """Drop user_predictions table."""
    # IF EXISTS keeps this safe whether or not the trigger was ever
    # created by the corresponding upgrade.
    op.execute('DROP TRIGGER IF EXISTS update_user_predictions_viewed_at')
    op.drop_table('user_predictions')