"""Add crawler models

Revision ID: 20250928_crawler_models
Revises: 20241220_add_llm_models_and_chat_tables
Create Date: 2025-09-28 03:17:00.000000

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '20250928_crawler_models'
down_revision = '20241220_add_llm_models_and_chat_tables'
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Create the crawler tables: crawler_jobs, crawled_pages, scheduled_crawler_tasks.

    Order matters: crawler_jobs must exist before crawled_pages, whose
    job_id foreign key references it.
    """
    # Create crawler_jobs table: one row per crawl run, owned by a user.
    op.create_table('crawler_jobs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('url', sa.String(length=2048), nullable=False),
        # Status stored as a short string (e.g. pending/running/completed);
        # no DB-level CHECK constraint, so values are enforced in application code.
        sa.Column('status', sa.String(length=20), nullable=False),
        # Free-form per-job crawl configuration (depth, filters, etc. — defined by the app).
        sa.Column('config', sa.JSON(), nullable=True),
        sa.Column('result_data', sa.JSON(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('pages_crawled', sa.Integer(), nullable=True),
        sa.Column('total_content_size', sa.Integer(), nullable=True),
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('completed_at', sa.DateTime(timezone=True), nullable=True),
        # created_at defaults to the DB clock; updated_at is maintained by the app/ORM.
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        # NOTE(review): no ondelete behavior specified — deleting a user with
        # jobs will fail with an FK violation unless handled in the app.
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # NOTE(review): this index on the integer PK is redundant (the PK already
    # has a unique index) — typical Alembic autogenerate output; left as-is
    # because this migration may already be applied.
    op.create_index(op.f('ix_crawler_jobs_id'), 'crawler_jobs', ['id'], unique=False)

    # Create crawled_pages table: one row per fetched page, child of a crawler_job.
    op.create_table('crawled_pages',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('job_id', sa.Integer(), nullable=False),
        sa.Column('url', sa.String(length=2048), nullable=False),
        sa.Column('title', sa.String(length=500), nullable=True),
        # Both extracted text content and the original HTML are retained.
        sa.Column('content', sa.Text(), nullable=True),
        sa.Column('raw_html', sa.Text(), nullable=True),
        sa.Column('status_code', sa.Integer(), nullable=True),
        sa.Column('content_type', sa.String(length=100), nullable=True),
        sa.Column('content_length', sa.Integer(), nullable=True),
        sa.Column('extracted_data', sa.JSON(), nullable=True),
        sa.Column('links', sa.JSON(), nullable=True),
        sa.Column('images', sa.JSON(), nullable=True),
        sa.Column('crawled_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        # NOTE(review): no ondelete='CASCADE' — pages must be deleted before
        # their parent job, or deletion will fail at the DB level.
        sa.ForeignKeyConstraint(['job_id'], ['crawler_jobs.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # NOTE(review): job_id has no index; lookups of a job's pages will scan.
    # Consider adding one in a follow-up migration (not here, to keep this
    # revision immutable).
    op.create_index(op.f('ix_crawled_pages_id'), 'crawled_pages', ['id'], unique=False)

    # Create scheduled_crawler_tasks table: recurring crawl definitions with
    # cron-style scheduling and per-task execution counters.
    op.create_table('scheduled_crawler_tasks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('url', sa.String(length=2048), nullable=False),
        # Cron expression validated/interpreted by the application scheduler.
        sa.Column('cron_expression', sa.String(length=100), nullable=False),
        sa.Column('timezone', sa.String(length=50), nullable=True),
        sa.Column('config', sa.JSON(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        # Execution counters; nullable, so the app presumably initializes them.
        sa.Column('total_executions', sa.Integer(), nullable=True),
        sa.Column('successful_executions', sa.Integer(), nullable=True),
        sa.Column('failed_executions', sa.Integer(), nullable=True),
        sa.Column('last_execution_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('next_execution_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_scheduled_crawler_tasks_id'), 'scheduled_crawler_tasks', ['id'], unique=False)


def downgrade() -> None:
    """Drop the crawler tables in reverse creation order.

    crawled_pages is dropped before crawler_jobs so its job_id foreign key
    does not block the parent table's removal.
    """
    op.drop_index(op.f('ix_scheduled_crawler_tasks_id'), table_name='scheduled_crawler_tasks')
    op.drop_table('scheduled_crawler_tasks')
    op.drop_index(op.f('ix_crawled_pages_id'), table_name='crawled_pages')
    op.drop_table('crawled_pages')
    op.drop_index(op.f('ix_crawler_jobs_id'), table_name='crawler_jobs')
    op.drop_table('crawler_jobs')