import logging
from pathlib import Path
from typing import AsyncIterator, List, Optional
from pydantic import BaseModel

from gchat_processing.gch_chain_wrapper import GigaChainWrapper
from .base import BaseBlock, ProgressChunk
from .sources import SourcesItem
from ...config import PaperAIConfig


class TopicsStartRequest(BaseModel):
    """Request payload that starts (or resumes) topic generation."""
    # Identifier of a persisted state to resume from; None starts fresh.
    state_id: Optional[str] = None
    # When True, newly generated topics are appended to the existing list
    # instead of replacing it (see TopicsBlock.run).
    append: bool = False


class TopicsItem(BaseModel):
    """A single generated literature topic."""
    title: str
    summary: str
    # Exclusion flag; not read in this module — presumably toggled by the
    # user to drop a topic from downstream steps (TODO confirm with caller).
    disabled: bool = False


class TopicsUpdateRequest(BaseModel):
    """Request payload that replaces the stored topics list wholesale."""
    # NOTE(review): not consumed anywhere in this chunk — presumably handled
    # by an API route elsewhere; verify against the router.
    topics: List[TopicsItem]
    # Identifier of the persisted state being updated, if any.
    state_id: Optional[str] = None

class TopicsBlock(BaseBlock):
    """Literature topics block.

    Generates research topics from the idea and the enabled sources via
    GigaChain, streaming one ProgressChunk per generated topic and a final
    done-chunk carrying the complete list.
    """
    # NOTE(review): annotated but never assigned in this class — __init__
    # reads self.config, so it is presumably injected as a class attribute
    # or by BaseBlock. TODO confirm; otherwise __init__ raises AttributeError.
    config: PaperAIConfig

    def __init__(self, idea: str, sources: List[SourcesItem], topics: List[TopicsItem],
                 request: TopicsStartRequest):
        self.idea = idea
        self.sources = sources
        self.topics = topics
        self.request = request
        self.gchain_wrapper = GigaChainWrapper(
            sql_cache_path=Path('gchat_processing/sqlite_cache.db'),
            giga_token=self.config.gigachat_token,
            giga_scope='GIGACHAT_API_CORP'
        )
        self.logger = logging.getLogger(__name__)

    @classmethod
    def from_state(cls, data: dict, request: TopicsStartRequest) -> 'TopicsBlock':
        """Rebuild a block from a persisted state dict.

        Missing keys fall back to an empty idea / empty lists.
        """
        # Use cls (not the hard-coded class name) so subclasses round-trip.
        return cls(
            idea=data.get('idea', ''),
            sources=[
                SourcesItem.model_validate(item)
                for item in data.get('sources', [])
            ],
            topics=[
                TopicsItem.model_validate(item)
                for item in data.get('topics', [])
            ],
            request=request,
        )

    async def run(self) -> AsyncIterator[ProgressChunk]:
        """Generate topics, yielding progress per topic plus a final chunk.

        In append mode, previously stored topics are kept and new ones are
        added after them; otherwise the result list starts empty.
        """
        # Copy rather than alias self.topics so partial progress does not
        # mutate persisted state until the run actually completes.
        results: List[TopicsItem] = list(self.topics) if self.request.append else []

        it = self.gchain_wrapper.get_generator_for_topics(
            self.idea,
            summary_list=[
                dict(title=source.title, summary=source.summary)
                for source in self.sources
                if not source.disabled
            ],
            # At least 2 topics; otherwise roughly one per two sources.
            n_topics=max(len(self.sources) // 2, 2)
        )
        total = 1  # fallback total if the generator yields nothing
        step = 0
        async for d, total in it:
            item = TopicsItem.model_validate({
                'title': d['title'],
                'summary': d['summary'],
            })
            results.append(item)
            step += 1
            yield ProgressChunk(
                done=False,
                total_steps=total,
                current_step=step,
                data={
                    'topics': [item],
                }
            )
        # BUG FIX: results are topics, not sources. The previous code did
        # `self.sources = results`, which clobbered the sources list and
        # lost the generated topics — to_state() persists self.topics.
        self.topics = results
        yield ProgressChunk(
            done=True,
            total_steps=total,
            current_step=total,
            data={
                'topics': results,
            }
        )

    def to_state(self, data: dict):
        """Persist this block's output; idea/sources belong to other blocks."""
        data['topics'] = self.topics
        return
