from threading import Thread
from typing import Optional, Any, Generator, Tuple, List
from transformers import PreTrainedTokenizer, TextIteratorStreamer
import time
import baize_config

# Types.
# A chat history: ordered (user_query, assistant_response) pairs.
HistoryType = List[Tuple[str, str]]
# Token ids for a single sequence.
TokensType = List[int]
# Token ids for a batch of sequences.
BatchTokensType = List[List[int]]


def chat_stream(
    model,
    tokenizer: PreTrainedTokenizer,
    query: str,
    history: Optional[HistoryType],
    debug=False,
) -> Generator[str, Any, None]:
    """Stream a chat response for *query*, token by token.

    Builds a chat-template prompt from ``history`` + ``query``, launches
    ``model.generate`` in a background thread, and yields decoded text
    fragments as the streamer produces them.

    Args:
        model: A causal LM exposing a HF-style ``generate`` method.
        tokenizer: Tokenizer providing ``apply_chat_template``.
        query: The new user message.
        history: Prior ``(user, assistant)`` turns, or ``None`` for a
            fresh conversation.
        debug: When True, print per-token latency to stdout while streaming.

    Yields:
        Decoded text fragments with special tokens stripped.
    """

    def make_context(
        tokenizer: PreTrainedTokenizer,
        query: str,
        history: HistoryType,
        max_window_size: int = 1024,
    ):
        """Build model input ids from the conversation via the chat template."""
        conversation = []
        for query_h, response_h in history:
            conversation.append({"role": "user", "content": query_h})
            conversation.append({"role": "assistant", "content": response_h})
        conversation.append({"role": "user", "content": query})
        # truncation=True is required for max_length to take effect in
        # apply_chat_template; without it the window limit was silently ignored.
        inputs = tokenizer.apply_chat_template(
            conversation,
            add_generation_prompt=True,
            truncation=True,
            max_length=max_window_size,
            return_tensors="pt",
        )

        return inputs

    # history is Optional: treat None as an empty conversation instead of
    # crashing inside make_context's iteration.
    context_tokens = make_context(
        tokenizer, query, history or [], baize_config.MAX_WINDOWS_SIZE
    )

    streamer = TextIteratorStreamer(
        tokenizer=tokenizer, skip_prompt=True, timeout=80.0, skip_special_tokens=True
    )
    generation_kwargs = dict(
        input_ids=context_tokens,
        streamer=streamer,
        max_new_tokens=baize_config.MAX_NEW_TOKENS,
    )
    # Generation runs in a worker thread; the streamer's 80 s timeout guards
    # the consumer against a hung generate() call.
    thread = Thread(target=model.generate, kwargs=generation_kwargs)
    thread.start()

    def stream_generator():
        # Plain pass-through of decoded fragments.
        for token in streamer:
            yield token

    def stream_generator_debug():
        # Like stream_generator, but prints the latency of each fragment
        # and a comma-joined summary of all latencies at the end.
        costs = []
        t_prev = time.time()
        for token in streamer:
            t_now = time.time()
            delta = t_now - t_prev
            print(f"+{delta:>6.3f}", f"<{token}>")
            costs.append(str(delta))
            t_prev = time.time()
            yield token
        print(",".join(costs))

    return stream_generator_debug() if debug else stream_generator()
