# --------------------------------------------------
import json, datetime
from pathlib import Path
# from openai import AsyncOpenAI
# 0. Hi
from pydantic import BaseModel, ValidationError
from typing import Annotated, Literal

class M(BaseModel):             # a single OpenAI-style chat message
    # role of the speaker, mirroring the OpenAI chat-completions roles
    role: Literal['user', 'assistant', 'tool', 'system', 'developer']
    content: str
    # optional chain-of-thought text; only meaningful for assistant messages
    reasoning_content: Annotated[str, 'optional'] = ''  # reasoning content

    def dump_to_file(self, name: str, o: Path, cid: int) -> None:
        '''
        Dump this message to `<name>-<cid>-<role>.md` under directory `o`.

        Assistant messages with non-blank reasoning additionally get a
        `<name>-<cid>-reasoning.md` file.
        '''
        r = self.role
        p = o / f'{name}-{cid}-{r}.md'
        # 🦜 : write UTF-8 explicitly so emoji / non-ASCII model output does
        # not crash on platforms whose default encoding is not UTF-8 (Windows)
        p.write_text(self.content, encoding='utf-8')
        if r == 'assistant' and self.reasoning_content.strip():
            pR = o / f'{name}-{cid}-reasoning.md'
            pR.write_text(self.reasoning_content, encoding='utf-8')

import logging
import sys

# Root-logger setup: record everything (DEBUG), but only echo INFO+ to stdout.
P = logging.getLogger()
P.setLevel(logging.DEBUG)
# Start from a clean slate — a previous run (e.g. notebook re-execution)
# may already have attached handlers to the root logger.
if P.hasHandlers():
    P.handlers.clear()
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
P.addHandler(handler)

class S:
    """ANSI escape sequences for colorizing terminal output."""
    BLUE = '\x1b[94m'
    CYAN = '\x1b[96m'
    GREEN = '\x1b[92m'
    NOR = '\x1b[0m'   # reset to the normal style
import json
import aiohttp

# k = json.loads(Path("/home/me/.ssh/k-250611-h3c.json").read_text())

class Sess(BaseModel):
    '''A chat session: the message history plus the API endpoint it talks to.

    Sessions persist as one markdown file per message plus an entry in an
    `index.json` — see `dump_to_db` / `load`.
    '''
    ms : list[M] = []  # messages in this session (pydantic deep-copies the default per instance)
    model: str  # model name
    url: str    # api url .../v1 (stored without a trailing slash)
    key: str = '<mock-key>'
    name: str                   # experiment name; also the on-disk file prefix
    note: str = ""             # free-form note, stored in the index
    think_chunk_mode: bool = False  # if True, reasoning arrives inline as <think>...</think> chunks

    @staticmethod
    def new(model="llama3.2", url="http://localhost:11434/v1", key='<mock-key>', think_chunk_mode:bool = False):
        '''Create a fresh session named `<model>_<MMDD-HHMMSS>` (made path-safe).'''
        if url.endswith('/'):
            url = url[:-1]

        d = datetime.datetime.now().strftime("%m%d-%H%M%S")
        name = model + '_' + d
        # make the name valid for path on windows / unix
        name = name.replace(':','-').replace('/','_')
        return Sess(name=name, model=model, url=url, key=key, think_chunk_mode=think_chunk_mode)

    @staticmethod
    def load(N: str, key: str, p: Path = Path.cwd().parent / 'ixc-qa', think_chunk_mode: bool = False):
        '''Load session `N` from directory `p` (index.json + `<N>-<cid>-<role>.md` files).

        Raises FileNotFoundError if the index file is missing, ValueError if
        `N` is not in the index.
        '''
        # 1. load the index
        idx = p / 'index.json'
        if not idx.exists():
            raise FileNotFoundError(f"Index file {idx} not found.")
        Idx = json.loads(idx.read_text(encoding='utf-8'))

        # 2. find the session by name
        if N not in Idx:
            raise ValueError(f"Session {N} not found in index.")
        o = Idx[N]

        # 3. find the files, ordered by their numeric message id.
        # 🦜 : a plain lexicographic sort breaks past 10 messages
        # ('...-10-user.md' < '...-2-user.md'), so sort on the embedded cid.
        def _cid(m: Path) -> int:
            try:
                return int(m.stem.split('-')[-2])
            except (IndexError, ValueError):
                return -1           # unexpected file name: sort it first

        l = sorted(p.glob(f'{N}-*.md'), key=_cid)

        # 4. start the session
        s = Sess(name=N, model=o['model'], url=o['url'], key=key, think_chunk_mode=think_chunk_mode)
        s.note = o.get('note', '')
        # 5. load the messages (reasoning files are derived output; skip them)
        for m in l:
            if m.is_file():
                r = m.stem.split('-')[-1]
                if r == 'reasoning':
                    continue            # 🦜 : skip this
                s.ms.append(M(role=r, content=m.read_text(encoding='utf-8')))
        return s


    @staticmethod
    async def ask_lm0(ms: list, model: str, base_url: str, api_key: str, think_chunk_mode: bool = False) -> tuple[str, str]:
        '''Send `ms` to the model; returns (answer, reasoning).

        Accepts both `M` objects and plain role/content dicts.
        '''
        ms0 = []
        for m in ms:
            if isinstance(m, M):
                # 🦜 : drop reasoning_content — the API only wants role/content
                ms0.append({
                    'role': m.role,
                    'content': m.content,
                })
            else:
                ms0.append(m)
        return await U.ask_lm0(ms0, model, base_url, api_key, think_chunk_mode=think_chunk_mode)

    def add_q(self, q: str):
        '''
        Add a question to this session
        '''
        if self.ms:
            assert self.ms[-1].role != 'user', "Last message must not be a question."
        self.ms.append(M(role='user', content=q))

    async def ask_for_a(self) -> str:
        '''Ask the model to answer the pending question; appends and returns the answer.'''
        assert self.ms[-1].role == 'user', "Last message must be a question."
        a, t = await Sess.ask_lm0(self.ms, self.model, self.url, self.key, self.think_chunk_mode)
        self.ms.append(M(role='assistant', content=a, reasoning_content=t))
        return a

    def dump_to_db(self, p: Path):
        '''
        Dump this session to directory `p`: one markdown file per message
        plus an entry in `p/index.json`.
        '''
        if not p.exists():
            p.mkdir(parents=True, exist_ok=True)

        idx = p / 'index.json'
        # 🦜 : the index maps name -> info, so the empty default must be a
        # dict — `[]` made `Idx[self.name] = ...` below raise TypeError the
        # very first time no index file existed yet.
        Idx = json.loads(idx.read_text(encoding='utf-8')) if idx.exists() else {}

        for i, m in enumerate(self.ms):
            m.dump_to_file(self.name, p, i)

        Idx[self.name] = {
            'model': self.model,
            'url': self.url,
            'note' : self.note,
        }
        idx.write_text(json.dumps(Idx, indent=4), encoding='utf-8')
        P.info(f'📗️ : index Db saved to {S.GREEN}{idx}{S.NOR},\n appended json = \n{S.BLUE}{json.dumps(Idx[self.name], indent = 4)}{S.NOR}')


import re
# --------------------------------------------------
class U:
    async def get_Y_from_X(p_X: Path | str,
                           p_Yo = Path.cwd() / '10a-default-oneshot.Y.yml',
                           model = "google/gemini-2.5-flash-preview-05-20:thinking"
                           ):

        p_X = Path(p_X) if not isinstance(p_X, Path) else p_X
        p_Xo = p_Yo.with_name(p_Yo.name.removesuffix('.Y.yml') + '.X.md')

        assert p_Xo.exists(), f"X oneshot file {p_Xo} does not exist"
        assert p_Yo.exists(), f"Y oneshot file {p_Yo} does not exist"

        p_t = Path.cwd() / '15-prompt-template.md'

        n1 = p_Xo.name.removesuffix('.X.md')
        n = p_X.name.removesuffix('.X.md')
        p_Y = p_X.with_name(f"{n}-{n1}-{model.replace('/','_').replace(':','_')}.Y1.yml")

        # 2. replace the template with the oneshot and Y
        s = p_t.read_text()
        s1 = s.format(
            X_oneshot=p_Xo.read_text(),
            Y_oneshot=p_Yo.read_text(),
            X=p_X.read_text(),
        )

        # 3. prepare the session
        p_qa = Path.cwd().parent / "ixc-qa"
        k = json.loads(Path("/home/me/.ssh/k-250611-h3c.json").read_text())
        s = Sess.new(model=model, url=k['url'], key=k['key'])

        s.add_q(s1); await s.ask_for_a(); None

        o = s.ms[-1].content
        o = U.extract_code_block(o)

        print(f'✅️  {S.GREEN}{p_Y.write_text(o)}{S.NOR} bytes written to {S.CYAN}{p_Y}{S.NOR}')

    @staticmethod
    def extract_code_block(text, lang:str = ''):
        pattern = r'```(.*?)\n(.*?)\n```'
        match = re.search(pattern, text, re.DOTALL)
        if lang:
            assert match.group(1).strip() == lang, f"Expected language {lang}, but found {match.group(1).strip()}"
        if match:
            return match.group(2).strip()
        raise ValueError(f"No code block found in the text.")

    @staticmethod
    async def translate(p: Path, save: bool = True, dry_run: bool = False) -> str:
        '''Translate a file to English if it ends with .cn.X, otherwise to Chinese.
        '''
        if isinstance(p, str):
            p = Path(p)

        s = Sess.new(model="deepseek/deepseek-chat-v3-0324", url=k['url'], key=k['key'])
        p0 = None
        lang = 'Simplified Chinese'
        if '.cn' in p.suffixes:
            lang = 'English'
            # 1. true stem
            s0 = p.name.removesuffix(''.join(p.suffixes))
            # n = p.name.removesuffix(p.suffixes[-2] + p.suffixes[-1])
            l = p.suffixes
            l.remove('.cn')         # 🦜 a rare mutable operation in Python
            p0 = p.with_name(s0 + ''.join(l))
            # pop the -2 suffix
        else:
            n = p.name.removesuffix(p.suffixes[-1])
            p0 = p.with_name(n + '.cn' + p.suffixes[-1])


        if p0.exists():
            print(f"File {S.GREEN}{p0}{S.NOR} already exists, skipping translation.")
            return '<SKIPPED>'

        if save:
            print(f"Translating {S.CYAN}{p}{S.NOR} to {S.BLUE}{lang}{S.NOR} as {S.GREEN}{p0}{S.NOR}")
        if dry_run:
            return '<DRY-RUN> '

        s.add_q(
            f'''
            Hi can you tranlsate the following file {p.name} to {lang}?, please keep the original structure and format of the file.:\n
            ```
        {p.read_text()}
            ```
            '''
        ); await s.ask_for_a(); None


        c = U.extract_code_block(s.ms[-1].content)
        if c is None:
            print(f"Failed to extract code block from {p.name}")
            return '<FAILED> with result: \n' + s.ms[-1].content

        if save:
            p0.write_text(c)
        return c


    @staticmethod
    async def ask_lm0(ms: list, model: str, base_url: str, api_key: str, think_chunk_mode:bool = False) -> tuple[str, str]:
        Ot, O = [], []
        async for cs, content in U.f(ms, model=model, url=base_url, k=api_key, think_chunk_mode=think_chunk_mode):
            if cs:
                if len(O) == 0:
                    pass # 🦜 : print('content --------------------------------------------------?')
                print(f'{S.GREEN}{content}{S.NOR}', end='', flush=True)
                O.append(content)
            else:
                if len(Ot) == 0:
                    pass # 🦜 : print('content --------------------------------------------------?')
                print(f'{S.CYAN}{content}{S.NOR}', end='', flush=True)
                Ot.append(content)
        return ''.join(O), ''.join(Ot)

    @staticmethod
    async def f(ms, url = "http://localhost:11434/v1", model="llama3.2", k = '<mock-key>', think_chunk_mode: bool = False, temperature: float = 0.7):
        url = url.strip()
        url += "/chat/completions"

        payload = {
            "model": model,
            "messages": ms,
            "stream": True,  # 📗️ : stream output
            "max_tokens": 16384,#max_tokens must be < 16384
            "stop": ["null"],
            "temperature": temperature,
            "top_p": 0.7,
            "top_k": 50,
            "frequency_penalty": 0.5,
            "n": 1,
            "response_format": {"type": "text"},
        }

        headers = {
            "Authorization": f"Bearer {k}",
            "Content-Type": "application/json"
        }

        R: str = 'reasoning' if url.startswith('https://openrouter.ai/api/v1') else 'reasoning_content'
        # the reasoning field name
        tags = {
            'begin': '<think>',
            'end': '</think>'
        }

        def process_line(x: str, c: bool, think_chunk_mode: bool = False
                         ) -> tuple[bool, str | None]:
            nonlocal R
            '''
            (line, contented started) -> (content started, line content)
            '''

            x = x.strip()
            # P.debug(f"Processing line cs={cs}, x={S.CYAN}{x}{S.NOR}")
            if x in ['', ': OPENROUTER PROCESSING']:
                return True, None  # known empty line, continue
            elif not x.startswith('data: '):
                P.info(f"⚠️ Weired line format: {x}")
                return True, None

            def json_ok(x) -> bool:
                return 'choices' in x and \
                    isinstance(x['choices'], list) and \
                    len(x['choices']) > 0 and \
                    'delta' in x['choices'][0]

            def time_to_swith(x, think_chunk_mode: bool) -> bool:
                if not think_chunk_mode:
                    return (R not in x) or \
                        (not x[R]) or (x['content'])
                return x['content'].strip() == '</think>'

            try:
                x = x.removeprefix('data: ')

                if x == "[DONE]":
                    P.debug("✅️ Received [DONE] signal")
                    return True, None

                x = json.loads(x)
                if not json_ok(x):
                    P.debug(f"⚠️ Invalid JSON structure: {x}")
                    return True, None
                x = x['choices'][0]['delta']
                # 1. if we are in content-mode: return content
                if c:
                    P.debug(f"Content mode active, returning content: {x.get('content')}")
                    return True, x.get('content')

                # 2. else:
                if (not c) and think_chunk_mode and (x['content'].strip() == '<think>'):
                    return False, None  # skip the tag

                #  2.1 try get the think content
                #  2.2 if failed, we are at the end of thinking content, emit the </think> signal
                if time_to_swith(x, think_chunk_mode):
                    P.debug(f'Time to switch to content mode for this line: {x}')
                    return True, x.get('content')


                # not time to switch
                if think_chunk_mode:
                    return False, x['content']
                else:
                    return False, x[R]

            except json.JSONDecodeError as e:
                P.error(f"❌ JSON decode error: {e} for line {x}")
                return True, None

        cs = False                  # content started
        async with aiohttp.ClientSession(headers=headers) as sn:
            async with sn.post(url, json=payload) as r:
                if r.status != 200:
                    raise Exception(f"❌️ Failed to connect to {url} with status {r.status}, response: {await r.text()}")
                async for line in r.content:
                    x = line.decode('utf-8').strip()
                    # print(f'🦜 > {S.GREEN}{x}{S.NOR}')
                    c, content = process_line(x, c=cs, think_chunk_mode=think_chunk_mode)
                    if content:
                        cs = c
                        yield cs, content

# await f([{"role": "user", "content": "hi"}])
# async for cs, content in f([{"role": "user", "content": "hi"}]):
#     c = S.GREEN if cs else S.BLUE
#     print(f'{c}{content}{S.NOR}', end='', flush=True)

# async for cs, content in U.f([{"role": "user", "content": "solve x for x + 2 = 5"},], url = "http://localhost:11434/v1", model="deepseek-r1:7b", k = '<mock-key>', think_chunk_mode=True):
#     c = S.GREEN if cs else S.BLUE
#     print(f'{c}{content}{S.NOR}', end='', flush=True)
# x = 'data: {"id":"chatcmpl-636","object":"chat.completion.chunk","created":1749709267,"model":"llama3.2","system_fingerprint":"fp_ollama","choices":[{"index":0,"delta":{"role":"assistant","content":"How"},"finish_reason":null}]}'

# --------------------------------------------------
import base64

def decode_base64_strings(obj):
    """
    Walk a JSON-like object recursively, replacing every string of the form
    "_HTML:b'<base64>'" or "_CMD:b'<base64>'" with its decoded text.
    Non-matching strings and primitives pass through unchanged.
    """
    if isinstance(obj, dict):
        # descend into mapping values
        return {key: decode_base64_strings(value) for key, value in obj.items()}
    if isinstance(obj, list):
        # descend into sequence items
        return [decode_base64_strings(item) for item in obj]
    if isinstance(obj, str):
        for tag in ('HTML', 'CMD'):
            if obj.startswith(f"_{tag}:b'") and obj.endswith("'"):
                try:
                    return d(obj, tag)
                except Exception:
                    # leave the original string alone when decoding fails
                    return obj
        return obj
    # primitives (int, float, bool, None) pass through
    return obj

def d(s: str, prefix='HTML') -> str:
    """Base64-decode a string wrapped as "_<prefix>:b'<base64>'"."""
    body = s.removeprefix(f"_{prefix}:b'").removesuffix("'")
    return base64.b64decode(body).decode('utf-8')

# --------------------------------------------------
# 11b : try render function

import yaml
import textwrap
# 1. replace the right id

class Ac(BaseModel):            # Action object parsed from the Yaml
    id: str                     # device id, e.g. 'N1' or 'DUT1'
    label: str
    cmd: str
    expect: list[str] | None = None     # strings that must appear in the output
    expect_no: list[str] | None = None  # strings that must NOT appear

    @staticmethod
    def build_code(i : str, label: str, cmd: str, expect: list[str] | None = None, expect_no: list[str] | None = None):
        '''Render one `<dut>.CheckCommand(...)` snippet, indented 8 spaces.

        Raises ValueError when the id prefix is not in the replacement map.
        '''
        def replace_id(i):
            # map the id prefix to the runtime device object
            M = {'N' : 'gl.DUT', 'DUT' : 'gl.DUT'}            # the replacement map
            for k in M:
                if i.startswith(k):
                    return M[k] + i.removeprefix(k)
            raise ValueError(f"Invalid ID: {i}")

        i = replace_id(i)
        # 🦜 : the original tested the *builtin* `id` (always truthy, so the
        # check was dead); test the actual value instead.
        if not i:
            raise ValueError(f"Invalid ID: {i}")

        cmd = cmd.strip()
        # dedupe while keeping first-seen order — `list(set(...))` produced a
        # random order, making the generated code non-deterministic
        expect1 = list(dict.fromkeys(expect)) if expect else []

        # 0. in cmd, if there's duplicate \n or \r\n, then remove them
        cmd = re.sub(r'(\r\n|\n){2,}', '\n', cmd)

        a = [f"{label!r}",
            f"cmd='''{cmd}'''",
            f"expect={expect1!r}" if expect1 else "",
            f"not_expect={expect_no!r}" if expect_no else "",
            "stop_max_attempt=1",
            "wait_fixed=1"]

        if expect_no is None and expect is None:
            a.append('not_expect=["fatal"]')

        # <2025-07-03 Thu>: 🦜 : update: if there're duplicate string in expect, then it translates to a count=n argument
        from collections import Counter
        if expect and len(expect1) < len(expect):  # if there are duplicates in the original expect list
            c = Counter(expect).most_common()[0][1]
            a.append(f"count={c}")  # add the count argument if there are duplicates

        a = [x for x in a if x]  # remove empty strings
        T = '{i}.CheckCommand({a})'
        # Format the string with the provided id, label, and cmd
        return textwrap.indent(T.format(i=i, a=',\n     '.join(a)), ' ' * 8)  # indent with 8 spaces

    def to_code(self) -> str:
        '''Render this action as a Python code snippet.'''
        return Ac.build_code(i=self.id, label=self.label, cmd=self.cmd, expect=self.expect, expect_no=self.expect_no)


def try_render0(p = Path('11-try-render.Y.yml')) -> str:
    '''Render the Yaml file to a Python code snippet.

    Lines starting with `!` are treated as comments and dropped before the
    Yaml is parsed; each entry becomes one `CheckCommand` snippet.
    '''
    # 0. preprocess s1: remove the ! comments
    s1 = []
    with p.open(encoding='utf-8') as f:
        for line in f:
            if not line.strip().startswith('!'):
                s1.append(line)

    # 🦜 : the collected lines still carry their own '\n'; joining with
    # '\n' doubled every newline and corrupted multi-line Yaml scalars.
    A = yaml.safe_load(''.join(s1))

    l = []
    while A:
        a = A.pop(0)  # get the first item
        # <2025-07-03 Thu>: 🦜 : update: if `id` is list[str]: push back them
        if isinstance(a['id'], list):
            assert len(a['id']) > 0, "ID list cannot be empty"
            assert all(isinstance(i, str) for i in a['id']), "All IDs must be strings"
            # 🦜 : prepend the whole expansion at once — prepending one id at
            # a time inside a loop reversed the id order.
            A = [{'id': i, 'label': a['label'], 'cmd': a['cmd'],
                  'expect': a.get('expect'), 'expect_no': a.get('expect_no')}
                 for i in a['id']] + A
            continue
        try:
            a = Ac(**a)  # validate and create an Action object
            l.append(a.to_code())
        except ValueError as e:   # pydantic's ValidationError subclasses ValueError
            e.add_note(f"Error processing {a['id']}: {S.GREEN}{e}{S.NOR}, a = {S.BLUE}{a}{S.NOR}")
            raise e
    return '\n'.join(l)

def try_render(p: Path = Path('11-try-render.Y.yml')) -> None:
    '''Render `p` into a runnable `test_*.py` file next to it.'''
    header = Path('11-before-render.txt').read_text()
    body = try_render0(p)

    # 🦜 : if a sibling .X.md file exists, substitute it into the header
    px = p.with_name(p.name.removesuffix('.Y.yml') + '.X.md')
    header = header.format(
        X=textwrap.indent(px.read_text() if px.exists() else '', ' ' * 4),
        # YYYY-MM-DD HH:MM
        date=datetime.datetime.now().strftime('%Y-%m-%d %H:%M'),
    )

    p1 = p.with_name('test_' + p.name.removesuffix('.Y.yml') + '.py')
    print(f"✅️ {S.GREEN}{p1.write_text(header + body)}{S.NOR} bytes written to {S.BLUE}{p1}{S.NOR}")
