import ollama
import os
import subprocess
from typing import List, Tuple, Optional

import json
from typing import Dict
from pathlib import Path
from platformdirs import user_config_dir, user_cache_dir

# Default Ollama model used when a caller does not pass one explicitly.
model_name: str = "llama3.2:3b"

def generate_commit_message(diff_output: str, model: str = model_name) -> str:
    """
    Generate a Conventional-Commits-style message from git diff output.

    Streams a chat completion from a local Ollama server and concatenates
    the chunks into a single commit message.

    Args:
        diff_output (str): Git diff content to summarize.
        model (str, optional): Ollama model name. Defaults to the
            module-level ``model_name``.

    Returns:
        str: Generated commit message, stripped of surrounding whitespace.
            An empty string when ``diff_output`` is blank (nothing to
            summarize, so the model call is skipped entirely).
    """
    # Guard: an empty diff would only make the model hallucinate a message.
    if not diff_output.strip():
        return ""

    system_prompt = """You are a Git expert specializing in concise and meaningful commit messages based on output of git diff command.
                    Choose a type from below that best describes the git diff output :
                        fix: A bug fix,
                        docs: Documentation only changes,
                        style: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc),
                        refactor: A code change that neither fixes a bug nor adds a feature,
                        perf: A code change that improves performance,
                        test: Adding missing tests or correcting existing tests,
                        build: Changes that affect the build system or external dependencies,
                        ci: Changes to our CI configuration files and scripts,
                        chore: Other changes that don't modify src or test files,
                        revert: Reverts a previous commit,
                        feat: A new feature,
                    Now, generate a concise git commit message written in present tense in the format "type": "description" for the output of git diff command which is provided by the user.
                    Exclude anything unnecessary such as translation. Your entire response will be passed directly into git commit.
                    Generate only one commit message of maximum length 40 characters, no explanations.
                    """

    client = ollama.Client(host="http://127.0.0.1:11434")
    stream = client.chat(
        model=model,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": diff_output},
        ],
        stream=True,
    )

    # Collect chunks and join once — avoids quadratic `str +=` accumulation.
    parts: List[str] = []
    for chunk in stream:
        parts.append(chunk["message"]["content"])

    return "".join(parts).strip()

def main_old2():
    """Print the previous commit's diff and an AI-generated message for it."""
    # Diff between the parent commit and HEAD, decoded from UTF-8 bytes.
    raw_diff = subprocess.check_output(["git", "diff", "HEAD~1", "HEAD"])
    diff_text = raw_diff.decode("utf-8")
    print(diff_text)
    print(generate_commit_message(diff_text))

# def main():
#     from ollama import Ollama

#     model = Ollama(base_url="http://127.0.0.1:11434")
#     response = model.complete(
#         model="lama",
#         prompt="Hello, how are you?"
#     )
#     print(response)

# def main():
#     client = ollama.Client(host="http://127.0.0.1:11434")
#     response = client.chat(model='llama3.2:latest', messages=[{"role": "user", "content": "Hello, how are you?"}])
#     print(response)

def main():
    """Print the per-user config and cache directories for the app."""
    # Resolve both platform-specific directories and print each in turn.
    for resolve_dir in (user_config_dir, user_cache_dir):
        print(Path(resolve_dir("autocommitt")))

# Script entry point: print the app's config and cache directories.
if __name__ == "__main__":
    main()


