#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
os.environ["TOKENIZERS_PARALLELISM"] = "false"

import json
import logging
import requests
from typing import Any, Dict, Optional, List

from fastapi import FastAPI, Request, Header, HTTPException
from fastapi.responses import JSONResponse
from pydantic import BaseModel, Field
from pathlib import Path
import chromadb
from langchain_chroma import Chroma
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_core.documents import Document
from langchain_community.document_loaders import UnstructuredMarkdownLoader
from langchain_community.vectorstores.utils import filter_complex_metadata
from langchain_deepseek import ChatDeepSeek
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
from langchain_core.vectorstores import VectorStoreRetriever
import uvicorn
from contextlib import asynccontextmanager

import hashlib
import hmac
def verify_signature(payload_body, secret_token, signature_header):
    """Validate a GitHub webhook delivery against its HMAC-SHA256 signature.

    GitHub signs each delivery with the shared webhook secret and sends the
    digest in the ``x-hub-signature-256`` header.  Recompute the digest over
    the raw request body and compare in constant time.

    Args:
        payload_body: raw request body bytes (``await request.body()``)
        secret_token: shared webhook secret (WEBHOOK_SECRET)
        signature_header: value of the ``x-hub-signature-256`` header

    Raises:
        HTTPException: 403 when the header is missing or the digests differ.
    """
    if not signature_header:
        raise HTTPException(status_code=403, detail="x-hub-signature-256 header is missing!")
    digest = hmac.new(secret_token.encode('utf-8'), msg=payload_body, digestmod=hashlib.sha256).hexdigest()
    if not hmac.compare_digest(f"sha256={digest}", signature_header):
        raise HTTPException(status_code=403, detail="Request signatures didn't match!")

# Root logger configuration: timestamped, module-tagged log lines.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# GitHub logins whose issues/comments are never auto-answered
# (presumably bot/staff accounts, to avoid reply loops — TODO confirm).
ignore_users = [
    "wayneliu0019",
    # "HaiHui886",
    # "",
]

# Embedding model configuration for the vector store.
# model_name = "BAAI/bge-large-en-v1.5"
# model_kwargs = {'device': 'cuda'}
model_name = "/Users/hhwang/models/bge-large-zh-v1.5"  # local bge-large-zh checkpoint
model_kwargs = {'device': 'cpu'}
encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity

# Chroma persistence directory, collection name, and the docs tree to index.
chroma_path = "chroma_langchain_db"
collection_name = "rag-doc"
md_path = "/Users/hhwang/code/jihulab/opencsg/opencsg-docs/docs"

# API credentials come from the environment; empty string when unset.
github_token = os.getenv("GITHUB_TOKEN", "")
deepseek_token = os.getenv("DEEPSEEK_TOKEN", "")

# System prompt for the RAG chat: answer strictly from the retrieved context.
# NOTE(review): the name is misspelled ("syste_prompt") but is referenced by
# get_answer_by_llm, so it is left unchanged here.
syste_prompt = """
## Role:
You are a professional documentation assistant. Please answer the user's question based on the retrieved content below.

## Constraints:
Answer the user's question directly based on the retrieved content.
If there is no relevant information in the content, clearly state that you cannot answer and do not fabricate information.
At the end, prompt the user that they can find more information at https://opencsg.com/docs and technical support will get involved.

## Output:
Please provide accurate and concise answers.
"""

# Prompt templates: retrieved context is injected as an AI message, the
# user's question as a human message (see get_answer_by_llm).
ai_prompt = ChatPromptTemplate.from_template("Content:\n\n{context}\n\n")
user_prompt = ChatPromptTemplate.from_template("Question:\n\n{question}\n\n")

# Module-level singletons populated during app startup (see lifespan).
retriever: Optional[VectorStoreRetriever] = None
dsllm: Optional[ChatDeepSeek] = None

def create_llm():
    """Create the DeepSeek chat client and publish it as the module-level
    ``dsllm`` singleton used by get_answer_by_llm."""
    global dsllm
    print("Load llm ......")
    llm_config = dict(
        model="deepseek-chat",
        temperature=0.5,
        max_tokens=4096,
        timeout=None,
        max_retries=2,
        streaming=False,
        api_key=deepseek_token,
    )
    dsllm = ChatDeepSeek(**llm_config)
    print("Load llm done!")

def create_vector_store():
    """Build (or reload) the Chroma retriever and publish it as the
    module-level ``retriever`` singleton.

    When ``chroma_path`` already exists on disk the persisted collection is
    reopened; otherwise every markdown file under ``md_path`` — excluding the
    starship and autohub subtrees — is embedded and persisted.
    """
    global retriever
    print(f"Loading model {model_name} ......")
    embeddings = HuggingFaceEmbeddings(
        model_name=model_name,
        model_kwargs=model_kwargs,
        encode_kwargs=encode_kwargs,
    )
    print(f"Load model {model_name} done!")

    if not os.path.exists(chroma_path):
        # First run: index the docs tree from scratch.
        exclude_dirs = [f"{md_path}/starship", f"{md_path}/autohub"]
        md_files = []
        for candidate in Path(md_path).rglob("*.md"):
            path_str = str(candidate)
            if any(path_str.startswith(prefix) for prefix in exclude_dirs):
                continue
            md_files.append(path_str)

        print(f"Find {len(md_files)} markdown files in {md_path}")

        md_docs: List[Document] = []
        for md_file in md_files:
            print(f"Loading markdown file: {md_file}")
            loader = UnstructuredMarkdownLoader(md_file, remove_hyperlinks=True, remove_images=True)
            # Chroma only accepts scalar metadata; drop complex values.
            kept = filter_complex_metadata(loader.load())
            if kept:
                md_docs.extend(kept)

        print(f"Loaded {len(md_docs)} markdown files.")

        print("Creating vector store ......")
        store = Chroma.from_documents(
            documents=md_docs,
            collection_name=collection_name,
            embedding=embeddings,
            persist_directory=chroma_path,
            client_settings=chromadb.Settings(anonymized_telemetry=False)
        )
        print("Vector store completed!")
    else:
        print(f"Load vector store from local {chroma_path} ......")
        store = Chroma(
            persist_directory=chroma_path,
            embedding_function=embeddings,
            collection_name=collection_name,
            client_settings=chromadb.Settings(anonymized_telemetry=False)
        )
        print(f"Load vector store from local {chroma_path} done!")

    retriever = store.as_retriever(
        search_type="similarity",
    )

def format_docs(docs):
    """Join retrieved documents into one labeled, blank-line-separated
    context string for the prompt."""
    parts = []
    for idx, doc in enumerate(docs, start=1):
        parts.append(f"document {idx}:\n{doc.page_content}")
    return "\n\n".join(parts)

@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: build the retriever and the LLM client before
    the app starts serving, and log when it shuts down."""
    # Startup: initialize the module-level singletons.
    logger.info("正在初始化组件...")
    create_vector_store()
    create_llm()
    logger.info("所有组件初始化完成!")

    # The application handles requests while suspended here.
    yield

    # Shutdown: nothing to release yet; just log.
    logger.info("正在关闭应用...")

# FastAPI application; startup/shutdown work is handled by the lifespan
# context manager defined above.
app = FastAPI(
    title="GitHub Webhook webhook",
    description="Receive and handle GitHub webhook request",
    version="1.0.0",
    lifespan=lifespan
)

class GitHubWebhookPayload(BaseModel):
    """Loose Pydantic model for a GitHub webhook payload.

    NOTE(review): currently unused — the /webhook handler parses the raw
    request body itself; kept as documentation of the expected shape.
    """
    # Raw event data; arbitrary extra top-level keys are tolerated (Config).
    payload: Dict[str, Any] = Field(default_factory=dict, description="GitHub webhook event data")
    
    class Config:
        # Accept and retain unknown fields sent by GitHub.
        extra = "allow"

@app.post("/webhook")
async def github_webhook(request: Request, x_github_event: Optional[str] = Header(None)):
    """Handle GitHub webhook deliveries.

    Only ``issues/opened`` and ``issue_comment/created`` events are processed;
    everything else is acknowledged and ignored.  For processed events the
    question text is answered via the RAG pipeline and posted back to the
    issue as a comment.

    Args:
        request: incoming request; the raw body is needed for the HMAC check.
        x_github_event: event name from the ``X-GitHub-Event`` header.

    Raises:
        HTTPException: 403 when the signature header is missing or invalid.
    """
    logger.info(f"Receive GitHub event type: {x_github_event}")

    # Fix: read the raw body exactly once (it was read twice) and verify the
    # HMAC signature BEFORE parsing, so forged payloads are rejected without
    # being processed.  The secret comes from the environment instead of a
    # hard-coded placeholder string.
    body = await request.body()
    secret = os.getenv("WEBHOOK_SECRET", "your_secret_token")
    verify_signature(body, secret, request.headers.get("x-hub-signature-256"))

    payload = json.loads(body)
    action = payload.get('action')

    is_issue_open = x_github_event == "issues" and action == "opened"
    is_issue_comment_create = x_github_event == "issue_comment" and action == "created"

    if not is_issue_open and not is_issue_comment_create:
        # Consistency fix: log via logger instead of bare print.
        logger.info(f"ignore {x_github_event} - {action} event")
        return JSONResponse(content={"status": "success", "message": f"ignore {x_github_event} - {action} event"})

    logger.info(json.dumps(payload, indent=2, ensure_ascii=False))

    # Robustness fix: the old chained .get() calls raised AttributeError when
    # a key was absent, and GitHub sends null (None) — not "" — for empty
    # issue/comment bodies, which later crashed len() in handle_question.
    issue = payload.get('issue') or {}
    issue_number = issue.get('number')
    issue_body = issue.get('body') or ""
    issue_user = (issue.get('user') or {}).get('login')

    comment = payload.get('comment') or {}
    comment_id = comment.get('id', "")
    comment_body = comment.get('body') or ""
    comment_user = (comment.get('user') or {}).get('login') or ""

    repo = payload.get('repository') or {}
    repo_name = repo.get('name')
    repo_owner = (repo.get('owner') or {}).get('login')

    # Skip events authored by ignored accounts (e.g. the bot itself) to
    # avoid answer loops; still acknowledge with 200 so GitHub won't retry.
    if is_issue_open and issue_user in ignore_users:
        return JSONResponse(content={"status": "success", "message": f"handle {x_github_event} - {action} event"})
    if is_issue_comment_create and comment_user in ignore_users:
        return JSONResponse(content={"status": "success", "message": f"handle {x_github_event} - {action} event"})

    handle_question(
        is_issue_open=is_issue_open,
        issue_body=issue_body,
        is_issue_comment_create=is_issue_comment_create,
        comment_body=comment_body,
        repo_name=repo_name,
        repo_owner=repo_owner,
        issue_number=issue_number,
        issue_user=issue_user,
        comment_user=comment_user,
        comment_id=comment_id,
    )

    return JSONResponse(content={"status": "success", "message": f"handle {x_github_event} - {action} event"})

def handle_question(is_issue_open: bool,
                    issue_body: str,
                    is_issue_comment_create: bool,
                    comment_body: str,
                    repo_name: str,
                    repo_owner: str,
                    issue_number: str,
                    issue_user: str,
                    comment_user: str,
                    comment_id: str,
    ):
    """Answer a user's question taken from an issue or an issue comment.

    Picks the question text (issue body for newly opened issues, comment body
    for new comments — comment wins if both flags are set), runs it through
    the RAG pipeline, and posts the result back as an issue comment.
    No-ops when there is nothing to answer.
    """
    if not is_issue_open and not is_issue_comment_create:
        return

    user_question = comment_body if is_issue_comment_create else issue_body

    # Fix: guard with truthiness instead of len() so a None question (GitHub
    # sends null for empty issue bodies) no longer raises TypeError; this
    # also covers the old `len(user_question) < 1` empty-string check.
    if not user_question:
        return

    logger.info(f"repo name: {repo_name}, repo owner: {repo_owner}, issue number: {issue_number}, issue_user: {issue_user}, comment_user: {comment_user}, comment_id: {comment_id}, user_question: {user_question}")

    result = get_answer_by_llm(user_question)
    answer(repo_owner=repo_owner, repo_name=repo_name, issue_number=issue_number, answer=result)

def answer(repo_owner: str, repo_name: str, issue_number: str, answer: str):
    """Post *answer* as a new comment on the given GitHub issue.

    API: https://docs.github.com/en/rest/issues/comments?apiVersion=2022-11-28#create-an-issue-comment

    Args:
        repo_owner: repository owner login.
        repo_name: repository name.
        issue_number: issue to comment on.
        answer: comment body (the LLM's reply).
    """
    url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/issues/{issue_number}/comments"
    headers = {
        "Accept": "application/vnd.github+json",
        "Authorization": f"Bearer {github_token}",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    json_data = {"body": answer}
    # Fix: re-enable TLS certificate verification (verify=False exposed the
    # GitHub token to MITM) and add a timeout so a stalled connection cannot
    # hang the webhook handler indefinitely.
    resp = requests.post(url=url, headers=headers, json=json_data, timeout=30)
    logger.info(f"commit comment response on {url}, response code: {resp.status_code}")

def get_answer_by_llm(question: str):
    """Run the RAG pipeline: retrieve context for *question* from the vector
    store, then ask the DeepSeek model and return its reply text."""
    context = format_docs(retriever.invoke(question, k=3))
    messages = [
        SystemMessage(content=syste_prompt),
        AIMessage(content=ai_prompt.format(context=context)),
        HumanMessage(content=user_prompt.format(question=question)),
    ]
    response = dsllm.invoke(messages, config={"streaming": False})
    return response.content

@app.get("/")
async def root():
    """Liveness probe: confirms the webhook service is up."""
    status_payload = {"status": "online", "message": "GitHub Webhook is running"}
    return status_payload

@app.get("/ask")
async def ask(question: str):
    """Debug endpoint: answer *question* directly through the RAG pipeline.

    Fix: ``get_answer_by_llm`` is a regular (sync) function, so awaiting its
    string result raised ``TypeError: object str can't be used in 'await'
    expression`` on every request.  Call it synchronously instead.
    """
    logger.info(f"question: {question}")
    result = get_answer_by_llm(question)
    return {"status": "success", "message": result}

if __name__ == "__main__":
    # Serve the app with uvicorn on all interfaces.  The import string
    # "webhook:app" assumes this file is named webhook.py — TODO confirm
    # the module name matches the actual filename.
    uvicorn.run(
        "webhook:app",
        host="0.0.0.0",
        port=9090,
        reload=False
    )
    logger.info("server shutdown")

# python webhook.py

# curl -X POST http://localhost:8000/webhook -H "Content-Type: application/json" -H "X-GitHub-Event: push" -d '{"pusher": {"name": "test-user"}, "repository": {"name": "test-repo"}}'

# curl -X POST http://8.218.171.50:8000/webhook -H "Content-Type: application/json" -H "X-GitHub-Event: push" -d '{"pusher": {"name": "test-user"}, "repository": {"name": "test-repo"}}'

# 2025-10-10 18:21:17,897 - webhook - INFO - 收到GitHub事件: issues
# 2025-10-10 18:21:17,898 - webhook - INFO - GitHub webhook请求体内容:
# 2025-10-10 18:21:17,898 - webhook - INFO - action: opened
# 2025-10-10 18:21:17,898 - webhook - INFO - issue title: fourth issue
# 2025-10-10 18:21:17,898 - webhook - INFO - issue body: opencsg action 1
# 2025-10-10 18:21:17,898 - webhook - INFO - issue user: HaiHui886
# INFO:     140.82.115.124:55976 - "POST /webhook HTTP/1.1" 200 OK
# 2025-10-10 18:22:12,601 - webhook - INFO - 收到GitHub事件: issue_comment
# 2025-10-10 18:22:12,602 - webhook - INFO - GitHub webhook请求体内容:
# 2025-10-10 18:22:12,603 - webhook - INFO - action: created
# 2025-10-10 18:22:12,603 - webhook - INFO - issue title: fourth issue
# 2025-10-10 18:22:12,603 - webhook - INFO - issue body: opencsg action 1
# 2025-10-10 18:22:12,603 - webhook - INFO - issue user: HaiHui886
# 2025-10-10 18:22:12,603 - webhook - INFO - comment body: reply 4
# 2025-10-10 18:22:12,603 - webhook - INFO - comment user: HaiHui886

# SECURITY: a live GitHub personal access token was committed here; it has been redacted and must be revoked.
# curl -k -L -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer <REDACTED_GITHUB_PAT>"  -H "X-GitHub-Api-Version: 2022-11-28"  https://api.github.com/repos/OpenCSGs/csghub-mcp-servers/issues/24/comments -d '{"body":"I will help on this"}'

# nohup python webhook.py > webhook.log 2>&1 &