import requests
import json
import re
from requests.auth import HTTPBasicAuth
import json
import time
import sys,os

from elasticsearch import Elasticsearch
from elasticsearch import helpers

from loguru import logger
from datetime import datetime,timedelta

import sys,os

from multiprocessing import Pool
from multiprocessing import Manager
import multiprocessing
from loguru import logger


class UnAuthorizationException(Exception):
    """Raised when an API call is rejected for lack of authorization.

    NOTE(review): not raised anywhere in this chunk — presumably raised or
    caught by callers elsewhere in the project; confirm before removing.
    """
    pass

# Default timeout (seconds) applied to the HTTP requests in this module.
REQUEST_TIMEOUT = 60
# Connection settings for the Dify-style dataset API.
# SECURITY(review): credentials and API keys are hardcoded here — these
# should be moved to environment variables or a secrets store, and the
# committed values rotated.
settings = {
    "AI_API_URL_FOR_DATASETS":"http://192.168.1.5:38080/v1/datasets",
    "AI_API_KEY_FOR_DATASETS":"dataset-k5ASe6iCpmHKRTT2lzJGQLNd",
    "AI_API_URL_FOR_DOC_METADATA":"http://192.168.1.5:38080/console/api/datasets",
    "AI_API_USER_FOR_DOC_METADATA":"pixeli@qq.com",
    "AI_API_PASSWORD_FOR_DOC_METADATA":"vPn**123456",
    "AI_API_URL_FOR_LOGIN":"http://192.168.1.5:38080/console/api/login",
}

def get_api_key():
    """Log in to the console API and return an access token.

    Posts the configured console user's credentials to the login endpoint
    and extracts ``data.access_token`` from the JSON response.

    Returns:
        The bearer token for subsequent console-API calls (e.g. for
        update_datasets), or None if the response carries no token.

    Raises:
        requests.HTTPError: on a non-2xx login response.
        requests.exceptions.RequestException: on network errors/timeouts.
    """
    api_user = settings['AI_API_USER_FOR_DOC_METADATA']
    api_password = settings['AI_API_PASSWORD_FOR_DOC_METADATA']
    api_login = settings['AI_API_URL_FOR_LOGIN']
    # The login endpoint itself needs no Authorization header; the original
    # dangling "Bearer" header (with no token) was meaningless and is gone.
    headers = {
        'Content-Type': 'application/json',
    }
    data = {
        "email": api_user,
        "password": api_password,
    }
    logger.trace('start post for {}'.format(api_login))
    response = requests.post(api_login, headers=headers,
                             data=json.dumps(data), timeout=REQUEST_TIMEOUT)
    logger.trace('end post for {}'.format(api_login))
    response.raise_for_status()  # surface HTTP-level login failures early
    # Guard against a missing "data" object instead of crashing with
    # AttributeError on None (original did .get('data').get(...)).
    return (response.json().get('data') or {}).get('access_token')

def create_datasets(name):
    """Create a new dataset (knowledge base) named *name* via the dataset API.

    Args:
        name: display name for the new dataset.

    Returns:
        requests.Response: the raw response, unchecked — callers inspect
        status/body themselves (original behavior preserved).
    """
    api_url = settings['AI_API_URL_FOR_DATASETS']
    api_key = settings['AI_API_KEY_FOR_DATASETS']

    headers = {
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json',
    }
    data = {
        "name": f'{name}',
        "permission": "all_team_members",  # alternative: "only_me"
    }
    # Timeout added: every other request in this module bounds its wait,
    # and without one a hung server would block the caller forever.
    return requests.post(api_url, data=json.dumps(data), headers=headers,
                         timeout=REQUEST_TIMEOUT)

def list_datasets():
    """List datasets visible to the configured dataset API key.

    Fetches the first page only (up to 20 entries), as before.

    Returns:
        The "data" list from the JSON response (may be None if absent).

    Raises:
        requests.HTTPError: on a non-2xx response.
    """
    api_url = settings['AI_API_URL_FOR_DATASETS']
    api_key = settings['AI_API_KEY_FOR_DATASETS']

    headers = {
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json',
    }
    # Pagination is fixed to the first page of 20 results.
    request_url = api_url + "?limit=20&page=1"
    # Removed: a dead document-creation payload that was built and then
    # overwritten, and the empty JSON body that was sent with this GET
    # (a GET body is ignored by the server). Timeout now uses the
    # module-wide constant instead of an ad-hoc 50s.
    response = requests.get(request_url, headers=headers,
                            timeout=REQUEST_TIMEOUT)
    response.raise_for_status()
    return response.json().get('data')

def update_datasets(datasets_id, permission="all_team_members", model_type=None, api_key=None):
    """Patch a dataset's settings through the console API.

    Args:
        datasets_id: dataset id to update.
        permission: dataset visibility (default: whole team).
        model_type: when "local", also switches the dataset to the locally
            served ollama "bge-m3:latest" embedding model with plain
            semantic search (no reranker); any other value updates only
            permission/indexing settings.
        api_key: console access token (see get_api_key()). Effectively
            required — when omitted the request is sent with
            "Bearer None" and the server will reject it.

    Returns:
        The "data" object of the JSON response (may be None).

    Raises:
        requests.HTTPError: on a non-2xx response.
    """
    api_url = settings['AI_API_URL_FOR_DOC_METADATA']
    headers = {
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json',
    }
    # Common fields first; the "local" branch only adds embedding settings,
    # so the shared keys are no longer duplicated across branches.
    data = {
        "permission": permission,
        "indexing_technique": "high_quality",
    }
    if model_type == "local":
        data.update({
            "embedding_model": "bge-m3:latest",
            "embedding_model_provider": "ollama",
            "embedding_available": True,
            'retrieval_model_dict': {'search_method': 'semantic_search',
                                     'reranking_enable': False},
        })

    request_url = f'{api_url}/{datasets_id}'
    # Use the module-wide timeout instead of the previous ad-hoc 50s.
    response = requests.patch(request_url, headers=headers,
                              data=json.dumps(data), timeout=REQUEST_TIMEOUT)
    response.raise_for_status()
    return response.json().get('data')


def create_datasets_doc(dataset_id, catelogy, doc, only_name=False):
    """Create a document in dataset *dataset_id* from record *doc*.

    The document name is the first non-empty field listed in
    NAME_FIELD[catelogy]; unless *only_name* is True, the first non-empty
    field in ID_FIELD[catelogy] is appended as "_编号_<id>".
    (ID_FIELD/NAME_FIELD are module-level mappings defined outside this
    chunk — presumably {category: [field_name, ...]}; confirm against the
    full module.)

    The text body is abstract + full text, accepting either the English
    ('abstract'/'full_text') or Chinese ('中文摘要'/'正文') keys in *doc*.
    Indexing uses hierarchical parent/child chunking split on "@@@".

    Returns:
        (json_response, error): parsed response and False on success, or
        (None, True) when the request or JSON decoding fails — failures
        are logged, not raised.
    """
    api_url = settings['AI_API_URL_FOR_DATASETS']
    api_key = settings['AI_API_KEY_FOR_DATASETS']

    headers = {
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json',
    }

    def _first_truthy(fields):
        # Return the first truthy doc value among *fields*, else the last
        # value looked up (preserves the original fall-through behavior,
        # which may yield None or an empty string).
        value = None
        for field in fields:
            value = doc.get(field, None)
            if value:
                break
        return value

    id_ = None if only_name else _first_truthy(ID_FIELD[catelogy])
    name_ = _first_truthy(NAME_FIELD[catelogy])
    if id_:
        name_ = name_ + "_编号_" + str(id_)

    data = {
        "name": name_,
        "text": doc.get('abstract', doc.get('中文摘要', "")) + "\n" + doc.get('full_text', doc.get('正文', "")),
        "indexing_technique": "high_quality",
        "process_rule": {
            "rules": {
                "pre_processing_rules": [{"id": "remove_extra_spaces", "enabled": True},
                                         {"id": "remove_urls_emails", "enabled": False}],
                "segmentation": {"separator": "@@@", "max_tokens": 1000, "chunk_overlap": 100},
                "parent_mode": "full-doc",
                "subchunk_segmentation": {"separator": "@@@", "max_tokens": 1000, "chunk_overlap": 100}
            },
            "mode": "hierarchical"
        },
        "doc_form": "hierarchical_model",
        "doc_language": "Chinese"
    }

    request_url = api_url + f'/{dataset_id}' + "/document/create_by_text"
    error = False
    try:
        logger.trace('start post for {}'.format(request_url))
        start_post = time.time()
        response = requests.post(request_url, headers=headers,
                                 data=json.dumps(data), timeout=REQUEST_TIMEOUT)
        post_cost = 1000 * (time.time() - start_post)
        logger.trace('end post for {},post cost {}ms'.format(request_url, post_cost))
        response.raise_for_status()
        return response.json(), error
    except requests.exceptions.RequestException as e:
        error = True
        logger.error("Exception {}, for name_ {}".format(e, name_))
        return None, error
    except json.JSONDecodeError as e:
        error = True
        logger.error("Exception {}, for name_ {}".format(e, name_))
        return None, error

def update_datasets_doc(dataset_id, doc_id, catelogy, doc, only_name=False):
    """Update document *doc_id* in dataset *dataset_id* from record *doc*.

    Name resolution mirrors create_datasets_doc: the first non-empty field
    in NAME_FIELD[catelogy], with "_编号_<id>" appended from the first
    non-empty ID_FIELD[catelogy] entry unless *only_name* is True.
    (ID_FIELD/NAME_FIELD are module-level mappings defined outside this
    chunk — confirm against the full module.)

    Returns:
        (json_response, error): parsed response and False on success, or
        (None, True) when the request or JSON decoding fails — failures
        are logged, not raised.
    """
    api_url = settings['AI_API_URL_FOR_DATASETS']
    api_key = settings['AI_API_KEY_FOR_DATASETS']

    headers = {
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json',
    }

    def _first_truthy(fields):
        # First truthy doc value among *fields*, else the last value looked
        # up (preserves original fall-through behavior).
        value = None
        for field in fields:
            value = doc.get(field, None)
            if value:
                break
        return value

    id_ = None if only_name else _first_truthy(ID_FIELD[catelogy])
    name_ = _first_truthy(NAME_FIELD[catelogy])
    if id_:
        name_ = name_ + "_编号_" + str(id_)

    data = {
        "name": name_,
        "text": doc.get('abstract', doc.get('中文摘要', "")) + "\n" + doc.get('full_text', doc.get('正文', "")),
        "indexing_technique": "high_quality",
        "process_rule": {"mode": "automatic"}
    }

    # BUG FIX: the original concatenation produced
    # ".../document/<doc_id>update-by-text" (no separator before the action
    # segment). Dify's dataset API updates a document via
    # POST /datasets/{dataset_id}/documents/{document_id}/update-by-text.
    request_url = f'{api_url}/{dataset_id}/documents/{doc_id}/update-by-text'
    error = False
    try:
        logger.trace('start post for {}'.format(request_url))
        start_post = time.time()
        response = requests.post(request_url, headers=headers,
                                 data=json.dumps(data), timeout=REQUEST_TIMEOUT)
        post_cost = 1000 * (time.time() - start_post)
        logger.trace('end post for {},post cost {}ms'.format(request_url, post_cost))
        response.raise_for_status()
        return response.json(), error
    except requests.exceptions.RequestException as e:
        error = True
        # Include name_ for parity with create_datasets_doc's error logs.
        logger.error("Request error: {}, for name_ {}".format(e, name_))
        return None, error
    except json.JSONDecodeError as e:
        error = True
        logger.error("JSON parse error: {}, for name_ {}".format(e, name_))
        return None, error
