# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License")
# functions for chat and ner
# Author: MiqroEra Shibo

from api_keys import openai_api_key, bing_api_key
import openai
import requests
import json
from customer_topics import topic_dic
import math
from bs4 import BeautifulSoup

# Configure the OpenAI client with the key imported from api_keys.py.
# (The former `bing_api_key = bing_api_key` self-assignment was a no-op —
# the name is already bound by the import at the top of the file.)
openai.api_key = openai_api_key


# chat
def chat_with_user(user_input):
    """
    The function is to chat with user and return response from GPT-3.5-turbo
    :param user_input:
    :return: reply
    """
    engine = "davinci"
    prompt = "Here is a ChatGPT plugin named Surfing, which may help you search the internet through ChatGPT. \
              The assistant is helpful, creative, clever, and very friendly.\n\nHuman: " + user_input + "\nAI:"
    max_tokens = 150
    temperature = 0.9
    frequency_penalty = 0.1
    presence_penalty = 0.6
    stop = "\n"

    # call the completion api
    response = openai.Completion.create(
        engine=engine,
        prompt=prompt,
        max_tokens=max_tokens,
        temperature=temperature,
        frequency_penalty=frequency_penalty,
        presence_penalty=presence_penalty,
        stop=stop
    )

    # return the response
    return response.choices[0].text


# NER function
def ner_user_input(user_input):
    """
    The function is to ner user's input and return the entity
    :param user_input:
    :return: the entity
    """
    # ner param setting
    engine = "curie"
    query = user_input

    response = openai.NER.create(
        engine=engine,
        query=query
    )

    # entity having
    entity = response["entities"]

    return entity


# Sort the entity by weight
# here is the entity weight formulation
# score = log(length)*frequency*relevance,
# where length is the length of entity,
# frequency is the frequency of entity
# relevance is the relevance between entity and topic

def score_and_sort_entities(user_input, entities, topics=None):
    """
    Score each entity and return the entities sorted by descending score.

    score = log(length) * frequency * relevance, where
      - length: character length of the entity text
      - frequency: occurrence count of the entity text in user_input
      - relevance: keyword overlap between the entity text and the topic map

    BUG FIX: the scoring/append/sort statements used to sit OUTSIDE the
    ``for entity`` loop, so only the last entity was ever scored and an empty
    ``entities`` list raised NameError on ``length``. They now run per entity,
    with a single sort at the end. Unused locals (engine/query and the unused
    entity fields) were removed.

    :param user_input: the raw user message the entities were extracted from
    :param entities: list of entity dicts with at least a "text" key
    :param topics: optional {topic: [keywords]} mapping; defaults to
                   customer_topics.topic_dic for backward compatibility
    :return: list of (entity, score) tuples, highest score first
    """
    if topics is None:
        topics = topic_dic

    scored = []
    for entity in entities:
        entity_text = entity["text"]
        # Guard against empty text: log(0) is undefined. (log(1) == 0, so
        # single-character entities still score 0 — same as before.)
        length = max(len(entity_text), 1)
        frequency = user_input.count(entity_text)

        relevance = 0
        for _topic, keywords in topics.items():
            # Whole-entity match against the keyword list...
            if entity_text.lower() in keywords:
                relevance += 1
            # ...plus partial credit for each keyword contained in the entity.
            for keyword in keywords:
                if keyword.lower() in entity_text.lower():
                    relevance += 0.5

        scored.append((entity, math.log(length) * frequency * relevance))

    scored.sort(key=lambda item: item[1], reverse=True)
    return scored


# use bing search api to search the internet
# use bing search api to search the internet
def bing_search_entities(sorted_entities):
    """
    Search the internet with the Bing Web Search API for the top entities.

    Robustness fixes: added a request timeout and HTTP status check, and
    guarded the response parsing — the original raised KeyError whenever a
    query returned no "webPages" section, killing the whole batch. Each
    lookup is now best-effort: failed or empty searches are skipped.

    :param sorted_entities: list of (entity, score) tuples, best first
    :return: list of result URLs (at most one per entity, at most 3 total)
    """
    bing_url = "https://api.bing.microsoft.com/v7.0/search"
    bing_headers = {"Ocp-Apim-Subscription-Key": bing_api_key}
    web_urls = []

    # Only the top 3 entities are worth a web search.
    for entity, _score in sorted_entities[:3]:
        bing_params = {"q": entity["text"], "count": 1}

        try:
            bing_response = requests.get(
                bing_url, headers=bing_headers, params=bing_params, timeout=10
            )
            bing_response.raise_for_status()
            bing_data = bing_response.json()
        except (requests.RequestException, ValueError):
            # Network/HTTP/JSON failure for this entity — skip it.
            continue

        # "webPages" is absent when the query matched nothing.
        pages = bing_data.get("webPages", {}).get("value", [])
        if pages:
            web_urls.append(pages[0]["url"])

    return web_urls


# get the web content and summarize it
def summarize_web_pages(web_urls):
    # setting openai summarize params
    engine = "davinci"
    max_tokens = 100

    summaries = []

    # travel web urls
    for web_url in web_urls:
        web_response = requests.get(web_url)
        web_html = web_response.text

        # parser the web html
        soup = BeautifulSoup(web_html, "html.parser")
        web_text = soup.get_text()

        # summarize the web text
        summary = openai.Summarize.create(
            engine=engine,
            max_tokens=max_tokens,
            query=web_text,
            output_prefix="Summary:"
        )
        summary = summary["answers"][0]["answer"]
        summaries.append(summary)

    return summaries
