import json
import pickle
import urllib.parse
import urllib.request
from wsgiref.simple_server import make_server

from py2neo import Graph

import answer_search
import model_process
import question_classifier


# Neo4j connection shared by every request handler (used by answer_search).
# NOTE(review): host/port/credentials are hard-coded and are the Neo4j
# defaults ("neo4j"/"neo4j") — move to configuration or environment
# variables before any real deployment.
test_graph = Graph(
    host="127.0.0.1",
    http_port=7474,
    user="neo4j",
    password="neo4j"
)


class api():
    """Minimal WSGI front-end exposing the QA chatbot over HTTP.

    Instantiating this class starts a blocking ``wsgiref`` server bound to
    ``ip:port`` with :meth:`test` as the WSGI application, so construction
    does not return until the server stops.
    """

    def __init__(self, ip="localhost", port=8088):
        # serve_forever() blocks; the instance exists only to run the server.
        httpd = make_server(ip, port, app=self.test)
        httpd.serve_forever()

    def test(self, environ, start_response):
        """WSGI application: answer the question carried in the query string.

        The first ``&``-separated field of ``QUERY_STRING`` is taken as the
        percent-encoded question text.  The response is a single JSON body
        of the form ``{"code": 200, "answer": ...}`` with permissive CORS
        headers so a browser front-end on another origin can POST to it.
        """
        status = '200 OK'
        response_headers = [('Content-type', 'application/json'),
                            ('Access-Control-Allow-Origin', '*'),
                            ('Access-Control-Allow-Methods', 'POST'),
                            ('Access-Control-Allow-Headers', 'x-requested-with,content-type'),
                            ]
        start_response(status, response_headers)

        content = environ.get('QUERY_STRING')
        parts = content.split('&')
        # FIX: ``unquote`` is defined in urllib.parse; ``urllib.request.unquote``
        # only resolves via an undocumented CPython-internal re-export.
        question = urllib.parse.unquote(parts[0])

        # NOTE(review): ``vocabulary`` is a module global created in the
        # ``__main__`` guard — this handler assumes the script entry point ran.
        model = model_process.NaiveBayesModelMe()
        prediction = model.test(question, vocabulary)
        query = question_classifier.match_question(prediction, question)
        predict = answer_search.Predict(test_graph)
        answer = predict.run(query, prediction)
        return [json.dumps({"code": 200, "answer": answer}).encode()]


if __name__ == '__main__':
    # Load the vocabulary once at startup; the request handler (api.test)
    # reads this module-level global on every request.
    # SECURITY NOTE: pickle.load can execute arbitrary code — acceptable only
    # because this file ships with the application, never from user input.
    # FIX: use a context manager so the file handle is closed (the original
    # opened the file and never closed it).
    with open("./newdict/vocabulary.pkl", "rb") as pkl_file:
        vocabulary = pickle.load(pkl_file)
    handler = api()  # blocks here: starts the WSGI server and serves forever
