# Serve the LlamaIndex query engine over HTTP with Flask
import os
from llama_index.core import (
    SimpleDirectoryReader,
    VectorStoreIndex,
    StorageContext,
    load_index_from_storage,
)
from flask import request, Flask
from llama_index.embeddings.openai import OpenAIEmbedding

from llamaIndex.CommonClient import llm

app = Flask(__name__)
index = None


def initialize_index():
    """Load the persisted vector index from ./.index, or build it from ./data.

    Sets the module-level ``index`` global. On first run (no ``./.index``
    directory) the documents in ``./data`` are embedded and the resulting
    index is persisted; on later runs the persisted index is reloaded.
    """
    global index
    index_dir = "./.index"

    # SECURITY: the API key was previously hard-coded in source (a leaked
    # secret). Read it from the environment instead; set OPENAI_API_KEY.
    embed_model = OpenAIEmbedding(
        api_key=os.environ.get("OPENAI_API_KEY", ""),
        model="text-embedding-3-small",
        api_base="https://www.henapi.top/v1",
    )

    if os.path.exists(index_dir):
        # Reload the previously persisted index.
        storage_context = StorageContext.from_defaults(persist_dir=index_dir)
        index = load_index_from_storage(storage_context, embed_model=embed_model)
    else:
        # Bug fix: the original built StorageContext with persist_dir BEFORE
        # this check, which raises on a missing directory and made this
        # branch unreachable on first run. A fresh context must be created
        # without persist_dir, then persisted after building.
        documents = SimpleDirectoryReader("./data").load_data()
        storage_context = StorageContext.from_defaults()
        index = VectorStoreIndex.from_documents(
            documents, storage_context=storage_context, embed_model=embed_model
        )
        storage_context.persist(index_dir)


@app.route("/query", methods=["GET"])
def query_index():
    global index
    query_text = request.args.get("text", None)
    if query_text is None:
        return (
            "No text found, please include a ?text=blah parameter in the URL",
            400,
        )
    query_engine = index.as_query_engine(llm=llm)
    response = query_engine.query(query_text)
    return str(response), 200


@app.route("/")
def home():
    return "Hello World!"
if __name__ == "__main__":
    print("initializing index...")
    initialize_index()
    app.run(host="0.0.0.0", port=5601)


# Visit  http://localhost:5601/query?text=What%20was%20Nike%27s%20revenue%20in%202023?
