from flask import Flask, request, jsonify
import fitz
from transformers import pipeline
import time
import os

app = Flask(__name__)

# Initialize the summarization and question-answering pipelines.
# NOTE: both models are downloaded/loaded at import time — startup is slow,
# but per-request latency stays low because the models are reused.
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
qa_pipeline = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")

# In-memory performance log: one {"task": ..., "latency": ...} dict per
# recorded task (appended by log_performance_metrics; also mirrored to
# performance.log on disk). Grows without bound for the process lifetime.
performance_metrics = []


@app.route('/upload', methods=['POST'])
def upload_pdf():
    """Accept an uploaded PDF, extract its text, and return a summary as JSON.

    Expects a multipart/form-data POST with the PDF under the "file" field.
    Returns 400 for a missing or empty file field, otherwise
    ``{"summary": <combined chunk summaries>}``.
    """
    start_time = time.time()
    if 'file' not in request.files:
        return "No file part in the request", 400

    file = request.files['file']
    # Original code silently returned None (a 500) when `file` was falsy;
    # fold that case into the explicit 400 response.
    if not file or file.filename == '':
        return "No selected file", 400

    # basename() strips any directory components a malicious client could
    # embed in the filename (path traversal, e.g. "../../etc/passwd").
    safe_name = os.path.basename(file.filename)
    if not safe_name:
        return "Invalid filename", 400

    # Ensure the target directory exists even when the app was not started
    # through the __main__ guard (e.g. under a WSGI server).
    os.makedirs("uploads", exist_ok=True)
    file_path = os.path.join("uploads", safe_name)
    file.save(file_path)

    # Extract the PDF text.
    text = extract_text_from_pdf(file_path)

    # Generate the summary.
    summary = generate_summary(text)

    # Record performance metrics.
    end_time = time.time()
    log_performance_metrics("summary_generation", start_time, end_time)

    return jsonify({"summary": summary})


@app.route('/ask', methods=['POST'])
def ask_question():
    """Answer a question against a caller-supplied context using the QA model.

    Expects a JSON body ``{"question": ..., "context": ...}``. Returns 400 on
    a missing/invalid body instead of the original 500 (TypeError when the
    body is not JSON, KeyError when a key is absent).
    """
    start_time = time.time()
    # silent=True yields None instead of raising on a malformed/absent body.
    data = request.get_json(silent=True)
    if not data or 'question' not in data or 'context' not in data:
        return "Request body must be JSON with 'question' and 'context'", 400

    question = data['question']
    context = data['context']

    # Run the extractive QA model.
    answer = qa_pipeline(question=question, context=context)

    # Record performance metrics.
    end_time = time.time()
    log_performance_metrics("qa_interaction", start_time, end_time)

    return jsonify({"answer": answer})


def extract_text_from_pdf(file_path):
    """Return the concatenated plain text of every page of the PDF at *file_path*.

    The document is opened as a context manager so the underlying file handle
    is released even if extraction raises (the original never closed it).
    """
    with fitz.open(file_path) as doc:
        # join() avoids quadratic += string concatenation on large documents;
        # iterating the document yields pages in order, like load_page(0..n).
        return "".join(page.get_text() for page in doc)


def generate_summary(text, summarize=None):
    """Summarize *text* by splitting it into chunks and summarizing each one.

    NOTE: chunking is by *characters*, not model tokens — the original comment
    claimed 512 tokens, which is wrong; 512 characters is just a conservative
    stand-in for BART's input limit. TODO: chunk by token count.

    Parameters:
        text: the document text to summarize ("" yields "").
        summarize: optional callable with the HuggingFace summarization
            pipeline signature; defaults to the module-level ``summarizer``.
            Injectable for testing.

    Returns the per-chunk summaries joined with single spaces.
    """
    if summarize is None:
        summarize = summarizer

    max_chunk_size = 512  # characters per chunk (see NOTE above)
    chunks = [text[i:i + max_chunk_size] for i in range(0, len(text), max_chunk_size)]

    summaries = []
    for chunk in chunks:
        # Skip whitespace-only chunks: there is nothing to summarize and
        # some pipelines raise on effectively-empty input.
        if not chunk.strip():
            continue
        result = summarize(chunk, max_length=150, min_length=30, do_sample=False)
        summaries.append(result[0]['summary_text'])

    return " ".join(summaries)


def log_performance_metrics(task, start_time, end_time):
    """Record the latency of *task*, both in memory and in performance.log.

    Appends a {"task", "latency"} entry to the module-level
    ``performance_metrics`` list and mirrors it to the log file on disk.
    """
    elapsed = end_time - start_time
    entry = {"task": task, "latency": elapsed}
    performance_metrics.append(entry)
    with open("performance.log", "a") as fh:
        fh.write(f"Task: {task}, Latency: {elapsed} seconds\n")


if __name__ == "__main__":
    # exist_ok=True avoids the race between the existence check and the
    # directory creation that the original check-then-makedirs had.
    os.makedirs("uploads", exist_ok=True)
    # NOTE(review): debug=True enables the interactive Werkzeug debugger and
    # auto-reload — development only; never expose it in production.
    app.run(debug=True)
