# encoding: utf-8
# @author: wangxin
# @software: garner
# @file: MyGerapy.py
# @time: 2025/4/2 15:52
import os
import sqlite3
from urllib.parse import urlparse
from flask_sqlalchemy import SQLAlchemy
from flask import Flask, request, render_template, jsonify, session, redirect, url_for
import threading
import time
from queue import Queue
from lxml import etree
import requests
app=Flask(__name__)
# Secret key signs the session cookies used by index()/spider().
# Read from the FLASK_SECRET_KEY environment variable when set.
# NOTE(review): the hard-coded fallback lets anyone who reads this file forge
# session cookies — confirm FLASK_SECRET_KEY is always set in production.
app.secret_key = os.environ.get('FLASK_SECRET_KEY', 'a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6')
def is_valid_url(url):
    """Return True if *url* is a well-formed http(s) URL with a host.

    Only the scheme and network location are checked; path and query
    contents are not validated.
    """
    try:
        parts = urlparse(url)
    except (ValueError, AttributeError, TypeError):
        # Narrowed from a bare `except:` — ValueError covers malformed input
        # (e.g. invalid port); AttributeError/TypeError cover non-string args.
        return False
    # Require an explicit http/https scheme AND a non-empty host part.
    return parts.scheme in ('http', 'https') and bool(parts.netloc)
@app.route('/', methods=['GET', 'POST'])
def index():
    """Collect crawl parameters from the form and stash them in the session.

    GET renders the input form; POST validates the target URL, stores every
    form field in the session, and redirects to the spider view.
    """
    if request.method == 'POST':
        # .get() with a default avoids Flask's automatic 400 Bad Request
        # when a field is missing from the submitted form.
        url = request.form.get('url', '')
        if not is_valid_url(url):
            return render_template('home.html', error='无效的URL')
        # Store form data in the session for spider() to consume.
        session['url'] = url
        session['cookie'] = request.form.get('cookie', '')
        session['user_agent'] = request.form.get('user_agent', '')
        session['proxy_ip'] = request.form.get('proxy_ip', '')
        # NOTE: the HTML form field is capitalised 'Xpath' but the session
        # key is lower-case 'xpath' — keep both spellings as-is.
        session['xpath'] = request.form.get('Xpath', '')
        session['referer'] = request.form.get('referer', '')

        return redirect(url_for('spider'))

    return render_template('home.html')
@app.route('/spider', methods=['GET', 'POST'])
def spider():
    """Fetch the session-configured URL and extract text via XPath.

    GET renders the empty results page; POST sends an HTTP request with the
    headers/proxy stored by index() and renders the matched text nodes.
    Network, XPath and unexpected errors each render error.html.
    """
    if request.method == 'POST':
        # Pull crawl parameters stored in the session by index().
        url = session.get('url')
        cookies = session.get('cookie')
        user_agent = session.get('user_agent')
        proxy_ip = session.get('proxy_ip')
        # Default to '' so a missing key cannot crash .endswith() below.
        xpath_expr = session.get('xpath') or ''
        referer = session.get('referer')
        # Ensure the expression yields text nodes rather than elements.
        if not xpath_expr.endswith('/text()'):
            xpath_expr += '/text()'

        # Build the request headers from the user-supplied values.
        headers = {
            'User-Agent': user_agent,
            'Cookie': cookies,
            'Referer': referer
        }
        try:
            # Proxy configuration (optional).
            proxies = {}
            if proxy_ip:
                # Auto-prepend a scheme header (assume an HTTP proxy).
                if not proxy_ip.startswith(('http://', 'https://')):
                    proxy_ip = f'http://{proxy_ip}'
                # Route both plain and TLS traffic through the same proxy.
                proxies = {
                    'http': proxy_ip,
                    'https': proxy_ip
                }

            response = requests.get(
                url,
                headers=headers,
                proxies=proxies,
                timeout=10  # keep a hung target from blocking the worker
            )
            # raise_for_status() returns None and raises HTTPError on 4xx/5xx;
            # the previous `res = ...` assignment was dead and is removed.
            response.raise_for_status()
            # Parse the page and evaluate the XPath expression.
            html = etree.HTML(response.text)
            results = html.xpath(xpath_expr)
            return render_template('spider.html', results=results)

        except requests.exceptions.RequestException as e:
            return render_template('error.html',
                                   error=f"请求失败: {str(e)}")
        except etree.XPathError as e:
            return render_template('error.html',
                                   error=f"XPath解析错误: {str(e)}")
        except Exception as e:
            return render_template('error.html',
                                   error=f"未知错误: {str(e)}")

    return render_template('spider.html')
@app.route('/home')
def home():
    """Serve the dashboard landing page."""
    template_name = 'index.html'
    return render_template(template_name)
@app.route('/movie')
def movie():
    """Render the movie list page from the movie250 SQLite table.

    Bug fix: the query results were fetched into `datalist` but never passed
    to the template, leaving the page without data.
    """
    con = sqlite3.connect('movie.db')
    try:
        cur = con.cursor()
        cur.execute("select * from movie250")
        datalist = cur.fetchall()
        cur.close()
    finally:
        # Close the connection even if the query fails (was leaked on error).
        con.close()
    return render_template('movie.html', movies=datalist)
@app.route('/score')
def score():
    """Render the score-distribution page for the movie250 table.

    Bug fix: `score` and `num` were computed but never handed to the
    template, so the chart had no data to draw.
    """
    score = []  # distinct score values
    num = []    # number of movies per score
    con = sqlite3.connect('movie.db')
    try:
        cur = con.cursor()
        sql = "select score,count(score) from movie250 group by score"
        for row in cur.execute(sql):
            score.append(row[0])
            num.append(row[1])
        cur.close()
    finally:
        # Close the connection even if the query fails (was leaked on error).
        con.close()

    return render_template('score.html', score=score, num=num)

@app.route('/word')
def word():
    """Serve the word-cloud page."""
    page = 'word.html'
    return render_template(page)





if __name__ == '__main__':
    # Development entry point (Flask's built-in server); use a WSGI server
    # such as gunicorn behind a reverse proxy in production.
    app.run()