import json
import sys
import traceback
import redis as redis
import time
import requests
from lxml.etree import HTML
import pymysql
import datetime
from urllib3 import disable_warnings

# Requests below are made with verify=False; silence urllib3's
# InsecureRequestWarning spam up front.
disable_warnings()

# Base headers for every outgoing request. A "Cookie" entry (the douban
# dbcl2 session value read from redis) is added in __main__ before crawling.
header = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36"
}


def get_proxy():
    """Fetch a fresh proxy address from the proxy-pool API.

    Reads one ``host:port`` line from ``proxy_url`` (module global set in
    ``__main__``) and returns a ``proxies`` mapping usable by ``requests``;
    both schemes are tunnelled through the same HTTP proxy endpoint.
    """
    # A timeout keeps the scraper from hanging forever if the proxy API
    # stops responding (the original call had none).
    ip = requests.get(proxy_url, timeout=10).text.strip()
    return {
        "http": "http://" + ip,
        "https": "http://" + ip
    }


def init():
    """Build all external clients from the JSON config file given in argv[1].

    Returns a 5-tuple:
        (mysql dict-cursor, mysql connection, redis client,
         proxy-pool API url, dbcl2 session cookie value read from redis).
    """
    # Load the configuration; the with-block closes the file handle instead
    # of leaking it like the previous json.load(open(...)) did.
    config_path = sys.argv[1]
    with open(config_path, 'r', encoding="utf-8") as config_file:
        config_data = json.load(config_file)

    # MySQL: a DictCursor so query results come back as dicts (see the
    # result["id"] lookup in __main__).
    mysql_config = config_data["mysql"]
    conn = pymysql.connect(host=mysql_config["host"], port=mysql_config["port"], user=mysql_config["user"],
                           password=mysql_config["pass"], database=mysql_config["database"])
    cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)

    # Redis: decode_responses=True so values come back as str, not bytes.
    redis_config = config_data["redis"]
    r = redis.Redis(host=redis_config["host"], port=redis_config["port"], db=redis_config["database"],
                    password=redis_config["pass"], decode_responses=True)
    return cursor, conn, r, config_data["proxy"]["url"], r.get("dbcl2")


def parse(text):
    """Yield one tuple per topic row of a douban group listing page.

    Each yielded tuple is:
        (title, topic id, author name, author id, "YYYY-MM-DD HH:MM:00")
    The listing only shows month-day/time, so the current year (module
    global ``now_year``) is prepended to build a full timestamp.
    """
    document = HTML(text)
    # Skip the header row of the topics table.
    for row in document.xpath('//table/tr[position()>1]'):
        cells = row.xpath('./td')
        title = cells[0].xpath("./a/@title")[0]
        # Topic/author ids are the second-to-last path segment of their urls.
        topic_id = cells[0].xpath("./a/@href")[0].split("/")[-2]
        author_name = cells[1].xpath("./a/text()")[0]
        author_id = cells[1].xpath("./a/@href")[0].split("/")[-2]
        updated_at = "{}-{}:00".format(now_year, cells[3].xpath("./text()")[0])
        yield title, topic_id, author_name, author_id, updated_at


def do_request():
    """Crawl every configured source url page-by-page, feeding rows to data_wash.

    First probes the site until a logged-in response comes back, then walks
    up to 5 pages (25 topics each) per source url, stopping a url early once
    its rows get older than ``max_spend_second``.
    """
    # Each redis entry looks like "<url>-<city name>"; split on "-" below.
    source_url_list = redis_cli.lrange("source_urls", 0, 999)
    # Probe loop: retry until one request succeeds with the logged-in marker.
    while True:
        proxy = get_proxy()
        print("获取代理成功----", proxy)
        # NOTE(review): this line discards the proxy fetched just above, so
        # every request below goes out directly — looks like a debugging
        # leftover; confirm whether the proxy should actually be used.
        proxy = {}
        try:
            response = requests.get(source_url_list[0].split("-")[0],params={"start": 0}, headers=header, proxies=proxy,verify=False,timeout = 2)
        except Exception:
            continue
        else:
            # "更多小组讨论" only appears on a fully rendered (logged-in) group page.
            if "更多小组讨论" in response.text and response.status_code == 200:
                break
        finally:
            # Throttle between probe attempts regardless of outcome.
            time.sleep(1)

    for source_url in source_url_list:
        break_current_url = False
        for page in range(0, 5):
            # The site paginates 25 topics per page via the "start" offset.
            params = {"start": page * 25}
            try:
                # "<url>-<city>" → numeric city code (via city_map) + bare url.
                city_code = city_map[source_url.split("-")[1]]
                real_url = source_url.split("-")[0]
                response = requests.get(real_url, params=params, headers=header, proxies=proxy, verify=False)
            except Exception:
                # NOTE(review): failure log uses source_url (with the "-city"
                # suffix) while the success log below uses real_url — confirm
                # intended.
                print("请求{}失败,错误原因{}".format("{}?start={}".format(source_url, page * 25), traceback.format_exc()))
                time.sleep(1)
                continue
            print("请求{}成功".format("{}?start={}".format(real_url, page * 25)))
            for per_line in parse(response.text):
                # per_line[4] is the row's "updated at" timestamp; rows are
                # newest-first, so once one is older than max_spend_second the
                # rest of this url can be skipped entirely.
                if now_time_stamp - int(
                        datetime.datetime.strptime(per_line[4], "%Y-%m-%d %H:%M:%S").timestamp()) > max_spend_second:
                    break_current_url = True
                    break
                data_wash(per_line, city_code)
            if break_current_url:
                break


def data_wash(data, city_code):
    """Filter one parsed row and derive rent/house type before storing it.

    ``data`` is a (title, topic_id, author_name, author_id, updated_at)
    tuple as yielded by parse(). A row is dropped when the topic or author
    was seen before, the author is blacklisted, the title looks like a
    "wanted" post, or no room layout can be derived from the title;
    otherwise it is handed to data_store().
    """
    title = data[0]
    topic_id = data[1]
    author_id = data[3]

    # Dedup / blacklist gates, checked in the same order as the redis keys
    # are written elsewhere in this file.
    if redis_cli.hget("topic_filter", topic_id):
        print("{}丢弃,原因:帖子已采集过".format(data))
        return
    if redis_cli.get("z_normal_author_filter_{}".format(author_id)):
        print("{}丢弃,原因:近段时间采集过的作者".format(data))
        return
    if redis_cli.hget("bad_author_filter", author_id):
        print("{}丢弃,原因:狗中介".format(data))
        return
    if "求" in title:
        print("{}丢弃,原因:疑似求租".format(data))
        return

    # Derive the room count from layout keywords in the title; the smallest
    # matching size wins, and any explicit size implies a whole-flat rent.
    rent_type = 0
    house_type = 0
    layout_keywords = (
        (1, ("套一", "套1", "一室", "1室")),
        (2, ("套二", "套2", "二室", "两室", "2室")),
        (3, ("套三", "套3", "三室", "3室")),
        (4, ("套四", "套4", "四室", "4室")),
    )
    for rooms, words in layout_keywords:
        if any(word in title for word in words):
            house_type = rooms
            rent_type = 1
            break

    # Explicit shared-rent wording overrides the default whole-flat guess.
    if any(word in title for word in ("合租", "单间", "主卧", "次卧")):
        rent_type = 2
    elif "整租" in title:
        rent_type = 1

    if rent_type == 0 or house_type == 0:
        print("{}丢弃,原因:获取不到具体房屋信息".format(data))
        return
    data_store(topic_id, author_id, rent_type, house_type, city_code)


def data_store(topic_id, author_id, rent_type, house_type, city_code):
    """Insert one accepted topic and mark topic/author as seen in redis.

    On insert failure the transaction is rolled back and diagnostics are
    printed; the redis filters are only written after a successful commit,
    so a failed row can be picked up again on a later run.
    """
    try:
        # NOTE(review): author_id is not persisted in the topic table — it is
        # only used for the redis author filter below; confirm intended.
        mysql_cursor.execute('insert into topic values (0,%s,%s,%s,%s)',
                             (topic_id, str(rent_type), str(house_type), str(city_code)))
    except Exception:
        print(topic_id, author_id, rent_type, house_type)
        print(rent_type, house_type, type(rent_type), type(house_type))
        traceback.print_exc()
        mysql_conn.rollback()
    else:
        mysql_conn.commit()
        # Suppress this author for normal_author_filter_expire_time seconds,
        # and remember the topic id permanently.
        redis_cli.setex("z_normal_author_filter_{}".format(author_id), normal_author_filter_expire_time, "1")
        redis_cli.hset("topic_filter",topic_id,"1")


if __name__ == '__main__':
    # Maps the city name embedded in each source url (after the "-") to the
    # numeric code stored with every topic row.
    city_map = {
        "成都": 1,
        "宁波":2
    }
    now_time = datetime.datetime.now()
    now_time_stamp = int(now_time.timestamp())
    # The listing omits the year, so parse() prepends the current one.
    now_year = now_time.year
    # Posts older than this many seconds (6 h) stop pagination in do_request().
    max_spend_second = 21600
    normal_author_filter_expire_time = 604800  ## 7 days — posts by the same author within 7 days are ignored
    mysql_cursor, mysql_conn, redis_cli, proxy_url,dbcl2 = init()
    # dbcl2 is the douban session cookie kept in redis; attach it so all
    # requests are made as a logged-in user.
    header["Cookie"] = "dbcl2={}".format(dbcl2)
    try:
        do_request()
    except Exception:
        traceback.print_exc()
    else:
        # Record the newest topic id so downstream consumers know where this
        # crawl ended.
        mysql_cursor.execute('select id from topic order by id desc limit 1')
        result = mysql_cursor.fetchone()
        if result:
            print("爬取完毕,此时数据库最新id为{}".format(result["id"]))
            redis_cli.set("nowTopicId", result["id"])
        else:
            print("获取数据库最新id失败！！！")
    finally:
        # Always release the DB/redis connections, even on a crawl failure.
        mysql_cursor.close()
        mysql_conn.close()
        redis_cli.close()
