import requests
from bs4 import BeautifulSoup
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
import cityData

# Connect to the MySQL database.
# URL format: dialect+driver://user:password@host:port/database
# NOTE(review): the `encoding` kwarg is deprecated in SQLAlchemy 1.4 and removed
# in 2.0; for pymysql the charset normally goes in the URL as `?charset=...` —
# confirm against the SQLAlchemy version this project pins.
engine = create_engine("mysql+pymysql://root:123456@localhost:3306/python_course_design", encoding='utf-8')
# Bind the engine to a session factory.
# autocommit: whether to commit automatically; autoflush: whether to flush
# pending changes before queries; bind: the engine this session talks to.
Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Single session instance shared by every function in this module.
session = Session()
# declarative_base() maintains the class-to-table mapping; an application
# normally uses one Base instance and all entity classes inherit from it.
Base = declarative_base()


# ORM model (table name, column names and column types) for crawled cities.
class CityData(Base):
    """One row per known city: its display name and numeric city ID."""

    # Table this model maps to.
    __tablename__ = 'city_data'

    # Surrogate primary key, auto-incremented by the database.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Human-readable city name (up to 25 characters).
    city_name = Column(String(25))
    # Numeric identifier used by the weather data source.
    city_id = Column(Integer)

    def __init__(self, cityId, cityName):
        """Store the given numeric ID and display name on the new row."""
        self.city_name = cityName
        self.city_id = cityId


class ScreeningData(Base):
    """Cities present in `city_data` but absent from the hand-copied dict."""

    __tablename__ = 'screening_data'

    # Surrogate primary key, auto-incremented by the database.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Human-readable city name (up to 25 characters).
    city_name = Column(String(25))
    # Numeric identifier used by the weather data source.
    city_id = Column(Integer)

    def __init__(self, cityId, cityName):
        """Store the given numeric ID and display name on the new row."""
        self.city_name = cityName
        self.city_id = cityId


# Emit CREATE TABLE statements for every model registered on Base
# (tables that already exist are left untouched).
Base.metadata.create_all(bind=engine)


def _crawlInformation():
    """Crawl city names and IDs nationwide and persist new ones to `city_data`.

    The source page lists one "<id>,<name>" pair per <p> element; the first
    two paragraphs are headers and are skipped.

    Fixes over the original: <p> tags with nested markup have ``.string`` of
    ``None`` — the old ``!= " "`` check let that through and crashed on
    ``None.split``; HTTP errors are now raised instead of parsed; malformed
    rows without a comma are skipped.
    """
    # Page that carries the city-name/city-ID table.
    url = "https://blog.csdn.net/WXB_gege/article/details/106853189"
    # Browser-like request headers so the blog does not reject the request.
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 "
                      "Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,"
                  "*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "Accept-Encoding": "gzip, deflate, br",
        "Accept-Language": "zh-CN,zh;q=0.9"
    }
    # Fetch the page; fail loudly on HTTP errors instead of parsing an error page.
    response = requests.get(url=url, headers=headers, timeout=30)
    response.raise_for_status()
    response.encoding = "utf-8"
    html = response.text
    # Parse the HTML and locate the article body.
    bs = BeautifulSoup(html, "html.parser")
    body = bs.body
    div = body.find("div", {"id": "content_views"})
    # Skip the two header paragraphs before the data rows.
    pAll = div.findAll('p')[2:]

    pStringDictTotal = {}
    # Extract city name and ID from each "<id>,<name>" paragraph.
    for i in pAll:
        pString = i.string
        # <p> elements with nested tags yield .string == None; also skip
        # blank or whitespace-only paragraphs.
        if pString is None or not pString.strip():
            continue
        pStringList = pString.split(",")
        # Guard against rows that do not look like "id,name".
        if len(pStringList) < 2:
            continue
        # Map city name -> city ID in the in-memory dict.
        pStringDictTotal[pStringList[1]] = pStringList[0]
        # Only insert cities that are not in the table yet.
        data = session.query(CityData).filter(CityData.city_id == int(pStringList[0])).all()
        if not data:
            cityData_ = CityData(cityName=pStringList[1], cityId=int(pStringList[0]))
            session.add(cityData_)
            session.commit()
    # Release the connection back to the pool.
    session.close()

    # print(len(pStringDictTotal))
    # The printed pStringDictTotal was copied by hand into cityData.py as
    # pStringDictTotal_, so the lookup functions below do not have to
    # re-crawl the page on every query.


# Look up a city ID by name, variant 1: from the hand-copied dict in cityData.
def queryCityIdByCityNameFromDict(cityName):
    """Return the city ID for *cityName*, or the not-found message string."""
    return cityData.pStringDictTotal_.get(cityName, "查不到此地方！")


# Look up a city ID by name, variant 2: query the MySQL `city_data` table.
def queryCityIdByCityNameFromMysql(cityName):
    """Return the city ID stored for *cityName*, or the not-found message.

    Uses ``.first()`` so only one row is fetched, instead of materialising
    every matching row with ``.all()`` just to read index [0][0].
    """
    row = session.query(CityData.city_id).filter(CityData.city_name == cityName).first()
    if row is None:
        return "查不到此地方！"
    return row[0]


# Record the cities that exist in the database but not in the dict.
def screening():
    """Insert into `screening_data` every city present in `city_data` but
    missing from the hand-copied ``cityData.pStringDictTotal_`` dict.

    Fixes over the original: names and IDs came from two separate queries
    and were paired with ``zip`` — without an ORDER BY the two result sets
    are not guaranteed to arrive in the same order, so names could be paired
    with the wrong IDs. A single two-column query makes each pairing exact.
    All inserts are committed once instead of per row.
    """
    dictAllCityId = list(cityData.pStringDictTotal_.values())
    # One query returning (name, id) tuples for IDs absent from the dict.
    rows = (session.query(CityData.city_name, CityData.city_id)
            .filter(~CityData.city_id.in_(dictAllCityId))
            .all())
    # Preserve the original dict(zip(...)) semantics: duplicate names
    # collapse, keeping the last ID seen for each name.
    screeningCityDict = {name: cid for name, cid in rows}
    for name, cid in screeningCityDict.items():
        session.add(ScreeningData(cityId=cid, cityName=name))
    # Single commit for the whole batch.
    session.commit()
    # Release the connection back to the pool.
    session.close()


if __name__ == '__main__':
    # Other manual entry points: _crawlInformation() to (re)populate
    # `city_data`; queryCityIdByCityNameFromDict / ...FromMysql for lookups.
    screening()
