from crawler.startup import *
from .cards import *
from datetime import datetime, timedelta

# Unique card id for this crawler.
# NOTE(review): `id` shadows the builtin; kept because run() below and
# possibly other modules reference this module-level name.
id = 4
# Card record for the CHSI "policy navigation" page this module crawls.
Card_yzzcdh = Card(id=id, name='yzzcdh', cardname='研招网政策导航', url="https://yz.chsi.com.cn/kyzx/zcdh/")


class Yzzcdh(Base):
    """ORM model for one crawled article from the policy-navigation page."""
    __tablename__ = 'yzzcdh'
    # The article title is the primary key, so session.merge() acts as an upsert.
    title = Column(String(100), primary_key=True)
    url = Column(String(200))
    time = Column(DateTime, comment='创建时间')
    # Default query ordering: newest articles first.
    # NOTE(review): the "order_by" mapper argument was removed in
    # SQLAlchemy 1.1+ — confirm the pinned SQLAlchemy version still supports
    # it; otherwise order explicitly in queries instead.
    __mapper_args__ = {
        "order_by": time.desc()
    }


# Core-level table definition mirroring the Yzzcdh model; registering it on
# `metadata` lets metadata.create_all(engine) create the table on first run.
Table_yzzcdh = Table(
    'yzzcdh',
    metadata,
    Column('title', String(100), primary_key=True),
    Column('url', String(200)),
    Column('time', DateTime),
)


def run():
    """Crawl the CHSI policy-navigation page and upsert its articles.

    Registers the card and creates tables on first run; skips the crawl
    when the card is locked or was refreshed less than three hours ago.
    Each article is upserted into ``yzzcdh`` keyed by title, and the card
    row itself is merged back at the end.
    """
    card = session.query(Card).filter_by(id=id).first()
    if card is None:
        # First run: register this card and make sure all tables exist.
        session.add_all([Card_yzzcdh])
        metadata.create_all(engine)
    elif card.is_lock:
        # Another worker holds the lock; do not crawl concurrently.
        print(card.is_lock)
        return
    elif card.updatetime and datetime.now() - card.updatetime < timedelta(hours=3):
        # Data is still fresh; throttle re-crawls to once per three hours.
        print('updatetime is so short')
        return
    driver.implicitly_wait(10)
    try:
        # Raises if the page fails to load within the 10-second implicit wait.
        driver.get(Card_yzzcdh.url)
        links = driver.find_elements_by_xpath('/html/body/div[1]/div[2]/div[3]/div[1]/ul//a')
        stamps = driver.find_elements_by_xpath("/html/body/div[1]/div[2]/div[3]/div[1]/ul//span[@class='span-time']")
        # zip() pairs each link with its timestamp and stops at the shorter
        # list, so a length mismatch no longer raises IndexError mid-loop
        # (previously swallowed by the broad except, losing remaining rows).
        # Also avoids shadowing with a local named `time`.
        for link, stamp in zip(links, stamps):
            session.merge(Yzzcdh(title=link.text, url=link.get_attribute('href'), time=stamp.text))
    except Exception as e:
        # Best-effort crawl: log and fall through so the card row still updates.
        print(e)
    session.merge(Card_yzzcdh)
