#定期将拉钩网的数据下载并删除爬虫服务器的数据节省空间
import pymysql,time,datetime

#数据来源
class SourceModel(object):
    """Read side of the sync: fetches crawled rows from the spider server.

    Connects to the remote `article_spider` database and pulls rows from
    `lagou_job` for a given time window.
    """

    def __init__(self):
        # Connect to the source (spider) database.  DictCursor makes
        # fetchall() return dicts so get() can convert rows to tuples.
        self.conn = pymysql.connect(
            host='192.168.20.179',
            user='root',
            passwd='root',
            db='article_spider',
            charset='utf8',
            cursorclass=pymysql.cursors.DictCursor)
        self.cursor = self.conn.cursor()

    def get(self, end_time):
        """Fetch rows crawled between `end_time` and today's midnight.

        :param end_time: lower bound, a 'YYYY-MM-DD HH:MM:SS' string.
        :return: 0 when no rows matched (callers test `data == 0`),
                 otherwise a list of tuples whose column order matches the
                 INSERT column list used by ObjectModel.save().
        """
        today = str(datetime.date.today()) + ' 00:00:00'
        # Select by crawl_update_time window.
        sql = """
            SELECT tags,crawl_time,crawl_update_time,min_salary,max_salary,title, url,url_object_id,job_city, work_years, degree_need,
            job_type, publish_time, job_advantage, job_desc, job_addr, company_url, company_name, job_id 
            FROM lagou_job 
            WHERE crawl_update_time > %s AND crawl_update_time < %s
            """
        self.cursor.execute(sql, (end_time, today))
        rows = self.cursor.fetchall()

        if not rows:
            return 0
        print('查询%s到%s的数据，一共有%d条' % (end_time, today, len(rows)))
        # Convert dict rows to tuples for executemany(); pymysql preserves
        # the SELECT column order in each row dict.
        return [tuple(row.values()) for row in rows]

    def __del__(self):
        # Close the DB handles.  Guard with getattr: if __init__ failed
        # before assigning cursor/conn (e.g. connect() raised), the original
        # code raised a secondary AttributeError here.
        cursor = getattr(self, 'cursor', None)
        if cursor is not None:
            cursor.close()
        conn = getattr(self, 'conn', None)
        if conn is not None:
            conn.close()

#要更新的数据库
class ObjectModel(object):
    """Write side of the sync: upserts rows into the local database.

    Connects to the local `article_spider` database and bulk-inserts rows
    into `lagou_job`, updating in place on duplicate key.
    """

    def __init__(self):
        # Connect to the local (destination) database.
        self.conn = pymysql.connect(host='127.0.0.1', user='root', passwd='root', db='article_spider', charset='utf8', cursorclass=pymysql.cursors.DictCursor)
        self.cursor = self.conn.cursor()

    def save(self, param):
        """Bulk-upsert a batch of rows.

        :param param: sequence of tuples whose order matches the column
                      list below (as produced by SourceModel.get()).
        """
        # INSERT ... ON DUPLICATE KEY UPDATE keeps the sync idempotent:
        # re-running over the same window overwrites instead of failing.
        sql = """
            insert into lagou_job(tags,crawl_time,crawl_update_time,min_salary,max_salary,title, url,url_object_id,job_city, work_years, degree_need,
            job_type, publish_time, job_advantage, job_desc, job_addr, company_url, company_name, job_id)
            VALUES (%s,%s,%s,%s, %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            ON DUPLICATE 
            KEY UPDATE 
            tags=VALUES(tags),
            crawl_time=VALUES(crawl_time),
            crawl_update_time=VALUES(crawl_update_time),
            min_salary=VALUES(min_salary),
            max_salary=VALUES(max_salary),
            title=VALUES(title),
            url=VALUES(url),
            url_object_id=VALUES(url_object_id),
            job_city=VALUES(job_city),
            work_years=VALUES(work_years),
            degree_need=VALUES(degree_need),
            job_type=VALUES(job_type),
            publish_time=VALUES(publish_time),
            job_advantage=VALUES(job_advantage),
            job_desc=VALUES(job_desc),
            job_addr=VALUES(job_addr),
            company_url=VALUES(company_url),
            company_name=VALUES(company_name),
            job_id=VALUES(job_id)
        """
        self.cursor.executemany(sql, param)
        self.conn.commit()

    def __del__(self):
        # Close the DB handles.  Guard with getattr: if __init__ failed
        # before assigning cursor/conn (e.g. connect() raised), the original
        # code raised a secondary AttributeError here.
        cursor = getattr(self, 'cursor', None)
        if cursor is not None:
            cursor.close()
        conn = getattr(self, 'conn', None)
        if conn is not None:
            conn.close()

#询问提示框
class InputModel(object):
    """Interactive prompts for the sync run (time window and confirmation)."""

    def set_time(self):
        """Ask how many days back to sync; return that day as
        'YYYY-MM-DD 00:00:00' (midnight, so the window covers whole days)."""
        days_back = input('同步过去几天的数据：')
        start_date = datetime.date.today() - datetime.timedelta(days=int(days_back))
        return '%s 00:00:00' % start_date

    def set_confirm(self):
        """Ask for a Y/N confirmation; return 1 on 'Y'/'y', 0 otherwise."""
        answer = input('是否同步数据（Y/N）：').upper()
        return 1 if answer == 'Y' else 0

if __name__=='__main__':

    # Ask the operator for the sync window.
    input_model = InputModel()
    end_time = input_model.set_time()

    # Pull the rows to transfer from the spider server.
    source_model = SourceModel()
    data = source_model.get(end_time)
    if data == 0:
        print('没有任何数据----退出！')
        exit()

    # Require explicit confirmation before writing anywhere.
    if input_model.set_confirm() == 0:
        print('退出！')
        exit()

    object_model = ObjectModel()
    limit = 100  # batch size: cap rows per INSERT to keep DB load reasonable
    total = 0
    # Walk the data in fixed-size slices (replaces the hand-rolled
    # page/offset bookkeeping of the original while-loop).
    for offset in range(0, len(data), limit):
        batch = data[offset:offset + limit]
        object_model.save(batch)
        total += len(batch)
        # Report the cumulative rows actually written.  The original printed
        # limit*page BEFORE saving, which over-reported on the final partial
        # batch and printed once more for the empty slice that ended the loop.
        print('更新了%d条数据' % total)
        time.sleep(2)  # throttle between batches



