import engine1
import projectdb
'''
Author: Lu Yongjun
This module crawls job-listing data and is the entry point of the crawler.
Run it directly and all crawled data will be written to the database.
'''
# Search keywords; index 0 is a placeholder so keyword index doubles as the
# category id stored in the database (original code started at j == 1).
KEYWORDS = ["", "java", "web", "php", "python", "android"]

# Liepin search URL template: first %s is the keyword, second %s the page number.
URL_TEMPLATE = "https://www.liepin.com/zhaopin/?init=-1&headckid=43eb5ddb5ff7fe41&fromSearchBtn=2&ckid=43eb5ddb5ff7fe41&degradeFlag=0&key=%s&siTag=I-7rQ0e90mv8a37po7dV3Q~fA9rXquZc5IkJpXC-Ycixw&d_sfrom=search_industry&d_ckId=1b6687b6ed47d14a5a6b19fd83a7aa91&d_curPage=0&d_pageSize=40&d_headId=1b6687b6ed47d14a5a6b19fd83a7aa91&curPage=%s"


def main():
    """Crawl liepin.com job listings for every keyword and store new rows.

    For each keyword (indices 1..5 of KEYWORDS) the first 100 result pages
    are fetched.  Each listing that is not already present in the database
    (``db.check(...) == ()``) is inserted via ``db.insert_db``.
    """
    # One database handle for the whole run — the original created a new
    # Projectdb() for every single row, which wastes connections.
    # NOTE(review): assumes Projectdb is safe to reuse across inserts — the
    # per-row construction in the original did not appear intentional.
    db = projectdb.Projectdb()

    for keyword_index in range(1, len(KEYWORDS)):
        keyword = KEYWORDS[keyword_index]
        for page in range(100):  # original counted pages 0..99 per keyword
            spider = engine1.Spider()
            url = URL_TEMPLATE % (keyword, str(page))
            # datareturn yields parallel lists: pays, regions, job names,
            # addresses, companies — TODO confirm against engine1.Spider.
            result = spider.datareturn(url)
            for pay, region, job_name, address, company in zip(
                result[0], result[1], result[2], result[3], result[4]
            ):
                # Skip listings already recorded for this keyword category.
                if db.check(address, keyword_index) == ():
                    print(keyword_index)
                    print(pay, region, job_name, address, company)
                    db.insert_db(job_name, pay, company, address,
                                 keyword_index, region)


if __name__ == "__main__":
    main()