'''
Created on 2019-4-13

@author: mayn
'''
# coding=utf-8  -- NOTE: PEP 263 only honors this declaration on line 1 or 2,
# so here it is inert; Python 3 source files default to UTF-8 anyway.
import re
import pymysql
import urllib.request
import urllib.parse
from bs4 import  BeautifulSoup
from city import City
import random
from pip._vendor.pyparsing import Keyword


# Module-level helper shared by main(): City.getCityCode maps a Chinese city
# name to the site's numeric city code (implementation lives in city.py).
cityUitl = City()
class BossSpider(object):
    """Crawl job listings from zhipin.com (Boss直聘) for one city/keyword
    page range and persist each previously unseen job into the MySQL
    table ``bossJob``.
    """
    # Site base URL; the city code and query string are appended per request.
    url = "https://www.zhipin.com/"

    def __init__(self, cityCode, keyWord, startPage, endPage, userAgent):
        # cityCode:  site-specific city code string, e.g. "101280600"
        # keyWord:   search keyword sent as the "query" parameter
        # startPage / endPage: inclusive page range to crawl
        # userAgent: list of UA strings; element 0 is used for every request
        self.cityCode = cityCode
        self.keyWord = keyWord
        self.startPage = startPage
        self.endPage = endPage
        self.userAgent = userAgent

    def handle_request(self, page):
        """Build and return the urllib Request for one result page."""
        data = {
            "query": self.keyWord,
            "page": page
        }
        nowUrl = self.url + 'c' + self.cityCode + '/?' + urllib.parse.urlencode(data)
        headers = {"User-Agent": self.userAgent[0]}
        request = urllib.request.Request(url=nowUrl, headers=headers)
        return request

    def parse_content(self, content):
        """Parse one HTML result page and insert every unseen job row.

        Scraping is regex-over-HTML-fragments and depends on the site's
        2019 markup; selectors/patterns must track site changes.
        """
        soup = BeautifulSoup(content, "lxml")
        li_list = soup.select('.job-list > ul > li')
        # Patterns hoisted out of the per-listing loop (compile once).
        pat_jobid = re.compile(r'data-jobid="(.+?)"')
        p_release_address = re.compile(r"<p>(.+?)<em class=\"vline\">")
        p = re.compile(r"</em>(.+?)<em class=\"vline\"></em>(.+?)</p>")
        for li in li_list:
            # job id (from the data-jobid attribute of the title link)
            jobsIdIofo = li.select('.info-primary > h3 > a')
            jobId = pat_jobid.findall(str(jobsIdIofo))[0]
            # job title
            jobs = li.select('.info-primary > h3 > a > .job-title')[0].text
            # salary ("momey" kept for column-name compatibility)
            momey = li.select('.info-primary > h3 > a > .red')[0].text
            info = li.select('.info-primary > p')
            # work address: text before the first vertical-line separator
            address = p_release_address.findall(str(info))[0]
            # required experience and education, split by the separators
            experience, education = p.findall(str(info))[0]
            # company name
            company = li.select('.info-company > .company-text > h3 > a')[0].text
            companyInfo = li.select(".info-company > .company-text > p")
            # business/service sector of the company
            service = p_release_address.findall(str(companyInfo))[0]
            # Financing stage is optional in the markup: when absent, only
            # the company size follows the separator.
            stage_matches = p.findall(str(companyInfo))
            if not stage_matches:
                financingStages = ""
                companySize = re.findall(r"<em class=\"vline\"></em>(.+?)</p>",
                                         str(companyInfo))[0]
            else:
                financingStages, companySize = stage_matches[0]
            # SECURITY/BUG FIX: all scraped values are untrusted input, so
            # they are bound as parameters instead of being %-formatted into
            # the SQL text (the old code was injectable, and its SELECT put
            # the jobId in unquoted, which broke on non-numeric ids).
            if self.selectJobId("select jobId from bossJob WHERE jobId=%s",
                                (jobId,)) == 0:
                sql_insert = ("insert into bossJob(jobId,jobs,momey,address,"
                              "experience,education,company,service,"
                              "financingStages,companySize) "
                              "values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
                self.insert(sql_insert,
                            (jobId, jobs, momey, address, experience, education,
                             company, service, financingStages, companySize))
            else:
                print("数据已经存在！！！")

    def _connect(self):
        # Single place for connection settings; keyword arguments because
        # recent pymysql versions no longer accept positional connect args.
        return pymysql.connect(host="localhost", user="root", password="123456",
                               database="boss", charset='utf8')

    def insert(self, sql, params=None):
        """Execute an INSERT; ``params`` are bound safely by pymysql."""
        db = self._connect()
        try:
            cursor = db.cursor()
            data = cursor.execute(sql, params)
            db.commit()
            if data == 1:
                print("数据插入成功！！！！")
            else:
                print("数据插入失败！！！！")
        finally:
            # Close even when execute/commit raises (the original leaked
            # the connection on any exception).
            db.close()

    def selectJobId(self, sql_selectJobId, params=None):
        """Run the given SELECT and return the number of matching rows."""
        db = self._connect()
        try:
            cursor = db.cursor()
            data = cursor.execute(sql_selectJobId, params)
        finally:
            db.close()
        return data

    def run(self):
        """Fetch and process every page in [startPage, endPage]."""
        for page in range(self.startPage, self.endPage + 1):
            request = self.handle_request(page)
            # fetch the page and decode it (site serves UTF-8)
            content = urllib.request.urlopen(request).read().decode()
            self.parse_content(content)
    
def main():
    """Pick a random city, keyword and user agent, then crawl pages 1-10."""
    # Candidate cities; names are resolved to site city codes via city.py.
    cities = ["深圳", "广州", "佛山", "肇庆", "杭州", "上海"]
    cityCode = cityUitl.getCityCode(random.choice(cities))
    # Candidate search keywords.
    keyWords = ["java", "php", "前端", "javascript", "python", "c", "c++"]
    # BUG FIX: the original passed random.sample()'s one-element LIST as the
    # keyword, so urlencode sent the literal text "['java']" as the query.
    # Pass the keyword string itself.
    keyWord = random.choice(keyWords)
    # Inclusive page range to crawl.
    startPage = 1
    endPage = 10
    userAgents = ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1","Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36","Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5"]
    # BossSpider reads userAgent[0], so hand it a one-element list.
    userAgent = [random.choice(userAgents)]

    # Create the spider and start crawling.
    spider = BossSpider(cityCode, keyWord, startPage, endPage, userAgent)
    spider.run()
    
# Script entry point: crawl only when executed directly, not on import.
if __name__ =="__main__":
    main()







