#!/usr/local/bin/python2.7
# -*- coding: utf-8 -*-
import requests
import json
import MySQLdb as mydb
import time 
# Browser-disguise headers live in Spider.getData (Lagou rejects bare clients).
# AJAX endpoint template: the %s placeholder is filled with the target city.
url = 'https://www.lagou.com/jobs/positionAjax.json?city=%s&needAddtionalResult=false&isSchoolJob=0'
# Skill keywords to crawl.
jobs = ['php', 'java', 'python']
# Candidate cities (Hangzhou, Shanghai, Wenzhou).
city = ['杭州', '上海', '温州']
# Shared MySQL handles used by every Spider instance; commit happens in run().
# NOTE(review): credentials are hardcoded — move to config/env before reuse.
db = mydb.connect("127.0.0.1","root","123456","spider",charset='utf8')
cursor = db.cursor()
cursor.execute('SET CHARACTER SET utf8;')
class Spider:
    """Scrape Lagou's position-search AJAX endpoint for one keyword.

    Each fetched position that is not already present in the ``jobs``
    table is inserted through the module-level ``cursor``; the shared
    connection is committed once the crawl finishes (see run()).
    """

    def __init__(self, url, kd):
        # url: positionAjax.json endpoint with the city already substituted.
        # kd:  search keyword (e.g. 'python'); also stored in the jobType column.
        self.url = url
        self.keyword = kd
        self.setPage(1)

    def setKeyWord(self, kd):
        """Switch to a new search keyword (caller resets the page)."""
        self.keyword = kd

    def setPage(self, pn):
        """Jump to result page `pn` (Lagou pages are 1-based)."""
        self.page = pn

    def setUrl(self, url):
        """Point the spider at a different city's endpoint."""
        self.url = url

    def nextPage(self):
        """Advance to the next result page."""
        self.page += 1

    def hasData(self, data):
        """Return True when the AJAX response holds at least one position.

        Lagou answers ``{'success': False, ...}`` when it throttles or
        rejects the request; a missing 'success' key is treated the same
        way instead of raising KeyError.
        """
        if not data.get('success'):
            print('request failed')
            return False
        return len(data['content']['positionResult']['result']) > 0

    def saveData(self, data):
        """Insert every not-yet-seen position from `data` into ``jobs``.

        List-valued fields are flattened to comma-joined strings first.
        Rows go through the shared cursor; commit happens in run().
        """
        result = data['content']['positionResult']['result']
        # Parameterized INSERT: 24 columns, 24 placeholders. Parameters are
        # bound by the driver, so no manual escaping is needed.
        sql = ('insert into jobs('
               'positionName, workYear, education, companyShortName, '
               'positionId, companyId, createTime, city, salary, '
               'positionAdvantage, financeStage, industryField, companySize, '
               'companyLabelList, publisherId, district, positionLables, '
               'industryLables, businessZones, companyFullName, firstType, '
               'secondType, companyLogo, jobType) values ('
               '%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,'
               '%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);')

        for obj in result:
            if self.checkExists(obj['positionId']):
                continue  # already stored on a previous page/run
            # Flatten list fields so they fit plain text columns.
            for key in ('businessZones', 'industryField', 'positionLables',
                        'companyLabelList', 'industryLables'):
                obj[key] = self.parseArr(obj[key])
            save_data = (obj['positionName'],
                         obj['workYear'], obj['education'],
                         obj['companyShortName'], obj['positionId'],
                         obj['companyId'], obj['createTime'], obj['city'],
                         obj['salary'], obj['positionAdvantage'],
                         obj['financeStage'], obj['industryField'],
                         obj['companySize'], obj['companyLabelList'],
                         obj['publisherId'], obj['district'],
                         obj['positionLables'],
                         obj['industryLables'],
                         obj['businessZones'],
                         obj['companyFullName'], obj['firstType'],
                         obj['secondType'], obj['companyLogo'], self.keyword)
            cursor.execute(sql, save_data)
            print(cursor.lastrowid)

    def checkExists(self, _id):
        """Return True when positionId `_id` already exists in ``jobs``."""
        cursor.execute('SELECT COUNT(1),1 FROM jobs WHERE positionId=%s', (_id,))
        row = cursor.fetchone()
        if row[0] == 0:
            return False
        print('data exists')
        return True

    def parseArr(self, arr):
        """Join a list into a comma-separated string; anything else -> ''."""
        if isinstance(arr, list):
            return ','.join(arr)
        return ''

    def getData(self):
        """POST the current keyword/page to Lagou and return the parsed JSON.

        The User-Agent/Referer headers disguise the request as a browser;
        Lagou rejects requests without them.
        """
        header = {'User-Agent': 'MMozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.101 Safari/537.36',
                  'Referer': 'https://www.lagou.com/jobs/list_PHP'}
        post = {'kd': self.keyword, 'pn': self.page}
        print(post)
        res = requests.post(self.url, headers=header, data=post)
        return json.loads(res.text)

    def run(self):
        """Fetch pages until an empty/failed response, then commit once."""
        while True:
            data = self.getData()
            if not self.hasData(data):
                break
            self.saveData(data)
            self.nextPage()
        db.commit()
        print('数据' + self.keyword + '爬取完毕！')

# One-off crawl: 'python' postings for the city of 温州 (Wenzhou).
wenzhou_url = url % ('温州')
spider = Spider(wenzhou_url, 'python')
spider.run()
'''
u = url%('杭州')
spider = Spider(u, 'python')
spider.run()
spider.setKeyWord('java')
spider.setPage(1)
spider.run()
spider.setKeyWord('python')
spider.setPage(1)
spider.run()
exit()
'''
'''
for c in city:
    u = url%(c)
    for j in jobs:
        spider = Spider(u, j)
        spider.run()
        #time.sleep(2)
'''
'''
spider = Spider('', '')
for c in city:
    u = url%(c)
    spider.setUrl(u)
    for j in jobs:
        spider.setKeyWord('python')
        spider.setPage(1)
        spider.run()
        #time.sleep(2)
'''
# All crawls are done — release the shared MySQL handles.
cursor.close()
db.close()
