#!/usr/bin/python
# -*- coding: UTF-8 -*-
from selenium import webdriver;
from selenium.common.exceptions import TimeoutException
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import re;
from pyquery import PyQuery as pq;
import pymongo;
from  config import *
# Module-level driver/DB setup shared by every function below.
# NOTE: the original code first created a PhantomJS driver and then immediately
# rebound `browser` to Chrome, leaking the PhantomJS subprocess — the dead
# instantiation is removed.
browser = webdriver.Chrome()
browser.set_window_size(1400, 900)
wait = WebDriverWait(browser, 10)
client = pymongo.MongoClient(MONGO_URL)
db = client["lagou"]
from bs4 import BeautifulSoup
import time
import requests
import pandas as pd
import jieba.analyse
from io import StringIO


def search():
    """Open lagou.com, search for the keyword, scrape page 1 and return the page count.

    Returns:
        int: total number of result pages read from the pager widget.

    Retries forever on TimeoutException (iteratively, instead of the original
    unbounded recursion which could exhaust the stack on a flaky network).
    """
    while True:
        try:
            browser.get("https://www.lagou.com/")
            # Dismiss the city-selection overlay that blocks the page on first visit.
            root = wait.until(
                EC.presence_of_element_located((By.CSS_SELECTOR, "#changeCityBox > p.checkTips > a"))
            )
            root.click()
            # Renamed from `input` to avoid shadowing the builtin.
            search_box = wait.until(
                EC.presence_of_element_located((By.CSS_SELECTOR, "#search_input"))
            )
            submit = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, '#search_button')))
            search_box.send_keys("数据分析师")
            submit.click()
            # Wait until the pager is rendered so the total page count is present.
            wait.until(
                EC.presence_of_element_located((By.CSS_SELECTOR, "#s_position_list > div.item_con_pager > div > span.pager_next"))
            )
            soup = BeautifulSoup(browser.page_source, 'lxml')
            pager = soup.select("#s_position_list div.item_con_pager div")[0]
            # The 5th span inside the pager holds the last page number.
            total_pages = pager.select("span:nth-of-type(5)")[0].string
            getProducts()
            return int(total_pages)
        except TimeoutException:
            print("search timeout")
            # fall through and retry the whole flow

# Click a button that may be hidden/obscured.
def clickHideButton(driver, element):
    """Click `element` through JavaScript so overlays cannot intercept the click."""
    js_click = "arguments[0].click()"
    driver.execute_script(js_click, element)

def next_page(number):
    """Advance the result list to page `number` and scrape it.

    Args:
        number: 1-based page index expected after clicking "next"; we wait for
            the pager's current-page marker to show it before scraping.

    Retries iteratively on TimeoutException (the original recursed, which is
    unbounded), and uses the non-deprecated `find_element(By.CSS_SELECTOR, ...)`
    API instead of `find_element_by_css_selector`.
    """
    print("currentNumber", number)
    while True:
        try:
            # Scroll the pager into view; the "next" button can be off-screen.
            menu = browser.find_element(By.CSS_SELECTOR, ".item_con_pager > div > span.pager_next")
            ActionChains(browser).move_to_element(menu).perform()
            next_btn = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, '.item_con_pager > div > span.pager_next')))
            # JS click: the button is sometimes covered by an overlay.
            clickHideButton(browser, next_btn)
            wait.until(
                EC.text_to_be_present_in_element(
                    (By.CSS_SELECTOR, '#s_position_list > div.item_con_pager > div > span.pager_is_current'), str(number)
                )
            )
            getProducts()
            return
        except TimeoutException:
            print("next page timeout")

def getProducts():
    """Scrape every job card on the current result page and persist each to Mongo."""
    # Block until the list container exists; the element itself is not needed
    # (the original bound it to an unused `total` variable).
    wait.until(
        EC.presence_of_element_located((By.CSS_SELECTOR, "#s_position_list .item_con_list"))
    )
    soup = BeautifulSoup(browser.page_source, 'lxml')
    for item in soup.find_all('li', {'class': 'con_list_item'}):
        try:
            job_detail_url = select_item(item, ".p_top a", attrs="href")
            salary = select_item(item, ".p_bot .money", text="")
            company_name = select_item(item, ".company a", text="")
            # Side effect only: detach the salary node so the requirements
            # text selected below does not include it.
            select_item(item, ".p_bot .money", extract="")
            job_require = select_item(item, ".p_bot .li_b_l", text="")
            industry_compay = select_item(item, ".company .industry", text="")
            logo_url = select_item(item, ".com_logo a img", attrs="src")
            tags = select_item(item, ".list_item_bot .li_b_l", text="")
            welfare = select_item(item, ".list_item_bot .li_b_r", text="")
            city = select_item(item, ".p_top .add", text="")
            product = {
                "job_detail": job_detail_url,
                "salary": salary,
                "city": city,
                "company_name": company_name,
                # NOTE: the two misspelled keys below are kept as-is — documents
                # already stored in Mongo use them, so renaming would fork the schema.
                "job_requrie": job_require,
                "indestry_company": industry_compay,
                "logo_url": logo_url,
                "tags": tags,
                "welfare": welfare
            }
            print(product)
            saveToMongo(product)
        except Exception as e:
            # One malformed card must not abort the rest of the page.
            print(e)
            continue

def select_item(item, css, **kwargs):
    """Select the first element matching `css` inside `item` and act on it.

    Keyword modes (combinable; the later one listed wins for the return value):
        text="..."    -> return the element's stripped text
        attrs="name"  -> return the value of attribute `name`
        extract="..." -> detach the element from the tree (side effect only)

    Returns:
        The extracted value, or None when nothing matches, no mode produced a
        value, or any error occurred (errors are printed, not raised).
    """
    result = None
    try:
        matches = item.select(css)
        # Guard both "no result object" and "empty result list" in one check.
        if not matches:
            return None
        dest = matches[0]
        if "text" in kwargs:
            result = dest.get_text().strip()
        if "attrs" in kwargs:
            result = dest.attrs[kwargs["attrs"]]
        if "extract" in kwargs:
            # Side effect only: remove the node so later selections skip it.
            dest.extract()
    except Exception as e:
        print(e)
        result = None
    return result






def saveToMongo(result):
    """Insert one scraped job dict into the `jobs_data` collection.

    Errors are printed rather than raised so a single failed insert does not
    stop the scrape. Narrowed from a bare `except:` so KeyboardInterrupt and
    SystemExit still propagate.
    """
    try:
        # NOTE(review): `insert` is deprecated in pymongo 3+; `insert_one`
        # is the modern equivalent — left as-is to match the installed client.
        if db["jobs_data"].insert(result):
            print("save result success")
    except Exception as e:
        print("save mongo error", e)


def getData():
    """Return a pymongo cursor over every document in the `april1` collection."""
    query = {}
    fields = {'_id': True}  # projection spec (defined but not applied to the find call)
    return db["april1"].find(query)


def prase_item(item):
    """Fetch a job's detail page, extract the description text, update Mongo.

    Args:
        item: document dict carrying a "job_detail" URL; the scraped text is
            stored under "job_des" and written back keyed on that URL.
    """
    url = item["job_detail"]
    browser.get(url)
    soup = BeautifulSoup(browser.page_source, 'lxml')
    blocks = soup.find_all('dd', {'class': 'job_bt'})
    if not blocks:
        # Detail page failed to load or changed layout — skip instead of
        # crashing with IndexError as the original did.
        print("no job description found for", url)
        return
    # `.string` is None for <p> tags with nested markup; those are skipped,
    # matching the original behavior.
    paragraphs = [p.string for p in blocks[0].select("p") if p.string]
    item["job_des"] = "".join(paragraphs)
    print(item)
    updateToMongo(item, url)


def updateToMongo(item, url):
    """Write the scraped description back onto the document matching `url`."""
    update_doc = {"$set": {"job_des": item["job_des"]}}
    try:
        db["april1"].update({"job_detail": url}, update_doc)
        print("update success")
    except Exception as err:
        print(err)
        print("update error")


def main():
    """Scrape page 1 via search(), then walk every remaining result page."""
    total_pages = search()
    page = 2
    while page <= total_pages:
        next_page(page)
        page += 1






# Script entry point: run the full scrape when executed directly.
if __name__ == '__main__':
    main()