#!/usr/bin/python
# -*- coding: UTF-8 -*-
from selenium import webdriver;
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import re;
from pyquery import PyQuery as pq;
from bs4 import BeautifulSoup
import pymongo;
from  config import *
# browser = webdriver.PhantomJS(service_args=SERVICE_ARGS)
# Module-level scraping context shared by every function below.
browser = webdriver.Chrome()
browser.set_window_size(1400, 900)
wait = WebDriverWait(browser, 10)  # explicit waits time out after 10s
client = pymongo.MongoClient(MONGO_URL)
db = client[MONGO_DB]
# Fixed: `Iterable` moved to collections.abc; the old path raises
# ImportError on Python 3.10+. (Import appears unused — kept to avoid
# breaking anything outside this view.)
from collections.abc import Iterable

def search():
    """Open jd.com, submit the hard-coded search keyword, scrape page 1,
    and return the total number of result pages as text.

    Retries indefinitely on TimeoutException.
    """
    try:
        browser.get("https://www.jd.com/")
        # Renamed from `input`, which shadowed the builtin.
        search_box = wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, "#key"))
        )
        submit = wait.until(
            EC.element_to_be_clickable(
                (By.CSS_SELECTOR, '#search > div > div.form > button')
            )
        )
        search_box.send_keys("汤臣倍健")
        submit.click()

        # Total page count shown in the bottom pagination bar.
        total = wait.until(
            EC.presence_of_element_located(
                (By.CSS_SELECTOR, "#J_bottomPage > span.p-skip > em:nth-child(1) > b")
            )
        )
        getProducts()
        return total.text
    except TimeoutException:
        print("search timeout")
        return search()



def next_page(number):
    """Jump to result page `number` via the page-skip box, wait until the
    pagination widget confirms the page is active, then scrape it.

    Retries on TimeoutException.
    """
    print(number)
    try:
        # Renamed from `input`, which shadowed the builtin.
        page_box = wait.until(
            EC.presence_of_element_located(
                (By.CSS_SELECTOR, "#J_bottomPage > span.p-skip > input")
            )
        )
        go_button = wait.until(
            EC.element_to_be_clickable(
                (By.CSS_SELECTOR, '#J_bottomPage > span.p-skip > a')
            )
        )
        page_box.clear()
        page_box.send_keys(number)
        go_button.click()
        # Confirm the highlighted page number matches before scraping.
        wait.until(
            EC.text_to_be_present_in_element(
                (By.CSS_SELECTOR, '#J_bottomPage > span.p-num > a.curr'),
                str(number),
            )
        )
        getProducts()
    except TimeoutException:
        print("next page timeout")
        next_page(number)

def getProducts():
    """Parse every product card on the current results page and persist
    each one to MongoDB via saveToMongo().
    """
    # Block until at least one product card has rendered.
    wait.until(
        EC.presence_of_element_located((By.CSS_SELECTOR, "#J_goodsList .gl-item"))
    )
    soup = BeautifulSoup(browser.page_source, 'lxml')
    for item in soup.find_all(class_="gl-item"):
        try:
            product = {
                "title": item.select(".p-img a")[0].attrs["title"],
                "img": item.select(".p-img img")[0].attrs["src"],
                "price": item.select(".gl-i-wrap .p-price  strong  i")[0].string,
                "deal": item.select(".p-commit a")[0].string,
                "shop": item.select(".p-shop a")[0].attrs["title"],
            }
        except (IndexError, KeyError):
            # Best-effort: skip cards missing an expected field, instead of
            # the original bare `except` that swallowed every exception.
            continue
        print(product)
        saveToMongo(product)

def saveToMongo(result):
    """Insert one scraped product dict into the configured collection.

    Errors are reported but never raised, so one bad document does not
    abort the crawl.
    """
    try:
        # `Collection.insert` was deprecated in PyMongo 3 and removed in
        # PyMongo 4; `insert_one` is the supported API and its result
        # object is truthy on success.
        if db[MONGO_TABLE].insert_one(result):
            print("save result success")
    except Exception as exc:
        # Include the exception so failures are diagnosable.
        print("save mongo error", exc)

def main():
    """Scrape every result page: page 1 via search(), the rest via next_page().

    Always closes the browser when the crawl finishes or fails.
    """
    try:
        text = search()
        print(text)
        for page in range(2, int(text) + 1):
            next_page(page)
    finally:
        # The original script leaked the Chrome process on exit.
        browser.close()


if __name__ == "__main__":
    # Guard so importing this module does not launch a crawl.
    main()
