#!/usr/bin/python
# -*- coding: UTF-8 -*-


from selenium import webdriver;
from selenium.common.exceptions import TimeoutException
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import re;
from pyquery import PyQuery as pq;
from lxml import etree
import pymongo;
from  config import *
from douban.result import get_active_user_url
# Module-level crawler state: Mongo connection, Chrome driver, shared wait.
# browser=webdriver.PhantomJS(service_args=SERVICE_ARGS);
client = pymongo.MongoClient(MONGO_URL);  # MONGO_URL comes from config
db = client["douban"];  # every collection used below lives in the "douban" db
from bs4 import BeautifulSoup
import time
import requests
import pandas as pd
import requests  # NOTE(review): duplicate import of requests
from utils.Yundama import YDMHttp
from selenium.webdriver.common.proxy import Proxy
browser=webdriver.Chrome()  # single shared Chrome instance for the whole run
# browser=webdriver.PhantomJS(service_args=SERVICE_ARGS);
browser.set_window_size(1400,900)
wait=WebDriverWait(browser, 10);  # shared explicit wait, 10-second timeout






def get_yzmcode():
    """Recognize the captcha saved locally as ``yzm.png`` via Yundama.

    Logs in to the Yundama HTTP service, submits the image, and returns
    the recognized captcha text.
    """
    decoder = YDMHttp()
    decoder.login()
    code_id, text = decoder.decode("yzm.png")
    print(code_id, text)
    return text


def login():
    """Log in to douban: fill the account form, solve the captcha, submit."""
    profile_url = "https://www.douban.com/people/170923047/"
    browser.get(profile_url)

    email_box = wait.until(
        EC.presence_of_element_located((By.CSS_SELECTOR, "#email")))
    email_box.send_keys("18086107575")

    pwd_box = wait.until(
        EC.presence_of_element_located((By.CSS_SELECTOR, "#password")))
    pwd_box.send_keys("msmw441432")

    captcha_box = wait.until(
        EC.presence_of_element_located((By.CSS_SELECTOR, "#captcha_field")))

    # Pull the captcha image URL out of the rendered page, download it,
    # and fill in whatever the decoding service recognizes.
    soup = BeautifulSoup(browser.page_source, 'lxml')
    captcha_src = soup.find(id="captcha_image").attrs["src"]
    downloadimg(captcha_src)
    captcha_box.send_keys(get_yzmcode())

    submit = wait.until(EC.element_to_be_clickable(
        (By.CSS_SELECTOR, '#lzform > div:nth-child(9) > input')))
    submit.click()


def downloadimg(url):
    """Download the captcha image at *url* and save it as ``yzm.png``.

    Raises requests.HTTPError on a non-2xx response and requests.Timeout
    after 10 seconds, so an HTML error page is never silently written
    over the captcha file (which would break the decoding step).
    """
    res = requests.get(url, timeout=10)
    res.raise_for_status()
    with open("yzm.png", "wb") as f:
        f.write(res.content)


def next_page(page):
    """Click through to the next friend-list page, then scrape its users.

    *page* is the page number expected to become current after the click;
    we wait until the paginator highlights it before re-reading the DOM.
    """
    print("currentpage", page)
    # Renamed from ``next`` to avoid shadowing the builtin.
    next_link = wait.until(EC.element_to_be_clickable(
        (By.CSS_SELECTOR, '#content > div > div.article > div.paginator > span.next > a')))
    next_link.click()
    wait.until(EC.text_to_be_present_in_element(
        (By.CSS_SELECTOR, '#content > div > div.article > div.paginator > span.thispage'),
        str(page)))
    get_all_user()


def get_all_user():
    """Scrape every user card (``.obu``) on the current page into Mongo."""
    # Block until the article container exists before reading the page source.
    wait.until(EC.presence_of_element_located(
        (By.CSS_SELECTOR, "#content > div > div.article")))
    soup = BeautifulSoup(browser.page_source, 'lxml')
    for card in soup.find_all(class_="obu"):
        record = {
            "user_name": card.select("dd a")[0].string,
            "user_url": card.select(".nbg")[0].attrs["href"],
        }
        print(record)
        saveToMongo(record)


def saveToMongo(result):
    """Insert *result* into the ``active_user`` collection.

    Errors are reported but swallowed so one bad document does not stop
    the crawl (best-effort semantics, as in the original).
    """
    try:
        # Collection.insert() is deprecated in pymongo 3.x; insert_one()
        # is the supported equivalent and its result is truthy on success.
        if db["active_user"].insert_one(result):
            print("save result success")
    except Exception as e:
        # Was a bare ``except:``; keep the best-effort behavior but say why
        # the save failed instead of hiding the cause entirely.
        print("save mongo error", e)


def get_data():
    """Return a cursor over all documents in the ``user`` collection.

    The Mongo ``_id`` field is projected away via the projection dict.
    """
    return db["user"].find({}, {'_id': False})


def get_last_stauts(url, count):
    """Visit a user's page, collect the text of their recent statuses,
    and save them to Mongo as an ``active_user`` document.

    url   -- the user's douban profile URL (statuses live under #statuses)
    count -- progress counter, printed for monitoring only
    """
    browser.get(url)
    list_tags = []
    try:
        wait.until(
            EC.presence_of_element_located((By.CSS_SELECTOR, "#statuses"))
        )
        doc = pq(browser.page_source)
        for item in doc(".stream-items .status-item").items():
            content = item.find(".text").text()
            # Skip missing/empty status text (covers both None and "").
            if not content:
                continue
            list_tags.append(content.strip())
    except Exception as e:
        print(e)
        print("get status error")
    print("current", count)
    print(url, list_tags)
    # BUG FIX: the original wrote ``if list_tags.count==0``, comparing the
    # bound *method* to 0 — never true — so empty results were still saved.
    if not list_tags:
        return
    saveToMongo({
        "url": url,
        "tags": list_tags,
    })


def updateToMongo(url, time):
    """Set ``update_time`` on the ``user`` document whose user_url is *url*.

    NOTE: the *time* parameter shadows the module-level ``time`` import;
    the name is kept for backward compatibility with existing callers.
    """
    try:
        # Collection.update() is deprecated in pymongo 3.x; update_one()
        # matches the original single-document update semantics.
        db["user"].update_one({"user_url": url}, {"$set": {"update_time": time}})
        print("update success")
    except Exception as e:
        print(e)
        print("update error")


def main():
    """Log in, then harvest the latest statuses of every active user."""
    login()
    for count, url in enumerate(get_active_user_url(), start=1):
        get_last_stauts(url, count)





def getip():
    """Fetch a proxy address from the local proxy-pool service.

    Returns the proxy string on a 200 response, "" otherwise.

    BUG FIX: the original printed the proxy on success but fell through
    and returned None, so callers never actually received a proxy.
    """
    res = requests.get("http://localhost:5000/get")
    if res.status_code == 200:
        print(res.text)
        return res.text
    return ""


# Script entry point: full crawl (login, then per-user status harvesting).
if __name__ == '__main__':
    main()
    # Leftover experiment: dump session cookies after a manual login.
    # login()
    # cookies={}
    # for cookie in browser.get_cookies():
    #     cookies[cookie["name"]] = cookie["value"]
    # print(cookie)


