# -*- coding: utf-8 -*-
import requests
import time
import random
import socket
import httplib
from bs4 import BeautifulSoup
import mysql.connector;
import datetime;
import json;
import os;
from fund_data import Fund
from fund_data import FundData
def query_all_fund():
    """Fetch funds whose history is stale (update_end older than yesterday).

    Returns:
        A list of Fund objects (possibly empty). On a database error the
        list gathered so far is returned instead of None, so callers can
        always iterate the result.
    """
    fund_list = []
    conn = None  # bind before connect() so finally/except can't hit NameError
    try:
        conn = mysql.connector.connect(
            host='localhost', port='3306', user='root',
            password='1234qwer', database='crm_house', use_unicode=True)
        cursor = conn.cursor()
        # Only funds not refreshed since yesterday; process one per run.
        cursor.execute('select * from fund where update_end<date_sub(curdate(),interval 1 day) limit 0,1')
        for row in cursor.fetchall():
            fund_list.append(Fund(row[0], row[1], row[2]))
    except mysql.connector.Error as e:
        print('Error: {}'.format(e))
    finally:
        # Original closed the connection only on error, leaking it on success.
        if conn is not None:
            conn.close()
    return fund_list
def getUrl(code, startDate, endDate):
    """Build the fund123.cn history net-value table URL for a fund and date range."""
    base = "http://fund.fund123.cn/HistoryNV/NetValueHistoryTable.aspx"
    return base + "?code=" + code + "&startdate=" + startDate + "&enddate=" + endDate

def get_content(url, data=None):
    """GET *url* and return the response body decoded as UTF-8 text.

    Retries forever with randomized back-off on network failures.
    `data` is accepted for interface compatibility but unused.
    """
    header = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, sdch',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.235'
    }
    timeout = random.choice(range(80, 180))
    while True:
        try:
            rep = requests.get(url, headers=header, timeout=timeout)
            rep.encoding = 'utf-8'
            break
        # BUG FIX: requests wraps socket errors in its own exception classes,
        # so the original `except socket.timeout` / `except socket.error`
        # branches could never fire for requests.get().
        except requests.exceptions.Timeout as e:
            print('3:', e)
            time.sleep(random.choice(range(8, 15)))
        except requests.exceptions.ConnectionError as e:
            print('4:', e)
            time.sleep(random.choice(range(20, 60)))
        # BUG FIX: the original `except httplib.BAD_REQUEST` was invalid —
        # BAD_REQUEST is the int 400, not an exception class, and would have
        # raised TypeError the moment that clause was reached.
        except requests.exceptions.RequestException as e:
            print('5:', e)
            time.sleep(random.choice(range(30, 80)))

    return rep.text

def get_data(html_text, id):
    """Parse the net-value history table out of *html_text*.

    Args:
        html_text: HTML of the NetValueHistoryTable page.
        id: fund code, stored into every row.

    Returns:
        A list of FundData.__dict__ dicts (JSON-serializable), or [] when
        the expected table is absent (layout change / empty history).
    """
    bs = BeautifulSoup(html_text, "html.parser")
    table = bs.find('table', {'id': 'tblContent'})
    if table is None:
        # Original crashed with AttributeError here; fail soft with no rows.
        return []

    def _clean(cell):
        # Strip surrounding whitespace and embedded CRLF from a table cell.
        return cell.text.strip().replace("\r\n", "")

    fund_datas = []
    for row in table.find_all('tr', {'class': 'tr'}):
        cells = row.find_all('td')
        # Page column order: [0]=date, [1..4]=values. FundData takes the four
        # values first, then the fund id, then the date.
        fund_datas.append(FundData(
            _clean(cells[1]), _clean(cells[2]),
            _clean(cells[3]), _clean(cells[4]),
            id, _clean(cells[0])).__dict__)  # __dict__ makes the row JSON-ready

    return fund_datas


# Refresh the history of every stale fund and dump each result to a JSON file.
# `or []` guards against query_all_fund() returning None on a DB error.
funds = query_all_fund() or []

for fund in funds:
    # Date window: from the last recorded update through today.
    update_start = datetime.datetime.strftime(fund.update_end, '%Y-%m-%d')
    update_end = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d')
    url = getUrl(fund.id, update_start, update_end)
    print(url)
    html = get_content(url)
    rows = get_data(html, fund.id)
    print(rows)
    file_name = update_start + '-' + update_end + '-' + fund.id + '.json'
    # Explicit UTF-8 so the output does not depend on the locale encoding;
    # ensure_ascii=False keeps Chinese text readable in the file.
    with open(file_name, 'w', encoding='utf-8') as json_file:
        json_file.write(json.dumps(rows, ensure_ascii=False))