# -*- coding: utf-8 -*-
"""
Created on Fri Aug  5 17:52:51 2016

@author: XT-YY

get fund data daily
"""


# Standard library
import os
import sys
import time
import urllib, json
import urllib.request  # ``import urllib`` alone does not expose the request submodule

# Third-party
import pandas as pd
from splinter import Browser
from WindPy import w

# Trading calendar: one YYYYMMDD date string per line, no header row.
trading_date_file = r"X:\data\tradingDate\tradingDate.csv"
# Loaded once at import time as a Series of date strings.
# NOTE(review): ``squeeze=True`` was removed from read_csv in pandas 2.0 —
# switch to ``pd.read_csv(...).squeeze("columns")`` if upgrading pandas.
tdate = pd.read_csv(trading_date_file, names=['date'], squeeze=True, dtype=str)

# Destination directory for the per-code CSV output files appended to daily.
dst_path = r"D:\data\FundData"


def getLastTradeDate(date):
    """Return the trading date immediately preceding *date*.

    *date* is a ``YYYYMMDD`` string that must exist in the module-level
    ``tdate`` calendar; raises IndexError otherwise.
    """
    matches = tdate[tdate == date]
    position = matches.index[0]
    return tdate[position - 1]

def dateTransform(date):
    """Convert a ``YYYYMMDD`` date string into ``YYYY-MM-DD`` form."""
    return "-".join((date[0:4], date[4:6], date[6:8]))

def is_trading_date(date):
    """Return True if *date* (``YYYYMMDD`` string) is a trading day.

    Re-reads the calendar CSV on every call (original behavior, kept so
    same-day updates to the file are picked up).  The original fell off
    the end and returned ``None`` for non-trading days; callers only
    truth-test the result, so returning an explicit bool is backward
    compatible.  The local variable is renamed so it no longer shadows
    the module-level ``tdate`` Series.
    """
    calendar = pd.read_csv(trading_date_file, names=['date'])
    return date in calendar['date'].values.astype(str)

def get_wind():
    """Return the shared WindPy session, starting it on first use."""
    if w.isconnected():
        return w
    w.start()
    return w


def main():
    """Collect one day's fund quotes/net values and append them to CSVs.

    Exits with status 0 when today is not a trading day.  Each data
    source writes a row into ``dict_append`` keyed by Wind-style code
    (e.g. ``'511880.SH'``); rows are finally appended to per-code CSV
    files under ``dst_path``.
    """
    curdate = time.strftime("%Y%m%d")
    if not is_trading_date(curdate):
        print(curdate + " is not trading day")
        sys.exit(0)

    sdate = curdate
    edate = curdate
    dict_append = {}

    _fetch_wind_quotes(sdate, edate, dict_append)
    _fetch_sohu_quote(sdate, edate, dict_append)
    _fetch_eastmoney_netvalue(sdate, edate, dict_append)
    _append_rows(dict_append)


def _fetch_wind_quotes(sdate, edate, dict_append):
    """Pull OHLC/volume/amount for the money-market ETFs from Wind."""
    code_list = ['511880.SH', '511990.SH']
    while code_list:
        code = code_list[0]
        d = get_wind().wsd(code, 'open, high, low, close, volume, amt',
                           sdate, edate, 'Fill=Previous')
        if d.ErrorCode != 0:
            # Back off before retrying; the original spun in a hot loop
            # on a persistent Wind error.
            time.sleep(3)
            continue
        code_list.remove(code)
        row = [code, d.Times[0].strftime("%Y-%m-%d"),
               d.Data[0][0], d.Data[1][0], d.Data[2][0], d.Data[3][0],
               d.Data[4][0], d.Data[5][0]]
        dict_append[code] = [str(x) for x in row]
        time.sleep(3)


def _download_sohu_json(url):
    """GET *url*, decode the GBK body, and parse it as JSON."""
    # Close the response when done (the original leaked it).
    with urllib.request.urlopen(url) as response:
        time.sleep(1)
        text = response.read().decode("gbk")
    # json.loads(..., encoding=...) was removed in Python 3.9; the body is
    # already decoded above, so plain loads() is correct.
    return json.loads(text)


def _fetch_sohu_quote(sdate, edate, dict_append):
    """Pull the repo fund (204001) daily quote from Sohu, polling half-hourly."""
    code = "204001"
    url = ("http://q.stock.sohu.com/hisHq?code=cn_" + code + "&start="
           + sdate + "&end=" + edate + "&stat=1&order=D&period=d")
    data = _download_sohu_json(url)
    while not data:
        time.sleep(1800)  # data often lags publication; poll every 30 min
        data = _download_sohu_json(url)

    d = data[0]['hq'][0]
    code = code + ".SH"
    # Sohu hq row fields: Date, Open, Close, Change, PCT_Change, Low, High,
    # Volume, Amount, TurnOver — reordered here to match the Wind rows:
    # code, Date, Open, High, Low, Close, Volume, Amount.
    dict_append[code] = [code, d[0], d[1], d[6], d[5], d[2], d[7], d[8]]


def _scrape_netvalue_row(browser, url):
    """Load the Eastmoney net-value page and return the first table row's cells."""
    browser.visit(url)
    time.sleep(3)
    xpath = '/html/body/table/tbody'
    table = browser.find_by_xpath(xpath).first.text.split("\n")
    return table[0].split(' ')


def _fetch_eastmoney_netvalue(sdate, edate, dict_append):
    """Scrape 511880's net values from Eastmoney and append to its Wind row."""
    code = "511880"
    url = ("http://fund.eastmoney.com/f10/F10DataApi.aspx?type=lsjz&code=" + code
           + "&page=1&per=10000&sdate=" + dateTransform(sdate)
           + "&edate=" + dateTransform(edate))
    browser = Browser('phantomjs',
                      user_agent="Mozilla/5.0 (Windows NT 6.1; WOW64) "
                                 "    AppleWebKit/537.36 (KHTML, like Gecko) "
                                 "Chrome/49.0.2623.112 Safari/537.36")
    browser.driver.maximize_window()
    data = _scrape_netvalue_row(browser, url)
    while len(data) < 2:
        time.sleep(1800)  # page not yet updated; poll every 30 min
        data = _scrape_netvalue_row(browser, url)
    # Cells 1:3 of the row are appended to the market-data row that the
    # Wind step stored under this key (presumably unit NAV and accumulated
    # NAV — TODO confirm against the page layout).
    dict_append[code + ".SH"].extend(data[1:3])


def _append_rows(dict_append):
    """Append each collected row to its per-code CSV, de-duplicating rows."""
    for k, v in dict_append.items():
        path = dst_path + os.sep + k + ".csv"
        df = pd.read_csv(path, dtype=str, encoding="gbk")
        df.loc[len(df)] = v
        df.drop_duplicates(inplace=True)
        df.to_csv(path, index=False)
        

if __name__ == "__main__":
    try:
        print("start to run the program getFundDataDailyV2")
        main()
    except Exception:
        # The original used a bare ``except:``, which also swallowed the
        # SystemExit(0) raised on non-trading days and then wrongly printed
        # the error message.  Catch Exception only, and show the traceback
        # so failures are diagnosable.
        import traceback
        traceback.print_exc()
        print("some errors happen in getFundDataDailyV2")
    finally:
        # Keep the console window open so the operator can read the output.
        input('Press Enter to exit...')

        
    
    
    
        
    

    

