# -*- coding: utf-8 -*-
"""
Created on Wed Aug  3 17:54:28 2016

@author: XT-YY

download the fund data daily
"""

import json
import os
import time
import urllib
import urllib.request

import pandas as pd
from splinter import Browser

# Trading-calendar file: one YYYYMMDD date string per line, in chronological order.
trading_date_file = r"X:\data\tradingDate\tradingDate.csv"
# Load the single column as a Series of date strings.  Select the column
# explicitly instead of read_csv(squeeze=True): the `squeeze` keyword was
# deprecated in pandas 1.4 and removed in 2.0, while column selection
# produces the identical Series on every pandas version.
tdate = pd.read_csv(trading_date_file, names=['date'], dtype=str)['date']
# Directory prefix for the per-fund CSV output files (current dir by default).
dst_path = ""

def getNextTradeDate(date, calendar=None):
    """Return the trading date immediately after *date*.

    Parameters
    ----------
    date : str
        A YYYYMMDD date string that must be present in the calendar.
    calendar : pandas.Series, optional
        Series of trading-date strings with a contiguous integer index.
        Defaults to the module-level ``tdate`` calendar, so existing
        single-argument callers are unaffected.

    Raises
    ------
    IndexError
        If *date* is not found in the calendar.
    """
    dates = tdate if calendar is None else calendar
    # Locate the positional index of *date*, then step one entry forward.
    ind = dates[dates == date].index[0]
    return dates[ind + 1]


def getLastTradeDate(date, calendar=None):
    """Return the trading date immediately before *date*.

    Parameters
    ----------
    date : str
        A YYYYMMDD date string that must be present in the calendar.
    calendar : pandas.Series, optional
        Series of trading-date strings with a contiguous integer index.
        Defaults to the module-level ``tdate`` calendar, so existing
        single-argument callers are unaffected.

    Raises
    ------
    IndexError
        If *date* is not found in the calendar.
    """
    dates = tdate if calendar is None else calendar
    # Locate the positional index of *date*, then step one entry back.
    ind = dates[dates == date].index[0]
    return dates[ind - 1]


if __name__ == "__main__":
    curdate = time.strftime("%Y%m%d")
    # NOTE(review): the original left this debug override ENABLED, so the
    # "daily" job always re-downloaded 2016-04-25.  Kept commented out for
    # manual backfills of a specific date.
    # curdate = "20160425"
    sdate = curdate
    edate = curdate
    # Intended CSV column layout (documentation only -- not used below).
    col_name = ["Date", "Open", "Close", "Change", "PCT_Change", "Low", "High", 
                "Volume", "Amount", "TurnOver"]

    # --- Exchange quotes from Sohu -----------------------------------------
    codeList = ['511880', '511990', '204001']
    for code in codeList:
        url = ("http://q.stock.sohu.com/hisHq?code=cn_" + code + "&start=" 
              + sdate + "&end=" + edate + "&stat=1&order=D&period=d")
        # Sohu serves GBK-encoded JSON; decode before parsing.  The
        # json.loads(encoding=...) argument was removed in Python 3.9, and
        # the text is already a str here anyway.
        with urllib.request.urlopen(url) as response:
            text = response.read().decode("gbk")
        data = json.loads(text)
        time.sleep(1)
        # BUG FIX: the original re-parsed the SAME stale response on every
        # retry, so once entered this loop could never terminate.  Re-download
        # the URL each time so updated data can actually arrive.
        while not data:
            print("The data in sohu website has not been updated, wait for 30 minutes")
            time.sleep(1800)
            with urllib.request.urlopen(url) as response:
                text = response.read().decode("gbk")
            data = json.loads(text)
        data = data[0]['hq'][0]
        if code == "511880":
            # 511880 also needs net-value fields from ifeng; its CSV row is
            # written in the second section after both sources are merged.
            dataYH = data
            continue
        df = pd.read_csv(dst_path + code + ".csv", dtype=str, encoding="gbk")
        df.loc[len(df)] = [code, data[0], data[1], data[6], data[5], data[2],
                           data[3], data[4], data[7], data[8]]
        df.drop_duplicates(inplace=True)
        df.sort_values(by="Date", ascending=True, inplace=True)
        df.to_csv(dst_path + code + ".csv", index=False)

    # --- Net value data from ifeng -----------------------------------------
    # ifeng quirk: the start date must be one trading day earlier than the
    # requested range, otherwise the target day is missing from the table.
    sdate = getLastTradeDate(curdate)
    code = "511880"
    url = ("http://app.finance.ifeng.com/data/fund/jjjz.php?symbol=" + code +
            "&begin_day=" + sdate + "&end_day=" + edate)
    browser = Browser('phantomjs', user_agent="Mozilla/5.0 (Windows NT 6.1; WOW64) \
    AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36")
    try:
        browser.driver.maximize_window()
        browser.visit(url)
        time.sleep(3)
        xpath = '/html/body/div[3]/div/div[3]/div[3]/div[2]/table/tbody'
        table = browser.find_by_xpath(xpath).first.text.replace("  ", " ").split('\n')
        while len(table) < 2:
            print("The data in ifeng website has not been updated, wait for 30 minutes")
            time.sleep(1800)
            # BUG FIX: reload the page before re-reading the table; the
            # original inspected the same stale DOM forever.
            browser.visit(url)
            time.sleep(3)
            table = browser.find_by_xpath(xpath).first.text.replace("  ", " ").split('\n')
    finally:
        # Always shut down PhantomJS, even if scraping raises (the original
        # leaked the browser process on any error).
        browser.quit()
    # Row layout: first the quote fields fetched from Sohu above, then the
    # net-value fields appended from the ifeng table.
    dataExtend = table[1].split(' ')
    dataYH.extend(dataExtend)
    data = dataYH
    df = pd.read_csv(dst_path + code + ".csv", dtype=str, encoding="gbk")
    df.loc[len(df)] = [code, data[0], data[1], data[6], data[5], data[2],
                       data[3], data[4], data[7], data[8], data[12], data[15]]
    df.drop_duplicates(inplace=True)
    df.sort_values(by="Date", ascending=True, inplace=True)
    df.to_csv(dst_path + code + ".csv", index=False)
    
    
    
    
    
    
    


