# -*- coding: utf-8 -*-
"""
Created on Thu Jan 14 10:00:33 2021

@author: Jing
"""
import datetime
import json
from collections import Counter

import numpy as np
import pandas as pd
import requests

from Crawler.load_data import load_dd

# Crawler for simuwang.com (private fund ranking site).
def cumulate_date(fund_id):
    """Fetch the cumulative-return (NAV trend) series of one fund from simuwang.

    Parameters
    ----------
    fund_id : str
        Site-internal fund identifier, e.g. ``'HF000010XY'``.

    Returns
    -------
    pandas.DataFrame or None
        Columns ``['date', 'acc_return', 'prodname']`` where ``date`` is a
        ``'YYYY-MM-DD'`` string and ``acc_return`` the cumulative return.
        Returns ``None`` when the request or response parsing fails (the
        usual cause is an expired session cookie).
    """
    url = 'https://ppwapi.simuwang.com/chart/fundNavTrend'
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36'
        ,
        'cookie': 'guest_id=1567481813; certification=1; qualified_investor=1; evaluation_result=2; focus-certification-pop=-1; Hm_lvt_c3f6328a1a952e922e996c667234cdae=1612842306,1612848277,1612862782,1613610632; rz_utm_source=10009; ss_utm_campaign=SEM-pinpai; ss_utm_sign=360-2; ss_utm_platform=pc; ss_utm_term=%E7%A7%81%E5%8B%9F%E6%8E%92%E6%8E%92%E7%BD%91; ss_utm_content=%E5%93%81%E7%89%8C%E6%A0%B8%E5%BF%83%E8%AF%8D; sensorsdata2015jssdkcross=%7B%22distinct_id%22%3A%221078012%22%2C%22first_id%22%3A%221078012%22%2C%22props%22%3A%7B%22%24latest_traffic_source_type%22%3A%22%E4%BB%98%E8%B4%B9%E5%B9%BF%E5%91%8A%E6%B5%81%E9%87%8F%22%2C%22%24latest_search_keyword%22%3A%22%E7%A7%81%E5%8B%9F%E6%8E%92%E6%8E%92%E7%BD%91%22%2C%22%24latest_referrer%22%3A%22https%3A%2F%2Fwww.so.com%2Fs%3Fie%3Dutf-8%26src%3Dhao_360so_a1111_history_b_cube%26shb%3D1%26hsid%3D1fdf49e0a87d234a%26q%3D%25E7%25A7%2581%25E5%258B%259F%25E6%258E%2592%25E6%258E%2592%25E7%25BD%2591%22%2C%22%24latest_utm_source%22%3A%2210009%22%2C%22%24latest_utm_medium%22%3A%22cpc%22%2C%22%24latest_utm_campaign%22%3A%22SEM-pinpai%22%2C%22_latest_utm_sign%22%3A%22360-2%22%2C%22_latest_utm_platform%22%3A%22pc%22%2C%22%24latest_utm_content%22%3A%22%E5%93%81%E7%89%8C%E6%A0%B8%E5%BF%83%E8%AF%8D%22%2C%22%24latest_utm_term%22%3A%22%E7%A7%81%E5%8B%9F%E6%8E%92%E6%8E%92%E7%BD%91%22%7D%2C%22%24device_id%22%3A%22176dfd52fa94b8-08b066612d3caf-3e604809-2073600-176dfd52faaab0%22%7D; http_tK_cache=a1e9e08eeef7134016f402070829c8b95f8f117e; cur_ck_time=1613610528; ck_request_key=mPbacNetQ5kF0Aa9mZcJ2S9%2Fke0CI8TWrWE6QLu6IyI%3D; passport=1078012%09u5661007396912%09BgcABlBUXwUBXFkCDAhQUwENVQEAAAEGBAIEUVFWAFQ%3D53a835139a; smppw_tz_auth=1; Hm_lpvt_c3f6328a1a952e922e996c667234cdae=1613611086'
    }
    # Request payload the endpoint expects.
    data = {
        'fund_id': fund_id,
        'index_type': 1,
        'period': 25,   # months back; the site seems to accept only multiples of 12 — anything else returns the full history
        'rz_type': 7,   # sampling frequency: 7 = weekly, 8 = monthly (6 is NOT daily)
        'nav_flag': 1,  # NAV type: 1 = dividends reinvested, 2 = dividends paid out
        'muid': 1078012,
        'USER_ID': 1078012}

    try:
        # A bounded timeout so a dead endpoint cannot hang the crawler forever.
        resp = requests.post(url, headers=headers, data=data, timeout=30)
        payload = resp.json()
        dates = list(payload['data']['categories'])
        profit_value = [point['value'] for point in payload['data']['data'][0]]

        res = pd.DataFrame({'date': dates,
                            'acc_return': profit_value})
        res['prodname'] = payload['data']['title'][0]
        return res
    except (requests.RequestException, ValueError, KeyError, IndexError, TypeError):
        # Narrowed from a bare ``except:``; the usual cause is an expired cookie.
        # ValueError also covers json.JSONDecodeError from ``resp.json()``.
        print('failed,try_update_cookie')


# Back-solve a NAV series from cumulative returns.
def inverse_get_netnav(start_nav, data):
    """Turn a cumulative-return series into a net-asset-value (NAV) series.

    Parameters
    ----------
    start_nav : float
        NAV at the start of the series; each NAV is ``(acc_return + 1) * start_nav``.
    data : pandas.DataFrame
        Output of ``cumulate_date``: columns ``['date', 'acc_return', 'prodname']``
        with ``date`` as ``'YYYY-MM-DD'`` strings. Not mutated.

    Returns
    -------
    pandas.DataFrame
        Columns ``['date', 'netvalue', 'prodname']`` with ``date`` as an
        ``int`` in ``YYYYMMDD`` form. Rows whose next calendar day also
        appears in the data are dropped (dedupes e.g. Fri/Sat pairs,
        keeping the later day).
    """
    data = data.fillna(0)
    data['acc_return'] = (data['acc_return'] + 1) * start_nav
    data = data.rename(columns={'acc_return': 'netvalue'})

    # Drop every row whose next calendar day is also present.
    # Original code rebuilt ``set(data['date'])`` on every iteration while
    # dropping rows mid-loop (O(n^2)); a Counter decremented as rows are
    # marked reproduces that shrinking-membership semantics in one pass.
    remaining = Counter(data['date'])
    drop_labels = []
    for label, day in zip(data.index, data['date']):
        parsed = datetime.datetime.strptime(day, "%Y-%m-%d")
        next_day = (parsed + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        if remaining[next_day] > 0:
            drop_labels.append(label)
            remaining[day] -= 1  # this date no longer counts as present
            print('del')
    if drop_labels:
        data = data.drop(index=drop_labels)

    # 'YYYY-MM-DD' string -> YYYYMMDD int, as the downstream loader expects.
    data['date'] = data['date'].apply(lambda x: int(x.replace('-', '')))
    data = data.reset_index()
    data = data[['date', 'netvalue', 'prodname']]

    return data


if __name__ == '__main__':

    target_fund = 'HF000010XY'
    # Crawl the cumulative-return series from the site.
    raw = cumulate_date(target_fund)
    initial_nav = 1
    # Back-solve the NAV series from the cumulative returns.
    nav_table = inverse_get_netnav(initial_nav, raw)
    # Persist to the database.
    result = load_dd(nav_table)
    print('全部数据完成')
