import ast
import csv
import json
import queue
import random
import re
import threading
import time

import pymongo
import requests
from my_fake_useragent import UserAgent

from ip_proxy import get_list

# MongoDB connection: quotes are stored in the 'df_cash' collection of the
# local 'pymongo' database (see write_mongo below).
client = pymongo.MongoClient('localhost', 27017)
mydb = client['pymongo']
cash = mydb['df_cash']
ua = UserAgent()
# Plausible eastmoney fund-detail pages (codes 110010..110049) used as
# Referer values to make requests look like in-site navigation.
referer_list = ['http://fund.eastmoney.com/1100{}.html'.format(number)
                for number in range(10, 50)]
# Desktop browser User-Agent strings to pick from.
user_agent_list = [
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.3.4000 Chrome/30.0.1599.101 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36'
]

# NOTE(review): random.choice runs ONCE at import time, so every request in
# this process reuses the same UA/Referer pair — confirm whether per-request
# rotation was intended.
headers = {
    'User-Agent': random.choice(user_agent_list),
    'Referer': random.choice(referer_list)
}


def proxy_header():
    """Fetch a fresh batch of ten proxies via the ip_proxy helper."""
    return get_list(10)

def get_fund_code_list():
    """Download eastmoney's master fund list and return it as a Python list.

    Each entry appears to be a small record whose first element is the fund
    code string — the rest of this file only reads index 0 (TODO confirm the
    other fields against the live endpoint).

    Returns:
        list: parsed fund records.

    Raises:
        requests.HTTPError: if the endpoint answers with an error status.
        ValueError/SyntaxError: if the payload is not a valid literal.
    """
    res = requests.get('http://fund.eastmoney.com/js/fundcode_search.js',
                       timeout=10)
    res.raise_for_status()
    # Strip the JS wrapper "var r = ...;" leaving a bare array literal.
    res_json = res.text.replace("var r = ", "").replace(";", "")
    # ast.literal_eval parses the literal safely; the previous eval() would
    # execute arbitrary code coming from the remote response.
    return ast.literal_eval(res_json)

def get_queue(code_list=None):
    """Build a FIFO queue of fund-code strings.

    Parameters:
        code_list: optional pre-fetched list of fund records (each a
            sequence whose first element is the fund code). When None
            (the default, preserving the old behavior), the list is
            downloaded via get_fund_code_list().

    Returns:
        queue.Queue: one fund code per entry, in list order.
    """
    if code_list is None:
        code_list = get_fund_code_list()
    fund_queue = queue.Queue(len(code_list))
    # Iterate the records directly instead of indexing by range(len(...)).
    for record in code_list:
        fund_queue.put(record[0])
    return fund_queue



# One lock shared by ALL spider threads. The original code created a brand-new
# Lock on every iteration, so no two threads ever contended on the same lock
# and CSV writes were not actually serialized.
_csv_lock = threading.Lock()


def spider():
    """Drain the fund-code queue; fetch each fund's live quote and append it to the CSV.

    For every code, GETs http://fundgz.1234567.com.cn/js/<code>.js, strips the
    jsonpgz(...) JSONP wrapper, and hands the parsed dict to write_csv().
    Codes that fail (network error or unparsable payload) are put back on the
    queue for a later retry; a 404-status response is treated as "no data" and
    dropped.
    """
    fund_queue = get_queue()
    while not fund_queue.empty():
        fund_code = fund_queue.get()
        try:
            print("基金代码：" + fund_code)
            url = "http://fundgz.1234567.com.cn/js/" + str(fund_code) + ".js"
            print("api:" + url)
            # proxy_head = random.choice(proxy_header())
            # Timeout so a stalled connection cannot hang the worker forever.
            res_data = requests.get(url, headers=headers, timeout=10)
            print(res_data.status_code)
            if res_data.status_code != 404:
                # Unwrap "jsonpgz({...});" to bare JSON.
                res = res_data.text.replace("jsonpgz(", "").replace(");", "")
                res_json = json.loads(res)
                write_csv(res_json, _csv_lock)
                print("写入成功")
            else:
                print("暂无数据")
        except (requests.RequestException, ValueError):
            # ValueError covers json.loads on a malformed payload.
            # Re-queue the code so a later pass (or another thread) retries it.
            fund_queue.put(fund_code)
            print("访问失败")

def write_csv(row_dict, mutex_lock):
    """Append one fund quote as a CSV row to ./fund_data.csv.

    Parameters:
        row_dict: mapping whose values (in insertion order) form the row.
            (Renamed from `dict`, which shadowed the builtin.)
        mutex_lock: threading.Lock shared by all writer threads; serializes
            file access so rows from concurrent threads do not interleave.
    """
    # `with` guarantees the lock is released even if the write raises.
    with mutex_lock:
        # newline='' stops the csv module doubling line endings on Windows;
        # mode 'a' appends and creates the file if it does not exist.
        with open('./fund_data.csv', 'a', encoding='utf-8', newline='') as csv_file:
            csv.writer(csv_file).writerow(list(row_dict.values()))

def write_mongo(res_json):
    """Upsert one fund quote into the 'df_cash' collection, keyed by fund code.

    Parameters:
        res_json: dict parsed from the quote API; must contain 'fundcode'.
    """
    # update_one(..., upsert=True) replaces the deprecated Collection.update()
    # (removed in PyMongo 4) and collapses the old find_one-then-insert into a
    # single, race-free operation.
    cash.update_one({'fundcode': res_json['fundcode']},
                    {'$set': res_json},
                    upsert=True)
    print(res_json)
    

if __name__ == '__main__':
    start = time.time()
    threads = []
    for i in range(50):
        # Pass the function itself — the original `target=spider()` CALLED
        # spider in the main thread and gave Thread a None target, so the
        # "50 threads" actually ran one after another.
        t = threading.Thread(target=spider, name='LoopThread' + str(i))
        t.start()
        threads.append(t)
    # Join all workers so the elapsed time covers the real work.
    for t in threads:
        t.join()

    end = time.time()
    print("总时长：", (end - start))


