# coding=UTF-8
import os
from bs4 import BeautifulSoup

import requests
from utils import read_config

# Legacy Eastmoney JSONP endpoints (JS.aspx): url1 returns the first 100 rows,
# url2 up to 40000 rows of A-share quote data; both embed the rows as a JS
# array inside 'var quote_123={rank:[...],pages:...}'.
url1 = "http://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C.2&sty=FCOIATA&sortType=C&sortRule=-1" \
       "&page=1&pageSize=100&js=var%20quote_123={rank:[(x)],pages:(pc)}&token=7bc05d0d4c3c22ef9fca8c2a912d779c&jsName=quote_123" \
       "&_g=0.9922115724443149"
url2 = "http://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx?type=CT&cmd=C._A&sty=FCOIATA&sortType=C&sortRule=-1resp_content&page=1&pageSize=40000&js=var%20quote_123={rank:[(x)],pages:(pc)}&token=7bc05d0d4c3c22ef9fca8c2a912d779c&jsName=quote_123&_g=0.9922115724443149"
# Newer Eastmoney push2 clist API (jQuery JSONP wrapper, field list f1..f152).
url3 = "http://97.push2.eastmoney.com/api/qt/clist/get?cb=jQuery112406116882438174901_1607073484212&pn=1&pz=20&po=1&np=1&ut=bd1d9ddb04089700cf9c27f6f7426281&fltt=2&invt=2&fid=f3&fs=m:0%20t:6,m:0%20t:13,m:0%20t:80,m:1%20t:2,m:1%20t:23&fields=f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152&_=1607073484226"

# Shenzhen A-shares (market filter m:0; pz=3000 fetches the full page set)
url_sz = 'http://20.push2.eastmoney.com/api/qt/clist/get?cb=jQuery112406622309464068492_1607915985198&pn=1&pz=3000&po=1&np=1&ut=bd1d9ddb04089700cf9c27f6f7426281&fltt=2&invt=2&fid=f3&fs=m:0 t:6,m:0 t:13,m:0 t:80&fields=f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152&_='
bbbb = 'http://20.push2.eastmoney.com/api/qt/clist/get?cb=jQuery112406622309464068492_1607915985198&pn=1&pz=20&po=1&np=1&ut=bd1d9ddb04089700cf9c27f6f7426281&fltt=2&invt=2&fid=f3&fs=m:0 t:6,m:0 t:13,m:0 t:80&fields=f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152&_=1607915986181'
# Shanghai A-shares (market filter m:1)
ssshhhh = 'http://20.push2.eastmoney.com/api/qt/clist/get?cb=jQuery112406622309464068492_1607915985194&pn=1&pz=20&po=1&np=1&ut=bd1d9ddb04089700cf9c27f6f7426281&fltt=2&invt=2&fid=f3&fs=m:1 t:2,m:1 t:23&fields=f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152&_=1607915986263'
url_sh = 'http://20.push2.eastmoney.com/api/qt/clist/get?cb=jQuery112406622309464068492_1607915985194&pn=1&pz=3000&po=1&np=1&ut=bd1d9ddb04089700cf9c27f6f7426281&fltt=2&invt=2&fid=f3&fs=m:1 t:2,m:1 t:23&fields=f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152&_='
aaa = 'http://20.push2.eastmoney.com/api/qt/clist/get?cb=jQuery112406622309464068492_1607915985198&pn=1&pz=20&po=1&np=1&ut=bd1d9ddb04089700cf9c27f6f7426281&fltt=2&invt=2&fid=f3&fs=m:1 t:2,m:1 t:23&fields=f1,f2,f3,f4,f5,f6,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f20,f21,f23,f24,f25,f22,f11,f62,f128,f136,f115,f152&_=1607915986172'
# Browser-like request headers (some endpoints reject requests without a UA)
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:79.0) Gecko/20100101 Firefox/79.0',
}
# Accumulator shared by the fetch functions below: code -> {name, code, exchange}
stock_dict = {}

# 10jqka board-ranking pages: all A-shares, Shanghai A, Shenzhen A
url_all_ = 'http://q.10jqka.com.cn/index/index/board/all/field/zdf/order/desc/page/1/ajax/1/'
url_sh_a_ = 'http://q.10jqka.com.cn/index/index/board/hs/field/zdf/order/desc/page/1/ajax/1/'
url_sz_a_ = 'http://q.10jqka.com.cn/index/index/board/ss/field/zdf/order/desc/page/1/ajax/1/'


# Modified by whm 2020-12-18
def url_shen_shang():
    """Scrape the full A-share stock list from 10jqka (q.10jqka.com.cn).

    Pages through the board-ranking table until an empty page is returned,
    filling the module-level ``stock_dict`` with one entry per stock
    (``{'name', 'code', 'exchange'}``), then writes one dict-repr per line
    to ``<data_path>/common/config/0.股票信息``.

    Exchange flag: '1' = Shanghai (codes 60*/68*), '0' = Shenzhen (00*/30*).
    Unknown code prefixes are logged and recorded with exchange '0'.
    """
    page = 0
    while True:
        page += 1
        print(page)
        # The page number is a path segment of the ranking URL.
        url = ('http://q.10jqka.com.cn/index/index/board/all/field/zdf/'
               'order/desc/page/{}/ajax/1/'.format(page))
        resp = requests.get(url, headers=headers)
        soup = BeautifulSoup(resp.content, 'html.parser')
        # Stop when the response has no table body (past the last page, or
        # an anti-scraping page was served) or the body holds no rows.
        if soup.tbody is None:
            break
        rows = soup.tbody.find_all('tr')
        if not rows:
            break
        for row in rows:
            cells = row.find_all('td')
            code = cells[1].text
            name = cells[2].text
            exchange = '0'
            if code.startswith(('60', '68')):
                exchange = '1'  # Shanghai main board / STAR market
            elif code.startswith(('00', '30')):
                exchange = '0'  # Shenzhen main board / ChiNext
            else:
                # Unexpected code prefix: log the whole row for inspection.
                # (Original called .text on the find_all() ResultSet, which
                # raises AttributeError; the row element is what has .text.)
                print(row.text)
            stock_dict[code] = {
                'name': name,
                'code': code,
                'exchange': exchange,
            }

    out_path = os.path.join(read_config.data_path, 'common', 'config', '0.股票信息')
    # Context manager guarantees the file is closed even if a write fails.
    with open(out_path, mode='w', encoding='utf-8') as stock_file:
        for key in stock_dict:
            stock_file.write(str(stock_dict[key]) + '\n')


def url_1():
    """Fetch the A-share stock list from the legacy Eastmoney JS.aspx API.

    Downloads ``url1`` (first 100 rows) and ``url2`` (full list), merges
    both into the module-level ``stock_dict``, writes one dict-repr per
    line to ``0.股票信息`` next to this file, and regenerates
    ``stock_dict.py`` so the mapping can be imported as a module.
    """
    def _parse_payload(payload):
        """Parse one JSONP payload ('var quote_123={rank:["..."],...}')
        into stock_dict entries keyed by stock code.  Each rank entry is
        a comma-separated string: market,code,name,... where market '1'
        means Shanghai and everything else maps to '0'."""
        rows = payload.split('["')[1].split('"]')[0]
        for row in rows.split('","'):
            fields = row.split(',')
            market, code, name = fields[0], fields[1], fields[2]
            stock_dict[code] = {
                'name': name,
                'code': code,
                'exchange': market if market == '1' else '0',
            }

    base_path = os.path.split(os.path.realpath(__file__))[0]

    # Fetch and parse BEFORE opening the output file, so a failed request
    # no longer truncates an existing stock list on disk.  The two loops
    # in the original were byte-identical; one helper replaces both.
    for url in (url1, url2):
        resp = requests.get(url, headers=headers)
        _parse_payload(resp.content.decode('utf-8'))

    with open(os.path.join(base_path, '0.股票信息'), mode='w', encoding='utf-8') as stock_file:
        for key in stock_dict:
            stock_file.write(str(stock_dict[key]) + '\n')

    # Regenerate stock_dict.py so the collected mapping is importable.
    with open(os.path.join(base_path, 'stock_dict.py'), mode='w', encoding='utf-8') as module_file:
        module_file.write('# coding=UTF-8\n')
        module_file.write('stock_dict = ')
        module_file.write(str(stock_dict))

if __name__ == '__main__':
    # Script entry point: emit a blank separator line, then refresh the
    # stock list by scraping 10jqka (writes the shared config file).
    print()
    url_shen_shang()
