import os
import pandas as pd
import numpy as np
import shutil
"""进行数据处理的程序"""
def lam_seperate_code(stkcd):
    """Return the first six characters of *stkcd* rendered as a string
    (the 6-digit stock code prefix)."""
    text = str(stkcd)
    return text[:6]

def lam_clean_ts(name):
    """Map a delisted stock name to NaN so the row can be dropped later.

    Delisted stocks carry the prefix "退市" (delisted) in their short name.

    :param name: stock short name (the ``zwjc`` column)
    :return: ``np.nan`` for a delisted name, otherwise *name* unchanged
    """
    # startswith replaces the original slice comparison name[0:2] == "退市";
    # the stray debug print of the matched prefix has been removed.
    if name.startswith("退市"):
        return np.nan
    return name
def lam_initialize_state():
    """Return the initial "crawled" state flag for an announcement row.

    Always 1; kept as a function so the initial value lives in one place.
    """
    return 1
def delete_special_stock(normal_stocklist:list,normal_orgId:list):
    """Delete per-stock announcement folders that are not in the keep-list.

    Folders under ./公告文件/基本信息 are named "<6-digit code>,<orgId>".
    Any folder whose name cannot be built from the paired entries of
    *normal_stocklist* and *normal_orgId* is removed recursively.

    :param normal_stocklist: stock codes to keep (int or str; zero-padded to 6)
    :param normal_orgId: orgIds aligned index-by-index with normal_stocklist
    """
    base = os.path.join('.', '公告文件', '基本信息')
    # Build the keep-set once; set membership is O(1) per directory entry,
    # and zip replaces the original range(len(...)) index loop.
    keep = {
        str(code).zfill(6) + ',' + org_id
        for code, org_id in zip(normal_stocklist, normal_orgId)
    }
    for entry in os.listdir(base):
        if entry not in keep:
            # os.path.join fixes the original mixed '/' + '\\' separators,
            # which produced paths that only resolve on Windows.
            shutil.rmtree(os.path.join(base, entry))
def clean_stocklist(path='.\\公告文件\\股票代码.csv'):
    """Filter the raw stock-code list down to listed A-share stocks.

    Drops the B-share / other code ranges [200000, 300000), [400000, 600000)
    and >= 700000, then drops delisted stocks (short name beginning with
    "退市"). The result is written to .\\公告文件\\清理后股票代码.csv and
    returned.

    :param path: csv with at least 'code' (int) and 'zwjc' (name) columns
    :return: the cleaned DataFrame (keeps the 'special' helper column,
        as in the original implementation)
    """
    data = pd.read_csv(path)
    # Drop B shares and other non-A-share code ranges. Combining both bounds
    # with & replaces the original chained indexing data[m1][m2], which
    # depended on pandas re-aligning the second mask and raised a
    # "Boolean Series key will be reindexed" UserWarning.
    data = data.drop(index=data[(data['code'] >= 200000) & (data['code'] < 300000)].index)
    data = data.drop(index=data[(data['code'] >= 400000) & (data['code'] < 600000)].index)
    data = data.drop(index=data[data['code'] >= 700000].index)
    # Drop delisted stocks: mark them NaN in 'special' and dropna, exactly
    # like the original row-wise lam_clean_ts apply, but vectorized.
    delisted = data['zwjc'].astype(str).str.startswith('退市')
    data['special'] = data['zwjc'].where(~delisted)
    data = data.dropna()
    data.to_csv('.\\公告文件\\清理后股票代码.csv')
    return data

def check_crawled(stock):
    """Compare one stock's announcement metadata against the PDFs already
    downloaded, to identify announcements that have not been crawled yet.

    NOTE(review): this function looks unfinished — the merge/export logic
    at the bottom is commented out, nothing is returned, and `uncrawled` /
    `exclude` are never populated.

    :param stock: folder name "<code>,<orgId>"; the first 6 chars are the
        stock code
    """

    announcements=pd.DataFrame()
    uncrawled=[]  # NOTE(review): never populated
    exclude=[]  # NOTE(review): never populated
    if   os.path.exists(".\\公告文件\\pdf全文"+'\\'+str(stock[0:6])):
        # Names of the already-downloaded PDFs, ".pdf" extension stripped.
        crawled_announcements=os.listdir(".\\公告文件\\pdf全文"+'\\'+str(stock[0:6]))
        crawled_announcements=[ann[:-4] for ann in crawled_announcements]
        # Per-year metadata csv files for this stock, concatenated below.
        all_announcements_list=os.listdir(".\\公告文件\\基本信息"+'\\'+str(stock))
        all_announcements=[pd.read_csv(".\\公告文件\\基本信息"+'\\'+str(stock)+'\\'+year) for year in all_announcements_list]
        for df  in all_announcements:
            announcements=pd.concat([announcements,df],axis=0)
        announcements.columns = ['index', 'stkcd', 'year', 'ID', 'title']
        #announcements['isgetted'].apply(lambda x:lam_initialize_state(),axis=1)
        titles=list(announcements.loc[:,'title'])
        ID=list(announcements.loc[:,'ID'])
        # NOTE(review): this builds a 2-row frame whose *column* labels are
        # the integers 0..n-1, so pop(key) with a filename below should
        # raise KeyError unless crawled names match column labels — confirm.
        ID_titles=pd.DataFrame([ID,titles])
        print(len(titles),len(ID),len(ID_titles))
        # print(len(announcements))
        # print(len(titles))
        # print(len(crawled_announcements))
        for key in crawled_announcements:
            ID_titles.pop(key)
        # Make sure the per-stock output folder for uncrawled info exists.
        if not os.path.exists('.\\未爬取公告信息\\'+str(stock[0:6])):
            os.makedirs('.\\未爬取公告信息\\'+str(stock[0:6]))
        # print(len(announcements))
        # print(len(titles))
        IDs=pd.DataFrame([ID_titles]).T  # NOTE(review): wraps a DataFrame in a list — verify this is intended
        #print(IDs)
        # print(titles.head())
        # print(announcements.head())
        # titles.columns=['title','isgetted']
        # print('左表行数：',len(announcements))
        # print('右表行数：',len(titles))
        # data=pd.merge(left=announcements,right=titles,how='inner',on=['title'])
        # print('合并后行数：',len(data))
        # data.dropna(inplace=True)
        # print(len(data))
        # data.to_excel('.\\未爬取公告信息\\'+str(stock[0:6])+'\\'+'未爬取信息.xlsx',index=False)
        # # print(len(announcements))
    # check a single stock
def merge_annoncement(path='.\\公告文件\\基本信息'):
    """
    Merge the per-stock / per-year announcement csv files into batch files.

    Walks every sub-folder of *path*, concatenates the csv files found, and
    writes a batch file to 公告文件/合并公告信息 after every 1000 source
    files.

    Bug fixed: the original silently discarded the rows accumulated after
    the last full batch of 1000 files; they are now flushed as a final
    batch. Paths are built with os.path.join (the original '\\\\' joins only
    worked on Windows), and the output folder is created if missing.

    :param path: root folder containing one sub-folder per stock
    :return: None
    """
    out_dir = os.path.join('.', '公告文件', '合并公告信息')
    os.makedirs(out_dir, exist_ok=True)

    def _flush(frame, batch_no):
        # Normalize column names, derive the truncated helper columns, and
        # write one batch file.
        frame.columns = ['index', 'stkcd', 'year', 'ID', 'title']
        first6 = lambda v: str(v)[0:6]  # same truncation as lam_seperate_code
        frame['code'] = frame['stkcd'].map(first6)
        frame['year'] = frame['year'].map(first6)
        frame['ID'] = frame['ID'].map(first6)
        frame.to_csv(os.path.join(out_dir, '全部公告汇总{}.csv'.format(batch_no)),
                     index=False)

    dirs = os.listdir(path)
    print(dirs)
    data = pd.DataFrame()
    num = 0
    for sub in dirs:
        for file in os.listdir(os.path.join(path, sub)):
            data = pd.concat([data, pd.read_csv(os.path.join(path, sub, file))], axis=0)
            num += 1
            print(num)
            if num % 1000 == 0:
                _flush(data, num // 1000)
                data = pd.DataFrame()
    # Flush whatever is left after the last full batch (the fixed bug).
    if not data.empty:
        _flush(data, num // 1000 + 1)
if __name__=="__main__":
    # Print the cleaned stocks whose announcements have not been crawled yet,
    # i.e. codes with no sub-folder under 公告文件/基本信息.
    # (The unused read of 股票代码.csv into all_stocklist was removed.)
    cleaned_stocklist = pd.read_csv(r'.\公告文件\清理后股票代码.csv')
    # The first 6 chars of each folder name are the stock code; a set gives
    # O(1) membership tests instead of scanning a list per stock.
    getted = {int(dir_name[0:6]) for dir_name in os.listdir("./公告文件/基本信息")}
    print(cleaned_stocklist)
    for stock in cleaned_stocklist['code']:
        if stock not in getted:
            print(stock)