# -*- coding: utf-8 -*-
"""
Created on Sun Mar  6 21:56:17 2022

@author: 29165
"""
import requests
from bs4 import BeautifulSoup
import re
import pandas as pd
import time
st=time.time()  # record script start time; main() prints elapsed seconds at the end
import numpy as np
def get_url(url_base, headers, pages=9):
    """Download listing pages 1..pages and return their HTML texts.

    Args:
        url_base: URL prefix; the page number is appended to form each URL.
        headers: dict of HTTP headers (e.g. User-Agent) forwarded to requests.
        pages: number of pages to fetch. Defaults to 9, matching the
            original hard-coded range(1, 10).

    Returns:
        list[str]: decoded HTML of each successfully fetched page.
        Pages that fail (network error or non-2xx status) are skipped.
    """
    texts = []
    for page in range(1, pages + 1):
        try:
            # timeout prevents the script from hanging forever on a dead host.
            r = requests.get(url_base + str(page), headers=headers, timeout=10)
            r.raise_for_status()
            r.encoding = 'UTF-8'
            texts.append(r.text)
        except requests.RequestException as exc:
            # Narrow except (the original bare `except:` also swallowed
            # KeyboardInterrupt) and say WHICH page failed and why.
            print(f'error fetching page {page}: {exc}')
    return texts
def analyze_html(page_lists):
    """Parse each fetched HTML page and collect article titles and hrefs.

    Side effect: (re)sets the module-level ``number`` counter, incremented
    once per <ul class="newsList"> encountered; main() prints it.

    Args:
        page_lists: list of HTML strings as returned by get_url().

    Returns:
        tuple[list, list]: (titles, hrefs) taken from the first <a> of every
        <li> inside each <ul class="newsList">.
    """
    global number
    number = 1
    titles = []
    links = []
    # Iterate over the pages actually fetched instead of a hard-coded
    # range(9): the original raised IndexError whenever get_url() had
    # skipped a failed page. Also avoids shadowing the loop variable.
    for page_html in page_lists:
        soup = BeautifulSoup(page_html, 'html.parser')
        for news_list in soup.find_all('ul', attrs={'class': 'newsList'}):
            for item in news_list.find_all('li'):
                anchor = item.find('a')
                titles.append(anchor.get('title'))
                links.append(anchor.get('href'))
            number += 1
    return titles, links
def fill_list(soup, link_final, num):
    """Assemble titles/links into DataFrames, split internal vs. external
    links, and export three Excel files.

    A link is classified as internal when its 9th character is 'c' —
    a position-based heuristic tied to the original site's URL scheme;
    TODO confirm against real link data.

    Args:
        soup: list of title strings.
        link_final: list of href strings, same length as ``soup``.
        num: len(link_final) + 1, as produced by link_fill(); used to
            number the rows 1..len.

    Returns:
        pd.DataFrame: combined table with code/title/link/in/ex columns.
    """
    internal_flags = []
    external_flags = []
    for href in link_final:
        # np.nan marks the non-matching column so dropna() can split the
        # table below. (np.NaN was removed in NumPy 2.0; np.nan is correct.)
        if href[8] == 'c':
            internal_flags.append('TRUE')
            external_flags.append(np.nan)
        else:
            internal_flags.append(np.nan)
            external_flags.append('TRUE')
    df_code = pd.DataFrame(np.arange(1, num), columns=['code'])
    df_title = pd.DataFrame(soup, columns=['title'])
    df_link = pd.DataFrame(link_final, columns=['link'])
    df_in_col = pd.DataFrame(internal_flags, columns=['in'])
    df_ex_col = pd.DataFrame(external_flags, columns=['ex'])
    # Full table with both flag columns.
    df = pd.concat([df_code, df_title, df_link, df_in_col, df_ex_col], axis=1)
    df.to_excel("多页标题和链接3.xlsx")
    # Internal links only: dropna() keeps rows where the 'in' flag is set.
    df_in = pd.concat([df_code, df_title, df_link, df_in_col], axis=1).dropna()
    df_in.to_excel("内链.xlsx")
    # External links only.
    df_ex = pd.concat([df_code, df_title, df_link, df_ex_col], axis=1).dropna()
    df_ex.to_excel("外链.xlsx")
    return df
def link_fill(link):
    """Normalize the scraped hrefs and count them.

    Relative links (those whose second character is 'z') get a site prefix
    prepended; the prefix is intentionally left blank here — per the
    original author's note, this script is for study and the real link
    should be filled in when debugging.

    Args:
        link: list of href strings.

    Returns:
        tuple[list, int]: (normalized hrefs, len(link) + 1).
    """
    normalized = []
    for href in link:
        if href[1] == 'z':
            # Blank prefix placeholder — supply the real base URL to debug.
            href = '' + str(href)
        normalized.append(href)
    return normalized, len(link) + 1
    
def main():
    """Entry point: fetch listing pages, extract titles/links, classify
    them as internal/external, and export Excel files.

    ``url_base`` is intentionally blank — per the original author's note
    this script is for study; supply a real listing URL before running.
    """
    url_base = ""
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.51 Safari/537.36 Edg/99.0.1150.30'}
    page_lists = get_url(url_base, headers)
    soup, link = analyze_html(page_lists)
    link_final, num = link_fill(link)
    fill_list(soup, link_final, num)
    # number is set as a side effect of analyze_html(); st is the module
    # start timestamp, so this prints total elapsed seconds.
    print(number)
    print(time.time() - st)


if __name__ == "__main__":
    # Guard the entry point so importing this module does not trigger a crawl.
    main()
