#可见即可爬
'''
1.确定真实url地址
2.发送网络请求
3.解析我们想要的数据
4.保存数据（本次用csv文件保存）
'''
import threading
import  requests
import parsel
import csv
import time
import threading

def moveinfo():
    """Scrape the Maoyan Top-100 movie board (10 pages of 10) and append
    name/cast/release-date/score rows to ``data.csv``.

    Side effects: network requests to maoyan.com, appends to data.csv,
    prints progress, sleeps 2s between pages. Returns None.
    """
    # Hoisted out of the loop: identical for every request.
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36'}

    for i in range(0, 91, 10):
        print('==========正在打印第{}页================'.format(i))

        # 1. build the real url (offset paginates in steps of 10)
        url = 'https://www.maoyan.com/board/4?offset={}'.format(i)

        # 2. send the network request
        response = requests.get(url=url, headers=headers)
        html_data = response.text

        # 3. parse the fields we want
        selector = parsel.Selector(html_data)
        dds = selector.css('.board-wrapper dd')
        print(dds)

        # 4. save to csv — open the file ONCE per page, not once per row
        with open('data.csv', mode='a', encoding='utf-8', newline='') as f:
            csv_write = csv.writer(f)
            for dd in dds:
                name = dd.css('.name a::attr(title)').get()
                # guard: .get() may return None if the node is missing,
                # and None.strip() would raise AttributeError
                star = (dd.css('.star::text').get() or '').strip()
                releasetime = dd.css('.releasetime::text').get()
                score = ''.join(dd.css('.score i::text').getall())
                print(name, star, releasetime, score, sep='|')
                csv_write.writerow([name, star, releasetime, score])

        # be polite to the server between pages
        time.sleep(2)
def stockinfo():
    """Scrape the Futu stock-news list and append each item's text
    fragments as one CSV row to ``股市信息.csv``.

    Side effects: network request to futunn.com, appends to 股市信息.csv,
    prints each item. Returns None.
    """
    url = 'https://www.futunn.com/stock/ZH-US?seo_redirect=1&channel=1244&subchannel=2&from=BaiduAladdin&utm_source=alading_user&utm_medium=website_growth'
    headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36'}

    response = requests.get(url=url, headers=headers)
    items = parsel.Selector(response.text).css('.news-box li')
    print(items)

    # Open the file once instead of once per item.
    with open('股市信息.csv', mode='a', encoding='utf-8', newline='') as f:
        csv_write = csv.writer(f)
        for item in items:
            texts = item.css('p::text').getall()
            print(texts)
            # Bug fix: the original wrote writerow([texts]), which puts the
            # Python repr of the whole list into a single CSV cell.
            # Write one column per text fragment instead.
            csv_write.writerow(texts)
# Launch both scrapers concurrently, one thread each.
threads = []
for worker in (moveinfo, stockinfo):
    threads.append(threading.Thread(target=worker))

for t in threads:
    t.start()
    print('启动成功')

# Explicitly wait for both scrapers to finish. The original relied on the
# implicit wait for non-daemon threads at interpreter shutdown; joining
# makes the completion point deliberate and visible.
for t in threads:
    t.join()