import requests
from bs4 import BeautifulSoup
import re
import os
import json
import queue
import threading
import time


# Base URL of the demo scraping site; relative detail hrefs are joined onto it.
URL='https://ssr1.scrape.center'
# Number of list pages to crawl.
PAGE=10
# Output directory for the per-movie JSON files.
PATH='./movies'
if not os.path.exists(PATH):
    os.mkdir(PATH)

# Work queue of detail-page URLs, shared between the producer and the workers.
Q=queue.Queue()

def get_a():
    """Crawl all PAGE list pages and enqueue every movie detail URL into Q.

    Side effects: one HTTP GET per list page; puts absolute detail-page
    URLs onto the module-level queue Q.
    """
    for page in range(1, PAGE + 1):
        # Fix: list pages live at /page/<n>; the original built '/page<n>'
        # (missing slash), which 404s.
        url = URL + '/page/' + str(page)
        res = requests.get(url)
        res.encoding = res.apparent_encoding
        soup = BeautifulSoup(res.text, 'html.parser')
        # Each movie card's title anchor has class "name" and a relative
        # href such as /detail/1.
        for atag in soup.find_all('a', {'class': 'name'}):
            Q.put(URL + atag['href'])

def get_page(url):
    """Crawl all PAGE list pages and enqueue detail URLs (duplicate of get_a).

    NOTE(review): the `url` parameter is immediately overwritten inside the
    loop; it is kept only for backward compatibility with existing callers.
    """
    for page in range(1, PAGE + 1):
        url = URL + '/page/' + str(page)
        res = requests.get(url)
        res.encoding = res.apparent_encoding
        html = res.text

        # Fix: 'heml.parser' was a typo that raised FeatureNotFound at runtime.
        soup = BeautifulSoup(html, 'html.parser')
        for atag in soup.find_all('a', class_='name'):
            Q.put(URL + atag['href'])
def get_content(url):
    """Fetch one movie detail page and return its fields as a dict.

    Returns None when any step fails (network error or unexpected markup);
    the exception is printed rather than propagated so that one bad page
    does not kill a worker thread.
    """
    try:
        res = requests.get(url)
        res.encoding = res.apparent_encoding
        html = res.text

        # Fix: 'heml.parser' typo raised FeatureNotFound at runtime.
        soup = BeautifulSoup(html, 'html.parser')
        # Fix: `soup.h2,string` was a tuple with an undefined name;
        # attribute access was intended.
        title = soup.h2.string
        print(title)
        categories = [
            span.string
            for span in soup.find('div', {'class': 'categories'}).find_all('span')
        ]
        # Fix: find() returns a single tag; find_all() is required to unpack
        # the two info <div>s (country/duration and release date).
        info_divs1, info_divs2 = soup.find_all('div', {'class': 'info'})
        info_spans1 = info_divs1.find_all('span')
        countries = info_spans1[0].string
        # Renamed from `time`, which shadowed the imported time module.
        duration = info_spans1[-1].string
        published = info_divs2.span.string
        drama = soup.find('div', {'class': 'drama'}).p.string.strip()
        score = soup.find('p', {'class': 'score'}).string.strip()
        return {
            'title': title,
            'categories': categories,
            'countries': countries,  # was computed but never stored
            'time': duration,
            'published': published,
            'drama': drama,
            'score': score,
        }
    except Exception as e:
        # Best-effort: log and return None so the caller can skip this page.
        print(e)
        return None
def write_content():
    """Drain Q, scrape each detail URL, and write one JSON file per movie.

    Skips URLs whose scrape failed (get_content returned None). Output files
    are named after the movie title, placed under PATH.
    """
    while not Q.empty():
        url = Q.get()
        content = get_content(url)
        if content is None:
            continue  # scrape failed; don't crash the worker thread
        with open(os.path.join(PATH, content['title']), 'w', encoding='utf-8') as f:
            # Fix: original wrote `dara=...` then `f.write(data)` — a
            # NameError on every single write.
            data = json.dumps(content, ensure_ascii=False)
            f.write(data)

def start_thread(thread_names, thread_nums, args=tuple()):
    """Run `thread_names` (a callable) in `thread_nums` daemon threads and
    block until all of them have finished.

    NOTE(review): parameter names are kept for backward compatibility even
    though `thread_names` is the target callable and `thread_nums` the count.
    """
    threads = []
    for _ in range(thread_nums):
        # Fix: original passed the integer count as target=; the callable
        # was intended.
        t = threading.Thread(target=thread_names, args=args)
        t.daemon = True  # setDaemon() has been deprecated since Python 3.10
        t.start()
        threads.append(t)

    # Poll until every worker exits — daemon threads would otherwise be
    # killed as soon as the main thread returns.
    while any(t.is_alive() for t in threads):
        time.sleep(0.1)

if __name__ == '__main__':
    # Producer: fill Q with every detail-page URL.
    get_a()
    # Fix: original wrote start_thread(write_content(), 5), which ran the
    # whole drain synchronously in the main thread and then handed its None
    # return value to Thread as the target. Pass the function itself.
    start_thread(write_content, 5)

# NOTE(review): leftover debug calls, disabled. They ran unconditionally on
# import, and the second passed an argument to write_content(), which takes
# none — an immediate TypeError ('dteail' also misspells 'detail').
# get_content('https://ssr1.scrape.center/detail/13')
# write_content()













