import threading
import time
import func
from bs4 import BeautifulSoup
import requests


class myThread(threading.Thread):
    """Worker thread: repeatedly pops a chapter off the shared work list,
    downloads both pages of the chapter, parses them, and appends the
    result to the global `book` accumulator.

    Coordination relies on module globals: `exitflag` (shutdown signal)
    and `book` (shared result store).
    """
    def __init__(self, re_header, queueLock, foLock, chapter_list):
        """
        re_header    -- dict of HTTP request headers (Referer, User-Agent, ...)
        queueLock    -- lock protecting `chapter_list`
        foLock       -- lock protecting the shared result store `book`
        chapter_list -- shared work list of [index, url_path] pairs
        """
        threading.Thread.__init__(self)
        self.re_header = re_header
        # NOTE(review): original code bound the *class* `book_content` here,
        # not an instance; the attribute is never used in run(). Kept for
        # backward compatibility.
        self.book_content = book_content
        self.queueLock = queueLock
        self.foLock = foLock
        self.chapter_list = chapter_list

    def run(self):
        # Spin until the main thread sets the global exitflag.
        while not exitflag:
            self.queueLock.acquire()
            if not len(self.chapter_list) == 0:
                # Bug fix: pop from self.chapter_list (the list handed to the
                # constructor), not the module-level global of the same name.
                get = self.chapter_list.pop()
                print(get[0])
                self.queueLock.release()
                index = get[0]
                url = 'http://m.50zw.la' + get[1]

                # Each chapter spans two pages: <url>.html and <url>_2.html.
                # Bug fix: re_header must be sent as HTTP *headers*, not as
                # URL query parameters (`params=` appended them to the URL).
                r = requests.get(url + '.html', headers=self.re_header)
                r_2 = requests.get(url + '_2.html', headers=self.re_header)
                # Site serves GBK-encoded pages.
                r.encoding = 'gbk'
                r_2.encoding = 'gbk'

                # Parse both pages.
                soup = BeautifulSoup(r.text, "html.parser")
                soup_2 = BeautifulSoup(r_2.text, "html.parser")

                chapter_name = func.dealwithname(soup)
                chapter_content_1 = func.dealwithcontent(soup)
                chapter_content_2 = func.dealwithcontent(soup_2)

                chapter_content = chapter_content_1 + chapter_content_2

                # Store the result under the shared-store lock.
                self.foLock.acquire()
                book.put(index, chapter_name, chapter_content)
                self.foLock.release()
            else:
                # Nothing to do right now; release and re-check exitflag.
                self.queueLock.release()

class book_content:
    """Simple accumulator for downloaded chapters.

    Each entry is a [index, name, content] list; entries arrive out of
    order from the worker threads and are re-sorted by index later.
    """
    def __init__(self):
        # Bug fix: the original used a mutable *class* attribute, which is
        # shared by every instance of the class. Store per-instance instead.
        self.book_content = []

    def put(self, index, name, content):
        """Append one chapter record. Not thread-safe by itself; callers
        serialize access with an external lock (foLock)."""
        self.book_content.append([index, name, content])
'''*********************************************************************'''
# --- configuration -----------------------------------------------------
# Book identifier used to build the chapter-list / book URLs on the site.
book_id = '2222'
base_url = 'http://m.50zw.la'
book_url = base_url+'/book_'+book_id


# Browser-like HTTP headers used for every request (anti-scraping measure).
re_header = {
'Referer':'http://m.50zw.la/chapters_1/',
'Upgrade-Insecure-Requests':'1',
'User-Agent':'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Mobile Safari/537.36'
}



# Fetch the book title from the site (opaque helper in func module).
book_name=func.getname(book_url,re_header)

#create file
# Binary mode: chapter text is explicitly encoded to UTF-8 before writing.
fo = open(book_name+'.txt','wb') 

# Write book metadata/header to the output file (opaque helper).
func.writeinfo(book_url,book_name,re_header,fo)

# Fetch the chapter URL list, tag each entry with its original index so the
# chapters can be reassembled in order after out-of-order parallel download.
chapter_list = func.getlist(book_id,re_header)
for t in range(len(chapter_list)):
    chapter_list[t]=[t,chapter_list[t]]
# Reverse so that pop() (from the tail) serves chapters in original order.
chapter_list.reverse()

# Shared result store filled by the worker threads.
book=book_content()
# Placeholder list, one slot per chapter; each slot is reassigned below with
# the real [index, name, content] record.
# NOTE(review): if a chapter were never downloaded, its ['a','a'] placeholder
# has only 2 elements and t[2] below would raise IndexError — confirm every
# chapter is guaranteed to be fetched.
book2=[['a','a']]*len(chapter_list)
'''**********************************************************************'''
# Lock protecting chapter_list; lock protecting the shared `book` store.
queueLock = threading.Lock()
foLock = threading.Lock()

threads_get=[]

# Global shutdown flag read by the worker threads' run() loops.
exitflag=0

#create 100 threads
# NOTE(review): the printed messages say 进程 (process) but these are threads.
print('创建进程...')
for i in range(100):
    thread = myThread(re_header,queueLock,foLock,chapter_list)
    thread.start()
    threads_get.append(thread)
print('创建完成...')

print('正在获取 '+book_name)

print('等待下载 1 / 3')
#waiting queue empty
# Busy-wait (spins a CPU core) until the workers drain the chapter list.
while not len(chapter_list)==0:
    pass

# Signal workers to exit, then wait for in-flight downloads to finish.
exitflag = 1
print('等待下载 2 / 3')
for t in threads_get:
    t.join()

print('开始下载')
# Re-order the out-of-order results by their original chapter index.
for t in book.book_content:
    index=t[0]
    book2[index]=t

# Write chapters to the file in order, UTF-8 encoded.
for t in book2:
    chapter_name=t[1]
    chapter_content=t[2]
    
    fo.write((chapter_name).encode('utf-8'))
    fo.write((chapter_content).encode('utf-8'))

    print(chapter_name+' 已下载')

fo.close()
print(book_name+" 成功下载")
