import requests
import json
import re
from .spider import Spider
import glob
from openpyxl import Workbook,load_workbook
from collections import deque
import threading
import MySQLdb
import sys
from .models import *

# NOTE(review): module-level pattern that mirrors XsDown's default regex but is
# never referenced anywhere in this file; 'lstar/d' here looks like a typo for
# 'lstar\d' (the class default uses 'lstar5') -- confirm before using or delete.
# Also note '\s'/'\S' are invalid escapes in a non-raw string literal.
regex = '''<div class="s">作者：(.*?)<br />大小：.*?MB<br>等级：\
<em class="lstar/d"></em><br>更新：.*?</div>[\s\S]*?\
<a href="(.*?)"><img src=".*?">《(.*?)》全集</a>'''

class XsDown:
    """Fetch one listing page and extract book info tuples via Spider.

    The regex captures three groups per match: (author, detail-page href, title).
    The scraped matches are exposed as ``self.info`` (whatever ``Spider.get_info``
    returns -- presumably a dict with an 'info' key, judging by the callers below;
    TODO confirm against the .spider module).
    """

    def __init__(self,
                 url = 'https://www.sjxs.la/soft/1/Soft_001_2.html',
                 # BUG FIX: the original non-raw literal contained '\s'/'\S',
                 # which are invalid escape sequences (SyntaxWarning on modern
                 # Python). The raw string below has the identical value.
                 regex = r'''<div class="s">作者：(.*?)<br />大小：.*?MB<br>等级：<em class="lstar5"></em><br>更新：.*?</div>[\s\S]*?<a href="(.*?)"><img src=".*?">《(.*?)》全集</a>'''
                 ):
        self.url = url
        self.regex = regex
        # Spider performs the HTTP fetch and regex extraction in one step.
        self.info = Spider(encode = 'utf-8',
                           url = self.url,
                           info = self.regex).get_info()

        


class MakeQueue:
    """Build the list of listing-page URLs for one category and cache it as JSON.

    ``typ_url`` holds a URL template containing the literal placeholder text
    'page_num', which ``make_queue`` later substitutes with each page number.
    """

    def __init__(self, type_num, page_num):
        self.type_num = type_num
        self.page_num = page_num
        self.typ_url = self.make_url()

    def make_url(self):
        """Return the category URL template (with a 'page_num' placeholder).

        Raises:
            TypeError: if ``type_num`` is zero or negative.
        """
        n = self.type_num
        if n > 9:
            # Two-digit categories are zero-padded to three digits: Soft_0NN.
            template = f'https://www.sjxs.la/soft/{n}/Soft_0{n}_page_num.html'
        elif 9 >= n > 0:
            # Single-digit categories are zero-padded to three digits: Soft_00N.
            template = f'https://www.sjxs.la/soft/{n}/Soft_00{n}_page_num.html'
        elif n <= 0:
            raise TypeError('type_num不能小于0')
        return template

    def make_queue(self):
        """Expand the template into per-page URLs and dump them to a JSON file."""
        template = self.typ_url.replace('page_num', '{}')
        pages = [template.format(page) for page in range(1, self.page_num + 1)]

        with open(f'type_num：{self.type_num}.json', 'w') as f:
            json.dump(pages, f)

        print('队列创建完毕')

class DownToDjango:
    """Drain the cached URL queue and store scraped books via the Django ORM."""

    def __init__(self, type_num, page_num):
        self.type_num = type_num
        self.page_num = page_num
        # Shared work queue; deque.popleft() is atomic, so worker threads can
        # pop from it concurrently without extra locking.
        self.queue_list = deque(self.test())
        self.lock = threading.Lock()

    def test(self):
        """Load the cached URL queue for this category, building it first if absent."""
        if not glob.glob('type_num：{}.json'.format(self.type_num)):
            MakeQueue(self.type_num, self.page_num).make_queue()

        with open('type_num：{}.json'.format(self.type_num)) as f:
            queue_list = json.load(f)

        return queue_list

    def runing(self):
        """Process one URL from the queue: scrape it and bulk-insert the books.

        Scrape failures are recorded as BookLog rows instead of crashing the
        worker thread.
        """
        try:
            url = self.queue_list.popleft()
        except IndexError:
            print('队列耗尽，退出')
        else:
            print(url)
            try:
                info_list = XsDown(url = url).info['info']
            # BUG FIX: was a bare ``except:``; narrowed so SystemExit /
            # KeyboardInterrupt are no longer swallowed. Logging behaviour kept.
            except Exception:
                BookLog.objects.create(error_url = url,
                                       error_log = str(sys.exc_info())
                                       )
            else:
                # info tuples are (author, href, title); links are site-relative.
                Book.objects.bulk_create(
                    [Book(name = info[2],
                          author = info[0],
                          url = 'https://www.sjxs.la' + info[1],
                          ) for info in info_list])

    def my_thread(self):
        """Drain the queue with successive batches of 5 worker threads."""
        while True:
            pool = [threading.Thread(target = self.runing) for _ in range(5)]
            for t in pool:
                t.start()
            for t in pool:
                t.join()
            if not self.queue_list:
                # BUG FIX: the original printed the literal '{}' because the
                # .format() call was missing.
                print('类型：{}，搞定'.format(self.type_num))
                break


            


            

            

    






        

class Down:
    """Scrape listing pages and append the results to an Excel workbook (info.xlsx)."""

    def __init__(self, type_num, page_num):
        self.type_num = type_num
        self.page_num = page_num
        self.queue_list = deque(self.test())
        # Serialises the read-modify-write cycle on info.xlsx across threads.
        self.lock = threading.Lock()

    def test(self):
        """Ensure the URL-queue JSON and info.xlsx exist, then load the queue."""
        if not glob.glob('type_num：{}.json'.format(self.type_num)):
            # BUG FIX: original referenced self.type_nume (typo) and raised
            # AttributeError whenever the queue file was missing.
            MakeQueue(self.type_num, self.page_num).make_queue()

        if not glob.glob('info.xlsx'):
            # Create the workbook with a header row once.
            wb = Workbook()
            sheet = wb.active
            sheet.title = 'Sheet'
            sheet['A1'] = '书名'  # title
            sheet['B1'] = '作者'  # author
            sheet['C1'] = '链接'  # link
            wb.save('info.xlsx')

        with open('type_num：{}.json'.format(self.type_num)) as f:
            queue_list = json.load(f)

        return queue_list

    def runing(self):
        """Worker loop: pop URLs, scrape them, and append rows to info.xlsx."""
        while True:
            try:
                url = self.queue_list.popleft()
            except IndexError:
                print(threading.current_thread().name, '退出')
                break

            # Opportunistically grow the pool while work remains, capped at ~20 threads.
            if threading.active_count() < 20:
                self.my_thread()

            print(url)

            info_list = XsDown(url = url).info['info']

            # BUG FIX: acquire()/release() leaked the lock if anything below
            # raised, deadlocking all other workers; ``with`` always releases.
            with self.lock:
                wb = load_workbook('info.xlsx')
                sheet = wb['Sheet']
                row = sheet.max_row

                for info in info_list:
                    row += 1
                    sheet['A{}'.format(row)] = info[2]  # title
                    sheet['B{}'.format(row)] = info[0]  # author
                    # NOTE(review): stores the site-relative link, unlike the
                    # Django path which prepends the domain -- confirm intended.
                    sheet['C{}'.format(row)] = info[1]

                wb.save('info.xlsx')

    def my_thread(self):
        """Start one additional worker thread running runing()."""
        t = threading.Thread(target = self.runing)
        t.start()


class DownToMysql:
    """Scrape listing pages and insert the results into a remote MySQL table."""

    def __init__(self, type_num, page_num):
        self.type_num = type_num
        self.page_num = page_num
        self.queue_list = deque(self.test())
        self.lock = threading.Lock()

    def test(self):
        """Ensure the URL-queue JSON exists, then load and return it."""
        if not glob.glob('type_num：{}.json'.format(self.type_num)):
            # BUG FIX: original referenced self.type_nume (typo) and raised
            # AttributeError whenever the queue file was missing.
            MakeQueue(self.type_num, self.page_num).make_queue()

        with open('type_num：{}.json'.format(self.type_num)) as f:
            queue_list = json.load(f)

        return queue_list

    def runing(self):
        """Worker loop: pop URLs, scrape them, and insert rows into MySQL."""
        while True:
            try:
                url = self.queue_list.popleft()
            except IndexError:
                print(threading.current_thread().name, '退出')
                break

            # Opportunistically grow the pool while work remains, capped at ~5 threads.
            if threading.active_count() < 5:
                self.my_thread()

            print(url)

            info_list = XsDown(url = url).info['info']

            # Rows are (author, link, title); make the scraped relative links absolute.
            rows = [(info[0],
                     info[1].replace('/soft/', 'https://www.sjxs.la/soft/'),
                     info[2])
                    for info in info_list]

            # NOTE(review): hard-coded credentials -- move to configuration.
            conn = MySQLdb.connect(host = '140.143.206.157',
                                   port = 3306,
                                   db = 'temporary',  # remote database name; must already exist
                                   user = 'root',
                                   passwd = '123456',
                                   charset = 'utf8')
            try:
                cursor = conn.cursor()
                try:
                    # SECURITY FIX: the original interpolated scraped (untrusted)
                    # text straight into the SQL string via str(info_list);
                    # parameterised executemany escapes every value.
                    cursor.executemany(
                        '''INSERT INTO tab_1(作者,链接,书名) VALUES (%s,%s,%s);''',
                        rows)
                    conn.commit()
                finally:
                    cursor.close()
            finally:
                # BUG FIX: the connection/cursor were never closed on error.
                conn.close()

    def my_thread(self):
        """Start one additional worker thread running runing()."""
        t = threading.Thread(target = self.runing)
        t.start()

    
        

        

        

    
            
            

        

if __name__ == '__main__':

    # Scrape category 1 (400 pages) into MySQL. my_thread() returns None, so
    # the original ``x = ...`` binding was meaningless and has been dropped.
    DownToMysql(1, 400).my_thread()










































