import asyncio
import httpx
import os,logging
from pyquery import PyQuery as pq
from abc import ABCMeta, abstractmethod
from collections import namedtuple
logging.basicConfig(level=logging.DEBUG,format="%(asctime)s [%(name)s][%(levelname)s] => %(message)s")
logger=logging.getLogger('Spider')


# Lightweight value records passed between the parsing stages.
# A book owns chapters; a chapter owns ordered pages.
BookItem = namedtuple('BookItem', ['url', 'title', 'chapters'])
ChapterItem = namedtuple('ChapterItem', ['url', 'index', 'title', 'pages'])
PageItem = namedtuple('PageItem', ['url', 'index'])

# Default request headers: a desktop-browser User-Agent so servers do not
# reject us as an obvious bot.
HEADERS = {
    'User-Agent': r'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.93 Safari/537.36 Edg/96.0.1054.53'
}

# Tunables, overridable through the environment.
MAX_TASK = int(os.environ.get('max_task', 3))    # max concurrent requests
MAX_RETRY = int(os.environ.get('max_retry', 10))  # max attempts per request
class BaseSpider(metaclass=ABCMeta):
    """Abstract base class for async book spiders.

    Subclasses implement :meth:`parse_book` / :meth:`parse_chapter` to turn
    fetched pages into the ``BookItem`` / ``ChapterItem`` / ``PageItem``
    records, queue coroutines via :meth:`_append_task`, then call
    :meth:`start` to run everything under one event loop.

    NOTE(review): ``tasks``, ``client`` and ``semaphore`` are *class-level*
    attributes, shared by every instance and subclass. That looks deliberate
    (one connection pool, one concurrency budget), but it also means two
    spiders share one task queue — confirm this is intended.
    """

    tasks = []
    client = httpx.AsyncClient(headers=HEADERS)
    # Caps concurrent in-flight requests at MAX_TASK.
    # NOTE(review): on Python < 3.10 creating a Semaphore outside a running
    # loop binds it to the then-current loop, which differs from the loop
    # asyncio.run() later creates — verify the target Python version.
    semaphore = asyncio.Semaphore(MAX_TASK)

    @abstractmethod
    def parse_book(self, selector) -> BookItem:
        """Parse a book index page (a PyQuery selector) into a BookItem."""

    @abstractmethod
    def parse_chapter(self, book: BookItem) -> PageItem:
        """Parse one chapter of *book*.

        NOTE(review): annotated ``-> PageItem`` but named ``parse_chapter``
        — possibly should return a ChapterItem; confirm with implementors.
        """

    async def _get(self, url):
        """GET *url* under the concurrency limit, retrying up to MAX_RETRY times.

        Raises the last ``httpx.HTTPError`` if every attempt fails.
        """
        for attempt in range(1, MAX_RETRY + 1):
            try:
                # BUG FIX: asyncio.Semaphore is an *async* context manager;
                # the original plain `with` raises TypeError at runtime.
                async with self.semaphore:
                    return await self.client.get(url)
            except httpx.HTTPError:
                # MAX_RETRY was previously read from the environment but
                # never used; wire it in as the retry budget.
                logger.warning("GET %s failed (attempt %d/%d)", url, attempt, MAX_RETRY)
                if attempt == MAX_RETRY:
                    raise

    async def _get_content(self, url):
        """Fetch *url* and return the raw response body (bytes)."""
        res = await self._get(url)
        return res.content

    async def _get_selector(self, url):
        """Fetch *url* and return its HTML wrapped in a PyQuery selector."""
        res = await self._get(url)
        return pq(res.text)

    def _append_task(self, coroutine):
        # BUG FIX: asyncio.create_task() requires a *running* event loop and
        # raised RuntimeError when called from sync code before start().
        # Store the bare coroutine; start() schedules it inside the loop.
        self.tasks.append(coroutine)

    def start(self):
        """Run all queued coroutines to completion under one event loop."""
        async def _run_all():
            # BUG FIX: gather must execute inside the loop that asyncio.run
            # creates; the original called it with no loop running.
            await asyncio.gather(*self.tasks)

        asyncio.run(_run_all())