import requests
from concurrent.futures import ThreadPoolExecutor
import aiohttp
import asyncio

class Fetcher:
    """Fetch many URLs concurrently, via threads (requests) or asyncio (aiohttp).

    Both entry points return a list of response bodies in the same order as
    the input URLs; any URL that fails to fetch yields ``None`` in its slot.
    """

    def fetch_url(self, url, timeout=10):
        """Fetch *url* synchronously and return its body text.

        Args:
            url: The URL to fetch.
            timeout: Per-request timeout in seconds. Required because
                ``requests.get`` without a timeout can block forever on a
                stalled server.

        Returns:
            The response body as text, or ``None`` on any request error.
        """
        try:
            response = requests.get(url, timeout=timeout)
            return response.text
        except requests.RequestException as e:
            # Best-effort: report and return None so one bad URL doesn't
            # abort the whole batch.
            print(f"Error fetching {url}: {e}")
            return None

    def parallel_fetch(self, urls):
        """Fetch *urls* concurrently using a thread pool.

        Returns:
            A list of body texts (or ``None`` for failures) in input order;
            ``executor.map`` preserves ordering.
        """
        with ThreadPoolExecutor(max_workers=10) as executor:
            results = list(executor.map(self.fetch_url, urls))
        return results

    async def async_fetch_url(self, session, url):
        """Fetch *url* using the shared aiohttp *session*.

        Returns:
            The response body as text, or ``None`` on a client error or
            timeout.
        """
        try:
            async with session.get(url) as response:
                return await response.text()
        except (aiohttp.ClientError, asyncio.TimeoutError) as e:
            print(f"Error fetching {url}: {e}")
            return None

    async def async_parallel_fetch(self, urls, timeout=10):
        """Fetch *urls* concurrently on the event loop.

        Args:
            urls: Iterable of URLs to fetch.
            timeout: Total per-request timeout in seconds, applied via
                ``aiohttp.ClientTimeout`` so no request can hang forever.

        Returns:
            A list of body texts (or ``None`` for failures) in input order;
            ``asyncio.gather`` preserves ordering.
        """
        # One shared session reuses connections across all requests.
        client_timeout = aiohttp.ClientTimeout(total=timeout)
        async with aiohttp.ClientSession(timeout=client_timeout) as session:
            tasks = [self.async_fetch_url(session, url) for url in urls]
            results = await asyncio.gather(*tasks)
        return results