import yaml
from aiohttp import ClientSession


class Crawler:
    """Scrape paginated product listings from a Wildberries-style JSON API."""

    def __init__(self, config_path='../config.yml'):
        """Load url/ua/proxy settings from a YAML config file.

        Args:
            config_path: path to the YAML config file; defaults to
                '../config.yml' for backward compatibility.
        """
        # Context manager closes the handle promptly (the original leaked it).
        with open(config_path, 'rb') as fh:
            self.cfg = yaml.safe_load(fh)
        self.url = self.cfg['url']        # URL template containing the literal 'num' page placeholder
        self.ua = self.cfg['ua']          # request headers dict (User-Agent etc.)
        self.proxy = self.cfg['proxy']    # loaded for external use; not used by scraper()

    async def scraper(self):
        """Fetch every results page and return the collected product records.

        The page number is substituted for the literal 'num' in the configured
        URL template. Iteration stops at the first page whose product list is
        empty or missing.

        Returns:
            list[dict]: one dict per product with Russian-labelled fields.
        """
        offset = 1
        data = []

        # One session for the whole crawl: reuses the connection pool
        # instead of opening a new ClientSession per page.
        async with ClientSession() as s:
            while True:
                url = self.url.replace('num', f'{offset}')

                async with s.get(url, headers=self.ua) as r:
                    page = await r.json()
                    # Defensive chaining: a missing/None 'data' key would
                    # otherwise raise AttributeError; a None product list
                    # would raise TypeError in the for-loop.
                    content = (page.get('data') or {}).get('products') or []

                # Stop before processing: an empty page means we're done.
                if not content:
                    break

                for item in content:
                    sku = item.get('id')
                    price_raw = item.get('salePriceU')
                    data.append({
                        'Артикул': sku,
                        'URL': f"https://www.wildberries.ru/catalog/{sku}/detail.aspx",
                        'Бренд': item.get('brand'),
                        'Заголовок': item.get('name'),
                        # API price is in kopecks; keep None instead of
                        # raising TypeError when the field is absent.
                        'Цена': price_raw / 100 if price_raw is not None else None,
                        'Проданно': item.get('sale'),      # units sold
                        'Осталось': item.get('volume'),    # units in stock
                        'Оценка': item.get('reviewRating'),
                        'Отзывы': item.get('feedbacks'),
                    })

                offset += 1
        return data
