import os
import json
import asyncio
import aiosqlite
from aiologger import Logger
from pyppeteer import launch


class Scraper:
    """Scrape customer reviews from 2GIS firm pages into a local SQLite DB.

    Drives a browser with pyppeteer, stores rows with aiosqlite and logs via
    aiologger.  Configuration comes from the environment:

    * ``BROWSER_PATH`` — browser binary to launch; unset lets pyppeteer use
      its bundled Chromium.
    * ``DOCKER`` — any non-empty value means "running in a container": run
      headless and skip the interactive browser-data cleanup on shutdown.
    """

    def __init__(self, db_path="scrape.db"):
        """Prepare launch options and the logger.

        :param db_path: path of the SQLite database file (default matches
            the original hard-coded ``scrape.db``).
        """
        bpath = os.getenv("BROWSER_PATH") or None
        # Any non-empty DOCKER value selects headless/container mode.
        is_docker = bool(os.getenv("DOCKER"))
        self.opts = {
            "executablePath": bpath,
            "headless": is_docker,
            "args": ["--no-sandbox"],
        }
        # timeout=0 disables pyppeteer's navigation timeout; target pages
        # can be slow, so we wait for networkidle2 instead.
        self.goto_opts = {
            "timeout": 0,
            "waitUntil": "networkidle2",
        }
        self.is_docker = is_docker
        self.db_path = db_path
        self.logger = Logger.with_default_handlers(name='scrape-logger')

    async def prepare_db(self):
        """Open the SQLite database and create the reviews table if missing.

        Stores the open connection on ``self.db``.  On any failure the error
        is logged and the process exits with status -1.
        """
        await self.logger.debug("preparing database")
        try:
            db = await aiosqlite.connect(self.db_path)
            await db.execute("""
            CREATE TABLE IF NOT EXISTS reviews (

                cust_name VARCHAR(64),
                review_text TEXT,
                stars INT(5),
                firm_url VARCHAR(64),
                UNIQUE(cust_name, firm_url) ON CONFLICT IGNORE

            );""")
            self.db = db
        except Exception as e:
            self.logger.exception(e)
            # BUG FIX: the original called os.exit(-1), which does not exist
            # (os has no `exit`) and would raise AttributeError instead of
            # terminating.  SystemExit is a builtin and needs no import.
            raise SystemExit(-1)

    async def job(self):
        """Scrape reviews from the firms listed on the currently open page.

        Collects firm links, visits the first three, opens each firm's
        reviews pane and inserts (name, text, stars, url) rows.  Commits
        once at the end and persists session cookies for the next run.
        Duplicate (cust_name, firm_url) pairs are ignored by the table's
        UNIQUE ... ON CONFLICT IGNORE constraint.
        """
        lst = await self.page.querySelectorAll("._1kf6gff")
        firm_list = [await l.querySelectorEval("._zjunba", "n => n.children[0].href") for l in lst]

        # NOTE(review): only the first 3 firms are scraped — presumably a
        # development limit; confirm before removing.
        for firm_url in firm_list[:3]:
            await self.page.goto(firm_url, self.goto_opts)
            await self.logger.debug("scraping %s" % firm_url)
            pane_objs = await self.page.querySelectorAll("._6e8ght")
            # pane_objs[1] is assumed to be the reviews tab for this layout.
            await pane_objs[1].click()
            await asyncio.sleep(1)  # let the reviews pane render
            reviews = await self.page.querySelectorAll("._11gvyqv")
            for r in reviews:
                name = await r.querySelectorEval("._16s5yj36", "n => n.innerText")
                stars = await r.querySelectorEval("._1fkin5c", "n => n.children.length")
                review = await r.querySelectorEval("a", "n => n.innerText")
                await self.db.execute(
                    "INSERT INTO reviews values(?, ?, ?, ?)",
                    [name, review, stars, firm_url])
        await self.db.commit()

        # Persist cookies so the next run can resume the session.
        cookies = await self.page.cookies()
        with open('cookies.json', 'w') as f:
            json.dump(cookies, f)

    async def scrape(self, url="https://2gis.ru/ufa/branches/2393074172953395"):
        """Launch the browser, restore saved cookies and run the scrape job.

        :param url: starting 2GIS page (default preserves the original
            hard-coded target).

        Errors from the job are logged, not re-raised; the ``finally`` block
        always closes the database and the browser and shuts the logger down.
        """
        await self.prepare_db()

        cookies = None
        if os.path.exists("cookies.json"):
            with open("cookies.json") as f:
                cookies = json.load(f)

        browser = await launch(self.opts)
        pages = await browser.pages()
        # Reuse the tab the browser opens on launch instead of creating one.
        self.page = pages[0]

        try:
            if cookies:
                await self.page.setCookie(*cookies)
            await self.logger.debug("preparing to scrape %s" % url)
            await self.page.goto(url)
            await self.logger.debug("starting job")
            await self.job()
        except Exception as e:
            self.logger.exception(e)
        finally:
            if not self.is_docker:
                # Outside docker a visible Edge is assumed: clear its
                # browsing data before shutdown (edge:// URL is Edge-only).
                await self.page.goto('edge://settings/clearBrowserData')
                await self.page.keyboard.down('Enter')
            await self.db.close()
            await browser.close()
            await self.logger.debug("done scraping")
            await self.logger.shutdown()

def amain():
    """Entry point: run the scraper to completion on a fresh event loop.

    The original created a loop, set it as current, then re-fetched it via
    asyncio.get_event_loop() and never closed it; this version uses the
    created loop directly and always closes it.
    """
    s = Scraper()
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(s.scrape())
    finally:
        loop.close()

if __name__ == "__main__":
    amain()

