import logging
import sqlite3
from urllib.parse import urldefrag
import setup_dirs


class DatabaseManager:
    """Manage per-source SQLite databases of queued and scraped urls."""

    def __init__(self, sourceId):
        """Open (creating if necessary) the queue and scraped databases.

        Each database holds a single table named after the source id with
        one column, ``url``, which is the primary key — so duplicate
        inserts are silently ignored via ``INSERT OR IGNORE``.

        Args:
            sourceId (str): Id of source config; used for both the database
                file names and the table name.
        """
        db_queue_file = setup_dirs.data + '/{}_queue.db'.format(sourceId)
        db_scraped_file = setup_dirs.data + '/{}.db'.format(sourceId)

        self.sourceId = sourceId

        self.queue_conn = sqlite3.connect(db_queue_file)
        self.scraped_conn = sqlite3.connect(db_scraped_file)
        self.queue_cursor = self.queue_conn.cursor()
        self.scraped_cursor = self.scraped_conn.cursor()

        # SQLite does not enforce VARCHAR length limits but the declaration
        # still needs one: https://www.sqlite.org/faq.html#q9
        # NOTE: the table name is an identifier and cannot be passed as a
        # SQL parameter; sourceId comes from local config, not scraped data.
        create_cmd = 'CREATE TABLE IF NOT EXISTS "{sourceId}" (url VARCHAR(100) PRIMARY KEY);'

        # create databases
        self.queue_cursor.execute(create_cmd.format(sourceId=self.sourceId))
        self.scraped_cursor.execute(create_cmd.format(sourceId=self.sourceId))
        self.queue_conn.commit()
        self.scraped_conn.commit()

    def addToScrapedDatabase(self, urls):
        """Add scraped urls to the scraped database.

        Args:
            urls (str or iterable of str): A single url, or any iterable
                of urls (list, tuple, generator, ...).
        """
        if isinstance(urls, str):
            urls = [urls]
        for url in urls:
            self.addUrlToScrapedDatabase(url)

        self.scraped_conn.commit()

    def addUrlToScrapedDatabase(self, url):
        """Insert a single url into the scraped database and commit.

        The fragment identifier (``#...``) is stripped first so fragment
        variants of the same page are stored only once.
        """
        url = urldefrag(url)[0]

        # Parameterized query: url values come from scraped pages (untrusted
        # input) and must not be interpolated into the SQL text.
        insert_cmd = 'INSERT OR IGNORE INTO "{sourceId}" (url) VALUES (?);'.format(sourceId=self.sourceId)
        self.scraped_cursor.execute(insert_cmd, (url,))
        self.scraped_conn.commit()

    def addToQueueDatabase(self, urls):
        """Queue urls in the queue database.

        Args:
            urls (str or iterable of str): A single url, or any iterable
                of urls (list, tuple, generator, ...).
        """
        if isinstance(urls, str):
            urls = [urls]
        for url in urls:
            self.addUrlToQueueDatabase(url)

        self.queue_conn.commit()

    def addUrlToQueueDatabase(self, url):
        """Queue a single url unless it has already been scraped."""

        # Strip the fragment identifier so '#section' variants of an
        # already-scraped page do not slip past the scraped-database check
        # below (scraped urls are stored without fragments).
        url = urldefrag(url)[0]

        find_cmd = 'SELECT url FROM "{sourceId}" WHERE url=?;'.format(sourceId=self.sourceId)
        if not self.scraped_cursor.execute(find_cmd, (url,)).fetchone():
            insert_cmd = 'INSERT OR IGNORE INTO "{sourceId}" (url) VALUES (?);'.format(sourceId=self.sourceId)
            self.queue_cursor.execute(insert_cmd, (url,))
            self.queue_conn.commit()
            logging.info('url queued: {}'.format(url))

    def getQueuedUrls(self):
        """Return a list of all urls in the queue database."""
        get_cmd = 'SELECT url FROM "{sourceId}";'.format(sourceId=self.sourceId)
        return [row[0] for row in self.queue_cursor.execute(get_cmd).fetchall()]

    def getScrapedUrls(self):
        """Return a list of all urls in the scraped database."""
        get_cmd = 'SELECT url FROM "{sourceId}";'.format(sourceId=self.sourceId)
        return [row[0] for row in self.scraped_cursor.execute(get_cmd).fetchall()]

    def clearQueueDatabase(self):
        """Delete every url from the queue database."""

        clear_cmd = 'DELETE FROM "{sourceId}";'.format(sourceId=self.sourceId)
        self.queue_cursor.execute(clear_cmd)
        self.queue_conn.commit()

    def clearFromQueueDatabase(self, url):
        """Delete a single url from the queue database."""

        clear_cmd = 'DELETE FROM "{sourceId}" WHERE url=?;'.format(sourceId=self.sourceId)
        self.queue_cursor.execute(clear_cmd, (url,))
        self.queue_conn.commit()

    def setQueueDataToScraped(self):
        """Copy every url in the queue database into the scraped database.

        The queue itself is left untouched; call clearQueueDatabase() to
        empty it.
        """
        insert_cmd = 'INSERT OR IGNORE INTO "{sourceId}" (url) VALUES (?);'.format(sourceId=self.sourceId)
        self.scraped_cursor.executemany(insert_cmd, ((url,) for url in self.getQueuedUrls()))
        self.scraped_conn.commit()

    def close(self):
        """Commit any pending changes and close both database connections."""
        self.queue_conn.commit()
        self.scraped_conn.commit()
        self.queue_conn.close()
        self.scraped_conn.close()