from pathlib import Path
import random
import time
import asyncio
import aiohttp
import itertools

from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from bs4 import BeautifulSoup
import lxml.html
from lxml import etree

from browser import Browser
import proxy_with_authorization
import database as db


class Parser(Browser):

    """Weblancer job-board parser: downloads listing pages, extracts orders,
    filters them by keywords and records new ones in the database."""

    def __init__(self, data, proxy_list=None):

        """Store parser settings.

        data -- settings sequence: data[0] is the show/hide-browser flag,
                data[2] a start page URL, data[3] the number of listing pages
                to fetch.  data[1] is not read here — presumably consumed by
                the caller; TODO confirm.
        proxy_list -- optional proxy list (stored; not used in this chunk)
        """

        self.display_browser = data[0]       # forwarded to Browser.get_browser()
        self.link_to_page = data[2]          # stored; not referenced elsewhere in this chunk
        self.proxy_list = proxy_list
        self.number_pages_parsing = data[3]  # page count used by get_links_to_page()
        # substrings matched by get_necessary_orders() against header+description
        self.list_of_keywords = ['парсер', 'парсинг', 'спарсить', 'краулер', 'кравлер', 'парс', 'python', 'парсит']


    def wait(self, f=2, b=3.5):

        """ Чуть-чуть подождать """

        number = random.uniform(f, b)
        print(f'Ждем {number}\n')
        time.sleep(number)


    def get_saved_page(self, file_name):

        """Open a previously saved HTML page and return it as a BeautifulSoup tree."""

        path = Path('saved_html/') / file_name

        with open(path, 'r', encoding='utf-8') as file:
            return BeautifulSoup(file.read(), 'lxml')


    def convert_to_bs(self, browser):

        """Snapshot the browser's current DOM as a BeautifulSoup object."""

        return BeautifulSoup(browser.page_source, 'lxml')


    def click_on_button(self, element_selenium, a=.5, b=.8):

        """Click a Selenium element, then pause a random a..b seconds.

        element_selenium -- a clickable Selenium WebElement
        a, b -- bounds for the randomized pause (anti-bot pacing)
        """

        element_selenium.click()    # click the element
        self.wait(a, b)             # short randomized pause


    def enter_text_in_field(self, element_selenium, text, a=.5, b=.8):

        """Type text into an input field, then pause a random a..b seconds.

        element_selenium -- a Selenium WebElement accepting keyboard input
        text -- the string to type
        a, b -- bounds for the randomized pause
        """

        element_selenium.click()    # focus the field first
        element_selenium.clear()    # drop any pre-existing value
        element_selenium.send_keys(text)
        self.wait(a, b)


    def move_to_element(self, browser, selenium_element, a=.5, b=.8):

        """Move the mouse pointer to a page element, then pause a random a..b seconds."""

        ActionChains(browser).move_to_element(selenium_element).perform()
        self.wait(a, b)


    def scroll_the_page_js(self, browser, a=.2, b=.5, w=0, h=50):

        """Scroll the page via JavaScript, then pause a random a..b seconds.

        browser -- Selenium WebDriver instance
        a, b -- bounds for the randomized pause
        w -- horizontal scroll distance in pixels
        h -- vertical scroll distance in pixels
        """

        browser.execute_script(f"window.scrollBy({w}, {h})")
        self.wait(a, b)


    def get_configured_browser(self, through_proxy=None, proxy=None):

        """ Получить настроенный браузер """

        if through_proxy == 'public_proxy':  # Если необходимо делать запрос через общедоступные прокси

            browser = self.get_browser(self.display_browser, through_proxy)

            return browser

        elif through_proxy == 'private_proxy':  # Если необходимо делать запрос через приватные прокси

            def get_information_for_proxy(proxy):

                pass


            # ВЫШЕ ОПРЕДЕЛЕНИЕ ФУНКЦИЙ

            host, port, username, password = get_information_for_proxy(proxy)
            browser = proxy_with_authorization.proxy_chrome(host, port, username, password)

            return browser

        else:  # Если прокси не используюется

            browser = self.get_browser(self.display_browser, through_proxy)

            return browser


    def get_links_to_page(self):

        """ Получаем ссылки на страницы """

        links_to_page = []

        for number in range(self.number_pages_parsing):

            number += 1
            entry = f'https://www.weblancer.net/jobs/?page={number}'
            links_to_page.append(entry)

        return links_to_page


    def work_with_page_content(self, result, page):

        """Parse one jobs page and append [link, header, description] rows to result.

        result -- shared list, mutated in place
        page -- raw HTML (bytes or str) of a weblancer jobs listing page
        """

        def get_order_links(tree):

            """Collect the href attribute of every order title link."""

            return [a.attrib['href'] for a in tree.xpath('//div[@class="title"]/a')]


        def get_order_headers(tree):

            """Collect the visible text of every order title link."""

            return [a.text_content() for a in tree.xpath('//div[@class="title"]/a')]


        def get_order_descriptions(tree):

            """Collect the description block that follows each order title."""

            return [d.text_content() for d in tree.xpath('//div[@class="title"]/following::div[1]')]


        # ABOVE: FUNCTION DEFINITIONS

        tree = lxml.html.document_fromstring(page)

        links = get_order_links(tree)
        headers = get_order_headers(tree)
        descriptions = get_order_descriptions(tree)

        # The old combine_data() helper returned None yet its result was
        # assigned to an unused variable; the append loop is inlined instead.
        # zip_longest keeps a row even if one xpath returned fewer items.
        for link, header, description in itertools.zip_longest(links, headers, descriptions):
            result.append([f'https://www.weblancer.net{link}', header, description])


    async def get_page_content(self, session, link, result):

        """Download one page and hand its raw HTML to the parsing pipeline."""

        async with session.get(link) as response:

            raw_html = await response.read()
            self.work_with_page_content(result, raw_html)


    def get_old_orders(self):

        """Return the links of orders already stored in the database."""

        database = db.DataBase()

        return [record['link'] for record in database.get_existing_records()]


    def get_new_orders(self, orders):

        """ Получаем новые заказы """

        def get_links_current_orders(orders):

            """ Получаем ссылки заказов """

            order_links = []

            for order in orders:

                link = order[0]
                order_links.append(link)

            return order_links


        def get_new_orders(old_orders, links_current_orders):

            """ Получаем новые заказы """

            new_orders = []

            for order in links_current_orders:

                if not(order in old_orders): new_orders.append(order)

            return new_orders


        def combine_data(links_current_orders, new_orders):

            """ Совмещаем данные """

            result = []

#            print('links_current_orders')
#            print(links_current_orders)
#            print(len(links_current_orders))
#            print('links_current_orders')
#            print()
#            print('new_orders')
#            print(new_orders)
#            print(len(new_orders))
#            print('new_orders')

            for order in links_current_orders:

                if order[0] in new_orders: result.append(order)

            return result


        # ВЫШЕ ОПРЕДЕЛЕНИЕ ФУНКЦИЙ

        old_orders = self.get_old_orders()
        links_current_orders = get_links_current_orders(orders)
        new_orders = get_new_orders(old_orders, links_current_orders)
        combined_data = combine_data(orders, new_orders)

        return combined_data


    def get_necessary_orders(self, new_orders):

        """ Получаем нужные заказы """

        necessary_orders = []

        for order in new_orders:

            header = order[1]
            description = order[2]
            entry = f'{header} {description}'

            for word in self.list_of_keywords:

                if word in entry: necessary_orders.append(order)

        return necessary_orders


    def remove_duplicate_orders(self, orders):

        """ Убираем повторяющиеся заказы """

        result = []
        temp_result = []

        for order in orders:

            entry = 'Ё'.join(order)
            temp_result.append(entry)

        temp_result = set(temp_result)

        for order in temp_result:

            order = order.split('Ё')
            result.append(order)

        return result


    def add_new_orders_to_database(self, orders):

        """Store the link of every new order in the database."""

        database = db.DataBase()

        for link, *_ in orders:
            database.add_new_entry(link)


    def add_to_final_result(self, final_result, result):

        """ Добавляем все в итоговый результат """

        [final_result.append(order) for order in result]


    async def start_parsing(self, final_result):

        """Run the full pipeline: download pages concurrently, filter the
        orders, record them in the database and append them to final_result."""

        result = []

        async with aiohttp.ClientSession() as session:

            downloads = [
                asyncio.create_task(self.get_page_content(session, link, result))
                for link in self.get_links_to_page()
            ]

            await asyncio.gather(*downloads)

        new_orders = self.get_new_orders(result)
        necessary_orders = self.get_necessary_orders(new_orders)
        unique_orders = self.remove_duplicate_orders(necessary_orders)
        self.add_new_orders_to_database(unique_orders)
        self.add_to_final_result(final_result, unique_orders)
