from datetime import datetime, timedelta, date
from airflow import DAG

from airflow.operators.python import PythonOperator
from airflow.operators.empty import EmptyOperator
import urllib.parse
import psycopg2
import pandas as pd
import sqlalchemy
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from airflow.models.xcom import XCom
import time
from random import choice
import pendulum

import requests
from bs4 import BeautifulSoup
import re

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
import json



LOCAL_TZ = pendulum.timezone("Europe/Moscow")
# NOTE(review): hard-coded Yandex Geocoder API key — this should live in the
# config file or an Airflow Connection/Variable, not in source control.
auth_key = 'afd19fc6-1eeb-4807-9ba2-5d1fdf2a1c2b'
url_maps = 'https://geocode-maps.yandex.ru/1.x/'  # Yandex Geocoder endpoint
# ------------------------------------------------------------------------------
# Config with connection parameters
# ------------------------------------------------------------------------------
with open('config/config.json') as f:
    config_json = json.load(f)
    conn_string = config_json['db_local_string']  # SQLAlchemy database URL
# ------------------------------------------------------------------------------
# user_agents (one UA string per line; not referenced elsewhere in this file)
# ------------------------------------------------------------------------------
with open('config/user_agent_2gis.txt') as file:
    user_agent_lines = file.read().split('\n')
# ------------------------------------------------------------------------------
# SQL
# ------------------------------------------------------------------------------
# Staged objects that have not yet been loaded (and geocoded) into
# etl.renovation_objects — i.e. the delta to process this run.
q_new_objects_for_geo = """
select
    *
from
    stg.renovation_objects
where
    object_id not in (
    select
        object_id
    from
        etl.renovation_objects);
"""
# Insert (object_id, status) pairs from staging that are not yet present in
# etl.renovation_object_status; the anti-join keeps the insert idempotent.
q_objects_status = """
insert
    into
    etl.renovation_object_status (object_id,
    status)
select
    t1.object_id,
    t1.status
from
    stg.renovation_object_status t1
left join etl.renovation_object_status t2 on
    t1.object_id = t2.object_id
    and t1.status = t2.status
where
    t2.object_id is null;
"""

# ------------------------------------------------------------------------------
# Selenium params
# ------------------------------------------------------------------------------
# Headless Chrome configuration for the remote Selenium driver.
chrome_options = Options()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--window-size=1920,1080')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-dev-shm-usage')
chrome_options.add_argument('--disable-gpu')
chrome_options.add_argument("--disable-notifications")
chrome_options.add_argument("--remote-debugging-port=9222")
# BUG FIX: add_experimental_option stores values in a dict, so a second call
# with the same key overwrites the first — the original two calls left only
# "disable-popup-blocking" excluded. Pass both switches in a single call.
chrome_options.add_experimental_option(
    "excludeSwitches", ["enable-automation", "disable-popup-blocking"])
chrome_options.add_experimental_option('useAutomationExtension', False)

# XPaths for the renovation-map listing page. Variable names (including the
# 'drowdown' typo) are kept as-is because other functions reference them.
pagination_xpath = "//ul[@class='uk-pagination']/li/span"
pagination_click_xpath = "//ul[@class='uk-pagination']/li/span[text() = {element}]"
obj_list_id_xpath = "//tbody[@class='js_container_table']/tr"
obj_list_xpath = "//tbody[@class='js_container_table']/tr/td[@class = 'td-cell object-address']"
obj_list_status_xpath = "//tbody[@class='js_container_table']/tr/td[@class = 'td-cell object-statuses']//div[@class = 'status']/span"
drowdown_xpath = "//div[@class='nav-sort']/div[@class='custom-select-container customSelect']/span[@class='custom-select-opener uk-input input-control'][1]"
drowdown48_xpath = "//div[@class='custom-select-container customSelect is-open']//div[@class='custom-select-panel']/div[@data-value=48]"

def get_geo_json(_geocode):
    """Geocode a free-form address via the Yandex Geocoder HTTP API.

    Parameters
    ----------
    _geocode : str
        Address to geocode (callers prefix it with 'Москва, ').

    Returns
    -------
    list
        ``[formatted_address, geo_object_dict]`` on success, or
        ``[None, None]`` when the request fails or no match is found.
    """
    params = {'apikey': auth_key,
              'geocode': _geocode,
              'format': 'json'}
    response = requests.get(url=url_maps, params=params)
    if response.status_code != 200:
        return [None, None]
    # Parse the body once instead of calling response.json() repeatedly.
    members = response.json()['response']['GeoObjectCollection']['featureMember']
    # The API can return HTTP 200 with an empty featureMember list when the
    # address could not be matched — guard against IndexError.
    if not members:
        return [None, None]
    geo_object = members[0]['GeoObject']
    address_formatted = geo_object['metaDataProperty']['GeocoderMetaData']['Address']['formatted']
    return [address_formatted, geo_object]

def _scrape_objects_page(driver, obj_list, obj_status_list):
    """Collect object ids, addresses and statuses from the results table
    currently rendered in *driver*, appending them as one-row DataFrames to
    the two accumulator lists (mutated in place)."""
    row_ids = driver.find_elements(By.XPATH, obj_list_id_xpath)
    row_addresses = driver.find_elements(By.XPATH, obj_list_xpath)
    row_statuses = driver.find_elements(By.XPATH, obj_list_status_xpath)
    for obj_id, obj, obj_status in zip(row_ids, row_addresses, row_statuses):
        obj_list.append(pd.DataFrame({'object_id': [obj_id.get_attribute('data-id')],
                                      'address': [obj.get_attribute('data-text')]}))
        obj_status_list.append(pd.DataFrame({'object_id': [obj_id.get_attribute('data-id')],
                                             'status': [obj_status.text.replace('\n', ' ')]}))


def _pagination_numbers(driver):
    """Return the numeric page labels shown in the pagination carousel,
    skipping the 'previous'/'next' ('Предыдущие'/'Следующие') arrows."""
    numbers = []
    for pagination_element in driver.find_elements(By.XPATH, pagination_xpath):
        if pagination_element.text not in ['Предыдущие', 'Следующие']:
            numbers.append(int(pagination_element.text))
    return numbers


def get_img_links():
    """Walk every page of the renovation-objects listing and scrape it.

    Opens the fr.mos.ru renovation map in a remote headless Chrome, switches
    the page size to 48, then iterates the pagination carousel until the last
    page is processed.

    Returns
    -------
    tuple
        ``(objects_df, statuses_df)`` — concatenated DataFrames with columns
        (object_id, address) and (object_id, status) respectively.
    """
    obj_list, obj_status_list = [], []
    url = 'https://fr.mos.ru/uchastnikam-programmy/karta-renovatsii/?ft=1&category[]=OLD'
    print(url)
    remote_webdriver = 'remote_chromedriver'
    with webdriver.Remote(f'{remote_webdriver}:4444/wd/hub', options=chrome_options) as driver:
        print('driver was create!')
        driver.maximize_window()
        driver.get(url)
        time.sleep(10)
        driver.find_element(By.XPATH, "//div[@class='cb_but']").click()
        print('Cookie banner was clicked!')
        # Total object count drives the expected number of result pages
        # (48 objects per page after the dropdown is switched below).
        all_obj_str = driver.find_element(By.XPATH, "//*[@id='objects-filter-submit']/font")
        all_obj_cnt = int(re.findall(pattern='[0-9]+', string=all_obj_str.text)[0])
        print('Всего объектов: ' + str(all_obj_cnt))
        page_num_cnt = (all_obj_cnt//48)+1
        print('Всего страниц: ' + str(page_num_cnt))
        # Switch the page-size dropdown to 48 items per page.
        driver.find_element(By.XPATH, drowdown_xpath).click()
        time.sleep(5)
        driver.find_element(By.XPATH, drowdown48_xpath).click()
        time.sleep(10)
        pagination_elements = _pagination_numbers(driver)
        stop_variable = 0
        while(stop_variable == 0):
            for i, element in enumerate(pagination_elements):
                if i == 0:
                    # Already on the active page — scrape without clicking.
                    print('Начало итерации по списку страниц')
                    print('Активный элемент {element} в карусели пагинации'.format(element = element))
                    _scrape_objects_page(driver, obj_list, obj_status_list)
                elif i == 5:
                    # Last slot of the 6-element carousel: scrape, then move
                    # to the next carousel of page numbers if pages remain.
                    # NOTE(review): this branch scrapes without clicking the
                    # element first (same as the original code) — verify
                    # against the live pagination that no page is duplicated.
                    print('Последний элемент {element} в карусели пагинации'.format(element = element))
                    print('Окончание итерации по списку страниц.\nПереключение на следующий список страниц.')
                    _scrape_objects_page(driver, obj_list, obj_status_list)
                    print('\n')
                    if element < page_num_cnt:
                        driver.find_element(By.XPATH, "//ul[@class='uk-pagination']/li[@class = 'uk-pagination-next-li']").click()
                        time.sleep(10)
                        # Rebinding here takes effect on the next while-pass;
                        # the current for-loop still iterates the old list.
                        pagination_elements = _pagination_numbers(driver)
                        print('Pagination list after all iterations')
                        print(pagination_elements)
                    else:
                        print('Обработан конечный элемент!')
                        stop_variable = 1
                else:
                    print('Кликнем по следующему элементу {element} в карусели пагинации'.format(element = element))
                    driver.find_element(By.XPATH, pagination_click_xpath.format(element = element)).click()
                    time.sleep(10)
                    _scrape_objects_page(driver, obj_list, obj_status_list)
                if element == page_num_cnt:
                    print('Обработан конечный элемент!')
                    stop_variable = 1
        return pd.concat(obj_list), pd.concat(obj_status_list)


def get_objects_to_stg():
    """Scrape the renovation-objects listing and load it into staging.

    Replaces stg.renovation_objects and stg.renovation_object_status with a
    fresh snapshot scraped from the website.
    """
    df_obj, df_obj_status = get_img_links()
    # Create the engine once and release its pooled connections when done
    # instead of leaving them open for the lifetime of the Airflow worker.
    engine = create_engine(conn_string)
    try:
        df_obj.to_sql(name='renovation_objects', schema='stg', con=engine,
                      if_exists='replace', index=False)
        df_obj_status.to_sql(name='renovation_object_status', schema='stg', con=engine,
                             if_exists='replace', index=False)
    finally:
        engine.dispose()

def load_from_stg_to_etl():
    """Geocode newly staged objects and merge them into the etl schema.

    Reads objects present in stg but missing from etl, resolves their
    addresses via the Yandex geocoder, appends the resolved rows to
    etl.renovation_objects, and inserts any new status rows.
    """
    # One engine for the whole task (the original created three), disposed
    # in finally so pooled connections are not leaked.
    engine = create_engine(conn_string)
    try:
        print("""Start getting address for new objects""")
        df = pd.read_sql_query(q_new_objects_for_geo, engine)
        if len(df) > 0:
            print('Определяем адрес для ' + str(len(df)) + ' объектов реновации')
            df[['address_formatted', 'address_json']] = df.apply(
                lambda x: get_geo_json('Москва, ' + x['address']),
                axis=1, result_type='expand')
            # Keep only the rows the geocoder could resolve.
            df_base = df[df['address_formatted'].notna()].copy()
            print('Успешно определили адрес для ' + str(len(df_base)) + ' объектов реновации')
            df_base.to_sql(name='renovation_objects', schema='etl', con=engine,
                           if_exists='append', index=False,
                           dtype={"address_json": sqlalchemy.types.JSON})
        print("""Start inserting renovation objects status""")
        # Engine.execute() is legacy and removed in SQLAlchemy 2.0; run the
        # statement inside an explicit transaction instead.
        with engine.begin() as conn:
            conn.execute(text(q_objects_status))
    finally:
        engine.dispose()

# DAG wiring: scrape -> stage -> geocode/merge. Triggered manually only
# (schedule_interval=None); task_ids are kept unchanged so existing Airflow
# task history is preserved.
with DAG(
        dag_id='parse_renovation',
        start_date=datetime(year=2023, month=3, day=30, hour=0, minute=0, tzinfo=LOCAL_TZ),
        schedule_interval=None,
        catchup=False,
        default_args={
            'owner': 'd.kakurin',
        },
        tags=['parsing'],
) as dag:
    start_task = EmptyOperator(
        task_id="start_task"
    )

    # Scrape the website and refresh the staging tables.
    get_objects = PythonOperator(
        task_id='get_object',
        python_callable=get_objects_to_stg
    )

    # Variable renamed from load_from_stg_to_etl: the original rebinding
    # shadowed the module-level callable of the same name.
    load_to_etl_task = PythonOperator(
        task_id='load_from_stg_to_etl',
        python_callable=load_from_stg_to_etl
    )

    end_task = EmptyOperator(
        task_id="end_task"
    )


start_task >> get_objects >> load_to_etl_task >> end_task